Merge branch 'master' of git://github.com/highfidelity/hifi into jh21402

This commit is contained in:
rick@ghostpunch.com 2017-07-20 17:03:46 -04:00
commit 52c8d77d8e
159 changed files with 9990 additions and 1427 deletions

View file

@ -1,7 +1,96 @@
# Linux build guide
Please read the [general build guide](BUILD.md) for information on dependencies required for all platforms. Only Linux specific instructions are found in this file.
### Qt5 Dependencies
## Qt5 Dependencies
Should you choose not to install Qt5 via a package manager that handles dependencies for you, you may be missing some Qt5 dependencies. On Ubuntu, for example, the following additional packages are required:
libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev
## Ubuntu 16.04 specific build guide
### Prepare environment
Install Qt:
```bash
wget http://debian.highfidelity.com/pool/h/hi/hifi-qt5.6.1_5.6.1_amd64.deb
sudo dpkg -i hifi-qt5.6.1_5.6.1_amd64.deb
```
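If you want to confirm where the package put Qt (the same path is passed to cmake later in this guide), you can list the package contents; a quick check, assuming the package is named `hifi-qt5.6.1` as the .deb filename suggests:
```bash
dpkg -L hifi-qt5.6.1 | grep -m1 'gcc_64/lib/cmake'
```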
Install build dependencies:
```bash
sudo apt-get install libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev
```
To compile interface on a server, you must also install:
```bash
sudo apt -y install libpulse0 libnss3 libnspr4 libfontconfig1 libxcursor1 libxcomposite1 libxtst6 libxslt1.1
```
Install build tools:
```bash
sudo apt install cmake
```
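The general build guide also assumes a compiler toolchain and git are available. If you are starting from a minimal Ubuntu install, you may additionally need (an assumption about your base system; skip anything already installed):
```bash
sudo apt install build-essential git
```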
### Get the code and check out the tag you need
Clone this repository:
```bash
git clone https://github.com/highfidelity/hifi.git
```
To compile a RELEASE version, check out the tag you need. First fetch and list all tags:
```bash
git fetch --tags
git tag
```
Then check out the latest tag, for example:
```bash
git checkout tags/RELEASE-6819
```
Alternatively, go to the highfidelity download page (https://highfidelity.com/download) to find the current release version. For example, if the current build is BETA 6731, type:
```bash
git checkout tags/RELEASE-6731
```
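If you would rather pick the most recent RELEASE tag automatically, here is a small sketch that assumes the tags follow the `RELEASE-<number>` naming shown above:
```bash
git checkout "$(git tag -l 'RELEASE-*' | sort -t- -k2 -n | tail -n1)"
```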
### Compiling
Create the build directory:
```bash
mkdir -p hifi/build
cd hifi/build
```
Prepare makefiles:
```bash
cmake -DQT_CMAKE_PREFIX_PATH=/usr/local/Qt5.6.1/5.6/gcc_64/lib/cmake ..
```
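If you prefer not to pass the Qt path on every cmake invocation, the same setting can also be supplied through the environment (the build has historically read `QT_CMAKE_PREFIX_PATH` from the environment; if your checkout does not, fall back to the `-D` form above):
```bash
export QT_CMAKE_PREFIX_PATH=/usr/local/Qt5.6.1/5.6/gcc_64/lib/cmake
cmake ..
```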
Start compilation and get a cup of coffee:
```bash
make domain-server assignment-client interface
```
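Compilation takes a while; make can run jobs in parallel, so on a multi-core machine you can shorten the wait, for example:
```bash
make -j"$(nproc)" domain-server assignment-client interface
```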
On a server there is no need to compile interface.
### Running the software
Running domain server:
```bash
./domain-server/domain-server
```
Running assignment client:
```bash
./assignment-client/assignment-client -n 6
```
Running interface:
```bash
./interface/interface
```
In the running interface, go to localhost.
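On a headless server you will usually want the domain server and assignment clients to keep running after you log out; a minimal sketch using nohup from the build directory (same paths as above):
```bash
nohup ./domain-server/domain-server > domain-server.log 2>&1 &
nohup ./assignment-client/assignment-client -n 6 > assignment-client.log 2>&1 &
```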

View file

@ -16,7 +16,7 @@ Contributing
git checkout -b new_branch_name
```
4. Code
* Follow the [coding standard](https://wiki.highfidelity.com/wiki/Coding_Standards)
* Follow the [coding standard](https://docs.highfidelity.com/build-guide/coding-standards)
5. Commit
* Use [well formed commit messages](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html)
6. Update your branch

View file

@ -23,6 +23,7 @@
#include <AvatarHashMap.h>
#include <AudioInjectorManager.h>
#include <AssetClient.h>
#include <DebugDraw.h>
#include <LocationScriptingInterface.h>
#include <MessagesClient.h>
#include <NetworkAccessManager.h>
@ -50,14 +51,14 @@
#include "RecordingScriptingInterface.h"
#include "AbstractAudioInterface.h"
#include "AvatarAudioTimer.h"
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
Agent::Agent(ReceivedMessage& message) :
ThreadedAssignment(message),
_receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
_audioGate(AudioConstants::SAMPLE_RATE, AudioConstants::MONO)
_audioGate(AudioConstants::SAMPLE_RATE, AudioConstants::MONO),
_avatarAudioTimer(this)
{
_entityEditSender.setPacketsPerSecond(DEFAULT_ENTITY_PPS_PER_SCRIPT);
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
@ -81,6 +82,9 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();
// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
@ -92,6 +96,14 @@ Agent::Agent(ReceivedMessage& message) :
this, "handleOctreePacket");
packetReceiver.registerListener(PacketType::Jurisdiction, this, "handleJurisdictionPacket");
packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");
// 100Hz timer for audio
const int TARGET_INTERVAL_MSEC = 10; // 10ms
connect(&_avatarAudioTimer, &QTimer::timeout, this, &Agent::processAgentAvatarAudio);
_avatarAudioTimer.setSingleShot(false);
_avatarAudioTimer.setInterval(TARGET_INTERVAL_MSEC);
_avatarAudioTimer.setTimerType(Qt::PreciseTimer);
}
void Agent::playAvatarSound(SharedSoundPointer sound) {
@ -471,14 +483,7 @@ void Agent::executeScript() {
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
// 100Hz timer for audio
AvatarAudioTimer* audioTimerWorker = new AvatarAudioTimer();
audioTimerWorker->moveToThread(&_avatarAudioTimerThread);
connect(audioTimerWorker, &AvatarAudioTimer::avatarTick, this, &Agent::processAgentAvatarAudio);
connect(this, &Agent::startAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::start);
connect(this, &Agent::stopAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::stop);
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
_avatarAudioTimerThread.start();
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
@ -557,7 +562,7 @@ void Agent::setIsAvatar(bool isAvatar) {
_avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
// tell the avatarAudioTimer to start ticking
emit startAvatarAudioTimer();
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
}
@ -586,7 +591,7 @@ void Agent::setIsAvatar(bool isAvatar) {
nodeList->sendPacket(std::move(packet), *node);
});
}
emit stopAvatarAudioTimer();
QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
}
}
@ -814,8 +819,7 @@ void Agent::aboutToFinish() {
DependencyManager::destroy<recording::Recorder>();
DependencyManager::destroy<recording::ClipCache>();
emit stopAvatarAudioTimer();
_avatarAudioTimerThread.quit();
QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
// cleanup codec & encoder
if (_codec && _encoder) {

View file

@ -81,9 +81,6 @@ private slots:
void processAgentAvatar();
void processAgentAvatarAudio();
signals:
void startAvatarAudioTimer();
void stopAvatarAudioTimer();
private:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);
@ -118,7 +115,7 @@ private:
CodecPluginPointer _codec;
QString _selectedCodecName;
Encoder* _encoder { nullptr };
QThread _avatarAudioTimerThread;
QTimer _avatarAudioTimer;
bool _flushEncoder { false };
};

View file

@ -91,9 +91,22 @@ void AssignmentClientMonitor::simultaneousWaitOnChildren(int waitMsecs) {
}
}
void AssignmentClientMonitor::childProcessFinished(qint64 pid) {
void AssignmentClientMonitor::childProcessFinished(qint64 pid, int exitCode, QProcess::ExitStatus exitStatus) {
auto message = "Child process " + QString::number(pid) + " has %1 with exit code " + QString::number(exitCode) + ".";
if (_childProcesses.remove(pid)) {
qDebug() << "Child process" << pid << "has finished. Removed from internal map.";
message.append(" Removed from internal map.");
} else {
message.append(" Could not find process in internal map.");
}
switch (exitStatus) {
case QProcess::NormalExit:
qDebug() << qPrintable(message.arg("returned"));
break;
case QProcess::CrashExit:
qCritical() << qPrintable(message.arg("crashed"));
break;
}
}
@ -221,7 +234,9 @@ void AssignmentClientMonitor::spawnChildClient() {
auto pid = assignmentClient->processId();
// make sure we hear that this process has finished when it does
connect(assignmentClient, static_cast<void(QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished),
this, [this, pid]() { childProcessFinished(pid); });
this, [this, pid](int exitCode, QProcess::ExitStatus exitStatus) {
childProcessFinished(pid, exitCode, exitStatus);
});
qDebug() << "Spawned a child client with PID" << assignmentClient->processId();

View file

@ -44,7 +44,7 @@ public:
void stopChildProcesses();
private slots:
void checkSpares();
void childProcessFinished(qint64 pid);
void childProcessFinished(qint64 pid, int exitCode, QProcess::ExitStatus exitStatus);
void handleChildStatusPacket(QSharedPointer<ReceivedMessage> message);
bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url, bool skipSubHandler = false) override;

View file

@ -1,36 +0,0 @@
//
// AvatarAudioTimer.cpp
// assignment-client/src
//
// Created by David Kelly on 10/12/13.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDebug>
#include <SharedUtil.h>
#include "AvatarAudioTimer.h"
// this should send a signal every 10ms, with pretty good precision. Hardcoding
// to 10ms since that's what you'd want for audio.
void AvatarAudioTimer::start() {
auto startTime = usecTimestampNow();
quint64 frameCounter = 0;
const int TARGET_INTERVAL_USEC = 10000; // 10ms
while (!_quit) {
++frameCounter;
// tick every 10ms from startTime
quint64 targetTime = startTime + frameCounter * TARGET_INTERVAL_USEC;
quint64 now = usecTimestampNow();
// avoid quint64 underflow
if (now < targetTime) {
usleep(targetTime - now);
}
emit avatarTick();
}
qDebug() << "AvatarAudioTimer is finished";
}

View file

@ -1,31 +0,0 @@
//
// AvatarAudioTimer.h
// assignment-client/src
//
// Created by David Kelly on 10/12/13.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarAudioTimer_h
#define hifi_AvatarAudioTimer_h
#include <QtCore/QObject>
class AvatarAudioTimer : public QObject {
Q_OBJECT
signals:
void avatarTick();
public slots:
void start();
void stop() { _quit = true; }
private:
bool _quit { false };
};
#endif //hifi_AvatarAudioTimer_h

View file

@ -76,7 +76,7 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
_function = &AudioMixerSlave::mix;
_configure = [&](AudioMixerSlave& slave) {
_configure = [=](AudioMixerSlave& slave) {
slave.configureMix(_begin, _end, _frame, _throttlingRatio);
};
_frame = frame;

View file

@ -108,9 +108,6 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
void AvatarMixerClientData::removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other) {
if (isRadiusIgnoring(other)) {
_radiusIgnoredOthers.erase(other);
auto exitingSpaceBubblePacket = NLPacket::create(PacketType::ExitingSpaceBubble, NUM_BYTES_RFC4122_UUID);
exitingSpaceBubblePacket->write(other.toRfc4122());
DependencyManager::get<NodeList>()->sendUnreliablePacket(*exitingSpaceBubblePacket, *self);
}
}

View file

@ -69,7 +69,7 @@ static AvatarMixerSlave slave;
void AvatarMixerSlavePool::processIncomingPackets(ConstIter begin, ConstIter end) {
_function = &AvatarMixerSlave::processIncomingPackets;
_configure = [&](AvatarMixerSlave& slave) {
_configure = [=](AvatarMixerSlave& slave) {
slave.configure(begin, end);
};
run(begin, end);
@ -79,7 +79,7 @@ void AvatarMixerSlavePool::broadcastAvatarData(ConstIter begin, ConstIter end,
p_high_resolution_clock::time_point lastFrameTimestamp,
float maxKbpsPerNode, float throttlingRatio) {
_function = &AvatarMixerSlave::broadcastAvatarData;
_configure = [&](AvatarMixerSlave& slave) {
_configure = [=](AvatarMixerSlave& slave) {
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio);
};
run(begin, end);

View file

@ -50,6 +50,12 @@ EntityServer::~EntityServer() {
tree->removeNewlyCreatedHook(this);
}
void EntityServer::aboutToFinish() {
DependencyManager::get<ResourceManager>()->cleanup();
OctreeServer::aboutToFinish();
}
void EntityServer::handleEntityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
if (_octreeInboundPacketProcessor) {
_octreeInboundPacketProcessor->queueReceivedPacket(message, senderNode);

View file

@ -59,6 +59,8 @@ public:
virtual void trackSend(const QUuid& dataID, quint64 dataLastEdited, const QUuid& sessionID) override;
virtual void trackViewerGone(const QUuid& sessionID) override;
virtual void aboutToFinish() override;
public slots:
virtual void nodeAdded(SharedNodePointer node) override;
virtual void nodeKilled(SharedNodePointer node) override;

View file

@ -81,7 +81,6 @@ bool OctreeSendThread::process() {
// don't do any send processing until the initial load of the octree is complete...
if (_myServer->isInitialLoadComplete()) {
if (auto node = _node.lock()) {
_nodeMissingCount = 0;
OctreeQueryNode* nodeData = static_cast<OctreeQueryNode*>(node->getLinkedData());
// Sometimes the node data has not yet been linked, in which case we can't really do anything
@ -129,8 +128,7 @@ AtomicUIntStat OctreeSendThread::_totalSpecialBytes { 0 };
AtomicUIntStat OctreeSendThread::_totalSpecialPackets { 0 };
int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, int& trueBytesSent,
int& truePacketsSent, bool dontSuppressDuplicate) {
int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, bool dontSuppressDuplicate) {
OctreeServer::didHandlePacketSend(this);
// if we're shutting down, then exit early
@ -141,15 +139,14 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
bool debug = _myServer->wantsDebugSending();
quint64 now = usecTimestampNow();
bool packetSent = false; // did we send a packet?
int packetsSent = 0;
int numPackets = 0;
// Here's where we check to see if this packet is a duplicate of the last packet. If it is, we will silently
// obscure the packet and not send it. This allows the callers and upper level logic to not need to know about
// this rate control savings.
if (!dontSuppressDuplicate && nodeData->shouldSuppressDuplicatePacket()) {
nodeData->resetOctreePacket(); // we still need to reset it though!
return packetsSent; // without sending...
return numPackets; // without sending...
}
// If we've got a stats message ready to send, then see if we can piggyback them together
@ -163,12 +160,15 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
// copy octree message to back of stats message
statsPacket.write(nodeData->getPacket().getData(), nodeData->getPacket().getDataSize());
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted", since
int numBytes = statsPacket.getDataSize();
_totalBytes += numBytes;
_totalPackets++;
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted"
// there was nothing else to send.
int thisWastedBytes = 0;
_totalWastedBytes += thisWastedBytes;
_totalBytes += statsPacket.getDataSize();
_totalPackets++;
//_totalWastedBytes += 0;
_trueBytesSent += numBytes;
numPackets++;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@ -191,18 +191,22 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
// actually send it
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(statsPacket, *node);
packetSent = true;
} else {
// not enough room in the packet, send two packets
// first packet
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(statsPacket, *node);
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted", since
int numBytes = statsPacket.getDataSize();
_totalBytes += numBytes;
_totalPackets++;
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted"
// there was nothing else to send.
int thisWastedBytes = 0;
_totalWastedBytes += thisWastedBytes;
_totalBytes += statsPacket.getDataSize();
_totalPackets++;
//_totalWastedBytes += 0;
_trueBytesSent += numBytes;
numPackets++;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@ -221,19 +225,18 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
trueBytesSent += statsPacket.getDataSize();
truePacketsSent++;
packetsSent++;
// second packet
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(nodeData->getPacket(), *node);
packetSent = true;
int packetSizeWithHeader = nodeData->getPacket().getDataSize();
thisWastedBytes = udt::MAX_PACKET_SIZE - packetSizeWithHeader;
_totalWastedBytes += thisWastedBytes;
_totalBytes += nodeData->getPacket().getDataSize();
numBytes = nodeData->getPacket().getDataSize();
_totalBytes += numBytes;
_totalPackets++;
// we count wasted bytes here because we were unable to fit the stats packet
thisWastedBytes = udt::MAX_PACKET_SIZE - numBytes;
_totalWastedBytes += thisWastedBytes;
_trueBytesSent += numBytes;
numPackets++;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@ -259,13 +262,14 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
// just send the octree packet
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(nodeData->getPacket(), *node);
packetSent = true;
int packetSizeWithHeader = nodeData->getPacket().getDataSize();
int thisWastedBytes = udt::MAX_PACKET_SIZE - packetSizeWithHeader;
_totalWastedBytes += thisWastedBytes;
_totalBytes += packetSizeWithHeader;
int numBytes = nodeData->getPacket().getDataSize();
_totalBytes += numBytes;
_totalPackets++;
int thisWastedBytes = udt::MAX_PACKET_SIZE - numBytes;
_totalWastedBytes += thisWastedBytes;
numPackets++;
_trueBytesSent += numBytes;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@ -280,23 +284,21 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
qDebug() << "Sending packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
" timestamp: " << timestamp <<
" size: " << packetSizeWithHeader << " [" << _totalBytes <<
" size: " << numBytes << " [" << _totalBytes <<
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
}
}
// remember to track our stats
if (packetSent) {
if (numPackets > 0) {
nodeData->stats.packetSent(nodeData->getPacket().getPayloadSize());
trueBytesSent += nodeData->getPacket().getPayloadSize();
truePacketsSent++;
packetsSent++;
nodeData->octreePacketSent();
nodeData->resetOctreePacket();
}
return packetsSent;
_truePacketsSent += numPackets;
return numPackets;
}
/// Version of octree element distributor that sends the deepest LOD level at once
@ -315,13 +317,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
preDistributionProcessing();
}
// calculate max number of packets that can be sent during this interval
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
int truePacketsSent = 0;
int trueBytesSent = 0;
int packetsSentThisInterval = 0;
_truePacketsSent = 0;
_trueBytesSent = 0;
_packetsSentThisInterval = 0;
bool isFullScene = nodeData->shouldForceFullScene();
if (isFullScene) {
@ -334,17 +332,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
&& ((!viewFrustumChanged && nodeData->getViewFrustumJustStoppedChanging()) || nodeData->hasLodChanged()));
}
bool somethingToSend = true; // assume we have something
// If our packet already has content in it, then we must use the color choice of the waiting packet.
// If we're starting a fresh packet, then...
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
// the clients requested color state.
// If we have a packet waiting, and our desired want color, doesn't match the current waiting packets color
// then let's just send that waiting packet.
if (nodeData->isPacketWaiting()) {
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
// send the waiting packet
_packetsSentThisInterval += handlePacketSend(node, nodeData);
} else {
nodeData->resetOctreePacket();
}
@ -375,8 +365,7 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
//unsigned long encodeTime = nodeData->stats.getTotalEncodeTime();
//unsigned long elapsedTime = nodeData->stats.getElapsedTime();
int packetsJustSent = handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent, isFullScene);
packetsSentThisInterval += packetsJustSent;
_packetsSentThisInterval += handlePacketSend(node, nodeData, isFullScene);
// If we're starting a full scene, then definitely we want to empty the elementBag
if (isFullScene) {
@ -404,185 +393,44 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
// If we have something in our elementBag, then turn them into packets and send them out...
if (!nodeData->elementBag.isEmpty()) {
int bytesWritten = 0;
quint64 start = usecTimestampNow();
// TODO: add these to stats page
//quint64 startCompressTimeMsecs = OctreePacketData::getCompressContentTime() / 1000;
//quint64 startCompressCalls = OctreePacketData::getCompressContentCalls();
int extraPackingAttempts = 0;
bool completedScene = false;
while (somethingToSend && packetsSentThisInterval < maxPacketsPerInterval && !nodeData->isShuttingDown()) {
float lockWaitElapsedUsec = OctreeServer::SKIP_TIME;
float encodeElapsedUsec = OctreeServer::SKIP_TIME;
float compressAndWriteElapsedUsec = OctreeServer::SKIP_TIME;
float packetSendingElapsedUsec = OctreeServer::SKIP_TIME;
quint64 startInside = usecTimestampNow();
bool lastNodeDidntFit = false; // assume each node fits
if (!nodeData->elementBag.isEmpty()) {
quint64 lockWaitStart = usecTimestampNow();
_myServer->getOctree()->withReadLock([&]{
quint64 lockWaitEnd = usecTimestampNow();
lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
quint64 encodeStart = usecTimestampNow();
OctreeElementPointer subTree = nodeData->elementBag.extract();
if (!subTree) {
return;
}
float octreeSizeScale = nodeData->getOctreeSizeScale();
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
int boundaryLevelAdjust = boundaryLevelAdjustClient +
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
EncodeBitstreamParams params(INT_MAX, WANT_EXISTS_BITS, DONT_CHOP,
viewFrustumChanged, boundaryLevelAdjust, octreeSizeScale,
isFullScene, _myServer->getJurisdiction(), nodeData);
nodeData->copyCurrentViewFrustum(params.viewFrustum);
if (viewFrustumChanged) {
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
}
// Our trackSend() function is implemented by the server subclass, and will be called back
// during the encodeTreeBitstream() as new entities/data elements are sent
params.trackSend = [this, node](const QUuid& dataID, quint64 dataEdited) {
_myServer->trackSend(dataID, dataEdited, node->getUUID());
};
// TODO: should this include the lock time or not? This stat is sent down to the client,
// it seems like it may be a good idea to include the lock time as part of the encode time
// are reported to client. Since you can encode without the lock
nodeData->stats.encodeStarted();
bytesWritten = _myServer->getOctree()->encodeTreeBitstream(subTree, &_packetData, nodeData->elementBag, params);
quint64 encodeEnd = usecTimestampNow();
encodeElapsedUsec = (float)(encodeEnd - encodeStart);
// If after calling encodeTreeBitstream() there are no nodes left to send, then we know we've
// sent the entire scene. We want to know this below so we'll actually write this content into
// the packet and send it
completedScene = nodeData->elementBag.isEmpty();
if (params.stopReason == EncodeBitstreamParams::DIDNT_FIT) {
lastNodeDidntFit = true;
extraPackingAttempts++;
}
nodeData->stats.encodeStopped();
});
} else {
// If the bag was empty then we didn't even attempt to encode, and so we know the bytesWritten were 0
bytesWritten = 0;
somethingToSend = false; // this will cause us to drop out of the loop...
}
// If the last node didn't fit, but we're in compressed mode, then we actually want to see if we can fit a
// little bit more in this packet. To do this we write into the packet, but don't send it yet, we'll
// keep attempting to write in compressed mode to add more compressed segments
// We only consider sending anything if there is something in the _packetData to send... But
// if bytesWritten == 0 it means either the subTree couldn't fit or we had an empty bag... Both cases
// mean we should send the previous packet contents and reset it.
if (completedScene || lastNodeDidntFit) {
if (_packetData.hasContent()) {
quint64 compressAndWriteStart = usecTimestampNow();
// if for some reason the finalized size is greater than our available size, then probably the "compressed"
// form actually inflated beyond our padding, and in this case we will send the current packet, then
// write to out new packet...
unsigned int writtenSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
if (writtenSize > nodeData->getAvailable()) {
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
}
nodeData->writeToPacket(_packetData.getFinalizedData(), _packetData.getFinalizedSize());
quint64 compressAndWriteEnd = usecTimestampNow();
compressAndWriteElapsedUsec = (float)(compressAndWriteEnd - compressAndWriteStart);
}
// If we're not running compressed, then we know we can just send now. Or if we're running compressed, but
// the packet doesn't have enough space to bother attempting to pack more...
bool sendNow = true;
if (!completedScene && (nodeData->getAvailable() >= MINIMUM_ATTEMPT_MORE_PACKING &&
extraPackingAttempts <= REASONABLE_NUMBER_OF_PACKING_ATTEMPTS)) {
sendNow = false; // try to pack more
}
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
if (sendNow) {
quint64 packetSendingStart = usecTimestampNow();
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
quint64 packetSendingEnd = usecTimestampNow();
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
extraPackingAttempts = 0;
} else {
// If we're in compressed mode, then we want to see if we have room for more in this wire packet.
// but we've finalized the _packetData, so we want to start a new section, we will do that by
// resetting the packet settings with the max uncompressed size of our current available space
// in the wire packet. We also include room for our section header, and a little bit of padding
to account for the fact that when compressing small amounts of data, we sometimes end up with
// a larger compressed size then uncompressed size
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
}
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
}
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);
OctreeServer::trackEncodeTime(encodeElapsedUsec);
OctreeServer::trackCompressAndWriteTime(compressAndWriteElapsedUsec);
OctreeServer::trackPacketSendingTime(packetSendingElapsedUsec);
quint64 endInside = usecTimestampNow();
quint64 elapsedInsideUsecs = endInside - startInside;
OctreeServer::trackInsideTime((float)elapsedInsideUsecs);
}
if (somethingToSend && _myServer->wantsVerboseDebug()) {
qCDebug(octree) << "Hit PPS Limit, packetsSentThisInterval =" << packetsSentThisInterval
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;
}
traverseTreeAndSendContents(node, nodeData, viewFrustumChanged, isFullScene);
// Here's where we can/should allow the server to send other data...
// send the environment packet
// TODO: should we turn this into a while loop to better handle sending multiple special packets
if (_myServer->hasSpecialPacketsToSend(node) && !nodeData->isShuttingDown()) {
int specialPacketsSent = 0;
trueBytesSent += _myServer->sendSpecialPackets(node, nodeData, specialPacketsSent);
int specialBytesSent = _myServer->sendSpecialPackets(node, nodeData, specialPacketsSent);
nodeData->resetOctreePacket(); // because nodeData's _sequenceNumber has changed
truePacketsSent += specialPacketsSent;
packetsSentThisInterval += specialPacketsSent;
_truePacketsSent += specialPacketsSent;
_trueBytesSent += specialBytesSent;
_packetsSentThisInterval += specialPacketsSent;
_totalPackets += specialPacketsSent;
_totalBytes += trueBytesSent;
_totalBytes += specialBytesSent;
_totalSpecialPackets += specialPacketsSent;
_totalSpecialBytes += trueBytesSent;
_totalSpecialBytes += specialBytesSent;
}
// calculate max number of packets that can be sent during this interval
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
// Re-send packets that were nacked by the client
while (nodeData->hasNextNackedPacket() && packetsSentThisInterval < maxPacketsPerInterval) {
while (nodeData->hasNextNackedPacket() && _packetsSentThisInterval < maxPacketsPerInterval) {
const NLPacket* packet = nodeData->getNextNackedPacket();
if (packet) {
DependencyManager::get<NodeList>()->sendUnreliablePacket(*packet, *node);
truePacketsSent++;
packetsSentThisInterval++;
int numBytes = packet->getDataSize();
_truePacketsSent++;
_trueBytesSent += numBytes;
_packetsSentThisInterval++;
_totalBytes += packet->getDataSize();
_totalPackets++;
_totalBytes += numBytes;
_totalWastedBytes += udt::MAX_PACKET_SIZE - packet->getDataSize();
}
}
@ -591,12 +439,6 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
int elapsedmsec = (end - start) / USECS_PER_MSEC;
OctreeServer::trackLoopTime(elapsedmsec);
// TODO: add these to stats page
//quint64 endCompressCalls = OctreePacketData::getCompressContentCalls();
//int elapsedCompressCalls = endCompressCalls - startCompressCalls;
//quint64 endCompressTimeMsecs = OctreePacketData::getCompressContentTime() / 1000;
//int elapsedCompressTimeMsecs = endCompressTimeMsecs - startCompressTimeMsecs;
// if after sending packets we've emptied our bag, then we want to remember that we've sent all
// the octree elements from the current view frustum
if (nodeData->elementBag.isEmpty()) {
@ -606,17 +448,147 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
// If this was a full scene then make sure we really send out a stats packet at this point so that
// the clients will know the scene is stable
if (isFullScene) {
int thisTrueBytesSent = 0;
int thisTruePacketsSent = 0;
nodeData->stats.sceneCompleted();
int packetsJustSent = handlePacketSend(node, nodeData, thisTrueBytesSent, thisTruePacketsSent, true);
_totalBytes += thisTrueBytesSent;
_totalPackets += thisTruePacketsSent;
truePacketsSent += packetsJustSent;
handlePacketSend(node, nodeData, true);
}
}
} // end if bag wasn't empty, and so we sent stuff...
return truePacketsSent;
return _truePacketsSent;
}
void OctreeSendThread::traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene) {
// calculate max number of packets that can be sent during this interval
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
int extraPackingAttempts = 0;
bool completedScene = false;
bool somethingToSend = true; // assume we have something
while (somethingToSend && _packetsSentThisInterval < maxPacketsPerInterval && !nodeData->isShuttingDown()) {
float lockWaitElapsedUsec = OctreeServer::SKIP_TIME;
float encodeElapsedUsec = OctreeServer::SKIP_TIME;
float compressAndWriteElapsedUsec = OctreeServer::SKIP_TIME;
float packetSendingElapsedUsec = OctreeServer::SKIP_TIME;
quint64 startInside = usecTimestampNow();
bool lastNodeDidntFit = false; // assume each node fits
if (!nodeData->elementBag.isEmpty()) {
quint64 lockWaitStart = usecTimestampNow();
_myServer->getOctree()->withReadLock([&]{
quint64 lockWaitEnd = usecTimestampNow();
lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
quint64 encodeStart = usecTimestampNow();
OctreeElementPointer subTree = nodeData->elementBag.extract();
if (!subTree) {
return;
}
float octreeSizeScale = nodeData->getOctreeSizeScale();
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
int boundaryLevelAdjust = boundaryLevelAdjustClient +
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
EncodeBitstreamParams params(INT_MAX, WANT_EXISTS_BITS, DONT_CHOP,
viewFrustumChanged, boundaryLevelAdjust, octreeSizeScale,
isFullScene, _myServer->getJurisdiction(), nodeData);
nodeData->copyCurrentViewFrustum(params.viewFrustum);
if (viewFrustumChanged) {
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
}
// Our trackSend() function is implemented by the server subclass, and will be called back
// during the encodeTreeBitstream() as new entities/data elements are sent
params.trackSend = [this](const QUuid& dataID, quint64 dataEdited) {
_myServer->trackSend(dataID, dataEdited, _nodeUuid);
};
// TODO: should this include the lock time or not? This stat is sent down to the client,
// it seems like it may be a good idea to include the lock time as part of the encode time
// are reported to client. Since you can encode without the lock
nodeData->stats.encodeStarted();
// NOTE: this is where the tree "contents" are actually packed
_myServer->getOctree()->encodeTreeBitstream(subTree, &_packetData, nodeData->elementBag, params);
quint64 encodeEnd = usecTimestampNow();
encodeElapsedUsec = (float)(encodeEnd - encodeStart);
// If after calling encodeTreeBitstream() there are no nodes left to send, then we know we've
// sent the entire scene. We want to know this below so we'll actually write this content into
// the packet and send it
completedScene = nodeData->elementBag.isEmpty();
if (params.stopReason == EncodeBitstreamParams::DIDNT_FIT) {
lastNodeDidntFit = true;
extraPackingAttempts++;
}
nodeData->stats.encodeStopped();
});
} else {
somethingToSend = false; // this will cause us to drop out of the loop...
}
if (completedScene || lastNodeDidntFit) {
// we probably want to flush what has accumulated in nodeData but:
// do we have more data to send? and is there room?
if (_packetData.hasContent()) {
// yes, more data to send
quint64 compressAndWriteStart = usecTimestampNow();
unsigned int additionalSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
if (additionalSize > nodeData->getAvailable()) {
// no room --> flush what we've got
_packetsSentThisInterval += handlePacketSend(node, nodeData);
}
// either there is room, or we've flushed and reset nodeData's data buffer
// so we can transfer whatever is in _packetData to nodeData
nodeData->writeToPacket(_packetData.getFinalizedData(), _packetData.getFinalizedSize());
compressAndWriteElapsedUsec = (float)(usecTimestampNow()- compressAndWriteStart);
}
bool sendNow = completedScene ||
nodeData->getAvailable() < MINIMUM_ATTEMPT_MORE_PACKING ||
extraPackingAttempts > REASONABLE_NUMBER_OF_PACKING_ATTEMPTS;
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
if (sendNow) {
quint64 packetSendingStart = usecTimestampNow();
_packetsSentThisInterval += handlePacketSend(node, nodeData);
quint64 packetSendingEnd = usecTimestampNow();
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
extraPackingAttempts = 0;
} else {
// We want to see if we have room for more in this wire packet but we've copied the _packetData,
// so we want to start a new section. We will do that by resetting the packet settings with the max
// size of our current available space in the wire packet plus room for our section header and a
// little bit of padding.
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
}
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
}
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);
OctreeServer::trackEncodeTime(encodeElapsedUsec);
OctreeServer::trackCompressAndWriteTime(compressAndWriteElapsedUsec);
OctreeServer::trackPacketSendingTime(packetSendingElapsedUsec);
quint64 endInside = usecTimestampNow();
quint64 elapsedInsideUsecs = endInside - startInside;
OctreeServer::trackInsideTime((float)elapsedInsideUsecs);
}
if (somethingToSend && _myServer->wantsVerboseDebug()) {
qCDebug(octree) << "Hit PPS Limit, packetsSentThisInterval =" << _packetsSentThisInterval
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;
}
}

View file

@ -34,7 +34,7 @@ public:
void setIsShuttingDown();
bool isShuttingDown() { return _isShuttingDown; }
QUuid getNodeUuid() const { return _nodeUuid; }
static AtomicUIntStat _totalBytes;
@ -53,20 +53,23 @@ protected:
/// Called before a packetDistributor pass to allow for pre-distribution processing
virtual void preDistributionProcessing() {};
virtual void traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene);
OctreeServer* _myServer { nullptr };
QWeakPointer<Node> _node;
private:
int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, int& trueBytesSent, int& truePacketsSent, bool dontSuppressDuplicate = false);
int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, bool dontSuppressDuplicate = false);
int packetDistributor(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged);
QUuid _nodeUuid;
OctreePacketData _packetData;
int _nodeMissingCount { 0 };
int _truePacketsSent { 0 }; // available for debug stats
int _trueBytesSent { 0 }; // available for debug stats
int _packetsSentThisInterval { 0 }; // used for bandwidth throttle condition
bool _isShuttingDown { false };
};

View file

@ -16,6 +16,7 @@
#include <AudioConstants.h>
#include <AudioInjectorManager.h>
#include <ClientServerUtils.h>
#include <DebugDraw.h>
#include <EntityNodeData.h>
#include <EntityScriptingInterface.h>
#include <LogHandler.h>
@ -67,6 +68,9 @@ EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssig
DependencyManager::set<ScriptCache>();
DependencyManager::set<ScriptEngines>(ScriptEngine::ENTITY_SERVER_SCRIPT);
// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListenerForTypes({ PacketType::OctreeStats, PacketType::EntityData, PacketType::EntityErase },
this, "handleOctreePacket");

View file

@ -49,7 +49,7 @@
Var STR_CONTAINS_VAR_3
Var STR_CONTAINS_VAR_4
Var STR_RETURN_VAR
Function StrContains
Exch $STR_NEEDLE
Exch 1
@ -438,6 +438,7 @@ Var DesktopServerCheckbox
Var ServerStartupCheckbox
Var LaunchServerNowCheckbox
Var LaunchClientNowCheckbox
Var CleanInstallCheckbox
Var CurrentOffset
Var OffsetUnits
Var CopyFromProductionCheckbox
@ -475,27 +476,18 @@ Function PostInstallOptionsPage
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Create a desktop shortcut for @INTERFACE_HF_SHORTCUT_NAME@"
Pop $DesktopClientCheckbox
IntOp $CurrentOffset $CurrentOffset + 15
; set the checkbox state depending on what is present in the registry
!insertmacro SetPostInstallOption $DesktopClientCheckbox @CLIENT_DESKTOP_SHORTCUT_REG_KEY@ ${BST_CHECKED}
${EndIf}
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Create a desktop shortcut for @CONSOLE_HF_SHORTCUT_NAME@"
Pop $DesktopServerCheckbox
IntOp $CurrentOffset $CurrentOffset + 15
; set the checkbox state depending on what is present in the registry
!insertmacro SetPostInstallOption $DesktopServerCheckbox @CONSOLE_DESKTOP_SHORTCUT_REG_KEY@ ${BST_UNCHECKED}
IntOp $CurrentOffset $CurrentOffset + 15
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @CONSOLE_HF_SHORTCUT_NAME@ on startup"
Pop $ServerStartupCheckbox
; set the checkbox state depending on what is present in the registry
!insertmacro SetPostInstallOption $ServerStartupCheckbox @CONSOLE_STARTUP_REG_KEY@ ${BST_CHECKED}
IntOp $CurrentOffset $CurrentOffset + 15
${EndIf}
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
@ -511,17 +503,33 @@ Function PostInstallOptionsPage
IntOp $CurrentOffset $CurrentOffset + 15
${EndIf}
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @INTERFACE_HF_SHORTCUT_NAME@ after install"
Pop $LaunchClientNowCheckbox
; set the checkbox state depending on what is present in the registry
!insertmacro SetPostInstallOption $LaunchClientNowCheckbox @CLIENT_LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
${StrContains} $substringResult "/forceNoLaunchClient" $CMDLINE
${IfNot} $substringResult == ""
${NSD_SetState} $LaunchClientNowCheckbox ${BST_UNCHECKED}
${EndIf}
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @INTERFACE_HF_SHORTCUT_NAME@ after install"
Pop $LaunchClientNowCheckbox
IntOp $CurrentOffset $CurrentOffset + 30
; set the checkbox state depending on what is present in the registry
!insertmacro SetPostInstallOption $LaunchClientNowCheckbox @CLIENT_LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
${StrContains} $substringResult "/forceNoLaunchClient" $CMDLINE
${IfNot} $substringResult == ""
${NSD_SetState} $LaunchClientNowCheckbox ${BST_UNCHECKED}
${EndIf}
${EndIf}
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @CONSOLE_HF_SHORTCUT_NAME@ on startup"
Pop $ServerStartupCheckbox
IntOp $CurrentOffset $CurrentOffset + 15
; set the checkbox state depending on what is present in the registry
!insertmacro SetPostInstallOption $ServerStartupCheckbox @CONSOLE_STARTUP_REG_KEY@ ${BST_CHECKED}
${EndIf}
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Perform a clean install (Delete older settings and content)"
Pop $CleanInstallCheckbox
IntOp $CurrentOffset $CurrentOffset + 15
${EndIf}
${If} @PR_BUILD@ == 1
@ -543,7 +551,7 @@ Function PostInstallOptionsPage
${NSD_SetState} $CopyFromProductionCheckbox ${BST_CHECKED}
${EndIf}
nsDialogs::Show
FunctionEnd
@ -558,6 +566,7 @@ Var ServerStartupState
Var LaunchServerNowState
Var LaunchClientNowState
Var CopyFromProductionState
Var CleanInstallState
Function ReadPostInstallOptions
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
@ -579,13 +588,18 @@ Function ReadPostInstallOptions
${EndIf}
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
; check if we need to launch the server post-install
${NSD_GetState} $LaunchServerNowCheckbox $LaunchServerNowState
; check if we need to launch the server post-install
${NSD_GetState} $LaunchServerNowCheckbox $LaunchServerNowState
${EndIf}
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
; check if we need to launch the client post-install
${NSD_GetState} $LaunchClientNowCheckbox $LaunchClientNowState
; check if we need to launch the client post-install
${NSD_GetState} $LaunchClientNowCheckbox $LaunchClientNowState
${EndIf}
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
; check if the user asked for a clean install
${NSD_GetState} $CleanInstallCheckbox $CleanInstallState
${EndIf}
FunctionEnd
@ -628,6 +642,15 @@ Function HandlePostInstallOptions
!insertmacro WritePostInstallOption @CONSOLE_STARTUP_REG_KEY@ NO
${EndIf}
${EndIf}
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
; check if the user asked for a clean install
${If} $CleanInstallState == ${BST_CHECKED}
SetShellVarContext current
RMDir /r "$APPDATA\@BUILD_ORGANIZATION@"
RMDir /r "$LOCALAPPDATA\@BUILD_ORGANIZATION@"
${EndIf}
${EndIf}
${If} @PR_BUILD@ == 1

View file

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve" opacity="0.33">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<g id="Layer_2">
</g>
<g>
<path class="st0" d="M20.7,29.7c-2.2,2.2-4.4,4.4-6.7,6.7c-0.5-0.5-1.1-1.1-1.6-1.6c2.2-2.2,4.4-4.4,6.7-6.7l-1.8-1.8
c-2.6,2.5-5.1,5.1-7.7,7.6c-0.5,0.5-0.9,1.1-1,1.8C8.3,37.8,8,39.8,7.7,42c0.2,0,0.4,0,0.5,0c2-0.4,4-0.8,5.9-1.2
c0.4-0.1,0.8-0.3,1.1-0.6c2.7-2.6,5.3-5.3,8-8L20.7,29.7z"/>
<path class="st0" d="M31.1,11c0.8-0.8,1.8-1.8,2.7-2.7C34.2,8,34.6,8,34.9,8.4c1.6,1.6,3.1,3.1,4.7,4.7c0.4,0.4,0.4,0.8,0,1.2
c-0.9,0.9-1.8,1.8-2.7,2.7C35,15,33.1,13,31.1,11z"/>
<path class="st0" d="M33,25.9c-0.4,0.1-0.6,0-0.9-0.2c-0.6-0.6-1.3-1.3-1.9-1.9c1.5-1.5,3.1-3.1,4.6-4.6c0.1-0.1,0.3-0.3,0.3-0.3
c-2-2-3.9-4-5.9-6.1c-0.1,0.2-0.2,0.3-0.4,0.5c-1.5,1.5-3,3-4.6,4.6c-2.8-2.8-5.6-5.6-8.4-8.4c-0.2-0.2-0.4-0.4-0.6-0.6
c-1.5-1.2-3.5-1-4.8,0.4c-1.2,1.4-1.1,3.5,0.2,4.8c4.2,4.2,8.3,8.3,12.5,12.5c1.4,1.4,2.7,2.7,4.1,4.1c0.2,0.2,0.2,0.4,0.2,0.7
c-0.2,0.6-0.3,1.2-0.3,1.9c-0.3,4,2.3,7.5,6.1,8.5c1.6,0.4,3.2,0.3,4.8-0.3c-0.1-0.2-0.3-0.2-0.4-0.4c-1.2-1.2-2.3-2.3-3.5-3.5
c-0.8-0.9-0.9-2.1-0.1-3c0.6-0.7,1.3-1.3,2-2c0.9-0.8,2-0.8,2.9,0c0.2,0.2,0.3,0.3,0.5,0.5c1.2,1.2,2.3,2.3,3.5,3.5
c0.1,0,0.1,0,0.2,0c0.1-0.7,0.3-1.3,0.3-2C43.9,28.7,38.5,24.3,33,25.9z M12.9,12.6c-0.6,0-1.2-0.5-1.2-1.2s0.5-1.2,1.2-1.2
c0.6,0,1.2,0.6,1.2,1.2C14.1,12,13.6,12.6,12.9,12.6z M29.3,16.3c0.5,0.5,1,1.1,1.6,1.6c-1.1,1.1-2.2,2.2-3.3,3.3
c-0.5-0.5-1.1-1.1-1.6-1.6C27.1,18.5,28.2,17.4,29.3,16.3z"/>
</g>
</svg>


View file

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<path d="M43.9,13.3c-1.3-0.6-2.4-0.3-3.5,0.4c-1.6,1.2-3.3,2.4-5,3.5c-1.4-3.4-3.3-5-6.3-5c-5.9-0.1-11.9-0.1-17.9,0
c-3.8,0.1-6.4,3.1-6.4,7.3c0,3.7-0.1,7.6,0,11.4c0,1.1,0.2,2.1,0.6,3.1c1.2,2.7,3.3,3.8,6,3.8c5.6,0,11-0.1,16.5,0
c3.5,0.1,6-1.5,7.4-5.1c1.7,1.2,3.4,2.4,5.1,3.5c1.1,0.7,2.2,1.1,3.5,0.3c1.2-0.7,1.6-1.9,1.6-3.3c0-5.6,0-11,0-16.6
C45.5,15.3,45.2,14.1,43.9,13.3z M32.2,30.5c0,2.5-1,3.6-3.4,3.6c-2.9,0-5.8,0-8.7,0s-5.6,0-8.5,0.1c-2.4-0.1-3.4-1.2-3.4-3.7
c0-3.7,0-7.5,0-11.2c0-2.2,1.1-3.4,3.1-3.4c5.9,0,11.9,0,17.8,0c2,0,3.1,1.2,3.1,3.4C32.2,23,32.2,26.8,32.2,30.5z M41.9,32.8
c-2.1-1.4-4.2-2.9-6.3-4.3c-0.1-0.1-0.2-0.4-0.2-0.7c0-1.9,0-3.7,0-5.5c0-0.3,0.1-0.7,0.3-0.8c2-1.4,4-2.8,6.2-4.3
C41.9,22.3,41.9,27.4,41.9,32.8z"/>
<path d="M27.4,25C27.4,24.7,27.4,25.2,27.4,25c0-1.1-0.1-2-0.2-2.7c-0.2-1.4-0.7-2.7-1.6-4c-0.2-0.3-0.5-0.5-1-0.6
c-0.4-0.1-0.9,0-1.3,0.2c-0.5,0.3-0.7,1.3-0.3,1.8c1.2,1.6,1.4,3,1.4,4.8c0.1,2.1-0.2,3.4-1.5,5.2c-0.2,0.3-0.2,1.1,0.1,1.6
c0.1,0.2,0.3,0.4,0.6,0.6c0.2,0.1,0.3,0.1,0.5,0.1c0.5,0,1-0.3,1.3-0.9C27,29.3,27.3,27.3,27.4,25L27.4,25z"/>
<ellipse cx="15.2" cy="24.7" rx="2.1" ry="2.4"/>
<path d="M22.3,24.8C22.3,24.7,22.3,25,22.3,24.8c0-0.7-0.1-1.5-0.1-1.9c-0.2-1-0.6-2.1-1.3-3c-0.1-0.2-0.4-0.5-0.9-0.5
c-0.7,0-0.9,0.2-1.2,0.4c-0.4,0.2-0.5,0.9-0.2,1.3c0.9,1.2,1,2.1,1.1,3.5c0,1.6-0.2,2.5-1.1,3.8c-0.1,0.2-0.1,0.7,0,1.2
c0.1,0.2,0.2,0.3,0.5,0.4c0.1,0,0.2,0.1,0.3,0.1c0.5,0.2,1.2,0.1,1.5-0.5C21.7,28,22.2,26.5,22.3,24.8L22.3,24.8z"/>
</svg>


View file

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<path class="st0" d="M43.9,13.3c-1.3-0.6-2.4-0.3-3.5,0.4c-1.6,1.2-3.3,2.4-5,3.5c-1.4-3.4-3.3-5-6.3-5c-5.9-0.1-11.9-0.1-17.9,0
c-3.8,0.1-6.4,3.1-6.4,7.3c0,3.7-0.1,7.6,0,11.4c0,1.1,0.2,2.1,0.6,3.1c1.2,2.7,3.3,3.8,6,3.8c5.6,0,11-0.1,16.5,0
c3.5,0.1,6-1.5,7.4-5.1c1.7,1.2,3.4,2.4,5.1,3.5c1.1,0.7,2.2,1.1,3.5,0.3c1.2-0.7,1.6-1.9,1.6-3.3c0-5.6,0-11,0-16.6
C45.5,15.3,45.2,14.1,43.9,13.3z M32.2,30.5c0,2.5-1,3.6-3.4,3.6c-2.9,0-5.8,0-8.7,0s-5.6,0-8.5,0.1c-2.4-0.1-3.4-1.2-3.4-3.7
c0-3.7,0-7.5,0-11.2c0-2.2,1.1-3.4,3.1-3.4c5.9,0,11.9,0,17.8,0c2,0,3.1,1.2,3.1,3.4C32.2,23,32.2,26.8,32.2,30.5z M41.9,32.8
c-2.1-1.4-4.2-2.9-6.3-4.3c-0.1-0.1-0.2-0.4-0.2-0.7c0-1.9,0-3.7,0-5.5c0-0.3,0.1-0.7,0.3-0.8c2-1.4,4-2.8,6.2-4.3
C41.9,22.3,41.9,27.4,41.9,32.8z"/>
<path class="st0" d="M27.4,25C27.4,24.7,27.4,25.2,27.4,25c0-1.1-0.1-2-0.2-2.7c-0.2-1.4-0.7-2.7-1.6-4c-0.2-0.3-0.5-0.5-1-0.6
c-0.4-0.1-0.9,0-1.3,0.2c-0.5,0.3-0.7,1.3-0.3,1.8c1.2,1.6,1.4,3,1.4,4.8c0.1,2.1-0.2,3.4-1.5,5.2c-0.2,0.3-0.2,1.1,0.1,1.6
c0.1,0.2,0.3,0.4,0.6,0.6c0.2,0.1,0.3,0.1,0.5,0.1c0.5,0,1-0.3,1.3-0.9C27,29.3,27.3,27.3,27.4,25L27.4,25z"/>
<ellipse class="st0" cx="15.2" cy="24.7" rx="2.1" ry="2.4"/>
<path class="st0" d="M22.3,24.8C22.3,24.7,22.3,25,22.3,24.8c0-0.7-0.1-1.5-0.1-1.9c-0.2-1-0.6-2.1-1.3-3c-0.1-0.2-0.4-0.5-0.9-0.5
c-0.7,0-0.9,0.2-1.2,0.4c-0.4,0.2-0.5,0.9-0.2,1.3c0.9,1.2,1,2.1,1.1,3.5c0,1.6-0.2,2.5-1.1,3.8c-0.1,0.2-0.1,0.7,0,1.2
c0.1,0.2,0.2,0.3,0.5,0.4c0.1,0,0.2,0.1,0.3,0.1c0.5,0.2,1.2,0.1,1.5-0.5C21.7,28,22.2,26.5,22.3,24.8L22.3,24.8z"/>
</svg>


Binary file not shown.


View file

@ -18,7 +18,7 @@ Original.CheckBox {
id: checkBox
property int colorScheme: hifi.colorSchemes.light
property string color: hifi.colors.lightGray
property string color: hifi.colors.lightGrayText
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light
property bool isRedCheck: false
property int boxSize: 14

View file

@ -0,0 +1,38 @@
//
// Separator.qml
//
// Created by Zach Fox on 2017-06-06
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import "../styles-uit"
Item {
// Size
height: 2;
Rectangle {
// Size
width: parent.width;
height: 1;
// Anchors
anchors.left: parent.left;
anchors.bottom: parent.bottom;
anchors.bottomMargin: height;
// Style
color: hifi.colors.baseGrayShadow;
}
Rectangle {
// Size
width: parent.width;
height: 1;
// Anchors
anchors.left: parent.left;
anchors.bottom: parent.bottom;
// Style
color: hifi.colors.baseGrayHighlight;
}
}

View file

@ -36,7 +36,7 @@ Slider {
Rectangle {
width: parent.height - 2
height: slider.value * (slider.width/(slider.maximumValue - slider.minimumValue)) - 1
height: slider.width * (slider.value - slider.minimumValue) / (slider.maximumValue - slider.minimumValue) - 1
radius: height / 2
anchors {
top: parent.top

View file

@ -0,0 +1,156 @@
//
// Switch.qml
//
// Created by Zach Fox on 2017-06-06
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4 as Original
import QtQuick.Controls.Styles 1.4
import "../styles-uit"
Item {
id: rootSwitch;
property int colorScheme: hifi.colorSchemes.light;
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light;
property int switchWidth: 70;
readonly property int switchRadius: height/2;
property string labelTextOff: "";
property string labelGlyphOffText: "";
property int labelGlyphOffSize: 32;
property string labelTextOn: "";
property string labelGlyphOnText: "";
property int labelGlyphOnSize: 32;
property alias checked: originalSwitch.checked;
signal onCheckedChanged;
signal clicked;
Original.Switch {
id: originalSwitch;
activeFocusOnPress: true;
anchors.top: rootSwitch.top;
anchors.left: rootSwitch.left;
anchors.leftMargin: rootSwitch.width/2 - rootSwitch.switchWidth/2;
onCheckedChanged: rootSwitch.onCheckedChanged();
onClicked: rootSwitch.clicked();
style: SwitchStyle {
padding {
top: 3;
left: 3;
right: 3;
bottom: 3;
}
groove: Rectangle {
color: "#252525";
implicitWidth: rootSwitch.switchWidth;
implicitHeight: rootSwitch.height;
radius: rootSwitch.switchRadius;
}
handle: Rectangle {
id: switchHandle;
implicitWidth: rootSwitch.height - padding.top - padding.bottom;
implicitHeight: implicitWidth;
radius: implicitWidth/2;
border.color: hifi.colors.lightGrayText;
color: hifi.colors.lightGray;
MouseArea {
anchors.fill: parent;
hoverEnabled: true;
onEntered: parent.color = hifi.colors.blueHighlight;
onExited: parent.color = hifi.colors.lightGray;
}
}
}
}
// OFF Label
Item {
anchors.right: originalSwitch.left;
anchors.rightMargin: 10;
anchors.top: rootSwitch.top;
height: rootSwitch.height;
RalewaySemiBold {
id: labelOff;
text: labelTextOff;
size: hifi.fontSizes.inputLabel;
color: originalSwitch.checked ? hifi.colors.lightGrayText : "#FFFFFF";
anchors.top: parent.top;
anchors.right: parent.right;
width: paintedWidth;
height: parent.height;
verticalAlignment: Text.AlignVCenter;
}
HiFiGlyphs {
id: labelGlyphOff;
text: labelGlyphOffText;
size: labelGlyphOffSize;
color: labelOff.color;
anchors.top: parent.top;
anchors.topMargin: 2;
anchors.right: labelOff.left;
anchors.rightMargin: 4;
}
MouseArea {
anchors.top: parent.top;
anchors.bottom: parent.bottom;
anchors.left: labelGlyphOff.left;
anchors.right: labelOff.right;
onClicked: {
originalSwitch.checked = false;
}
}
}
// ON Label
Item {
anchors.left: originalSwitch.right;
anchors.leftMargin: 10;
anchors.top: rootSwitch.top;
height: rootSwitch.height;
RalewaySemiBold {
id: labelOn;
text: labelTextOn;
size: hifi.fontSizes.inputLabel;
color: originalSwitch.checked ? "#FFFFFF" : hifi.colors.lightGrayText;
anchors.top: parent.top;
anchors.left: parent.left;
width: paintedWidth;
height: parent.height;
verticalAlignment: Text.AlignVCenter;
}
HiFiGlyphs {
id: labelGlyphOn;
text: labelGlyphOnText;
size: labelGlyphOnSize;
color: labelOn.color;
anchors.top: parent.top;
anchors.left: labelOn.right;
}
MouseArea {
anchors.top: parent.top;
anchors.bottom: parent.bottom;
anchors.left: labelOn.left;
anchors.right: labelGlyphOn.right;
onClicked: {
originalSwitch.checked = true;
}
}
}
}

View file

@ -72,6 +72,7 @@ Preference {
property var avatarBuilder: Component { AvatarPreference { } }
property var buttonBuilder: Component { ButtonPreference { } }
property var comboBoxBuilder: Component { ComboBoxPreference { } }
property var spinnerSliderBuilder: Component { SpinnerSliderPreference { } }
property var preferences: []
property int checkBoxCount: 0
@ -86,7 +87,7 @@ Preference {
}
function buildPreference(preference) {
console.log("\tPreference type " + preference.type + " name " + preference.name)
console.log("\tPreference type " + preference.type + " name " + preference.name);
var builder;
switch (preference.type) {
case Preference.Editable:
@ -128,6 +129,11 @@ Preference {
checkBoxCount = 0;
builder = comboBoxBuilder;
break;
case Preference.SpinnerSlider:
checkBoxCount = 0;
builder = spinnerSliderBuilder;
break;
};
if (builder) {

View file

@ -0,0 +1,111 @@
//
// SpinnerSliderPreference.qml
//
// Created by Cain Kilgore on 11th July 2017
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import "../../dialogs"
import "../../controls-uit"
Preference {
id: root
property alias slider: slider
property alias spinner: spinner
height: control.height + hifi.dimensions.controlInterlineHeight
Component.onCompleted: {
slider.value = preference.value;
spinner.value = preference.value;
}
function save() {
preference.value = slider.value;
preference.save();
}
Item {
id: control
anchors {
left: parent.left
right: parent.right
bottom: parent.bottom
}
height: Math.max(labelText.height, slider.height, spinner.height, button.height)
Label {
id: labelText
text: root.label + ":"
colorScheme: hifi.colorSchemes.dark
anchors {
left: parent.left
right: slider.left
rightMargin: hifi.dimensions.labelPadding
verticalCenter: parent.verticalCenter
}
horizontalAlignment: Text.AlignRight
wrapMode: Text.Wrap
}
Slider {
id: slider
value: preference.value
width: 100
minimumValue: MyAvatar.getDomainMinScale()
maximumValue: MyAvatar.getDomainMaxScale()
stepSize: preference.step
onValueChanged: {
spinner.value = value
}
anchors {
right: spinner.left
rightMargin: 10
verticalCenter: parent.verticalCenter
}
colorScheme: hifi.colorSchemes.dark
}
SpinBox {
id: spinner
decimals: preference.decimals
value: preference.value
minimumValue: MyAvatar.getDomainMinScale()
maximumValue: MyAvatar.getDomainMaxScale()
width: 100
onValueChanged: {
slider.value = value;
}
anchors {
right: button.left
rightMargin: 10
verticalCenter: parent.verticalCenter
}
colorScheme: hifi.colorSchemes.dark
}
GlyphButton {
id: button
onClicked: {
if (spinner.maximumValue >= 1) {
spinner.value = 1
slider.value = 1
} else {
spinner.value = spinner.maximumValue
slider.value = spinner.maximumValue
}
}
width: 30
glyph: hifi.glyphs.reload
anchors {
right: parent.right
verticalCenter: parent.verticalCenter
}
colorScheme: hifi.colorSchemes.dark
}
}
}

View file

@ -32,14 +32,15 @@ Item {
radius: popupRadius
}
Rectangle {
width: Math.max(parent.width * 0.75, 400)
id: textContainer;
width: Math.max(parent.width * 0.8, 400)
height: contentContainer.height + 50
anchors.centerIn: parent
radius: popupRadius
color: "white"
Item {
id: contentContainer
width: parent.width - 60
width: parent.width - 50
height: childrenRect.height
anchors.centerIn: parent
Item {
@ -92,7 +93,7 @@ Item {
anchors.top: parent.top
anchors.topMargin: -20
anchors.right: parent.right
anchors.rightMargin: -25
anchors.rightMargin: -20
MouseArea {
anchors.fill: closeGlyphButton
hoverEnabled: true
@ -127,11 +128,51 @@ Item {
color: hifi.colors.darkGray
wrapMode: Text.WordWrap
textFormat: Text.StyledText
onLinkActivated: {
Qt.openUrlExternally(link)
}
}
}
}
// Left gray MouseArea
MouseArea {
anchors.fill: parent
anchors.left: parent.left;
anchors.right: textContainer.left;
anchors.top: textContainer.top;
anchors.bottom: textContainer.bottom;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false
}
}
// Right gray MouseArea
MouseArea {
anchors.left: textContainer.right;
anchors.right: parent.right;
anchors.top: textContainer.top;
anchors.bottom: textContainer.bottom;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false
}
}
// Top gray MouseArea
MouseArea {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: parent.top;
anchors.bottom: textContainer.top;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false
}
}
// Bottom gray MouseArea
MouseArea {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: textContainer.bottom;
anchors.bottom: parent.bottom;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false

View file

@ -0,0 +1,374 @@
//
// SpectatorCamera.qml
// qml/hifi
//
// Spectator Camera
//
// Created by Zach Fox on 2017-06-05
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0 as Hifi
import QtQuick 2.5
import QtQuick.Controls 1.4
import "../styles-uit"
import "../controls-uit" as HifiControlsUit
import "../controls" as HifiControls
// references HMD, XXX from root context
Rectangle {
HifiConstants { id: hifi; }
id: spectatorCamera;
// Style
color: hifi.colors.baseGray;
// The letterbox used for popup messages
LetterboxMessage {
id: letterboxMessage;
z: 999; // Force the popup on top of everything else
}
function letterbox(headerGlyph, headerText, message) {
letterboxMessage.headerGlyph = headerGlyph;
letterboxMessage.headerText = headerText;
letterboxMessage.text = message;
letterboxMessage.visible = true;
letterboxMessage.popupRadius = 0;
}
//
// TITLE BAR START
//
Item {
id: titleBarContainer;
// Size
width: spectatorCamera.width;
height: 50;
// Anchors
anchors.left: parent.left;
anchors.top: parent.top;
// "Spectator" text
RalewaySemiBold {
id: titleBarText;
text: "Spectator";
// Text size
size: hifi.fontSizes.overlayTitle;
// Anchors
anchors.fill: parent;
anchors.leftMargin: 16;
// Style
color: hifi.colors.lightGrayText;
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
}
// Separator
HifiControlsUit.Separator {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.bottom: parent.bottom;
}
}
//
// TITLE BAR END
//
//
// SPECTATOR APP DESCRIPTION START
//
Item {
id: spectatorDescriptionContainer;
// Size
width: spectatorCamera.width;
height: childrenRect.height;
// Anchors
anchors.left: parent.left;
anchors.top: titleBarContainer.bottom;
// (i) Glyph
HiFiGlyphs {
id: spectatorDescriptionGlyph;
text: hifi.glyphs.info;
// Size
width: 20;
height: parent.height;
size: 60;
// Anchors
anchors.left: parent.left;
anchors.leftMargin: 20;
anchors.top: parent.top;
anchors.topMargin: 0;
// Style
color: hifi.colors.lightGrayText;
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
// "Spectator" app description text
RalewayLight {
id: spectatorDescriptionText;
text: "Spectator lets you change what your monitor displays while you're using a VR headset. Use Spectator when streaming and recording video.";
// Text size
size: 14;
// Size
width: 350;
height: paintedHeight;
// Anchors
anchors.top: parent.top;
anchors.topMargin: 15;
anchors.left: spectatorDescriptionGlyph.right;
anchors.leftMargin: 40;
// Style
color: hifi.colors.lightGrayText;
wrapMode: Text.WordWrap;
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
}
// "Learn More" text
RalewayRegular {
id: spectatorLearnMoreText;
text: "Learn More About Spectator";
// Text size
size: 14;
// Size
width: paintedWidth;
height: paintedHeight;
// Anchors
anchors.top: spectatorDescriptionText.bottom;
anchors.topMargin: 10;
anchors.left: spectatorDescriptionText.anchors.left;
anchors.leftMargin: spectatorDescriptionText.anchors.leftMargin;
// Style
color: hifi.colors.blueAccent;
wrapMode: Text.WordWrap;
font.underline: true;
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
MouseArea {
anchors.fill: parent;
hoverEnabled: enabled;
onClicked: {
letterbox(hifi.glyphs.question,
"Spectator Camera",
"By default, your monitor shows a preview of what you're seeing in VR. " +
"Using the Spectator Camera app, your monitor can display the view " +
"from a virtual hand-held camera - perfect for taking selfies or filming " +
"your friends!<br>" +
"<h3>Streaming and Recording</h3>" +
"We recommend OBS for streaming and recording the contents of your monitor to services like " +
"Twitch, YouTube Live, and Facebook Live.<br><br>" +
"To get started using OBS, click this link now. The page will open in an external browser:<br>" +
'<font size="4"><a href="https://obsproject.com/forum/threads/official-overview-guide.402/">OBS Official Overview Guide</a></font>');
}
onEntered: parent.color = hifi.colors.blueHighlight;
onExited: parent.color = hifi.colors.blueAccent;
}
}
// Separator
HifiControlsUit.Separator {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: spectatorLearnMoreText.bottom;
anchors.topMargin: spectatorDescriptionText.anchors.topMargin;
}
}
//
// SPECTATOR APP DESCRIPTION END
//
//
// SPECTATOR CONTROLS START
//
Item {
id: spectatorControlsContainer;
// Size
height: spectatorCamera.height - spectatorDescriptionContainer.height - titleBarContainer.height;
// Anchors
anchors.top: spectatorDescriptionContainer.bottom;
anchors.topMargin: 20;
anchors.left: parent.left;
anchors.leftMargin: 25;
anchors.right: parent.right;
anchors.rightMargin: anchors.leftMargin;
// "Camera On" Checkbox
HifiControlsUit.CheckBox {
id: cameraToggleCheckBox;
colorScheme: hifi.colorSchemes.dark;
anchors.left: parent.left;
anchors.top: parent.top;
text: "Spectator Camera On";
boxSize: 24;
onClicked: {
sendToScript({method: (checked ? 'spectatorCameraOn' : 'spectatorCameraOff')});
spectatorCameraPreview.ready = checked;
}
}
// Instructions or Preview
Rectangle {
id: spectatorCameraImageContainer;
anchors.left: parent.left;
anchors.top: cameraToggleCheckBox.bottom;
anchors.topMargin: 20;
anchors.right: parent.right;
height: 250;
color: cameraToggleCheckBox.checked ? "transparent" : "black";
AnimatedImage {
source: "../../images/static.gif"
visible: !cameraToggleCheckBox.checked;
anchors.fill: parent;
opacity: 0.15;
}
// Instructions (visible when display texture isn't set)
FiraSansRegular {
id: spectatorCameraInstructions;
text: "Turn on Spectator Camera for a preview\nof what your monitor shows.";
size: 16;
color: hifi.colors.lightGrayText;
visible: !cameraToggleCheckBox.checked;
anchors.fill: parent;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
}
// Spectator Camera Preview
Hifi.ResourceImageItem {
id: spectatorCameraPreview;
visible: cameraToggleCheckBox.checked;
url: monitorShowsSwitch.checked ? "resource://spectatorCameraFrame" : "resource://hmdPreviewFrame";
ready: cameraToggleCheckBox.checked;
mirrorVertically: true;
anchors.fill: parent;
onVisibleChanged: {
ready = cameraToggleCheckBox.checked;
update();
}
}
}
// "Monitor Shows" Switch Label Glyph
HiFiGlyphs {
id: monitorShowsSwitchLabelGlyph;
text: hifi.glyphs.screen;
size: 32;
color: hifi.colors.blueHighlight;
anchors.top: spectatorCameraImageContainer.bottom;
anchors.topMargin: 13;
anchors.left: parent.left;
}
// "Monitor Shows" Switch Label
RalewayLight {
id: monitorShowsSwitchLabel;
text: "MONITOR SHOWS:";
anchors.top: spectatorCameraImageContainer.bottom;
anchors.topMargin: 20;
anchors.left: monitorShowsSwitchLabelGlyph.right;
anchors.leftMargin: 6;
size: 16;
width: paintedWidth;
height: paintedHeight;
color: hifi.colors.lightGrayText;
verticalAlignment: Text.AlignVCenter;
}
// "Monitor Shows" Switch
HifiControlsUit.Switch {
id: monitorShowsSwitch;
height: 30;
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: monitorShowsSwitchLabel.bottom;
anchors.topMargin: 10;
labelTextOff: "HMD Preview";
labelTextOn: "Camera View";
labelGlyphOnText: hifi.glyphs.alert;
onCheckedChanged: {
sendToScript({method: 'setMonitorShowsCameraView', params: checked});
}
}
// "Switch View From Controller" Checkbox
HifiControlsUit.CheckBox {
id: switchViewFromControllerCheckBox;
colorScheme: hifi.colorSchemes.dark;
anchors.left: parent.left;
anchors.top: monitorShowsSwitch.bottom;
anchors.topMargin: 25;
text: "";
boxSize: 24;
onClicked: {
sendToScript({method: 'changeSwitchViewFromControllerPreference', params: checked});
}
}
}
//
// SPECTATOR CONTROLS END
//
//
// FUNCTION DEFINITIONS START
//
//
// Function Name: fromScript()
//
// Relevant Variables:
// None
//
// Arguments:
// message: The message sent from the SpectatorCamera JavaScript.
// Messages are in the format { method, params }, like JSON-RPC.
//
// Description:
// Called when a message is received from spectatorCamera.js.
//
function fromScript(message) {
switch (message.method) {
case 'updateSpectatorCameraCheckbox':
cameraToggleCheckBox.checked = message.params;
break;
case 'updateMonitorShowsSwitch':
monitorShowsSwitch.checked = message.params;
break;
case 'updateControllerMappingCheckbox':
switchViewFromControllerCheckBox.checked = message.setting;
switchViewFromControllerCheckBox.enabled = true;
if (message.controller === "OculusTouch") {
switchViewFromControllerCheckBox.text = "Clicking Touch's Left Thumbstick Switches Monitor View";
} else if (message.controller === "Vive") {
switchViewFromControllerCheckBox.text = "Clicking Left Thumb Pad Switches Monitor View";
} else {
switchViewFromControllerCheckBox.text = "Pressing Ctrl+0 Switches Monitor View";
switchViewFromControllerCheckBox.checked = true;
switchViewFromControllerCheckBox.enabled = false;
}
break;
case 'showPreviewTextureNotInstructions':
console.log('showPreviewTextureNotInstructions recvd', JSON.stringify(message));
spectatorCameraPreview.url = message.url;
spectatorCameraPreview.visible = message.setting;
break;
default:
console.log('Unrecognized message from spectatorCamera.js:', JSON.stringify(message));
}
}
signal sendToScript(var message);
//
// FUNCTION DEFINITIONS END
//
}

View file

@ -117,26 +117,28 @@ Rectangle {
delegate: Item {
width: parent.width;
height: 36;
AudioControls.CheckBox {
id: checkbox
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
text: display;
wrap: false;
checked: selected;
enabled: false;
}
RowLayout {
width: parent.width;
MouseArea {
anchors.fill: checkbox
onClicked: Audio.setInputDevice(info);
}
AudioControls.CheckBox {
Layout.maximumWidth: parent.width - level.width - 40;
text: display;
wrap: false;
checked: selected;
onClicked: {
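// Clicking toggles 'checked' imperatively, which breaks its declarative binding to
// 'selected'; after copying the new state into 'selected', Qt.binding() below restores
// the binding so 'checked' keeps tracking 'selected'.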
selected = checked;
checked = Qt.binding(function() { return selected; }); // restore binding
}
}
InputLevel {
id: level;
Layout.alignment: Qt.AlignRight;
Layout.rightMargin: 30;
visible: selected;
}
InputLevel {
id: level;
anchors.verticalCenter: parent.verticalCenter
anchors.right: parent.right
anchors.rightMargin: 30
visible: selected;
}
}
}
@ -174,13 +176,19 @@ Rectangle {
delegate: Item {
width: parent.width;
height: 36;
AudioControls.CheckBox {
id: checkbox
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
text: display;
checked: selected;
onClicked: {
selected = checked;
checked = Qt.binding(function() { return selected; }); // restore binding
}
enabled: false;
}
MouseArea {
anchors.fill: checkbox
onClicked: Audio.setOutputDevice(info);
}
}
}

View file

@ -65,7 +65,8 @@ Rectangle {
onClicked: {
newModelDialog.keyboardEnabled = HMD.active
parent.focus = true;
parent.forceActiveFocus()
parent.forceActiveFocus();
modelURL.cursorPosition = modelURL.positionAt(mouseX, mouseY, TextInput.CursorBetweenCharacters);
}
}
}

View file

@ -81,6 +81,7 @@ Preference {
property var avatarBuilder: Component { AvatarPreference { } }
property var buttonBuilder: Component { ButtonPreference { } }
property var comboBoxBuilder: Component { ComboBoxPreference { } }
property var spinnerSliderBuilder: Component { SpinnerSliderPreference { } }
property var preferences: []
property int checkBoxCount: 0
@ -143,6 +144,10 @@ Preference {
//to be not overlapped when drop down is active
zpos = root.z + 1000 - itemNum
break;
case Preference.SpinnerSlider:
checkBoxCount = 0;
builder = spinnerSliderBuilder;
break;
};
if (builder) {

View file

@ -50,7 +50,7 @@ Item {
id: colors
// Base colors
readonly property color baseGray: "#404040"
readonly property color baseGray: "#393939"
readonly property color darkGray: "#121212"
readonly property color baseGrayShadow: "#252525"
readonly property color baseGrayHighlight: "#575757"

View file

@ -69,6 +69,7 @@
#include <EntityScriptClient.h>
#include <EntityScriptServerLogClient.h>
#include <EntityScriptingInterface.h>
#include <HoverOverlayInterface.h>
#include <ErrorDialog.h>
#include <FileScriptingInterface.h>
#include <Finally.h>
@ -167,6 +168,7 @@
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
#include "SpeechRecognizer.h"
#endif
#include "ui/ResourceImageItem.h"
#include "ui/AddressBarDialog.h"
#include "ui/AvatarInputs.h"
#include "ui/DialogsManager.h"
@ -587,6 +589,7 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
DependencyManager::set<Snapshot>();
DependencyManager::set<CloseEventSender>();
DependencyManager::set<ResourceManager>();
DependencyManager::set<HoverOverlayInterface>();
return previousSessionCrashed;
}
@ -1003,7 +1006,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
}
properties["first_run"] = firstRun.get();
// add the user's machine ID to the launch event
@ -1466,6 +1469,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
properties["atp_mapping_requests"] = atpMappingRequests;
properties["throttled"] = _displayPlugin ? _displayPlugin->isThrottled() : false;
QJsonObject bytesDownloaded;
bytesDownloaded["atp"] = statTracker->getStat(STAT_ATP_RESOURCE_TOTAL_BYTES).toInt();
bytesDownloaded["http"] = statTracker->getStat(STAT_HTTP_RESOURCE_TOTAL_BYTES).toInt();
bytesDownloaded["file"] = statTracker->getStat(STAT_FILE_RESOURCE_TOTAL_BYTES).toInt();
bytesDownloaded["total"] = bytesDownloaded["atp"].toInt() + bytesDownloaded["http"].toInt()
+ bytesDownloaded["file"].toInt();
properties["bytesDownloaded"] = bytesDownloaded;
auto myAvatar = getMyAvatar();
glm::vec3 avatarPosition = myAvatar->getPosition();
@ -1704,9 +1715,7 @@ QString Application::getUserAgent() {
void Application::toggleTabletUI(bool shouldOpen) const {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
auto hmd = DependencyManager::get<HMDScriptingInterface>();
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
bool messageOpen = tablet->isMessageDialogOpen();
if ((!messageOpen || (messageOpen && !hmd->getShouldShowTablet())) && !(shouldOpen && hmd->getShouldShowTablet())) {
if (!(shouldOpen && hmd->getShouldShowTablet())) {
auto HMD = DependencyManager::get<HMDScriptingInterface>();
HMD->toggleShouldShowTablet();
}
@ -1971,7 +1980,7 @@ void Application::initializeGL() {
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
bool isDeferred = !QProcessEnvironment::systemEnvironment().contains(RENDER_FORWARD);
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraFrame", cullFunctor);
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor);
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, isDeferred);
_renderEngine->load();
_renderEngine->registerScene(_main3DScene);
@ -2019,6 +2028,7 @@ void Application::initializeUi() {
LoginDialog::registerType();
Tooltip::registerType();
UpdateDialog::registerType();
qmlRegisterType<ResourceImageItem>("Hifi", 1, 0, "ResourceImageItem");
qmlRegisterType<Preference>("Hifi", 1, 0, "Preference");
auto offscreenUi = DependencyManager::get<OffscreenUi>();
@ -2114,6 +2124,7 @@ void Application::initializeUi() {
surfaceContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor());
surfaceContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
surfaceContext->setContextProperty("HoverOverlay", DependencyManager::get<HoverOverlayInterface>().data());
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
surfaceContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get()));
@ -2740,6 +2751,16 @@ bool Application::event(QEvent* event) {
static_cast<LambdaEvent*>(event)->call();
return true;
// Explicit idle keeps the idle running at a lower interval, but without any rendering
// see (windowMinimizedChanged)
case Event::Idle:
{
float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
_lastTimeUpdated.start();
idle(nsecsElapsed);
}
return true;
case Event::Present:
if (!_renderRequested) {
float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
@ -2749,7 +2770,7 @@ bool Application::event(QEvent* event) {
idle(nsecsElapsed);
postEvent(this, new QEvent(static_cast<QEvent::Type>(Paint)), Qt::HighEventPriority);
}
}
}
return true;
case Event::Paint:
@ -3725,8 +3746,8 @@ void updateCpuInformation() {
// Update friendly structure
auto& myCpuInfo = myCpuInfos[i];
myCpuInfo.update(cpuInfo);
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
{ "kernel", myCpuInfo.kernelUsage },
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
{ "kernel", myCpuInfo.kernelUsage },
{ "user", myCpuInfo.userUsage }
});
}
@ -3793,7 +3814,7 @@ void getCpuUsage(vec3& systemAndUser) {
void setupCpuMonitorThread() {
initCpuUsage();
auto cpuMonitorThread = QThread::currentThread();
QTimer* timer = new QTimer();
timer->setInterval(50);
QObject::connect(timer, &QTimer::timeout, [] {
@ -5427,6 +5448,10 @@ void Application::updateWindowTitle() const {
qCDebug(interfaceapp, "Application title set to: %s", title.toStdString().c_str());
#endif
_window->setWindowTitle(title);
// updateWindowTitle gets called whenever there's a change regarding the domain, so rather
// than placing this within domainChanged, it's placed here to cover the other potential cases.
DependencyManager::get< MessagesClient >()->sendLocalMessage("Toolbar-DomainChanged", "");
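// A receiving script could react roughly like this hypothetical sketch (only the
// "Toolbar-DomainChanged" channel name comes from the call above):
//   Messages.subscribe("Toolbar-DomainChanged");
//   Messages.messageReceived.connect(function (channel, message, senderID, localOnly) {
//       if (channel === "Toolbar-DomainChanged") { /* refresh toolbar state */ }
//   });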
}
void Application::clearDomainOctreeDetails() {
@ -5812,6 +5837,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
auto entityScriptServerLog = DependencyManager::get<EntityScriptServerLogClient>();
scriptEngine->registerGlobalObject("EntityScriptServerLog", entityScriptServerLog.data());
scriptEngine->registerGlobalObject("AvatarInputs", AvatarInputs::getInstance());
scriptEngine->registerGlobalObject("HoverOverlay", DependencyManager::get<HoverOverlayInterface>().data());
qScriptRegisterMetaType(scriptEngine, OverlayIDtoScriptValue, OverlayIDfromScriptValue);
@ -6583,11 +6609,11 @@ void Application::setPreviousScriptLocation(const QString& location) {
}
void Application::loadScriptURLDialog() const {
auto newScript = OffscreenUi::getText(OffscreenUi::ICON_NONE, "Open and Run Script", "Script URL");
QString newScript = OffscreenUi::getText(OffscreenUi::ICON_NONE, "Open and Run Script", "Script URL");
if (QUrl(newScript).scheme() == "atp") {
OffscreenUi::warning("Error Loading Script", "Cannot load client script over ATP");
} else if (!newScript.isEmpty()) {
DependencyManager::get<ScriptEngines>()->loadScript(newScript);
DependencyManager::get<ScriptEngines>()->loadScript(newScript.trimmed());
}
}

View file

@ -678,7 +678,7 @@ private:
QTimer _addAssetToWorldErrorTimer;
FileScriptingInterface* _fileDownload;
AudioInjector* _snapshotSoundInjector { nullptr };
AudioInjectorPointer _snapshotSoundInjector;
SharedSoundPointer _snapshotSound;
DisplayPluginPointer _autoSwitchDisplayModeSupportedHMDPlugin;

View file

@ -680,7 +680,7 @@ Menu::Menu() {
// Developer > Physics >>>
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
{
auto drawStatusConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<render::DrawStatus>();
auto drawStatusConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<render::DrawStatus>("RenderMainView.DrawStatus");
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowOwned,
0, false, drawStatusConfig, SLOT(setShowNetwork(bool)));
}

View file

@ -9,9 +9,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Application.h"
#include "SecondaryCamera.h"
#include <TextureCache.h>
#include <gpu/Context.h>
#include <EntityScriptingInterface.h>
using RenderArgsPointer = std::shared_ptr<RenderArgs>;
@ -27,39 +29,32 @@ void MainRenderTask::build(JobModel& task, const render::Varying& inputs, render
}
}
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
bool wasEnabled = isEnabled();
setEnabled(false);
auto textureCache = DependencyManager::get<TextureCache>();
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
setEnabled(wasEnabled);
}
void SecondaryCameraRenderTaskConfig::resetSizeSpectatorCamera(int width, int height) { // Carefully adjust the framebuffer / texture.
resetSize(width, height);
}
class BeginSecondaryCameraFrame { // Changes renderContext for our framebuffer and and view.
class SecondaryCameraJob { // Changes renderContext for our framebuffer and view.
QUuid _attachedEntityId{};
glm::vec3 _position{};
glm::quat _orientation{};
float _vFoV{};
float _nearClipPlaneDistance{};
float _farClipPlaneDistance{};
EntityPropertyFlags _attachedEntityPropertyFlags;
QSharedPointer<EntityScriptingInterface> _entityScriptingInterface;
public:
using Config = BeginSecondaryCameraFrameConfig;
using JobModel = render::Job::ModelO<BeginSecondaryCameraFrame, RenderArgsPointer, Config>;
BeginSecondaryCameraFrame() {
using Config = SecondaryCameraJobConfig;
using JobModel = render::Job::ModelO<SecondaryCameraJob, RenderArgsPointer, Config>;
SecondaryCameraJob() {
_cachedArgsPointer = std::make_shared<RenderArgs>(_cachedArgs);
_entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_attachedEntityPropertyFlags += PROP_POSITION;
_attachedEntityPropertyFlags += PROP_ROTATION;
}
void configure(const Config& config) {
if (config.enabled || config.alwaysEnabled) {
_position = config.position;
_orientation = config.orientation;
_vFoV = config.vFoV;
_nearClipPlaneDistance = config.nearClipPlaneDistance;
_farClipPlaneDistance = config.farClipPlaneDistance;
}
_attachedEntityId = config.attachedEntityId;
_position = config.position;
_orientation = config.orientation;
_vFoV = config.vFoV;
_nearClipPlaneDistance = config.nearClipPlaneDistance;
_farClipPlaneDistance = config.farClipPlaneDistance;
}
void run(const render::RenderContextPointer& renderContext, RenderArgsPointer& cachedArgs) {
@ -83,8 +78,14 @@ public:
});
auto srcViewFrustum = args->getViewFrustum();
srcViewFrustum.setPosition(_position);
srcViewFrustum.setOrientation(_orientation);
if (!_attachedEntityId.isNull()) {
EntityItemProperties entityProperties = _entityScriptingInterface->getEntityProperties(_attachedEntityId, _attachedEntityPropertyFlags);
srcViewFrustum.setPosition(entityProperties.getPosition());
srcViewFrustum.setOrientation(entityProperties.getRotation());
} else {
srcViewFrustum.setPosition(_position);
srcViewFrustum.setOrientation(_orientation);
}
srcViewFrustum.setProjection(glm::perspective(glm::radians(_vFoV), ((float)args->_viewport.z / (float)args->_viewport.w), _nearClipPlaneDistance, _farClipPlaneDistance));
// Without calculating the bound planes, the secondary camera will use the same culling frustum as the main camera,
// which is not what we want here.
@ -99,6 +100,41 @@ protected:
RenderArgsPointer _cachedArgsPointer;
};
void SecondaryCameraJobConfig::setPosition(glm::vec3 pos) {
if (attachedEntityId.isNull()) {
position = pos;
emit dirty();
} else {
qDebug() << "ERROR: Cannot set position of SecondaryCamera while attachedEntityId is set.";
}
}
void SecondaryCameraJobConfig::setOrientation(glm::quat orient) {
if (attachedEntityId.isNull()) {
orientation = orient;
emit dirty();
} else {
qDebug() << "ERROR: Cannot set orientation of SecondaryCamera while attachedEntityId is set.";
}
}
void SecondaryCameraJobConfig::enableSecondaryCameraRenderConfigs(bool enabled) {
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->setEnabled(enabled);
setEnabled(enabled);
}
void SecondaryCameraJobConfig::resetSizeSpectatorCamera(int width, int height) { // Carefully adjust the framebuffer / texture.
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->resetSize(width, height);
}
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
bool wasEnabled = isEnabled();
setEnabled(false);
auto textureCache = DependencyManager::get<TextureCache>();
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
setEnabled(wasEnabled);
}
class EndSecondaryCameraFrame { // Restores renderContext.
public:
using JobModel = render::Job::ModelI<EndSecondaryCameraFrame, RenderArgsPointer>;
@ -119,7 +155,7 @@ public:
};
void SecondaryCameraRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor) {
const auto cachedArg = task.addJob<BeginSecondaryCameraFrame>("BeginSecondaryCamera");
const auto cachedArg = task.addJob<SecondaryCameraJob>("SecondaryCamera");
const auto items = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
assert(items.canCast<RenderFetchCullSortTask::Output>());
task.addJob<RenderDeferredTask>("RenderDeferredTask", items);

View file

@ -28,34 +28,40 @@ public:
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred = true);
};
class BeginSecondaryCameraFrameConfig : public render::Task::Config { // Exposes secondary camera parameters to JavaScript.
class SecondaryCameraJobConfig : public render::Task::Config { // Exposes secondary camera parameters to JavaScript.
Q_OBJECT
Q_PROPERTY(glm::vec3 position MEMBER position NOTIFY dirty) // of viewpoint to render from
Q_PROPERTY(glm::quat orientation MEMBER orientation NOTIFY dirty) // of viewpoint to render from
Q_PROPERTY(QUuid attachedEntityId MEMBER attachedEntityId NOTIFY dirty) // entity whose properties define camera position and orientation
Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition) // of viewpoint to render from
Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation) // of viewpoint to render from
Q_PROPERTY(float vFoV MEMBER vFoV NOTIFY dirty) // Secondary camera's vertical field of view. In degrees.
Q_PROPERTY(float nearClipPlaneDistance MEMBER nearClipPlaneDistance NOTIFY dirty) // Secondary camera's near clip plane distance. In meters.
Q_PROPERTY(float farClipPlaneDistance MEMBER farClipPlaneDistance NOTIFY dirty) // Secondary camera's far clip plane distance. In meters.
public:
QUuid attachedEntityId{};
glm::vec3 position{};
glm::quat orientation{};
float vFoV{ 45.0f };
float nearClipPlaneDistance{ 0.1f };
float farClipPlaneDistance{ 100.0f };
BeginSecondaryCameraFrameConfig() : render::Task::Config(false) {}
float vFoV{ DEFAULT_FIELD_OF_VIEW_DEGREES };
float nearClipPlaneDistance{ DEFAULT_NEAR_CLIP };
float farClipPlaneDistance{ DEFAULT_FAR_CLIP };
SecondaryCameraJobConfig() : render::Task::Config(false) {}
signals:
void dirty();
public slots:
glm::vec3 getPosition() { return position; }
void setPosition(glm::vec3 pos);
glm::quat getOrientation() { return orientation; }
void setOrientation(glm::quat orient);
void enableSecondaryCameraRenderConfigs(bool enabled);
void resetSizeSpectatorCamera(int width, int height);
};
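// Rough usage sketch from a client script (hypothetical; assumes the inner job is registered
// as "SecondaryCamera" -- see SecondaryCameraRenderTask::build -- and that the script can reach
// it through Render.getConfig):
//   var config = Render.getConfig("SecondaryCamera");
//   config.enableSecondaryCameraRenderConfigs(true);
//   config.resetSizeSpectatorCamera(1280, 720);
//   config.attachedEntityId = cameraEntityID; // or set position / orientation directly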
class SecondaryCameraRenderTaskConfig : public render::Task::Config {
Q_OBJECT
public:
SecondaryCameraRenderTaskConfig() : render::Task::Config(false) {}
private:
void resetSize(int width, int height);
signals:
void dirty();
public slots:
void resetSizeSpectatorCamera(int width, int height);
};
class SecondaryCameraRenderTask {

View file

@ -63,7 +63,6 @@ AvatarManager::AvatarManager(QObject* parent) :
packetReceiver.registerListener(PacketType::BulkAvatarData, this, "processAvatarDataPacket");
packetReceiver.registerListener(PacketType::KillAvatar, this, "processKillAvatar");
packetReceiver.registerListener(PacketType::AvatarIdentity, this, "processAvatarIdentityPacket");
packetReceiver.registerListener(PacketType::ExitingSpaceBubble, this, "processExitingSpaceBubble");
// when we hear that the user has ignored an avatar by session UUID
// immediately remove that avatar instead of waiting for the absence of packets from avatar mixer
@ -320,9 +319,6 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble) {
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
}
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble || removalReason == YourAvatarEnteredTheirBubble) {
DependencyManager::get<NodeList>()->radiusIgnoreNodeBySessionID(avatar->getSessionUUID(), true);
} else if (removalReason == KillAvatarReason::AvatarDisconnected) {
// remove from node sets, if present
DependencyManager::get<NodeList>()->removeFromIgnoreMuteSets(avatar->getSessionUUID());
@ -434,8 +430,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
// but most avatars are roughly the same size, so let's not be so fancy yet.
const float AVATAR_STRETCH_FACTOR = 1.0f;
_collisionInjectors.remove_if([](QPointer<AudioInjector>& injector) {
_collisionInjectors.remove_if([](const AudioInjectorPointer& injector) {
return !injector || injector->isFinished();
});

View file

@ -22,11 +22,11 @@
#include <SimpleMovingAverage.h>
#include <shared/RateCounter.h>
#include <avatars-renderer/ScriptAvatar.h>
#include <AudioInjector.h>
#include "AvatarMotionState.h"
#include "MyAvatar.h"
class AudioInjector;
class AvatarManager : public AvatarHashMap {
Q_OBJECT
@ -104,7 +104,7 @@ private:
std::shared_ptr<MyAvatar> _myAvatar;
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
std::list<QPointer<AudioInjector>> _collisionInjectors;
std::list<AudioInjectorPointer> _collisionInjectors;
RateCounter<> _myAvatarSendRate;
int _numAvatarsUpdated { 0 };

View file

@ -1075,9 +1075,6 @@ void MyAvatar::loadData() {
getHead()->setBasePitch(loadSetting(settings, "headPitch", 0.0f));
_targetScale = loadSetting(settings, "scale", 1.0f);
setScale(glm::vec3(_targetScale));
_prefOverrideAnimGraphUrl.set(QUrl(settings.value("animGraphURL", "").toString()));
_fullAvatarURLFromPreferences = settings.value("fullAvatarURL", AvatarData::defaultFullAvatarModelUrl()).toUrl();
_fullAvatarModelName = settings.value("fullAvatarModelName", DEFAULT_FULL_AVATAR_MODEL_NAME).toString();
@ -2227,6 +2224,14 @@ void MyAvatar::clampScaleChangeToDomainLimits(float desiredScale) {
qCDebug(interfaceapp, "Changed scale to %f", (double)_targetScale);
}
float MyAvatar::getDomainMinScale() {
return _domainMinimumScale;
}
float MyAvatar::getDomainMaxScale() {
return _domainMaximumScale;
}
void MyAvatar::increaseSize() {
// make sure we're starting from an allowable scale
clampTargetScaleToDomainLimits();
@ -2274,17 +2279,27 @@ void MyAvatar::restrictScaleFromDomainSettings(const QJsonObject& domainSettings
if (_domainMinimumScale > _domainMaximumScale) {
std::swap(_domainMinimumScale, _domainMaximumScale);
}
// Set avatar current scale
Settings settings;
settings.beginGroup("Avatar");
_targetScale = loadSetting(settings, "scale", 1.0f);
qCDebug(interfaceapp, "This domain requires a minimum avatar scale of %f and a maximum avatar scale of %f",
(double)_domainMinimumScale, (double)_domainMaximumScale);
qCDebug(interfaceapp) << "This domain requires a minimum avatar scale of " << _domainMinimumScale
<< " and a maximum avatar scale of " << _domainMaximumScale
<< ". Current avatar scale is " << _targetScale;
// debug to log if this avatar's scale in this domain will be clamped
auto clampedScale = glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale);
float clampedScale = glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale);
if (_targetScale != clampedScale) {
qCDebug(interfaceapp, "Avatar scale will be clamped to %f because %f is not allowed by current domain",
(double)clampedScale, (double)_targetScale);
qCDebug(interfaceapp) << "Current avatar scale is clamped to " << clampedScale
<< " because " << _targetScale << " is not allowed by current domain";
// The current scale of avatar should not be more than domain's max_avatar_scale and not less than domain's min_avatar_scale .
_targetScale = clampedScale;
}
setScale(glm::vec3(_targetScale));
settings.endGroup();
}
void MyAvatar::clearScaleRestriction() {

View file

@ -556,6 +556,8 @@ public slots:
void increaseSize();
void decreaseSize();
void resetSize();
float getDomainMinScale();
float getDomainMaxScale();
void goToLocation(const glm::vec3& newPosition,
bool hasOrientation = false, const glm::quat& newOrientation = glm::quat(),

View file

@ -133,4 +133,12 @@ void Audio::setReverb(bool enable) {
void Audio::setReverbOptions(const AudioEffectOptions* options) {
DependencyManager::get<AudioClient>()->setReverbOptions(options);
}
}
void Audio::setInputDevice(const QAudioDeviceInfo& device) {
_devices.chooseInputDevice(device);
}
void Audio::setOutputDevice(const QAudioDeviceInfo& device) {
_devices.chooseOutputDevice(device);
}

View file

@ -50,6 +50,8 @@ public:
void showMicMeter(bool show);
void setInputVolume(float volume);
Q_INVOKABLE void setInputDevice(const QAudioDeviceInfo& device);
Q_INVOKABLE void setOutputDevice(const QAudioDeviceInfo& device);
Q_INVOKABLE void setReverb(bool enable);
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
@ -79,7 +81,7 @@ private:
float _inputVolume { 1.0f };
float _inputLevel { 0.0f };
bool _isMuted { false };
bool _enableNoiseReduction;
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _contextIsHMD { false };
AudioDevices* getDevices() { return &_devices; }

View file

@ -38,7 +38,8 @@ Setting::Handle<QString>& getSetting(bool contextIsHMD, QAudio::Mode mode) {
QHash<int, QByteArray> AudioDeviceList::_roles {
{ Qt::DisplayRole, "display" },
{ Qt::CheckStateRole, "selected" }
{ Qt::CheckStateRole, "selected" },
{ Qt::UserRole, "info" }
};
Qt::ItemFlags AudioDeviceList::_flags { Qt::ItemIsSelectable | Qt::ItemIsEnabled };
@ -51,66 +52,24 @@ QVariant AudioDeviceList::data(const QModelIndex& index, int role) const {
return _devices.at(index.row()).display;
} else if (role == Qt::CheckStateRole) {
return _devices.at(index.row()).selected;
} else if (role == Qt::UserRole) {
return QVariant::fromValue<QAudioDeviceInfo>(_devices.at(index.row()).info);
} else {
return QVariant();
}
}
bool AudioDeviceList::setData(const QModelIndex& index, const QVariant& value, int role) {
if (!index.isValid() || index.row() >= _devices.size() || role != Qt::CheckStateRole) {
return false;
}
// only allow switching to a new device, not deactivating an in-use device
auto selected = value.toBool();
if (!selected) {
return false;
}
return setDevice(index.row(), true);
}
bool AudioDeviceList::setDevice(int row, bool fromUser) {
bool success = false;
auto& device = _devices[row];
_userSelection = fromUser;
// skip if already selected
if (!device.selected) {
auto client = DependencyManager::get<AudioClient>();
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
Q_ARG(QAudio::Mode, _mode),
Q_ARG(const QAudioDeviceInfo&, device.info));
}
emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
return success;
}
void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
bool success { false };
// try to set the last selected device
if (!device.isNull()) {
auto i = 0;
for (; i < rowCount(); ++i) {
if (device == _devices[i].info.deviceName()) {
break;
}
}
if (i < rowCount()) {
success = setDevice(i, false);
}
// the selection failed - reset it
if (!success) {
emit deviceSelected();
}
}
auto client = DependencyManager::get<AudioClient>().data();
auto deviceName = getSetting(contextIsHMD, _mode).get();
bool switchResult = false;
QMetaObject::invokeMethod(client, "switchAudioDevice", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, switchResult),
Q_ARG(QAudio::Mode, _mode), Q_ARG(QString, deviceName));
// try to set to the default device for this mode
if (!success) {
auto client = DependencyManager::get<AudioClient>().data();
if (!switchResult) {
if (contextIsHMD) {
QString deviceName;
if (_mode == QAudio::AudioInput) {
@ -131,7 +90,6 @@ void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
auto oldDevice = _selectedDevice;
_selectedDevice = device;
QModelIndex index;
for (auto i = 0; i < _devices.size(); ++i) {
AudioDevice& device = _devices[i];
@ -139,15 +97,9 @@ void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
device.selected = false;
} else if (device.info == _selectedDevice) {
device.selected = true;
index = createIndex(i, 0);
}
}
if (_userSelection) {
_userSelection = false;
emit deviceSelected(_selectedDevice, oldDevice);
}
emit deviceChanged(_selectedDevice);
emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
}
@ -182,13 +134,6 @@ AudioDevices::AudioDevices(bool& contextIsHMD) : _contextIsHMD(contextIsHMD) {
_outputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioOutput));
_inputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioInput));
_outputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioOutput));
connect(&_inputs, &AudioDeviceList::deviceSelected, [&](const QAudioDeviceInfo& device, const QAudioDeviceInfo& previousDevice) {
onDeviceSelected(QAudio::AudioInput, device, previousDevice);
});
connect(&_outputs, &AudioDeviceList::deviceSelected, [&](const QAudioDeviceInfo& device, const QAudioDeviceInfo& previousDevice) {
onDeviceSelected(QAudio::AudioOutput, device, previousDevice);
});
}
void AudioDevices::onContextChanged(const QString& context) {
@ -244,22 +189,40 @@ void AudioDevices::onDeviceChanged(QAudio::Mode mode, const QAudioDeviceInfo& de
}
void AudioDevices::onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceInfo>& devices) {
static bool initialized { false };
auto initialize = [&]{
if (initialized) {
onContextChanged(QString());
} else {
initialized = true;
}
};
static std::once_flag once;
if (mode == QAudio::AudioInput) {
_inputs.onDevicesChanged(devices);
static std::once_flag inputFlag;
std::call_once(inputFlag, initialize);
} else { // if (mode == QAudio::AudioOutput)
_outputs.onDevicesChanged(devices);
static std::once_flag outputFlag;
std::call_once(outputFlag, initialize);
}
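// Run the initial context sync exactly once, after the first device list has actually
// arrived; call_once covers the case where both the input and output lists report in.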
std::call_once(once, [&] { onContextChanged(QString()); });
}
void AudioDevices::chooseInputDevice(const QAudioDeviceInfo& device) {
auto client = DependencyManager::get<AudioClient>();
bool success = false;
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, success),
Q_ARG(QAudio::Mode, QAudio::AudioInput),
Q_ARG(const QAudioDeviceInfo&, device));
if (success) {
onDeviceSelected(QAudio::AudioInput, device, _inputs._selectedDevice);
}
}
void AudioDevices::chooseOutputDevice(const QAudioDeviceInfo& device) {
auto client = DependencyManager::get<AudioClient>();
bool success = false;
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, success),
Q_ARG(QAudio::Mode, QAudio::AudioOutput),
Q_ARG(const QAudioDeviceInfo&, device));
if (success) {
onDeviceSelected(QAudio::AudioOutput, device, _outputs._selectedDevice);
}
}

View file

@ -37,14 +37,11 @@ public:
// get/set devices through a QML ListView
QVariant data(const QModelIndex& index, int role) const override;
bool setData(const QModelIndex& index, const QVariant &value, int role) override;
// reset device to the last selected device in this context, or the default
void resetDevice(bool contextIsHMD, const QString& device);
signals:
void deviceSelected(const QAudioDeviceInfo& device = QAudioDeviceInfo(),
const QAudioDeviceInfo& previousDevice = QAudioDeviceInfo());
void deviceChanged(const QAudioDeviceInfo& device);
private slots:
@ -54,12 +51,9 @@ private slots:
private:
friend class AudioDevices;
bool setDevice(int index, bool fromUser);
static QHash<int, QByteArray> _roles;
static Qt::ItemFlags _flags;
bool _userSelection { false };
QAudio::Mode _mode;
const QAudio::Mode _mode;
QAudioDeviceInfo _selectedDevice;
QList<AudioDevice> _devices;
};
@ -73,6 +67,8 @@ class AudioDevices : public QObject {
public:
AudioDevices(bool& contextIsHMD);
void chooseInputDevice(const QAudioDeviceInfo& device);
void chooseOutputDevice(const QAudioDeviceInfo& device);
signals:
void nop();

View file

@ -184,12 +184,15 @@ void setupPreferences() {
{
auto getter = [=]()->float { return myAvatar->getUniformScale(); };
auto setter = [=](float value) { myAvatar->setTargetScale(value); };
auto preference = new SpinnerPreference(AVATAR_TUNING, "Avatar scale (default is 1.0)", getter, setter);
preference->setMin(0.01f);
preference->setMax(99.9f);
auto preference = new SpinnerSliderPreference(AVATAR_TUNING, "Avatar Scale", getter, setter);
preference->setStep(0.05f);
preference->setDecimals(2);
preference->setStep(1);
preferences->addPreference(preference);
// When Interface first loads, setupPreferences() runs before the domain settings arrive, so
// myAvatar->getDomainMinScale() and myAvatar->getDomainMaxScale() would be captured here with
// incorrect values that never update across domain switches. Querying them from the QML each
// time the dialog is opened works around this, so the min/max are intentionally not set here.
}
{
auto getter = []()->float { return DependencyManager::get<DdeFaceTracker>()->getEyeClosingThreshold(); };

View file

@ -0,0 +1,114 @@
//
// ResourceImageItem.cpp
//
// Created by David Kelly and Howard Stearns on 2017/06/08
// Copyright 2017 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//#include "Application.h"
#include "ResourceImageItem.h"
#include <QOpenGLFramebufferObjectFormat>
#include <QOpenGLFunctions>
#include <QOpenGLExtraFunctions>
#include <QOpenGLContext>
ResourceImageItem::ResourceImageItem() : QQuickFramebufferObject() {
auto textureCache = DependencyManager::get<TextureCache>();
connect(textureCache.data(), SIGNAL(spectatorCameraFramebufferReset()), this, SLOT(update()));
}
void ResourceImageItem::setUrl(const QString& url) {
if (url != m_url) {
m_url = url;
update();
}
}
void ResourceImageItem::setReady(bool ready) {
if (ready != m_ready) {
m_ready = ready;
update();
}
}
void ResourceImageItemRenderer::onUpdateTimer() {
if (_ready) {
if (_networkTexture && _networkTexture->isLoaded()) {
if(_fboMutex.tryLock()) {
invalidateFramebufferObject();
qApp->getActiveDisplayPlugin()->copyTextureToQuickFramebuffer(_networkTexture, _copyFbo, &_fenceSync);
_fboMutex.unlock();
} else {
qDebug() << "couldn't get a lock, using last frame";
}
} else {
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
}
}
update();
}
ResourceImageItemRenderer::ResourceImageItemRenderer() : QQuickFramebufferObject::Renderer() {
connect(&_updateTimer, SIGNAL(timeout()), this, SLOT(onUpdateTimer()));
auto textureCache = DependencyManager::get<TextureCache>();
}
void ResourceImageItemRenderer::synchronize(QQuickFramebufferObject* item) {
ResourceImageItem* resourceImageItem = static_cast<ResourceImageItem*>(item);
resourceImageItem->setFlag(QQuickItem::ItemHasContents);
_url = resourceImageItem->getUrl();
_ready = resourceImageItem->getReady();
_visible = resourceImageItem->isVisible();
_window = resourceImageItem->window();
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
static const int UPDATE_TIMER_DELAY_IN_MS = 100; // 100 ms = 10 hz for now
if (_ready && _visible && !_updateTimer.isActive()) {
_updateTimer.start(UPDATE_TIMER_DELAY_IN_MS);
} else if (!(_ready && _visible) && _updateTimer.isActive()) {
_updateTimer.stop();
}
}
QOpenGLFramebufferObject* ResourceImageItemRenderer::createFramebufferObject(const QSize& size) {
if (_copyFbo) {
delete _copyFbo;
}
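// Recreate the intermediate copy FBO at the new size; render() blits from _copyFbo into
// the FBO returned below, which is the one Qt Quick actually displays.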
QOpenGLFramebufferObjectFormat format;
format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
_copyFbo = new QOpenGLFramebufferObject(size, format);
_copyFbo->bind();
return new QOpenGLFramebufferObject(size, format);
}
void ResourceImageItemRenderer::render() {
auto f = QOpenGLContext::currentContext()->extraFunctions();
if (_fenceSync) {
f->glWaitSync(_fenceSync, 0, GL_TIMEOUT_IGNORED);
f->glDeleteSync(_fenceSync);
_fenceSync = 0;
}
if (_ready) {
_fboMutex.lock();
_copyFbo->bind();
QOpenGLFramebufferObject::blitFramebuffer(framebufferObject(), _copyFbo, GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT, GL_NEAREST);
// this clears the copyFbo texture
// so next frame starts fresh - helps
// when aspect ratio changes
_copyFbo->takeTexture();
_copyFbo->bind();
_copyFbo->release();
_fboMutex.unlock();
}
glFlush();
_window->resetOpenGLState();
}

View file

@ -0,0 +1,63 @@
//
// ResourceImageItem.h
//
// Created by David Kelly and Howard Stearns on 2017/06/08
// Copyright 2017 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_ResourceImageItem_h
#define hifi_ResourceImageItem_h
#include "Application.h"
#include <QQuickFramebufferObject>
#include <QQuickWindow>
#include <QTimer>
#include <TextureCache.h>
class ResourceImageItemRenderer : public QObject, public QQuickFramebufferObject::Renderer {
Q_OBJECT
public:
ResourceImageItemRenderer();
QOpenGLFramebufferObject* createFramebufferObject(const QSize& size) override;
void synchronize(QQuickFramebufferObject* item) override;
void render() override;
private:
bool _ready;
QString _url;
bool _visible;
NetworkTexturePointer _networkTexture;
QQuickWindow* _window;
QMutex _fboMutex;
QOpenGLFramebufferObject* _copyFbo { nullptr };
GLsync _fenceSync { 0 };
QTimer _updateTimer;
public slots:
void onUpdateTimer();
};
class ResourceImageItem : public QQuickFramebufferObject {
Q_OBJECT
Q_PROPERTY(QString url READ getUrl WRITE setUrl)
Q_PROPERTY(bool ready READ getReady WRITE setReady)
public:
ResourceImageItem();
QString getUrl() const { return m_url; }
void setUrl(const QString& url);
bool getReady() const { return m_ready; }
void setReady(bool ready);
QQuickFramebufferObject::Renderer* createRenderer() const override { return new ResourceImageItemRenderer; }
private:
QString m_url;
bool m_ready { false };
};
#endif // hifi_ResourceImageItem_h

View file

@ -37,9 +37,11 @@ QVariant Billboard3DOverlay::getProperty(const QString &property) {
return Planar3DOverlay::getProperty(property);
}
void Billboard3DOverlay::applyTransformTo(Transform& transform, bool force) {
bool Billboard3DOverlay::applyTransformTo(Transform& transform, bool force) {
bool transformChanged = false;
if (force || usecTimestampNow() > _transformExpiry) {
PanelAttachable::applyTransformTo(transform, true);
pointTransformAtCamera(transform, getOffsetRotation());
transformChanged = PanelAttachable::applyTransformTo(transform, true);
transformChanged |= pointTransformAtCamera(transform, getOffsetRotation());
}
return transformChanged;
}

View file

@ -27,7 +27,7 @@ public:
QVariant getProperty(const QString& property) override;
protected:
virtual void applyTransformTo(Transform& transform, bool force = false) override;
virtual bool applyTransformTo(Transform& transform, bool force = false) override;
};
#endif // hifi_Billboard3DOverlay_h

View file

@ -28,7 +28,7 @@ QVariant Billboardable::getProperty(const QString &property) {
return QVariant();
}
void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
bool Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
if (isFacingAvatar()) {
glm::vec3 billboardPos = transform.getTranslation();
glm::vec3 cameraPos = qApp->getCamera().getPosition();
@ -38,5 +38,7 @@ void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offse
glm::quat rotation(glm::vec3(elevation, azimuth, 0));
transform.setRotation(rotation);
transform.postRotate(offsetRotation);
return true;
}
return false;
}

View file

@ -27,7 +27,7 @@ protected:
void setProperties(const QVariantMap& properties);
QVariant getProperty(const QString& property);
void pointTransformAtCamera(Transform& transform, glm::quat offsetRotation = {1, 0, 0, 0});
bool pointTransformAtCamera(Transform& transform, glm::quat offsetRotation = {1, 0, 0, 0});
private:
bool _isFacingAvatar = false;

View file

@ -99,10 +99,14 @@ void Image3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
xColor color = getColor();
float alpha = getAlpha();
Transform transform = getTransform();
applyTransformTo(transform, true);
setTransform(transform);
bool transformChanged = applyTransformTo(transform, true);
// If the transform is not modified, setting the transform to
// itself will cause drift over time due to floating point errors.
if (transformChanged) {
setTransform(transform);
}
transform.postScale(glm::vec3(getDimensions(), 1.0f));
batch->setModelTransform(transform);

View file

@ -61,7 +61,7 @@ void PanelAttachable::setProperties(const QVariantMap& properties) {
}
}
void PanelAttachable::applyTransformTo(Transform& transform, bool force) {
bool PanelAttachable::applyTransformTo(Transform& transform, bool force) {
if (force || usecTimestampNow() > _transformExpiry) {
const quint64 TRANSFORM_UPDATE_PERIOD = 100000; // frequency is 10 Hz
_transformExpiry = usecTimestampNow() + TRANSFORM_UPDATE_PERIOD;
@ -71,7 +71,9 @@ void PanelAttachable::applyTransformTo(Transform& transform, bool force) {
transform.postTranslate(getOffsetPosition());
transform.postRotate(getOffsetRotation());
transform.postScale(getOffsetScale());
return true;
}
#endif
}
return false;
}

View file

@ -67,7 +67,7 @@ protected:
/// set position, rotation and scale on transform based on offsets, and parent panel offsets
/// if force is false, only apply transform if it hasn't been applied in the last .1 seconds
virtual void applyTransformTo(Transform& transform, bool force = false);
virtual bool applyTransformTo(Transform& transform, bool force = false);
quint64 _transformExpiry = 0;
private:

View file

@ -23,20 +23,23 @@
#include "CubicHermiteSpline.h"
#include "AnimUtil.h"
static void lookupJointChainInfo(AnimInverseKinematics::JointChainInfo* jointChainInfos, size_t numJointChainInfos,
static const float JOINT_CHAIN_INTERP_TIME = 0.25f;
static void lookupJointInfo(const AnimInverseKinematics::JointChainInfo& jointChainInfo,
int indexA, int indexB,
AnimInverseKinematics::JointChainInfo** jointChainInfoA,
AnimInverseKinematics::JointChainInfo** jointChainInfoB) {
*jointChainInfoA = nullptr;
*jointChainInfoB = nullptr;
for (size_t i = 0; i < numJointChainInfos; i++) {
if (jointChainInfos[i].jointIndex == indexA) {
*jointChainInfoA = jointChainInfos + i;
const AnimInverseKinematics::JointInfo** jointInfoA,
const AnimInverseKinematics::JointInfo** jointInfoB) {
*jointInfoA = nullptr;
*jointInfoB = nullptr;
for (size_t i = 0; i < jointChainInfo.jointInfoVec.size(); i++) {
const AnimInverseKinematics::JointInfo* jointInfo = &jointChainInfo.jointInfoVec[i];
if (jointInfo->jointIndex == indexA) {
*jointInfoA = jointInfo;
}
if (jointChainInfos[i].jointIndex == indexB) {
*jointChainInfoB = jointChainInfos + i;
if (jointInfo->jointIndex == indexB) {
*jointInfoB = jointInfo;
}
if (*jointChainInfoA && *jointChainInfoB) {
if (*jointInfoA && *jointInfoB) {
break;
}
}
@ -149,25 +152,28 @@ void AnimInverseKinematics::setTargetVars(const QString& jointName, const QStrin
}
void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses) {
// build a list of valid targets from _targetVarVec and animVars
_maxTargetIndex = -1;
_hipsTargetIndex = -1;
bool removeUnfoundJoints = false;
targets.reserve(_targetVarVec.size());
for (auto& targetVar : _targetVarVec) {
// update targetVar jointIndex cache
if (targetVar.jointIndex == -1) {
// this targetVar hasn't been validated yet...
int jointIndex = _skeleton->nameToJointIndex(targetVar.jointName);
if (jointIndex >= 0) {
// this targetVar has a valid joint --> cache the indices
targetVar.jointIndex = jointIndex;
} else {
qCWarning(animation) << "AnimInverseKinematics could not find jointName" << targetVar.jointName << "in skeleton";
removeUnfoundJoints = true;
}
} else {
IKTarget target;
}
IKTarget target;
if (targetVar.jointIndex != -1) {
target.setType(animVars.lookup(targetVar.typeVar, (int)IKTarget::Type::RotationAndPosition));
target.setIndex(targetVar.jointIndex);
if (target.getType() != IKTarget::Type::Unknown) {
AnimPose absPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
glm::quat rotation = animVars.lookupRigToGeometry(targetVar.rotationVar, absPose.rot());
@ -175,7 +181,6 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
float weight = animVars.lookup(targetVar.weightVar, targetVar.weight);
target.setPose(rotation, translation);
target.setIndex(targetVar.jointIndex);
target.setWeight(weight);
target.setFlexCoefficients(targetVar.numFlexCoefficients, targetVar.flexCoefficients);
@ -188,39 +193,20 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
glm::vec3 poleReferenceVector = animVars.lookupRigToGeometryVector(targetVar.poleReferenceVectorVar, Vectors::UNIT_Z);
target.setPoleReferenceVector(glm::normalize(poleReferenceVector));
targets.push_back(target);
if (targetVar.jointIndex > _maxTargetIndex) {
_maxTargetIndex = targetVar.jointIndex;
}
// record the index of the hips ik target.
if (target.getIndex() == _hipsIndex) {
_hipsTargetIndex = (int)targets.size() - 1;
_hipsTargetIndex = (int)targets.size();
}
}
} else {
target.setType((int)IKTarget::Type::Unknown);
}
}
if (removeUnfoundJoints) {
int numVars = (int)_targetVarVec.size();
int i = 0;
while (i < numVars) {
if (_targetVarVec[i].jointIndex == -1) {
if (numVars > 1) {
// swap i for last element
_targetVarVec[i] = _targetVarVec[numVars - 1];
}
_targetVarVec.pop_back();
--numVars;
} else {
++i;
}
}
targets.push_back(target);
}
}
void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<IKTarget>& targets) {
void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<IKTarget>& targets, float dt, JointChainInfoVec& jointChainInfoVec) {
// compute absolute poses that correspond to relative target poses
AnimPoseVec absolutePoses;
absolutePoses.resize(_relativePoses.size());
@ -234,26 +220,75 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
accumulator.clearAndClean();
}
float maxError = FLT_MAX;
float maxError = 0.0f;
int numLoops = 0;
const int MAX_IK_LOOPS = 16;
const float MAX_ERROR_TOLERANCE = 0.1f; // cm
while (maxError > MAX_ERROR_TOLERANCE && numLoops < MAX_IK_LOOPS) {
while (numLoops < MAX_IK_LOOPS) {
++numLoops;
bool debug = context.getEnableDebugDrawIKChains() && numLoops == MAX_IK_LOOPS;
// solve all targets
for (auto& target: targets) {
if (target.getType() == IKTarget::Type::Spline) {
solveTargetWithSpline(context, target, absolutePoses, debug);
} else {
solveTargetWithCCD(context, target, absolutePoses, debug);
for (size_t i = 0; i < targets.size(); i++) {
switch (targets[i].getType()) {
case IKTarget::Type::Unknown:
break;
case IKTarget::Type::Spline:
solveTargetWithSpline(context, targets[i], absolutePoses, debug, jointChainInfoVec[i]);
break;
default:
solveTargetWithCCD(context, targets[i], absolutePoses, debug, jointChainInfoVec[i]);
break;
}
}
// on last iteration, interpolate jointChains, if necessary
if (numLoops == MAX_IK_LOOPS) {
for (size_t i = 0; i < _prevJointChainInfoVec.size(); i++) {
if (_prevJointChainInfoVec[i].timer > 0.0f) {
float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[i].timer) / JOINT_CHAIN_INTERP_TIME;
size_t chainSize = std::min(_prevJointChainInfoVec[i].jointInfoVec.size(), jointChainInfoVec[i].jointInfoVec.size());
for (size_t j = 0; j < chainSize; j++) {
jointChainInfoVec[i].jointInfoVec[j].rot = safeMix(_prevJointChainInfoVec[i].jointInfoVec[j].rot, jointChainInfoVec[i].jointInfoVec[j].rot, alpha);
jointChainInfoVec[i].jointInfoVec[j].trans = lerp(_prevJointChainInfoVec[i].jointInfoVec[j].trans, jointChainInfoVec[i].jointInfoVec[j].trans, alpha);
}
// if joint chain was just disabled, ramp the weight toward zero.
if (_prevJointChainInfoVec[i].target.getType() != IKTarget::Type::Unknown &&
jointChainInfoVec[i].target.getType() == IKTarget::Type::Unknown) {
IKTarget newTarget = _prevJointChainInfoVec[i].target;
newTarget.setWeight((1.0f - alpha) * _prevJointChainInfoVec[i].target.getWeight());
jointChainInfoVec[i].target = newTarget;
}
}
}
}
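For context, a minimal standalone sketch of the timer-driven blend used above (illustrative only; glm::slerp stands in for the engine's safeMix, and the struct/function names are assumptions, not part of this diff): as a chain's timer counts down from JOINT_CHAIN_INTERP_TIME to zero, alpha ramps from 0 to 1 and the previous frame's solution fades into the new one.

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <algorithm>
#include <vector>

struct SketchJointInfo {
    glm::quat rot;
    glm::vec3 trans;
};

// Blend the previous chain toward the freshly solved chain.
// timer counts down from interpTime to 0, so alpha ramps 0 -> 1 over that window.
void blendChain(const std::vector<SketchJointInfo>& prev,
                std::vector<SketchJointInfo>& current,
                float timer, float interpTime) {
    if (timer <= 0.0f) {
        return; // interpolation finished, keep the new solution as-is
    }
    float alpha = (interpTime - timer) / interpTime;
    size_t chainSize = std::min(prev.size(), current.size());
    for (size_t j = 0; j < chainSize; j++) {
        // slerp here stands in for safeMix (shortest-arc quaternion blend)
        current[j].rot = glm::slerp(prev[j].rot, current[j].rot, alpha);
        current[j].trans = glm::mix(prev[j].trans, current[j].trans, alpha);
    }
}
```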
// copy jointChainInfoVecs into accumulators
for (size_t i = 0; i < targets.size(); i++) {
const std::vector<JointInfo>& jointInfoVec = jointChainInfoVec[i].jointInfoVec;
// don't accumulate disabled or rotation only ik targets.
IKTarget::Type type = jointChainInfoVec[i].target.getType();
if (type != IKTarget::Type::Unknown && type != IKTarget::Type::RotationOnly) {
float weight = jointChainInfoVec[i].target.getWeight();
if (weight > 0.0f) {
for (size_t j = 0; j < jointInfoVec.size(); j++) {
const JointInfo& info = jointInfoVec[j];
if (info.jointIndex >= 0) {
_rotationAccumulators[info.jointIndex].add(info.rot, weight);
_translationAccumulators[info.jointIndex].add(info.trans, weight);
}
}
}
}
}
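The RotationAccumulator/TranslationAccumulator internals are not shown in this diff; a rough standalone sketch of the weighted-accumulation idea (assumed names, and the real implementation may differ): each chain adds its solved rotation with the target's weight, and the normalized weighted sum is later harvested as the per-joint average.

```cpp
// Illustrative sketch of weighted rotation accumulation, not part of the diff.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct RotationAccumulatorSketch {
    glm::vec4 sum { 0.0f, 0.0f, 0.0f, 0.0f };
    float totalWeight { 0.0f };

    void add(const glm::quat& rot, float weight) {
        // keep every contribution in the same hemisphere before summing,
        // since q and -q represent the same rotation
        glm::vec4 v(rot.x, rot.y, rot.z, rot.w);
        if (totalWeight > 0.0f && glm::dot(sum, v) < 0.0f) {
            v = -v;
        }
        sum += weight * v;
        totalWeight += weight;
    }

    glm::quat getAverage() const {
        if (totalWeight <= 0.0f) {
            return glm::quat(1.0f, 0.0f, 0.0f, 0.0f); // identity
        }
        return glm::normalize(glm::quat(sum.w, sum.x, sum.y, sum.z));
    }
};
```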
// harvest accumulated rotations and apply the average
for (int i = 0; i < (int)_relativePoses.size(); ++i) {
if (i == _hipsIndex) {
continue; // don't apply accumulators to hips
}
if (_rotationAccumulators[i].size() > 0) {
_relativePoses[i].rot() = _rotationAccumulators[i].getAverage();
_rotationAccumulators[i].clear();
@ -289,7 +324,7 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
// finally set the relative rotation of each tip to agree with absolute target rotation
for (auto& target: targets) {
int tipIndex = target.getIndex();
int parentIndex = _skeleton->getParentIndex(tipIndex);
int parentIndex = (tipIndex >= 0) ? _skeleton->getParentIndex(tipIndex) : -1;
// update rotationOnly targets that don't lie on the ik chain of other ik targets.
if (parentIndex != -1 && !_rotationAccumulators[tipIndex].isDirty() && target.getType() == IKTarget::Type::RotationOnly) {
@ -308,9 +343,34 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
absolutePoses[tipIndex].rot() = targetRotation;
}
}
// copy jointChainInfoVec into _prevJointChainInfoVec, and update timers
for (size_t i = 0; i < jointChainInfoVec.size(); i++) {
_prevJointChainInfoVec[i].timer = _prevJointChainInfoVec[i].timer - dt;
if (_prevJointChainInfoVec[i].timer <= 0.0f) {
_prevJointChainInfoVec[i] = jointChainInfoVec[i];
_prevJointChainInfoVec[i].target = targets[i];
// store relative poses into unknown/rotation only joint chains,
// so we have something to interpolate from if this chain is activated.
IKTarget::Type type = _prevJointChainInfoVec[i].target.getType();
if (type == IKTarget::Type::Unknown || type == IKTarget::Type::RotationOnly) {
for (size_t j = 0; j < _prevJointChainInfoVec[i].jointInfoVec.size(); j++) {
auto& info = _prevJointChainInfoVec[i].jointInfoVec[j];
if (info.jointIndex >= 0) {
info.rot = _relativePoses[info.jointIndex].rot();
info.trans = _relativePoses[info.jointIndex].trans();
} else {
info.rot = Quaternions::IDENTITY;
info.trans = glm::vec3(0.0f);
}
}
}
}
}
}
void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug) {
void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
bool debug, JointChainInfo& jointChainInfoOut) const {
size_t chainDepth = 0;
IKTarget::Type targetType = target.getType();
@ -338,9 +398,6 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
// the tip's parent-relative as we proceed up the chain
glm::quat tipParentOrientation = absolutePoses[pivotIndex].rot();
const size_t MAX_CHAIN_DEPTH = 30;
JointChainInfo jointChainInfos[MAX_CHAIN_DEPTH];
// NOTE: if this code is removed, the head will remain rigid, causing the spine/hips to thrust forward and backward
// as the head is nodded.
if (targetType == IKTarget::Type::HmdHead ||
@ -368,7 +425,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
}
glm::vec3 tipRelativeTranslation = _relativePoses[target.getIndex()].trans();
jointChainInfos[chainDepth] = { tipRelativeRotation, tipRelativeTranslation, target.getWeight(), tipIndex, constrained };
jointChainInfoOut.jointInfoVec[chainDepth] = { tipRelativeRotation, tipRelativeTranslation, tipIndex, constrained };
}
// cache tip absolute position
@ -379,7 +436,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
// descend toward root, pivoting each joint to get tip closer to target position
while (pivotIndex != _hipsIndex && pivotsParentIndex != -1) {
assert(chainDepth < MAX_CHAIN_DEPTH);
assert(chainDepth < jointChainInfoOut.jointInfoVec.size());
// compute the two lines that should be aligned
glm::vec3 jointPosition = absolutePoses[pivotIndex].trans();
@ -444,9 +501,8 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
glm::quat twistPart;
glm::vec3 axis = glm::normalize(deltaRotation * leverArm);
swingTwistDecomposition(missingRotation, axis, swingPart, twistPart);
float dotSign = copysignf(1.0f, twistPart.w);
const float LIMIT_LEAK_FRACTION = 0.1f;
deltaRotation = glm::normalize(glm::lerp(glm::quat(), dotSign * twistPart, LIMIT_LEAK_FRACTION)) * deltaRotation;
deltaRotation = safeLerp(glm::quat(), twistPart, LIMIT_LEAK_FRACTION);
}
}
}
@ -455,9 +511,8 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
// An HmdHead target slaves the orientation of the end-effector by distributing rotation
// deltas up the hierarchy. Its target position is enforced later (by shifting the hips).
deltaRotation = target.getRotation() * glm::inverse(tipOrientation);
float dotSign = copysignf(1.0f, deltaRotation.w);
const float ANGLE_DISTRIBUTION_FACTOR = 0.45f;
deltaRotation = glm::normalize(glm::lerp(glm::quat(), dotSign * deltaRotation, ANGLE_DISTRIBUTION_FACTOR));
deltaRotation = safeLerp(glm::quat(), deltaRotation, ANGLE_DISTRIBUTION_FACTOR);
}
// compute joint's new parent-relative rotation after swing
@ -480,7 +535,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
}
glm::vec3 newTrans = _relativePoses[pivotIndex].trans();
jointChainInfos[chainDepth] = { newRot, newTrans, target.getWeight(), pivotIndex, constrained };
jointChainInfoOut.jointInfoVec[chainDepth] = { newRot, newTrans, pivotIndex, constrained };
// keep track of tip's new transform as we descend towards root
tipPosition = jointPosition + deltaRotation * (tipPosition - jointPosition);
@ -502,24 +557,25 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
int baseParentJointIndex = _skeleton->getParentIndex(baseJointIndex);
AnimPose topPose, midPose, basePose;
int topChainIndex = -1, baseChainIndex = -1;
const size_t MAX_CHAIN_DEPTH = 30;
AnimPose postAbsPoses[MAX_CHAIN_DEPTH];
AnimPose accum = absolutePoses[_hipsIndex];
AnimPose baseParentPose = absolutePoses[_hipsIndex];
for (int i = (int)chainDepth - 1; i >= 0; i--) {
accum = accum * AnimPose(glm::vec3(1.0f), jointChainInfos[i].relRot, jointChainInfos[i].relTrans);
accum = accum * AnimPose(glm::vec3(1.0f), jointChainInfoOut.jointInfoVec[i].rot, jointChainInfoOut.jointInfoVec[i].trans);
postAbsPoses[i] = accum;
if (jointChainInfos[i].jointIndex == topJointIndex) {
if (jointChainInfoOut.jointInfoVec[i].jointIndex == topJointIndex) {
topChainIndex = i;
topPose = accum;
}
if (jointChainInfos[i].jointIndex == midJointIndex) {
if (jointChainInfoOut.jointInfoVec[i].jointIndex == midJointIndex) {
midPose = accum;
}
if (jointChainInfos[i].jointIndex == baseJointIndex) {
if (jointChainInfoOut.jointInfoVec[i].jointIndex == baseJointIndex) {
baseChainIndex = i;
basePose = accum;
}
if (jointChainInfos[i].jointIndex == baseParentJointIndex) {
if (jointChainInfoOut.jointInfoVec[i].jointIndex == baseParentJointIndex) {
baseParentPose = accum;
}
}
@ -599,21 +655,16 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
}
glm::quat newBaseRelRot = glm::inverse(baseParentPose.rot()) * poleRot * basePose.rot();
jointChainInfos[baseChainIndex].relRot = newBaseRelRot;
jointChainInfoOut.jointInfoVec[baseChainIndex].rot = newBaseRelRot;
glm::quat newTopRelRot = glm::inverse(midPose.rot()) * glm::inverse(poleRot) * topPose.rot();
jointChainInfos[topChainIndex].relRot = newTopRelRot;
jointChainInfoOut.jointInfoVec[topChainIndex].rot = newTopRelRot;
}
}
}
for (size_t i = 0; i < chainDepth; i++) {
_rotationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relRot, jointChainInfos[i].weight);
_translationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relTrans, jointChainInfos[i].weight);
}
if (debug) {
debugDrawIKChain(jointChainInfos, chainDepth, context);
debugDrawIKChain(jointChainInfoOut, context);
}
}
@ -628,7 +679,7 @@ static CubicHermiteSplineFunctorWithArcLength computeSplineFromTipAndBase(const
}
// pre-compute information about each joint influenced by this spline IK target.
void AnimInverseKinematics::computeSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target) {
void AnimInverseKinematics::computeAndCacheSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target) const {
std::vector<SplineJointInfo> splineJointInfoVec;
// build spline between the default poses.
@ -681,13 +732,13 @@ void AnimInverseKinematics::computeSplineJointInfosForIKTarget(const AnimContext
_splineJointInfoMap[target.getIndex()] = splineJointInfoVec;
}
const std::vector<AnimInverseKinematics::SplineJointInfo>* AnimInverseKinematics::findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target) {
const std::vector<AnimInverseKinematics::SplineJointInfo>* AnimInverseKinematics::findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target) const {
// find or create splineJointInfo for this target
auto iter = _splineJointInfoMap.find(target.getIndex());
if (iter != _splineJointInfoMap.end()) {
return &(iter->second);
} else {
computeSplineJointInfosForIKTarget(context, target);
computeAndCacheSplineJointInfosForIKTarget(context, target);
auto iter = _splineJointInfoMap.find(target.getIndex());
if (iter != _splineJointInfoMap.end()) {
return &(iter->second);
@ -697,10 +748,8 @@ const std::vector<AnimInverseKinematics::SplineJointInfo>* AnimInverseKinematics
return nullptr;
}
void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug) {
const size_t MAX_CHAIN_DEPTH = 30;
JointChainInfo jointChainInfos[MAX_CHAIN_DEPTH];
void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
bool debug, JointChainInfo& jointChainInfoOut) const {
const int baseIndex = _hipsIndex;
@ -720,7 +769,7 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
// This prevents the rotation interpolation from rotating the wrong physical way (but correct mathematical way)
// when the head is arched backwards very far.
glm::quat halfRot = glm::normalize(glm::lerp(basePose.rot(), tipPose.rot(), 0.5f));
glm::quat halfRot = safeLerp(basePose.rot(), tipPose.rot(), 0.5f);
if (glm::dot(halfRot * Vectors::UNIT_Z, basePose.rot() * Vectors::UNIT_Z) < 0.0f) {
tipPose.rot() = -tipPose.rot();
}
@ -743,7 +792,7 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
if (target.getIndex() == _headIndex) {
rotT = t * t;
}
glm::quat twistRot = glm::normalize(glm::lerp(basePose.rot(), tipPose.rot(), rotT));
glm::quat twistRot = safeLerp(basePose.rot(), tipPose.rot(), rotT);
// compute the rotation by using the derivative of the spline as the y-axis, and the twistRot x-axis
glm::vec3 y = glm::normalize(spline.d(t));
@ -783,19 +832,14 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
}
}
jointChainInfos[i] = { relPose.rot(), relPose.trans(), target.getWeight(), splineJointInfo.jointIndex, constrained };
jointChainInfoOut.jointInfoVec[i] = { relPose.rot(), relPose.trans(), splineJointInfo.jointIndex, constrained };
parentAbsPose = flexedAbsPose;
}
}
for (size_t i = 0; i < splineJointInfoVec->size(); i++) {
_rotationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relRot, jointChainInfos[i].weight);
_translationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relTrans, jointChainInfos[i].weight);
}
if (debug) {
debugDrawIKChain(jointChainInfos, splineJointInfoVec->size(), context);
debugDrawIKChain(jointChainInfoOut, context);
}
}
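As background for the spline solve above, which orients each joint using the spline's derivative as the y-axis and the twist rotation's x-axis, here is one common way to build such a frame (a sketch under the assumption that the two directions are not parallel; the function name is illustrative):

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Build a rotation whose y-axis follows a given tangent and whose x-axis comes
// from a reference twist rotation, re-orthogonalized with cross products.
// Assumes the tangent and the twist x-axis are not (nearly) parallel.
glm::quat rotationFromTangentAndTwist(const glm::vec3& tangent, const glm::quat& twistRot) {
    glm::vec3 y = glm::normalize(tangent);
    glm::vec3 x = twistRot * glm::vec3(1.0f, 0.0f, 0.0f);
    glm::vec3 z = glm::normalize(glm::cross(x, y)); // completes a right-handed frame
    x = glm::normalize(glm::cross(y, z));           // re-orthogonalize x against y
    glm::mat3 m(x, y, z);                           // columns are the basis axes
    return glm::normalize(glm::quat_cast(m));
}
```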
@ -806,6 +850,24 @@ const AnimPoseVec& AnimInverseKinematics::evaluate(const AnimVariantMap& animVar
return _relativePoses;
}
AnimPose AnimInverseKinematics::applyHipsOffset() const {
glm::vec3 hipsOffset = _hipsOffset;
AnimPose relHipsPose = _relativePoses[_hipsIndex];
float offsetLength = glm::length(hipsOffset);
const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
if (offsetLength > MIN_HIPS_OFFSET_LENGTH) {
float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
glm::vec3 scaledHipsOffset = scaleFactor * hipsOffset;
if (_hipsParentIndex == -1) {
relHipsPose.trans() = _relativePoses[_hipsIndex].trans() + scaledHipsOffset;
} else {
AnimPose absHipsPose = _skeleton->getAbsolutePose(_hipsIndex, _relativePoses);
absHipsPose.trans() += scaledHipsOffset;
relHipsPose = _skeleton->getAbsolutePose(_hipsParentIndex, _relativePoses).inverse() * absHipsPose;
}
}
return relHipsPose;
}
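A small standalone illustration of the dead-zone scaling applyHipsOffset uses (hypothetical names, not part of the diff): offsets shorter than the minimum length are ignored, and longer offsets are shortened by that minimum so the applied offset grows smoothly from zero.

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>

// Returns the offset actually applied to the hips: zero inside the dead zone,
// otherwise (length - minLength) along the original direction.
glm::vec3 scaleHipsOffset(const glm::vec3& offset, float minLength) {
    float offsetLength = glm::length(offset);
    if (offsetLength <= minLength) {
        return glm::vec3(0.0f);
    }
    float scaleFactor = (offsetLength - minLength) / offsetLength;
    return scaleFactor * offset;
}
```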
//virtual
const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars, const AnimContext& context, float dt, Triggers& triggersOut, const AnimPoseVec& underPoses) {
@ -850,33 +912,88 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
_relativePoses = underPoses;
} else {
JointChainInfoVec jointChainInfoVec(targets.size());
{
PROFILE_RANGE_EX(simulation_animation, "ik/jointChainInfo", 0xffff00ff, 0);
// initialize a new jointChainInfoVec; it will hold the results of solving each ik chain.
JointInfo defaultJointInfo = { glm::quat(), glm::vec3(), -1, false };
for (size_t i = 0; i < targets.size(); i++) {
size_t chainDepth = (size_t)_skeleton->getChainDepth(targets[i].getIndex());
jointChainInfoVec[i].jointInfoVec.reserve(chainDepth);
jointChainInfoVec[i].target = targets[i];
int index = targets[i].getIndex();
for (size_t j = 0; j < chainDepth; j++) {
jointChainInfoVec[i].jointInfoVec.push_back(defaultJointInfo);
jointChainInfoVec[i].jointInfoVec[j].jointIndex = index;
index = _skeleton->getParentIndex(index);
}
}
// identify joint chains that have changed types this frame.
_prevJointChainInfoVec.resize(jointChainInfoVec.size());
for (size_t i = 0; i < _prevJointChainInfoVec.size(); i++) {
if (_prevJointChainInfoVec[i].timer <= 0.0f &&
(jointChainInfoVec[i].target.getType() != _prevJointChainInfoVec[i].target.getType() ||
jointChainInfoVec[i].target.getPoleVectorEnabled() != _prevJointChainInfoVec[i].target.getPoleVectorEnabled())) {
_prevJointChainInfoVec[i].timer = JOINT_CHAIN_INTERP_TIME;
}
}
}
{
PROFILE_RANGE_EX(simulation_animation, "ik/shiftHips", 0xffff00ff, 0);
if (_hipsTargetIndex >= 0 && _hipsTargetIndex < (int)targets.size()) {
if (_hipsTargetIndex >= 0) {
assert(_hipsTargetIndex < (int)targets.size());
// slam the hips to match the _hipsTarget
AnimPose absPose = targets[_hipsTargetIndex].getPose();
int parentIndex = _skeleton->getParentIndex(targets[_hipsTargetIndex].getIndex());
if (parentIndex != -1) {
_relativePoses[_hipsIndex] = _skeleton->getAbsolutePose(parentIndex, _relativePoses).inverse() * absPose;
} else {
_relativePoses[_hipsIndex] = absPose;
AnimPose parentAbsPose = _skeleton->getAbsolutePose(parentIndex, _relativePoses);
// do smooth interpolation of hips, if necessary.
if (_prevJointChainInfoVec[_hipsTargetIndex].timer > 0.0f && _prevJointChainInfoVec[_hipsTargetIndex].jointInfoVec.size() > 0) {
float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[_hipsTargetIndex].timer) / JOINT_CHAIN_INTERP_TIME;
auto& info = _prevJointChainInfoVec[_hipsTargetIndex].jointInfoVec[0];
AnimPose prevHipsRelPose(info.rot, info.trans);
AnimPose prevHipsAbsPose = parentAbsPose * prevHipsRelPose;
::blend(1, &prevHipsAbsPose, &absPose, alpha, &absPose);
}
} else {
_relativePoses[_hipsIndex] = parentAbsPose.inverse() * absPose;
_relativePoses[_hipsIndex].scale() = glm::vec3(1.0f);
_hipsOffset = Vectors::ZERO;
} else if (_hipsIndex >= 0) {
// if there is no hips target, shift hips according to the _hipsOffset from the previous frame
float offsetLength = glm::length(_hipsOffset);
const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
if (offsetLength > MIN_HIPS_OFFSET_LENGTH && _hipsIndex >= 0) {
float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
glm::vec3 hipsOffset = scaleFactor * _hipsOffset;
if (_hipsParentIndex == -1) {
_relativePoses[_hipsIndex].trans() = _relativePoses[_hipsIndex].trans() + hipsOffset;
} else {
auto absHipsPose = _skeleton->getAbsolutePose(_hipsIndex, _relativePoses);
absHipsPose.trans() += hipsOffset;
_relativePoses[_hipsIndex] = _skeleton->getAbsolutePose(_hipsParentIndex, _relativePoses).inverse() * absHipsPose;
AnimPose relHipsPose = applyHipsOffset();
// determine if we should begin interpolating the hips.
for (size_t i = 0; i < targets.size(); i++) {
if (_prevJointChainInfoVec[i].target.getIndex() == _hipsIndex) {
if (_prevJointChainInfoVec[i].timer > 0.0f) {
// smoothly lerp in hipsOffset
float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[i].timer) / JOINT_CHAIN_INTERP_TIME;
AnimPose prevRelHipsPose(_prevJointChainInfoVec[i].jointInfoVec[0].rot, _prevJointChainInfoVec[i].jointInfoVec[0].trans);
::blend(1, &prevRelHipsPose, &relHipsPose, alpha, &relHipsPose);
}
break;
}
}
_relativePoses[_hipsIndex] = relHipsPose;
}
// if there is an active jointChainInfo for the hips, store the post-shifted hips into it.
// This is so we have a valid pose to interpolate from when the hips target is disabled.
if (_hipsTargetIndex >= 0) {
jointChainInfoVec[_hipsTargetIndex].jointInfoVec[0].rot = _relativePoses[_hipsIndex].rot();
jointChainInfoVec[_hipsTargetIndex].jointInfoVec[0].trans = _relativePoses[_hipsIndex].trans();
}
// update all HipsRelative targets to account for the hips shift/ik target.
@ -920,15 +1037,14 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
{
PROFILE_RANGE_EX(simulation_animation, "ik/ccd", 0xffff00ff, 0);
preconditionRelativePosesToAvoidLimbLock(context, targets);
solve(context, targets);
solve(context, targets, dt, jointChainInfoVec);
}
if (_hipsTargetIndex < 0) {
PROFILE_RANGE_EX(simulation_animation, "ik/measureHipsOffset", 0xffff00ff, 0);
computeHipsOffset(targets, underPoses, dt);
} else {
_hipsOffset = Vectors::ZERO;
_hipsOffset = computeHipsOffset(targets, underPoses, dt, _hipsOffset);
}
}
@ -937,23 +1053,15 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
}
}
if (_leftHandIndex > -1) {
_uncontrolledLeftHandPose = _skeleton->getAbsolutePose(_leftHandIndex, underPoses);
}
if (_rightHandIndex > -1) {
_uncontrolledRightHandPose = _skeleton->getAbsolutePose(_rightHandIndex, underPoses);
}
if (_hipsIndex > -1) {
_uncontrolledHipsPose = _skeleton->getAbsolutePose(_hipsIndex, underPoses);
}
return _relativePoses;
}
void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt) {
glm::vec3 AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt, glm::vec3 prevHipsOffset) const {
// measure new _hipsOffset for next frame
// by looking for discrepancies between where a targeted endEffector is
// and where it wants to be (after IK solutions are done)
glm::vec3 hipsOffset = prevHipsOffset;
glm::vec3 newHipsOffset = Vectors::ZERO;
for (auto& target: targets) {
int targetIndex = target.getIndex();
@ -969,9 +1077,9 @@ void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targe
} else if (target.getType() == IKTarget::Type::HmdHead) {
// we want to shift the hips to bring the head to its designated position
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
_hipsOffset += target.getTranslation() - actual;
hipsOffset += target.getTranslation() - actual;
// and ignore all other targets
newHipsOffset = _hipsOffset;
newHipsOffset = hipsOffset;
break;
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
@ -991,16 +1099,18 @@ void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targe
}
}
// smooth transitions by relaxing _hipsOffset toward the new value
// smooth transitions by relaxing hipsOffset toward the new value
const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;
float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
_hipsOffset += (newHipsOffset - _hipsOffset) * tau;
hipsOffset += (newHipsOffset - hipsOffset) * tau;
// clamp the hips offset
float hipsOffsetLength = glm::length(_hipsOffset);
float hipsOffsetLength = glm::length(hipsOffset);
if (hipsOffsetLength > _maxHipsOffsetLength) {
_hipsOffset *= _maxHipsOffsetLength / hipsOffsetLength;
hipsOffset *= _maxHipsOffsetLength / hipsOffsetLength;
}
return hipsOffset;
}
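The relaxation and clamp at the end of computeHipsOffset is a standard first-order filter; a minimal sketch under the same assumptions, with the timescale and maximum length passed in as parameters (names are illustrative):

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>
#include <algorithm>

// Move current toward target with time constant `timescale`, then clamp its length.
glm::vec3 relaxAndClamp(glm::vec3 current, const glm::vec3& target,
                        float dt, float timescale, float maxLength) {
    float tau = std::min(dt / timescale, 1.0f);
    current += (target - current) * tau;
    float len = glm::length(current);
    if (len > maxLength) {
        current *= maxLength / len;
    }
    return current;
}
```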
void AnimInverseKinematics::setMaxHipsOffsetLength(float maxLength) {
@ -1414,8 +1524,6 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
targetVar.jointIndex = -1;
}
_maxTargetIndex = -1;
for (auto& accumulator: _rotationAccumulators) {
accumulator.clearAndClean();
}
@ -1446,10 +1554,6 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
_leftHandIndex = -1;
_rightHandIndex = -1;
}
_uncontrolledLeftHandPose = AnimPose();
_uncontrolledRightHandPose = AnimPose();
_uncontrolledHipsPose = AnimPose();
}
static glm::vec3 sphericalToCartesian(float phi, float theta) {
@ -1495,14 +1599,14 @@ void AnimInverseKinematics::debugDrawRelativePoses(const AnimContext& context) c
}
}
void AnimInverseKinematics::debugDrawIKChain(JointChainInfo* jointChainInfos, size_t numJointChainInfos, const AnimContext& context) const {
void AnimInverseKinematics::debugDrawIKChain(const JointChainInfo& jointChainInfo, const AnimContext& context) const {
AnimPoseVec poses = _relativePoses;
// copy debug joint rotations into the relative poses
for (size_t i = 0; i < numJointChainInfos; i++) {
const JointChainInfo& info = jointChainInfos[i];
poses[info.jointIndex].rot() = info.relRot;
poses[info.jointIndex].trans() = info.relTrans;
for (size_t i = 0; i < jointChainInfo.jointInfoVec.size(); i++) {
const JointInfo& info = jointChainInfo.jointInfoVec[i];
poses[info.jointIndex].rot() = info.rot;
poses[info.jointIndex].trans() = info.trans;
}
// convert relative poses to absolute
@ -1519,9 +1623,9 @@ void AnimInverseKinematics::debugDrawIKChain(JointChainInfo* jointChainInfos, si
// draw each pose
for (int i = 0; i < (int)poses.size(); i++) {
int parentIndex = _skeleton->getParentIndex(i);
JointChainInfo* jointInfo = nullptr;
JointChainInfo* parentJointInfo = nullptr;
lookupJointChainInfo(jointChainInfos, numJointChainInfos, i, parentIndex, &jointInfo, &parentJointInfo);
const JointInfo* jointInfo = nullptr;
const JointInfo* parentJointInfo = nullptr;
lookupJointInfo(jointChainInfo, i, parentIndex, &jointInfo, &parentJointInfo);
if (jointInfo && parentJointInfo) {
// transform local axes into world space.
@ -1608,7 +1712,7 @@ void AnimInverseKinematics::debugDrawConstraints(const AnimContext& context) con
const int NUM_SWING_STEPS = 10;
for (int i = 0; i < NUM_SWING_STEPS + 1; i++) {
glm::quat rot = glm::normalize(glm::lerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS)));
glm::quat rot = safeLerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS));
glm::vec3 axis = transformVectorFast(geomToWorldMatrix, parentAbsRot * rot * refRot * Vectors::UNIT_Y);
DebugDraw::getInstance().drawRay(pos, pos + TWIST_LENGTH * axis, CYAN);
}
@ -1626,7 +1730,7 @@ void AnimInverseKinematics::debugDrawConstraints(const AnimContext& context) con
const int NUM_SWING_STEPS = 10;
for (int i = 0; i < NUM_SWING_STEPS + 1; i++) {
glm::quat rot = glm::normalize(glm::lerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS)));
glm::quat rot = safeLerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS));
glm::vec3 axis = transformVectorFast(geomToWorldMatrix, parentAbsRot * rot * refRot * Vectors::UNIT_X);
DebugDraw::getInstance().drawRay(pos, pos + TWIST_LENGTH * axis, CYAN);
}
@ -1666,10 +1770,9 @@ void AnimInverseKinematics::blendToPoses(const AnimPoseVec& targetPoses, const A
// relax toward poses
int numJoints = (int)_relativePoses.size();
for (int i = 0; i < numJoints; ++i) {
float dotSign = copysignf(1.0f, glm::dot(_relativePoses[i].rot(), targetPoses[i].rot()));
if (_rotationAccumulators[i].isDirty()) {
// this joint is affected by IK --> blend toward the targetPoses rotation
_relativePoses[i].rot() = glm::normalize(glm::lerp(_relativePoses[i].rot(), dotSign * targetPoses[i].rot(), blendFactor));
_relativePoses[i].rot() = safeLerp(_relativePoses[i].rot(), targetPoses[i].rot(), blendFactor);
} else {
// this joint is NOT affected by IK --> slam to underPoses rotation
_relativePoses[i].rot() = underPoses[i].rot();


@ -26,14 +26,21 @@ class RotationConstraint;
class AnimInverseKinematics : public AnimNode {
public:
struct JointChainInfo {
glm::quat relRot;
glm::vec3 relTrans;
float weight;
struct JointInfo {
glm::quat rot;
glm::vec3 trans;
int jointIndex;
bool constrained;
};
struct JointChainInfo {
std::vector<JointInfo> jointInfoVec;
IKTarget target;
float timer { 0.0f };
};
using JointChainInfoVec = std::vector<JointChainInfo>;
explicit AnimInverseKinematics(const QString& id);
virtual ~AnimInverseKinematics() override;
@ -66,23 +73,22 @@ public:
void setSolutionSource(SolutionSource solutionSource) { _solutionSource = solutionSource; }
void setSolutionSourceVar(const QString& solutionSourceVar) { _solutionSourceVar = solutionSourceVar; }
const AnimPose& getUncontrolledLeftHandPose() { return _uncontrolledLeftHandPose; }
const AnimPose& getUncontrolledRightHandPose() { return _uncontrolledRightHandPose; }
const AnimPose& getUncontrolledHipPose() { return _uncontrolledHipsPose; }
protected:
void computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses);
void solve(const AnimContext& context, const std::vector<IKTarget>& targets);
void solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug);
void solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug);
void solve(const AnimContext& context, const std::vector<IKTarget>& targets, float dt, JointChainInfoVec& jointChainInfoVec);
void solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
bool debug, JointChainInfo& jointChainInfoOut) const;
void solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
bool debug, JointChainInfo& jointChainInfoOut) const;
virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) override;
void debugDrawIKChain(JointChainInfo* jointChainInfos, size_t numJointChainInfos, const AnimContext& context) const;
void debugDrawIKChain(const JointChainInfo& jointChainInfo, const AnimContext& context) const;
void debugDrawRelativePoses(const AnimContext& context) const;
void debugDrawConstraints(const AnimContext& context) const;
void debugDrawSpineSplines(const AnimContext& context, const std::vector<IKTarget>& targets) const;
void initRelativePosesFromSolutionSource(SolutionSource solutionSource, const AnimPoseVec& underPose);
void blendToPoses(const AnimPoseVec& targetPoses, const AnimPoseVec& underPose, float blendFactor);
void preconditionRelativePosesToAvoidLimbLock(const AnimContext& context, const std::vector<IKTarget>& targets);
AnimPose applyHipsOffset() const;
// used to pre-compute information about each joint influenced by a spline IK target.
struct SplineJointInfo {
@ -91,8 +97,8 @@ protected:
AnimPose offsetPose; // local offset from the spline to the joint.
};
void computeSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target);
const std::vector<SplineJointInfo>* findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target);
void computeAndCacheSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target) const;
const std::vector<SplineJointInfo>* findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target) const;
// for AnimDebugDraw rendering
virtual const AnimPoseVec& getPosesInternal() const override { return _relativePoses; }
@ -101,7 +107,7 @@ protected:
void clearConstraints();
void initConstraints();
void initLimitCenterPoses();
void computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt);
glm::vec3 computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt, glm::vec3 prevHipsOffset) const;
// no copies
AnimInverseKinematics(const AnimInverseKinematics&) = delete;
@ -136,7 +142,7 @@ protected:
AnimPoseVec _relativePoses; // current relative poses
AnimPoseVec _limitCenterPoses; // relative
std::map<int, std::vector<SplineJointInfo>> _splineJointInfoMap;
mutable std::map<int, std::vector<SplineJointInfo>> _splineJointInfoMap;
// experimental data for moving hips during IK
glm::vec3 _hipsOffset { Vectors::ZERO };
@ -148,18 +154,12 @@ protected:
int _leftHandIndex { -1 };
int _rightHandIndex { -1 };
// _maxTargetIndex is tracked to help optimize the recalculation of absolute poses
// during the cyclic coordinate descent algorithm
int _maxTargetIndex { 0 };
float _maxErrorOnLastSolve { FLT_MAX };
bool _previousEnableDebugIKTargets { false };
SolutionSource _solutionSource { SolutionSource::RelaxToUnderPoses };
QString _solutionSourceVar;
AnimPose _uncontrolledLeftHandPose { AnimPose() };
AnimPose _uncontrolledRightHandPose { AnimPose() };
AnimPose _uncontrolledHipsPose { AnimPose() };
JointChainInfoVec _prevJointChainInfoVec;
};
#endif // hifi_AnimInverseKinematics_h


@ -42,6 +42,20 @@ int AnimSkeleton::getNumJoints() const {
return _jointsSize;
}
int AnimSkeleton::getChainDepth(int jointIndex) const {
if (jointIndex >= 0) {
int chainDepth = 0;
int index = jointIndex;
do {
chainDepth++;
index = _joints[index].parentIndex;
} while (index != -1);
return chainDepth;
} else {
return 0;
}
}
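As a usage illustration (hypothetical skeleton data, not from the diff), getChainDepth simply walks parent indices from a joint to the root; the IK node uses the returned depth to pre-size each chain's jointInfoVec from tip to root, roughly like this:

```cpp
// Illustrative sketch, not part of the diff.
#include <vector>

// A skeleton reduced to a parent-index table: -1 marks the root's parent.
struct SkeletonSketch {
    std::vector<int> parentIndex;

    int getChainDepth(int jointIndex) const {
        if (jointIndex < 0) {
            return 0;
        }
        int depth = 0;
        for (int index = jointIndex; index != -1; index = parentIndex[index]) {
            depth++;
        }
        return depth;
    }

    // Collect the joint indices of a chain, tip first, root last.
    std::vector<int> buildChain(int tipIndex) const {
        std::vector<int> chain;
        chain.reserve(getChainDepth(tipIndex));
        for (int index = tipIndex; index != -1; index = parentIndex[index]) {
            chain.push_back(index);
        }
        return chain;
    }
};
```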
const AnimPose& AnimSkeleton::getAbsoluteBindPose(int jointIndex) const {
return _absoluteBindPoses[jointIndex];
}


@ -28,6 +28,7 @@ public:
int nameToJointIndex(const QString& jointName) const;
const QString& getJointName(int jointIndex) const;
int getNumJoints() const;
int getChainDepth(int jointIndex) const;
// absolute pose, not relative to parent
const AnimPose& getAbsoluteBindPose(int jointIndex) const;


@ -28,7 +28,7 @@ void blend(size_t numPoses, const AnimPose* a, const AnimPose* b, float alpha, A
}
result[i].scale() = lerp(aPose.scale(), bPose.scale(), alpha);
result[i].rot() = glm::normalize(glm::lerp(aPose.rot(), q2, alpha));
result[i].rot() = safeLerp(aPose.rot(), bPose.rot(), alpha);
result[i].trans() = lerp(aPose.trans(), bPose.trans(), alpha);
}
}


@ -21,4 +21,14 @@ glm::quat averageQuats(size_t numQuats, const glm::quat* quats);
float accumulateTime(float startFrame, float endFrame, float timeScale, float currentFrame, float dt, bool loopFlag,
const QString& id, AnimNode::Triggers& triggersOut);
inline glm::quat safeLerp(const glm::quat& a, const glm::quat& b, float alpha) {
// adjust signs if necessary
glm::quat bTemp = b;
float dot = glm::dot(a, bTemp);
if (dot < 0.0f) {
bTemp = -bTemp;
}
return glm::normalize(glm::lerp(a, bTemp, alpha));
}
#endif
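The sign adjustment in safeLerp matters because q and -q encode the same rotation; without it, lerp can take the long way around and pass near zero length. A small usage sketch (the angles and function name are illustrative):

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Two quaternions 350 degrees apart about Y are only 10 degrees apart once the
// sign of b is flipped; the dot-product check picks the short path automatically.
glm::quat shortPathHalfway() {
    glm::quat a = glm::angleAxis(glm::radians(5.0f), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::quat b = glm::angleAxis(glm::radians(355.0f), glm::vec3(0.0f, 1.0f, 0.0f));
    if (glm::dot(a, b) < 0.0f) { // same adjustment safeLerp performs
        b = -b;
    }
    return glm::normalize(glm::lerp(a, b, 0.5f));
}
```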


@ -56,8 +56,8 @@ private:
glm::vec3 _poleReferenceVector;
bool _poleVectorEnabled { false };
int _index { -1 };
Type _type { Type::RotationAndPosition };
float _weight;
Type _type { Type::Unknown };
float _weight { 0.0f };
float _flexCoefficients[MAX_FLEX_COEFFICIENTS];
size_t _numFlexCoefficients;
};


@ -1115,36 +1115,13 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
const glm::vec3 bodyCapsuleEnd = bodyCapsuleCenter + glm::vec3(0, bodyCapsuleHalfHeight, 0);
const float HAND_RADIUS = 0.05f;
const float RELAX_DURATION = 0.6f;
const float CONTROL_DURATION = 0.4f;
const bool TO_CONTROLLED = true;
const bool FROM_CONTROLLED = false;
const bool LEFT_HAND = true;
const bool RIGHT_HAND = false;
const float ELBOW_POLE_VECTOR_BLEND_FACTOR = 0.95f;
if (leftHandEnabled) {
if (!_isLeftHandControlled) {
_leftHandControlTimeRemaining = CONTROL_DURATION;
_isLeftHandControlled = true;
}
glm::vec3 handPosition = leftHandPose.trans();
glm::quat handRotation = leftHandPose.rot();
if (_leftHandControlTimeRemaining > 0.0f) {
// Move hand from non-controlled position to controlled position.
_leftHandControlTimeRemaining = std::max(_leftHandControlTimeRemaining - dt, 0.0f);
AnimPose handPose(Vectors::ONE, handRotation, handPosition);
if (transitionHandPose(_leftHandControlTimeRemaining, CONTROL_DURATION, handPose,
LEFT_HAND, TO_CONTROLLED, handPose)) {
handPosition = handPose.trans();
handRotation = handPose.rot();
}
}
if (!hipsEnabled) {
// prevent the hand IK targets from intersecting the body capsule
glm::vec3 displacement;
@ -1157,9 +1134,6 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
_animVars.set("leftHandRotation", handRotation);
_animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
_lastLeftHandControlledPose = AnimPose(Vectors::ONE, handRotation, handPosition);
_isLeftHandControlled = true;
// compute pole vector
int handJointIndex = _animSkeleton->nameToJointIndex("LeftHand");
int armJointIndex = _animSkeleton->nameToJointIndex("LeftArm");
@ -1187,47 +1161,17 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
_prevLeftHandPoleVectorValid = false;
_animVars.set("leftHandPoleVectorEnabled", false);
if (_isLeftHandControlled) {
_leftHandRelaxTimeRemaining = RELAX_DURATION;
_isLeftHandControlled = false;
}
_animVars.unset("leftHandPosition");
_animVars.unset("leftHandRotation");
_animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
if (_leftHandRelaxTimeRemaining > 0.0f) {
// Move hand from controlled position to non-controlled position.
_leftHandRelaxTimeRemaining = std::max(_leftHandRelaxTimeRemaining - dt, 0.0f);
AnimPose handPose;
if (transitionHandPose(_leftHandRelaxTimeRemaining, RELAX_DURATION, _lastLeftHandControlledPose,
LEFT_HAND, FROM_CONTROLLED, handPose)) {
_animVars.set("leftHandPosition", handPose.trans());
_animVars.set("leftHandRotation", handPose.rot());
_animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
}
} else {
_animVars.unset("leftHandPosition");
_animVars.unset("leftHandRotation");
_animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}
}
if (rightHandEnabled) {
if (!_isRightHandControlled) {
_rightHandControlTimeRemaining = CONTROL_DURATION;
_isRightHandControlled = true;
}
glm::vec3 handPosition = rightHandPose.trans();
glm::quat handRotation = rightHandPose.rot();
if (_rightHandControlTimeRemaining > 0.0f) {
// Move hand from non-controlled position to controlled position.
_rightHandControlTimeRemaining = std::max(_rightHandControlTimeRemaining - dt, 0.0f);
AnimPose handPose(Vectors::ONE, handRotation, handPosition);
if (transitionHandPose(_rightHandControlTimeRemaining, CONTROL_DURATION, handPose, RIGHT_HAND, TO_CONTROLLED, handPose)) {
handPosition = handPose.trans();
handRotation = handPose.rot();
}
}
if (!hipsEnabled) {
// prevent the hand IK targets from intersecting the body capsule
glm::vec3 displacement;
@ -1240,9 +1184,6 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
_animVars.set("rightHandRotation", handRotation);
_animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
_lastRightHandControlledPose = AnimPose(Vectors::ONE, handRotation, handPosition);
_isRightHandControlled = true;
// compute pole vector
int handJointIndex = _animSkeleton->nameToJointIndex("RightHand");
int armJointIndex = _animSkeleton->nameToJointIndex("RightArm");
@ -1270,25 +1211,9 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
_prevRightHandPoleVectorValid = false;
_animVars.set("rightHandPoleVectorEnabled", false);
if (_isRightHandControlled) {
_rightHandRelaxTimeRemaining = RELAX_DURATION;
_isRightHandControlled = false;
}
if (_rightHandRelaxTimeRemaining > 0.0f) {
// Move hand from controlled position to non-controlled position.
_rightHandRelaxTimeRemaining = std::max(_rightHandRelaxTimeRemaining - dt, 0.0f);
AnimPose handPose;
if (transitionHandPose(_rightHandRelaxTimeRemaining, RELAX_DURATION, _lastRightHandControlledPose, RIGHT_HAND, FROM_CONTROLLED, handPose)) {
_animVars.set("rightHandPosition", handPose.trans());
_animVars.set("rightHandRotation", handPose.rot());
_animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
}
} else {
_animVars.unset("rightHandPosition");
_animVars.unset("rightHandRotation");
_animVars.set("rightHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}
_animVars.unset("rightHandPosition");
_animVars.unset("rightHandRotation");
_animVars.set("rightHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}
}
@ -1737,39 +1662,38 @@ void Rig::computeAvatarBoundingCapsule(
ikNode.setTargetVars("RightFoot", "rightFootPosition", "rightFootRotation",
"rightFootType", "rightFootWeight", 1.0f, {},
QString(), QString(), QString());
AnimPose geometryToRig = _modelOffset * _geometryOffset;
AnimPose hips(glm::vec3(1), glm::quat(), glm::vec3());
glm::vec3 hipsPosition(0.0f);
int hipsIndex = indexOfJoint("Hips");
if (hipsIndex >= 0) {
hips = geometryToRig * _animSkeleton->getAbsoluteBindPose(hipsIndex);
hipsPosition = transformPoint(_geometryToRigTransform, _animSkeleton->getAbsoluteDefaultPose(hipsIndex).trans());
}
AnimVariantMap animVars;
animVars.setRigToGeometryTransform(_rigToGeometryTransform);
glm::quat handRotation = glm::angleAxis(PI, Vectors::UNIT_X);
animVars.set("leftHandPosition", hips.trans());
animVars.set("leftHandPosition", hipsPosition);
animVars.set("leftHandRotation", handRotation);
animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
animVars.set("rightHandPosition", hips.trans());
animVars.set("rightHandPosition", hipsPosition);
animVars.set("rightHandRotation", handRotation);
animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
int rightFootIndex = indexOfJoint("RightFoot");
int leftFootIndex = indexOfJoint("LeftFoot");
if (rightFootIndex != -1 && leftFootIndex != -1) {
glm::vec3 foot = Vectors::ZERO;
glm::vec3 geomFootPosition = glm::vec3(0.0f, _animSkeleton->getAbsoluteDefaultPose(rightFootIndex).trans().y, 0.0f);
glm::vec3 footPosition = transformPoint(_geometryToRigTransform, geomFootPosition);
glm::quat footRotation = glm::angleAxis(0.5f * PI, Vectors::UNIT_X);
animVars.set("leftFootPosition", foot);
animVars.set("leftFootPosition", footPosition);
animVars.set("leftFootRotation", footRotation);
animVars.set("leftFootType", (int)IKTarget::Type::RotationAndPosition);
animVars.set("rightFootPosition", foot);
animVars.set("rightFootPosition", footPosition);
animVars.set("rightFootRotation", footRotation);
animVars.set("rightFootType", (int)IKTarget::Type::RotationAndPosition);
}
// call overlay twice: once to verify AnimPoseVec joints and again to do the IK
AnimNode::Triggers triggersOut;
AnimContext context(false, false, false, glm::mat4(), glm::mat4());
AnimContext context(false, false, false, _geometryToRigTransform, _rigToGeometryTransform);
float dt = 1.0f; // the value of this does not matter
ikNode.overlay(animVars, context, dt, triggersOut, _animSkeleton->getRelativeBindPoses());
AnimPoseVec finalPoses = ikNode.overlay(animVars, context, dt, triggersOut, _animSkeleton->getRelativeBindPoses());
@ -1802,34 +1726,13 @@ void Rig::computeAvatarBoundingCapsule(
// compute bounding shape parameters
// NOTE: we assume that the longest side of totalExtents is the yAxis...
glm::vec3 diagonal = (geometryToRig * totalExtents.maximum) - (geometryToRig * totalExtents.minimum);
glm::vec3 diagonal = (transformPoint(_geometryToRigTransform, totalExtents.maximum) -
transformPoint(_geometryToRigTransform, totalExtents.minimum));
// ... and assume the radiusOut is half the RMS of the X and Z sides:
radiusOut = 0.5f * sqrtf(0.5f * (diagonal.x * diagonal.x + diagonal.z * diagonal.z));
heightOut = diagonal.y - 2.0f * radiusOut;
glm::vec3 rootPosition = finalPoses[geometry.rootJointIndex].trans();
glm::vec3 rigCenter = (geometryToRig * (0.5f * (totalExtents.maximum + totalExtents.minimum)));
localOffsetOut = rigCenter - (geometryToRig * rootPosition);
}
bool Rig::transitionHandPose(float deltaTime, float totalDuration, AnimPose& controlledHandPose, bool isLeftHand,
bool isToControlled, AnimPose& returnHandPose) {
auto ikNode = getAnimInverseKinematicsNode();
if (ikNode) {
float alpha = 1.0f - deltaTime / totalDuration;
const AnimPose geometryToRigTransform(_geometryToRigTransform);
AnimPose uncontrolledHandPose;
if (isLeftHand) {
uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledLeftHandPose();
} else {
uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledRightHandPose();
}
if (isToControlled) {
::blend(1, &uncontrolledHandPose, &controlledHandPose, alpha, &returnHandPose);
} else {
::blend(1, &controlledHandPose, &uncontrolledHandPose, alpha, &returnHandPose);
}
return true;
}
return false;
glm::vec3 rigCenter = transformPoint(_geometryToRigTransform, (0.5f * (totalExtents.maximum + totalExtents.minimum)));
localOffsetOut = rigCenter - transformPoint(_geometryToRigTransform, rootPosition);
}
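For reference, a standalone sketch of the bounding-capsule math used in computeAvatarBoundingCapsule (illustrative names; it assumes the longest side of the extents is along Y, as the comment above notes): the radius is half the RMS of the X and Z extents, and the cylindrical height is what remains of the Y extent after the two hemispherical caps.

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>
#include <cmath>

struct CapsuleSketch {
    float radius;
    float height; // cylindrical portion only, excluding the end caps
};

CapsuleSketch capsuleFromDiagonal(const glm::vec3& diagonal) {
    CapsuleSketch capsule;
    capsule.radius = 0.5f * std::sqrt(0.5f * (diagonal.x * diagonal.x + diagonal.z * diagonal.z));
    capsule.height = diagonal.y - 2.0f * capsule.radius;
    return capsule;
}
```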


@ -340,18 +340,6 @@ protected:
int _nextStateHandlerId { 0 };
QMutex _stateMutex;
bool transitionHandPose(float deltaTime, float totalDuration, AnimPose& controlledHandPose, bool isLeftHand,
bool isToControlled, AnimPose& returnHandPose);
bool _isLeftHandControlled { false };
bool _isRightHandControlled { false };
float _leftHandControlTimeRemaining { 0.0f };
float _rightHandControlTimeRemaining { 0.0f };
float _leftHandRelaxTimeRemaining { 0.0f };
float _rightHandRelaxTimeRemaining { 0.0f };
AnimPose _lastLeftHandControlledPose;
AnimPose _lastRightHandControlledPose;
glm::vec3 _prevRightFootPoleVector { Vectors::UNIT_Z };
bool _prevRightFootPoleVectorValid { false };


@ -92,6 +92,7 @@ void AudioClient::checkDevices() {
auto inputDevices = getAvailableDevices(QAudio::AudioInput);
auto outputDevices = getAvailableDevices(QAudio::AudioOutput);
Lock lock(_deviceMutex);
if (inputDevices != _inputDevices) {
_inputDevices.swap(inputDevices);
emit devicesChanged(QAudio::AudioInput, _inputDevices);
@ -210,9 +211,9 @@ AudioClient::AudioClient() :
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples,
this, &AudioClient::processReceivedSamples, Qt::DirectConnection);
connect(this, &AudioClient::changeDevice, this, [=](const QAudioDeviceInfo& outputDeviceInfo) {
connect(this, &AudioClient::changeDevice, this, [=](const QAudioDeviceInfo& outputDeviceInfo) {
qCDebug(audioclient) << "got AudioClient::changeDevice signal, about to call switchOutputToAudioDevice() outputDeviceInfo: [" << outputDeviceInfo.deviceName() << "]";
switchOutputToAudioDevice(outputDeviceInfo);
switchOutputToAudioDevice(outputDeviceInfo);
});
connect(&_receivedAudioStream, &InboundAudioStream::mismatchedAudioCodec, this, &AudioClient::handleMismatchAudioFormat);
@ -261,10 +262,10 @@ void AudioClient::cleanupBeforeQuit() {
// so this must be explicitly, synchronously stopped
static ConditionalGuard guard;
if (QThread::currentThread() != thread()) {
// This will likely be called from the main thread, but we don't want to do blocking queued calls
// from the main thread, so we use a normal auto-connection invoke, and then use a conditional to wait
// This will likely be called from the main thread, but we don't want to do blocking queued calls
// from the main thread, so we use a normal auto-connection invoke, and then use a conditional to wait
// for completion
// The effect is the same, yes, but we actually want to avoid the use of Qt::BlockingQueuedConnection
// The effect is the same, yes, but we actually want to avoid the use of Qt::BlockingQueuedConnection
// in the code
QMetaObject::invokeMethod(this, "cleanupBeforeQuit");
guard.wait();
@ -630,7 +631,7 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessag
message->readPrimitive(&bitset);
bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);
if (hasReverb) {
float reverbTime, wetLevel;
message->readPrimitive(&reverbTime);
@ -728,7 +729,7 @@ void AudioClient::Gate::flush() {
void AudioClient::handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message) {
if (!_muted) {
toggleMute();
// have the audio scripting interface emit a signal to say we were muted by the mixer
emit mutedByMixer();
}
@ -737,7 +738,7 @@ void AudioClient::handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message)
void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> message) {
glm::vec3 position;
float radius;
message->readPrimitive(&position);
message->readPrimitive(&radius);
@ -770,7 +771,7 @@ void AudioClient::handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> mess
}
void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
_selectedCodecName = selectedCodecName;
qCDebug(audioclient) << "Selected Codec:" << _selectedCodecName;
@ -787,7 +788,7 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
for (auto& plugin : codecPlugins) {
if (_selectedCodecName == plugin->getName()) {
_codec = plugin;
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
qCDebug(audioclient) << "Selected Codec Plugin:" << _codec.get();
break;
@ -795,7 +796,7 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
}
}
bool AudioClient::switchAudioDevice(QAudio::Mode mode, const QAudioDeviceInfo& deviceInfo) {
auto device = deviceInfo;
@ -1203,11 +1204,11 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// lock the injectors
Lock lock(_injectorsMutex);
QVector<AudioInjector*> injectorsToRemove;
QVector<AudioInjectorPointer> injectorsToRemove;
memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));
for (AudioInjector* injector : _activeLocalAudioInjectors) {
for (const AudioInjectorPointer& injector : _activeLocalAudioInjectors) {
// the lock guarantees that injectorBuffer, if found, is invariant
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
@ -1220,7 +1221,7 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// get one frame from the injector
memset(_localScratchBuffer, 0, bytesToRead);
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
if (injector->isAmbisonic()) {
// no distance attenuation
@ -1249,36 +1250,36 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
mixBuffer[i] += convertToFloat(_localScratchBuffer[i]) * gain;
}
} else {
// calculate distance, gain and azimuth for hrtf
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = gainForSource(distance, injector->getVolume());
float gain = gainForSource(distance, injector->getVolume());
float azimuth = azimuthForSource(relativePosition);
// mono gets spatialized into mixBuffer
injector->getLocalHRTF().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
injector->getLocalHRTF().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
azimuth, distance, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
} else {
qCDebug(audioclient) << "injector has no more data, marking finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
} else {
qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
}
for (AudioInjector* injector : injectorsToRemove) {
for (const AudioInjectorPointer& injector : injectorsToRemove) {
qCDebug(audioclient) << "removing injector";
_activeLocalAudioInjectors.removeOne(injector);
}
@ -1369,7 +1370,7 @@ void AudioClient::setIsStereoInput(bool isStereoInput) {
}
}
bool AudioClient::outputLocalInjector(AudioInjector* injector) {
bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
// local injectors are on the AudioInjectorsThread, so we must guard access
@ -1711,9 +1712,9 @@ int AudioClient::calculateNumberOfFrameSamples(int numBytes) const {
float AudioClient::azimuthForSource(const glm::vec3& relativePosition) {
glm::quat inverseOrientation = glm::inverse(_orientationGetter());
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
// project the rotated source position vector onto the XZ plane
rotatedSourcePosition.y = 0.0f;
@ -1721,15 +1722,15 @@ float AudioClient::azimuthForSource(const glm::vec3& relativePosition) {
float rotatedSourcePositionLength2 = glm::length2(rotatedSourcePosition);
if (rotatedSourcePositionLength2 > SOURCE_DISTANCE_THRESHOLD) {
// produce an oriented angle about the y-axis
glm::vec3 direction = rotatedSourcePosition * (1.0f / fastSqrtf(rotatedSourcePositionLength2));
float angle = fastAcosf(glm::clamp(-direction.z, -1.0f, 1.0f)); // UNIT_NEG_Z is "forward"
return (direction.x < 0.0f) ? -angle : angle;
} else {
} else {
// no azimuth if they are in the same spot
return 0.0f;
return 0.0f;
}
}
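A self-contained sketch of the azimuth computation above, using plain std/glm calls in place of the engine's fastSqrtf/fastAcosf (the epsilon value and function name are assumptions): the listener-relative source position is flattened onto the XZ plane and the signed angle from the forward (-Z) axis is returned, with 0 when the source is effectively at the listener.

```cpp
// Illustrative sketch, not part of the diff.
#include <glm/glm.hpp>
#include <cmath>

// relativePosition is already rotated into the listener's frame; -Z is "forward".
float azimuthSketch(glm::vec3 relativePosition) {
    relativePosition.y = 0.0f; // project onto the XZ plane
    const float SOURCE_DISTANCE_THRESHOLD = 1e-6f; // assumed small epsilon
    float length2 = glm::dot(relativePosition, relativePosition);
    if (length2 <= SOURCE_DISTANCE_THRESHOLD) {
        return 0.0f; // no azimuth if the source sits on the listener
    }
    glm::vec3 direction = relativePosition / std::sqrt(length2);
    float angle = std::acos(glm::clamp(-direction.z, -1.0f, 1.0f));
    return (direction.x < 0.0f) ? -angle : angle;
}
```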
@ -1869,9 +1870,9 @@ void AudioClient::startThread() {
moveToNewNamedThread(this, "Audio Thread", [this] { start(); });
}
void AudioClient::setInputVolume(float volume) {
if (_audioInput && volume != (float)_audioInput->volume()) {
_audioInput->setVolume(volume);
void AudioClient::setInputVolume(float volume) {
if (_audioInput && volume != (float)_audioInput->volume()) {
_audioInput->setVolume(volume);
emit inputVolumeChanged(_audioInput->volume());
}
}


@ -143,7 +143,7 @@ public:
Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);
bool outputLocalInjector(AudioInjector* injector) override;
bool outputLocalInjector(const AudioInjectorPointer& injector) override;
QAudioDeviceInfo getActiveAudioDevice(QAudio::Mode mode) const;
QList<QAudioDeviceInfo> getAudioDevices(QAudio::Mode mode) const;
@ -380,7 +380,7 @@ private:
bool _hasReceivedFirstPacket { false };
QVector<AudioInjector*> _activeLocalAudioInjectors;
QVector<AudioInjectorPointer> _activeLocalAudioInjectors;
bool _isPlayingBackRecording { false };


@ -18,6 +18,7 @@
#include <udt/PacketHeaders.h>
#include "AudioInjectorOptions.h"
#include "AudioInjector.h"
class AudioInjector;
class AudioInjectorLocalBuffer;
@ -35,7 +36,7 @@ public:
// threadsafe
// moves injector->getLocalBuffer() to another thread (so removes its parent)
// take care to delete it in ~AudioInjector, as Qt parenting semantics will not work
virtual bool outputLocalInjector(AudioInjector* injector) = 0;
virtual bool outputLocalInjector(const AudioInjectorPointer& injector) = 0;
public slots:
virtual bool shouldLoopbackInjectors() { return false; }


@ -27,8 +27,6 @@
#include "AudioSRC.h"
#include "AudioHelpers.h"
int audioInjectorPtrMetaTypeId = qRegisterMetaType<AudioInjector*>();
AbstractAudioInterface* AudioInjector::_localAudioInterface{ nullptr };
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
@ -92,11 +90,6 @@ void AudioInjector::finish() {
emit finished();
deleteLocalBuffer();
if (stateHas(AudioInjectorState::PendingDelete)) {
// we've been asked to delete after finishing, trigger a deleteLater here
deleteLater();
}
}
void AudioInjector::restart() {
@ -132,7 +125,7 @@ void AudioInjector::restart() {
}
}
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(AudioInjector*)) {
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&)) {
_state = AudioInjectorState::NotFinished;
int byteOffset = 0;
@ -150,7 +143,7 @@ bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(AudioInjector*
bool success = true;
if (!_options.localOnly) {
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
if (!(*injectorManager.*injection)(this)) {
if (!(*injectorManager.*injection)(sharedFromThis())) {
success = false;
finishNetworkInjection();
}
@ -173,7 +166,7 @@ bool AudioInjector::injectLocally() {
// call this function on the AudioClient's thread
// this will move the local buffer's thread to the LocalInjectorThread
success = _localAudioInterface->outputLocalInjector(this);
success = _localAudioInterface->outputLocalInjector(sharedFromThis());
if (!success) {
qCDebug(audio) << "AudioInjector::injectLocally could not output locally via _localAudioInterface";
@ -418,20 +411,16 @@ void AudioInjector::triggerDeleteAfterFinish() {
}
if (stateHas(AudioInjectorState::Finished)) {
stopAndDeleteLater();
stop();
} else {
_state |= AudioInjectorState::PendingDelete;
}
}
void AudioInjector::stopAndDeleteLater() {
stop();
QMetaObject::invokeMethod(this, "deleteLater", Qt::QueuedConnection);
}
AudioInjector* AudioInjector::playSound(SharedSoundPointer sound, const float volume, const float stretchFactor, const glm::vec3 position) {
AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const float volume,
const float stretchFactor, const glm::vec3 position) {
if (!sound || !sound->isReady()) {
return nullptr;
return AudioInjectorPointer();
}
AudioInjectorOptions options;
@ -462,8 +451,8 @@ AudioInjector* AudioInjector::playSound(SharedSoundPointer sound, const float vo
return playSoundAndDelete(resampled, options);
}
AudioInjector* AudioInjector::playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjector* sound = playSound(buffer, options);
AudioInjectorPointer AudioInjector::playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjectorPointer sound = playSound(buffer, options);
if (sound) {
sound->_state |= AudioInjectorState::PendingDelete;
@ -473,8 +462,9 @@ AudioInjector* AudioInjector::playSoundAndDelete(const QByteArray& buffer, const
}
AudioInjector* AudioInjector::playSound(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjector* injector = new AudioInjector(buffer, options);
AudioInjectorPointer AudioInjector::playSound(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjectorPointer injector = AudioInjectorPointer::create(buffer, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread injector";
}

View file

@ -32,6 +32,8 @@
class AbstractAudioInterface;
class AudioInjectorManager;
class AudioInjector;
using AudioInjectorPointer = QSharedPointer<AudioInjector>;
enum class AudioInjectorState : uint8_t {
@ -46,19 +48,19 @@ AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs);
AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs);
// In order to make scripting cleaner for the AudioInjector, the script now holds on to the AudioInjector object
// until it dies.
class AudioInjector : public QObject {
// until it dies.
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector> {
Q_OBJECT
public:
AudioInjector(const Sound& sound, const AudioInjectorOptions& injectorOptions);
AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions);
~AudioInjector();
bool isFinished() const { return (stateHas(AudioInjectorState::Finished)); }
int getCurrentSendOffset() const { return _currentSendOffset; }
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
AudioInjectorLocalBuffer* getLocalBuffer() const { return _localBuffer; }
AudioHRTF& getLocalHRTF() { return _localHRTF; }
AudioFOA& getLocalFOA() { return _localFOA; }
@ -72,36 +74,36 @@ public:
bool stateHas(AudioInjectorState state) const ;
static void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }
static AudioInjector* playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjector* playSound(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjector* playSound(SharedSoundPointer sound, const float volume, const float stretchFactor, const glm::vec3 position);
static AudioInjectorPointer playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjectorPointer playSound(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjectorPointer playSound(SharedSoundPointer sound, const float volume,
const float stretchFactor, const glm::vec3 position);
public slots:
void restart();
void stop();
void triggerDeleteAfterFinish();
void stopAndDeleteLater();
const AudioInjectorOptions& getOptions() const { return _options; }
void setOptions(const AudioInjectorOptions& options);
float getLoudness() const { return _loudness; }
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
void finish();
void finishLocalInjection();
void finishNetworkInjection();
signals:
void finished();
void restarting();
private:
int64_t injectNextFrame();
bool inject(bool(AudioInjectorManager::*injection)(AudioInjector*));
bool inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&));
bool injectLocally();
void deleteLocalBuffer();
static AbstractAudioInterface* _localAudioInterface;
QByteArray _audioData;
@ -112,17 +114,15 @@ private:
int _currentSendOffset { 0 };
std::unique_ptr<NLPacket> _currentPacket { nullptr };
AudioInjectorLocalBuffer* _localBuffer { nullptr };
int64_t _nextFrame { 0 };
std::unique_ptr<QElapsedTimer> _frameTimer { nullptr };
quint16 _outgoingSequenceNumber { 0 };
// when the injector is local, we need this
AudioHRTF _localHRTF;
AudioFOA _localFOA;
friend class AudioInjectorManager;
};
Q_DECLARE_METATYPE(AudioInjector*)
#endif // hifi_AudioInjector_h
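
The hunk above is the heart of this refactor: `AudioInjector` moves from raw `AudioInjector*` pointers (plus a `QPointer`/manual `deleteLater()` scheme) to `QSharedPointer<AudioInjector>`, and the class now derives from `QEnableSharedFromThis` so member functions can hand out a shared reference to the already-managed instance. A minimal, self-contained sketch of that pattern (illustrative names, not code from this commit):

```cpp
// Sketch of the QSharedPointer + QEnableSharedFromThis ownership pattern.
// sharedFromThis() is only valid when the instance is already owned by a
// QSharedPointer, which is why the object is created via ::create() and never
// handed around as a raw pointer.
#include <QSharedPointer>
#include <QDebug>

class Injector : public QEnableSharedFromThis<Injector> {
public:
    QSharedPointer<Injector> self() {
        return sharedFromThis();   // shares the existing control block
    }
};

int main() {
    auto injector = QSharedPointer<Injector>::create();
    auto again = injector->self();
    qDebug() << (injector == again);   // true: same object, same reference count
    return 0;
}
```
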

View file

@ -21,26 +21,26 @@
AudioInjectorManager::~AudioInjectorManager() {
_shouldStop = true;
Lock lock(_injectorsMutex);
// make sure any still living injectors are stopped and deleted
while (!_injectors.empty()) {
// grab the injector at the front
auto& timePointerPair = _injectors.top();
// ask it to stop and be deleted
timePointerPair.second->stopAndDeleteLater();
timePointerPair.second->stop();
_injectors.pop();
}
// get rid of the lock now that we've stopped all living injectors
lock.unlock();
// in case the thread is waiting for injectors wake it up now
_injectorReady.notify_one();
// quit and wait on the manager thread, if we ever created it
if (_thread) {
_thread->quit();
@ -51,10 +51,10 @@ AudioInjectorManager::~AudioInjectorManager() {
void AudioInjectorManager::createThread() {
_thread = new QThread;
_thread->setObjectName("Audio Injector Thread");
// when the thread is started, have it call our run to handle injection of audio
connect(_thread, &QThread::started, this, &AudioInjectorManager::run, Qt::DirectConnection);
// start the thread
_thread->start();
}
@ -63,20 +63,20 @@ void AudioInjectorManager::run() {
while (!_shouldStop) {
// wait until the next injector is ready, or until we get a new injector given to us
Lock lock(_injectorsMutex);
if (_injectors.size() > 0) {
// when does the next injector need to send a frame?
// do we get to wait or should we just go for it now?
auto timeInjectorPair = _injectors.top();
auto nextTimestamp = timeInjectorPair.first;
int64_t difference = int64_t(nextTimestamp - usecTimestampNow());
if (difference > 0) {
_injectorReady.wait_for(lock, std::chrono::microseconds(difference));
}
if (_injectors.size() > 0) {
// loop through the injectors in the map and send whatever frames need to go out
auto front = _injectors.top();
@ -90,7 +90,7 @@ void AudioInjectorManager::run() {
// either way we're popping this injector off - get a copy first
auto injector = front.second;
_injectors.pop();
if (!injector.isNull()) {
// this is an injector that's ready to go, have it send a frame now
auto nextCallDelta = injector->injectNextFrame();
@ -100,7 +100,7 @@ void AudioInjectorManager::run() {
heldInjectors.emplace(heldInjectors.end(), usecTimestampNow() + nextCallDelta, injector);
}
}
if (_injectors.size() > 0) {
front = _injectors.top();
} else {
@ -120,10 +120,10 @@ void AudioInjectorManager::run() {
// we have no current injectors, wait until we get at least one before we do anything
_injectorReady.wait(lock);
}
// unlock the lock in case something in process events needs to modify the queue
lock.unlock();
QCoreApplication::processEvents();
}
}
@ -139,36 +139,36 @@ bool AudioInjectorManager::wouldExceedLimits() { // Should be called inside of a
return false;
}
bool AudioInjectorManager::threadInjector(AudioInjector* injector) {
bool AudioInjectorManager::threadInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
}
// guard the injectors vector with a mutex
Lock lock(_injectorsMutex);
if (wouldExceedLimits()) {
return false;
} else {
if (!_thread) {
createThread();
}
// move the injector to the QThread
injector->moveToThread(_thread);
// add the injector to the queue with a send timestamp of now
_injectors.emplace(usecTimestampNow(), InjectorQPointer { injector });
_injectors.emplace(usecTimestampNow(), injector);
// notify our wait condition so we can inject two frames for this injector immediately
_injectorReady.notify_one();
return true;
}
}
bool AudioInjectorManager::restartFinishedInjector(AudioInjector* injector) {
bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
@ -181,8 +181,8 @@ bool AudioInjectorManager::restartFinishedInjector(AudioInjector* injector) {
return false;
} else {
// add the injector to the queue with a send timestamp of now
_injectors.emplace(usecTimestampNow(), InjectorQPointer { injector });
_injectors.emplace(usecTimestampNow(), injector);
// notify our wait condition so we can inject two frames for this injector immediately
_injectorReady.notify_one();
}
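
The `run()` loop above waits either until the earliest scheduled send time or until `_injectorReady` is notified, whichever comes first. A hedged, standard-library-only sketch of that wait step (the real code uses `usecTimestampNow()` and its own lock types):

```cpp
// Wait until a deadline expressed in microseconds, but wake early if another
// thread signals that a new (possibly earlier) item was queued.
#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <mutex>

std::mutex gMutex;
std::condition_variable gReady;

void waitUntilDue(uint64_t nextTimestampUsec, uint64_t nowUsec) {
    std::unique_lock<std::mutex> lock(gMutex);
    int64_t difference = int64_t(nextTimestampUsec - nowUsec);
    if (difference > 0) {
        gReady.wait_for(lock, std::chrono::microseconds(difference));
    }
}

int main() {
    using namespace std::chrono;
    uint64_t now = duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
    waitUntilDue(now + 5000, now);   // sleeps ~5 ms unless notified earlier
    return 0;
}
```
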

View file

@ -23,7 +23,7 @@
#include <DependencyManager.h>
class AudioInjector;
#include "AudioInjector.h"
class AudioInjectorManager : public QObject, public Dependency {
Q_OBJECT
@ -33,39 +33,38 @@ public:
private slots:
void run();
private:
using InjectorQPointer = QPointer<AudioInjector>;
using TimeInjectorPointerPair = std::pair<uint64_t, InjectorQPointer>;
using TimeInjectorPointerPair = std::pair<uint64_t, AudioInjectorPointer>;
struct greaterTime {
bool operator() (const TimeInjectorPointerPair& x, const TimeInjectorPointerPair& y) const {
return x.first > y.first;
}
};
using InjectorQueue = std::priority_queue<TimeInjectorPointerPair,
std::deque<TimeInjectorPointerPair>,
greaterTime>;
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
bool threadInjector(AudioInjector* injector);
bool restartFinishedInjector(AudioInjector* injector);
bool threadInjector(const AudioInjectorPointer& injector);
bool restartFinishedInjector(const AudioInjectorPointer& injector);
void notifyInjectorReadyCondition() { _injectorReady.notify_one(); }
bool wouldExceedLimits();
AudioInjectorManager() {};
AudioInjectorManager(const AudioInjectorManager&) = delete;
AudioInjectorManager& operator=(const AudioInjectorManager&) = delete;
void createThread();
QThread* _thread { nullptr };
bool _shouldStop { false };
InjectorQueue _injectors;
Mutex _injectorsMutex;
std::condition_variable _injectorReady;
friend class AudioInjector;
};
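
`InjectorQueue` above is a `std::priority_queue` ordered by the `greaterTime` comparator, so the pair with the smallest timestamp (the next injector due to send) is always on top. A small standalone illustration of that ordering:

```cpp
// Min-heap by timestamp built from std::priority_queue: greater-than on the
// first pair member pushes larger times down, leaving the earliest on top().
#include <cstdint>
#include <deque>
#include <iostream>
#include <queue>
#include <string>
#include <utility>

using TimedItem = std::pair<uint64_t, std::string>;

struct GreaterTime {
    bool operator()(const TimedItem& x, const TimedItem& y) const {
        return x.first > y.first;
    }
};

int main() {
    std::priority_queue<TimedItem, std::deque<TimedItem>, GreaterTime> queue;
    queue.emplace(300, "injector-c");
    queue.emplace(100, "injector-a");
    queue.emplace(200, "injector-b");
    while (!queue.empty()) {
        std::cout << queue.top().first << " " << queue.top().second << "\n";  // a, b, c
        queue.pop();
    }
    return 0;
}
```
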

View file

@ -1050,7 +1050,34 @@ int Avatar::getJointIndex(const QString& name) const {
QStringList Avatar::getJointNames() const {
QStringList result;
withValidJointIndicesCache([&]() {
result = _modelJointIndicesCache.keys();
// find out how large the vector needs to be
int maxJointIndex = -1;
QHashIterator<QString, int> k(_modelJointIndicesCache);
while (k.hasNext()) {
k.next();
int index = k.value();
if (index > maxJointIndex) {
maxJointIndex = index;
}
}
// iterate through the hash and put joint names
// into the vector at their indices
QVector<QString> resultVector(maxJointIndex+1);
QHashIterator<QString, int> i(_modelJointIndicesCache);
while (i.hasNext()) {
i.next();
int index = i.value();
resultVector[index] = i.key();
}
// convert to QList and drop out blanks
result = resultVector.toList();
QMutableListIterator<QString> j(result);
while (j.hasNext()) {
QString jointName = j.next();
if (jointName.isEmpty()) {
j.remove();
}
}
});
return result;
}
@ -1453,8 +1480,7 @@ void Avatar::addToScene(AvatarSharedPointer myHandle, const render::ScenePointer
if (scene) {
auto nodelist = DependencyManager::get<NodeList>();
if (showAvatars
&& !nodelist->isIgnoringNode(getSessionUUID())
&& !nodelist->isRadiusIgnoringNode(getSessionUUID())) {
&& !nodelist->isIgnoringNode(getSessionUUID())) {
render::Transaction transaction;
addToScene(myHandle, scene, transaction);
scene->enqueueTransaction(transaction);

View file

@ -2366,6 +2366,12 @@ AvatarEntityMap AvatarData::getAvatarEntityData() const {
return result;
}
void AvatarData::insertDetachedEntityID(const QUuid entityID) {
_avatarEntitiesLock.withWriteLock([&] {
_avatarEntityDetached.insert(entityID);
});
}
void AvatarData::setAvatarEntityData(const AvatarEntityMap& avatarEntityData) {
if (avatarEntityData.size() > MAX_NUM_AVATAR_ENTITIES) {
// the data is suspect

View file

@ -615,6 +615,7 @@ public:
Q_INVOKABLE AvatarEntityMap getAvatarEntityData() const;
Q_INVOKABLE void setAvatarEntityData(const AvatarEntityMap& avatarEntityData);
void setAvatarEntityDataChanged(bool value) { _avatarEntityDataChanged = value; }
void insertDetachedEntityID(const QUuid entityID);
AvatarEntityIDs getAndClearRecentlyDetachedIDs();
// thread safe

View file

@ -170,13 +170,6 @@ void AvatarHashMap::processKillAvatar(QSharedPointer<ReceivedMessage> message, S
removeAvatar(sessionUUID, reason);
}
void AvatarHashMap::processExitingSpaceBubble(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// read the node id
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto nodeList = DependencyManager::get<NodeList>();
nodeList->radiusIgnoreNodeBySessionID(sessionUUID, false);
}
void AvatarHashMap::removeAvatar(const QUuid& sessionUUID, KillAvatarReason removalReason) {
QWriteLocker locker(&_hashLock);

View file

@ -60,7 +60,6 @@ protected slots:
void processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processKillAvatar(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processExitingSpaceBubble(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
protected:
AvatarHashMap();

View file

@ -19,6 +19,7 @@ public:
bool hasFocus() const override;
void submitFrame(const gpu::FramePointer& newFrame) override;
QImage getScreenshot(float aspectRatio = 0.0f) const override;
void copyTextureToQuickFramebuffer(NetworkTexturePointer source, QOpenGLFramebufferObject* target, GLsync* fenceSync) override {};
private:
static const QString NAME;
};

View file

@ -16,6 +16,7 @@
#include <QtOpenGL/QGLWidget>
#include <QtGui/QImage>
#include <QOpenGLFramebufferObject>
#if defined(Q_OS_MAC)
#include <OpenGL/CGLCurrent.h>
#endif
@ -41,7 +42,7 @@
#include <ui-plugins/PluginContainer.h>
#include <ui/Menu.h>
#include <CursorManager.h>
#include <TextureCache.h>
#include "CompositorHelper.h"
#include "Logging.h"
@ -55,7 +56,7 @@ out vec4 outFragColor;
float sRGBFloatToLinear(float value) {
const float SRGB_ELBOW = 0.04045;
return (value <= SRGB_ELBOW) ? value / 12.92 : pow((value + 0.055) / 1.055, 2.4);
}
@ -121,10 +122,10 @@ public:
PROFILE_SET_THREAD_NAME("Present Thread");
// FIXME determine the best priority balance between this and the main thread...
// It may be dependent on the display plugin being used, since VR plugins should
// It may be dependent on the display plugin being used, since VR plugins should
// have higher priority on rendering (although we could say that the Oculus plugin
// doesn't need that since it has async timewarp).
// A higher priority here
// A higher priority here
setPriority(QThread::HighPriority);
OpenGLDisplayPlugin* currentPlugin{ nullptr };
Q_ASSERT(_context);
@ -233,7 +234,7 @@ public:
// Move the context back to the presentation thread
_context->moveToThread(this);
// restore control of the context to the presentation thread and signal
// restore control of the context to the presentation thread and signal
// the end of the operation
_finishedMainThreadOperation = true;
lock.unlock();
@ -291,7 +292,7 @@ bool OpenGLDisplayPlugin::activate() {
if (!RENDER_THREAD) {
RENDER_THREAD = _presentThread;
}
// Child classes may override this in order to do things like initialize
// libraries, etc
if (!internalActivate()) {
@ -411,7 +412,7 @@ void OpenGLDisplayPlugin::customizeContext() {
gpu::Shader::makeProgram(*program);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setBlendFunction(true,
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_overlayPipeline = gpu::Pipeline::create(program, state);
@ -496,16 +497,48 @@ void OpenGLDisplayPlugin::submitFrame(const gpu::FramePointer& newFrame) {
_newFrameQueue.push(newFrame);
});
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor) {
renderFromTexture(batch, texture, viewport, scissor, gpu::FramebufferPointer());
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor, gpu::FramebufferPointer copyFbo /*=gpu::FramebufferPointer()*/) {
auto fbo = gpu::FramebufferPointer();
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(gpu::FramebufferPointer());
batch.setFramebuffer(fbo);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
batch.setStateScissorRect(scissor);
batch.setViewportTransform(viewport);
batch.setResourceTexture(0, texture);
batch.setPipeline(_presentPipeline);
batch.draw(gpu::TRIANGLE_STRIP, 4);
if (copyFbo) {
gpu::Vec4i copyFboRect(0, 0, copyFbo->getWidth(), copyFbo->getHeight());
gpu::Vec4i sourceRect(scissor.x, scissor.y, scissor.x + scissor.z, scissor.y + scissor.w);
float aspectRatio = (float)scissor.w / (float) scissor.z; // height/width
// scale width first
int xOffset = 0;
int yOffset = 0;
int newWidth = copyFbo->getWidth();
int newHeight = std::round(aspectRatio * (float) copyFbo->getWidth());
if (newHeight > copyFbo->getHeight()) {
// ok, so now fill height instead
newHeight = copyFbo->getHeight();
newWidth = std::round((float)copyFbo->getHeight() / aspectRatio);
xOffset = (copyFbo->getWidth() - newWidth) / 2;
} else {
yOffset = (copyFbo->getHeight() - newHeight) / 2;
}
gpu::Vec4i copyRect(xOffset, yOffset, xOffset + newWidth, yOffset + newHeight);
batch.setFramebuffer(copyFbo);
batch.resetViewTransform();
batch.setViewportTransform(copyFboRect);
batch.setStateScissorRect(copyFboRect);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, {0.0f, 0.0f, 0.0f, 1.0f});
batch.blit(fbo, sourceRect, copyFbo, copyRect);
}
}
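
The `copyFbo` branch above letterboxes or pillarboxes the scissored region into the preview framebuffer: fill the width first, and if the resulting height overflows, fill the height and centre horizontally instead. A small worked sketch of that arithmetic (plain ints, no GPU types):

```cpp
// Fit a source of srcWidth x srcHeight into dstWidth x dstHeight while
// preserving aspect ratio, returning the centred destination rectangle.
#include <cmath>
#include <iostream>

struct FittedRect { int x, y, width, height; };

FittedRect fitPreservingAspect(int srcWidth, int srcHeight, int dstWidth, int dstHeight) {
    float aspectRatio = (float)srcHeight / (float)srcWidth;   // height / width
    int newWidth = dstWidth;
    int newHeight = (int)std::round(aspectRatio * (float)dstWidth);
    int xOffset = 0;
    int yOffset = 0;
    if (newHeight > dstHeight) {
        newHeight = dstHeight;                                // fill height instead
        newWidth = (int)std::round((float)dstHeight / aspectRatio);
        xOffset = (dstWidth - newWidth) / 2;                  // pillarbox
    } else {
        yOffset = (dstHeight - newHeight) / 2;                // letterbox
    }
    return { xOffset, yOffset, newWidth, newHeight };
}

int main() {
    FittedRect r = fitPreservingAspect(1920, 1080, 800, 800);
    std::cout << r.x << "," << r.y << " " << r.width << "x" << r.height << "\n";  // 0,175 800x450
    return 0;
}
```
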
void OpenGLDisplayPlugin::updateFrameData() {
@ -686,7 +719,7 @@ void OpenGLDisplayPlugin::resetPresentRate() {
// _presentRate = RateCounter<100>();
}
float OpenGLDisplayPlugin::renderRate() const {
float OpenGLDisplayPlugin::renderRate() const {
return _renderRate.rate();
}
@ -821,3 +854,53 @@ void OpenGLDisplayPlugin::updateCompositeFramebuffer() {
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_RGBA_32, renderSize.x, renderSize.y));
}
}
void OpenGLDisplayPlugin::copyTextureToQuickFramebuffer(NetworkTexturePointer networkTexture, QOpenGLFramebufferObject* target, GLsync* fenceSync) {
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
withMainThreadContext([&] {
GLuint sourceTexture = glBackend->getTextureID(networkTexture->getGPUTexture());
GLuint targetTexture = target->texture();
GLuint fbo[2] {0, 0};
// need mipmaps for blitting texture
glGenerateTextureMipmap(sourceTexture);
// create 2 fbos (one for initial texture, second for scaled one)
glCreateFramebuffers(2, fbo);
// setup source fbo
glBindFramebuffer(GL_FRAMEBUFFER, fbo[0]);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, sourceTexture, 0);
GLint texWidth = networkTexture->getWidth();
GLint texHeight = networkTexture->getHeight();
// setup destination fbo
glBindFramebuffer(GL_FRAMEBUFFER, fbo[1]);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, targetTexture, 0);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
// maintain aspect ratio, filling the width first if possible. If that makes the height too
// much, fill height instead. TODO: only do this when texture changes
GLint newX = 0;
GLint newY = 0;
float aspectRatio = (float)texHeight / (float)texWidth;
GLint newWidth = target->width();
GLint newHeight = std::round(aspectRatio * (float) target->width());
if (newHeight > target->height()) {
newHeight = target->height();
newWidth = std::round((float)target->height() / aspectRatio);
newX = (target->width() - newWidth) / 2;
} else {
newY = (target->height() - newHeight) / 2;
}
glBlitNamedFramebuffer(fbo[0], fbo[1], 0, 0, texWidth, texHeight, newX, newY, newX + newWidth, newY + newHeight, GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT, GL_NEAREST);
// don't delete the textures!
glDeleteFramebuffers(2, fbo);
*fenceSync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
});
}
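
`copyTextureToQuickFramebuffer()` hands back a `GLsync` fence rather than blocking, so whoever consumes `target` is expected to wait on that fence before sampling the texture. A hedged sketch of that consumer-side wait (assumes some OpenGL loader exposing the 3.2+ sync API; not code from this commit):

```cpp
// Block (briefly) until the GPU has finished the blit recorded above, then
// release the fence object.
#include <glad/glad.h>   // assumption: any loader providing glClientWaitSync/glDeleteSync

void waitForCopy(GLsync fenceSync) {
    if (!fenceSync) {
        return;
    }
    // GL_SYNC_FLUSH_COMMANDS_BIT makes sure pending commands are flushed so the
    // fence can actually be signalled; the timeout here is 100 ms in nanoseconds.
    glClientWaitSync(fenceSync, GL_SYNC_FLUSH_COMMANDS_BIT, 100 * 1000 * 1000);
    glDeleteSync(fenceSync);
}
```
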

View file

@ -38,7 +38,7 @@ protected:
using Condition = std::condition_variable;
public:
~OpenGLDisplayPlugin();
// These must be final to ensure proper ordering of operations
// These must be final to ensure proper ordering of operations
// between the main thread and the presentation thread
bool activate() override final;
void deactivate() override final;
@ -79,6 +79,8 @@ public:
// Three threads, one for rendering, one for texture transfers, one reserved for the GL driver
int getRequiredThreadCount() const override { return 3; }
void copyTextureToQuickFramebuffer(NetworkTexturePointer source, QOpenGLFramebufferObject* target, GLsync* fenceSync) override;
protected:
friend class PresentThread;
@ -103,7 +105,7 @@ protected:
// Returns true on successful activation
virtual bool internalActivate() { return true; }
virtual void internalDeactivate() {}
// Returns true on successful activation of standby session
virtual bool activateStandBySession() { return true; }
virtual void deactivateSession() {}
@ -111,6 +113,7 @@ protected:
// Plugin specific functionality to send the composed scene to the output window or device
virtual void internalPresent();
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor, gpu::FramebufferPointer fbo);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor);
virtual void updateFrameData();

View file

@ -134,7 +134,7 @@ void HmdDisplayPlugin::customizeContext() {
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::Shader::BindingSet bindings;
bindings.insert({ "lineData", LINE_DATA_SLOT });;
gpu::Shader::makeProgram(*program, bindings);
@ -243,6 +243,8 @@ void HmdDisplayPlugin::internalPresent() {
glm::ivec4 viewport = getViewportForSourceSize(sourceSize);
glm::ivec4 scissor = viewport;
auto fbo = gpu::FramebufferPointer();
render([&](gpu::Batch& batch) {
if (_monoPreview) {
@ -285,11 +287,15 @@ void HmdDisplayPlugin::internalPresent() {
viewport = ivec4(scissorOffset - scaledShiftLeftBy, viewportOffset, viewportSizeX, viewportSizeY);
}
// TODO: only bother getting and passing in the hmdPreviewFramebuffer if the camera is on
fbo = DependencyManager::get<TextureCache>()->getHmdPreviewFramebuffer(windowSize.x, windowSize.y);
viewport.z *= 2;
}
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor);
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor, fbo);
});
swapBuffers();
} else if (_clearPreviewFlag) {
QImage image;
if (_vsyncEnabled) {
@ -312,7 +318,7 @@ void HmdDisplayPlugin::internalPresent() {
_previewTexture->assignStoredMip(0, image.byteCount(), image.constBits());
_previewTexture->setAutoGenerateMips(true);
}
auto viewport = getViewportForSourceSize(uvec2(_previewTexture->getDimensions()));
render([&](gpu::Batch& batch) {
@ -323,7 +329,7 @@ void HmdDisplayPlugin::internalPresent() {
}
postPreview();
// If preview is disabled, we need to check to see if the window size has changed
// If preview is disabled, we need to check to see if the window size has changed
// and re-render the no-preview message
if (_disablePreview) {
auto window = _container->getPrimaryWidget();
@ -510,7 +516,7 @@ void HmdDisplayPlugin::OverlayRenderer::build() {
indices = std::make_shared<gpu::Buffer>();
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
static const float fov = CompositorHelper::VIRTUAL_UI_TARGET_FOV.y;
static const float aspectRatio = CompositorHelper::VIRTUAL_UI_ASPECT_RATIO;
static const uint16_t stacks = 128;
@ -672,7 +678,7 @@ bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const ve
_handLasers[1] = info;
}
});
// FIXME defer to a child class plugin to determine if hand lasers are actually
// FIXME defer to a child class plugin to determine if hand lasers are actually
// available based on the presence or absence of hand controllers
return true;
}
@ -687,7 +693,7 @@ bool HmdDisplayPlugin::setExtraLaser(HandLaserMode mode, const vec4& color, cons
_extraLaserStart = sensorSpaceStart;
});
// FIXME defer to a child class plugin to determine if hand lasers are actually
// FIXME defer to a child class plugin to determine if hand lasers are actually
// available based on the presence or absence of hand controllers
return true;
}
@ -702,7 +708,7 @@ void HmdDisplayPlugin::compositeExtra() {
if (_presentHandPoses[0] == IDENTITY_MATRIX && _presentHandPoses[1] == IDENTITY_MATRIX && !_presentExtraLaser.valid()) {
return;
}
render([&](gpu::Batch& batch) {
batch.setFramebuffer(_compositeFramebuffer);
batch.setModelTransform(Transform());

View file

@ -26,6 +26,7 @@
#include <PerfStat.h>
#include <SceneScriptingInterface.h>
#include <ScriptEngine.h>
#include <HoverOverlayInterface.h>
#include "RenderableEntityItem.h"
@ -452,6 +453,8 @@ RayToEntityIntersectionResult EntityTreeRenderer::findRayIntersectionWorker(cons
void EntityTreeRenderer::connectSignalsToSlots(EntityScriptingInterface* entityScriptingInterface) {
auto hoverOverlayInterface = DependencyManager::get<HoverOverlayInterface>().data();
connect(this, &EntityTreeRenderer::mousePressOnEntity, entityScriptingInterface, &EntityScriptingInterface::mousePressOnEntity);
connect(this, &EntityTreeRenderer::mouseMoveOnEntity, entityScriptingInterface, &EntityScriptingInterface::mouseMoveOnEntity);
connect(this, &EntityTreeRenderer::mouseReleaseOnEntity, entityScriptingInterface, &EntityScriptingInterface::mouseReleaseOnEntity);
@ -461,8 +464,12 @@ void EntityTreeRenderer::connectSignalsToSlots(EntityScriptingInterface* entityS
connect(this, &EntityTreeRenderer::clickReleaseOnEntity, entityScriptingInterface, &EntityScriptingInterface::clickReleaseOnEntity);
connect(this, &EntityTreeRenderer::hoverEnterEntity, entityScriptingInterface, &EntityScriptingInterface::hoverEnterEntity);
connect(this, SIGNAL(hoverEnterEntity(const EntityItemID&, const PointerEvent&)), hoverOverlayInterface, SLOT(createHoverOverlay(const EntityItemID&, const PointerEvent&)));
connect(this, &EntityTreeRenderer::hoverOverEntity, entityScriptingInterface, &EntityScriptingInterface::hoverOverEntity);
connect(this, &EntityTreeRenderer::hoverLeaveEntity, entityScriptingInterface, &EntityScriptingInterface::hoverLeaveEntity);
connect(this, SIGNAL(hoverLeaveEntity(const EntityItemID&, const PointerEvent&)), hoverOverlayInterface, SLOT(destroyHoverOverlay(const EntityItemID&, const PointerEvent&)));
connect(this, &EntityTreeRenderer::enterEntity, entityScriptingInterface, &EntityScriptingInterface::enterEntity);
connect(this, &EntityTreeRenderer::leaveEntity, entityScriptingInterface, &EntityScriptingInterface::leaveEntity);

View file

@ -13,14 +13,13 @@
#include <DependencyManager.h>
#include <PerfStat.h>
#include <GeometryCache.h>
#include <render/ShapePipeline.h>
#include <StencilMaskPass.h>
#include <AbstractViewStateInterface.h>
#include "EntitiesRendererLogging.h"
#include "RenderableParticleEffectEntityItem.h"
#include "untextured_particle_vert.h"
#include "untextured_particle_frag.h"
#include "textured_particle_vert.h"
#include "textured_particle_frag.h"
@ -29,6 +28,16 @@ class ParticlePayloadData {
public:
static const size_t VERTEX_PER_PARTICLE = 4;
static uint8_t CUSTOM_PIPELINE_NUMBER;
static render::ShapePipelinePointer shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key);
static void registerShapePipeline() {
if (!CUSTOM_PIPELINE_NUMBER) {
CUSTOM_PIPELINE_NUMBER = render::ShapePipeline::registerCustomShapePipelineFactory(shapePipelineFactory);
}
}
static std::weak_ptr<gpu::Pipeline> _texturedPipeline;
template<typename T>
struct InterpolationData {
T start;
@ -70,9 +79,6 @@ public:
offsetof(ParticlePrimitive, uv), gpu::Stream::PER_INSTANCE);
}
void setPipeline(PipelinePointer pipeline) { _pipeline = pipeline; }
const PipelinePointer& getPipeline() const { return _pipeline; }
const Transform& getModelTransform() const { return _modelTransform; }
void setModelTransform(const Transform& modelTransform) { _modelTransform = modelTransform; }
@ -90,15 +96,15 @@ public:
bool getVisibleFlag() const { return _visibleFlag; }
void setVisibleFlag(bool visibleFlag) { _visibleFlag = visibleFlag; }
void render(RenderArgs* args) const {
assert(_pipeline);
gpu::Batch& batch = *args->_batch;
batch.setPipeline(_pipeline);
if (_texture) {
batch.setResourceTexture(0, _texture);
} else {
batch.setResourceTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
}
batch.setModelTransform(_modelTransform);
@ -113,7 +119,6 @@ public:
protected:
Transform _modelTransform;
AABox _bound;
PipelinePointer _pipeline;
FormatPointer _vertexFormat { std::make_shared<Format>() };
BufferPointer _particleBuffer { std::make_shared<Buffer>() };
BufferView _uniformBuffer;
@ -142,23 +147,49 @@ namespace render {
payload->render(args);
}
}
template <>
const ShapeKey shapeGetShapeKey(const ParticlePayloadData::Pointer& payload) {
return render::ShapeKey::Builder().withCustom(ParticlePayloadData::CUSTOM_PIPELINE_NUMBER).withTranslucent().build();
}
}
uint8_t ParticlePayloadData::CUSTOM_PIPELINE_NUMBER = 0;
std::weak_ptr<gpu::Pipeline> ParticlePayloadData::_texturedPipeline;
render::ShapePipelinePointer ParticlePayloadData::shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key) {
auto texturedPipeline = _texturedPipeline.lock();
if (!texturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
PrepareStencil::testMask(*state);
auto vertShader = gpu::Shader::createVertex(std::string(textured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(textured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_texturedPipeline = texturedPipeline = gpu::Pipeline::create(program, state);
}
return std::make_shared<render::ShapePipeline>(texturedPipeline, nullptr, nullptr, nullptr);
}
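
The particle payload now registers a custom shape-pipeline factory once and afterwards only refers to it by the small integer key returned by `registerCustomShapePipelineFactory()`. An engine-agnostic sketch of that register-once, look-up-by-key pattern (all names illustrative):

```cpp
// A tiny factory registry: slot 0 stays reserved, mirroring how the code above
// treats a zero CUSTOM_PIPELINE_NUMBER as "not registered yet".
#include <cstdint>
#include <functional>
#include <iostream>
#include <memory>
#include <utility>
#include <vector>

struct Pipeline { const char* name; };
using PipelineFactory = std::function<std::shared_ptr<Pipeline>()>;

static std::vector<PipelineFactory> gFactories;

uint8_t registerCustomPipelineFactory(PipelineFactory factory) {
    if (gFactories.empty()) {
        gFactories.push_back(nullptr);   // reserve key 0
    }
    gFactories.push_back(std::move(factory));
    return (uint8_t)(gFactories.size() - 1);
}

int main() {
    static uint8_t customNumber = 0;
    if (!customNumber) {                 // lazy, one-time registration
        customNumber = registerCustomPipelineFactory(
            [] { return std::make_shared<Pipeline>(Pipeline{ "textured particles" }); });
    }
    auto pipeline = gFactories[customNumber]();
    std::cout << (int)customNumber << " -> " << pipeline->name << "\n";
    return 0;
}
```
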
EntityItemPointer RenderableParticleEffectEntityItem::factory(const EntityItemID& entityID,
const EntityItemProperties& properties) {
auto entity = std::make_shared<RenderableParticleEffectEntityItem>(entityID);
entity->setProperties(properties);
// As we create the first particle system entity, let's register its special shapePipeline factory:
ParticlePayloadData::registerShapePipeline();
return entity;
}
RenderableParticleEffectEntityItem::RenderableParticleEffectEntityItem(const EntityItemID& entityItemID) :
ParticleEffectEntityItem(entityItemID) {
// lazy creation of particle system pipeline
if (!_untexturedPipeline || !_texturedPipeline) {
createPipelines();
}
}
bool RenderableParticleEffectEntityItem::addToScene(const EntityItemPointer& self,
@ -167,7 +198,6 @@ bool RenderableParticleEffectEntityItem::addToScene(const EntityItemPointer& sel
_scene = scene;
_renderItemId = _scene->allocateID();
auto particlePayloadData = std::make_shared<ParticlePayloadData>();
particlePayloadData->setPipeline(_untexturedPipeline);
auto renderPayload = std::make_shared<ParticlePayloadData::Payload>(particlePayloadData);
render::Item::Status::Getters statusGetters;
makeEntityItemStatusGetters(getThisPointer(), statusGetters);
@ -276,47 +306,14 @@ void RenderableParticleEffectEntityItem::updateRenderItem() {
if (_texture && _texture->isLoaded()) {
payload.setTexture(_texture->getGPUTexture());
payload.setPipeline(_texturedPipeline);
} else {
payload.setTexture(nullptr);
payload.setPipeline(_untexturedPipeline);
}
});
_scene->enqueueTransaction(transaction);
}
void RenderableParticleEffectEntityItem::createPipelines() {
if (!_untexturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
PrepareStencil::testMask(*state);
auto vertShader = gpu::Shader::createVertex(std::string(untextured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(untextured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_untexturedPipeline = gpu::Pipeline::create(program, state);
}
if (!_texturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
PrepareStencil::testMask(*state);
auto vertShader = gpu::Shader::createVertex(std::string(textured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(textured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_texturedPipeline = gpu::Pipeline::create(program, state);
}
}
void RenderableParticleEffectEntityItem::notifyBoundChanged() {
if (!render::Item::isValidID(_renderItemId)) {
return;

View file

@ -34,16 +34,13 @@ protected:
virtual void locationChanged(bool tellPhysics = true) override { EntityItem::locationChanged(tellPhysics); notifyBoundChanged(); }
virtual void dimensionsChanged() override { EntityItem::dimensionsChanged(); notifyBoundChanged(); }
void notifyBoundChanged();
void notifyBoundChanged();
void createPipelines();
render::ScenePointer _scene;
render::ItemID _renderItemId{ render::Item::INVALID_ITEM_ID };
NetworkTexturePointer _texture;
gpu::PipelinePointer _untexturedPipeline;
gpu::PipelinePointer _texturedPipeline;
};

View file

@ -59,12 +59,8 @@
#include "EntityEditPacketSender.h"
#include "PhysicalEntitySimulation.h"
gpu::PipelinePointer RenderablePolyVoxEntityItem::_pipeline = nullptr;
gpu::PipelinePointer RenderablePolyVoxEntityItem::_wireframePipeline = nullptr;
const float MARCHING_CUBE_COLLISION_HULL_OFFSET = 0.5;
/*
A PolyVoxEntity has several interdependent parts:
@ -116,6 +112,10 @@ EntityItemPointer RenderablePolyVoxEntityItem::factory(const EntityItemID& entit
EntityItemPointer entity{ new RenderablePolyVoxEntityItem(entityID) };
entity->setProperties(properties);
std::static_pointer_cast<RenderablePolyVoxEntityItem>(entity)->initializePolyVox();
// As we create the first PolyVox entity, let's register its special shapePipeline factory:
PolyVoxPayload::registerShapePipeline();
return entity;
}
@ -732,35 +732,6 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
!mesh->getIndexBuffer()._buffer) {
return;
}
if (!_pipeline) {
gpu::ShaderPointer vertexShader = gpu::Shader::createVertex(std::string(polyvox_vert));
gpu::ShaderPointer pixelShader = gpu::Shader::createPixel(std::string(polyvox_frag));
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("materialBuffer"), MATERIAL_GPU_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("xMap"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("yMap"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("zMap"), 2));
gpu::ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
gpu::Shader::makeProgram(*program, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMaskDrawShape(*state);
_pipeline = gpu::Pipeline::create(program, state);
auto wireframeState = std::make_shared<gpu::State>();
wireframeState->setCullMode(gpu::State::CULL_BACK);
wireframeState->setDepthTest(true, true, gpu::LESS_EQUAL);
wireframeState->setFillMode(gpu::State::FILL_LINE);
PrepareStencil::testMaskDrawShape(*wireframeState);
_wireframePipeline = gpu::Pipeline::create(program, wireframeState);
}
if (!_vertexFormat) {
auto vf = std::make_shared<gpu::Stream::Format>();
@ -771,11 +742,6 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
gpu::Batch& batch = *args->_batch;
// Pick correct Pipeline
bool wireframe = (render::ShapeKey(args->_globalShapeKey).isWireframe());
auto pipeline = (wireframe ? _wireframePipeline : _pipeline);
batch.setPipeline(pipeline);
Transform transform(voxelToWorldMatrix());
batch.setModelTransform(transform);
batch.setInputFormat(_vertexFormat);
@ -817,7 +783,7 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
batch.setResourceTexture(2, DependencyManager::get<TextureCache>()->getWhiteTexture());
}
int voxelVolumeSizeLocation = pipeline->getProgram()->getUniforms().findLocation("voxelVolumeSize");
int voxelVolumeSizeLocation = args->_shapePipeline->pipeline->getProgram()->getUniforms().findLocation("voxelVolumeSize");
batch._glUniform3f(voxelVolumeSizeLocation, voxelVolumeSize.x, voxelVolumeSize.y, voxelVolumeSize.z);
batch.drawIndexed(gpu::TRIANGLES, (gpu::uint32)mesh->getNumIndices(), 0);
@ -848,6 +814,48 @@ void RenderablePolyVoxEntityItem::removeFromScene(const EntityItemPointer& self,
render::Item::clearID(_myItem);
}
uint8_t PolyVoxPayload::CUSTOM_PIPELINE_NUMBER = 0;
std::shared_ptr<gpu::Pipeline> PolyVoxPayload::_pipeline;
std::shared_ptr<gpu::Pipeline> PolyVoxPayload::_wireframePipeline;
render::ShapePipelinePointer PolyVoxPayload::shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key) {
if (!_pipeline) {
gpu::ShaderPointer vertexShader = gpu::Shader::createVertex(std::string(polyvox_vert));
gpu::ShaderPointer pixelShader = gpu::Shader::createPixel(std::string(polyvox_frag));
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("materialBuffer"), PolyVoxPayload::MATERIAL_GPU_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("xMap"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("yMap"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("zMap"), 2));
gpu::ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
gpu::Shader::makeProgram(*program, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMaskDrawShape(*state);
_pipeline = gpu::Pipeline::create(program, state);
auto wireframeState = std::make_shared<gpu::State>();
wireframeState->setCullMode(gpu::State::CULL_BACK);
wireframeState->setDepthTest(true, true, gpu::LESS_EQUAL);
wireframeState->setFillMode(gpu::State::FILL_LINE);
PrepareStencil::testMaskDrawShape(*wireframeState);
_wireframePipeline = gpu::Pipeline::create(program, wireframeState);
}
if (key.isWireframe()) {
return std::make_shared<render::ShapePipeline>(_wireframePipeline, nullptr, nullptr, nullptr);
} else {
return std::make_shared<render::ShapePipeline>(_pipeline, nullptr, nullptr, nullptr);
}
}
namespace render {
template <> const ItemKey payloadGetKey(const PolyVoxPayload::Pointer& payload) {
return ItemKey::Builder::opaqueShape();
@ -871,6 +879,10 @@ namespace render {
payload->_owner->getRenderableInterface()->render(args);
}
}
template <> const ShapeKey shapeGetShapeKey(const PolyVoxPayload::Pointer& payload) {
return ShapeKey::Builder().withCustom(PolyVoxPayload::CUSTOM_PIPELINE_NUMBER).build();
}
}
@ -1619,7 +1631,7 @@ void RenderablePolyVoxEntityItem::bonkNeighbors() {
void RenderablePolyVoxEntityItem::locationChanged(bool tellPhysics) {
EntityItem::locationChanged(tellPhysics);
if (!_pipeline || !render::Item::isValidID(_myItem)) {
if (!render::Item::isValidID(_myItem)) {
return;
}
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();

View file

@ -28,6 +28,19 @@
class PolyVoxPayload {
public:
static uint8_t CUSTOM_PIPELINE_NUMBER;
static render::ShapePipelinePointer shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key);
static void registerShapePipeline() {
if (!CUSTOM_PIPELINE_NUMBER) {
CUSTOM_PIPELINE_NUMBER = render::ShapePipeline::registerCustomShapePipelineFactory(shapePipelineFactory);
}
}
static const int MATERIAL_GPU_SLOT = 3;
static std::shared_ptr<gpu::Pipeline> _pipeline;
static std::shared_ptr<gpu::Pipeline> _wireframePipeline;
PolyVoxPayload(EntityItemPointer owner) : _owner(owner), _bounds(AABox()) { }
typedef render::Payload<PolyVoxPayload> Payload;
typedef Payload::DataPointer Pointer;
@ -40,6 +53,7 @@ namespace render {
template <> const ItemKey payloadGetKey(const PolyVoxPayload::Pointer& payload);
template <> const Item::Bound payloadGetBound(const PolyVoxPayload::Pointer& payload);
template <> void payloadRender(const PolyVoxPayload::Pointer& payload, RenderArgs* args);
template <> const ShapeKey shapeGetShapeKey(const PolyVoxPayload::Pointer& payload);
}
@ -168,10 +182,7 @@ private:
NetworkTexturePointer _yTexture;
NetworkTexturePointer _zTexture;
const int MATERIAL_GPU_SLOT = 3;
render::ItemID _myItem{ render::Item::INVALID_ITEM_ID };
static gpu::PipelinePointer _pipeline;
static gpu::PipelinePointer _wireframePipeline;
ShapeInfo _shapeInfo;

View file

@ -1,18 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// fragment shader
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
in vec4 _color;
out vec4 outFragColor;
void main(void) {
outFragColor = _color;
}

View file

@ -1,25 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// particle vertex shader
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Color.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec4 _color;
void main(void) {
// pass along the color
_color = colorToLinearRGBA(inColor);
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>
}

View file

@ -46,10 +46,8 @@ bool AddEntityOperator::preRecursion(const OctreeElementPointer& element) {
// If this element is the best fit for the new entity properties, then add/or update it
if (entityTreeElement->bestFitBounds(_newEntityBox)) {
_tree->addEntityMapEntry(_newEntity);
entityTreeElement->addEntityItem(_newEntity);
_tree->setContainingElement(_newEntity->getEntityItemID(), entityTreeElement);
_foundNew = true;
keepSearching = false;
} else {

View file

@ -96,7 +96,7 @@ bool DeleteEntityOperator::preRecursion(const OctreeElementPointer& element) {
bool entityDeleted = entityTreeElement->removeEntityItem(theEntity); // remove it from the element
assert(entityDeleted);
(void)entityDeleted; // quiet the unused-variable warning
_tree->setContainingElement(details.entity->getEntityItemID(), NULL); // update or id to element lookup
_tree->clearEntityMapEntry(details.entity->getEntityItemID());
_foundCount++;
}
}

View file

@ -89,7 +89,8 @@ EntityItem::EntityItem(const EntityItemID& entityItemID) :
EntityItem::~EntityItem() {
// clear out any left-over actions
EntityTreePointer entityTree = _element ? _element->getTree() : nullptr;
EntityTreeElementPointer element = _element; // use local copy of _element for logic below
EntityTreePointer entityTree = element ? element->getTree() : nullptr;
EntitySimulationPointer simulation = entityTree ? entityTree->getSimulation() : nullptr;
if (simulation) {
clearActions(simulation);
@ -880,8 +881,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
// Tracking for editing roundtrips here. We will tell our EntityTree that we just got incoming data about
// an entity that was edited at some time in the past. The tree will determine how it wants to track this
// information.
if (_element && _element->getTree()) {
_element->getTree()->trackIncomingEntityLastEdited(lastEditedFromBufferAdjusted, bytesRead);
EntityTreeElementPointer element = _element; // use local copy of _element for logic below
if (element && element->getTree()) {
element->getTree()->trackIncomingEntityLastEdited(lastEditedFromBufferAdjusted, bytesRead);
}
@ -2056,7 +2058,8 @@ bool EntityItem::removeActionInternal(const QUuid& actionID, EntitySimulationPoi
_previouslyDeletedActions.insert(actionID, usecTimestampNow());
if (_objectActions.contains(actionID)) {
if (!simulation) {
EntityTreePointer entityTree = _element ? _element->getTree() : nullptr;
EntityTreeElementPointer element = _element; // use local copy of _element for logic below
EntityTreePointer entityTree = element ? element->getTree() : nullptr;
simulation = entityTree ? entityTree->getSimulation() : nullptr;
}
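
All three hunks above apply the same defensive idiom: read `_element` once into a local smart pointer and do every null-check and dereference on that copy, so the member being cleared between the check and the use cannot leave a dangling access. A hedged, simplified sketch of the shape of that pattern (the real code has its own pointer types and locking; this only shows the structure):

```cpp
// Copy the member once, then reuse the copy for both the check and the call.
#include <iostream>
#include <memory>

struct Tree { int id = 7; };
struct Element { std::shared_ptr<Tree> tree = std::make_shared<Tree>(); };

struct Entity {
    std::shared_ptr<Element> _element;   // may be reset elsewhere

    std::shared_ptr<Tree> getTreeSafely() const {
        std::shared_ptr<Element> element = _element;   // local copy for the logic below
        return element ? element->tree : nullptr;
    }
};

int main() {
    Entity entity;
    entity._element = std::make_shared<Element>();
    auto tree = entity.getTreeSafely();
    std::cout << (tree ? tree->id : -1) << "\n";   // 7
    return 0;
}
```
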

View file

@ -21,6 +21,7 @@
#include <VariantMapToScriptValue.h>
#include <SharedUtil.h>
#include <SpatialParentFinder.h>
#include <AvatarHashMap.h>
#include "EntityItemID.h"
#include "EntitiesLogging.h"
@ -499,6 +500,11 @@ void EntityScriptingInterface::deleteEntity(QUuid id) {
const QUuid myNodeID = nodeList->getSessionUUID();
if (entity->getClientOnly() && entity->getOwningAvatarID() != myNodeID) {
// don't delete other avatar's avatarEntities
// If you actually own the entity but the ownership property is not set because of a domain switch,
// the lines below make sure the entity is deleted once its properties are set.
auto avatarHashMap = DependencyManager::get<AvatarHashMap>();
AvatarSharedPointer myAvatar = avatarHashMap->getAvatarBySessionID(myNodeID);
myAvatar->insertDetachedEntityID(id);
shouldDelete = false;
return;
}

View file

@ -90,13 +90,17 @@ void EntityTree::eraseAllOctreeElements(bool createNewRoot) {
if (_simulation) {
_simulation->clearEntities();
}
{
QWriteLocker locker(&_entityToElementLock);
foreach(EntityTreeElementPointer element, _entityToElementMap) {
element->cleanupEntities();
QHash<EntityItemID, EntityItemPointer> localMap;
localMap.swap(_entityMap);
this->withWriteLock([&] {
foreach(EntityItemPointer entity, localMap) {
EntityTreeElementPointer element = entity->getElement();
if (element) {
element->cleanupEntities();
}
}
_entityToElementMap.clear();
}
});
localMap.clear();
Octree::eraseAllOctreeElements(createNewRoot);
resetClientEditStats();
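
The block above swaps `_entityMap` into a local container and then walks the local copy, so per-entity cleanup runs against a stable snapshot while the member map is already empty for any other caller. A hedged, standard-library sketch of that swap-then-iterate idiom (the commit's own locking differs; this only shows the shape):

```cpp
// Swap the shared container out under a short lock, then do the slower
// per-item work on the local copy without holding the lock.
#include <iostream>
#include <mutex>
#include <string>
#include <unordered_map>

std::mutex gMapLock;
std::unordered_map<int, std::string> gEntities = { {1, "box"}, {2, "sphere"} };

void clearAllEntities() {
    std::unordered_map<int, std::string> localMap;
    {
        std::lock_guard<std::mutex> guard(gMapLock);
        localMap.swap(gEntities);          // member map is now empty
    }
    for (const auto& entry : localMap) {   // unlocked per-item cleanup
        std::cout << "cleaning up " << entry.second << "\n";
    }
}

int main() {
    clearAllEntities();
    return 0;
}
```
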
@ -136,29 +140,24 @@ void EntityTree::postAddEntity(EntityItemPointer entity) {
}
bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProperties& properties, const SharedNodePointer& senderNode) {
EntityTreeElementPointer containingElement = getContainingElement(entityID);
EntityItemPointer entity;
{
QReadLocker locker(&_entityMapLock);
entity = _entityMap.value(entityID);
}
if (!entity) {
return false;
}
return updateEntity(entity, properties, senderNode);
}
bool EntityTree::updateEntity(EntityItemPointer entity, const EntityItemProperties& origProperties,
const SharedNodePointer& senderNode) {
EntityTreeElementPointer containingElement = entity->getElement();
if (!containingElement) {
return false;
}
EntityItemPointer existingEntity = containingElement->getEntityWithEntityItemID(entityID);
if (!existingEntity) {
return false;
}
return updateEntityWithElement(existingEntity, properties, containingElement, senderNode);
}
bool EntityTree::updateEntity(EntityItemPointer entity, const EntityItemProperties& properties, const SharedNodePointer& senderNode) {
EntityTreeElementPointer containingElement = getContainingElement(entity->getEntityItemID());
if (!containingElement) {
return false;
}
return updateEntityWithElement(entity, properties, containingElement, senderNode);
}
bool EntityTree::updateEntityWithElement(EntityItemPointer entity, const EntityItemProperties& origProperties,
EntityTreeElementPointer containingElement, const SharedNodePointer& senderNode) {
EntityItemProperties properties = origProperties;
bool allowLockChange;
@ -190,7 +189,7 @@ bool EntityTree::updateEntityWithElement(EntityItemPointer entity, const EntityI
bool success;
AACube queryCube = entity->getQueryAACube(success);
if (!success) {
qCDebug(entities) << "Warning -- failed to get query-cube for" << entity->getID();
qCWarning(entities) << "failed to get query-cube for" << entity->getID();
}
UpdateEntityOperator theOperator(getThisPointer(), containingElement, entity, queryCube);
recurseTreeWithOperator(&theOperator);
@ -331,9 +330,8 @@ bool EntityTree::updateEntityWithElement(EntityItemPointer entity, const EntityI
}
// TODO: this final containingElement check should eventually be removed (or wrapped in an #ifdef DEBUG).
containingElement = getContainingElement(entity->getEntityItemID());
if (!containingElement) {
qCDebug(entities) << "UNEXPECTED!!!! after updateEntity() we no longer have a containing element??? entityID="
if (!entity->getElement()) {
qCWarning(entities) << "EntityTree::updateEntity() we no longer have a containing element for entityID="
<< entity->getEntityItemID();
return false;
}
@ -366,7 +364,7 @@ EntityItemPointer EntityTree::addEntity(const EntityItemID& entityID, const Enti
// You should not call this on existing entities that are already part of the tree! Call updateEntity()
EntityTreeElementPointer containingElement = getContainingElement(entityID);
if (containingElement) {
qCDebug(entities) << "UNEXPECTED!!! ----- don't call addEntity() on existing entity items. entityID=" << entityID
qCWarning(entities) << "EntityTree::addEntity() on existing entity item with entityID=" << entityID
<< "containingElement=" << containingElement.get();
return result;
}
@ -422,7 +420,7 @@ void EntityTree::deleteEntity(const EntityItemID& entityID, bool force, bool ign
EntityTreeElementPointer containingElement = getContainingElement(entityID);
if (!containingElement) {
if (!ignoreWarnings) {
qCDebug(entities) << "UNEXPECTED!!!! EntityTree::deleteEntity() entityID doesn't exist!!! entityID=" << entityID;
qCWarning(entities) << "EntityTree::deleteEntity() on non-existent entityID=" << entityID;
}
return;
}
@ -430,8 +428,7 @@ void EntityTree::deleteEntity(const EntityItemID& entityID, bool force, bool ign
EntityItemPointer existingEntity = containingElement->getEntityWithEntityItemID(entityID);
if (!existingEntity) {
if (!ignoreWarnings) {
qCDebug(entities) << "UNEXPECTED!!!! don't call EntityTree::deleteEntity() on entity items that don't exist. "
"entityID=" << entityID;
qCWarning(entities) << "EntityTree::deleteEntity() on non-existant entity item with entityID=" << entityID;
}
return;
}
@ -478,7 +475,7 @@ void EntityTree::deleteEntities(QSet<EntityItemID> entityIDs, bool force, bool i
EntityTreeElementPointer containingElement = getContainingElement(entityID);
if (!containingElement) {
if (!ignoreWarnings) {
qCDebug(entities) << "UNEXPECTED!!!! EntityTree::deleteEntities() entityID doesn't exist!!! entityID=" << entityID;
qCWarning(entities) << "EntityTree::deleteEntities() on non-existent entityID=" << entityID;
}
continue;
}
@ -486,8 +483,7 @@ void EntityTree::deleteEntities(QSet<EntityItemID> entityIDs, bool force, bool i
EntityItemPointer existingEntity = containingElement->getEntityWithEntityItemID(entityID);
if (!existingEntity) {
if (!ignoreWarnings) {
qCDebug(entities) << "UNEXPECTED!!!! don't call EntityTree::deleteEntities() on entity items that don't exist. "
"entityID=" << entityID;
qCWarning(entities) << "EntityTree::deleteEntities() on non-existent entity item with entityID=" << entityID;
}
continue;
}
@ -975,7 +971,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
const SharedNodePointer& senderNode) {
if (!getIsServer()) {
qCDebug(entities) << "UNEXPECTED!!! processEditPacketData() should only be called on a server tree.";
qCWarning(entities) << "EntityTree::processEditPacketData() should only be called on a server tree.";
return 0;
}
@ -1502,27 +1498,43 @@ int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, cons
}
EntityTreeElementPointer EntityTree::getContainingElement(const EntityItemID& entityItemID) /*const*/ {
QReadLocker locker(&_entityToElementLock);
EntityTreeElementPointer element = _entityToElementMap.value(entityItemID);
return element;
EntityItemPointer entity;
{
QReadLocker locker(&_entityMapLock);
entity = _entityMap.value(entityItemID);
}
if (entity) {
return entity->getElement();
}
return EntityTreeElementPointer(nullptr);
}
void EntityTree::setContainingElement(const EntityItemID& entityItemID, EntityTreeElementPointer element) {
QWriteLocker locker(&_entityToElementLock);
if (element) {
_entityToElementMap[entityItemID] = element;
} else {
_entityToElementMap.remove(entityItemID);
void EntityTree::addEntityMapEntry(EntityItemPointer entity) {
EntityItemID id = entity->getEntityItemID();
QWriteLocker locker(&_entityMapLock);
EntityItemPointer otherEntity = _entityMap.value(id);
if (otherEntity) {
qCWarning(entities) << "EntityTree::addEntityMapEntry() found pre-existing id " << id;
assert(false);
return;
}
_entityMap.insert(id, entity);
}
void EntityTree::clearEntityMapEntry(const EntityItemID& id) {
QWriteLocker locker(&_entityMapLock);
_entityMap.remove(id);
}
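
`_entityMap` replaces the old id-to-element map and is guarded by `_entityMapLock`, a `QReadWriteLock`: lookups take a shared `QReadLocker`, insert/remove take an exclusive `QWriteLocker`. A compact sketch of that access pattern (illustrative class, not the commit's code):

```cpp
// Reader/writer locking around a QHash: many concurrent readers, one writer.
#include <QHash>
#include <QReadLocker>
#include <QReadWriteLock>
#include <QString>
#include <QUuid>
#include <QWriteLocker>

class EntityDirectory {
public:
    void insert(const QUuid& id, const QString& name) {
        QWriteLocker locker(&_lock);   // exclusive
        _map.insert(id, name);
    }
    QString lookup(const QUuid& id) const {
        QReadLocker locker(&_lock);    // shared with other readers
        return _map.value(id);
    }
private:
    mutable QReadWriteLock _lock;
    QHash<QUuid, QString> _map;
};

int main() {
    EntityDirectory directory;
    QUuid id = QUuid::createUuid();
    directory.insert(id, "my-entity");
    return directory.lookup(id) == "my-entity" ? 0 : 1;
}
```
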
void EntityTree::debugDumpMap() {
// QHash's are implicitly shared, so we make a shared copy and use that instead.
// This way we might be able to avoid both a lock and a true copy.
QHash<EntityItemID, EntityItemPointer> localMap(_entityMap);
qCDebug(entities) << "EntityTree::debugDumpMap() --------------------------";
QReadLocker locker(&_entityToElementLock);
QHashIterator<EntityItemID, EntityTreeElementPointer> i(_entityToElementMap);
QHashIterator<EntityItemID, EntityItemPointer> i(localMap);
while (i.hasNext()) {
i.next();
qCDebug(entities) << i.key() << ": " << i.value().get();
qCDebug(entities) << i.key() << ": " << i.value()->getElement().get();
}
qCDebug(entities) << "-----------------------------------------------------";
}
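
The comment in `debugDumpMap()` relies on Qt's implicit sharing: copying a `QHash` only bumps a reference count, and a real deep copy is deferred until one side is modified. A tiny demonstration of that copy-on-write behaviour:

```cpp
// A copied QHash shares data with the original until either one is written to.
#include <QDebug>
#include <QHash>
#include <QString>

int main() {
    QHash<int, QString> original = { {1, "a"}, {2, "b"} };
    QHash<int, QString> snapshot(original);   // cheap: shared data, no deep copy yet
    original.insert(3, "c");                  // original detaches; snapshot is untouched
    qDebug() << snapshot.size();              // 2
    qDebug() << original.size();              // 3
    return 0;
}
```
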

View file

@ -119,9 +119,6 @@ public:
// use this method if you only know the entityID
bool updateEntity(const EntityItemID& entityID, const EntityItemProperties& properties, const SharedNodePointer& senderNode = SharedNodePointer(nullptr));
// use this method if you have a pointer to the entity (avoid an extra entity lookup)
bool updateEntity(EntityItemPointer entity, const EntityItemProperties& properties, const SharedNodePointer& senderNode = SharedNodePointer(nullptr));
// check if the avatar is a child of this entity, If so set the avatar parentID to null
void unhookChildAvatar(const EntityItemID entityID);
void deleteEntity(const EntityItemID& entityID, bool force = false, bool ignoreWarnings = true);
@ -183,7 +180,8 @@ public:
int processEraseMessageDetails(const QByteArray& buffer, const SharedNodePointer& sourceNode);
EntityTreeElementPointer getContainingElement(const EntityItemID& entityItemID) /*const*/;
void setContainingElement(const EntityItemID& entityItemID, EntityTreeElementPointer element);
void addEntityMapEntry(EntityItemPointer entity);
void clearEntityMapEntry(const EntityItemID& id);
void debugDumpMap();
virtual void dumpTree() override;
virtual void pruneTree() override;
@ -275,9 +273,8 @@ signals:
protected:
void processRemovedEntities(const DeleteEntityOperator& theOperator);
bool updateEntityWithElement(EntityItemPointer entity, const EntityItemProperties& properties,
EntityTreeElementPointer containingElement,
const SharedNodePointer& senderNode = SharedNodePointer(nullptr));
bool updateEntity(EntityItemPointer entity, const EntityItemProperties& properties,
const SharedNodePointer& senderNode = SharedNodePointer(nullptr));
static bool findNearPointOperation(const OctreeElementPointer& element, void* extraData);
static bool findInSphereOperation(const OctreeElementPointer& element, void* extraData);
static bool findInCubeOperation(const OctreeElementPointer& element, void* extraData);
@@ -309,8 +306,8 @@ protected:
_deletedEntityItemIDs << id;
}
mutable QReadWriteLock _entityToElementLock;
QHash<EntityItemID, EntityTreeElementPointer> _entityToElementMap;
mutable QReadWriteLock _entityMapLock;
QHash<EntityItemID, EntityItemPointer> _entityMap;
EntitySimulationPointer _simulation;
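Note: taken together, the header changes above appear to move the id-to-element bookkeeping off the tree (the _entityToElementLock/_entityToElementMap pair) and onto the entity itself, which keeps a back-pointer to its element. A rough caller-side sketch of what that implies, using only names visible in this diff:

```cpp
#include <QtGlobal>
#include "EntityTree.h"

// Sketch only: the two lookup routes this refactor leaves in place.
void containingElementSketch(EntityTreePointer tree, EntityItemPointer entity) {
    // Route 1: ask the tree, which now resolves id -> entity -> element internally.
    EntityTreeElementPointer viaTree = tree->getContainingElement(entity->getEntityItemID());

    // Route 2: read the back-pointer directly when you already hold the entity,
    // as MovingEntitiesOperator does further down in this diff.
    EntityTreeElementPointer viaEntity = entity->getElement();

    Q_ASSERT(viaTree == viaEntity); // expected to agree while the entity is in the tree
}
```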

View file

@@ -885,10 +885,10 @@ EntityItemPointer EntityTreeElement::getEntityWithEntityItemID(const EntityItemI
void EntityTreeElement::cleanupEntities() {
withWriteLock([&] {
foreach(EntityItemPointer entity, _entityItems) {
// NOTE: only EntityTreeElement should ever be changing the value of entity->_element
// NOTE: We explicitly don't delete the EntityItem here because we only
// access it by smart pointers; when we remove it from _entityItems
// we know that it will be deleted.
//delete entity;
entity->_element = NULL;
}
_entityItems.clear();
@@ -903,6 +903,7 @@ bool EntityTreeElement::removeEntityWithEntityItemID(const EntityItemID& id) {
EntityItemPointer& entity = _entityItems[i];
if (entity->getEntityItemID() == id) {
foundEntity = true;
// NOTE: only EntityTreeElement should ever be changing the value of entity->_element
entity->_element = NULL;
_entityItems.removeAt(i);
break;
@@ -918,6 +919,7 @@ bool EntityTreeElement::removeEntityItem(EntityItemPointer entity) {
numEntries = _entityItems.removeAll(entity);
});
if (numEntries > 0) {
// NOTE: only EntityTreeElement should ever be changing the value of entity->_element
assert(entity->_element.get() == this);
entity->_element = NULL;
return true;
@@ -1001,7 +1003,6 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
if (currentContainingElement.get() != this) {
currentContainingElement->removeEntityItem(entityItem);
addEntityItem(entityItem);
_myTree->setContainingElement(entityItemID, getThisPointer());
}
}
}
@@ -1032,9 +1033,9 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
// don't add if we've recently deleted....
if (!_myTree->isDeletedEntity(entityItem->getID())) {
_myTree->addEntityMapEntry(entityItem);
addEntityItem(entityItem); // add this new entity to this element's entities
entityItemID = entityItem->getEntityItemID();
_myTree->setContainingElement(entityItemID, getThisPointer());
_myTree->postAddEntity(entityItem);
if (entityItem->getCreated() == UNKNOWN_CREATED_TIME) {
entityItem->recordCreationTime();

View file

@@ -0,0 +1,38 @@
//
// HoverOverlayInterface.cpp
// libraries/entities/src
//
// Created by Zach Fox on 2017-07-14.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HoverOverlayInterface.h"
HoverOverlayInterface::HoverOverlayInterface() {
// "hover_overlay" debug log category disabled by default.
// Create your own "qtlogging.ini" file and set your "QT_LOGGING_CONF" environment variable
// if you'd like to enable/disable certain categories.
// More details: http://doc.qt.io/qt-5/qloggingcategory.html#configuring-categories
QLoggingCategory::setFilterRules(QStringLiteral("hifi.hover_overlay.debug=false"));
}
void HoverOverlayInterface::createHoverOverlay(const EntityItemID& entityItemID, const PointerEvent& event) {
qCDebug(hover_overlay) << "Creating Hover Overlay on top of entity with ID: " << entityItemID;
setCurrentHoveredEntity(entityItemID);
}
void HoverOverlayInterface::createHoverOverlay(const EntityItemID& entityItemID) {
HoverOverlayInterface::createHoverOverlay(entityItemID, PointerEvent());
}
void HoverOverlayInterface::destroyHoverOverlay(const EntityItemID& entityItemID, const PointerEvent& event) {
qCDebug(hover_overlay) << "Destroying Hover Overlay on top of entity with ID: " << entityItemID;
setCurrentHoveredEntity(QUuid());
}
void HoverOverlayInterface::destroyHoverOverlay(const EntityItemID& entityItemID) {
HoverOverlayInterface::destroyHoverOverlay(entityItemID, PointerEvent());
}
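Note: the constructor comment above describes the stock Qt mechanism for toggling this category. As a minimal sketch, it can also be flipped on programmatically; the helper name below is illustrative, and only the rule string comes from the code above.

```cpp
#include <QLoggingCategory>
#include <QString>
#include "HoverOverlayLogging.h"

// Illustrative helper: overrides the "hifi.hover_overlay.debug=false" default
// that HoverOverlayInterface's constructor installs.
void enableHoverOverlayDebugLogging() {
    QLoggingCategory::setFilterRules(QStringLiteral("hifi.hover_overlay.debug=true"));
    qCDebug(hover_overlay) << "hover_overlay debug logging enabled";
}
```

Equivalently, the QT_LOGGING_RULES environment variable accepts the same rule string, and QT_LOGGING_CONF can point at a qtlogging.ini with a [Rules] section, as the comment notes.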

View file

@@ -0,0 +1,49 @@
//
// HoverOverlayInterface.h
// libraries/entities/src
//
// Created by Zach Fox on 2017-07-14.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_HoverOverlayInterface_h
#define hifi_HoverOverlayInterface_h
#include <QtCore/QObject>
#include <QUuid>
#include <DependencyManager.h>
#include <PointerEvent.h>
#include "EntityTree.h"
#include "HoverOverlayLogging.h"
/**jsdoc
* @namespace HoverOverlay
*/
class HoverOverlayInterface : public QObject, public Dependency {
Q_OBJECT
Q_PROPERTY(QUuid currentHoveredEntity READ getCurrentHoveredEntity WRITE setCurrentHoveredEntity)
public:
HoverOverlayInterface();
Q_INVOKABLE QUuid getCurrentHoveredEntity() { return _currentHoveredEntity; }
void setCurrentHoveredEntity(const QUuid& entityID) { _currentHoveredEntity = entityID; }
public slots:
void createHoverOverlay(const EntityItemID& entityItemID, const PointerEvent& event);
void createHoverOverlay(const EntityItemID& entityItemID);
void destroyHoverOverlay(const EntityItemID& entityItemID, const PointerEvent& event);
void destroyHoverOverlay(const EntityItemID& entityItemID);
private:
bool _verboseLogging { true };
QUuid _currentHoveredEntity{};
};
#endif // hifi_HoverOverlayInterface_h
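Note: since the class derives from Dependency and the header pulls in DependencyManager.h, consumers presumably reach it through the dependency manager. A hedged sketch of that usage follows; the registration site is not part of this diff, and the function names below are placeholders.

```cpp
#include <DependencyManager.h>
#include <PointerEvent.h>
#include "HoverOverlayInterface.h"

// Placeholder: registration would normally happen once at application startup.
void registerHoverOverlayDependency() {
    DependencyManager::set<HoverOverlayInterface>();
}

// Placeholder: later code resolves the shared instance and drives the overlay.
void hoverOverlaySketch(const EntityItemID& entityItemID, const PointerEvent& event) {
    auto hoverOverlay = DependencyManager::get<HoverOverlayInterface>();
    hoverOverlay->createHoverOverlay(entityItemID, event);
    // ... and when the pointer leaves the entity:
    hoverOverlay->destroyHoverOverlay(entityItemID, event);
}
```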

View file

@@ -0,0 +1,14 @@
//
// HoverOverlayLogging.cpp
// libraries/entities/src
//
// Created by Zach Fox on 2017-07-17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HoverOverlayLogging.h"
Q_LOGGING_CATEGORY(hover_overlay, "hifi.hover_overlay")

View file

@@ -0,0 +1,20 @@
//
// HoverOverlayLogging.h
// libraries/entities/src
//
// Created by Zach Fox on 2017-07-17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_HoverOverlayLogging_h
#define hifi_HoverOverlayLogging_h
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(hover_overlay)
#endif // hifi_HoverOverlayLogging_h

View file

@@ -51,7 +51,7 @@ MovingEntitiesOperator::~MovingEntitiesOperator() {
void MovingEntitiesOperator::addEntityToMoveList(EntityItemPointer entity, const AACube& newCube) {
EntityTreeElementPointer oldContainingElement = _tree->getContainingElement(entity->getEntityItemID());
EntityTreeElementPointer oldContainingElement = entity->getElement();
AABox newCubeClamped = newCube.clamp((float)-HALF_TREE_SCALE, (float)HALF_TREE_SCALE);
if (_wantDebug) {
@@ -193,7 +193,6 @@ bool MovingEntitiesOperator::preRecursion(const OctreeElementPointer& element) {
// If this element is the best fit for the new bounds of this entity then add the entity to the element
if (!details.newFound && entityTreeElement->bestFitBounds(details.newCube)) {
EntityItemID entityItemID = details.entity->getEntityItemID();
// remove from the old before adding
EntityTreeElementPointer oldElement = details.entity->getElement();
if (oldElement != entityTreeElement) {
@@ -201,7 +200,6 @@ bool MovingEntitiesOperator::preRecursion(const OctreeElementPointer& element) {
oldElement->removeEntityItem(details.entity);
}
entityTreeElement->addEntityItem(details.entity);
_tree->setContainingElement(entityItemID, entityTreeElement);
}
_foundNewCount++;
//details.newFound = true; // TODO: would be nice to add this optimization
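Note: the hunks above drop the setContainingElement() call because moving an entity between elements is now expressed purely through the element methods, which keep the entity's back-pointer in sync (removeEntityItem clears it, and addEntityItem is implied to set it, since no separate map update remains). A condensed sketch of that sequence, using only names that appear in this diff:

```cpp
#include "EntityTreeElement.h"

// Simplified sketch of the move-between-elements flow after this change;
// the real operator also does best-fit checks, locking, and bookkeeping.
void moveEntitySketch(EntityItemPointer entity, EntityTreeElementPointer newElement) {
    EntityTreeElementPointer oldElement = entity->getElement();
    if (oldElement && oldElement != newElement) {
        oldElement->removeEntityItem(entity);   // clears entity->_element
    }
    newElement->addEntityItem(entity);          // assumed to set entity->_element to newElement
    // No tree->setContainingElement() call needed any more: the back-pointer is the map.
}
```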

Some files were not shown because too many files have changed in this diff.