Merge branch 'master' of github.com:highfidelity/hifi into audio/threaded-local

Commit 3d526a1024
144 changed files with 2993 additions and 1449 deletions
|
@ -9,6 +9,8 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Agent.h"
|
||||
|
||||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QEventLoop>
|
||||
#include <QtCore/QStandardPaths>
|
||||
|
@ -46,14 +48,12 @@
|
|||
#include "RecordingScriptingInterface.h"
|
||||
#include "AbstractAudioInterface.h"
|
||||
|
||||
#include "Agent.h"
|
||||
#include "AvatarAudioTimer.h"
|
||||
|
||||
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
|
||||
|
||||
Agent::Agent(ReceivedMessage& message) :
|
||||
ThreadedAssignment(message),
|
||||
_entityEditSender(),
|
||||
_receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES) {
|
||||
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
|
||||
|
||||
|
@ -68,7 +68,7 @@ Agent::Agent(ReceivedMessage& message) :
|
|||
DependencyManager::set<recording::Recorder>();
|
||||
DependencyManager::set<RecordingScriptingInterface>();
|
||||
DependencyManager::set<ScriptCache>();
|
||||
DependencyManager::set<ScriptEngines>();
|
||||
DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT);
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
|
||||
|
@ -143,7 +143,7 @@ void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
|
|||
_receivedAudioStream.clearBuffer();
|
||||
}
|
||||
|
||||
const QString AGENT_LOGGING_NAME = "agent";
|
||||
static const QString AGENT_LOGGING_NAME = "agent";
|
||||
|
||||
void Agent::run() {
|
||||
|
||||
|
@ -321,7 +321,7 @@ void Agent::scriptRequestFinished() {
|
|||
}
|
||||
|
||||
void Agent::executeScript() {
|
||||
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(_scriptContents, _payload));
|
||||
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload));
|
||||
_scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do
|
||||
|
||||
// setup an Avatar for the script to use
|
||||
|
@ -376,6 +376,9 @@ void Agent::executeScript() {
|
|||
|
||||
_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
|
||||
|
||||
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
|
||||
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());
|
||||
|
||||
// we need to make sure that init has been called for our EntityScriptingInterface
|
||||
// so that it actually has a jurisdiction listener when we ask it for it next
|
||||
entityScriptingInterface->init();
|
||||
|
|
|
@ -12,12 +12,13 @@
|
|||
#include <udt/PacketHeaders.h>
|
||||
|
||||
#include "Agent.h"
|
||||
#include "assets/AssetServer.h"
|
||||
#include "AssignmentFactory.h"
|
||||
#include "audio/AudioMixer.h"
|
||||
#include "avatars/AvatarMixer.h"
|
||||
#include "entities/EntityServer.h"
|
||||
#include "assets/AssetServer.h"
|
||||
#include "messages/MessagesMixer.h"
|
||||
#include "scripts/EntityScriptServer.h"
|
||||
|
||||
ThreadedAssignment* AssignmentFactory::unpackAssignment(ReceivedMessage& message) {
|
||||
|
||||
|
@ -39,7 +40,9 @@ ThreadedAssignment* AssignmentFactory::unpackAssignment(ReceivedMessage& message
|
|||
return new AssetServer(message);
|
||||
case Assignment::MessagesMixerType:
|
||||
return new MessagesMixer(message);
|
||||
case Assignment::EntityScriptServerType:
|
||||
return new EntityScriptServer(message);
|
||||
default:
|
||||
return NULL;
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@
|
|||
#include "NodeType.h"
|
||||
#include "SendAssetTask.h"
|
||||
#include "UploadAssetTask.h"
|
||||
#include <ClientServerUtils.h>
|
||||
|
||||
static const uint8_t MIN_CORES_FOR_MULTICORE = 4;
|
||||
static const uint8_t CPU_AFFINITY_COUNT_HIGH = 2;
|
||||
|
@ -190,7 +191,7 @@ void AssetServer::completeSetup() {
|
|||
cleanupUnmappedFiles();
|
||||
}
|
||||
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({ NodeType::Agent, NodeType::EntityScriptServer });
|
||||
} else {
|
||||
qCritical() << "Asset Server assignment will not continue because mapping file could not be loaded.";
|
||||
setFinished(true);
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
#include <udt/Packet.h>
|
||||
|
||||
#include "AssetUtils.h"
|
||||
#include "ClientServerUtils.h"
|
||||
|
||||
SendAssetTask::SendAssetTask(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& sendToNode, const QDir& resourcesDir) :
|
||||
QRunnable(),
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
#include <NodeList.h>
|
||||
#include <NLPacketList.h>
|
||||
|
||||
#include "ClientServerUtils.h"
|
||||
|
||||
|
||||
UploadAssetTask::UploadAssetTask(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode,
|
||||
const QDir& resourcesDir) :
|
||||
|
|
|
@ -36,9 +36,8 @@
|
|||
|
||||
#include "AudioMixer.h"
|
||||
|
||||
static const float LOUDNESS_TO_DISTANCE_RATIO = 0.00001f;
|
||||
static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
|
||||
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 0.003f;
|
||||
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;
|
||||
static const QString AUDIO_MIXER_LOGGING_TARGET_NAME = "audio-mixer";
|
||||
static const QString AUDIO_ENV_GROUP_KEY = "audio_env";
|
||||
static const QString AUDIO_BUFFER_GROUP_KEY = "audio_buffer";
|
||||
|
@ -47,9 +46,6 @@ static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
|
|||
int AudioMixer::_numStaticJitterFrames{ -1 };
|
||||
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
|
||||
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
|
||||
float AudioMixer::_trailingSleepRatio{ 1.0f };
|
||||
float AudioMixer::_performanceThrottlingRatio{ 0.0f };
|
||||
float AudioMixer::_minAudibilityThreshold{ LOUDNESS_TO_DISTANCE_RATIO / 2.0f };
|
||||
QHash<QString, AABox> AudioMixer::_audioZones;
|
||||
QVector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
|
||||
QVector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
|
||||
|
@ -69,7 +65,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
|
|||
packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");
|
||||
packetReceiver.registerListener(PacketType::NodeMuteRequest, this, "handleNodeMuteRequestPacket");
|
||||
packetReceiver.registerListener(PacketType::RadiusIgnoreRequest, this, "handleRadiusIgnoreRequestPacket");
|
||||
packetReceiver.registerListener(PacketType::RequestsDomainListData, this, "handleRequestsDomainListDataPacket");
|
||||
packetReceiver.registerListener(PacketType::RequestsDomainListData, this, "handleRequestsDomainListDataPacket");
|
||||
packetReceiver.registerListener(PacketType::PerAvatarGainSet, this, "handlePerAvatarGainSetDataPacket");
|
||||
|
||||
connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
|
||||
|
@ -294,35 +290,31 @@ void AudioMixer::sendStatsPacket() {
|
|||
|
||||
// general stats
|
||||
statsObject["useDynamicJitterBuffers"] = _numStaticJitterFrames == -1;
|
||||
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
|
||||
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
|
||||
|
||||
statsObject["threads"] = _slavePool.numThreads();
|
||||
|
||||
statsObject["trailing_mix_ratio"] = _trailingMixRatio;
|
||||
statsObject["throttling_ratio"] = _throttlingRatio;
|
||||
|
||||
statsObject["avg_streams_per_frame"] = (float)_stats.sumStreams / (float)_numStatFrames;
|
||||
statsObject["avg_listeners_per_frame"] = (float)_stats.sumListeners / (float)_numStatFrames;
|
||||
|
||||
// timing stats
|
||||
QJsonObject timingStats;
|
||||
uint64_t timing, trailing;
|
||||
|
||||
_sleepTiming.get(timing, trailing);
|
||||
timingStats["us_per_sleep"] = (qint64)(timing / _numStatFrames);
|
||||
timingStats["us_per_sleep_trailing"] = (qint64)(trailing / _numStatFrames);
|
||||
auto addTiming = [&](Timer& timer, std::string name) {
|
||||
uint64_t timing, trailing;
|
||||
timer.get(timing, trailing);
|
||||
timingStats[("us_per_" + name).c_str()] = (qint64)(timing / _numStatFrames);
|
||||
timingStats[("us_per_" + name + "_trailing").c_str()] = (qint64)(trailing / _numStatFrames);
|
||||
};
|
||||
|
||||
_frameTiming.get(timing, trailing);
|
||||
timingStats["us_per_frame"] = (qint64)(timing / _numStatFrames);
|
||||
timingStats["us_per_frame_trailing"] = (qint64)(trailing / _numStatFrames);
|
||||
|
||||
_prepareTiming.get(timing, trailing);
|
||||
timingStats["us_per_prepare"] = (qint64)(timing / _numStatFrames);
|
||||
timingStats["us_per_prepare_trailing"] = (qint64)(trailing / _numStatFrames);
|
||||
|
||||
_mixTiming.get(timing, trailing);
|
||||
timingStats["us_per_mix"] = (qint64)(timing / _numStatFrames);
|
||||
timingStats["us_per_mix_trailing"] = (qint64)(trailing / _numStatFrames);
|
||||
|
||||
_eventsTiming.get(timing, trailing);
|
||||
timingStats["us_per_events"] = (qint64)(timing / _numStatFrames);
|
||||
timingStats["us_per_events_trailing"] = (qint64)(trailing / _numStatFrames);
|
||||
addTiming(_ticTiming, "tic");
|
||||
addTiming(_sleepTiming, "sleep");
|
||||
addTiming(_frameTiming, "frame");
|
||||
addTiming(_prepareTiming, "prepare");
|
||||
addTiming(_mixTiming, "mix");
|
||||
addTiming(_eventsTiming, "events");
|
||||
|
||||
// call it "avg_..." to keep it higher in the display, sorted alphabetically
|
||||
statsObject["avg_timing_stats"] = timingStats;
|
||||
|
@ -332,7 +324,7 @@ void AudioMixer::sendStatsPacket() {
|
|||
|
||||
mixStats["%_hrtf_mixes"] = percentageForMixStats(_stats.hrtfRenders);
|
||||
mixStats["%_hrtf_silent_mixes"] = percentageForMixStats(_stats.hrtfSilentRenders);
|
||||
mixStats["%_hrtf_struggle_mixes"] = percentageForMixStats(_stats.hrtfStruggleRenders);
|
||||
mixStats["%_hrtf_throttle_mixes"] = percentageForMixStats(_stats.hrtfThrottleRenders);
|
||||
mixStats["%_manual_stereo_mixes"] = percentageForMixStats(_stats.manualStereoMixes);
|
||||
mixStats["%_manual_echo_mixes"] = percentageForMixStats(_stats.manualEchoMixes);
|
||||
|
||||
|
@ -398,7 +390,7 @@ void AudioMixer::start() {
|
|||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
// prepare the NodeList
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({ NodeType::Agent, NodeType::EntityScriptServer });
|
||||
nodeList->linkedDataCreateCallback = [&](Node* node) { getOrCreateClientData(node); };
|
||||
|
||||
// parse out any AudioMixer settings
|
||||
|
@ -408,25 +400,25 @@ void AudioMixer::start() {
|
|||
parseSettingsObject(settingsObject);
|
||||
}
|
||||
|
||||
// manageLoad state
|
||||
auto frameTimestamp = p_high_resolution_clock::time_point::min();
|
||||
unsigned int framesSinceManagement = std::numeric_limits<int>::max();
|
||||
|
||||
// mix state
|
||||
unsigned int frame = 1;
|
||||
auto frameTimestamp = p_high_resolution_clock::now();
|
||||
|
||||
while (!_isFinished) {
|
||||
auto ticTimer = _ticTiming.timer();
|
||||
|
||||
{
|
||||
auto timer = _sleepTiming.timer();
|
||||
manageLoad(frameTimestamp, framesSinceManagement);
|
||||
auto frameDuration = timeFrame(frameTimestamp);
|
||||
throttle(frameDuration, frame);
|
||||
}
|
||||
|
||||
auto timer = _frameTiming.timer();
|
||||
auto frameTimer = _frameTiming.timer();
|
||||
|
||||
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
|
||||
// prepare frames; pop off any new audio from their streams
|
||||
{
|
||||
auto timer = _prepareTiming.timer();
|
||||
auto prepareTimer = _prepareTiming.timer();
|
||||
std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
|
||||
_stats.sumStreams += prepareFrame(node, frame);
|
||||
});
|
||||
|
@ -434,8 +426,8 @@ void AudioMixer::start() {
|
|||
|
||||
// mix across slave threads
|
||||
{
|
||||
auto timer = _mixTiming.timer();
|
||||
_slavePool.mix(cbegin, cend, frame);
|
||||
auto mixTimer = _mixTiming.timer();
|
||||
_slavePool.mix(cbegin, cend, frame, _throttlingRatio);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -450,7 +442,7 @@ void AudioMixer::start() {
|
|||
|
||||
// play nice with qt event-looping
|
||||
{
|
||||
auto timer = _eventsTiming.timer();
|
||||
auto eventsTimer = _eventsTiming.timer();
|
||||
|
||||
// since we're a while loop we need to yield to qt's event processing
|
||||
QCoreApplication::processEvents();
|
||||
|
@ -464,67 +456,66 @@ void AudioMixer::start() {
|
|||
}
|
||||
}
|
||||
|
||||
void AudioMixer::manageLoad(p_high_resolution_clock::time_point& frameTimestamp, unsigned int& framesSinceCutoffEvent) {
|
||||
auto timeToSleep = std::chrono::microseconds(0);
|
||||
std::chrono::microseconds AudioMixer::timeFrame(p_high_resolution_clock::time_point& timestamp) {
|
||||
// advance the next frame
|
||||
auto nextTimestamp = timestamp + std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
|
||||
auto now = p_high_resolution_clock::now();
|
||||
|
||||
// sleep until the next frame, if necessary
|
||||
{
|
||||
// advance the next frame
|
||||
frameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
|
||||
auto now = p_high_resolution_clock::now();
|
||||
// compute how long the last frame took
|
||||
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(now - timestamp);
|
||||
|
||||
// calculate sleep
|
||||
if (frameTimestamp < now) {
|
||||
frameTimestamp = now;
|
||||
} else {
|
||||
timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(frameTimestamp - now);
|
||||
std::this_thread::sleep_for(timeToSleep);
|
||||
}
|
||||
}
|
||||
// set the new frame timestamp
|
||||
timestamp = std::max(now, nextTimestamp);
|
||||
|
||||
// manage mixer load
|
||||
{
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
// sleep until the next frame should start
|
||||
std::this_thread::sleep_until(timestamp);
|
||||
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
return duration;
|
||||
}
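An aside on the frame pacing used in timeFrame() above: instead of sleeping for a computed interval, it advances a fixed deadline, measures how long the last frame actually took, and sleeps until the next deadline (clamped forward if the frame overran). A minimal, self-contained sketch of the same pattern follows; the function name, the `running` flag, and the 10 ms period are illustrative stand-ins, not code from this commit.

#include <algorithm>
#include <chrono>
#include <thread>

// Illustrative sketch only (not the mixer's code): a fixed-rate loop that
// measures how long the previous frame took, then sleeps until the next
// deadline. Clamping the deadline forward keeps one slow frame from causing
// a burst of catch-up frames.
void runFixedRateLoop(volatile bool& running) {
    using clock = std::chrono::steady_clock;
    const auto framePeriod = std::chrono::microseconds(10000); // stand-in for NETWORK_FRAME_USECS

    auto deadline = clock::now();
    while (running) {
        auto next = deadline + framePeriod;
        auto now = clock::now();
        auto lastFrameDuration =
            std::chrono::duration_cast<std::chrono::microseconds>(now - deadline);
        (void)lastFrameDuration; // the real mixer feeds this into throttle()

        deadline = std::max(now, next); // never schedule a deadline in the past
        std::this_thread::sleep_until(deadline);

        // ... one frame of mixing work would go here ...
    }
}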
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
|
||||
// throttle using a modified proportional-integral controller
|
||||
const float FRAME_TIME = 10000.0f;
|
||||
float mixRatio = duration.count() / FRAME_TIME;
|
||||
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio) +
|
||||
// ratio of frame spent sleeping / total frame time
|
||||
((CURRENT_FRAME_RATIO * timeToSleep.count()) / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
// constants are determined based on a "regular" 16-CPU EC2 server
|
||||
|
||||
bool hasRatioChanged = false;
|
||||
// target different mix and backoff ratios (they also have different backoff rates)
|
||||
// this is to prevent oscillation, and encourage throttling to find a steady state
|
||||
const float TARGET = 0.9f;
|
||||
// on a "regular" machine with 100 avatars, this is the largest value where
|
||||
// - overthrottling can be recovered
|
||||
// - oscillations will not occur after the recovery
|
||||
const float BACKOFF_TARGET = 0.44f;
|
||||
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
qDebug() << "Mixer is struggling";
|
||||
// change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
qDebug() << "Mixer is recovering";
|
||||
// back off the required loudness
|
||||
_performanceThrottlingRatio = std::max(0.0f, _performanceThrottlingRatio - RATIO_BACK_OFF);
|
||||
hasRatioChanged = true;
|
||||
}
|
||||
// the mixer is known to struggle at about 80 on a "regular" machine
|
||||
// so throttle 2/80 the streams to ensure smooth audio (throttling is linear)
|
||||
const float THROTTLE_RATE = 2 / 80.0f;
|
||||
const float BACKOFF_RATE = THROTTLE_RATE / 4;
|
||||
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
framesSinceCutoffEvent = 0;
|
||||
// recovery should be bounded so that large changes in user count is a tolerable experience
|
||||
// throttling is linear, so most cases will not need a full recovery
|
||||
const int RECOVERY_TIME = 180;
|
||||
|
||||
qDebug() << "Sleeping" << _trailingSleepRatio << "of frame";
|
||||
qDebug() << "Cutoff is" << _performanceThrottlingRatio;
|
||||
qDebug() << "Minimum audibility to be mixed is" << _minAudibilityThreshold;
|
||||
}
|
||||
}
|
||||
// weight more recent frames to determine if throttling is necessary,
|
||||
const int TRAILING_FRAMES = (int)(100 * RECOVERY_TIME * BACKOFF_RATE);
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
_trailingMixRatio = PREVIOUS_FRAMES_RATIO * _trailingMixRatio + CURRENT_FRAME_RATIO * mixRatio;
|
||||
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
if (frame % TRAILING_FRAMES == 0) {
|
||||
if (_trailingMixRatio > TARGET) {
|
||||
int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
|
||||
_throttlingRatio += THROTTLE_RATE * proportionalTerm;
|
||||
_throttlingRatio = std::min(_throttlingRatio, 1.0f);
|
||||
qDebug("audio-mixer is struggling (%f mix/sleep) - throttling %f of streams",
|
||||
(double)_trailingMixRatio, (double)_throttlingRatio);
|
||||
} else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
|
||||
int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
|
||||
_throttlingRatio -= BACKOFF_RATE * proportionalTerm;
|
||||
_throttlingRatio = std::max(_throttlingRatio, 0.0f);
|
||||
qDebug("audio-mixer is recovering (%f mix/sleep) - throttling %f of streams",
|
||||
(double)_trailingMixRatio, (double)_throttlingRatio);
|
||||
}
|
||||
}
|
||||
}
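The throttle() body above is essentially a proportional controller acting on an exponentially weighted moving average of the mix-time / frame-time ratio, with hysteresis between the struggle and recovery targets. A self-contained sketch of that control step is shown below; the struct and function names are illustrative, while the constants mirror the values that appear in the diff (TARGET 0.9, BACKOFF_TARGET 0.44, THROTTLE_RATE 2/80, BACKOFF_RATE = THROTTLE_RATE/4).

#include <algorithm>

// Illustrative sketch of the proportional throttling step, not the mixer's code.
// 'mixRatio' is lastFrameDuration / framePeriod for one frame.
struct ThrottleState {
    float trailingMixRatio = 0.0f;
    float throttlingRatio = 0.0f;
};

void updateThrottle(ThrottleState& s, float mixRatio, int frame, int trailingFrames) {
    // exponentially weighted moving average over roughly trailingFrames frames
    const float currentWeight = 1.0f / trailingFrames;
    s.trailingMixRatio = (1.0f - currentWeight) * s.trailingMixRatio + currentWeight * mixRatio;

    if (frame % trailingFrames != 0) {
        return; // only adjust occasionally, so the average has time to settle
    }

    const float TARGET = 0.9f;          // start throttling above this load
    const float BACKOFF_TARGET = 0.44f; // only recover well below target (hysteresis)
    const float THROTTLE_RATE = 2.0f / 80.0f;
    const float BACKOFF_RATE = THROTTLE_RATE / 4.0f;

    if (s.trailingMixRatio > TARGET) {
        int proportionalTerm = 1 + (int)((s.trailingMixRatio - TARGET) / 0.1f);
        s.throttlingRatio = std::min(s.throttlingRatio + THROTTLE_RATE * proportionalTerm, 1.0f);
    } else if (s.throttlingRatio > 0.0f && s.trailingMixRatio <= BACKOFF_TARGET) {
        int proportionalTerm = 1 + (int)((TARGET - s.trailingMixRatio) / 0.2f);
        s.throttlingRatio = std::max(s.throttlingRatio - BACKOFF_RATE * proportionalTerm, 0.0f);
    }
}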
|
||||
|
|
|
@ -46,7 +46,6 @@ public:
|
|||
static int getStaticJitterFrames() { return _numStaticJitterFrames; }
|
||||
static bool shouldMute(float quietestFrame) { return quietestFrame > _noiseMutingThreshold; }
|
||||
static float getAttenuationPerDoublingInDistance() { return _attenuationPerDoublingInDistance; }
|
||||
static float getMinimumAudibilityThreshold() { return _performanceThrottlingRatio > 0.0f ? _minAudibilityThreshold : 0.0f; }
|
||||
static const QHash<QString, AABox>& getAudioZones() { return _audioZones; }
|
||||
static const QVector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
|
||||
static const QVector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
|
||||
|
@ -73,8 +72,8 @@ private slots:
|
|||
|
||||
private:
|
||||
// mixing helpers
|
||||
// check and maybe throttle mixer load by changing audibility threshold
|
||||
void manageLoad(p_high_resolution_clock::time_point& frameTimestamp, unsigned int& framesSinceManagement);
|
||||
std::chrono::microseconds timeFrame(p_high_resolution_clock::time_point& timestamp);
|
||||
void throttle(std::chrono::microseconds frameDuration, int frame);
|
||||
// pop a frame from any streams on the node
|
||||
// returns the number of available streams
|
||||
int prepareFrame(const SharedNodePointer& node, unsigned int frame);
|
||||
|
@ -85,6 +84,9 @@ private:
|
|||
|
||||
void parseSettingsObject(const QJsonObject& settingsObject);
|
||||
|
||||
float _trailingMixRatio { 0.0f };
|
||||
float _throttlingRatio { 0.0f };
|
||||
|
||||
int _numStatFrames { 0 };
|
||||
AudioMixerStats _stats;
|
||||
|
||||
|
@ -113,6 +115,7 @@ private:
|
|||
uint64_t _history[TIMER_TRAILING_SECONDS] {};
|
||||
int _index { 0 };
|
||||
};
|
||||
Timer _ticTiming;
|
||||
Timer _sleepTiming;
|
||||
Timer _frameTiming;
|
||||
Timer _prepareTiming;
|
||||
|
@ -122,9 +125,6 @@ private:
|
|||
static int _numStaticJitterFrames; // -1 denotes dynamic jitter buffering
|
||||
static float _noiseMutingThreshold;
|
||||
static float _attenuationPerDoublingInDistance;
|
||||
static float _trailingSleepRatio;
|
||||
static float _performanceThrottlingRatio;
|
||||
static float _minAudibilityThreshold;
|
||||
static QHash<QString, AABox> _audioZones;
|
||||
static QVector<ZoneSettings> _zoneSettings;
|
||||
static QVector<ReverbSettings> _zoneReverbSettings;
|
||||
|
|
|
@ -36,6 +36,292 @@
|
|||
|
||||
#include "AudioMixerSlave.h"
|
||||
|
||||
using AudioStreamMap = AudioMixerClientData::AudioStreamMap;
|
||||
|
||||
// packet helpers
|
||||
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec);
|
||||
void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QByteArray& buffer);
|
||||
void sendSilentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
|
||||
void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
|
||||
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
|
||||
|
||||
// mix helpers
|
||||
bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node);
|
||||
float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
const glm::vec3& relativePosition, bool isEcho);
|
||||
float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
const glm::vec3& relativePosition);
|
||||
|
||||
void AudioMixerSlave::configure(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
|
||||
_begin = begin;
|
||||
_end = end;
|
||||
_frame = frame;
|
||||
_throttlingRatio = throttlingRatio;
|
||||
}
|
||||
|
||||
void AudioMixerSlave::mix(const SharedNodePointer& node) {
|
||||
// check that the node is valid
|
||||
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
|
||||
if (data == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
// check that the stream is valid
|
||||
auto avatarStream = data->getAvatarAudioStream();
|
||||
if (avatarStream == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
// send mute packet, if necessary
|
||||
if (AudioMixer::shouldMute(avatarStream->getQuietestFrameLoudness()) || data->shouldMuteClient()) {
|
||||
sendMutePacket(node, *data);
|
||||
}
|
||||
|
||||
// send audio packets, if necessary
|
||||
if (node->getType() == NodeType::Agent && node->getActiveSocket()) {
|
||||
++stats.sumListeners;
|
||||
|
||||
// mix the audio
|
||||
bool mixHasAudio = prepareMix(node);
|
||||
|
||||
// send audio packet
|
||||
if (mixHasAudio || data->shouldFlushEncoder()) {
|
||||
QByteArray encodedBuffer;
|
||||
if (mixHasAudio) {
|
||||
// encode the audio
|
||||
QByteArray decodedBuffer(reinterpret_cast<char*>(_bufferSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
data->encode(decodedBuffer, encodedBuffer);
|
||||
} else {
|
||||
// time to flush (resets shouldFlush until the next encode)
|
||||
data->encodeFrameOfZeros(encodedBuffer);
|
||||
}
|
||||
|
||||
sendMixPacket(node, *data, encodedBuffer);
|
||||
} else {
|
||||
sendSilentPacket(node, *data);
|
||||
}
|
||||
|
||||
// send environment packet
|
||||
sendEnvironmentPacket(node, *data);
|
||||
|
||||
// send stats packet (about every second)
|
||||
const unsigned int NUM_FRAMES_PER_SEC = (int)ceil(AudioConstants::NETWORK_FRAMES_PER_SEC);
|
||||
if (data->shouldSendStats(_frame % NUM_FRAMES_PER_SEC)) {
|
||||
data->sendAudioStreamStatsPackets(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
|
||||
AvatarAudioStream* listenerAudioStream = static_cast<AudioMixerClientData*>(listener->getLinkedData())->getAvatarAudioStream();
|
||||
AudioMixerClientData* listenerData = static_cast<AudioMixerClientData*>(listener->getLinkedData());
|
||||
|
||||
// zero out the mix for this listener
|
||||
memset(_mixSamples, 0, sizeof(_mixSamples));
|
||||
|
||||
bool isThrottling = _throttlingRatio > 0.0f;
|
||||
std::vector<std::pair<float, SharedNodePointer>> throttledNodes;
|
||||
|
||||
typedef void (AudioMixerSlave::*MixFunctor)(
|
||||
AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
|
||||
auto allStreams = [&](const SharedNodePointer& node, MixFunctor mixFunctor) {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
(this->*mixFunctor)(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
|
||||
}
|
||||
};
|
||||
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
|
||||
if (*node == *listener) {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
|
||||
// only mix the echo, if requested
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
if (nodeStream->shouldLoopbackForNode()) {
|
||||
mixStream(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
|
||||
}
|
||||
}
|
||||
} else if (!shouldIgnoreNode(listener, node)) {
|
||||
if (!isThrottling) {
|
||||
allStreams(node, &AudioMixerSlave::mixStream);
|
||||
} else {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
|
||||
// compute the node's max relative volume
|
||||
float nodeVolume;
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
float distance = glm::length(nodeStream->getPosition() - listenerAudioStream->getPosition());
|
||||
nodeVolume = std::max(nodeStream->getLastPopOutputTrailingLoudness() / distance, nodeVolume);
|
||||
}
|
||||
|
||||
// max-heapify the nodes by relative volume
|
||||
throttledNodes.push_back(std::make_pair(nodeVolume, node));
|
||||
if (!throttledNodes.empty()) {
|
||||
std::push_heap(throttledNodes.begin(), throttledNodes.end());
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (isThrottling) {
|
||||
// pop the loudest nodes off the heap and mix their streams
|
||||
int numToRetain = (int)(std::distance(_begin, _end) * (1 - _throttlingRatio));
|
||||
for (int i = 0; i < numToRetain; i++) {
|
||||
if (throttledNodes.empty()) {
|
||||
break;
|
||||
}
|
||||
|
||||
std::pop_heap(throttledNodes.begin(), throttledNodes.end());
|
||||
|
||||
auto& node = throttledNodes.back().second;
|
||||
allStreams(node, &AudioMixerSlave::mixStream);
|
||||
|
||||
throttledNodes.pop_back();
|
||||
}
|
||||
|
||||
// throttle the remaining nodes' streams
|
||||
for (const std::pair<float, SharedNodePointer>& nodePair : throttledNodes) {
|
||||
auto& node = nodePair.second;
|
||||
allStreams(node, &AudioMixerSlave::throttleStream);
|
||||
}
|
||||
}
|
||||
|
||||
// use the per listener AudioLimiter to render the mixed data...
|
||||
listenerData->audioLimiter.render(_mixSamples, _bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
// check for silent audio after the peak limiter has converted the samples
|
||||
bool hasAudio = false;
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; ++i) {
|
||||
if (_bufferSamples[i] != 0) {
|
||||
hasAudio = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return hasAudio;
|
||||
}
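When throttling is active, prepareMix() keeps only the loudest fraction of nodes by pushing (volume, node) pairs onto a max-heap and popping the top entries, then rendering the remainder as throttled streams. A minimal sketch of that top-k selection with std::push_heap / std::pop_heap is given below; plain integers stand in for nodes, and the function name is illustrative.

#include <algorithm>
#include <utility>
#include <vector>

// Illustrative sketch: mix the k loudest items normally, throttle the rest.
// std::make_heap / std::pop_heap maintain a max-heap ordered by pair::first.
void mixLoudestFirst(std::vector<std::pair<float, int>> nodesByVolume, size_t numToRetain) {
    std::make_heap(nodesByVolume.begin(), nodesByVolume.end());

    for (size_t i = 0; i < numToRetain && !nodesByVolume.empty(); ++i) {
        std::pop_heap(nodesByVolume.begin(), nodesByVolume.end()); // loudest moves to the back
        auto loudest = nodesByVolume.back();
        nodesByVolume.pop_back();
        (void)loudest;
        // ... mix this node's streams normally (mixStream equivalent) ...
    }

    // whatever is left is over budget: render it silently instead
    for (const auto& throttled : nodesByVolume) {
        (void)throttled;
        // ... throttleStream equivalent ...
    }
}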
|
||||
|
||||
void AudioMixerSlave::throttleStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
|
||||
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, true);
|
||||
}
|
||||
|
||||
void AudioMixerSlave::mixStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
|
||||
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, false);
|
||||
}
|
||||
|
||||
void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
bool throttle) {
|
||||
++stats.totalMixes;
|
||||
|
||||
// to reduce artifacts we call the HRTF functor for every source, even if throttled or silent
|
||||
// this ensures the correct tail from last mixed block and the correct spatialization of next first block
|
||||
|
||||
// check if this is a server echo of a source back to itself
|
||||
bool isEcho = (&streamToAdd == &listeningNodeStream);
|
||||
|
||||
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
|
||||
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
float gain = gainForSource(listeningNodeStream, streamToAdd, relativePosition, isEcho);
|
||||
float azimuth = isEcho ? 0.0f : azimuthForSource(listeningNodeStream, listeningNodeStream, relativePosition);
|
||||
static const int HRTF_DATASET_INDEX = 1;
|
||||
|
||||
if (!streamToAdd.lastPopSucceeded()) {
|
||||
bool forceSilentBlock = true;
|
||||
|
||||
if (!streamToAdd.getLastPopOutput().isNull()) {
|
||||
bool isInjector = dynamic_cast<const InjectedAudioStream*>(&streamToAdd);
|
||||
|
||||
// in an injector, just go silent - the injector has likely ended
|
||||
// in other inputs (microphone, &c.), repeat with fade to avoid the harsh jump to silence
|
||||
if (!isInjector) {
|
||||
// calculate its fade factor, which depends on how many times it's already been repeated.
|
||||
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd.getConsecutiveNotMixedCount() - 1);
|
||||
if (fadeFactor > 0.0f) {
|
||||
// apply the fadeFactor to the gain
|
||||
gain *= fadeFactor;
|
||||
forceSilentBlock = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (forceSilentBlock) {
|
||||
// call renderSilent with a forced silent block to reduce artifacts
|
||||
// (this is not done for stereo streams since they do not go through the HRTF)
|
||||
if (!streamToAdd.isStereo() && !isEcho) {
|
||||
// get the existing listener-source HRTF object, or create a new one
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
|
||||
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
|
||||
hrtf.renderSilent(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfSilentRenders;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// grab the stream from the ring buffer
|
||||
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd.getLastPopOutput();
|
||||
|
||||
// stereo sources are not passed through HRTF
|
||||
if (streamToAdd.isStereo()) {
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; ++i) {
|
||||
_mixSamples[i] += float(streamPopOutput[i] * gain / AudioConstants::MAX_SAMPLE_VALUE);
|
||||
}
|
||||
|
||||
++stats.manualStereoMixes;
|
||||
return;
|
||||
}
|
||||
|
||||
// echo sources are not passed through HRTF
|
||||
if (isEcho) {
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i += 2) {
|
||||
auto monoSample = float(streamPopOutput[i / 2] * gain / AudioConstants::MAX_SAMPLE_VALUE);
|
||||
_mixSamples[i] += monoSample;
|
||||
_mixSamples[i + 1] += monoSample;
|
||||
}
|
||||
|
||||
++stats.manualEchoMixes;
|
||||
return;
|
||||
}
|
||||
|
||||
// get the existing listener-source HRTF object, or create a new one
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
|
||||
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
if (streamToAdd.getLastPopOutputLoudness() == 0.0f) {
|
||||
// call renderSilent to reduce artifacts
|
||||
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfSilentRenders;
|
||||
return;
|
||||
}
|
||||
|
||||
if (throttle) {
|
||||
// call renderSilent with actual frame data and a gain of 0.0f to reduce artifacts
|
||||
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfThrottleRenders;
|
||||
return;
|
||||
}
|
||||
|
||||
hrtf.render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfRenders;
|
||||
}
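calculateRepeatedFrameFadeFactor(), used above when a stream has no fresh block and its last block is repeated, is defined elsewhere in the audio library and its exact curve is not shown in this diff. As a hedged illustration only, a stand-in that fades linearly to silence over a fixed number of repeats could look like the following; the name, the default of 10 repeats, and the linear shape are assumptions.

#include <algorithm>

// Hypothetical stand-in for calculateRepeatedFrameFadeFactor; the real
// implementation may use a different curve. Returns 1.0 for the first
// repeat and fades linearly to 0.0 by maxRepeats.
float repeatedFrameFadeFactor(int timesAlreadyRepeated, int maxRepeats = 10) {
    float t = (float)timesAlreadyRepeated / (float)maxRepeats;
    return std::max(0.0f, 1.0f - t);
}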
|
||||
|
||||
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec) {
|
||||
auto audioPacket = NLPacket::create(type, size);
|
||||
audioPacket->writePrimitive(sequence);
|
||||
|
@ -73,6 +359,14 @@ void sendSilentPacket(const SharedNodePointer& node, AudioMixerClientData& data)
|
|||
data.incrementOutgoingMixedAudioSequenceNumber();
|
||||
}
|
||||
|
||||
void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData& data) {
|
||||
auto mutePacket = NLPacket::create(PacketType::NoisyMute, 0);
|
||||
DependencyManager::get<NodeList>()->sendPacket(std::move(mutePacket), *node);
|
||||
|
||||
// probably now we just reset the flag, once should do it (?)
|
||||
data.setShouldMuteClient(false);
|
||||
}
|
||||
|
||||
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data) {
|
||||
bool hasReverb = false;
|
||||
float reverbTime, wetLevel;
|
||||
|
@ -134,285 +428,54 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
|
|||
}
|
||||
}
|
||||
|
||||
void AudioMixerSlave::configure(ConstIter begin, ConstIter end, unsigned int frame) {
|
||||
_begin = begin;
|
||||
_end = end;
|
||||
_frame = frame;
|
||||
}
|
||||
|
||||
void AudioMixerSlave::mix(const SharedNodePointer& node) {
|
||||
// check that the node is valid
|
||||
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
|
||||
if (data == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto avatarStream = data->getAvatarAudioStream();
|
||||
if (avatarStream == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
// send mute packet, if necessary
|
||||
if (AudioMixer::shouldMute(avatarStream->getQuietestFrameLoudness()) || data->shouldMuteClient()) {
|
||||
auto mutePacket = NLPacket::create(PacketType::NoisyMute, 0);
|
||||
DependencyManager::get<NodeList>()->sendPacket(std::move(mutePacket), *node);
|
||||
|
||||
// probably now we just reset the flag, once should do it (?)
|
||||
data->setShouldMuteClient(false);
|
||||
}
|
||||
|
||||
// send audio packets, if necessary
|
||||
if (node->getType() == NodeType::Agent && node->getActiveSocket()) {
|
||||
++stats.sumListeners;
|
||||
|
||||
// mix the audio
|
||||
bool mixHasAudio = prepareMix(node);
|
||||
|
||||
// send audio packet
|
||||
if (mixHasAudio || data->shouldFlushEncoder()) {
|
||||
// encode the audio
|
||||
QByteArray encodedBuffer;
|
||||
if (mixHasAudio) {
|
||||
QByteArray decodedBuffer(reinterpret_cast<char*>(_bufferSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
data->encode(decodedBuffer, encodedBuffer);
|
||||
} else {
|
||||
// time to flush, which resets the shouldFlush until next time we encode something
|
||||
data->encodeFrameOfZeros(encodedBuffer);
|
||||
}
|
||||
|
||||
sendMixPacket(node, *data, encodedBuffer);
|
||||
} else {
|
||||
sendSilentPacket(node, *data);
|
||||
}
|
||||
|
||||
// send environment packet
|
||||
sendEnvironmentPacket(node, *data);
|
||||
|
||||
// send stats packet (about every second)
|
||||
static const unsigned int NUM_FRAMES_PER_SEC = (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC);
|
||||
if (data->shouldSendStats(_frame % NUM_FRAMES_PER_SEC)) {
|
||||
data->sendAudioStreamStatsPackets(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioMixerSlave::prepareMix(const SharedNodePointer& node) {
|
||||
AvatarAudioStream* nodeAudioStream = static_cast<AudioMixerClientData*>(node->getLinkedData())->getAvatarAudioStream();
|
||||
bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node) {
|
||||
AudioMixerClientData* listenerData = static_cast<AudioMixerClientData*>(listener->getLinkedData());
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
|
||||
// zero out the client mix for this node
|
||||
memset(_mixSamples, 0, sizeof(_mixSamples));
|
||||
// when this is true, the AudioMixer will send Audio data to a client about avatars that have ignored them
|
||||
bool getsAnyIgnored = listenerData->getRequestsDomainListData() && listener->getCanKick();
|
||||
|
||||
// loop through all other nodes that have sufficient audio to mix
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& otherNode){
|
||||
// make sure that we have audio data for this other node
|
||||
// and that it isn't being ignored by our listening node
|
||||
// and that it isn't ignoring our listening node
|
||||
AudioMixerClientData* otherData = static_cast<AudioMixerClientData*>(otherNode->getLinkedData());
|
||||
bool ignore = true;
|
||||
|
||||
// When this is true, the AudioMixer will send Audio data to a client about avatars that have ignored them
|
||||
bool getsAnyIgnored = nodeData->getRequestsDomainListData() && node->getCanKick();
|
||||
if (nodeData &&
|
||||
// make sure that it isn't being ignored by our listening node
|
||||
(!listener->isIgnoringNodeWithID(node->getUUID()) || (nodeData->getRequestsDomainListData() && node->getCanKick())) &&
|
||||
// and that it isn't ignoring our listening node
|
||||
(!node->isIgnoringNodeWithID(listener->getUUID()) || getsAnyIgnored)) {
|
||||
|
||||
if (otherData
|
||||
&& (!node->isIgnoringNodeWithID(otherNode->getUUID()) || (otherData->getRequestsDomainListData() && otherNode->getCanKick()))
|
||||
&& (!otherNode->isIgnoringNodeWithID(node->getUUID()) || getsAnyIgnored)) {
|
||||
// is either node enabling the space bubble / ignore radius?
|
||||
if ((listener->isIgnoreRadiusEnabled() || node->isIgnoreRadiusEnabled())) {
|
||||
// define the minimum bubble size
|
||||
static const glm::vec3 minBubbleSize = glm::vec3(0.3f, 1.3f, 0.3f);
|
||||
|
||||
// check to see if we're ignoring in radius
|
||||
bool insideIgnoreRadius = false;
|
||||
// If the otherNode equals the node, we're doing a comparison on ourselves
|
||||
if (*otherNode == *node) {
|
||||
// We'll always be inside the radius in that case.
|
||||
insideIgnoreRadius = true;
|
||||
// Check to see if the space bubble is enabled
|
||||
} else if ((node->isIgnoreRadiusEnabled() || otherNode->isIgnoreRadiusEnabled())) {
|
||||
// Define the minimum bubble size
|
||||
static const glm::vec3 minBubbleSize = glm::vec3(0.3f, 1.3f, 0.3f);
|
||||
AudioMixerClientData* nodeData = reinterpret_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
// Set up the bounding box for the current node
|
||||
AABox nodeBox(nodeData->getAvatarBoundingBoxCorner(), nodeData->getAvatarBoundingBoxScale());
|
||||
// Clamp the size of the bounding box to a minimum scale
|
||||
if (glm::any(glm::lessThan(nodeData->getAvatarBoundingBoxScale(), minBubbleSize))) {
|
||||
nodeBox.setScaleStayCentered(minBubbleSize);
|
||||
}
|
||||
// Set up the bounding box for the current other node
|
||||
AABox otherNodeBox(otherData->getAvatarBoundingBoxCorner(), otherData->getAvatarBoundingBoxScale());
|
||||
// Clamp the size of the bounding box to a minimum scale
|
||||
if (glm::any(glm::lessThan(otherData->getAvatarBoundingBoxScale(), minBubbleSize))) {
|
||||
otherNodeBox.setScaleStayCentered(minBubbleSize);
|
||||
}
|
||||
// Quadruple the scale of both bounding boxes
|
||||
nodeBox.embiggen(4.0f);
|
||||
otherNodeBox.embiggen(4.0f);
|
||||
|
||||
// Perform the collision check between the two bounding boxes
|
||||
if (nodeBox.touches(otherNodeBox)) {
|
||||
insideIgnoreRadius = true;
|
||||
}
|
||||
// set up the bounding box for the listener
|
||||
AABox listenerBox(listenerData->getAvatarBoundingBoxCorner(), listenerData->getAvatarBoundingBoxScale());
|
||||
if (glm::any(glm::lessThan(listenerData->getAvatarBoundingBoxScale(), minBubbleSize))) {
|
||||
listenerBox.setScaleStayCentered(minBubbleSize);
|
||||
}
|
||||
|
||||
// Enumerate the audio streams attached to the otherNode
|
||||
auto streamsCopy = otherData->getAudioStreams();
|
||||
for (auto& streamPair : streamsCopy) {
|
||||
auto otherNodeStream = streamPair.second;
|
||||
bool isSelfWithEcho = ((*otherNode == *node) && (otherNodeStream->shouldLoopbackForNode()));
|
||||
// Add all audio streams that should be added to the mix
|
||||
if (isSelfWithEcho || (!isSelfWithEcho && !insideIgnoreRadius)) {
|
||||
addStreamToMix(*nodeData, otherNode->getUUID(), *nodeAudioStream, *otherNodeStream);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// use the per listener AudioLimiter to render the mixed data...
|
||||
nodeData->audioLimiter.render(_mixSamples, _bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
// check for silent audio after the peak limiter has converted the samples
|
||||
bool hasAudio = false;
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; ++i) {
|
||||
if (_bufferSamples[i] != 0) {
|
||||
hasAudio = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return hasAudio;
|
||||
}
|
||||
|
||||
void AudioMixerSlave::addStreamToMix(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
|
||||
// to reduce artifacts we calculate the gain and azimuth for every source for this listener
|
||||
// even if we are not going to end up mixing in this source
|
||||
|
||||
++stats.totalMixes;
|
||||
|
||||
// this ensures that the tail of any previously mixed audio or the first block of new audio sounds correct
|
||||
|
||||
// check if this is a server echo of a source back to itself
|
||||
bool isEcho = (&streamToAdd == &listeningNodeStream);
|
||||
|
||||
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
|
||||
|
||||
// figure out the distance between source and listener
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
|
||||
// figure out the gain for this source at the listener
|
||||
float gain = gainForSource(listeningNodeStream, streamToAdd, relativePosition, isEcho);
|
||||
|
||||
// figure out the azimuth to this source at the listener
|
||||
float azimuth = isEcho ? 0.0f : azimuthForSource(listeningNodeStream, listeningNodeStream, relativePosition);
|
||||
|
||||
float repeatedFrameFadeFactor = 1.0f;
|
||||
|
||||
static const int HRTF_DATASET_INDEX = 1;
|
||||
|
||||
if (!streamToAdd.lastPopSucceeded()) {
|
||||
bool forceSilentBlock = true;
|
||||
|
||||
if (!streamToAdd.getLastPopOutput().isNull()) {
|
||||
bool isInjector = dynamic_cast<const InjectedAudioStream*>(&streamToAdd);
|
||||
|
||||
// in an injector, just go silent - the injector has likely ended
|
||||
// in other inputs (microphone, &c.), repeat with fade to avoid the harsh jump to silence
|
||||
|
||||
// we'll repeat the last block until it has a block to mix
|
||||
// and we'll gradually fade that repeated block into silence.
|
||||
|
||||
// calculate its fade factor, which depends on how many times it's already been repeated.
|
||||
repeatedFrameFadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd.getConsecutiveNotMixedCount() - 1);
|
||||
if (!isInjector && repeatedFrameFadeFactor > 0.0f) {
|
||||
// apply the repeatedFrameFadeFactor to the gain
|
||||
gain *= repeatedFrameFadeFactor;
|
||||
|
||||
forceSilentBlock = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (forceSilentBlock) {
|
||||
// we're deciding not to repeat either since we've already done it enough times or repetition with fade is disabled
|
||||
// in this case we will call renderSilent with a forced silent block
|
||||
// this ensures the correct tail from the previously mixed block and the correct spatialization of first block
|
||||
// of any upcoming audio
|
||||
|
||||
if (!streamToAdd.isStereo() && !isEcho) {
|
||||
// get the existing listener-source HRTF object, or create a new one
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
|
||||
// this is not done for stereo streams since they do not go through the HRTF
|
||||
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
|
||||
hrtf.renderSilent(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfSilentRenders;
|
||||
// set up the bounding box for the node
|
||||
AABox nodeBox(nodeData->getAvatarBoundingBoxCorner(), nodeData->getAvatarBoundingBoxScale());
|
||||
// Clamp the size of the bounding box to a minimum scale
|
||||
if (glm::any(glm::lessThan(nodeData->getAvatarBoundingBoxScale(), minBubbleSize))) {
|
||||
nodeBox.setScaleStayCentered(minBubbleSize);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
// quadruple the scale of both bounding boxes
|
||||
listenerBox.embiggen(4.0f);
|
||||
nodeBox.embiggen(4.0f);
|
||||
|
||||
// grab the stream from the ring buffer
|
||||
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd.getLastPopOutput();
|
||||
|
||||
if (streamToAdd.isStereo() || isEcho) {
|
||||
// this is a stereo source or server echo so we do not pass it through the HRTF
|
||||
// simply apply our calculated gain to each sample
|
||||
if (streamToAdd.isStereo()) {
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; ++i) {
|
||||
_mixSamples[i] += float(streamPopOutput[i] * gain / AudioConstants::MAX_SAMPLE_VALUE);
|
||||
}
|
||||
|
||||
++stats.manualStereoMixes;
|
||||
// perform the collision check between the two bounding boxes
|
||||
ignore = listenerBox.touches(nodeBox);
|
||||
} else {
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i += 2) {
|
||||
auto monoSample = float(streamPopOutput[i / 2] * gain / AudioConstants::MAX_SAMPLE_VALUE);
|
||||
_mixSamples[i] += monoSample;
|
||||
_mixSamples[i + 1] += monoSample;
|
||||
}
|
||||
|
||||
++stats.manualEchoMixes;
|
||||
ignore = false;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// get the existing listener-source HRTF object, or create a new one
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
|
||||
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
// if the frame we're about to mix is silent, simply call render silent and move on
|
||||
if (streamToAdd.getLastPopOutputLoudness() == 0.0f) {
|
||||
// silent frame from source
|
||||
|
||||
// we still need to call renderSilent via the HRTF for mono source
|
||||
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfSilentRenders;
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
float audibilityThreshold = AudioMixer::getMinimumAudibilityThreshold();
|
||||
if (audibilityThreshold > 0.0f &&
|
||||
streamToAdd.getLastPopOutputTrailingLoudness() / glm::length(relativePosition) <= audibilityThreshold) {
|
||||
// the mixer is struggling so we're going to drop off some streams
|
||||
|
||||
// we call renderSilent via the HRTF with the actual frame data and a gain of 0.0
|
||||
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfStruggleRenders;
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
++stats.hrtfRenders;
|
||||
|
||||
// mono stream, call the HRTF with our block and calculated azimuth and gain
|
||||
hrtf.render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
return ignore;
|
||||
}
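The space-bubble test in shouldIgnoreNode() above clamps each avatar's bounding box to a minimum size, scales both boxes up by 4x about their centers, and treats any overlap as "inside the ignore radius". A compact sketch of that geometric check using plain glm vectors is shown below; it mirrors the AABox::setScaleStayCentered / embiggen / touches calls in the diff without depending on the engine types, so the function name and the (corner, scale) box representation are assumptions.

#include <glm/glm.hpp>

// Illustrative stand-in for the bubble test: boxes are (corner, scale) pairs,
// clamped to a minimum size, quadrupled about their centers, then overlap-tested.
bool bubblesTouch(glm::vec3 cornerA, glm::vec3 scaleA, glm::vec3 cornerB, glm::vec3 scaleB) {
    const glm::vec3 minBubbleSize(0.3f, 1.3f, 0.3f);
    const float embiggenFactor = 4.0f;

    auto grow = [&](glm::vec3& corner, glm::vec3& scale) {
        glm::vec3 center = corner + 0.5f * scale;
        if (glm::any(glm::lessThan(scale, minBubbleSize))) {
            scale = minBubbleSize;        // clamp tiny avatars to a minimum bubble
        }
        scale *= embiggenFactor;          // quadruple the box
        corner = center - 0.5f * scale;   // keep it centered
    };
    grow(cornerA, scaleA);
    grow(cornerB, scaleB);

    // axis-aligned overlap test
    glm::vec3 maxA = cornerA + scaleA;
    glm::vec3 maxB = cornerB + scaleB;
    return glm::all(glm::lessThanEqual(cornerA, maxB)) && glm::all(glm::lessThanEqual(cornerB, maxA));
}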
|
||||
|
||||
float AudioMixerSlave::gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
const glm::vec3& relativePosition, bool isEcho) {
|
||||
float gain = 1.0f;
|
||||
|
||||
|
@ -472,7 +535,7 @@ float AudioMixerSlave::gainForSource(const AvatarAudioStream& listeningNodeStrea
|
|||
return gain;
|
||||
}
|
||||
|
||||
float AudioMixerSlave::azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
const glm::vec3& relativePosition) {
|
||||
glm::quat inverseOrientation = glm::inverse(listeningNodeStream.getOrientation());
|
||||
|
||||
|
@ -482,7 +545,7 @@ float AudioMixerSlave::azimuthForSource(const AvatarAudioStream& listeningNodeSt
|
|||
// project the rotated source position vector onto the XZ plane
|
||||
rotatedSourcePosition.y = 0.0f;
|
||||
|
||||
static const float SOURCE_DISTANCE_THRESHOLD = 1e-30f;
|
||||
const float SOURCE_DISTANCE_THRESHOLD = 1e-30f;
|
||||
|
||||
if (glm::length2(rotatedSourcePosition) > SOURCE_DISTANCE_THRESHOLD) {
|
||||
// produce an oriented angle about the y-axis
|
||||
|
|
|
@ -30,7 +30,7 @@ class AudioMixerSlave {
|
|||
public:
|
||||
using ConstIter = NodeList::const_iterator;
|
||||
|
||||
void configure(ConstIter begin, ConstIter end, unsigned int frame);
|
||||
void configure(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
|
||||
|
||||
// mix and broadcast non-ignored streams to the node
|
||||
// returns true if a mixed packet was sent to the node
|
||||
|
@ -40,15 +40,14 @@ public:
|
|||
|
||||
private:
|
||||
// create mix, returns true if mix has audio
|
||||
bool prepareMix(const SharedNodePointer& node);
|
||||
// add a stream to the mix
|
||||
void addStreamToMix(AudioMixerClientData& listenerData, const QUuid& streamerID,
|
||||
bool prepareMix(const SharedNodePointer& listener);
|
||||
void throttleStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
|
||||
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
|
||||
|
||||
float gainForSource(const AvatarAudioStream& listener, const PositionalAudioStream& streamer,
|
||||
const glm::vec3& relativePosition, bool isEcho);
|
||||
float azimuthForSource(const AvatarAudioStream& listener, const PositionalAudioStream& streamer,
|
||||
const glm::vec3& relativePosition);
|
||||
void mixStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
|
||||
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
|
||||
void addStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
|
||||
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer,
|
||||
bool throttle);
|
||||
|
||||
// mixing buffers
|
||||
float _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
|
||||
|
@ -58,6 +57,7 @@ private:
|
|||
ConstIter _begin;
|
||||
ConstIter _end;
|
||||
unsigned int _frame { 0 };
|
||||
float _throttlingRatio { 0.0f };
|
||||
};
|
||||
|
||||
#endif // hifi_AudioMixerSlave_h
|
||||
|
|
|
@ -41,7 +41,7 @@ void AudioMixerSlaveThread::wait() {
|
|||
});
|
||||
++_pool._numStarted;
|
||||
}
|
||||
configure(_pool._begin, _pool._end, _pool._frame);
|
||||
configure(_pool._begin, _pool._end, _pool._frame, _pool._throttlingRatio);
|
||||
}
|
||||
|
||||
void AudioMixerSlaveThread::notify(bool stopping) {
|
||||
|
@ -64,13 +64,14 @@ bool AudioMixerSlaveThread::try_pop(SharedNodePointer& node) {
|
|||
static AudioMixerSlave slave;
|
||||
#endif
|
||||
|
||||
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame) {
|
||||
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
|
||||
_begin = begin;
|
||||
_end = end;
|
||||
_frame = frame;
|
||||
_throttlingRatio = throttlingRatio;
|
||||
|
||||
#ifdef AUDIO_SINGLE_THREADED
|
||||
slave.configure(_begin, _end, frame);
|
||||
slave.configure(_begin, _end, frame, throttlingRatio);
|
||||
std::for_each(begin, end, [&](const SharedNodePointer& node) {
|
||||
slave.mix(node);
|
||||
});
|
||||
|
@ -131,7 +132,7 @@ void AudioMixerSlavePool::setNumThreads(int numThreads) {
|
|||
}
|
||||
|
||||
void AudioMixerSlavePool::resize(int numThreads) {
|
||||
assert(_numThreads == _slaves.size());
|
||||
assert(_numThreads == (int)_slaves.size());
|
||||
|
||||
#ifdef AUDIO_SINGLE_THREADED
|
||||
qDebug("%s: running single threaded", __FUNCTION__, numThreads);
|
||||
|
@ -182,6 +183,6 @@ void AudioMixerSlavePool::resize(int numThreads) {
|
|||
}
|
||||
|
||||
_numThreads = _numStarted = _numFinished = numThreads;
|
||||
assert(_numThreads == _slaves.size());
|
||||
assert(_numThreads == (int)_slaves.size());
|
||||
#endif
|
||||
}
|
||||
|
|
|
@ -61,7 +61,7 @@ public:
|
|||
~AudioMixerSlavePool() { resize(0); }
|
||||
|
||||
// mix on slave threads
|
||||
void mix(ConstIter begin, ConstIter end, unsigned int frame);
|
||||
void mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
|
||||
|
||||
// iterate over all slaves
|
||||
void each(std::function<void(AudioMixerSlave& slave)> functor);
|
||||
|
@ -90,6 +90,7 @@ private:
|
|||
// frame state
|
||||
Queue _queue;
|
||||
unsigned int _frame { 0 };
|
||||
float _throttlingRatio { 0.0f };
|
||||
ConstIter _begin;
|
||||
ConstIter _end;
|
||||
};
|
||||
|
|
|
@ -17,7 +17,7 @@ void AudioMixerStats::reset() {
|
|||
totalMixes = 0;
|
||||
hrtfRenders = 0;
|
||||
hrtfSilentRenders = 0;
|
||||
hrtfStruggleRenders = 0;
|
||||
hrtfThrottleRenders = 0;
|
||||
manualStereoMixes = 0;
|
||||
manualEchoMixes = 0;
|
||||
}
|
||||
|
@ -28,7 +28,7 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
|
|||
totalMixes += otherStats.totalMixes;
|
||||
hrtfRenders += otherStats.hrtfRenders;
|
||||
hrtfSilentRenders += otherStats.hrtfSilentRenders;
|
||||
hrtfStruggleRenders += otherStats.hrtfStruggleRenders;
|
||||
hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
|
||||
manualStereoMixes += otherStats.manualStereoMixes;
|
||||
manualEchoMixes += otherStats.manualEchoMixes;
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ struct AudioMixerStats {
|
|||
|
||||
int hrtfRenders { 0 };
|
||||
int hrtfSilentRenders { 0 };
|
||||
int hrtfStruggleRenders { 0 };
|
||||
int hrtfThrottleRenders { 0 };
|
||||
|
||||
int manualStereoMixes { 0 };
|
||||
int manualEchoMixes { 0 };
|
||||
|
|
|
@ -681,8 +681,8 @@ void AvatarMixer::run() {
|
|||
|
||||
void AvatarMixer::domainSettingsRequestComplete() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({ NodeType::Agent, NodeType::EntityScriptServer });
|
||||
|
||||
// parse the settings to pull out the values we need
|
||||
parseDomainServerSettings(nodeList->getDomainHandler().getSettingsObject());
|
||||
|
||||
|
|
|
@ -11,6 +11,8 @@
|
|||
|
||||
#include "AssignmentParentFinder.h"
|
||||
|
||||
#include <AvatarHashMap.h>
|
||||
|
||||
SpatiallyNestableWeakPointer AssignmentParentFinder::find(QUuid parentID, bool& success, SpatialParentTree* entityTree) const {
|
||||
SpatiallyNestableWeakPointer parent;
|
||||
|
||||
|
@ -25,10 +27,21 @@ SpatiallyNestableWeakPointer AssignmentParentFinder::find(QUuid parentID, bool&
|
|||
} else {
|
||||
parent = _tree->findEntityByEntityItemID(parentID);
|
||||
}
|
||||
if (parent.expired()) {
|
||||
success = false;
|
||||
} else {
|
||||
if (!parent.expired()) {
|
||||
success = true;
|
||||
return parent;
|
||||
}
|
||||
|
||||
// search avatars
|
||||
if (DependencyManager::isSet<AvatarHashMap>()) {
|
||||
auto avatarHashMap = DependencyManager::get<AvatarHashMap>();
|
||||
parent = avatarHashMap->getAvatarBySessionID(parentID);
|
||||
if (!parent.expired()) {
|
||||
success = true;
|
||||
return parent;
|
||||
}
|
||||
}
|
||||
|
||||
success = false;
|
||||
return parent;
|
||||
}
|
||||
|
|
|
@@ -44,8 +44,7 @@ void MessagesMixer::handleMessages(QSharedPointer<ReceivedMessage> receivedMessa
|
|||
|
||||
nodeList->eachMatchingNode(
|
||||
[&](const SharedNodePointer& node)->bool {
|
||||
return node->getType() == NodeType::Agent && node->getActiveSocket() &&
|
||||
_channelSubscribers[channel].contains(node->getUUID());
|
||||
return node->getActiveSocket() && _channelSubscribers[channel].contains(node->getUUID());
|
||||
},
|
||||
[&](const SharedNodePointer& node) {
|
||||
auto packetList = MessagesClient::encodeMessagesPacket(channel, message, senderID);
|
||||
|
@@ -83,5 +82,6 @@ void MessagesMixer::sendStatsPacket() {
|
|||
|
||||
void MessagesMixer::run() {
|
||||
ThreadedAssignment::commonInit(MESSAGES_MIXER_LOGGING_NAME, NodeType::MessagesMixer);
|
||||
DependencyManager::get<NodeList>()->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
}
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({ NodeType::Agent, NodeType::EntityScriptServer });
|
||||
}
|
||||
|
|
|
@@ -316,8 +316,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
int truePacketsSent = 0;
|
||||
int trueBytesSent = 0;
|
||||
int packetsSentThisInterval = 0;
|
||||
bool isFullScene = ((!viewFrustumChanged) && nodeData->getViewFrustumJustStoppedChanging())
|
||||
|| nodeData->hasLodChanged();
|
||||
bool isFullScene = nodeData->haveJSONParametersChanged() ||
|
||||
(nodeData->getUsesFrustum()
|
||||
&& ((!viewFrustumChanged && nodeData->getViewFrustumJustStoppedChanging()) || nodeData->hasLodChanged()));
|
||||
|
||||
bool somethingToSend = true; // assume we have something
|
||||
|
||||
|
@@ -432,7 +433,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
boundaryLevelAdjust, octreeSizeScale,
|
||||
nodeData->getLastTimeBagEmpty(),
|
||||
isFullScene, &nodeData->stats, _myServer->getJurisdiction(),
|
||||
&nodeData->extraEncodeData);
|
||||
&nodeData->extraEncodeData,
|
||||
nodeData->getUsesFrustum(),
|
||||
nodeData);
|
||||
nodeData->copyCurrentViewFrustum(params.viewFrustum);
|
||||
if (viewFrustumChanged) {
|
||||
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
|
||||
|
|
|
@@ -1136,8 +1136,8 @@ void OctreeServer::domainSettingsRequestComplete() {
|
|||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
// we need to ask the DS about agents so we can ping/reply with them
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({ NodeType::Agent, NodeType::EntityScriptServer });
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListener(getMyQueryMessageType(), this, "handleOctreeQueryPacket");
|
||||
packetReceiver.registerListener(PacketType::OctreeDataNack, this, "handleOctreeDataNackPacket");
|
||||
|
|
372 assignment-client/src/scripts/EntityScriptServer.cpp (new file)
@@ -0,0 +1,372 @@
|
|||
//
|
||||
// EntityScriptServer.cpp
|
||||
// assignment-client/src/scripts
|
||||
//
|
||||
// Created by Clément Brisset on 1/5/17.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "EntityScriptServer.h"
|
||||
|
||||
#include <AudioConstants.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <EntityScriptingInterface.h>
|
||||
#include <MessagesClient.h>
|
||||
#include <plugins/CodecPlugin.h>
|
||||
#include <plugins/PluginManager.h>
|
||||
#include <ResourceManager.h>
|
||||
#include <ScriptCache.h>
|
||||
#include <ScriptEngines.h>
|
||||
#include <SoundCache.h>
|
||||
#include <UUID.h>
|
||||
#include <WebSocketServerClass.h>
|
||||
|
||||
#include "ClientServerUtils.h"
|
||||
#include "../entities/AssignmentParentFinder.h"
|
||||
|
||||
int EntityScriptServer::_entitiesScriptEngineCount = 0;
|
||||
|
||||
EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssignment(message) {
|
||||
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
|
||||
|
||||
ResourceManager::init();
|
||||
|
||||
DependencyManager::registerInheritance<SpatialParentFinder, AssignmentParentFinder>();
|
||||
|
||||
DependencyManager::set<ResourceCacheSharedItems>();
|
||||
DependencyManager::set<SoundCache>();
|
||||
DependencyManager::set<AudioInjectorManager>();
|
||||
|
||||
DependencyManager::set<ScriptCache>();
|
||||
DependencyManager::set<ScriptEngines>(ScriptEngine::ENTITY_SERVER_SCRIPT);
|
||||
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListenerForTypes({ PacketType::OctreeStats, PacketType::EntityData, PacketType::EntityErase },
|
||||
this, "handleOctreePacket");
|
||||
packetReceiver.registerListener(PacketType::Jurisdiction, this, "handleJurisdictionPacket");
|
||||
packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");
|
||||
|
||||
auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
|
||||
packetReceiver.registerListener(PacketType::BulkAvatarData, avatarHashMap.data(), "processAvatarDataPacket");
|
||||
packetReceiver.registerListener(PacketType::KillAvatar, avatarHashMap.data(), "processKillAvatar");
|
||||
packetReceiver.registerListener(PacketType::AvatarIdentity, avatarHashMap.data(), "processAvatarIdentityPacket");
|
||||
|
||||
packetReceiver.registerListener(PacketType::ReloadEntityServerScript, this, "handleReloadEntityServerScriptPacket");
|
||||
packetReceiver.registerListener(PacketType::EntityScriptGetStatus, this, "handleEntityScriptGetStatusPacket");
|
||||
}
|
||||
|
||||
static const QString ENTITY_SCRIPT_SERVER_LOGGING_NAME = "entity-script-server";
|
||||
|
||||
void EntityScriptServer::handleReloadEntityServerScriptPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
// These are temporary checks until we can ensure that nodes eventually disconnect if the Domain Server stops telling them
|
||||
// about each other.
|
||||
if (senderNode->getCanRez() || senderNode->getCanRezTmp()) {
|
||||
auto entityID = QUuid::fromRfc4122(message->read(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
if (_entityViewer.getTree() && !_shuttingDown) {
|
||||
qDebug() << "Reloading: " << entityID;
|
||||
_entitiesScriptEngine->unloadEntityScript(entityID);
|
||||
checkAndCallPreload(entityID, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::handleEntityScriptGetStatusPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
// These are temporary checks until we can ensure that nodes eventually disconnect if the Domain Server stops telling them
|
||||
// about each other.
|
||||
if (senderNode->getCanRez() || senderNode->getCanRezTmp()) {
|
||||
MessageID messageID;
|
||||
message->readPrimitive(&messageID);
|
||||
auto entityID = QUuid::fromRfc4122(message->read(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
auto replyPacketList = NLPacketList::create(PacketType::EntityScriptGetStatusReply, QByteArray(), true, true);
|
||||
replyPacketList->writePrimitive(messageID);
|
||||
|
||||
EntityScriptDetails details;
|
||||
if (_entitiesScriptEngine->getEntityScriptDetails(entityID, details)) {
|
||||
replyPacketList->writePrimitive(true);
|
||||
replyPacketList->writePrimitive(details.status);
|
||||
replyPacketList->writeString(details.errorInfo);
|
||||
} else {
|
||||
replyPacketList->writePrimitive(false);
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->sendPacketList(std::move(replyPacketList), *senderNode);
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::run() {
|
||||
// make sure we request our script once the agent connects to the domain
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
ThreadedAssignment::commonInit(ENTITY_SCRIPT_SERVER_LOGGING_NAME, NodeType::EntityScriptServer);
|
||||
|
||||
// Setup MessagesClient
|
||||
auto messagesClient = DependencyManager::set<MessagesClient>();
|
||||
QThread* messagesThread = new QThread;
|
||||
messagesThread->setObjectName("Messages Client Thread");
|
||||
messagesClient->moveToThread(messagesThread);
|
||||
connect(messagesThread, &QThread::started, messagesClient.data(), &MessagesClient::init);
|
||||
messagesThread->start();
|
||||
|
||||
// make sure we hear about connected nodes so we can grab an ATP script if a request is pending
|
||||
connect(nodeList.data(), &LimitedNodeList::nodeActivated, this, &EntityScriptServer::nodeActivated);
|
||||
connect(nodeList.data(), &LimitedNodeList::nodeKilled, this, &EntityScriptServer::nodeKilled);
|
||||
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({
|
||||
NodeType::Agent, NodeType::AudioMixer, NodeType::AvatarMixer,
|
||||
NodeType::EntityServer, NodeType::MessagesMixer, NodeType::AssetServer
|
||||
});
|
||||
|
||||
// Setup Script Engine
|
||||
resetEntitiesScriptEngine();
|
||||
|
||||
// we need to make sure that init has been called for our EntityScriptingInterface
|
||||
// so that it actually has a jurisdiction listener when we ask it for it next
|
||||
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
|
||||
entityScriptingInterface->init();
|
||||
_entityViewer.setJurisdictionListener(entityScriptingInterface->getJurisdictionListener());
|
||||
|
||||
_entityViewer.init();
|
||||
|
||||
// setup the JSON filter that asks for entities with a non-default serverScripts property
|
||||
QJsonObject queryJSONParameters;
|
||||
static const QString SERVER_SCRIPTS_PROPERTY = "serverScripts";
|
||||
queryJSONParameters[SERVER_SCRIPTS_PROPERTY] = EntityQueryFilterSymbol::NonDefault;
|
||||
|
||||
// setup the JSON parameters so that OctreeQuery does not use a frustum and uses our JSON filter
|
||||
_entityViewer.getOctreeQuery().setUsesFrustum(false);
|
||||
_entityViewer.getOctreeQuery().setJSONParameters(queryJSONParameters);
|
||||
|
||||
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
|
||||
|
||||
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
|
||||
|
||||
|
||||
auto tree = _entityViewer.getTree().get();
|
||||
connect(tree, &EntityTree::deletingEntity, this, &EntityScriptServer::deletingEntity, Qt::QueuedConnection);
|
||||
connect(tree, &EntityTree::addingEntity, this, &EntityScriptServer::addingEntity, Qt::QueuedConnection);
|
||||
connect(tree, &EntityTree::entityServerScriptChanging, this, &EntityScriptServer::entityServerScriptChanging, Qt::QueuedConnection);
|
||||
}
|
||||
|
||||
void EntityScriptServer::nodeActivated(SharedNodePointer activatedNode) {
|
||||
if (activatedNode->getType() == NodeType::AudioMixer) {
|
||||
negotiateAudioFormat();
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::negotiateAudioFormat() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto negotiateFormatPacket = NLPacket::create(PacketType::NegotiateAudioFormat);
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
quint8 numberOfCodecs = (quint8)codecPlugins.size();
|
||||
negotiateFormatPacket->writePrimitive(numberOfCodecs);
|
||||
for (auto& plugin : codecPlugins) {
|
||||
auto codecName = plugin->getName();
|
||||
negotiateFormatPacket->writeString(codecName);
|
||||
}
|
||||
|
||||
// grab our audio mixer from the NodeList, if it exists
|
||||
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||
|
||||
if (audioMixer) {
|
||||
// send off this mute packet
|
||||
nodeList->sendPacket(std::move(negotiateFormatPacket), *audioMixer);
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message) {
|
||||
QString selectedCodecName = message->readString();
|
||||
selectAudioFormat(selectedCodecName);
|
||||
}
|
||||
|
||||
void EntityScriptServer::selectAudioFormat(const QString& selectedCodecName) {
|
||||
_selectedCodecName = selectedCodecName;
|
||||
|
||||
qDebug() << "Selected Codec:" << _selectedCodecName;
|
||||
|
||||
// release any old codec encoder/decoder first...
|
||||
if (_codec && _encoder) {
|
||||
_codec->releaseEncoder(_encoder);
|
||||
_encoder = nullptr;
|
||||
_codec = nullptr;
|
||||
}
|
||||
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
for (auto& plugin : codecPlugins) {
|
||||
if (_selectedCodecName == plugin->getName()) {
|
||||
_codec = plugin;
|
||||
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
|
||||
qDebug() << "Selected Codec Plugin:" << _codec.get();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::resetEntitiesScriptEngine() {
|
||||
auto engineName = QString("Entities %1").arg(++_entitiesScriptEngineCount);
|
||||
auto newEngine = QSharedPointer<ScriptEngine>(new ScriptEngine(ScriptEngine::ENTITY_SERVER_SCRIPT, NO_SCRIPT, engineName));
|
||||
|
||||
auto webSocketServerConstructorValue = newEngine->newFunction(WebSocketServerClass::constructor);
|
||||
newEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
|
||||
|
||||
newEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());
|
||||
|
||||
// connect this script engines printedMessage signal to the global ScriptEngines these various messages
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>().data();
|
||||
connect(newEngine.data(), &ScriptEngine::printedMessage, scriptEngines, &ScriptEngines::onPrintedMessage);
|
||||
connect(newEngine.data(), &ScriptEngine::errorMessage, scriptEngines, &ScriptEngines::onErrorMessage);
|
||||
connect(newEngine.data(), &ScriptEngine::warningMessage, scriptEngines, &ScriptEngines::onWarningMessage);
|
||||
connect(newEngine.data(), &ScriptEngine::infoMessage, scriptEngines, &ScriptEngines::onInfoMessage);
|
||||
|
||||
connect(newEngine.data(), &ScriptEngine::update, this, [this] {
|
||||
_entityViewer.queryOctree();
|
||||
});
|
||||
|
||||
|
||||
newEngine->runInThread();
|
||||
DependencyManager::get<EntityScriptingInterface>()->setEntitiesScriptEngine(newEngine.data());
|
||||
|
||||
_entitiesScriptEngine.swap(newEngine);
|
||||
}
|
||||
|
||||
|
||||
void EntityScriptServer::clear() {
|
||||
// unload and stop the engine
|
||||
if (_entitiesScriptEngine) {
|
||||
// do this here (instead of in deleter) to avoid marshalling unload signals back to this thread
|
||||
_entitiesScriptEngine->unloadAllEntityScripts();
|
||||
_entitiesScriptEngine->stop();
|
||||
}
|
||||
|
||||
// reset the engine
|
||||
if (!_shuttingDown) {
|
||||
resetEntitiesScriptEngine();
|
||||
}
|
||||
|
||||
_entityViewer.clear();
|
||||
}
|
||||
|
||||
void EntityScriptServer::shutdownScriptEngine() {
|
||||
if (_entitiesScriptEngine) {
|
||||
_entitiesScriptEngine->disconnectNonEssentialSignals(); // disconnect all slots/signals from the script engine, except essential
|
||||
}
|
||||
_shuttingDown = true;
|
||||
|
||||
clear(); // always clear() on shutdown
|
||||
}
|
||||
|
||||
void EntityScriptServer::addingEntity(const EntityItemID& entityID) {
|
||||
checkAndCallPreload(entityID);
|
||||
}
|
||||
|
||||
void EntityScriptServer::deletingEntity(const EntityItemID& entityID) {
|
||||
if (_entityViewer.getTree() && !_shuttingDown && _entitiesScriptEngine) {
|
||||
_entitiesScriptEngine->unloadEntityScript(entityID);
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::entityServerScriptChanging(const EntityItemID& entityID, const bool reload) {
|
||||
if (_entityViewer.getTree() && !_shuttingDown) {
|
||||
_entitiesScriptEngine->unloadEntityScript(entityID);
|
||||
checkAndCallPreload(entityID, reload);
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::checkAndCallPreload(const EntityItemID& entityID, const bool reload) {
|
||||
if (_entityViewer.getTree() && !_shuttingDown && _entitiesScriptEngine) {
|
||||
|
||||
EntityItemPointer entity = _entityViewer.getTree()->findEntityByEntityItemID(entityID);
|
||||
EntityScriptDetails details;
|
||||
bool notRunning = !_entitiesScriptEngine->getEntityScriptDetails(entityID, details);
|
||||
if (entity && (reload || notRunning || details.scriptText != entity->getServerScripts())) {
|
||||
QString scriptUrl = entity->getServerScripts();
|
||||
if (!scriptUrl.isEmpty()) {
|
||||
scriptUrl = ResourceManager::normalizeURL(scriptUrl);
|
||||
qDebug() << "Loading entity server script" << scriptUrl << "for" << entityID;
|
||||
ScriptEngine::loadEntityScript(_entitiesScriptEngine, entityID, scriptUrl, reload);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::nodeKilled(SharedNodePointer killedNode) {
|
||||
if (!_shuttingDown && killedNode->getType() == NodeType::EntityServer) {
|
||||
if (_entitiesScriptEngine) {
|
||||
_entitiesScriptEngine->unloadAllEntityScripts();
|
||||
_entitiesScriptEngine->stop();
|
||||
}
|
||||
|
||||
resetEntitiesScriptEngine();
|
||||
|
||||
_entityViewer.clear();
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::sendStatsPacket() {
|
||||
|
||||
}
|
||||
|
||||
void EntityScriptServer::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
auto packetType = message->getType();
|
||||
|
||||
if (packetType == PacketType::OctreeStats) {
|
||||
|
||||
int statsMessageLength = OctreeHeadlessViewer::parseOctreeStats(message, senderNode);
|
||||
if (message->getSize() > statsMessageLength) {
|
||||
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
|
||||
int piggyBackedSizeWithHeader = message->getSize() - statsMessageLength;
|
||||
|
||||
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
|
||||
memcpy(buffer.get(), message->getRawMessage() + statsMessageLength, piggyBackedSizeWithHeader);
|
||||
|
||||
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggyBackedSizeWithHeader, message->getSenderSockAddr());
|
||||
message = QSharedPointer<ReceivedMessage>::create(*newPacket);
|
||||
} else {
|
||||
return; // bail since no piggyback data
|
||||
}
|
||||
|
||||
packetType = message->getType();
|
||||
} // fall through to piggyback message
|
||||
|
||||
if (packetType == PacketType::EntityData) {
|
||||
_entityViewer.processDatagram(*message, senderNode);
|
||||
} else if (packetType == PacketType::EntityErase) {
|
||||
_entityViewer.processEraseMessage(*message, senderNode);
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
NodeType_t nodeType;
|
||||
message->peekPrimitive(&nodeType);
|
||||
|
||||
// PacketType_JURISDICTION, first byte is the node type...
|
||||
if (nodeType == NodeType::EntityServer) {
|
||||
DependencyManager::get<EntityScriptingInterface>()->getJurisdictionListener()->
|
||||
queueReceivedPacket(message, senderNode);
|
||||
}
|
||||
}
|
||||
|
||||
void EntityScriptServer::aboutToFinish() {
|
||||
shutdownScriptEngine();
|
||||
|
||||
// our entity tree is going to go away so tell that to the EntityScriptingInterface
|
||||
DependencyManager::get<EntityScriptingInterface>()->setEntityTree(nullptr);
|
||||
|
||||
ResourceManager::cleanup();
|
||||
|
||||
// cleanup the AudioInjectorManager (and any still running injectors)
|
||||
DependencyManager::destroy<AudioInjectorManager>();
|
||||
DependencyManager::destroy<ScriptEngines>();
|
||||
|
||||
// cleanup codec & encoder
|
||||
if (_codec && _encoder) {
|
||||
_codec->releaseEncoder(_encoder);
|
||||
_encoder = nullptr;
|
||||
}
|
||||
}
|
70 assignment-client/src/scripts/EntityScriptServer.h (new file)
@@ -0,0 +1,70 @@
|
|||
//
|
||||
// EntityScriptServer.h
|
||||
// assignment-client/src/scripts
|
||||
//
|
||||
// Created by Clément Brisset on 1/5/17.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_EntityScriptServer_h
|
||||
#define hifi_EntityScriptServer_h
|
||||
|
||||
#include <QtCore/QObject>
|
||||
|
||||
#include <EntityEditPacketSender.h>
|
||||
#include <EntityTreeHeadlessViewer.h>
|
||||
#include <plugins/CodecPlugin.h>
|
||||
#include <ScriptEngine.h>
|
||||
#include <ThreadedAssignment.h>
|
||||
|
||||
class EntityScriptServer : public ThreadedAssignment {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
EntityScriptServer(ReceivedMessage& message);
|
||||
|
||||
virtual void aboutToFinish() override;
|
||||
|
||||
public slots:
|
||||
void run() override;
|
||||
void nodeActivated(SharedNodePointer activatedNode);
|
||||
void nodeKilled(SharedNodePointer killedNode);
|
||||
void sendStatsPacket() override;
|
||||
|
||||
private slots:
|
||||
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
|
||||
void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
|
||||
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
|
||||
|
||||
void handleReloadEntityServerScriptPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
|
||||
void handleEntityScriptGetStatusPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
|
||||
|
||||
private:
|
||||
void negotiateAudioFormat();
|
||||
void selectAudioFormat(const QString& selectedCodecName);
|
||||
|
||||
void resetEntitiesScriptEngine();
|
||||
void clear();
|
||||
void shutdownScriptEngine();
|
||||
|
||||
void addingEntity(const EntityItemID& entityID);
|
||||
void deletingEntity(const EntityItemID& entityID);
|
||||
void entityServerScriptChanging(const EntityItemID& entityID, const bool reload);
|
||||
void checkAndCallPreload(const EntityItemID& entityID, const bool reload = false);
|
||||
|
||||
bool _shuttingDown { false };
|
||||
|
||||
static int _entitiesScriptEngineCount;
|
||||
QSharedPointer<ScriptEngine> _entitiesScriptEngine;
|
||||
EntityEditPacketSender _entityEditSender;
|
||||
EntityTreeHeadlessViewer _entityViewer;
|
||||
|
||||
QString _selectedCodecName;
|
||||
CodecPluginPointer _codec;
|
||||
Encoder* _encoder { nullptr };
|
||||
};
|
||||
|
||||
#endif // hifi_EntityScriptServer_h
|
4 cmake/externals/quazip/CMakeLists.txt (vendored)
@@ -38,10 +38,10 @@ set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/lib CACHE FILEPATH "Location
|
|||
|
||||
if (APPLE)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip5.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5d.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
|
||||
elseif (WIN32)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/quazip5.lib CACHE FILEPATH "Location of QuaZip release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/quazip5.lib CACHE FILEPATH "Location of QuaZip release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/quazip5d.lib CACHE FILEPATH "Location of QuaZip release library")
|
||||
else ()
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip5.so CACHE FILEPATH "Location of QuaZip release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5.so CACHE FILEPATH "Location of QuaZip release library")
|
||||
|
|
4 cmake/externals/wasapi/CMakeLists.txt (vendored)
@@ -6,8 +6,8 @@ if (WIN32)
|
|||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi5.zip
|
||||
URL_MD5 0530753e855ffc00232cc969bf1c84a8
|
||||
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi6.zip
|
||||
URL_MD5 fcac808c1ba0b0f5b44ea06e2612ebab
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
|
|
|
@@ -1089,9 +1089,9 @@
|
|||
{
|
||||
"name": "noise_muting_threshold",
|
||||
"label": "Noise Muting Threshold",
|
||||
"help": "Loudness value for noise background between 0 and 1.0 (0: mute everyone, 1.0: never mute)",
|
||||
"placeholder": "0.003",
|
||||
"default": "0.003",
|
||||
"help": "Loudness value for noise background between 0 and 1.0 (0: mute everyone, 1.0: never mute). 0.003 is a typical setting to mute loud people.",
|
||||
"placeholder": "1.0",
|
||||
"default": "1.0",
|
||||
"advanced": false
|
||||
},
|
||||
{
|
||||
|
@@ -1285,7 +1285,7 @@
|
|||
{
|
||||
"name": "entityScriptSourceWhitelist",
|
||||
"label": "Entity Scripts Allowed from:",
|
||||
"help": "The domains that entity scripts are allowed from. A comma separated list of domains that entity scripts are allowed from, if someone attempts to create and entity or edit an entity to have a different domain, it will be rejected. If left blank, any domain is allowed.",
|
||||
"help": "Comma separated list of URLs (with optional paths) that entity scripts are allowed from. If someone attempts to create and entity or edit an entity to have a different domain, it will be rejected. If left blank, any domain is allowed.",
|
||||
"placeholder": "",
|
||||
"default": "",
|
||||
"advanced": true
|
||||
|
|
|
@@ -12,7 +12,7 @@
|
|||
<div id="setup-sidebar" class="hidden-xs" data-spy="affix" data-offset-top="55" data-clampedwidth="#setup-sidebar-col">
|
||||
<script id="list-group-template" type="text/template">
|
||||
<% _.each(descriptions, function(group){ %>
|
||||
<% panelID = group.name ? group.name : group.label %>
|
||||
<% panelID = group.name ? group.name : group.html_id %>
|
||||
<li>
|
||||
<a href="#<%- panelID %>" class="list-group-item">
|
||||
<span class="badge"></span>
|
||||
|
|
|
@@ -46,10 +46,9 @@ QUuid DomainGatekeeper::assignmentUUIDForPendingAssignment(const QUuid& tempUUID
|
|||
}
|
||||
}
|
||||
|
||||
const NodeSet STATICALLY_ASSIGNED_NODES = NodeSet() << NodeType::AudioMixer
|
||||
<< NodeType::AvatarMixer << NodeType::EntityServer
|
||||
<< NodeType::AssetServer
|
||||
<< NodeType::MessagesMixer;
|
||||
const NodeSet STATICALLY_ASSIGNED_NODES = NodeSet() << NodeType::AudioMixer << NodeType::AvatarMixer
|
||||
<< NodeType::EntityServer << NodeType::AssetServer << NodeType::MessagesMixer
|
||||
<< NodeType::EntityScriptServer;
|
||||
|
||||
void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<ReceivedMessage> message) {
|
||||
if (message->getSize() == 0) {
|
||||
|
@@ -72,7 +71,7 @@ void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<ReceivedMessag
|
|||
}
|
||||
|
||||
static const NodeSet VALID_NODE_TYPES {
|
||||
NodeType::AudioMixer, NodeType::AvatarMixer, NodeType::AssetServer, NodeType::EntityServer, NodeType::Agent, NodeType::MessagesMixer
|
||||
NodeType::AudioMixer, NodeType::AvatarMixer, NodeType::AssetServer, NodeType::EntityServer, NodeType::Agent, NodeType::MessagesMixer, NodeType::EntityScriptServer
|
||||
};
|
||||
|
||||
if (!VALID_NODE_TYPES.contains(nodeConnection.nodeType)) {
|
||||
|
@@ -107,7 +106,7 @@ void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<ReceivedMessag
|
|||
|
||||
if (node) {
|
||||
// set the sending sock addr and node interest set on this node
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
nodeData->setSendingSockAddr(message->getSenderSockAddr());
|
||||
|
||||
// guard against patched agents asking to hear about other agents
|
||||
|
@@ -128,12 +127,12 @@ void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<ReceivedMessag
|
|||
emit connectedNode(node);
|
||||
} else {
|
||||
qDebug() << "Refusing connection from node at" << message->getSenderSockAddr()
|
||||
<< "with hardware address" << nodeConnection.hardwareAddress
|
||||
<< "with hardware address" << nodeConnection.hardwareAddress
|
||||
<< "and machine fingerprint" << nodeConnection.machineFingerprint;
|
||||
}
|
||||
}
|
||||
|
||||
NodePermissions DomainGatekeeper::setPermissionsForUser(bool isLocalUser, QString verifiedUsername, const QHostAddress& senderAddress,
|
||||
NodePermissions DomainGatekeeper::setPermissionsForUser(bool isLocalUser, QString verifiedUsername, const QHostAddress& senderAddress,
|
||||
const QString& hardwareAddress, const QUuid& machineFingerprint) {
|
||||
NodePermissions userPerms;
|
||||
|
||||
|
@@ -283,7 +282,7 @@ void DomainGatekeeper::updateNodePermissions() {
|
|||
QString hardwareAddress;
|
||||
QUuid machineFingerprint;
|
||||
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
if (nodeData) {
|
||||
hardwareAddress = nodeData->getHardwareAddress();
|
||||
machineFingerprint = nodeData->getMachineFingerprint();
|
||||
|
@@ -336,7 +335,7 @@ SharedNodePointer DomainGatekeeper::processAssignmentConnectRequest(const NodeCo
|
|||
// add the new node
|
||||
SharedNodePointer newNode = addVerifiedNodeFromConnectRequest(nodeConnection);
|
||||
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(newNode->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(newNode->getLinkedData());
|
||||
|
||||
// set assignment related data on the linked data for this node
|
||||
nodeData->setAssignmentUUID(matchingQueuedAssignment->getUUID());
|
||||
|
@@ -458,7 +457,7 @@ SharedNodePointer DomainGatekeeper::processAgentConnectRequest(const NodeConnect
|
|||
newNode->setPermissions(userPerms);
|
||||
|
||||
// grab the linked data for our new node so we can set the username
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(newNode->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(newNode->getLinkedData());
|
||||
|
||||
// if we have a username from the connect request, set it on the DomainServerNodeData
|
||||
nodeData->setUsername(username);
|
||||
|
|
|
@@ -107,7 +107,7 @@ DomainServer::DomainServer(int argc, char* argv[]) :
|
|||
|
||||
qRegisterMetaType<DomainServerWebSessionData>("DomainServerWebSessionData");
|
||||
qRegisterMetaTypeStreamOperators<DomainServerWebSessionData>("DomainServerWebSessionData");
|
||||
|
||||
|
||||
// make sure we hear about newly connected nodes from our gatekeeper
|
||||
connect(&_gatekeeper, &DomainGatekeeper::connectedNode, this, &DomainServer::handleConnectedNode);
|
||||
|
||||
|
@@ -281,7 +281,7 @@ bool DomainServer::optionallyReadX509KeyAndCertificate() {
|
|||
QString keyPassphraseString = QProcessEnvironment::systemEnvironment().value(X509_KEY_PASSPHRASE_ENV);
|
||||
|
||||
qDebug() << "Reading certificate file at" << certPath << "for HTTPS.";
|
||||
qDebug() << "Reading key file at" << keyPath << "for HTTPS.";
|
||||
qDebug() << "Reading key file at" << keyPath << "for HTTPS.";
|
||||
|
||||
QFile certFile(certPath);
|
||||
certFile.open(QIODevice::ReadOnly);
|
||||
|
@@ -528,12 +528,12 @@ void DomainServer::setupNodeListAndAssignments() {
|
|||
packetReceiver.registerListener(PacketType::DomainServerPathQuery, this, "processPathQueryPacket");
|
||||
packetReceiver.registerListener(PacketType::NodeJsonStats, this, "processNodeJSONStatsPacket");
|
||||
packetReceiver.registerListener(PacketType::DomainDisconnectRequest, this, "processNodeDisconnectRequestPacket");
|
||||
|
||||
|
||||
// NodeList won't be available to the settings manager when it is created, so call registerListener here
|
||||
packetReceiver.registerListener(PacketType::DomainSettingsRequest, &_settingsManager, "processSettingsRequestPacket");
|
||||
packetReceiver.registerListener(PacketType::NodeKickRequest, &_settingsManager, "processNodeKickRequestPacket");
|
||||
packetReceiver.registerListener(PacketType::UsernameFromIDRequest, &_settingsManager, "processUsernameFromIDRequestPacket");
|
||||
|
||||
|
||||
// register the gatekeeper for the packets it needs to receive
|
||||
packetReceiver.registerListener(PacketType::DomainConnectRequest, &_gatekeeper, "processConnectRequestPacket");
|
||||
packetReceiver.registerListener(PacketType::ICEPing, &_gatekeeper, "processICEPingPacket");
|
||||
|
@@ -542,7 +542,7 @@ void DomainServer::setupNodeListAndAssignments() {
|
|||
|
||||
packetReceiver.registerListener(PacketType::ICEServerHeartbeatDenied, this, "processICEServerHeartbeatDenialPacket");
|
||||
packetReceiver.registerListener(PacketType::ICEServerHeartbeatACK, this, "processICEServerHeartbeatACK");
|
||||
|
||||
|
||||
// add whatever static assignments that have been parsed to the queue
|
||||
addStaticAssignmentsToQueue();
|
||||
|
||||
|
@@ -808,21 +808,19 @@ void DomainServer::populateDefaultStaticAssignmentsExcludingTypes(const QSet<Ass
|
|||
for (Assignment::Type defaultedType = Assignment::AudioMixerType;
|
||||
defaultedType != Assignment::AllTypes;
|
||||
defaultedType = static_cast<Assignment::Type>(static_cast<int>(defaultedType) + 1)) {
|
||||
if (!excludedTypes.contains(defaultedType)
|
||||
&& defaultedType != Assignment::UNUSED_1
|
||||
&& defaultedType != Assignment::AgentType) {
|
||||
|
||||
if (!excludedTypes.contains(defaultedType) && defaultedType != Assignment::AgentType) {
|
||||
|
||||
if (defaultedType == Assignment::AssetServerType) {
|
||||
// Make sure the asset-server is enabled before adding it here.
|
||||
// Initially we do not assign it by default so we can test it in HF domains first
|
||||
static const QString ASSET_SERVER_ENABLED_KEYPATH = "asset_server.enabled";
|
||||
|
||||
|
||||
if (!_settingsManager.valueOrDefaultValueForKeyPath(ASSET_SERVER_ENABLED_KEYPATH).toBool()) {
|
||||
// skip to the next iteration if asset-server isn't enabled
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// type has not been set from a command line or config file config, use the default
|
||||
// by clearing whatever exists and writing a single default assignment with no payload
|
||||
Assignment* newAssignment = new Assignment(Assignment::CreateCommand, (Assignment::Type) defaultedType);
|
||||
|
@@ -839,9 +837,9 @@ void DomainServer::processListRequestPacket(QSharedPointer<ReceivedMessage> mess
|
|||
// update this node's sockets in case they have changed
|
||||
sendingNode->setPublicSocket(nodeRequestData.publicSockAddr);
|
||||
sendingNode->setLocalSocket(nodeRequestData.localSockAddr);
|
||||
|
||||
|
||||
// update the NodeInterestSet in case there have been any changes
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(sendingNode->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(sendingNode->getLinkedData());
|
||||
|
||||
// guard against patched agents asking to hear about other agents
|
||||
auto safeInterestSet = nodeRequestData.interestList.toSet();
|
||||
|
@@ -857,6 +855,44 @@ void DomainServer::processListRequestPacket(QSharedPointer<ReceivedMessage> mess
|
|||
sendDomainListToNode(sendingNode, message->getSenderSockAddr());
|
||||
}
|
||||
|
||||
bool DomainServer::isInInterestSet(const SharedNodePointer& nodeA, const SharedNodePointer& nodeB) {
|
||||
auto nodeAData = static_cast<DomainServerNodeData*>(nodeA->getLinkedData());
|
||||
auto nodeBData = static_cast<DomainServerNodeData*>(nodeB->getLinkedData());
|
||||
|
||||
// if we have no linked data for node A then B can't possibly be in the interest set
|
||||
if (!nodeAData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// first check if the general interest set A contains the type for B
|
||||
if (nodeAData->getNodeInterestSet().contains(nodeB->getType())) {
|
||||
// given that there is a match in the general interest set, do any special checks
|
||||
|
||||
// (1/19/17) Agents only need to connect to Entity Script Servers to perform administrative tasks
|
||||
// related to entity server scripts. Only agents with rez permissions should be doing that, so
|
||||
// if the agent does not have those permissions, we do not want them and the server to incur the
|
||||
// overhead of connecting to one another. Additionally we exclude agents that do not care about the
|
||||
// Entity Script Server and won't attempt to connect to it.
|
||||
|
||||
bool isAgentWithoutRights = nodeA->getType() == NodeType::Agent
|
||||
&& nodeB->getType() == NodeType::EntityScriptServer
|
||||
&& !nodeA->getCanRez() && !nodeA->getCanRezTmp();
|
||||
|
||||
if (isAgentWithoutRights) {
|
||||
return false;
|
||||
}
|
||||
|
||||
bool isScriptServerForIneffectiveAgent =
|
||||
(nodeA->getType() == NodeType::EntityScriptServer && nodeB->getType() == NodeType::Agent)
|
||||
&& ((nodeBData && !nodeBData->getNodeInterestSet().contains(NodeType::EntityScriptServer))
|
||||
|| (!nodeB->getCanRez() && !nodeB->getCanRezTmp()));
|
||||
|
||||
return !isScriptServerForIneffectiveAgent;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
unsigned int DomainServer::countConnectedUsers() {
|
||||
unsigned int result = 0;
|
||||
auto nodeList = DependencyManager::get<LimitedNodeList>();
|
||||
|
@@ -928,14 +964,14 @@ void DomainServer::handleConnectedNode(SharedNodePointer newNode) {
|
|||
|
||||
void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const HifiSockAddr &senderSockAddr) {
|
||||
const int NUM_DOMAIN_LIST_EXTENDED_HEADER_BYTES = NUM_BYTES_RFC4122_UUID + NUM_BYTES_RFC4122_UUID + 2;
|
||||
|
||||
|
||||
// setup the extended header for the domain list packets
|
||||
// this data is at the beginning of each of the domain list packets
|
||||
QByteArray extendedHeader(NUM_DOMAIN_LIST_EXTENDED_HEADER_BYTES, 0);
|
||||
QDataStream extendedHeaderStream(&extendedHeader, QIODevice::WriteOnly);
|
||||
|
||||
|
||||
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
|
||||
|
||||
|
||||
extendedHeaderStream << limitedNodeList->getSessionUUID();
|
||||
extendedHeaderStream << node->getUUID();
|
||||
extendedHeaderStream << node->getPermissions();
|
||||
|
@@ -945,7 +981,7 @@ void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const Hif
|
|||
// always send the node their own UUID back
|
||||
QDataStream domainListStream(domainListPackets.get());
|
||||
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
|
||||
// store the nodeInterestSet on this DomainServerNodeData, in case it has changed
|
||||
auto& nodeInterestSet = nodeData->getNodeInterestSet();
|
||||
|
@@ -955,10 +991,8 @@ void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const Hif
|
|||
// DTLSServerSession* dtlsSession = _isUsingDTLS ? _dtlsSessions[senderSockAddr] : NULL;
|
||||
if (nodeData->isAuthenticated()) {
|
||||
// if this authenticated node has any interest types, send back those nodes as well
|
||||
limitedNodeList->eachNode([&](const SharedNodePointer& otherNode){
|
||||
if (otherNode->getUUID() != node->getUUID()
|
||||
&& nodeInterestSet.contains(otherNode->getType())) {
|
||||
|
||||
limitedNodeList->eachNode([&](const SharedNodePointer& otherNode) {
|
||||
if (otherNode->getUUID() != node->getUUID() && isInInterestSet(node, otherNode)) {
|
||||
// since we're about to add a node to the packet we start a segment
|
||||
domainListPackets->startSegment();
|
||||
|
||||
|
@@ -974,7 +1008,7 @@ void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const Hif
|
|||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// send an empty list to the node, in case there were no other nodes
|
||||
domainListPackets->closeCurrentPacket(true);
|
||||
|
||||
|
@@ -983,8 +1017,8 @@ void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const Hif
|
|||
}
|
||||
|
||||
QUuid DomainServer::connectionSecretForNodes(const SharedNodePointer& nodeA, const SharedNodePointer& nodeB) {
|
||||
DomainServerNodeData* nodeAData = dynamic_cast<DomainServerNodeData*>(nodeA->getLinkedData());
|
||||
DomainServerNodeData* nodeBData = dynamic_cast<DomainServerNodeData*>(nodeB->getLinkedData());
|
||||
DomainServerNodeData* nodeAData = static_cast<DomainServerNodeData*>(nodeA->getLinkedData());
|
||||
DomainServerNodeData* nodeBData = static_cast<DomainServerNodeData*>(nodeB->getLinkedData());
|
||||
|
||||
if (nodeAData && nodeBData) {
|
||||
QUuid& secretUUID = nodeAData->getSessionSecretHash()[nodeB->getUUID()];
|
||||
|
@@ -994,7 +1028,7 @@ QUuid DomainServer::connectionSecretForNodes(const SharedNodePointer& nodeA, con
|
|||
secretUUID = QUuid::createUuid();
|
||||
|
||||
// set it on the other Node's sessionSecretHash
|
||||
reinterpret_cast<DomainServerNodeData*>(nodeBData)->getSessionSecretHash().insert(nodeA->getUUID(), secretUUID);
|
||||
static_cast<DomainServerNodeData*>(nodeBData)->getSessionSecretHash().insert(nodeA->getUUID(), secretUUID);
|
||||
}
|
||||
|
||||
return secretUUID;
|
||||
|
@@ -1020,8 +1054,7 @@ void DomainServer::broadcastNewNode(const SharedNodePointer& addedNode) {
|
|||
[&](const SharedNodePointer& node)->bool {
|
||||
if (node->getLinkedData() && node->getActiveSocket() && node != addedNode) {
|
||||
// is the added Node in this node's interest list?
|
||||
DomainServerNodeData* nodeData = dynamic_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
return nodeData->getNodeInterestSet().contains(addedNode->getType());
|
||||
return isInInterestSet(node, addedNode);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
@@ -1124,7 +1157,7 @@ void DomainServer::processRequestAssignmentPacket(QSharedPointer<ReceivedMessage
|
|||
void DomainServer::setupPendingAssignmentCredits() {
|
||||
// enumerate the NodeList to find the assigned nodes
|
||||
DependencyManager::get<LimitedNodeList>()->eachNode([&](const SharedNodePointer& node){
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
|
||||
if (!nodeData->getAssignmentUUID().isNull() && !nodeData->getWalletUUID().isNull()) {
|
||||
// check if we have a non-finalized transaction for this node to add this amount to
|
||||
|
@@ -1510,7 +1543,7 @@ void DomainServer::sendHeartbeatToIceServer() {
|
|||
}
|
||||
|
||||
void DomainServer::processNodeJSONStatsPacket(QSharedPointer<ReceivedMessage> packetList, SharedNodePointer sendingNode) {
|
||||
auto nodeData = dynamic_cast<DomainServerNodeData*>(sendingNode->getLinkedData());
|
||||
auto nodeData = static_cast<DomainServerNodeData*>(sendingNode->getLinkedData());
|
||||
if (nodeData) {
|
||||
nodeData->updateJSONStats(packetList->getMessage());
|
||||
}
|
||||
|
@@ -1556,7 +1589,7 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
|
|||
nodeJson[JSON_KEY_UPTIME] = QString::number(double(QDateTime::currentMSecsSinceEpoch() - node->getWakeTimestamp()) / 1000.0);
|
||||
|
||||
// if the node has pool information, add it
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
|
||||
// add the node username, if it exists
|
||||
nodeJson[JSON_KEY_USERNAME] = nodeData->getUsername();
|
||||
|
@@ -1624,23 +1657,23 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
if (connection->requestOperation() == QNetworkAccessManager::GetOperation
|
||||
&& assignmentRegex.indexIn(url.path()) != -1) {
|
||||
QUuid nodeUUID = QUuid(assignmentRegex.cap(1));
|
||||
|
||||
|
||||
auto matchingNode = nodeList->nodeWithUUID(nodeUUID);
|
||||
|
||||
|
||||
// don't handle if we don't have a matching node
|
||||
if (!matchingNode) {
|
||||
return false;
|
||||
}
|
||||
|
||||
auto nodeData = dynamic_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
|
||||
|
||||
auto nodeData = static_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
|
||||
// don't handle if we don't have node data for this node
|
||||
if (!nodeData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
SharedAssignmentPointer matchingAssignment = _allAssignments.value(nodeData->getAssignmentUUID());
|
||||
|
||||
|
||||
// check if we have an assignment that matches this temp UUID, and it is a scripted assignment
|
||||
if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
|
||||
// we have a matching assignment and it is for the right type, have the HTTP manager handle it
|
||||
|
@@ -1655,7 +1688,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// request not handled
|
||||
return false;
|
||||
}
|
||||
|
@@ -1687,7 +1720,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
|
||||
// enumerate the NodeList to find the assigned nodes
|
||||
nodeList->eachNode([this, &assignedNodesJSON](const SharedNodePointer& node){
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
|
||||
|
||||
if (!nodeData->getAssignmentUUID().isNull()) {
|
||||
// add the node using the UUID as the key
|
||||
|
@@ -1775,7 +1808,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
if (matchingNode) {
|
||||
// create a QJsonDocument with the stats QJsonObject
|
||||
QJsonObject statsObject =
|
||||
reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->getStatsJSONObject();
|
||||
static_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->getStatsJSONObject();
|
||||
|
||||
// add the node type to the JSON data for output purposes
|
||||
statsObject["node_type"] = NodeType::getNodeTypeName(matchingNode->getType()).toLower().replace(' ', '-');
|
||||
|
@@ -2247,7 +2280,7 @@ void DomainServer::addStaticAssignmentsToQueue() {
|
|||
// if the domain-server has just restarted,
|
||||
// check if there are static assignments that we need to throw into the assignment queue
|
||||
auto sharedAssignments = _allAssignments.values();
|
||||
|
||||
|
||||
// sort the assignments to put the server/mixer assignments first
|
||||
qSort(sharedAssignments.begin(), sharedAssignments.end(), [](SharedAssignmentPointer a, SharedAssignmentPointer b){
|
||||
if (a->getType() == b->getType()) {
|
||||
|
@@ -2258,9 +2291,9 @@ void DomainServer::addStaticAssignmentsToQueue() {
|
|||
return a->getType() != Assignment::AgentType;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
auto staticAssignment = sharedAssignments.begin();
|
||||
|
||||
|
||||
while (staticAssignment != sharedAssignments.end()) {
|
||||
// add any of the un-matched static assignments to the queue
|
||||
|
||||
|
@@ -2371,7 +2404,6 @@ void DomainServer::processNodeDisconnectRequestPacket(QSharedPointer<ReceivedMes
|
|||
}
|
||||
|
||||
void DomainServer::handleKillNode(SharedNodePointer nodeToKill) {
|
||||
auto nodeType = nodeToKill->getType();
|
||||
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
|
||||
const QUuid& nodeUUID = nodeToKill->getUUID();
|
||||
|
||||
|
@@ -2383,10 +2415,9 @@ void DomainServer::handleKillNode(SharedNodePointer nodeToKill) {
|
|||
removedNodePacket->write(nodeUUID.toRfc4122());
|
||||
|
||||
// broadcast out the DomainServerRemovedNode message
|
||||
limitedNodeList->eachMatchingNode([&nodeType](const SharedNodePointer& otherNode) -> bool {
|
||||
limitedNodeList->eachMatchingNode([this, &nodeToKill](const SharedNodePointer& otherNode) -> bool {
|
||||
// only send the removed node packet to nodes that care about the type of node this was
|
||||
auto nodeLinkedData = dynamic_cast<DomainServerNodeData*>(otherNode->getLinkedData());
|
||||
return (nodeLinkedData != nullptr) && nodeLinkedData->getNodeInterestSet().contains(nodeType);
|
||||
return isInInterestSet(otherNode, nodeToKill);
|
||||
}, [&limitedNodeList](const SharedNodePointer& otherNode){
|
||||
limitedNodeList->sendUnreliablePacket(*removedNodePacket, *otherNode);
|
||||
});
|
||||
|
|
|
@@ -132,6 +132,8 @@ private:
|
|||
|
||||
void sendDomainListToNode(const SharedNodePointer& node, const HifiSockAddr& senderSockAddr);
|
||||
|
||||
bool isInInterestSet(const SharedNodePointer& nodeA, const SharedNodePointer& nodeB);
|
||||
|
||||
QUuid connectionSecretForNodes(const SharedNodePointer& nodeA, const SharedNodePointer& nodeB);
|
||||
void broadcastNewNode(const SharedNodePointer& node);
|
||||
|
||||
|
|
|
@@ -725,7 +725,7 @@ void DomainServerSettingsManager::processNodeKickRequestPacket(QSharedPointer<Re
|
|||
}
|
||||
|
||||
// potentially remove connect permissions for the MAC address and machine fingerprint
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
if (nodeData) {
|
||||
// mac address first
|
||||
NodePermissionsKey macAddressKey(nodeData->getHardwareAddress(), 0);
|
||||
|
@@ -807,7 +807,7 @@ void DomainServerSettingsManager::processUsernameFromIDRequestPacket(QSharedPoin
|
|||
usernameFromIDReplyPacket->writeString(verifiedUsername);
|
||||
|
||||
// now put in the machine fingerprint
|
||||
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
machineFingerprint = nodeData ? nodeData->getMachineFingerprint() : QUuid();
|
||||
usernameFromIDReplyPacket->write(machineFingerprint.toRfc4122());
|
||||
} else {
|
||||
|
|
|
@@ -218,10 +218,10 @@ Rectangle {
|
|||
id: nameCard
|
||||
// Properties
|
||||
displayName: styleData.value
|
||||
userName: model && model.userName
|
||||
audioLevel: model && model.audioLevel
|
||||
userName: model ? model.userName : ""
|
||||
audioLevel: model ? model.audioLevel : 0.0
|
||||
visible: !isCheckBox && !isButton
|
||||
uuid: model && model.sessionId
|
||||
uuid: model ? model.sessionId : ""
|
||||
selected: styleData.selected
|
||||
isAdmin: model && model.admin
|
||||
// Size
|
||||
|
@@ -241,9 +241,9 @@ Rectangle {
|
|||
id: actionCheckBox
|
||||
visible: isCheckBox
|
||||
anchors.centerIn: parent
|
||||
checked: model[styleData.role]
|
||||
checked: model ? model[styleData.role] : false
|
||||
// If this is a "Personal Mute" checkbox, disable the checkbox if the "Ignore" checkbox is checked.
|
||||
enabled: !(styleData.role === "personalMute" && model["ignore"])
|
||||
enabled: !(styleData.role === "personalMute" && (model ? model["ignore"] : true))
|
||||
boxSize: 24
|
||||
onClicked: {
|
||||
var newValue = !model[styleData.role]
|
||||
|
@@ -416,6 +416,22 @@ Rectangle {
|
|||
}
|
||||
}
|
||||
}
|
||||
// Timer used when selecting table rows that aren't yet present in the model
|
||||
// (i.e. when selecting avatars using edit.js or sphere overlays)
|
||||
Timer {
|
||||
property bool selected // Selected or deselected?
|
||||
property int userIndex // The userIndex of the avatar we want to select
|
||||
id: selectionTimer
|
||||
onTriggered: {
|
||||
if (selected) {
|
||||
table.selection.clear(); // for now, no multi-select
|
||||
table.selection.select(userIndex);
|
||||
table.positionViewAtRow(userIndex, ListView.Beginning);
|
||||
} else {
|
||||
table.selection.deselect(userIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function findSessionIndex(sessionId, optionalData) { // no findIndex in .qml
|
||||
var data = optionalData || userModelData, length = data.length;
|
||||
|
@@ -453,19 +469,30 @@ Rectangle {
|
|||
case 'select':
|
||||
var sessionIds = message.params[0];
|
||||
var selected = message.params[1];
|
||||
var alreadyRefreshed = message.params[2];
|
||||
var userIndex = findSessionIndex(sessionIds[0]);
|
||||
if (sessionIds.length > 1) {
|
||||
letterbox("", "", 'Only one user can be selected at a time.');
|
||||
} else if (userIndex < 0) {
|
||||
letterbox("", "", 'The last editor is not among this list of users.');
|
||||
} else {
|
||||
if (selected) {
|
||||
table.selection.clear(); // for now, no multi-select
|
||||
table.selection.select(userIndex);
|
||||
table.positionViewAtRow(userIndex, ListView.Visible);
|
||||
// If we've already refreshed the PAL and the avatar still isn't present in the model...
|
||||
if (alreadyRefreshed === true) {
|
||||
letterbox('', '', 'The last editor of this object is either you or not among this list of users.');
|
||||
} else {
|
||||
table.selection.deselect(userIndex);
|
||||
pal.sendToScript({method: 'refresh', params: message.params});
|
||||
}
|
||||
} else {
|
||||
// If we've already refreshed the PAL and found the avatar in the model
|
||||
if (alreadyRefreshed === true) {
|
||||
// Wait a little bit before trying to actually select the avatar in the table
|
||||
selectionTimer.interval = 250;
|
||||
} else {
|
||||
// If we've found the avatar in the model and didn't need to refresh,
|
||||
// select the avatar in the table immediately
|
||||
selectionTimer.interval = 0;
|
||||
}
|
||||
selectionTimer.selected = selected;
|
||||
selectionTimer.userIndex = userIndex;
|
||||
selectionTimer.start();
|
||||
}
|
||||
break;
|
||||
// Received an "updateUsername()" request from the JS
|
||||
|
|
|
@@ -61,7 +61,7 @@
|
|||
#include <CursorManager.h>
|
||||
#include <DebugDraw.h>
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <display-plugins/DisplayPlugin.h>
|
||||
#include <EntityScriptClient.h>
|
||||
#include <EntityScriptingInterface.h>
|
||||
#include <ErrorDialog.h>
|
||||
#include <FileScriptingInterface.h>
|
||||
|
@@ -173,6 +173,7 @@
|
|||
#include "FrameTimingsScriptingInterface.h"
|
||||
#include <GPUIdent.h>
|
||||
#include <gl/GLHelpers.h>
|
||||
#include <EntityScriptClient.h>
|
||||
|
||||
// On Windows PC, NVidia Optimus laptop, we want to enable NVIDIA GPU
|
||||
// FIXME seems to be broken.
|
||||
|
@@ -458,7 +459,7 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
// Set dependencies
|
||||
DependencyManager::set<AccountManager>(std::bind(&Application::getUserAgent, qApp));
|
||||
DependencyManager::set<StatTracker>();
|
||||
DependencyManager::set<ScriptEngines>();
|
||||
DependencyManager::set<ScriptEngines>(ScriptEngine::CLIENT_SCRIPT);
|
||||
DependencyManager::set<Preferences>();
|
||||
DependencyManager::set<recording::Deck>();
|
||||
DependencyManager::set<recording::Recorder>();
|
||||
|
@@ -514,6 +515,7 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
DependencyManager::set<EntityTreeRenderer>(true, qApp, qApp);
|
||||
DependencyManager::set<CompositorHelper>();
|
||||
DependencyManager::set<OffscreenQmlSurfaceCache>();
|
||||
DependencyManager::set<EntityScriptClient>();
|
||||
return previousSessionCrashed;
|
||||
}
|
||||
|
||||
|
@@ -852,7 +854,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
// tell the NodeList instance who to tell the domain server we care about
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet(NodeSet() << NodeType::AudioMixer << NodeType::AvatarMixer
|
||||
<< NodeType::EntityServer << NodeType::AssetServer << NodeType::MessagesMixer);
|
||||
<< NodeType::EntityServer << NodeType::AssetServer << NodeType::MessagesMixer << NodeType::EntityScriptServer);
|
||||
|
||||
// connect to the packet sent signal of the _entityEditSender
|
||||
connect(&_entityEditSender, &EntityEditPacketSender::packetSent, this, &Application::packetSent);
|
||||
|
@ -5493,6 +5495,9 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Controller", scriptingInterface.data());
|
||||
UserInputMapper::registerControllerTypes(scriptEngine);
|
||||
|
||||
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
|
||||
scriptEngine->registerGlobalObject("Recording", recordingInterface.data());
|
||||
|
||||
// connect this script engines printedMessage signal to the global ScriptEngines these various messages
|
||||
connect(scriptEngine, &ScriptEngine::printedMessage, DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onPrintedMessage);
|
||||
connect(scriptEngine, &ScriptEngine::errorMessage, DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onErrorMessage);
|
||||
|
|
|
@ -85,19 +85,6 @@ namespace render {
|
|||
}
|
||||
}
|
||||
|
||||
static uint64_t timeProcessingJoints = 0;
|
||||
static int32_t numJointsProcessed = 0;
|
||||
|
||||
float Avatar::getNumJointsProcessedPerSecond() {
|
||||
float rate = 0.0f;
|
||||
if (timeProcessingJoints > 0) {
|
||||
rate = (float)(numJointsProcessed * USECS_PER_SECOND) / (float)timeProcessingJoints;
|
||||
}
|
||||
timeProcessingJoints = 0;
|
||||
numJointsProcessed = 0;
|
||||
return rate;
|
||||
}
|
||||
|
||||
Avatar::Avatar(RigPointer rig) :
|
||||
AvatarData(),
|
||||
_skeletonOffset(0.0f),
|
||||
|
@ -127,6 +114,7 @@ Avatar::Avatar(RigPointer rig) :
|
|||
_nameRectGeometryID = geometryCache->allocateID();
|
||||
_leftPointerGeometryID = geometryCache->allocateID();
|
||||
_rightPointerGeometryID = geometryCache->allocateID();
|
||||
_lastRenderUpdateTime = usecTimestampNow();
|
||||
}
|
||||
|
||||
Avatar::~Avatar() {
|
||||
|
@ -187,25 +175,35 @@ AABox Avatar::getBounds() const {
|
|||
}
|
||||
|
||||
void Avatar::animateScaleChanges(float deltaTime) {
|
||||
float currentScale = getUniformScale();
|
||||
auto desiredScale = getDomainLimitedScale();
|
||||
if (currentScale != desiredScale) {
|
||||
if (_isAnimatingScale) {
|
||||
float currentScale = getUniformScale();
|
||||
float desiredScale = getDomainLimitedScale();
|
||||
|
||||
// use exponential decay toward the domain limit clamped scale
|
||||
const float SCALE_ANIMATION_TIMESCALE = 0.5f;
|
||||
float blendFactor = glm::clamp(deltaTime / SCALE_ANIMATION_TIMESCALE, 0.0f, 1.0f);
|
||||
float animatedScale = (1.0f - blendFactor) * currentScale + blendFactor * desiredScale;
|
||||
|
||||
// snap to the end when we get close enough
|
||||
const float MIN_RELATIVE_SCALE_ERROR = 0.03f;
|
||||
if (fabsf(desiredScale - currentScale) / desiredScale < MIN_RELATIVE_SCALE_ERROR) {
|
||||
const float MIN_RELATIVE_ERROR = 0.03f;
|
||||
float relativeError = fabsf(desiredScale - currentScale) / desiredScale;
|
||||
if (relativeError < MIN_RELATIVE_ERROR) {
|
||||
animatedScale = desiredScale;
|
||||
_isAnimatingScale = false;
|
||||
}
|
||||
|
||||
setScale(glm::vec3(animatedScale)); // avatar scale is uniform
|
||||
|
||||
// TODO: rebuilding the shape constantly is somewhat expensive.
|
||||
// We should only rebuild after significant change.
|
||||
rebuildCollisionShape();
|
||||
}
|
||||
}
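
For reference, the blend above is a discrete exponential decay: each frame moves a fixed fraction (deltaTime / SCALE_ANIMATION_TIMESCALE) of the remaining error toward the domain-limited scale, then snaps once the relative error drops under 3%. A minimal standalone sketch of the same update rule follows; every value is made up for illustration and nothing here is engine code.

// Standalone sketch of the exponential-decay scale blend above; all values are illustrative.
#include <algorithm>
#include <cmath>
#include <cstdio>

int main() {
    const float SCALE_ANIMATION_TIMESCALE = 0.5f; // seconds
    const float MIN_RELATIVE_ERROR = 0.03f;       // snap threshold
    const float deltaTime = 1.0f / 60.0f;         // one 60 Hz frame
    const float desiredScale = 2.0f;
    float currentScale = 1.0f;

    for (int frame = 0; frame < 300; ++frame) {
        float blendFactor = std::min(std::max(deltaTime / SCALE_ANIMATION_TIMESCALE, 0.0f), 1.0f);
        currentScale = (1.0f - blendFactor) * currentScale + blendFactor * desiredScale;
        if (std::fabs(desiredScale - currentScale) / desiredScale < MIN_RELATIVE_ERROR) {
            currentScale = desiredScale; // snap to the end when close enough
            printf("converged after %d frames\n", frame + 1);
            break;
        }
    }
    return 0;
}

With a 0.5 s timescale and 60 Hz frames this converges in roughly 80 frames, i.e. the avatar eases toward the domain limit and then snaps the last few percent.
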
|
||||
|
||||
void Avatar::setTargetScale(float targetScale) {
|
||||
AvatarData::setTargetScale(targetScale);
|
||||
_isAnimatingScale = true;
|
||||
}
|
||||
|
||||
void Avatar::updateAvatarEntities() {
|
||||
PerformanceTimer perfTimer("attachments");
|
||||
// - if queueEditEntityMessage sees clientOnly flag it does _myAvatar->updateAvatarEntity()
|
||||
|
@ -302,63 +300,23 @@ void Avatar::updateAvatarEntities() {
|
|||
}
|
||||
}
|
||||
|
||||
void Avatar::setShouldDie() {
|
||||
// This will cause the avatar to be shrunk away and removed (the actual Avatar gets removed), but then it comes back.
|
||||
_owningAvatarMixer.clear();
|
||||
bool Avatar::shouldDie() const {
|
||||
const qint64 AVATAR_SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND;
|
||||
return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS;
|
||||
}
|
||||
|
||||
void Avatar::simulate(float deltaTime) {
|
||||
void Avatar::simulate(float deltaTime, bool inView) {
|
||||
PROFILE_RANGE(simulation, "simulate");
|
||||
PerformanceTimer perfTimer("simulate");
|
||||
|
||||
if (!isDead() && !_motionState) {
|
||||
DependencyManager::get<AvatarManager>()->addAvatarToSimulation(this);
|
||||
}
|
||||
animateScaleChanges(deltaTime);
|
||||
|
||||
bool avatarInView = false;
|
||||
{ // update the shouldAnimate flag to match whether or not we will render the avatar.
|
||||
PerformanceTimer perfTimer("cull");
|
||||
{
|
||||
// simple frustum check
|
||||
PerformanceTimer perfTimer("inView");
|
||||
ViewFrustum viewFrustum;
|
||||
qApp->copyDisplayViewFrustum(viewFrustum);
|
||||
avatarInView = viewFrustum.sphereIntersectsFrustum(getPosition(), getBoundingRadius())
|
||||
|| viewFrustum.boxIntersectsFrustum(_skeletonModel->getRenderableMeshBound());
|
||||
}
|
||||
PerformanceTimer lodPerfTimer("LOD");
|
||||
if (avatarInView) {
|
||||
const float MINIMUM_VISIBILITY_FOR_ON = 0.4f;
|
||||
const float MAXIMUM_VISIBILITY_FOR_OFF = 0.6f;
|
||||
ViewFrustum viewFrustum;
|
||||
qApp->copyViewFrustum(viewFrustum);
|
||||
float visibility = calculateRenderAccuracy(viewFrustum.getPosition(),
|
||||
getBounds(), DependencyManager::get<LODManager>()->getOctreeSizeScale());
|
||||
if (!_shouldAnimate) {
|
||||
if (visibility > MINIMUM_VISIBILITY_FOR_ON) {
|
||||
_shouldAnimate = true;
|
||||
qCDebug(interfaceapp) << "Restoring" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for visibility" << visibility;
|
||||
}
|
||||
} else if (visibility < MAXIMUM_VISIBILITY_FOR_OFF) {
|
||||
_shouldAnimate = false;
|
||||
qCDebug(interfaceapp) << "Optimizing" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for visibility" << visibility;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
uint64_t start = usecTimestampNow();
|
||||
// CRUFT? _shouldSkipRender is never set 'true'
|
||||
if (_shouldAnimate && avatarInView && !_shouldSkipRender) {
|
||||
{
|
||||
PerformanceTimer perfTimer("skeleton");
|
||||
{
|
||||
PROFILE_RANGE(simulation, "updateJoints");
|
||||
if (inView && _hasNewJointData) {
|
||||
_skeletonModel->getRig()->copyJointsFromJointData(_jointData);
|
||||
_skeletonModel->simulate(deltaTime, _hasNewJointRotations || _hasNewJointTranslations);
|
||||
_skeletonModel->simulate(deltaTime, true);
|
||||
|
||||
locationChanged(); // joints changed, so if there are any children, update them.
|
||||
_hasNewJointRotations = false;
|
||||
_hasNewJointTranslations = false;
|
||||
}
|
||||
{
|
||||
PerformanceTimer perfTimer("head");
|
||||
_hasNewJointData = false;
|
||||
|
||||
glm::vec3 headPosition = getPosition();
|
||||
if (!_skeletonModel->getHeadPosition(headPosition)) {
|
||||
headPosition = getPosition();
|
||||
|
@ -366,16 +324,12 @@ void Avatar::simulate(float deltaTime) {
|
|||
Head* head = getHead();
|
||||
head->setPosition(headPosition);
|
||||
head->setScale(getUniformScale());
|
||||
head->simulate(deltaTime, false, !_shouldAnimate);
|
||||
head->simulate(deltaTime, false);
|
||||
} else {
|
||||
// a non-full update is still required so that the position, rotation, scale and bounds of the skeletonModel are updated.
|
||||
_skeletonModel->simulate(deltaTime, false);
|
||||
}
|
||||
} else {
|
||||
// a non-full update is still required so that the position, rotation, scale and bounds of the skeletonModel are updated.
|
||||
getHead()->setPosition(getPosition());
|
||||
PerformanceTimer perfTimer("skeleton");
|
||||
_skeletonModel->simulate(deltaTime, false);
|
||||
}
|
||||
timeProcessingJoints += usecTimestampNow() - start;
|
||||
numJointsProcessed += _jointData.size();
|
||||
|
||||
// update animation for display name fade in/out
|
||||
if ( _displayNameTargetAlpha != _displayNameAlpha) {
|
||||
|
@ -394,11 +348,13 @@ void Avatar::simulate(float deltaTime) {
|
|||
_displayNameAlpha = abs(_displayNameAlpha - _displayNameTargetAlpha) < 0.01f ? _displayNameTargetAlpha : _displayNameAlpha;
|
||||
}
|
||||
|
||||
measureMotionDerivatives(deltaTime);
|
||||
|
||||
simulateAttachments(deltaTime);
|
||||
updatePalms();
|
||||
updateAvatarEntities();
|
||||
{
|
||||
PROFILE_RANGE(simulation, "misc");
|
||||
measureMotionDerivatives(deltaTime);
|
||||
simulateAttachments(deltaTime);
|
||||
updatePalms();
|
||||
updateAvatarEntities();
|
||||
}
|
||||
}
|
||||
|
||||
bool Avatar::isLookingAtMe(AvatarSharedPointer avatar) const {
|
||||
|
@ -1044,10 +1000,14 @@ void Avatar::setModelURLFinished(bool success) {
|
|||
|
||||
|
||||
// create new model; this can return an instance of a SoftAttachmentModel rather than a plain Model
|
||||
static std::shared_ptr<Model> allocateAttachmentModel(bool isSoft, RigPointer rigOverride) {
|
||||
static std::shared_ptr<Model> allocateAttachmentModel(bool isSoft, RigPointer rigOverride, bool isCauterized) {
|
||||
if (isSoft) {
|
||||
// cast to std::shared_ptr<Model>
|
||||
return std::dynamic_pointer_cast<Model>(std::make_shared<SoftAttachmentModel>(std::make_shared<Rig>(), nullptr, rigOverride));
|
||||
std::shared_ptr<SoftAttachmentModel> softModel = std::make_shared<SoftAttachmentModel>(std::make_shared<Rig>(), nullptr, rigOverride);
|
||||
if (isCauterized) {
|
||||
softModel->flagAsCauterized();
|
||||
}
|
||||
return std::dynamic_pointer_cast<Model>(softModel);
|
||||
} else {
|
||||
return std::make_shared<Model>(std::make_shared<Rig>());
|
||||
}
|
||||
|
@ -1073,12 +1033,12 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
|
|||
for (int i = 0; i < attachmentData.size(); i++) {
|
||||
if (i == (int)_attachmentModels.size()) {
|
||||
// if number of attachments has been increased, we need to allocate a new model
|
||||
_attachmentModels.push_back(allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig()));
|
||||
_attachmentModels.push_back(allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig(), isMyAvatar()));
|
||||
}
|
||||
else if (i < oldAttachmentData.size() && oldAttachmentData[i].isSoft != attachmentData[i].isSoft) {
|
||||
// if the attachment has changed type, we need to re-allocate a new one.
|
||||
_attachmentsToRemove.push_back(_attachmentModels[i]);
|
||||
_attachmentModels[i] = allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig());
|
||||
_attachmentModels[i] = allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig(), isMyAvatar());
|
||||
}
|
||||
_attachmentModels[i]->setURL(attachmentData[i].modelURL);
|
||||
}
|
||||
|
@ -1102,7 +1062,7 @@ int Avatar::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
if (_moving && _motionState) {
|
||||
_motionState->addDirtyFlags(Simulation::DIRTY_POSITION);
|
||||
}
|
||||
if (_moving || _hasNewJointRotations || _hasNewJointTranslations) {
|
||||
if (_moving || _hasNewJointData) {
|
||||
locationChanged();
|
||||
}
|
||||
|
||||
|
@ -1363,4 +1323,4 @@ void Avatar::ensureInScene(AvatarSharedPointer self) {
|
|||
if (!_inScene) {
|
||||
addToScene(self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -58,8 +58,6 @@ class Avatar : public AvatarData {
|
|||
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
|
||||
|
||||
public:
|
||||
static float getNumJointsProcessedPerSecond();
|
||||
|
||||
explicit Avatar(RigPointer rig = nullptr);
|
||||
~Avatar();
|
||||
|
||||
|
@ -68,7 +66,7 @@ public:
|
|||
|
||||
void init();
|
||||
void updateAvatarEntities();
|
||||
void simulate(float deltaTime);
|
||||
void simulate(float deltaTime, bool inView);
|
||||
virtual void simulateAttachments(float deltaTime);
|
||||
|
||||
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition);
|
||||
|
@ -141,8 +139,6 @@ public:
|
|||
|
||||
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
|
||||
|
||||
Q_INVOKABLE bool getShouldRender() const { return !_shouldSkipRender; }
|
||||
|
||||
/// Scales a world space position vector relative to the avatar position and scale
|
||||
/// \param vector position to be scaled. Will store the result
|
||||
void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;
|
||||
|
@ -179,7 +175,12 @@ public:
|
|||
glm::vec3 getUncachedRightPalmPosition() const;
|
||||
glm::quat getUncachedRightPalmRotation() const;
|
||||
|
||||
Q_INVOKABLE void setShouldDie();
|
||||
uint64_t getLastRenderUpdateTime() const { return _lastRenderUpdateTime; }
|
||||
void setLastRenderUpdateTime(uint64_t time) { _lastRenderUpdateTime = time; }
|
||||
|
||||
bool shouldDie() const;
|
||||
void animateScaleChanges(float deltaTime);
|
||||
void setTargetScale(float targetScale) override;
|
||||
|
||||
public slots:
|
||||
|
||||
|
@ -230,8 +231,6 @@ protected:
|
|||
// protected methods...
|
||||
bool isLookingAtMe(AvatarSharedPointer avatar) const;
|
||||
|
||||
virtual void animateScaleChanges(float deltaTime);
|
||||
|
||||
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
|
||||
glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }
|
||||
glm::vec3 getBodyFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
|
||||
|
@ -261,14 +260,14 @@ protected:
|
|||
void ensureInScene(AvatarSharedPointer self);
|
||||
|
||||
private:
|
||||
uint64_t _lastRenderUpdateTime { 0 };
|
||||
int _leftPointerGeometryID { 0 };
|
||||
int _rightPointerGeometryID { 0 };
|
||||
int _nameRectGeometryID { 0 };
|
||||
bool _initialized;
|
||||
bool _shouldAnimate { true };
|
||||
bool _shouldSkipRender { false };
|
||||
bool _isLookAtTarget { false };
|
||||
bool _inScene { false };
|
||||
bool _isAnimatingScale { false };
|
||||
|
||||
float getBoundingRadius() const;
|
||||
|
||||
|
|
|
@ -132,53 +132,131 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
|
|||
|
||||
Q_LOGGING_CATEGORY(trace_simulation_avatar, "trace.simulation.avatar");
|
||||
|
||||
class AvatarPriority {
|
||||
public:
|
||||
AvatarPriority(AvatarSharedPointer a, float p) : avatar(a), priority(p) {}
|
||||
AvatarSharedPointer avatar;
|
||||
float priority;
|
||||
// NOTE: we invert the less-than operator to sort high priorities to front
|
||||
bool operator<(const AvatarPriority& other) const { return priority > other.priority; }
|
||||
};
|
||||
|
||||
void AvatarManager::updateOtherAvatars(float deltaTime) {
|
||||
// lock the hash for read to check the size
|
||||
QReadLocker lock(&_hashLock);
|
||||
|
||||
if (_avatarHash.size() < 2 && _avatarFades.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
lock.unlock();
|
||||
|
||||
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
|
||||
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
|
||||
|
||||
PerformanceTimer perfTimer("otherAvatars");
|
||||
render::PendingChanges pendingChanges;
|
||||
uint64_t startTime = usecTimestampNow();
|
||||
|
||||
// simulate avatars
|
||||
auto hashCopy = getHashCopy();
|
||||
auto avatarMap = getHashCopy();
|
||||
QList<AvatarSharedPointer> avatarList = avatarMap.values();
|
||||
ViewFrustum cameraView;
|
||||
qApp->copyDisplayViewFrustum(cameraView);
|
||||
glm::vec3 frustumCenter = cameraView.getPosition();
|
||||
|
||||
uint64_t start = usecTimestampNow();
|
||||
AvatarHash::iterator avatarIterator = hashCopy.begin();
|
||||
while (avatarIterator != hashCopy.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
|
||||
const float OUT_OF_VIEW_PENALTY = -10.0;
|
||||
|
||||
if (avatar == _myAvatar || !avatar->isInitialized()) {
|
||||
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
|
||||
// DO NOT update or fade out uninitialized Avatars
|
||||
++avatarIterator;
|
||||
} else if (avatar->shouldDie()) {
|
||||
removeAvatar(avatarIterator.key());
|
||||
++avatarIterator;
|
||||
} else {
|
||||
avatar->ensureInScene(avatar);
|
||||
avatar->simulate(deltaTime);
|
||||
++avatarIterator;
|
||||
std::priority_queue<AvatarPriority> sortedAvatars;
|
||||
{
|
||||
PROFILE_RANGE(simulation, "sort");
|
||||
for (int32_t i = 0; i < avatarList.size(); ++i) {
|
||||
const auto& avatar = std::static_pointer_cast<Avatar>(avatarList.at(i));
|
||||
if (avatar == _myAvatar || !avatar->isInitialized()) {
|
||||
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
|
||||
// DO NOT update or fade out uninitialized Avatars
|
||||
continue;
|
||||
}
|
||||
if (avatar->shouldDie()) {
|
||||
removeAvatar(avatar->getID());
|
||||
continue;
|
||||
}
|
||||
if (avatar->isDead()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
avatar->updateRenderItem(pendingChanges);
|
||||
// priority = weighted linear combination of:
|
||||
// (a) apparentSize
|
||||
// (b) proximity to center of view
|
||||
// (c) time since last update
|
||||
// (d) TIME_PENALTY to help recently updated entries sort toward back
|
||||
glm::vec3 avatarPosition = avatar->getPosition();
|
||||
glm::vec3 offset = avatarPosition - frustumCenter;
|
||||
float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
|
||||
float radius = avatar->getBoundingRadius();
|
||||
const glm::vec3& forward = cameraView.getDirection();
|
||||
float apparentSize = radius / distance;
|
||||
float cosineAngle = glm::length(offset - glm::dot(offset, forward) * forward) / distance;
|
||||
const float TIME_PENALTY = 0.080f; // seconds
|
||||
float age = (float)(startTime - avatar->getLastRenderUpdateTime()) / (float)(USECS_PER_SECOND) - TIME_PENALTY;
|
||||
// NOTE: we are adding values of different units to get a single measure of "priority".
|
||||
// Thus we multiply each component by a conversion "weight" that scales its units
|
||||
// relative to the others. These weights are pure magic tuning and are hard coded in the
|
||||
// relation below: (hint: unitary weights are not explicitly shown)
|
||||
float priority = apparentSize + 0.25f * cosineAngle + age;
|
||||
|
||||
// decrement priority of avatars outside keyhole
|
||||
if (distance > cameraView.getCenterRadius()) {
|
||||
if (!cameraView.sphereIntersectsFrustum(avatarPosition, radius)) {
|
||||
priority += OUT_OF_VIEW_PENALTY;
|
||||
}
|
||||
}
|
||||
sortedAvatars.push(AvatarPriority(avatar, priority));
|
||||
}
|
||||
}
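
To make the magic weights above concrete, here is a hedged numeric walk-through of the priority formula for a hypothetical avatar: 1 m bounding radius, 5 m ahead of the camera with 1 m of sideways offset, last fully updated 0.25 s ago. All inputs are invented for illustration.

// Worked example of the priority formula above; every number is made up.
#include <cmath>
#include <cstdio>

int main() {
    const float radius = 1.0f;
    const float forwardDistance = 5.0f; // along the view direction
    const float lateralOffset = 1.0f;   // perpendicular to the view direction
    const float distance = std::sqrt(forwardDistance * forwardDistance + lateralOffset * lateralOffset) + 0.001f;

    float apparentSize = radius / distance;       // ~0.196
    float offAxisTerm = lateralOffset / distance; // the term the code names "cosineAngle", ~0.196
    const float TIME_PENALTY = 0.080f;            // seconds
    float age = 0.25f - TIME_PENALTY;             // 0.17

    float priority = apparentSize + 0.25f * offAxisTerm + age;
    printf("in view:     priority = %.3f\n", priority);                       // ~0.415

    const float OUT_OF_VIEW_PENALTY = -10.0f;
    printf("out of view: priority = %.3f\n", priority + OUT_OF_VIEW_PENALTY); // ~-9.585
    return 0;
}

In the budget loop further down, anything above OUT_OF_VIEW_THRESHOLD (0.5 * OUT_OF_VIEW_PENALTY = -5) counts as in view, so the in-view avatar here gets a full update while the penalized one only gets the cheap simulate() path.
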
|
||||
|
||||
render::PendingChanges pendingChanges;
|
||||
const uint64_t RENDER_UPDATE_BUDGET = 1500; // usec
|
||||
const uint64_t MAX_UPDATE_BUDGET = 2000; // usec
|
||||
uint64_t renderExpiry = startTime + RENDER_UPDATE_BUDGET;
|
||||
uint64_t maxExpiry = startTime + MAX_UPDATE_BUDGET;
|
||||
while (!sortedAvatars.empty()) {
|
||||
const AvatarPriority& sortData = sortedAvatars.top();
|
||||
const auto& avatar = std::static_pointer_cast<Avatar>(sortData.avatar);
|
||||
|
||||
// for ALL avatars...
|
||||
avatar->ensureInScene(avatar);
|
||||
if (!avatar->getMotionState()) {
|
||||
ShapeInfo shapeInfo;
|
||||
avatar->computeShapeInfo(shapeInfo);
|
||||
btCollisionShape* shape = const_cast<btCollisionShape*>(ObjectMotionState::getShapeManager()->getShape(shapeInfo));
|
||||
if (shape) {
|
||||
// don't add to the simulation now, instead put it on a list to be added later
|
||||
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
|
||||
avatar->setMotionState(motionState);
|
||||
_motionStatesToAddToPhysics.insert(motionState);
|
||||
_motionStatesThatMightUpdate.insert(motionState);
|
||||
}
|
||||
}
|
||||
avatar->animateScaleChanges(deltaTime);
|
||||
|
||||
uint64_t now = usecTimestampNow();
|
||||
if (now < renderExpiry) {
|
||||
// we're within budget
|
||||
const float OUT_OF_VIEW_THRESHOLD = 0.5f * OUT_OF_VIEW_PENALTY;
|
||||
bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
|
||||
avatar->simulate(deltaTime, inView);
|
||||
avatar->updateRenderItem(pendingChanges);
|
||||
avatar->setLastRenderUpdateTime(startTime);
|
||||
} else if (now < maxExpiry) {
|
||||
// we've spent most of our time budget, but we still simulate() the avatar as if it were out of view
|
||||
// --> some avatars may freeze until their priority trickles up
|
||||
const bool inView = false;
|
||||
avatar->simulate(deltaTime, inView);
|
||||
} else {
|
||||
// we've spent ALL of our time budget --> bail on the rest of the avatar updates
|
||||
// --> some scale or fade animations may glitch
|
||||
// --> some avatar velocity measurements may be a little off
|
||||
break;
|
||||
}
|
||||
sortedAvatars.pop();
|
||||
}
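
The loop above is a fairly generic two-tier time-budget pattern: full-cost work in priority order while under a soft budget, degraded work until a hard budget, then bail and let the remaining items wait for a later frame. A stripped-down sketch of the same structure, with placeholder work items instead of avatars:

// Stripped-down sketch of the two-tier time budget used above; work items are placeholders.
#include <chrono>
#include <cstdio>
#include <queue>

int main() {
    using Clock = std::chrono::steady_clock;
    const auto RENDER_UPDATE_BUDGET = std::chrono::microseconds(1500);
    const auto MAX_UPDATE_BUDGET = std::chrono::microseconds(2000);

    std::priority_queue<int> workItems; // highest value pops first
    for (int i = 0; i < 100; ++i) { workItems.push(i); }

    const auto start = Clock::now();
    int fullUpdates = 0, cheapUpdates = 0, deferred = 0;
    while (!workItems.empty()) {
        const auto now = Clock::now();
        if (now < start + RENDER_UPDATE_BUDGET) {
            ++fullUpdates;   // within the soft budget: do the expensive update
        } else if (now < start + MAX_UPDATE_BUDGET) {
            ++cheapUpdates;  // over the soft budget: do a degraded update
        } else {
            deferred = (int)workItems.size();
            break;           // hard budget exhausted: the rest waits for a later frame
        }
        workItems.pop();
    }
    printf("full=%d cheap=%d deferred=%d\n", fullUpdates, cheapUpdates, deferred);
    return 0;
}
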
|
||||
qApp->getMain3DScene()->enqueuePendingChanges(pendingChanges);
|
||||
|
||||
// simulate avatar fades
|
||||
simulateAvatarFades(deltaTime);
|
||||
|
||||
PROFILE_COUNTER(simulation_avatar, "NumAvatarsPerSec",
|
||||
{ { "NumAvatarsPerSec", (float)(size() * USECS_PER_SECOND) / (float)(usecTimestampNow() - start) } });
|
||||
PROFILE_COUNTER(simulation_avatar, "NumJointsPerSec", { { "NumJointsPerSec", Avatar::getNumJointsProcessedPerSecond() } });
|
||||
}
|
||||
|
||||
void AvatarManager::postUpdate(float deltaTime) {
|
||||
|
@ -201,6 +279,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
while (fadingIterator != _avatarFades.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(*fadingIterator);
|
||||
avatar->setTargetScale(avatar->getUniformScale() * SHRINK_RATE);
|
||||
avatar->animateScaleChanges(deltaTime);
|
||||
if (avatar->getTargetScale() <= MIN_FADE_SCALE) {
|
||||
avatar->removeFromScene(*fadingIterator, scene, pendingChanges);
|
||||
// only remove from _avatarFades if we're sure its motionState has been removed from PhysicsEngine
|
||||
|
@ -210,7 +289,8 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
++fadingIterator;
|
||||
}
|
||||
} else {
|
||||
avatar->simulate(deltaTime);
|
||||
const bool inView = true; // HACK
|
||||
avatar->simulate(deltaTime, inView);
|
||||
++fadingIterator;
|
||||
}
|
||||
}
|
||||
|
@ -386,21 +466,6 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
|
|||
}
|
||||
}
|
||||
|
||||
void AvatarManager::addAvatarToSimulation(Avatar* avatar) {
|
||||
assert(!avatar->getMotionState());
|
||||
|
||||
ShapeInfo shapeInfo;
|
||||
avatar->computeShapeInfo(shapeInfo);
|
||||
btCollisionShape* shape = const_cast<btCollisionShape*>(ObjectMotionState::getShapeManager()->getShape(shapeInfo));
|
||||
if (shape) {
|
||||
// we don't add to the simulation now, we put it on a list to be added later
|
||||
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
|
||||
avatar->setMotionState(motionState);
|
||||
_motionStatesToAddToPhysics.insert(motionState);
|
||||
_motionStatesThatMightUpdate.insert(motionState);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
|
||||
for (auto avatarData : _avatarHash) {
|
||||
|
|
|
@ -69,8 +69,6 @@ public:
|
|||
void handleOutgoingChanges(const VectorOfMotionStates& motionStates);
|
||||
void handleCollisionEvents(const CollisionEvents& collisionEvents);
|
||||
|
||||
void addAvatarToSimulation(Avatar* avatar);
|
||||
|
||||
Q_INVOKABLE RayToAvatarIntersectionResult findRayIntersection(const PickRay& ray,
|
||||
const QScriptValue& avatarIdsToInclude = QScriptValue(),
|
||||
const QScriptValue& avatarIdsToDiscard = QScriptValue());
|
||||
|
|
interface/src/avatar/CauterizedMeshPartPayload.cpp (new file, 74 lines)
|
@ -0,0 +1,74 @@
|
|||
//
|
||||
// CauterizedMeshPartPayload.cpp
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by Andrew Meadows 2017.01.17
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "CauterizedMeshPartPayload.h"
|
||||
|
||||
#include <PerfStat.h>
|
||||
|
||||
#include "SkeletonModel.h"
|
||||
|
||||
using namespace render;
|
||||
|
||||
CauterizedMeshPartPayload::CauterizedMeshPartPayload(Model* model, int meshIndex, int partIndex, int shapeIndex, const Transform& transform, const Transform& offsetTransform)
|
||||
: ModelMeshPartPayload(model, meshIndex, partIndex, shapeIndex, transform, offsetTransform) {}
|
||||
|
||||
void CauterizedMeshPartPayload::updateTransformForSkinnedCauterizedMesh(const Transform& transform,
|
||||
const QVector<glm::mat4>& clusterMatrices,
|
||||
const QVector<glm::mat4>& cauterizedClusterMatrices) {
|
||||
_transform = transform;
|
||||
_cauterizedTransform = transform;
|
||||
|
||||
if (clusterMatrices.size() > 0) {
|
||||
_worldBound = AABox();
|
||||
for (auto& clusterMatrix : clusterMatrices) {
|
||||
AABox clusterBound = _localBound;
|
||||
clusterBound.transform(clusterMatrix);
|
||||
_worldBound += clusterBound;
|
||||
}
|
||||
|
||||
_worldBound.transform(transform);
|
||||
if (clusterMatrices.size() == 1) {
|
||||
_transform = _transform.worldTransform(Transform(clusterMatrices[0]));
|
||||
if (cauterizedClusterMatrices.size() != 0) {
|
||||
_cauterizedTransform = _cauterizedTransform.worldTransform(Transform(cauterizedClusterMatrices[0]));
|
||||
} else {
|
||||
_cauterizedTransform = _transform;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_worldBound = _localBound;
|
||||
_worldBound.transform(_drawTransform);
|
||||
}
|
||||
}
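
The bound computation above amounts to: transform the model-space bound by every skinning cluster matrix and take the union, so the world bound covers the mesh wherever the skeleton has pulled it. A hedged sketch of that idea with a small hand-rolled box type (glm is assumed to be available, as it is throughout this codebase; nothing else here is engine code):

// Sketch of a skinned-bound computation in the spirit of the code above.
#include <cfloat>
#include <vector>
#include <glm/glm.hpp>

struct SimpleBox {
    glm::vec3 minCorner = glm::vec3(FLT_MAX);
    glm::vec3 maxCorner = glm::vec3(-FLT_MAX);
    void include(const glm::vec3& p) {
        minCorner = glm::min(minCorner, p);
        maxCorner = glm::max(maxCorner, p);
    }
};

SimpleBox computeSkinnedBound(const SimpleBox& localBound, const std::vector<glm::mat4>& clusterMatrices) {
    SimpleBox worldBound;
    for (const auto& clusterMatrix : clusterMatrices) {
        // transform all eight corners of the local bound and grow the union
        for (int i = 0; i < 8; ++i) {
            glm::vec3 corner((i & 1) ? localBound.maxCorner.x : localBound.minCorner.x,
                             (i & 2) ? localBound.maxCorner.y : localBound.minCorner.y,
                             (i & 4) ? localBound.maxCorner.z : localBound.minCorner.z);
            worldBound.include(glm::vec3(clusterMatrix * glm::vec4(corner, 1.0f)));
        }
    }
    return worldBound;
}

The engine's AABox presumably does the equivalent corner handling inside transform(), which is why the real code can simply accumulate the transformed cluster bounds.
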
|
||||
|
||||
void CauterizedMeshPartPayload::bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const {
|
||||
// Still relying on the raw data from the model
|
||||
const Model::MeshState& state = _model->getMeshState(_meshIndex);
|
||||
SkeletonModel* skeleton = static_cast<SkeletonModel*>(_model);
|
||||
bool useCauterizedMesh = (renderMode != RenderArgs::RenderMode::SHADOW_RENDER_MODE) && skeleton->getEnableCauterization();
|
||||
|
||||
if (state.clusterBuffer) {
|
||||
if (useCauterizedMesh) {
|
||||
const Model::MeshState& cState = skeleton->getCauterizeMeshState(_meshIndex);
|
||||
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, cState.clusterBuffer);
|
||||
} else {
|
||||
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, state.clusterBuffer);
|
||||
}
|
||||
batch.setModelTransform(_transform);
|
||||
} else {
|
||||
if (useCauterizedMesh) {
|
||||
batch.setModelTransform(_cauterizedTransform);
|
||||
} else {
|
||||
batch.setModelTransform(_transform);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
interface/src/avatar/CauterizedMeshPartPayload.h (new file, 29 lines)
|
@ -0,0 +1,29 @@
|
|||
//
|
||||
// CauterizedMeshPartPayload.h
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by Andrew Meadows 2017.01.17
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_CauterizedMeshPartPayload_h
|
||||
#define hifi_CauterizedMeshPartPayload_h
|
||||
|
||||
#include <MeshPartPayload.h>
|
||||
|
||||
class CauterizedMeshPartPayload : public ModelMeshPartPayload {
|
||||
public:
|
||||
CauterizedMeshPartPayload(Model* model, int meshIndex, int partIndex, int shapeIndex, const Transform& transform, const Transform& offsetTransform);
|
||||
void updateTransformForSkinnedCauterizedMesh(const Transform& transform,
|
||||
const QVector<glm::mat4>& clusterMatrices,
|
||||
const QVector<glm::mat4>& cauterizedClusterMatrices);
|
||||
|
||||
void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const override;
|
||||
private:
|
||||
Transform _cauterizedTransform;
|
||||
};
|
||||
|
||||
#endif // hifi_CauterizedMeshPartPayload_h
|
interface/src/avatar/CauterizedModel.cpp (new file, 254 lines)
|
@ -0,0 +1,254 @@
|
|||
//
|
||||
// CauterizedModel.cpp
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by Andrew Meadows 2017.01.17
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "CauterizedModel.h"
|
||||
|
||||
#include <AbstractViewStateInterface.h>
|
||||
#include <MeshPartPayload.h>
|
||||
#include <PerfStat.h>
|
||||
|
||||
#include "CauterizedMeshPartPayload.h"
|
||||
|
||||
|
||||
CauterizedModel::CauterizedModel(RigPointer rig, QObject* parent) :
|
||||
Model(rig, parent) {
|
||||
}
|
||||
|
||||
CauterizedModel::~CauterizedModel() {
|
||||
}
|
||||
|
||||
void CauterizedModel::deleteGeometry() {
|
||||
Model::deleteGeometry();
|
||||
_cauterizeMeshStates.clear();
|
||||
}
|
||||
|
||||
bool CauterizedModel::updateGeometry() {
|
||||
bool needsFullUpdate = Model::updateGeometry();
|
||||
if (_isCauterized && needsFullUpdate) {
|
||||
assert(_cauterizeMeshStates.empty());
|
||||
const FBXGeometry& fbxGeometry = getFBXGeometry();
|
||||
foreach (const FBXMesh& mesh, fbxGeometry.meshes) {
|
||||
Model::MeshState state;
|
||||
state.clusterMatrices.resize(mesh.clusters.size());
|
||||
_cauterizeMeshStates.append(state);
|
||||
}
|
||||
}
|
||||
return needsFullUpdate;
|
||||
}
|
||||
|
||||
void CauterizedModel::createVisibleRenderItemSet() {
|
||||
if (_isCauterized) {
|
||||
assert(isLoaded());
|
||||
const auto& meshes = _renderGeometry->getMeshes();
|
||||
|
||||
// all of our mesh vectors must match in size
|
||||
if ((int)meshes.size() != _meshStates.size()) {
|
||||
qCDebug(renderlogging) << "WARNING!!!! Mesh Sizes don't match! We will not segregate mesh groups yet.";
|
||||
return;
|
||||
}
|
||||
|
||||
// We should not have any existing renderItems if we enter this section of code
|
||||
Q_ASSERT(_modelMeshRenderItemsSet.isEmpty());
|
||||
|
||||
_modelMeshRenderItemsSet.clear();
|
||||
|
||||
Transform transform;
|
||||
transform.setTranslation(_translation);
|
||||
transform.setRotation(_rotation);
|
||||
|
||||
Transform offset;
|
||||
offset.setScale(_scale);
|
||||
offset.postTranslate(_offset);
|
||||
|
||||
// Run through all of the meshes, and place them into their segregated, but unsorted buckets
|
||||
int shapeID = 0;
|
||||
uint32_t numMeshes = (uint32_t)meshes.size();
|
||||
for (uint32_t i = 0; i < numMeshes; i++) {
|
||||
const auto& mesh = meshes.at(i);
|
||||
if (!mesh) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create the render payloads
|
||||
int numParts = (int)mesh->getNumParts();
|
||||
for (int partIndex = 0; partIndex < numParts; partIndex++) {
|
||||
auto ptr = std::make_shared<CauterizedMeshPartPayload>(this, i, partIndex, shapeID, transform, offset);
|
||||
_modelMeshRenderItemsSet << std::static_pointer_cast<ModelMeshPartPayload>(ptr);
|
||||
shapeID++;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Model::createVisibleRenderItemSet();
|
||||
}
|
||||
}
|
||||
|
||||
void CauterizedModel::createCollisionRenderItemSet() {
|
||||
// Temporary HACK: use base class method for now
|
||||
Model::createCollisionRenderItemSet();
|
||||
}
|
||||
|
||||
// Called within Model::simulate call, below.
|
||||
void CauterizedModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
||||
Model::updateRig(deltaTime, parentTransform);
|
||||
_needsUpdateClusterMatrices = true;
|
||||
}
|
||||
|
||||
void CauterizedModel::updateClusterMatrices() {
|
||||
PerformanceTimer perfTimer("CauterizedModel::updateClusterMatrices");
|
||||
|
||||
if (!_needsUpdateClusterMatrices || !isLoaded()) {
|
||||
return;
|
||||
}
|
||||
_needsUpdateClusterMatrices = false;
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
|
||||
for (int i = 0; i < _meshStates.size(); i++) {
|
||||
Model::MeshState& state = _meshStates[i];
|
||||
const FBXMesh& mesh = geometry.meshes.at(i);
|
||||
for (int j = 0; j < mesh.clusters.size(); j++) {
|
||||
const FBXCluster& cluster = mesh.clusters.at(j);
|
||||
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
|
||||
#if GLM_ARCH & GLM_ARCH_SSE2
|
||||
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
|
||||
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
|
||||
state.clusterMatrices[j] = out;
|
||||
#else
|
||||
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Once the cluster matrices are computed, update the buffer(s)
|
||||
if (mesh.clusters.size() > 1) {
|
||||
if (!state.clusterBuffer) {
|
||||
state.clusterBuffer = std::make_shared<gpu::Buffer>(state.clusterMatrices.size() * sizeof(glm::mat4),
|
||||
(const gpu::Byte*) state.clusterMatrices.constData());
|
||||
} else {
|
||||
state.clusterBuffer->setSubData(0, state.clusterMatrices.size() * sizeof(glm::mat4),
|
||||
(const gpu::Byte*) state.clusterMatrices.constData());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// as an optimization, don't build cauterizedClusterMatrices if the boneSet is empty.
|
||||
if (!_cauterizeBoneSet.empty()) {
|
||||
static const glm::mat4 zeroScale(
|
||||
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
|
||||
auto cauterizeMatrix = _rig->getJointTransform(geometry.neckJointIndex) * zeroScale;
|
||||
|
||||
for (int i = 0; i < _cauterizeMeshStates.size(); i++) {
|
||||
Model::MeshState& state = _cauterizeMeshStates[i];
|
||||
const FBXMesh& mesh = geometry.meshes.at(i);
|
||||
for (int j = 0; j < mesh.clusters.size(); j++) {
|
||||
const FBXCluster& cluster = mesh.clusters.at(j);
|
||||
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
|
||||
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
|
||||
jointMatrix = cauterizeMatrix;
|
||||
}
|
||||
#if GLM_ARCH & GLM_ARCH_SSE2
|
||||
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
|
||||
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
|
||||
state.clusterMatrices[j] = out;
|
||||
#else
|
||||
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
|
||||
#endif
|
||||
}
|
||||
|
||||
if (!_cauterizeBoneSet.empty() && (state.clusterMatrices.size() > 1)) {
|
||||
if (!state.clusterBuffer) {
|
||||
state.clusterBuffer =
|
||||
std::make_shared<gpu::Buffer>(state.clusterMatrices.size() * sizeof(glm::mat4),
|
||||
(const gpu::Byte*) state.clusterMatrices.constData());
|
||||
} else {
|
||||
state.clusterBuffer->setSubData(0, state.clusterMatrices.size() * sizeof(glm::mat4),
|
||||
(const gpu::Byte*) state.clusterMatrices.constData());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// post the blender if we're not currently waiting for one to finish
|
||||
if (geometry.hasBlendedMeshes() && _blendshapeCoefficients != _blendedBlendshapeCoefficients) {
|
||||
_blendedBlendshapeCoefficients = _blendshapeCoefficients;
|
||||
DependencyManager::get<ModelBlender>()->noteRequiresBlend(getThisPointer());
|
||||
}
|
||||
}
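
One detail worth spelling out: cauterizeMatrix = neckJointTransform * zeroScale has a zero upper 3x3 block, so only the translation column of the neck transform survives and every vertex skinned by a cauterized cluster collapses onto the neck joint. A standalone check of that claim (glm assumed; the neck transform and vertex are invented):

// Standalone check of the cauterize matrix trick above: every point maps to the neck translation.
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

int main() {
    static const glm::mat4 zeroScale(
        glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
        glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
        glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
        glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));

    // made-up neck joint transform: translation to (0, 1.6, 0) with some rotation
    glm::mat4 neckTransform = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 1.6f, 0.0f));
    neckTransform = glm::rotate(neckTransform, 0.7f, glm::vec3(0.0f, 1.0f, 0.0f));

    glm::mat4 cauterizeMatrix = neckTransform * zeroScale;

    glm::vec4 vertex(0.3f, 2.1f, -0.5f, 1.0f);      // any skinned vertex position
    glm::vec4 out = cauterizeMatrix * vertex;
    printf("collapsed vertex: %.2f %.2f %.2f\n", out.x, out.y, out.z); // prints 0.00 1.60 0.00
    return 0;
}
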
|
||||
|
||||
void CauterizedModel::updateRenderItems() {
|
||||
if (_isCauterized) {
|
||||
if (!_addedToScene) {
|
||||
return;
|
||||
}
|
||||
|
||||
glm::vec3 scale = getScale();
|
||||
if (_collisionGeometry) {
|
||||
// _collisionGeometry is already scaled
|
||||
scale = glm::vec3(1.0f);
|
||||
}
|
||||
_needsUpdateClusterMatrices = true;
|
||||
_renderItemsNeedUpdate = false;
|
||||
|
||||
// queue up this work for later processing, at the end of update and just before rendering.
|
||||
// the application will ensure only the last lambda is actually invoked.
|
||||
void* key = (void*)this;
|
||||
std::weak_ptr<Model> weakSelf = shared_from_this();
|
||||
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [weakSelf, scale]() {
|
||||
// do nothing, if the model has already been destroyed.
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
|
||||
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
|
||||
|
||||
Transform modelTransform;
|
||||
modelTransform.setTranslation(self->getTranslation());
|
||||
modelTransform.setRotation(self->getRotation());
|
||||
|
||||
Transform scaledModelTransform(modelTransform);
|
||||
scaledModelTransform.setScale(scale);
|
||||
|
||||
uint32_t deleteGeometryCounter = self->getGeometryCounter();
|
||||
|
||||
render::PendingChanges pendingChanges;
|
||||
QList<render::ItemID> keys = self->getRenderItems().keys();
|
||||
foreach (auto itemID, keys) {
|
||||
pendingChanges.updateItem<CauterizedMeshPartPayload>(itemID, [modelTransform, deleteGeometryCounter](CauterizedMeshPartPayload& data) {
|
||||
if (data._model && data._model->isLoaded()) {
|
||||
// Ensure the model geometry was not reset between frames
|
||||
if (deleteGeometryCounter == data._model->getGeometryCounter()) {
|
||||
// lazy update of cluster matrices used for rendering. We need to update them here, so we can correctly update the bounding box.
|
||||
data._model->updateClusterMatrices();
|
||||
|
||||
// update the model transform and bounding box for this render item.
|
||||
const Model::MeshState& state = data._model->getMeshState(data._meshIndex);
|
||||
CauterizedModel* cModel = static_cast<CauterizedModel*>(data._model);
|
||||
assert(data._meshIndex < cModel->_cauterizeMeshStates.size());
|
||||
const Model::MeshState& cState = cModel->_cauterizeMeshStates.at(data._meshIndex);
|
||||
data.updateTransformForSkinnedCauterizedMesh(modelTransform, state.clusterMatrices, cState.clusterMatrices);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
});
|
||||
} else {
|
||||
Model::updateRenderItems();
|
||||
}
|
||||
}
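
The pushPostUpdateLambda call above captures the model through a std::weak_ptr so the deferred lambda becomes a harmless no-op if the model is destroyed before the lambda runs. A minimal illustration of that capture pattern outside the engine (all names are placeholders):

// Minimal illustration of the weak_ptr capture pattern used in updateRenderItems().
#include <cstdio>
#include <functional>
#include <memory>
#include <vector>

struct Payload {
    int value { 42 };
};

int main() {
    std::vector<std::function<void()>> deferredWork;

    auto payload = std::make_shared<Payload>();
    std::weak_ptr<Payload> weakSelf = payload;   // capture weakly, never strongly
    deferredWork.push_back([weakSelf]() {
        auto self = weakSelf.lock();
        if (!self) {
            printf("payload already destroyed, skipping\n");
            return;
        }
        printf("payload value = %d\n", self->value);
    });

    deferredWork.front()();  // runs while the payload is alive: prints 42
    payload.reset();         // destroy the owner before the deferred work runs again
    deferredWork.front()();  // now a safe no-op
    return 0;
}
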
|
||||
|
||||
const Model::MeshState& CauterizedModel::getCauterizeMeshState(int index) const {
|
||||
assert(index < _meshStates.size());
|
||||
return _cauterizeMeshStates.at(index);
|
||||
}
|
interface/src/avatar/CauterizedModel.h (new file, 53 lines)
|
@ -0,0 +1,53 @@
|
|||
//
|
||||
// CauterizedModel.h
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by Andrew Meadows 2017.01.17
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_CauterizedModel_h
|
||||
#define hifi_CauterizedModel_h
|
||||
|
||||
|
||||
#include <Model.h>
|
||||
|
||||
class CauterizedModel : public Model {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
CauterizedModel(RigPointer rig, QObject* parent);
|
||||
virtual ~CauterizedModel();
|
||||
|
||||
void flagAsCauterized() { _isCauterized = true; }
|
||||
bool getIsCauterized() const { return _isCauterized; }
|
||||
|
||||
void setEnableCauterization(bool flag) { _enableCauterization = flag; }
|
||||
bool getEnableCauterization() const { return _enableCauterization; }
|
||||
|
||||
const std::unordered_set<int>& getCauterizeBoneSet() const { return _cauterizeBoneSet; }
|
||||
void setCauterizeBoneSet(const std::unordered_set<int>& boneSet) { _cauterizeBoneSet = boneSet; }
|
||||
|
||||
void deleteGeometry() override;
|
||||
bool updateGeometry() override;
|
||||
|
||||
void createVisibleRenderItemSet() override;
|
||||
void createCollisionRenderItemSet() override;
|
||||
|
||||
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
|
||||
virtual void updateClusterMatrices() override;
|
||||
void updateRenderItems() override;
|
||||
|
||||
const Model::MeshState& getCauterizeMeshState(int index) const;
|
||||
|
||||
protected:
|
||||
std::unordered_set<int> _cauterizeBoneSet;
|
||||
QVector<Model::MeshState> _cauterizeMeshStates;
|
||||
bool _isCauterized { false };
|
||||
bool _enableCauterization { false };
|
||||
};
|
||||
|
||||
#endif // hifi_CauterizedModel_h
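
Taken together with the MyAvatar and SkeletonModel hunks further down, the intended call pattern for this class looks roughly like the sketch below. The joint-set computation is not part of this diff, so that parameter is only an assumption.

// Hedged sketch of how the new API is driven elsewhere in this change (the bone set is assumed).
#include <unordered_set>
#include "CauterizedModel.h"

void configureHeadCauterization(CauterizedModel& skeletonModel,
                                const std::unordered_set<int>& headAndNeckJoints,
                                bool cameraInsideHead) {
    skeletonModel.flagAsCauterized();                     // allocate cauterize mesh states (MyAvatar ctor)
    skeletonModel.setCauterizeBoneSet(headAndNeckJoints); // joints to collapse to the neck
    // toggled per frame from the render pass: hide the head only when the camera would see inside it
    skeletonModel.setEnableCauterization(cameraInsideHead);
}
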
|
|
@ -70,7 +70,7 @@ void Head::reset() {
|
|||
_baseYaw = _basePitch = _baseRoll = 0.0f;
|
||||
}
|
||||
|
||||
void Head::simulate(float deltaTime, bool isMine, bool billboard) {
|
||||
void Head::simulate(float deltaTime, bool isMine) {
|
||||
// Update audio trailing average for rendering facial animations
|
||||
const float AUDIO_AVERAGING_SECS = 0.05f;
|
||||
const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.0f;
|
||||
|
@ -117,7 +117,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
|
|||
}
|
||||
}
|
||||
|
||||
if (!(_isFaceTrackerConnected || billboard)) {
|
||||
if (!_isFaceTrackerConnected) {
|
||||
|
||||
if (!_isEyeTrackerConnected) {
|
||||
// Update eye saccades
|
||||
|
@ -220,7 +220,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
|
|||
_leftEyePosition = _rightEyePosition = getPosition();
|
||||
_eyePosition = getPosition();
|
||||
|
||||
if (!billboard && _owningAvatar) {
|
||||
if (_owningAvatar) {
|
||||
auto skeletonModel = static_cast<Avatar*>(_owningAvatar)->getSkeletonModel();
|
||||
if (skeletonModel) {
|
||||
skeletonModel->getEyePositions(_leftEyePosition, _rightEyePosition);
|
||||
|
@ -378,10 +378,6 @@ glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
|
|||
return rotationBetween(orientation * IDENTITY_FRONT, lookAtDelta + glm::length(lookAtDelta) * _saccade) * orientation;
|
||||
}
|
||||
|
||||
glm::vec3 Head::getScalePivot() const {
|
||||
return _position;
|
||||
}
|
||||
|
||||
void Head::setFinalPitch(float finalPitch) {
|
||||
_deltaPitch = glm::clamp(finalPitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH) - _basePitch;
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ public:
|
|||
|
||||
void init();
|
||||
void reset();
|
||||
void simulate(float deltaTime, bool isMine, bool billboard = false);
|
||||
void simulate(float deltaTime, bool isMine);
|
||||
void setScale(float scale);
|
||||
void setPosition(glm::vec3 position) { _position = position; }
|
||||
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
|
||||
|
@ -70,8 +70,6 @@ public:
|
|||
|
||||
bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
|
||||
float getAverageLoudness() const { return _averageLoudness; }
|
||||
/// \return the point about which scaling occurs.
|
||||
glm::vec3 getScalePivot() const;
|
||||
|
||||
void setDeltaPitch(float pitch) { _deltaPitch = pitch; }
|
||||
float getDeltaPitch() const { return _deltaPitch; }
|
||||
|
|
|
@ -116,12 +116,12 @@ MyAvatar::MyAvatar(RigPointer rig) :
|
|||
_hmdAtRestDetector(glm::vec3(0), glm::quat())
|
||||
{
|
||||
using namespace recording;
|
||||
_skeletonModel->flagAsCauterized();
|
||||
|
||||
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
|
||||
_driveKeys[i] = 0.0f;
|
||||
}
|
||||
|
||||
|
||||
// Necessary to select the correct slot
|
||||
using SlotType = void(MyAvatar::*)(const glm::vec3&, bool, const glm::quat&, bool);
|
||||
|
||||
|
@ -1592,7 +1592,7 @@ void MyAvatar::preDisplaySide(RenderArgs* renderArgs) {
|
|||
// toggle using the cauterizedBones depending on where the camera is and the rendering pass type.
|
||||
const bool shouldDrawHead = shouldRenderHead(renderArgs);
|
||||
if (shouldDrawHead != _prevShouldDrawHead) {
|
||||
_skeletonModel->setCauterizeBones(!shouldDrawHead);
|
||||
_skeletonModel->setEnableCauterization(!shouldDrawHead);
|
||||
}
|
||||
_prevShouldDrawHead = shouldDrawHead;
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
#include "AnimDebugDraw.h"
|
||||
|
||||
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) :
|
||||
Model(rig, parent),
|
||||
CauterizedModel(rig, parent),
|
||||
_owningAvatar(owningAvatar),
|
||||
_boundingCapsuleLocalOffset(0.0f),
|
||||
_boundingCapsuleRadius(0.0f),
|
||||
|
@ -166,7 +166,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
|||
_rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
|
||||
|
||||
// evaluate AnimGraph animation and update jointStates.
|
||||
Model::updateRig(deltaTime, parentTransform);
|
||||
CauterizedModel::updateRig(deltaTime, parentTransform);
|
||||
|
||||
Rig::EyeParameters eyeParams;
|
||||
eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
|
||||
|
@ -178,10 +178,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
|||
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
|
||||
|
||||
_rig->updateFromEyeParameters(eyeParams);
|
||||
|
||||
} else {
|
||||
|
||||
Model::updateRig(deltaTime, parentTransform);
|
||||
CauterizedModel::updateRig(deltaTime, parentTransform);
|
||||
|
||||
// This is a little more work than we really want.
|
||||
//
|
||||
|
@ -222,15 +220,19 @@ void SkeletonModel::updateAttitude() {
|
|||
// Called by Avatar::simulate after it has set the joint states (fullUpdate true if changed),
|
||||
// but just before head has been simulated.
|
||||
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
||||
updateAttitude();
|
||||
setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());
|
||||
if (fullUpdate) {
|
||||
updateAttitude();
|
||||
setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());
|
||||
|
||||
Model::simulate(deltaTime, fullUpdate);
|
||||
Model::simulate(deltaTime, fullUpdate);
|
||||
|
||||
// let rig compute the model offset
|
||||
glm::vec3 registrationPoint;
|
||||
if (_rig->getModelRegistrationPoint(registrationPoint)) {
|
||||
setOffset(registrationPoint);
|
||||
// let rig compute the model offset
|
||||
glm::vec3 registrationPoint;
|
||||
if (_rig->getModelRegistrationPoint(registrationPoint)) {
|
||||
setOffset(registrationPoint);
|
||||
}
|
||||
} else {
|
||||
Model::simulate(deltaTime, fullUpdate);
|
||||
}
|
||||
|
||||
if (!isActive() || !_owningAvatar->isMyAvatar()) {
|
||||
|
|
|
@ -12,8 +12,7 @@
|
|||
#ifndef hifi_SkeletonModel_h
|
||||
#define hifi_SkeletonModel_h
|
||||
|
||||
|
||||
#include <Model.h>
|
||||
#include "CauterizedModel.h"
|
||||
|
||||
class Avatar;
|
||||
class MuscleConstraint;
|
||||
|
@ -23,7 +22,7 @@ using SkeletonModelPointer = std::shared_ptr<SkeletonModel>;
|
|||
using SkeletonModelWeakPointer = std::weak_ptr<SkeletonModel>;
|
||||
|
||||
/// A skeleton loaded from a model.
|
||||
class SkeletonModel : public Model {
|
||||
class SkeletonModel : public CauterizedModel {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
|
@ -31,10 +30,10 @@ public:
|
|||
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
|
||||
~SkeletonModel();
|
||||
|
||||
virtual void initJointStates() override;
|
||||
void initJointStates() override;
|
||||
|
||||
virtual void simulate(float deltaTime, bool fullUpdate = true) override;
|
||||
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
|
||||
void simulate(float deltaTime, bool fullUpdate = true) override;
|
||||
void updateRig(float deltaTime, glm::mat4 parentTransform) override;
|
||||
void updateAttitude();
|
||||
|
||||
/// Returns the index of the left hand joint, or -1 if not found.
|
||||
|
@ -105,7 +104,7 @@ public:
|
|||
|
||||
float getHeadClipDistance() const { return _headClipDistance; }
|
||||
|
||||
virtual void onInvalidate() override;
|
||||
void onInvalidate() override;
|
||||
|
||||
signals:
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
#include "InterfaceLogging.h"
|
||||
|
||||
SoftAttachmentModel::SoftAttachmentModel(RigPointer rig, QObject* parent, RigPointer rigOverride) :
|
||||
Model(rig, parent),
|
||||
CauterizedModel(rig, parent),
|
||||
_rigOverride(rigOverride) {
|
||||
assert(_rig);
|
||||
assert(_rigOverride);
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
#ifndef hifi_SoftAttachmentModel_h
|
||||
#define hifi_SoftAttachmentModel_h
|
||||
|
||||
#include <Model.h>
|
||||
#include "CauterizedModel.h"
|
||||
|
||||
// A model that allows the creator to specify a secondary rig instance.
|
||||
// When the cluster matrices are created for rendering, the
|
||||
|
@ -22,16 +22,15 @@
|
|||
// This is used by Avatar instances to wear clothing that follows the same
|
||||
// animated pose as the SkeletonModel.
|
||||
|
||||
class SoftAttachmentModel : public Model {
|
||||
class SoftAttachmentModel : public CauterizedModel {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
|
||||
SoftAttachmentModel(RigPointer rig, QObject* parent, RigPointer rigOverride);
|
||||
~SoftAttachmentModel();
|
||||
|
||||
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
|
||||
virtual void updateClusterMatrices() override;
|
||||
void updateRig(float deltaTime, glm::mat4 parentTransform) override;
|
||||
void updateClusterMatrices() override;
|
||||
|
||||
protected:
|
||||
int getJointIndexOverride(int i) const;
|
||||
|
|
|
@ -253,6 +253,16 @@ int WindowScriptingInterface::createMessageBox(QString title, QString text, int
|
|||
void WindowScriptingInterface::updateMessageBox(int id, QString title, QString text, int buttons, int defaultButton) {
|
||||
auto messageBox = _messageBoxes.value(id);
|
||||
if (messageBox) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "updateMessageBox",
|
||||
Q_ARG(int, id),
|
||||
Q_ARG(QString, title),
|
||||
Q_ARG(QString, text),
|
||||
Q_ARG(int, buttons),
|
||||
Q_ARG(int, defaultButton));
|
||||
return;
|
||||
}
|
||||
|
||||
messageBox->setProperty("title", title);
|
||||
messageBox->setProperty("text", text);
|
||||
messageBox->setProperty("buttons", buttons);
|
||||
|
@ -263,6 +273,12 @@ void WindowScriptingInterface::updateMessageBox(int id, QString title, QString t
|
|||
void WindowScriptingInterface::closeMessageBox(int id) {
|
||||
auto messageBox = _messageBoxes.value(id);
|
||||
if (messageBox) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "closeMessageBox",
|
||||
Q_ARG(int, id));
|
||||
return;
|
||||
}
|
||||
|
||||
disconnect(messageBox);
|
||||
messageBox->setVisible(false);
|
||||
messageBox->deleteLater();
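
Both message box methods above use the standard Qt pattern of bouncing the call back onto the object's own thread before touching QML state. A compact version of the pattern is sketched below; it uses the functor overload of QMetaObject::invokeMethod (Qt 5.10+) so the snippet does not need moc, and the class and method names are placeholders.

// Sketch of the thread-bounce pattern used by updateMessageBox()/closeMessageBox().
#include <QDebug>
#include <QMetaObject>
#include <QObject>
#include <QThread>

class BoxManager : public QObject {
public:
    void closeBox(int id) {
        if (QThread::currentThread() != thread()) {
            // re-post the same call to the thread that owns this object, then bail out
            QMetaObject::invokeMethod(this, [this, id] { closeBox(id); });
            return;
        }
        qDebug() << "closing box" << id << "on its owning thread";
    }
};
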
|
||||
|
|
|
@ -157,7 +157,7 @@ void setupPreferences() {
|
|||
}
|
||||
{
|
||||
auto getter = [=]()->float { return myAvatar->getUniformScale(); };
|
||||
auto setter = [=](float value) { myAvatar->setTargetScaleVerbose(value); }; // The hell?
|
||||
auto setter = [=](float value) { myAvatar->setTargetScale(value); };
|
||||
auto preference = new SpinnerPreference(AVATAR_TUNING, "Avatar scale (default is 1.0)", getter, setter);
|
||||
preference->setMin(0.01f);
|
||||
preference->setMax(99.9f);
|
||||
|
|
|
@ -1269,6 +1269,7 @@ void Rig::copyJointsIntoJointData(QVector<JointData>& jointDataVec) const {
|
|||
|
||||
void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
|
||||
PerformanceTimer perfTimer("copyJoints");
|
||||
PROFILE_RANGE(simulation_animation_detail, "copyJoints");
|
||||
if (_animSkeleton && jointDataVec.size() == (int)_internalPoseSet._relativePoses.size()) {
|
||||
// make a vector of rotations in absolute-geometry-frame
|
||||
const AnimPoseVec& absoluteDefaultPoses = _animSkeleton->getAbsoluteDefaultPoses();
|
||||
|
|
|
@ -180,9 +180,6 @@ bool AudioInjector::injectLocally() {
|
|||
} else {
|
||||
qCDebug(audio) << "AudioInjector::injectLocally called without any data in Sound QByteArray";
|
||||
}
|
||||
|
||||
} else {
|
||||
qCDebug(audio) << "AudioInjector::injectLocally cannot inject locally with no local audio interface present.";
|
||||
}
|
||||
|
||||
return success;
|
||||
|
|
|
@ -9,9 +9,13 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "AudioInjectorOptions.h"
|
||||
|
||||
#include <QScriptValueIterator>
|
||||
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
#include "AudioInjectorOptions.h"
|
||||
#include "AudioLogging.h"
|
||||
|
||||
AudioInjectorOptions::AudioInjectorOptions() :
|
||||
position(0.0f, 0.0f, 0.0f),
|
||||
|
@ -22,7 +26,7 @@ AudioInjectorOptions::AudioInjectorOptions() :
|
|||
ambisonic(false),
|
||||
ignorePenumbra(false),
|
||||
localOnly(false),
|
||||
secondOffset(0.0)
|
||||
secondOffset(0.0f)
|
||||
{
|
||||
|
||||
}
|
||||
|
@ -40,31 +44,51 @@ QScriptValue injectorOptionsToScriptValue(QScriptEngine* engine, const AudioInje
|
|||
}
|
||||
|
||||
void injectorOptionsFromScriptValue(const QScriptValue& object, AudioInjectorOptions& injectorOptions) {
|
||||
if (object.property("position").isValid()) {
|
||||
vec3FromScriptValue(object.property("position"), injectorOptions.position);
|
||||
if (!object.isObject()) {
|
||||
qWarning() << "Audio injector options is not an object.";
|
||||
return;
|
||||
}
|
||||
|
||||
if (object.property("volume").isValid()) {
|
||||
injectorOptions.volume = object.property("volume").toNumber();
|
||||
}
|
||||
|
||||
if (object.property("loop").isValid()) {
|
||||
injectorOptions.loop = object.property("loop").toBool();
|
||||
}
|
||||
|
||||
if (object.property("orientation").isValid()) {
|
||||
quatFromScriptValue(object.property("orientation"), injectorOptions.orientation);
|
||||
}
|
||||
|
||||
if (object.property("ignorePenumbra").isValid()) {
|
||||
injectorOptions.ignorePenumbra = object.property("ignorePenumbra").toBool();
|
||||
}
|
||||
|
||||
if (object.property("localOnly").isValid()) {
|
||||
injectorOptions.localOnly = object.property("localOnly").toBool();
|
||||
}
|
||||
|
||||
if (object.property("secondOffset").isValid()) {
|
||||
injectorOptions.secondOffset = object.property("secondOffset").toNumber();
|
||||
|
||||
QScriptValueIterator it(object);
|
||||
while (it.hasNext()) {
|
||||
it.next();
|
||||
|
||||
if (it.name() == "position") {
|
||||
vec3FromScriptValue(object.property("position"), injectorOptions.position);
|
||||
} else if (it.name() == "orientation") {
|
||||
quatFromScriptValue(object.property("orientation"), injectorOptions.orientation);
|
||||
} else if (it.name() == "volume") {
|
||||
if (it.value().isNumber()) {
|
||||
injectorOptions.volume = it.value().toNumber();
|
||||
} else {
|
||||
qCWarning(audio) << "Audio injector options: volume is not a number";
|
||||
}
|
||||
} else if (it.name() == "loop") {
|
||||
if (it.value().isBool()) {
|
||||
injectorOptions.loop = it.value().toBool();
|
||||
} else {
|
||||
qCWarning(audio) << "Audio injector options: loop is not a boolean";
|
||||
}
|
||||
} else if (it.name() == "ignorePenumbra") {
|
||||
if (it.value().isBool()) {
|
||||
injectorOptions.ignorePenumbra = it.value().toBool();
|
||||
} else {
|
||||
qCWarning(audio) << "Audio injector options: ignorePenumbra is not a boolean";
|
||||
}
|
||||
} else if (it.name() == "localOnly") {
|
||||
if (it.value().isBool()) {
|
||||
injectorOptions.localOnly = it.value().toBool();
|
||||
} else {
|
||||
qCWarning(audio) << "Audio injector options: localOnly is not a boolean";
|
||||
}
|
||||
} else if (it.name() == "secondOffset") {
|
||||
if (it.value().isNumber()) {
|
||||
injectorOptions.secondOffset = it.value().toNumber();
|
||||
} else {
|
||||
qCWarning(audio) << "Audio injector options: secondOffset is not a number";
|
||||
}
|
||||
} else {
|
||||
qCWarning(audio) << "Unknown audio injector option:" << it.name();
|
||||
}
|
||||
}
|
||||
}
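
One practical effect of the stricter parsing above is that a misspelled or mistyped option now produces a warning instead of being silently ignored. A small hedged check of that behaviour, assuming the converter is declared in AudioInjectorOptions.h as before (Qt Script module required):

// Misspelled and mistyped options now warn instead of silently disappearing.
#include <QCoreApplication>
#include <QScriptEngine>
#include "AudioInjectorOptions.h"

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    QScriptEngine engine;

    // "vollume" is deliberately misspelled and "loop" has the wrong type
    QScriptValue object = engine.evaluate("({ vollume: 0.5, loop: 1, localOnly: true })");

    AudioInjectorOptions options;
    injectorOptionsFromScriptValue(object, options); // warns about both, still applies localOnly
    return 0;
}
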
|
|
@ -120,8 +120,7 @@ AvatarData::AvatarData() :
|
|||
_handState(0),
|
||||
_keyState(NO_KEY_DOWN),
|
||||
_forceFaceTrackerConnected(false),
|
||||
_hasNewJointRotations(true),
|
||||
_hasNewJointTranslations(true),
|
||||
_hasNewJointData(true),
|
||||
_headData(NULL),
|
||||
_displayNameTargetAlpha(1.0f),
|
||||
_displayNameAlpha(1.0f),
|
||||
|
@ -180,11 +179,6 @@ void AvatarData::setTargetScale(float targetScale) {
|
|||
_targetScale = glm::clamp(targetScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE);
|
||||
}
|
||||
|
||||
void AvatarData::setTargetScaleVerbose(float targetScale) {
|
||||
setTargetScale(targetScale);
|
||||
qCDebug(avatars) << "Changed scale to " << _targetScale;
|
||||
}
|
||||
|
||||
glm::vec3 AvatarData::getHandPosition() const {
|
||||
return getOrientation() * _handPosition + getPosition();
|
||||
}
|
||||
|
@ -553,7 +547,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
glm::vec3 newEulerAngles(pitch, yaw, roll);
|
||||
glm::quat newOrientation = glm::quat(glm::radians(newEulerAngles));
|
||||
if (currentOrientation != newOrientation) {
|
||||
_hasNewJointRotations = true;
|
||||
_hasNewJointData = true;
|
||||
setLocalOrientation(newOrientation);
|
||||
}
|
||||
|
||||
|
@ -680,7 +674,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
JointData& data = _jointData[i];
|
||||
if (validRotations[i]) {
|
||||
sourceBuffer += unpackOrientationQuatFromSixBytes(sourceBuffer, data.rotation);
|
||||
_hasNewJointRotations = true;
|
||||
_hasNewJointData = true;
|
||||
data.rotationSet = true;
|
||||
}
|
||||
}
|
||||
|
@ -715,7 +709,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
JointData& data = _jointData[i];
|
||||
if (validTranslations[i]) {
|
||||
sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
|
||||
_hasNewJointTranslations = true;
|
||||
_hasNewJointData = true;
|
||||
data.translationSet = true;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -81,8 +81,6 @@ const quint32 AVATAR_MOTION_DEFAULTS =
|
|||
const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
|
||||
AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED;
|
||||
|
||||
const qint64 AVATAR_SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND;
|
||||
|
||||
// Bitset of state flags - we store the key state, hand state, Faceshift, eye tracking, and existence of
|
||||
// referential data in this bit set. The hand state is an octal, but is split into two sections to maintain
|
||||
// backward compatibility. The bits are ordered as such (0-7 left to right).
|
||||
|
@ -264,8 +262,7 @@ public:
|
|||
|
||||
// Scale
|
||||
float getTargetScale() const;
|
||||
void setTargetScale(float targetScale);
|
||||
void setTargetScaleVerbose(float targetScale);
|
||||
virtual void setTargetScale(float targetScale);
|
||||
|
||||
float getDomainLimitedScale() const { return glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale); }
|
||||
void setDomainMinimumScale(float domainMinimumScale)
|
||||
|
@ -371,8 +368,6 @@ public:
|
|||
|
||||
const glm::vec3& getTargetVelocity() const { return _targetVelocity; }
|
||||
|
||||
bool shouldDie() const { return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS; }
|
||||
|
||||
void clearRecordingBasis();
|
||||
TransformPointer getRecordingBasis() const;
|
||||
void setRecordingBasis(TransformPointer recordingBasis = TransformPointer());
|
||||
|
@ -427,8 +422,7 @@ protected:
|
|||
KeyState _keyState;
|
||||
|
||||
bool _forceFaceTrackerConnected;
|
||||
bool _hasNewJointRotations; // set in AvatarData, cleared in Avatar
|
||||
bool _hasNewJointTranslations; // set in AvatarData, cleared in Avatar
|
||||
bool _hasNewJointData; // set in AvatarData, cleared in Avatar
|
||||
|
||||
HeadData* _headData;
|
||||
|
||||
|
|
|
@ -45,9 +45,7 @@
|
|||
|
||||
EntityTreeRenderer::EntityTreeRenderer(bool wantScripts, AbstractViewStateInterface* viewState,
|
||||
AbstractScriptingServicesInterface* scriptingServices) :
|
||||
OctreeRenderer(),
|
||||
_wantScripts(wantScripts),
|
||||
_entitiesScriptEngine(NULL),
|
||||
_lastPointerEventValid(false),
|
||||
_viewState(viewState),
|
||||
_scriptingServices(scriptingServices),
|
||||
|
@ -103,7 +101,7 @@ void EntityTreeRenderer::resetEntitiesScriptEngine() {
|
|||
// Keep a ref to oldEngine until newEngine is ready so EntityScriptingInterface has something to use
|
||||
auto oldEngine = _entitiesScriptEngine;
|
||||
|
||||
auto newEngine = new ScriptEngine(NO_SCRIPT, QString("Entities %1").arg(++_entitiesScriptEngineCount));
|
||||
auto newEngine = new ScriptEngine(ScriptEngine::ENTITY_CLIENT_SCRIPT, NO_SCRIPT, QString("Entities %1").arg(++_entitiesScriptEngineCount));
|
||||
_entitiesScriptEngine = QSharedPointer<ScriptEngine>(newEngine, entitiesScriptEngineDeleter);
|
||||
|
||||
_scriptingServices->registerScriptEngineWithApplicationServices(_entitiesScriptEngine.data());
|
||||
|
@ -169,7 +167,7 @@ void EntityTreeRenderer::init() {
|
|||
connect(entityTree.get(), &EntityTree::deletingEntity, this, &EntityTreeRenderer::deletingEntity, Qt::QueuedConnection);
|
||||
connect(entityTree.get(), &EntityTree::addingEntity, this, &EntityTreeRenderer::addingEntity, Qt::QueuedConnection);
|
||||
connect(entityTree.get(), &EntityTree::entityScriptChanging,
|
||||
this, &EntityTreeRenderer::entitySciptChanging, Qt::QueuedConnection);
|
||||
this, &EntityTreeRenderer::entityScriptChanging, Qt::QueuedConnection);
|
||||
}
|
||||
|
||||
void EntityTreeRenderer::shutdown() {
|
||||
|
@ -939,7 +937,7 @@ void EntityTreeRenderer::addEntityToScene(EntityItemPointer entity) {
|
|||
}
|
||||
|
||||
|
||||
void EntityTreeRenderer::entitySciptChanging(const EntityItemID& entityID, const bool reload) {
|
||||
void EntityTreeRenderer::entityScriptChanging(const EntityItemID& entityID, const bool reload) {
|
||||
if (_tree && !_shuttingDown) {
|
||||
_entitiesScriptEngine->unloadEntityScript(entityID);
|
||||
checkAndCallPreload(entityID, reload);
|
||||
|
@ -1063,7 +1061,7 @@ void EntityTreeRenderer::entityCollisionWithEntity(const EntityItemID& idA, cons
|
|||
}
|
||||
}
|
||||
|
||||
if (isCollisionOwner(myNodeID, entityTree, idA, collision)) {
|
||||
if (isCollisionOwner(myNodeID, entityTree, idB, collision)) {
|
||||
emit collisionWithEntity(idB, idA, collision);
|
||||
if (_entitiesScriptEngine) {
|
||||
_entitiesScriptEngine->callEntityScriptMethod(idB, "collisionWithEntity", idA, collision);
|
||||
|
|
|
@ -122,7 +122,7 @@ signals:
|
|||
public slots:
|
||||
void addingEntity(const EntityItemID& entityID);
|
||||
void deletingEntity(const EntityItemID& entityID);
|
||||
void entitySciptChanging(const EntityItemID& entityID, const bool reload);
|
||||
void entityScriptChanging(const EntityItemID& entityID, const bool reload);
|
||||
void entityCollisionWithEntity(const EntityItemID& idA, const EntityItemID& idB, const Collision& collision);
|
||||
void updateEntityRenderStatus(bool shouldRenderEntities);
|
||||
void updateZone(const EntityItemID& id);
|
||||
|
|
|
@ -246,14 +246,16 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
|
|||
}
|
||||
|
||||
void RenderableWebEntityItem::setSourceUrl(const QString& value) {
|
||||
if (_sourceUrl != value) {
|
||||
qCDebug(entities) << "Setting web entity source URL to " << value;
|
||||
_sourceUrl = value;
|
||||
if (_webSurface) {
|
||||
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
|
||||
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
|
||||
});
|
||||
}
|
||||
auto valueBeforeSuperclassSet = _sourceUrl;
|
||||
|
||||
WebEntityItem::setSourceUrl(value);
|
||||
|
||||
if (_sourceUrl != valueBeforeSuperclassSet && _webSurface) {
|
||||
qCDebug(entities) << "Changing web entity source URL to " << _sourceUrl;
|
||||
|
||||
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
|
||||
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -121,6 +121,7 @@ EntityPropertyFlags EntityItem::getEntityProperties(EncodeBitstreamParams& param
|
|||
requestedProperties += PROP_LIFETIME;
|
||||
requestedProperties += PROP_SCRIPT;
|
||||
requestedProperties += PROP_SCRIPT_TIMESTAMP;
|
||||
requestedProperties += PROP_SERVER_SCRIPTS;
|
||||
requestedProperties += PROP_COLLISION_SOUND_URL;
|
||||
requestedProperties += PROP_REGISTRATION_POINT;
|
||||
requestedProperties += PROP_ANGULAR_DAMPING;
|
||||
|
@ -265,6 +266,7 @@ OctreeElement::AppendState EntityItem::appendEntityData(OctreePacketData* packet
|
|||
APPEND_ENTITY_PROPERTY(PROP_LIFETIME, getLifetime());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SCRIPT, getScript());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SCRIPT_TIMESTAMP, getScriptTimestamp());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SERVER_SCRIPTS, getServerScripts());
|
||||
APPEND_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, getRegistrationPoint());
|
||||
APPEND_ENTITY_PROPERTY(PROP_ANGULAR_DAMPING, getAngularDamping());
|
||||
APPEND_ENTITY_PROPERTY(PROP_VISIBLE, getVisible());
|
||||
|
@ -778,6 +780,19 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
READ_ENTITY_PROPERTY(PROP_LIFETIME, float, updateLifetime);
|
||||
READ_ENTITY_PROPERTY(PROP_SCRIPT, QString, setScript);
|
||||
READ_ENTITY_PROPERTY(PROP_SCRIPT_TIMESTAMP, quint64, setScriptTimestamp);
|
||||
|
||||
{
|
||||
// We use this scope to work around an issue stopping server script changes
|
||||
// from being received by an entity script server running a script that continuously updates an entity.
|
||||
|
||||
// Basically, we'll allow recent changes to the server scripts even if there are local changes to other properties
|
||||
// that have been made more recently.
|
||||
|
||||
bool overwriteLocalData = !ignoreServerPacket || (lastEditedFromBufferAdjusted > _serverScriptsChangedTimestamp);
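// note: this local shadows the overwriteLocalData flag that READ_ENTITY_PROPERTY checks, so the serverScripts read below uses the relaxed condition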
|
||||
|
||||
READ_ENTITY_PROPERTY(PROP_SERVER_SCRIPTS, QString, setServerScripts);
|
||||
}
|
||||
|
||||
READ_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, glm::vec3, updateRegistrationPoint);
|
||||
|
||||
READ_ENTITY_PROPERTY(PROP_ANGULAR_DAMPING, float, updateAngularDamping);
|
||||
|
@ -1186,6 +1201,7 @@ EntityItemProperties EntityItem::getProperties(EntityPropertyFlags desiredProper
|
|||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(lifetime, getLifetime);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(script, getScript);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(scriptTimestamp, getScriptTimestamp);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(serverScripts, getServerScripts);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(collisionSoundURL, getCollisionSoundURL);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(registrationPoint, getRegistrationPoint);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(angularVelocity, getLocalAngularVelocity);
|
||||
|
@ -1298,6 +1314,7 @@ bool EntityItem::setProperties(const EntityItemProperties& properties) {
|
|||
// non-simulation properties below
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(script, setScript);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(scriptTimestamp, setScriptTimestamp);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(serverScripts, setServerScripts);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(collisionSoundURL, setCollisionSoundURL);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(localRenderAlpha, setLocalRenderAlpha);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(visible, setVisible);
|
||||
|
@ -2228,3 +2245,30 @@ void EntityItem::globalizeProperties(EntityItemProperties& properties, const QSt
|
|||
QUuid empty;
|
||||
properties.setParentID(empty);
|
||||
}
|
||||
|
||||
|
||||
bool EntityItem::matchesJSONFilters(const QJsonObject& jsonFilters) const {
|
||||
|
||||
// The intention for the query JSON filter and this method is to be flexible to handle a variety of filters for
|
||||
// ALL entity properties. Some work will need to be done to the property system so that it can be more flexible
|
||||
// (to grab the value and default value of a property given the string representation of that property, for example)
|
||||
|
||||
// currently the only property filter we handle is '+' for serverScripts
|
||||
// which means that we only handle a filtered query asking for entities where the serverScripts property is non-default
|
||||
|
||||
static const QString SERVER_SCRIPTS_PROPERTY = "serverScripts";
|
||||
|
||||
foreach(const auto& property, jsonFilters.keys()) {
|
||||
if (property == SERVER_SCRIPTS_PROPERTY && jsonFilters[property] == EntityQueryFilterSymbol::NonDefault) {
|
||||
// check if this entity has a non-default value for serverScripts
|
||||
if (_serverScripts != ENTITY_ITEM_DEFAULT_SERVER_SCRIPTS) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// the json filter syntax did not match what we expected, return a match
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -125,7 +125,7 @@ public:
|
|||
void markAsChangedOnServer() { _changedOnServer = usecTimestampNow(); }
|
||||
quint64 getLastChangedOnServer() const { return _changedOnServer; }
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const;
|
||||
|
||||
virtual OctreeElement::AppendState appendEntityData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
@ -254,12 +254,16 @@ public:
|
|||
using SpatiallyNestable::getQueryAACube;
|
||||
virtual AACube getQueryAACube(bool& success) const override;
|
||||
|
||||
const QString& getScript() const { return _script; }
|
||||
QString getScript() const { return _script; }
|
||||
void setScript(const QString& value) { _script = value; }
|
||||
|
||||
quint64 getScriptTimestamp() const { return _scriptTimestamp; }
|
||||
void setScriptTimestamp(const quint64 value) { _scriptTimestamp = value; }
|
||||
|
||||
QString getServerScripts() const { return _serverScripts; }
|
||||
void setServerScripts(const QString& serverScripts)
|
||||
{ _serverScripts = serverScripts; _serverScriptsChangedTimestamp = usecTimestampNow(); }
|
||||
|
||||
const QString& getCollisionSoundURL() const { return _collisionSoundURL; }
|
||||
void setCollisionSoundURL(const QString& value);
|
||||
|
||||
|
@ -463,6 +467,8 @@ public:
|
|||
|
||||
QUuid getLastEditedBy() const { return _lastEditedBy; }
|
||||
void setLastEditedBy(QUuid value) { _lastEditedBy = value; }
|
||||
|
||||
bool matchesJSONFilters(const QJsonObject& jsonFilters) const;
|
||||
|
||||
protected:
|
||||
|
||||
|
@ -511,6 +517,10 @@ protected:
|
|||
QString _loadedScript; /// the value of _script when the last preload signal was sent
|
||||
quint64 _scriptTimestamp{ ENTITY_ITEM_DEFAULT_SCRIPT_TIMESTAMP }; /// the script loaded property used for forced reload
|
||||
|
||||
QString _serverScripts;
|
||||
/// keep track of time when _serverScripts property was last changed
|
||||
quint64 _serverScriptsChangedTimestamp { ENTITY_ITEM_DEFAULT_SCRIPT_TIMESTAMP };
|
||||
|
||||
/// the value of _scriptTimestamp when the last preload signal was sent
|
||||
// NOTE: on construction we want this to be different from _scriptTimestamp so we intentionally bump it
|
||||
quint64 _loadedScriptTimestamp{ ENTITY_ITEM_DEFAULT_SCRIPT_TIMESTAMP + 1 };
|
||||
|
|
|
@ -32,21 +32,21 @@ KeyLightPropertyGroup EntityItemProperties::_staticKeyLight;
|
|||
EntityPropertyList PROP_LAST_ITEM = (EntityPropertyList)(PROP_AFTER_LAST_ITEM - 1);
|
||||
|
||||
EntityItemProperties::EntityItemProperties(EntityPropertyFlags desiredProperties) :
|
||||
_id(UNKNOWN_ENTITY_ID),
|
||||
_idSet(false),
|
||||
_lastEdited(0),
|
||||
_type(EntityTypes::Unknown),
|
||||
|
||||
_id(UNKNOWN_ENTITY_ID),
|
||||
_idSet(false),
|
||||
_lastEdited(0),
|
||||
_type(EntityTypes::Unknown),
|
||||
_localRenderAlpha(1.0f),
|
||||
|
||||
_localRenderAlpha(1.0f),
|
||||
_localRenderAlphaChanged(false),
|
||||
|
||||
_localRenderAlphaChanged(false),
|
||||
|
||||
_defaultSettings(true),
|
||||
_naturalDimensions(1.0f, 1.0f, 1.0f),
|
||||
_naturalPosition(0.0f, 0.0f, 0.0f),
|
||||
_desiredProperties(desiredProperties)
|
||||
_defaultSettings(true),
|
||||
_naturalDimensions(1.0f, 1.0f, 1.0f),
|
||||
_naturalPosition(0.0f, 0.0f, 0.0f),
|
||||
_desiredProperties(desiredProperties)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
void EntityItemProperties::setSittingPoints(const QVector<SittingPoint>& sittingPoints) {
|
||||
|
@ -241,6 +241,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
|
|||
CHECK_PROPERTY_CHANGE(PROP_LIFETIME, lifetime);
|
||||
CHECK_PROPERTY_CHANGE(PROP_SCRIPT, script);
|
||||
CHECK_PROPERTY_CHANGE(PROP_SCRIPT_TIMESTAMP, scriptTimestamp);
|
||||
CHECK_PROPERTY_CHANGE(PROP_SERVER_SCRIPTS, serverScripts);
|
||||
CHECK_PROPERTY_CHANGE(PROP_COLLISION_SOUND_URL, collisionSoundURL);
|
||||
CHECK_PROPERTY_CHANGE(PROP_COLOR, color);
|
||||
CHECK_PROPERTY_CHANGE(PROP_COLOR_SPREAD, colorSpread);
|
||||
|
@ -388,6 +389,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
|
|||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LIFETIME, lifetime);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_SCRIPT, script);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_SCRIPT_TIMESTAMP, scriptTimestamp);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_SERVER_SCRIPTS, serverScripts);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_REGISTRATION_POINT, registrationPoint);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ANGULAR_VELOCITY, angularVelocity);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ANGULAR_DAMPING, angularDamping);
|
||||
|
@ -628,6 +630,7 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
|
|||
COPY_PROPERTY_FROM_QSCRIPTVALUE(lifetime, float, setLifetime);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE(script, QString, setScript);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE(scriptTimestamp, quint64, setScriptTimestamp);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE(serverScripts, QString, setServerScripts);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE(registrationPoint, glmVec3, setRegistrationPoint);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE(angularVelocity, glmVec3, setAngularVelocity);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE(angularDamping, float, setAngularDamping);
|
||||
|
@ -917,6 +920,7 @@ QScriptValue EntityItemProperties::entityPropertyFlagsToScriptValue(QScriptEngin
|
|||
static QHash<QString, EntityPropertyList> _propertyStringsToEnums;
|
||||
|
||||
void EntityItemProperties::entityPropertyFlagsFromScriptValue(const QScriptValue& object, EntityPropertyFlags& flags) {
|
||||
|
||||
static std::once_flag initMap;
|
||||
|
||||
std::call_once(initMap, [](){
|
||||
|
@ -934,6 +938,7 @@ void EntityItemProperties::entityPropertyFlagsFromScriptValue(const QScriptValue
|
|||
ADD_PROPERTY_TO_MAP(PROP_LIFETIME, Lifetime, lifetime, float);
|
||||
ADD_PROPERTY_TO_MAP(PROP_SCRIPT, Script, script, QString);
|
||||
ADD_PROPERTY_TO_MAP(PROP_SCRIPT_TIMESTAMP, ScriptTimestamp, scriptTimestamp, quint64);
|
||||
ADD_PROPERTY_TO_MAP(PROP_SERVER_SCRIPTS, ServerScripts, serverScripts, QString);
|
||||
ADD_PROPERTY_TO_MAP(PROP_COLLISION_SOUND_URL, CollisionSoundURL, collisionSoundURL, QString);
|
||||
ADD_PROPERTY_TO_MAP(PROP_COLOR, Color, color, xColor);
|
||||
ADD_PROPERTY_TO_MAP(PROP_COLOR_SPREAD, ColorSpread, colorSpread, xColor);
|
||||
|
@ -1201,6 +1206,7 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
|
|||
APPEND_ENTITY_PROPERTY(PROP_LIFETIME, properties.getLifetime());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SCRIPT, properties.getScript());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SCRIPT_TIMESTAMP, properties.getScriptTimestamp());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SERVER_SCRIPTS, properties.getServerScripts());
|
||||
APPEND_ENTITY_PROPERTY(PROP_COLOR, properties.getColor());
|
||||
APPEND_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, properties.getRegistrationPoint());
|
||||
APPEND_ENTITY_PROPERTY(PROP_ANGULAR_VELOCITY, properties.getAngularVelocity());
|
||||
|
@ -1501,6 +1507,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
|
|||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LIFETIME, float, setLifetime);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SCRIPT, QString, setScript);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SCRIPT_TIMESTAMP, quint64, setScriptTimestamp);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SERVER_SCRIPTS, QString, setServerScripts);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_COLOR, xColor, setColor);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_REGISTRATION_POINT, glm::vec3, setRegistrationPoint);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_ANGULAR_VELOCITY, glm::vec3, setAngularVelocity);
|
||||
|
@ -1627,7 +1634,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
|
|||
|
||||
// NOTE: Spheres and Boxes are just special cases of Shape, and they need to include their PROP_SHAPE
|
||||
// when encoding/decoding edits because otherwise they can't polymorph to other shape types
|
||||
if (properties.getType() == EntityTypes::Shape ||
|
||||
if (properties.getType() == EntityTypes::Shape ||
|
||||
properties.getType() == EntityTypes::Box ||
|
||||
properties.getType() == EntityTypes::Sphere) {
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SHAPE, QString, setShape);
|
||||
|
@ -1688,6 +1695,7 @@ void EntityItemProperties::markAllChanged() {
|
|||
_userDataChanged = true;
|
||||
_scriptChanged = true;
|
||||
_scriptTimestampChanged = true;
|
||||
_serverScriptsChanged = true;
|
||||
_collisionSoundURLChanged = true;
|
||||
_registrationPointChanged = true;
|
||||
_angularVelocityChanged = true;
|
||||
|
@ -1896,6 +1904,9 @@ QList<QString> EntityItemProperties::listChangedProperties() {
|
|||
if (scriptTimestampChanged()) {
|
||||
out += "scriptTimestamp";
|
||||
}
|
||||
if (serverScriptsChanged()) {
|
||||
out += "serverScripts";
|
||||
}
|
||||
if (collisionSoundURLChanged()) {
|
||||
out += "collisionSoundURL";
|
||||
}
|
||||
|
|
|
@ -221,6 +221,8 @@ public:
|
|||
|
||||
DEFINE_PROPERTY_REF(PROP_LAST_EDITED_BY, LastEditedBy, lastEditedBy, QUuid, ENTITY_ITEM_DEFAULT_LAST_EDITED_BY);
|
||||
|
||||
DEFINE_PROPERTY_REF(PROP_SERVER_SCRIPTS, ServerScripts, serverScripts, QString, ENTITY_ITEM_DEFAULT_SERVER_SCRIPTS);
|
||||
|
||||
static QString getBackgroundModeString(BackgroundMode mode);
|
||||
|
||||
|
||||
|
|
|
@ -35,6 +35,7 @@ const bool ENTITY_ITEM_DEFAULT_VISIBLE = true;
|
|||
|
||||
const QString ENTITY_ITEM_DEFAULT_SCRIPT = QString("");
|
||||
const quint64 ENTITY_ITEM_DEFAULT_SCRIPT_TIMESTAMP = 0;
|
||||
const QString ENTITY_ITEM_DEFAULT_SERVER_SCRIPTS = QString("");
|
||||
const QString ENTITY_ITEM_DEFAULT_COLLISION_SOUND_URL = QString("");
|
||||
const glm::vec3 ENTITY_ITEM_DEFAULT_REGISTRATION_POINT = ENTITY_ITEM_HALF_VEC3; // center
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
|
||||
#include <udt/PacketHeaders.h>
|
||||
|
||||
#include "../octree/OctreeQueryNode.h"
|
||||
#include <OctreeQueryNode.h>
|
||||
|
||||
class EntityNodeData : public OctreeQueryNode {
|
||||
public:
|
||||
|
@ -22,9 +22,15 @@ public:
|
|||
|
||||
quint64 getLastDeletedEntitiesSentAt() const { return _lastDeletedEntitiesSentAt; }
|
||||
void setLastDeletedEntitiesSentAt(quint64 sentAt) { _lastDeletedEntitiesSentAt = sentAt; }
|
||||
|
||||
// these can only be called from the OctreeSendThread for the given Node
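// we also track which entities matched this node's JSON filters last frame, so an entity that stops matching can be sent one final time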
|
||||
void insertEntitySentLastFrame(const QUuid& entityID) { _entitiesSentLastFrame.insert(entityID); }
|
||||
void removeEntitySentLastFrame(const QUuid& entityID) { _entitiesSentLastFrame.remove(entityID); }
|
||||
bool sentEntityLastFrame(const QUuid& entityID) { return _entitiesSentLastFrame.contains(entityID); }
|
||||
|
||||
private:
|
||||
quint64 _lastDeletedEntitiesSentAt { usecTimestampNow() };
|
||||
QSet<QUuid> _entitiesSentLastFrame;
|
||||
};
|
||||
|
||||
#endif // hifi_EntityNodeData_h
|
|
@ -183,6 +183,8 @@ enum EntityPropertyList {
|
|||
|
||||
PROP_LAST_EDITED_BY,
|
||||
|
||||
PROP_SERVER_SCRIPTS,
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// ATTENTION: add new properties to end of list just ABOVE this line
|
||||
PROP_AFTER_LAST_ITEM,
|
||||
|
|
|
@ -25,6 +25,7 @@
|
|||
#include "QVariantGLM.h"
|
||||
#include "SimulationOwner.h"
|
||||
#include "ZoneEntityItem.h"
|
||||
#include <EntityScriptClient.h>
|
||||
|
||||
|
||||
EntityScriptingInterface::EntityScriptingInterface(bool bidOnSimulationOwnership) :
|
||||
|
@ -670,6 +671,38 @@ RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersectionWorke
|
|||
return result;
|
||||
}
|
||||
|
||||
bool EntityScriptingInterface::reloadServerScripts(QUuid entityID) {
|
||||
auto client = DependencyManager::get<EntityScriptClient>();
|
||||
return client->reloadServerScript(entityID);
|
||||
}
|
||||
|
||||
bool EntityScriptingInterface::getServerScriptStatus(QUuid entityID, QScriptValue callback) {
|
||||
auto client = DependencyManager::get<EntityScriptClient>();
|
||||
auto request = client->createScriptStatusRequest(entityID);
|
||||
connect(request, &GetScriptStatusRequest::finished, callback.engine(), [callback](GetScriptStatusRequest* request) mutable {
|
||||
QString statusString;
|
||||
switch (request->getStatus()) {
|
||||
case RUNNING:
|
||||
statusString = "running";
|
||||
break;
|
||||
case ERROR_LOADING_SCRIPT:
|
||||
statusString = "error_loading_script";
|
||||
break;
|
||||
case ERROR_RUNNING_SCRIPT:
|
||||
statusString = "error_running_script";
|
||||
break;
|
||||
default:
|
||||
statusString = "";
|
||||
break;
|
||||
}
|
||||
QScriptValueList args { request->getResponseReceived(), request->getIsRunning(), statusString, request->getErrorInfo() };
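// the script callback receives (responseReceived, isRunning, statusString, errorInfo)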
|
||||
callback.call(QScriptValue(), args);
|
||||
request->deleteLater();
|
||||
});
|
||||
request->start();
|
||||
return true;
|
||||
}
|
||||
|
||||
void EntityScriptingInterface::setLightsArePickable(bool value) {
|
||||
LightEntityItem::setLightsArePickable(value);
|
||||
}
|
||||
|
|
|
@ -210,6 +210,9 @@ public slots:
|
|||
/// order to return an accurate result
|
||||
Q_INVOKABLE RayToEntityIntersectionResult findRayIntersectionBlocking(const PickRay& ray, bool precisionPicking = false, const QScriptValue& entityIdsToInclude = QScriptValue(), const QScriptValue& entityIdsToDiscard = QScriptValue());
|
||||
|
||||
Q_INVOKABLE bool reloadServerScripts(QUuid entityID);
|
||||
Q_INVOKABLE bool getServerScriptStatus(QUuid entityID, QScriptValue callback);
|
||||
|
||||
Q_INVOKABLE void setLightsArePickable(bool value);
|
||||
Q_INVOKABLE bool getLightsArePickable() const;
|
||||
|
||||
|
|
|
@ -390,10 +390,14 @@ EntityItemPointer EntityTree::addEntity(const EntityItemID& entityID, const Enti
|
|||
return result;
|
||||
}
|
||||
|
||||
void EntityTree::emitEntityScriptChanging(const EntityItemID& entityItemID, const bool reload) {
|
||||
void EntityTree::emitEntityScriptChanging(const EntityItemID& entityItemID, bool reload) {
|
||||
emit entityScriptChanging(entityItemID, reload);
|
||||
}
|
||||
|
||||
void EntityTree::emitEntityServerScriptChanging(const EntityItemID& entityItemID, bool reload) {
|
||||
emit entityServerScriptChanging(entityItemID, reload);
|
||||
}
|
||||
|
||||
void EntityTree::notifyNewCollisionSoundURL(const QString& newURL, const EntityItemID& entityID) {
|
||||
emit newCollisionSoundURL(QUrl(newURL), entityID);
|
||||
}
|
||||
|
@ -958,9 +962,16 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
|
|||
|
||||
if (validEditPacket && !_entityScriptSourceWhitelist.isEmpty() && !properties.getScript().isEmpty()) {
|
||||
bool passedWhiteList = false;
|
||||
auto entityScript = properties.getScript();
|
||||
|
||||
// grab a URL representation of the entity script so we can check the host for this script
|
||||
auto entityScriptURL = QUrl::fromUserInput(properties.getScript());
|
||||
|
||||
for (const auto& whiteListedPrefix : _entityScriptSourceWhitelist) {
|
||||
if (entityScript.startsWith(whiteListedPrefix, Qt::CaseInsensitive)) {
|
||||
auto whiteListURL = QUrl::fromUserInput(whiteListedPrefix);
|
||||
|
||||
// check if this script URL matches the whitelist domain and, optionally, is beneath the path
|
||||
if (entityScriptURL.host().compare(whiteListURL.host(), Qt::CaseInsensitive) == 0 &&
|
||||
entityScriptURL.path().startsWith(whiteListURL.path(), Qt::CaseInsensitive)) {
|
||||
passedWhiteList = true;
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -31,6 +31,9 @@ using ModelWeakPointer = std::weak_ptr<Model>;
|
|||
|
||||
class EntitySimulation;
|
||||
|
||||
namespace EntityQueryFilterSymbol {
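// a filter value of "+" asks for entities whose named property has a non-default value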
|
||||
static const QString NonDefault = "+";
|
||||
}
|
||||
|
||||
class NewlyCreatedEntityHook {
|
||||
public:
|
||||
|
@ -201,7 +204,8 @@ public:
|
|||
|
||||
void entityChanged(EntityItemPointer entity);
|
||||
|
||||
void emitEntityScriptChanging(const EntityItemID& entityItemID, const bool reload);
|
||||
void emitEntityScriptChanging(const EntityItemID& entityItemID, bool reload);
|
||||
void emitEntityServerScriptChanging(const EntityItemID& entityItemID, bool reload);
|
||||
|
||||
void setSimulation(EntitySimulationPointer simulation);
|
||||
EntitySimulationPointer getSimulation() const { return _simulation; }
|
||||
|
@ -270,6 +274,7 @@ signals:
|
|||
void deletingEntity(const EntityItemID& entityID);
|
||||
void addingEntity(const EntityItemID& entityID);
|
||||
void entityScriptChanging(const EntityItemID& entityItemID, const bool reload);
|
||||
void entityServerScriptChanging(const EntityItemID& entityItemID, const bool reload);
|
||||
void newCollisionSoundURL(const QUrl& url, const EntityItemID& entityID);
|
||||
void clearingEntities();
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
#include <OctreeUtils.h>
|
||||
|
||||
#include "EntitiesLogging.h"
|
||||
#include "EntityNodeData.h"
|
||||
#include "EntityItemProperties.h"
|
||||
#include "EntityTree.h"
|
||||
#include "EntityTreeElement.h"
|
||||
|
@ -94,7 +95,7 @@ void EntityTreeElement::initializeExtraEncodeData(EncodeBitstreamParams& params)
|
|||
bool EntityTreeElement::shouldIncludeChildData(int childIndex, EncodeBitstreamParams& params) const {
|
||||
OctreeElementExtraEncodeData* extraEncodeData = params.extraEncodeData;
|
||||
assert(extraEncodeData); // EntityTrees always require extra encode data on their encoding passes
|
||||
|
||||
|
||||
if (extraEncodeData->contains(this)) {
|
||||
EntityTreeElementExtraEncodeDataPointer entityTreeElementExtraEncodeData
|
||||
= std::static_pointer_cast<EntityTreeElementExtraEncodeData>((*extraEncodeData)[this]);
|
||||
|
@ -231,7 +232,7 @@ void EntityTreeElement::elementEncodeComplete(EncodeBitstreamParams& params) con
|
|||
}
|
||||
|
||||
OctreeElement::AppendState EntityTreeElement::appendElementData(OctreePacketData* packetData,
|
||||
EncodeBitstreamParams& params) const {
|
||||
EncodeBitstreamParams& params) const {
|
||||
|
||||
OctreeElement::AppendState appendElementState = OctreeElement::COMPLETED; // assume the best...
|
||||
|
||||
|
@ -278,25 +279,57 @@ OctreeElement::AppendState EntityTreeElement::appendElementData(OctreePacketData
|
|||
int numberOfEntitiesOffset = 0;
|
||||
withReadLock([&] {
|
||||
QVector<uint16_t> indexesOfEntitiesToInclude;
|
||||
|
||||
|
||||
// It's possible that our element has been previously completed. In this case we'll simply not include any of our
|
||||
// entities for encoding. This is needed because we encode the element data at the "parent" level, and so we
|
||||
// need to handle the case where our sibling elements need encoding but we don't.
|
||||
if (!entityTreeElementExtraEncodeData->elementCompleted) {
|
||||
|
||||
QJsonObject jsonFilters;
|
||||
auto entityNodeData = static_cast<EntityNodeData*>(params.nodeData);
|
||||
|
||||
if (entityNodeData) {
|
||||
// we have an EntityNodeData instance
|
||||
// so we should assume that means we might have JSON filters to check
|
||||
jsonFilters = entityNodeData->getJSONParameters();
|
||||
}
|
||||
|
||||
for (uint16_t i = 0; i < _entityItems.size(); i++) {
|
||||
EntityItemPointer entity = _entityItems[i];
|
||||
bool includeThisEntity = true;
|
||||
|
||||
if (!params.forceSendScene && entity->getLastChangedOnServer() < params.lastViewFrustumSent) {
|
||||
if (!params.forceSendScene && entity->getLastChangedOnServer() < params.lastQuerySent) {
|
||||
includeThisEntity = false;
|
||||
}
|
||||
|
||||
if (hadElementExtraData) {
|
||||
includeThisEntity = includeThisEntity &&
|
||||
entityTreeElementExtraEncodeData->entities.contains(entity->getEntityItemID());
|
||||
// if this entity has been updated since our last full send and there are json filters, check them
|
||||
if (includeThisEntity && !jsonFilters.isEmpty()) {
|
||||
|
||||
// if params include JSON filters, check if this entity matches
|
||||
bool entityMatchesFilters = entity->matchesJSONFilters(jsonFilters);
|
||||
|
||||
if (entityMatchesFilters) {
|
||||
// make sure this entity is in the set of entities sent last frame
|
||||
entityNodeData->insertEntitySentLastFrame(entity->getID());
|
||||
|
||||
} else {
|
||||
// we might include this entity if it matched in the previous frame
|
||||
if (entityNodeData->sentEntityLastFrame(entity->getID())) {
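// it matched last frame, so include it one final time and then stop tracking it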
|
||||
|
||||
entityNodeData->removeEntitySentLastFrame(entity->getID());
|
||||
} else {
|
||||
includeThisEntity = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (includeThisEntity || params.recurseEverything) {
|
||||
if (includeThisEntity && hadElementExtraData) {
|
||||
includeThisEntity = entityTreeElementExtraEncodeData->entities.contains(entity->getEntityItemID());
|
||||
}
|
||||
|
||||
// we only check the bounds against our frustum and LOD if the query has asked us to check against the frustum
|
||||
// which can sometimes not be the case when JSON filters are sent
|
||||
if (params.usesFrustum && (includeThisEntity || params.recurseEverything)) {
|
||||
|
||||
// we want to use the maximum possible box for this, so that we don't have to worry about the nuance of
|
||||
// simulation changing what's visible. consider the case where the entity contains an angular velocity
|
||||
|
@ -925,6 +958,7 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
|
|||
// 3) remember the old cube for the entity so we can mark it as dirty
|
||||
if (entityItem) {
|
||||
QString entityScriptBefore = entityItem->getScript();
|
||||
QString entityServerScriptsBefore = entityItem->getServerScripts();
|
||||
quint64 entityScriptTimestampBefore = entityItem->getScriptTimestamp();
|
||||
bool bestFitBefore = bestFitEntityBounds(entityItem);
|
||||
EntityTreeElementPointer currentContainingElement = _myTree->getContainingElement(entityItemID);
|
||||
|
@ -948,6 +982,7 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
|
|||
}
|
||||
|
||||
QString entityScriptAfter = entityItem->getScript();
|
||||
QString entityServerScriptsAfter = entityItem->getServerScripts();
|
||||
quint64 entityScriptTimestampAfter = entityItem->getScriptTimestamp();
|
||||
bool reload = entityScriptTimestampBefore != entityScriptTimestampAfter;
|
||||
|
||||
|
@ -956,6 +991,9 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
|
|||
if (entityScriptBefore != entityScriptAfter || reload) {
|
||||
_myTree->emitEntityScriptChanging(entityItemID, reload); // the entity script has changed
|
||||
}
|
||||
if (entityServerScriptsBefore != entityServerScriptsAfter || reload) {
|
||||
_myTree->emitEntityServerScriptChanging(entityItemID, reload); // the entity server script has changed
|
||||
}
|
||||
|
||||
} else {
|
||||
entityItem = EntityTypes::constructEntityItem(dataAt, bytesLeftToRead, args);
|
||||
|
|
|
@ -127,7 +127,6 @@ public:
|
|||
|
||||
bool alreadyFullyEncoded(EncodeBitstreamParams& params) const;
|
||||
|
||||
|
||||
/// Override to serialize the state of this element. This is used for persistence and for transmission across the network.
|
||||
virtual OctreeElement::AppendState appendElementData(OctreePacketData* packetData,
|
||||
EncodeBitstreamParams& params) const override;
|
||||
|
|
|
@ -174,7 +174,7 @@ int LightEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags LightEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_IS_SPOTLIGHT;
|
||||
|
|
|
@ -126,7 +126,7 @@ int LineEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags LineEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_COLOR;
|
||||
|
|
|
@ -26,7 +26,7 @@ class LineEntityItem : public EntityItem {
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -160,7 +160,7 @@ int ModelEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
|
|||
return bytesRead;
|
||||
}
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags ModelEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ public:
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -469,7 +469,7 @@ int ParticleEffectEntityItem::readEntitySubclassDataFromBuffer(const unsigned ch
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags ParticleEffectEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
|
||||
|
|
|
@ -170,7 +170,7 @@ int PolyLineEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* da
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags PolyLineEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_COLOR;
|
||||
|
|
|
@ -26,7 +26,7 @@ class PolyLineEntityItem : public EntityItem {
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -179,7 +179,7 @@ int PolyVoxEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* dat
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags PolyVoxEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_VOXEL_VOLUME_SIZE;
|
||||
|
|
|
@ -26,7 +26,7 @@ class PolyVoxEntityItem : public EntityItem {
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -137,7 +137,7 @@ int ShapeEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags ShapeEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_SHAPE;
|
||||
|
|
|
@ -99,7 +99,7 @@ int TextEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags TextEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_TEXT;
|
||||
|
|
|
@ -30,7 +30,7 @@ public:
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -84,7 +84,7 @@ int WebEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data, i
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags WebEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_SOURCE_URL;
|
||||
|
@ -125,7 +125,13 @@ bool WebEntityItem::findDetailedRayIntersection(const glm::vec3& origin, const g
|
|||
|
||||
void WebEntityItem::setSourceUrl(const QString& value) {
|
||||
if (_sourceUrl != value) {
|
||||
_sourceUrl = value;
|
||||
auto newURL = QUrl::fromUserInput(value);
|
||||
|
||||
if (newURL.isValid()) {
|
||||
_sourceUrl = newURL.toDisplayString();
|
||||
} else {
|
||||
qCDebug(entities) << "Clearing web entity source URL since" << value << "cannot be parsed to a valid URL.";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ public:
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -133,7 +133,7 @@ int ZoneEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
|
|||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
EntityPropertyFlags ZoneEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ public:
|
|||
virtual EntityItemProperties getProperties(EntityPropertyFlags desiredProperties = EntityPropertyFlags()) const override;
|
||||
virtual bool setProperties(const EntityItemProperties& properties) override;
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
// TODO: eventually only include properties changed since the params.lastQuerySent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const override;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
|
|
|
@ -48,39 +48,18 @@ public:
|
|||
void update();
|
||||
uvec3 getPageCounts(const uvec3& dimensions) const;
|
||||
uint32_t getPageCount(const uvec3& dimensions) const;
|
||||
uint32_t getSize() const;
|
||||
|
||||
GL45Texture& texture;
|
||||
bool sparse { false };
|
||||
uvec3 pageDimensions { DEFAULT_PAGE_DIMENSION };
|
||||
GLuint maxSparseLevel { DEFAULT_MAX_SPARSE_LEVEL };
|
||||
uint32_t allocatedPages { 0 };
|
||||
uint32_t maxPages { 0 };
|
||||
uint32_t pageBytes { 0 };
|
||||
GLint pageDimensionsIndex { 0 };
|
||||
};
|
||||
|
||||
#if INCREMENTAL_TRANSFER
|
||||
struct TransferState {
|
||||
TransferState(GL45Texture& texture);
|
||||
uvec3 currentPageSize() const;
|
||||
void updateMip();
|
||||
void populatePage(std::vector<uint8_t>& dest);
|
||||
bool increment();
|
||||
|
||||
GL45Texture& texture;
|
||||
GLTexelFormat texelFormat;
|
||||
uint8_t face { 0 };
|
||||
uint16_t mipLevel { 0 };
|
||||
uint32_t bytesPerLine { 0 };
|
||||
uint32_t bytesPerPixel { 0 };
|
||||
uint32_t bytesPerPage { 0 };
|
||||
uvec3 mipDimensions;
|
||||
uvec3 mipOffset;
|
||||
const uint8_t* srcPointer { nullptr };
|
||||
};
|
||||
protected:
|
||||
TransferState _transferState;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
void updateMips() override;
|
||||
void stripToMip(uint16_t newMinMip);
|
||||
|
@ -98,8 +77,6 @@ public:
|
|||
void derez();
|
||||
|
||||
SparseInfo _sparseInfo;
|
||||
uint32_t _allocatedPages { 0 };
|
||||
uint32_t _lastMipAllocatedPages { 0 };
|
||||
uint16_t _mipOffset { 0 };
|
||||
friend class GL45Backend;
|
||||
};
|
||||
|
|
|
@ -116,6 +116,8 @@ void SparseInfo::maybeMakeSparse() {
|
|||
}
|
||||
}
|
||||
|
||||
#define SPARSE_PAGE_SIZE_OVERHEAD_ESTIMATE 1.3f
|
||||
|
||||
// This can only be called after we've established our storage size
|
||||
void SparseInfo::update() {
|
||||
if (!sparse) {
|
||||
|
@ -124,6 +126,9 @@ void SparseInfo::update() {
|
|||
glGetTextureParameterIuiv(texture._id, GL_NUM_SPARSE_LEVELS_ARB, &maxSparseLevel);
|
||||
pageBytes = texture._gpuObject.getTexelFormat().getSize();
|
||||
pageBytes *= pageDimensions.x * pageDimensions.y * pageDimensions.z;
|
||||
// Testing with a simple texture allocating app shows an estimated 20% GPU memory overhead for
|
||||
// sparse textures as compared to non-sparse, so we account for that here.
|
||||
pageBytes = (uint32_t)(pageBytes * SPARSE_PAGE_SIZE_OVERHEAD_ESTIMATE);
|
||||
|
||||
for (uint16_t mipLevel = 0; mipLevel <= maxSparseLevel; ++mipLevel) {
|
||||
auto mipDimensions = texture._gpuObject.evalMipDimensions(mipLevel);
|
||||
|
@ -146,6 +151,11 @@ uint32_t SparseInfo::getPageCount(const uvec3& dimensions) const {
|
|||
return pageCounts.x * pageCounts.y * pageCounts.z;
|
||||
}
|
||||
|
||||
|
||||
uint32_t SparseInfo::getSize() const {
|
||||
return allocatedPages * pageBytes;
|
||||
}
|
||||
|
||||
void GL45Backend::initTextureManagementStage() {
|
||||
// enable the Sparse Texture on gl45
|
||||
_textureManagement._sparseCapable = true;
|
||||
|
@ -160,93 +170,6 @@ void GL45Backend::initTextureManagementStage() {
|
|||
}
|
||||
}
|
||||
|
||||
#if INCREMENTAL_TRANSFER
|
||||
|
||||
using TransferState = GL45Backend::GL45Texture::TransferState;
|
||||
|
||||
TransferState::TransferState(GL45Texture& texture) : texture(texture) {
|
||||
}
|
||||
|
||||
void TransferState::updateMip() {
|
||||
mipDimensions = texture._gpuObject.evalMipDimensions(mipLevel);
|
||||
mipOffset = uvec3();
|
||||
if (!texture._gpuObject.isStoredMipFaceAvailable(mipLevel, face)) {
|
||||
srcPointer = nullptr;
|
||||
return;
|
||||
}
|
||||
|
||||
auto mip = texture._gpuObject.accessStoredMipFace(mipLevel, face);
|
||||
texelFormat = gl::GLTexelFormat::evalGLTexelFormat(texture._gpuObject.getTexelFormat(), mip->getFormat());
|
||||
srcPointer = mip->readData();
|
||||
bytesPerLine = (uint32_t)mip->getSize() / mipDimensions.y;
|
||||
bytesPerPixel = bytesPerLine / mipDimensions.x;
|
||||
}
|
||||
|
||||
bool TransferState::increment() {
|
||||
const SparseInfo& sparse = texture._sparseInfo;
|
||||
if ((mipOffset.x + sparse.pageDimensions.x) < mipDimensions.x) {
|
||||
mipOffset.x += sparse.pageDimensions.x;
|
||||
return true;
|
||||
}
|
||||
|
||||
if ((mipOffset.y + sparse.pageDimensions.y) < mipDimensions.y) {
|
||||
mipOffset.x = 0;
|
||||
mipOffset.y += sparse.pageDimensions.y;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (mipOffset.z + sparse.pageDimensions.z < mipDimensions.z) {
|
||||
mipOffset.x = 0;
|
||||
mipOffset.y = 0;
|
||||
++mipOffset.z;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Done with this mip?, move on to the next mip
|
||||
if (mipLevel + 1 < texture.usedMipLevels()) {
|
||||
mipOffset = uvec3(0);
|
||||
++mipLevel;
|
||||
updateMip();
|
||||
return true;
|
||||
}
|
||||
|
||||
uint8_t maxFace = (uint8_t)((texture._target == GL_TEXTURE_CUBE_MAP) ? GLTexture::CUBE_NUM_FACES : 1);
|
||||
uint8_t nextFace = face + 1;
|
||||
// Done with this face? Move on to the next
|
||||
if (nextFace < maxFace) {
|
||||
++face;
|
||||
mipOffset = uvec3(0);
|
||||
mipLevel = 0;
|
||||
updateMip();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void TransferState::populatePage(std::vector<uint8_t>& buffer) {
|
||||
uvec3 pageSize = currentPageSize();
|
||||
auto bytesPerPageLine = bytesPerPixel * pageSize.x;
|
||||
if (0 != (bytesPerPageLine % DEFAULT_GL_PIXEL_ALIGNMENT)) {
|
||||
bytesPerPageLine += DEFAULT_GL_PIXEL_ALIGNMENT - (bytesPerPageLine % DEFAULT_GL_PIXEL_ALIGNMENT);
|
||||
assert(0 == (bytesPerPageLine % DEFAULT_GL_PIXEL_ALIGNMENT));
|
||||
}
|
||||
auto totalPageSize = bytesPerPageLine * pageSize.y;
|
||||
if (totalPageSize > buffer.size()) {
|
||||
buffer.resize(totalPageSize);
|
||||
}
|
||||
uint8_t* dst = &buffer[0];
|
||||
for (uint32_t y = 0; y < pageSize.y; ++y) {
|
||||
uint32_t srcOffset = (bytesPerLine * (mipOffset.y + y)) + (bytesPerPixel * mipOffset.x);
|
||||
uint32_t dstOffset = bytesPerPageLine * y;
|
||||
memcpy(dst + dstOffset, srcPointer + srcOffset, pageSize.x * bytesPerPixel);
|
||||
}
|
||||
}
|
||||
|
||||
uvec3 TransferState::currentPageSize() const {
|
||||
return glm::clamp(mipDimensions - mipOffset, uvec3(1), texture._sparseInfo.pageDimensions);
|
||||
}
|
||||
#endif
|
||||
|
||||
GLuint GL45Texture::allocate(const Texture& texture) {
|
||||
GLuint result;
|
||||
|
@ -260,17 +183,11 @@ GLuint GL45Backend::getTextureID(const TexturePointer& texture, bool transfer) {
|
|||
|
||||
GL45Texture::GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, GLuint externalId)
|
||||
: GLTexture(backend, texture, externalId), _sparseInfo(*this)
|
||||
#if INCREMENTAL_TRANSFER
|
||||
, _transferState(*this)
|
||||
#endif
|
||||
{
|
||||
}
|
||||
|
||||
GL45Texture::GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, bool transferrable)
|
||||
: GLTexture(backend, texture, allocate(texture), transferrable), _sparseInfo(*this)
|
||||
#if INCREMENTAL_TRANSFER
|
||||
, _transferState(*this)
|
||||
#endif
|
||||
{
|
||||
|
||||
auto theBackend = _backend.lock();
|
||||
|
@ -316,12 +233,12 @@ GL45Texture::~GL45Texture() {
|
|||
});
|
||||
|
||||
auto deallocatedPages = _sparseInfo.getPageCount(mipDimensions) * maxFace;
|
||||
assert(deallocatedPages <= _allocatedPages);
|
||||
_allocatedPages -= deallocatedPages;
|
||||
assert(deallocatedPages <= _sparseInfo.allocatedPages);
|
||||
_sparseInfo.allocatedPages -= deallocatedPages;
|
||||
}
|
||||
|
||||
if (0 != _allocatedPages) {
|
||||
qCWarning(gpugl45logging) << "Allocated pages remaining " << _id << " " << _allocatedPages;
|
||||
if (0 != _sparseInfo.allocatedPages) {
|
||||
        qCWarning(gpugl45logging) << "Allocated pages remaining " << _id << " " << _sparseInfo.allocatedPages;
    }

    auto size = _size;

@@ -365,9 +282,9 @@ void GL45Texture::updateSize() const {
    }

    if (_transferrable && _sparseInfo.sparse) {
        auto size = _allocatedPages * _sparseInfo.pageBytes;
        auto size = _sparseInfo.getSize();
        Backend::updateTextureGPUSparseMemoryUsage(_size, size);
        setSize(_allocatedPages * _sparseInfo.pageBytes);
        setSize(size);
    } else {
        setSize(_gpuObject.evalTotalSize(_mipOffset));
    }

@@ -376,20 +293,16 @@ void GL45Texture::updateSize() const {
void GL45Texture::startTransfer() {
    Parent::startTransfer();
    _sparseInfo.update();
#if INCREMENTAL_TRANSFER
    _transferState.updateMip();
#endif
}

bool GL45Texture::continueTransfer() {
#if !INCREMENTAL_TRANSFER
    size_t maxFace = GL_TEXTURE_CUBE_MAP == _target ? CUBE_NUM_FACES : 1;
    for (uint8_t face = 0; face < maxFace; ++face) {
        for (uint16_t mipLevel = _minMip; mipLevel <= _maxMip; ++mipLevel) {
            auto size = _gpuObject.evalMipDimensions(mipLevel);
            if (_sparseInfo.sparse && mipLevel <= _sparseInfo.maxSparseLevel) {
                glTexturePageCommitmentEXT(_id, mipLevel, 0, 0, face, size.x, size.y, 1, GL_TRUE);
                _allocatedPages += _sparseInfo.getPageCount(size);
                _sparseInfo.allocatedPages += _sparseInfo.getPageCount(size);
            }
            if (_gpuObject.isStoredMipFaceAvailable(mipLevel, face)) {
                auto mip = _gpuObject.accessStoredMipFace(mipLevel, face);

@@ -413,58 +326,6 @@ bool GL45Texture::continueTransfer() {
        }
    }
    return false;
#else
    static std::vector<uint8_t> buffer;
    if (buffer.empty()) {
        buffer.resize(DEFAULT_PAGE_BUFFER_SIZE);
    }
    const uvec3 pageSize = _transferState.currentPageSize();
    const uvec3& offset = _transferState.mipOffset;

    if (_sparseInfo.sparse && _transferState.mipLevel <= _sparseInfo.maxSparseLevel) {
        if (_allocatedPages > _sparseInfo.maxPages) {
            qCWarning(gpugl45logging) << "Exceeded max page allocation!";
        }
        glTexturePageCommitmentEXT(_id, _transferState.mipLevel,
            offset.x, offset.y, _transferState.face,
            pageSize.x, pageSize.y, pageSize.z,
            GL_TRUE);
        ++_allocatedPages;
    }

    if (_transferState.srcPointer) {
        // Transfer the mip data
        _transferState.populatePage(buffer);
        if (GL_TEXTURE_2D == _target) {
            glTextureSubImage2D(_id, _transferState.mipLevel,
                offset.x, offset.y,
                pageSize.x, pageSize.y,
                _transferState.texelFormat.format, _transferState.texelFormat.type, &buffer[0]);
        } else if (GL_TEXTURE_CUBE_MAP == _target) {
            auto target = CUBE_FACE_LAYOUT[_transferState.face];
            // DSA ARB does not work on AMD, so use EXT
            // glTextureSubImage3D(_id, mipLevel, 0, 0, face, size.x, size.y, 1, texelFormat.format, texelFormat.type, mip->readData());
            glTextureSubImage2DEXT(_id, target, _transferState.mipLevel,
                offset.x, offset.y,
                pageSize.x, pageSize.y,
                _transferState.texelFormat.format, _transferState.texelFormat.type, &buffer[0]);
        }
    }

    serverWait();
    auto currentMip = _transferState.mipLevel;
    auto result = _transferState.increment();
    if (_sparseInfo.sparse && _transferState.mipLevel != currentMip && currentMip <= _sparseInfo.maxSparseLevel) {
        auto mipDimensions = _gpuObject.evalMipDimensions(currentMip);
        auto mipExpectedPages = _sparseInfo.getPageCount(mipDimensions);
        auto newPages = _allocatedPages - _lastMipAllocatedPages;
        if (newPages != mipExpectedPages) {
            qCWarning(gpugl45logging) << "Unexpected page allocation size... " << newPages << " " << mipExpectedPages;
        }
        _lastMipAllocatedPages = _allocatedPages;
    }
    return result;
#endif
}

void GL45Texture::finishTransfer() {

@@ -545,8 +406,8 @@ void GL45Texture::stripToMip(uint16_t newMinMip) {
        });

        auto deallocatedPages = _sparseInfo.getPageCount(mipDimensions) * maxFace;
        assert(deallocatedPages < _allocatedPages);
        _allocatedPages -= deallocatedPages;
        assert(deallocatedPages < _sparseInfo.allocatedPages);
        _sparseInfo.allocatedPages -= deallocatedPages;
    }
    _minMip = newMinMip;
} else {

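The GL45Texture hunks above fold the per-texture page bookkeeping into the sparse-info helper: the counter moves from _allocatedPages to _sparseInfo.allocatedPages, and the committed size is reported through _sparseInfo.getSize() instead of being recomputed as pages times page bytes at each call site. The following is a minimal sketch of the accounting this implies; any field or method not visible in the diff (pageDimensions, the struct name itself) is an assumption, not the actual gpu-gl declaration.

// Illustrative sketch only; the real sparse-info struct lives in the gpu-gl backend headers.
#include <glm/glm.hpp>
#include <cstdint>

struct SparseInfoSketch {
    bool sparse { false };
    uint32_t allocatedPages { 0 };    // pages currently committed for this texture
    uint32_t maxPages { 0 };          // commitment budget checked in continueTransfer()
    uint32_t pageBytes { 0 };         // bytes per committed page
    glm::uvec3 pageDimensions { 1 };  // assumed field: texel extent of one page

    // Pages needed to cover a mip of the given dimensions (rounding up on each axis),
    // matching the getPageCount(mipDimensions) calls in the diff.
    uint32_t getPageCount(const glm::uvec3& dimensions) const {
        auto pages = (dimensions + pageDimensions - glm::uvec3(1)) / pageDimensions;
        return pages.x * pages.y * pages.z;
    }

    // Committed size in bytes, matching setSize(_sparseInfo.getSize()) in updateSize().
    uint32_t getSize() const { return allocatedPages * pageBytes; }
};
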
@@ -21,11 +21,10 @@
#include <DependencyManager.h>

#include "AssetUtils.h"
#include "ClientServerUtils.h"
#include "LimitedNodeList.h"
#include "NLPacket.h"
#include "Node.h"
#include "ReceivedMessage.h"
#include "ResourceCache.h"

class GetMappingRequest;
class SetMappingRequest;

@@ -60,8 +59,6 @@ public:
    Q_INVOKABLE AssetUpload* createUpload(const QString& filename);
    Q_INVOKABLE AssetUpload* createUpload(const QByteArray& data);

    static const MessageID INVALID_MESSAGE_ID = 0;

public slots:
    void init();

@@ -77,7 +77,7 @@ void AssetRequest::start() {
    _assetInfoRequestID = assetClient->getAssetInfo(_hash,
        [this](bool responseReceived, AssetServerError serverError, AssetInfo info) {

        _assetInfoRequestID = AssetClient::INVALID_MESSAGE_ID;
        _assetInfoRequestID = INVALID_MESSAGE_ID;

        _info = info;

@@ -119,7 +119,7 @@ void AssetRequest::start() {
            // If the request is dead, return
            return;
        }
        _assetRequestID = AssetClient::INVALID_MESSAGE_ID;
        _assetRequestID = INVALID_MESSAGE_ID;

        if (!responseReceived) {
            _error = NetworkError;

@@ -64,8 +64,8 @@ private:
    QString _hash;
    QByteArray _data;
    int _numPendingRequests { 0 };
    MessageID _assetRequestID { AssetClient::INVALID_MESSAGE_ID };
    MessageID _assetInfoRequestID { AssetClient::INVALID_MESSAGE_ID };
    MessageID _assetRequestID { INVALID_MESSAGE_ID };
    MessageID _assetInfoRequestID { INVALID_MESSAGE_ID };
};

#endif

@@ -19,7 +19,6 @@
#include <QtCore/QByteArray>
#include <QtCore/QUrl>

using MessageID = uint32_t;
using DataOffset = int64_t;

using AssetPath = QString;

@@ -64,4 +63,4 @@ bool isValidFilePath(const AssetPath& path);
bool isValidPath(const AssetPath& path);
bool isValidHash(const QString& hashString);

#endif
#endif // hifi_AssetUtils_h

@@ -35,6 +35,8 @@ Assignment::Type Assignment::typeForNodeType(NodeType_t nodeType) {
            return Assignment::AssetServerType;
        case NodeType::MessagesMixer:
            return Assignment::MessagesMixerType;
        case NodeType::EntityScriptServer:
            return Assignment::EntityScriptServerType;
        default:
            return Assignment::AllTypes;
    }

@@ -139,6 +141,8 @@ const char* Assignment::getTypeName() const {
            return "entity-server";
        case Assignment::MessagesMixerType:
            return "messages-mixer";
        case Assignment::EntityScriptServerType:
            return "entity-script-server";
        default:
            return "unknown";
    }

@@ -33,7 +33,7 @@ public:
        AgentType = 2,
        AssetServerType = 3,
        MessagesMixerType = 4,
        UNUSED_1 = 5,
        EntityScriptServerType = 5,
        EntityServerType = 6,
        AllTypes = 7
    };

libraries/networking/src/ClientServerUtils.h (new file, 21 lines)

@@ -0,0 +1,21 @@
//
// ClientServerUtils.h
// libraries/networking/src
//
// Created by Ryan Huffman on 2017/01/20
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_ClientServerUtils_h
#define hifi_ClientServerUtils_h

#include <cstdint>

using MessageID = uint32_t;
const MessageID INVALID_MESSAGE_ID = 0;

#endif // hifi_ClientServerUtils_h

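The new ClientServerUtils.h centralizes the request-tracking types that AssetClient previously owned: MessageID moves out of AssetUtils.h, and the shared INVALID_MESSAGE_ID sentinel replaces AssetClient::INVALID_MESSAGE_ID across the request classes above and below. A small sketch of the intended pattern; the ExampleRequest class is hypothetical and only stands in for the real request types.

// Hypothetical request class showing the shared-sentinel pattern; not code from this commit.
#include "ClientServerUtils.h"

class ExampleRequest {
public:
    bool isPending() const { return _requestID != INVALID_MESSAGE_ID; }
    void markSent(MessageID id) { _requestID = id; }          // remember the outstanding message
    void markFinished() { _requestID = INVALID_MESSAGE_ID; }  // reset once a reply or failure arrives

private:
    MessageID _requestID { INVALID_MESSAGE_ID };
};
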
libraries/networking/src/EntityScriptClient.cpp (new file, 165 lines)

@@ -0,0 +1,165 @@
#include "EntityScriptClient.h"
#include "NodeList.h"
#include "NetworkLogging.h"
#include "EntityScriptUtils.h"

#include <QThread>

MessageID EntityScriptClient::_currentID = 0;

GetScriptStatusRequest::GetScriptStatusRequest(QUuid entityID) : _entityID(entityID) {
}

GetScriptStatusRequest::~GetScriptStatusRequest() {

}

void GetScriptStatusRequest::start() {
    auto client = DependencyManager::get<EntityScriptClient>();
    client->getEntityServerScriptStatus(_entityID, [this](bool responseReceived, bool isRunning, EntityScriptStatus status, QString errorInfo) {
        _responseReceived = responseReceived;
        _isRunning = isRunning;
        _status = status;
        _errorInfo = errorInfo;

        emit finished(this);
    });
}

EntityScriptClient::EntityScriptClient() {
    setCustomDeleter([](Dependency* dependency){
        static_cast<EntityScriptClient*>(dependency)->deleteLater();
    });

    auto nodeList = DependencyManager::get<NodeList>();
    auto& packetReceiver = nodeList->getPacketReceiver();

    packetReceiver.registerListener(PacketType::EntityScriptGetStatusReply, this, "handleGetScriptStatusReply");

    connect(nodeList.data(), &LimitedNodeList::nodeKilled, this, &EntityScriptClient::handleNodeKilled);
    connect(nodeList.data(), &LimitedNodeList::clientConnectionToNodeReset,
            this, &EntityScriptClient::handleNodeClientConnectionReset);
}

GetScriptStatusRequest* EntityScriptClient::createScriptStatusRequest(QUuid entityID) {
    auto request = new GetScriptStatusRequest(entityID);

    request->moveToThread(thread());

    return request;
}

bool EntityScriptClient::reloadServerScript(QUuid entityID) {
    // Send packet to entity script server
    auto nodeList = DependencyManager::get<NodeList>();
    SharedNodePointer entityScriptServer = nodeList->soloNodeOfType(NodeType::EntityScriptServer);

    if (entityScriptServer) {
        auto id = entityID.toRfc4122();
        auto payloadSize = id.size();
        auto packet = NLPacket::create(PacketType::ReloadEntityServerScript, payloadSize, true);

        packet->write(id);

        if (nodeList->sendPacket(std::move(packet), *entityScriptServer) != -1) {
            return true;
        }
    }

    return false;
}

MessageID EntityScriptClient::getEntityServerScriptStatus(QUuid entityID, GetScriptStatusCallback callback) {
    auto nodeList = DependencyManager::get<NodeList>();
    SharedNodePointer entityScriptServer = nodeList->soloNodeOfType(NodeType::EntityScriptServer);

    if (entityScriptServer) {
        auto packetList = NLPacketList::create(PacketType::EntityScriptGetStatus, QByteArray(), true, true);

        auto messageID = ++_currentID;
        packetList->writePrimitive(messageID);

        packetList->write(entityID.toRfc4122());

        if (nodeList->sendPacketList(std::move(packetList), *entityScriptServer) != -1) {
            _pendingEntityScriptStatusRequests[entityScriptServer][messageID] = callback;

            return messageID;
        }
    }

    callback(false, false, ERROR_LOADING_SCRIPT, "");
    return INVALID_MESSAGE_ID;
}

void EntityScriptClient::handleGetScriptStatusReply(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
    Q_ASSERT(QThread::currentThread() == thread());

    MessageID messageID;
    bool isKnown { false };
    EntityScriptStatus status = ERROR_LOADING_SCRIPT;
    QString errorInfo { "" };

    message->readPrimitive(&messageID);
    message->readPrimitive(&isKnown);

    if (isKnown) {
        message->readPrimitive(&status);
        errorInfo = message->readString();
    }

    // Check if we have any pending requests for this node
    auto messageMapIt = _pendingEntityScriptStatusRequests.find(senderNode);
    if (messageMapIt != _pendingEntityScriptStatusRequests.end()) {

        // Found the node, get the MessageID -> Callback map
        auto& messageCallbackMap = messageMapIt->second;

        // Check if we have this pending request
        auto requestIt = messageCallbackMap.find(messageID);
        if (requestIt != messageCallbackMap.end()) {
            auto callback = requestIt->second;
            callback(true, isKnown, status, errorInfo);
            messageCallbackMap.erase(requestIt);
        }

        // Although the messageCallbackMap may now be empty, we won't delete the node until we have disconnected from
        // it to avoid constantly creating/deleting the map on subsequent requests.
    }
}

void EntityScriptClient::handleNodeKilled(SharedNodePointer node) {
    Q_ASSERT(QThread::currentThread() == thread());

    if (node->getType() != NodeType::EntityScriptServer) {
        return;
    }

    forceFailureOfPendingRequests(node);
}

void EntityScriptClient::handleNodeClientConnectionReset(SharedNodePointer node) {
    // a client connection to a Node was reset
    // if it was an EntityScriptServer we need to cause anything pending to fail so it is re-attempted

    if (node->getType() != NodeType::EntityScriptServer) {
        return;
    }

    //qCDebug(entity_script_client) << "EntityScriptClient detected client connection reset handshake with Asset Server - failing any pending requests";

    forceFailureOfPendingRequests(node);
}

void EntityScriptClient::forceFailureOfPendingRequests(SharedNodePointer node) {

    {
        auto messageMapIt = _pendingEntityScriptStatusRequests.find(node);
        if (messageMapIt != _pendingEntityScriptStatusRequests.end()) {
            for (const auto& value : messageMapIt->second) {
                value.second(false, false, ERROR_LOADING_SCRIPT, "");
            }
            messageMapIt->second.clear();
        }
    }
}

libraries/networking/src/EntityScriptClient.h (new file, 75 lines)

@@ -0,0 +1,75 @@
//
// EntityScriptClient.h
// libraries/networking/src
//
// Created by Ryan Huffman on 2017/01/13
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_EntityScriptClient_h
#define hifi_EntityScriptClient_h

#include "ClientServerUtils.h"
#include "LimitedNodeList.h"
#include "ReceivedMessage.h"
#include "AssetUtils.h"
#include "EntityScriptUtils.h"

#include <DependencyManager.h>
#include <unordered_map>

using GetScriptStatusCallback = std::function<void(bool responseReceived, bool isRunning, EntityScriptStatus status, QString errorInfo)>;

class GetScriptStatusRequest : public QObject {
    Q_OBJECT
public:
    GetScriptStatusRequest(QUuid);
    ~GetScriptStatusRequest();

    Q_INVOKABLE void start();

    bool getResponseReceived() const { return _responseReceived; }
    bool getIsRunning() const { return _isRunning; }
    EntityScriptStatus getStatus() const { return _status; }
    QString getErrorInfo() const { return _errorInfo; }

signals:
    void finished(GetScriptStatusRequest* request);

private:
    QUuid _entityID;
    MessageID _messageID;

    bool _responseReceived;
    bool _isRunning;
    EntityScriptStatus _status;
    QString _errorInfo;
};

class EntityScriptClient : public QObject, public Dependency {
    Q_OBJECT
public:
    EntityScriptClient();

    Q_INVOKABLE GetScriptStatusRequest* createScriptStatusRequest(QUuid entityID);

    bool reloadServerScript(QUuid entityID);
    MessageID getEntityServerScriptStatus(QUuid entityID, GetScriptStatusCallback callback);

private slots:
    void handleNodeKilled(SharedNodePointer node);
    void handleNodeClientConnectionReset(SharedNodePointer node);

    void handleGetScriptStatusReply(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);

private:
    static MessageID _currentID;
    std::unordered_map<SharedNodePointer, std::unordered_map<MessageID, GetScriptStatusCallback>> _pendingEntityScriptStatusRequests;

    void forceFailureOfPendingRequests(SharedNodePointer node);
};

#endif

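Taken together, EntityScriptClient.cpp and EntityScriptClient.h give interface-side code a way to ask the entity script server to reload an entity's server script and to query its status. A minimal usage sketch follows; it assumes DependencyManager::set<EntityScriptClient>() has already run during client startup, and the function name checkServerScript is illustrative only, not part of this commit.

// Usage sketch under the assumptions stated above.
#include "EntityScriptClient.h"
#include <QtCore/QDebug>

void checkServerScript(const QUuid& entityID) {
    auto client = DependencyManager::get<EntityScriptClient>();

    // Ask the entity script server to reload the script; returns false if no server is available.
    client->reloadServerScript(entityID);

    // Request the current status and handle the reply when it arrives.
    auto request = client->createScriptStatusRequest(entityID);
    QObject::connect(request, &GetScriptStatusRequest::finished, [](GetScriptStatusRequest* request) {
        if (request->getResponseReceived() && request->getStatus() == RUNNING) {
            qDebug() << "entity server script is running";
        } else {
            qDebug() << "entity server script problem:" << request->getErrorInfo();
        }
        request->deleteLater(); // the caller owns the request, so clean it up here
    });
    QMetaObject::invokeMethod(request, "start"); // start() is Q_INVOKABLE and runs on the client's thread
}
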
libraries/networking/src/EntityScriptUtils.h (new file, 21 lines)

@@ -0,0 +1,21 @@
//
// EntityScriptUtils.h
// libraries/networking/src
//
// Created by Ryan Huffman on 2017/01/13
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_EntityScriptUtils_h
#define hifi_EntityScriptUtils_h

enum EntityScriptStatus {
    ERROR_LOADING_SCRIPT,
    ERROR_RUNNING_SCRIPT,
    RUNNING
};

#endif // hifi_EntityScriptUtils_h

@@ -68,7 +68,7 @@ void GetMappingRequest::doStart() {
    _mappingRequestID = assetClient->getAssetMapping(_path,
        [this, assetClient](bool responseReceived, AssetServerError error, QSharedPointer<ReceivedMessage> message) {

        _mappingRequestID = AssetClient::INVALID_MESSAGE_ID;
        _mappingRequestID = INVALID_MESSAGE_ID;
        if (!responseReceived) {
            _error = NetworkError;
        } else {

@@ -100,7 +100,7 @@ void GetAllMappingsRequest::doStart() {
    _mappingRequestID = assetClient->getAllAssetMappings(
        [this, assetClient](bool responseReceived, AssetServerError error, QSharedPointer<ReceivedMessage> message) {

        _mappingRequestID = AssetClient::INVALID_MESSAGE_ID;
        _mappingRequestID = INVALID_MESSAGE_ID;

        if (!responseReceived) {
            _error = NetworkError;

@@ -152,7 +152,7 @@ void SetMappingRequest::doStart() {
    _mappingRequestID = assetClient->setAssetMapping(_path, _hash,
        [this, assetClient](bool responseReceived, AssetServerError error, QSharedPointer<ReceivedMessage> message) {

        _mappingRequestID = AssetClient::INVALID_MESSAGE_ID;
        _mappingRequestID = INVALID_MESSAGE_ID;
        if (!responseReceived) {
            _error = NetworkError;
        } else {

@@ -195,7 +195,7 @@ void DeleteMappingsRequest::doStart() {
    _mappingRequestID = assetClient->deleteAssetMappings(_paths,
        [this, assetClient](bool responseReceived, AssetServerError error, QSharedPointer<ReceivedMessage> message) {

        _mappingRequestID = AssetClient::INVALID_MESSAGE_ID;
        _mappingRequestID = INVALID_MESSAGE_ID;
        if (!responseReceived) {
            _error = NetworkError;
        } else {

@@ -237,7 +237,7 @@ void RenameMappingRequest::doStart() {
    _mappingRequestID = assetClient->renameAssetMapping(_oldPath, _newPath,
        [this, assetClient](bool responseReceived, AssetServerError error, QSharedPointer<ReceivedMessage> message) {

        _mappingRequestID = AssetClient::INVALID_MESSAGE_ID;
        _mappingRequestID = INVALID_MESSAGE_ID;
        if (!responseReceived) {
            _error = NetworkError;
        } else {

@@ -40,7 +40,7 @@ public:

protected:
    Error _error { NoError };
    MessageID _mappingRequestID { AssetClient::INVALID_MESSAGE_ID };
    MessageID _mappingRequestID { INVALID_MESSAGE_ID };

private:
    virtual void doStart() = 0;

@@ -17,6 +17,7 @@
Q_DECLARE_LOGGING_CATEGORY(resourceLog)
Q_DECLARE_LOGGING_CATEGORY(networking)
Q_DECLARE_LOGGING_CATEGORY(asset_client)
Q_DECLARE_LOGGING_CATEGORY(entity_script_client)
Q_DECLARE_LOGGING_CATEGORY(messages_client)

#endif // hifi_NetworkLogging_h

Some files were not shown because too many files have changed in this diff.