mirror of https://github.com/overte-org/overte.git
synced 2025-04-21 08:04:01 +02:00

commit 23fc9b6fb3

Merge branch 'master' of https://github.com/highfidelity/hifi into pal-performance

20 changed files with 306 additions and 98 deletions
@@ -316,6 +316,10 @@ void AudioMixer::sendStatsPacket() {
     addTiming(_mixTiming, "mix");
     addTiming(_eventsTiming, "events");
 
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    timingStats["ns_per_throttle"] = (_stats.totalMixes > 0) ? (float)(_stats.throttleTime / _stats.totalMixes) : 0;
+#endif
+
     // call it "avg_..." to keep it higher in the display, sorted alphabetically
     statsObject["avg_timing_stats"] = timingStats;
 
@@ -46,10 +46,12 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
 void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
 
 // mix helpers
-bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node);
-float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-        const glm::vec3& relativePosition, bool isEcho);
-float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-        const glm::vec3& relativePosition);
+inline bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node);
+inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition);
+inline float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition, bool isEcho);
+inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition);
 
 void AudioMixerSlave::configure(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
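Note on the declarations above: the old gainForSource is split into two paths. approximateGain is a cheap estimate (injector attenuation plus plain 1/distance falloff) used while throttling to rank nodes by audibility; computeGain is the full per-stream gain (off-axis, zone, and distance attenuation) used when a stream is actually mixed. computeAzimuth is azimuthForSource renamed.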
@@ -126,9 +128,10 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
         AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
     auto allStreams = [&](const SharedNodePointer& node, MixFunctor mixFunctor) {
         AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
+        auto nodeID = node->getUUID();
         for (auto& streamPair : nodeData->getAudioStreams()) {
             auto nodeStream = streamPair.second;
-            (this->*mixFunctor)(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
+            (this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream);
         }
     };
 
@@ -147,14 +150,28 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
         if (!isThrottling) {
             allStreams(node, &AudioMixerSlave::mixStream);
         } else {
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+            auto throttleStart = p_high_resolution_clock::now();
+#endif
+
             AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
+            auto nodeID = node->getUUID();
 
             // compute the node's max relative volume
             float nodeVolume;
             for (auto& streamPair : nodeData->getAudioStreams()) {
                 auto nodeStream = streamPair.second;
-                float distance = glm::length(nodeStream->getPosition() - listenerAudioStream->getPosition());
-                nodeVolume = std::max(nodeStream->getLastPopOutputTrailingLoudness() / distance, nodeVolume);
+
+                // approximate the gain
+                glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition();
+                float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition);
+
+                // modify by hrtf gain adjustment
+                auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier());
+                gain *= hrtf.getGainAdjustment();
+
+                auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain;
+                nodeVolume = std::max(streamVolume, nodeVolume);
             }
 
             // max-heapify the nodes by relative volume
@@ -162,6 +179,13 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
             if (!throttledNodes.empty()) {
                 std::push_heap(throttledNodes.begin(), throttledNodes.end());
             }
+
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+            auto throttleEnd = p_high_resolution_clock::now();
+            uint64_t throttleTime =
+                std::chrono::duration_cast<std::chrono::nanoseconds>(throttleEnd - throttleStart).count();
+            stats.throttleTime += throttleTime;
+#endif
         }
     }
    });
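For context, the throttling pass above boils down to: estimate a relative volume per node, max-heapify, then pop and fully mix the loudest nodes until the frame's budget runs out, leaving the rest to a cheaper throttled render. A minimal standalone sketch of that shape (illustrative names, not the mixer's real types):

    #include <algorithm>
    #include <utility>
    #include <vector>

    // each entry is { approximate relative volume, node index }
    void mixLoudestFirst(std::vector<std::pair<float, int>> nodes, int mixBudget) {
        std::make_heap(nodes.begin(), nodes.end());      // max-heap keyed on volume
        while (!nodes.empty() && mixBudget-- > 0) {
            std::pop_heap(nodes.begin(), nodes.end());   // loudest moves to the back
            int loudest = nodes.back().second;
            nodes.pop_back();
            (void)loudest;                               // a full mixStream() would run here
        }
        // entries still in the heap are throttled for this frame
    }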
@@ -227,9 +251,9 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
     glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
 
     float distance = glm::max(glm::length(relativePosition), EPSILON);
-    float gain = gainForSource(listeningNodeStream, streamToAdd, relativePosition, isEcho);
-    float azimuth = isEcho ? 0.0f : azimuthForSource(listeningNodeStream, listeningNodeStream, relativePosition);
-    static const int HRTF_DATASET_INDEX = 1;
+    float gain = computeGain(listeningNodeStream, streamToAdd, relativePosition, isEcho);
+    float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
+    const int HRTF_DATASET_INDEX = 1;
 
     if (!streamToAdd.lastPopSucceeded()) {
         bool forceSilentBlock = true;
@@ -330,7 +354,7 @@ std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 s
 }
 
 void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QByteArray& buffer) {
-    static const int MIX_PACKET_SIZE =
+    const int MIX_PACKET_SIZE =
         sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE + AudioConstants::NETWORK_FRAME_BYTES_STEREO;
     quint16 sequence = data.getOutgoingSequenceNumber();
     QString codec = data.getCodecName();
@@ -345,7 +369,7 @@ void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QB
 }
 
 void sendSilentPacket(const SharedNodePointer& node, AudioMixerClientData& data) {
-    static const int SILENT_PACKET_SIZE =
+    const int SILENT_PACKET_SIZE =
         sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE + sizeof(quint16);
     quint16 sequence = data.getOutgoingSequenceNumber();
     QString codec = data.getCodecName();
@@ -475,40 +499,54 @@ bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer
     return ignore;
 }
 
-float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-        const glm::vec3& relativePosition, bool isEcho) {
+static const float ATTENUATION_START_DISTANCE = 1.0f;
+
+float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition) {
     float gain = 1.0f;
 
-    float distanceBetween = glm::length(relativePosition);
-
-    if (distanceBetween < EPSILON) {
-        distanceBetween = EPSILON;
-    }
-
     // injector: apply attenuation
     if (streamToAdd.getType() == PositionalAudioStream::Injector) {
         gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
     }
 
-    if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) {
-        // source is another avatar, apply fixed off-axis attenuation to make them quieter as they turn away from listener
-        glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition;
+    // avatar: skip attenuation - it is too costly to approximate
+
+    // distance attenuation: approximate, ignore zone-specific attenuations
+    // this is a good approximation for streams further than ATTENUATION_START_DISTANCE
+    // those streams closer will be amplified; amplifying close streams is acceptable
+    // when throttling, as close streams are expected to be heard by a user
+    float distance = glm::length(relativePosition);
+    return gain / distance;
+}
+
+float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition, bool isEcho) {
+    float gain = 1.0f;
+
+    // injector: apply attenuation
+    if (streamToAdd.getType() == PositionalAudioStream::Injector) {
+        gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
+
+    // avatar: apply fixed off-axis attenuation to make them quieter as they turn away
+    } else if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) {
+        glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition;
         float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
                                            glm::normalize(rotatedListenerPosition));
 
         const float MAX_OFF_AXIS_ATTENUATION = 0.2f;
-        const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
+        const float OFF_AXIS_ATTENUATION_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
         float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION +
-                                    (OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / PI_OVER_TWO));
+            (angleOfDelivery * (OFF_AXIS_ATTENUATION_STEP / PI_OVER_TWO));
 
         // multiply the current attenuation coefficient by the calculated off axis coefficient
         gain *= offAxisCoefficient;
     }
 
-    float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
-    auto& zoneSettings = AudioMixer::getZoneSettings();
     auto& audioZones = AudioMixer::getAudioZones();
+    auto& zoneSettings = AudioMixer::getZoneSettings();
+
+    // find distance attenuation coefficient
+    float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
     for (int i = 0; i < zoneSettings.length(); ++i) {
         if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) &&
             audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) {
@@ -517,16 +555,17 @@ float gainForSource(const AvatarAudioStream& listeningNodeStream, const Position
         }
     }
 
-    const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
-    if (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE) {
+    // distance attenuation
+    float distance = glm::length(relativePosition);
+    assert(ATTENUATION_START_DISTANCE > EPSILON);
+    if (distance >= ATTENUATION_START_DISTANCE) {
 
         // translate the zone setting to gain per log2(distance)
         float g = 1.0f - attenuationPerDoublingInDistance;
-        g = (g < EPSILON) ? EPSILON : g;
-        g = (g > 1.0f) ? 1.0f : g;
+        g = glm::clamp(g, EPSILON, 1.0f);
 
         // calculate the distance coefficient using the distance to this node
-        float distanceCoefficient = fastExp2f(fastLog2f(g) * fastLog2f(distanceBetween/ATTENUATION_BEGINS_AT_DISTANCE));
+        float distanceCoefficient = fastExp2f(fastLog2f(g) * fastLog2f(distance/ATTENUATION_START_DISTANCE));
 
         // multiply the current attenuation coefficient by the distance coefficient
         gain *= distanceCoefficient;
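The zone setting is expressed as attenuation per doubling of distance, so with g = 1 - attenuationPerDoublingInDistance the coefficient at distance d (relative to the 1 m start distance) is g raised to log2(d): each doubling of distance multiplies the gain by g. A plain <cmath> reference for the fastExp2f/fastLog2f expression above, useful for checking:

    #include <cmath>

    float distanceCoefficientRef(float g, float distance, float startDistance = 1.0f) {
        // exp2(log2(g) * log2(d/d0)) == pow(g, log2(d/d0))
        return std::exp2(std::log2(g) * std::log2(distance / startDistance));
    }
    // e.g. g = 0.5f: distanceCoefficientRef(0.5f, 4.0f) == 0.25f (two doublings, two halvings)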
@@ -535,7 +574,7 @@ float gainForSource(const AvatarAudioStream& listeningNodeStream, const Position
     return gain;
 }
 
-float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
         const glm::vec3& relativePosition) {
     glm::quat inverseOrientation = glm::inverse(listeningNodeStream.getOrientation());
@@ -20,6 +20,9 @@ void AudioMixerStats::reset() {
     hrtfThrottleRenders = 0;
     manualStereoMixes = 0;
     manualEchoMixes = 0;
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    throttleTime = 0;
+#endif
 }
 
 void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
@@ -31,4 +34,7 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
     hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
     manualStereoMixes += otherStats.manualStereoMixes;
     manualEchoMixes += otherStats.manualEchoMixes;
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    throttleTime += otherStats.throttleTime;
+#endif
 }
@@ -12,6 +12,10 @@
 #ifndef hifi_AudioMixerStats_h
 #define hifi_AudioMixerStats_h
 
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+#include <cstdint>
+#endif
+
 struct AudioMixerStats {
     int sumStreams { 0 };
     int sumListeners { 0 };
@@ -25,6 +29,10 @@ struct AudioMixerStats {
     int manualStereoMixes { 0 };
     int manualEchoMixes { 0 };
 
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    uint64_t throttleTime { 0 };
+#endif
+
     void reset();
     void accumulate(const AudioMixerStats& otherStats);
 };
@@ -1306,7 +1306,9 @@ function badgeSidebarForDifferences(changedElement) {
     var isGrouped = $('#' + panelParentID).hasClass('grouped');
 
     if (isGrouped) {
-        var initialPanelJSON = Settings.initialValues[panelParentID];
+        var initialPanelJSON = Settings.initialValues[panelParentID]
+            ? Settings.initialValues[panelParentID]
+            : {};
 
         // get a JSON representation of that section
         var panelJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID];
@@ -1417,7 +1419,7 @@ function addTableRow(row) {
 
     input_clone.children('td').each(function () {
         if ($(this).attr("name") !== keepField) {
-            $(this).find("input").val($(this).attr('data-default'));
+            $(this).find("input").val($(this).children('input').attr('data-default'));
        }
    });
 
@@ -1595,7 +1597,11 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
 
     // get a JSON representation of that section
     var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName]
-    var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
+    if (Settings.initialValues[panelParentID]) {
+        var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
+    } else {
+        var initialPanelSettingJSON = {};
+    }
 
     // if they are equal, we don't need data-changed
     isTrue = !_.isEqual(panelSettingJSON, initialPanelSettingJSON)
@@ -87,7 +87,11 @@ Item {
             if (topMenu.objectName === "") {
                 breadcrumbText.text = "Menu";
             } else {
-                breadcrumbText.text = topMenu.objectName;
+                if (menuStack.length === 1) {
+                    breadcrumbText.text = "Menu";
+                } else {
+                    breadcrumbText.text = topMenu.objectName;
+                }
             }
         } else {
             breadcrumbText.text = "Menu";
@@ -74,6 +74,11 @@ int main(int argc, const char* argv[]) {
     instanceMightBeRunning = !sharedMemory.create(1, QSharedMemory::ReadOnly);
 #endif
 
+    // allow multiple interfaces to run if this environment variable is set.
+    if (QProcessEnvironment::systemEnvironment().contains("HIFI_ALLOW_MULTIPLE_INSTANCES")) {
+        instanceMightBeRunning = false;
+    }
+
     if (instanceMightBeRunning) {
         // Try to connect and send message to existing interface instance
         QLocalSocket socket;
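Note: the check uses QProcessEnvironment::contains(), so the variable's value is irrelevant; merely defining HIFI_ALLOW_MULTIPLE_INSTANCES in the environment disables the shared-memory single-instance guard.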
@@ -48,6 +48,7 @@ public:
     // HRTF local gain adjustment in amplitude (1.0 == unity)
     //
     void setGainAdjustment(float gain) { _gainAdjust = HRTF_GAIN * gain; };
+    float getGainAdjustment() { return _gainAdjust; }
 
 private:
     AudioHRTF(const AudioHRTF&) = delete;
@@ -1465,6 +1465,34 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
 
     // Create the Material Library
     consolidateFBXMaterials(mapping);
+
+    // HACK: until we get proper LOD management we're going to cap model textures
+    // according to how many unique textures the model uses:
+    //   1 - 8 textures --> 2048
+    //   8 - 32 textures --> 1024
+    //   33 - 128 textures --> 512
+    //   etc...
+    QSet<QString> uniqueTextures;
+    for (auto& material : _fbxMaterials) {
+        material.getTextureNames(uniqueTextures);
+    }
+    int numTextures = uniqueTextures.size();
+    const int MAX_NUM_TEXTURES_AT_MAX_RESOLUTION = 8;
+    int maxWidth = sqrt(MAX_NUM_PIXELS_FOR_FBX_TEXTURE);
+
+    if (numTextures > MAX_NUM_TEXTURES_AT_MAX_RESOLUTION) {
+        int numTextureThreshold = MAX_NUM_TEXTURES_AT_MAX_RESOLUTION;
+        const int MIN_MIP_TEXTURE_WIDTH = 64;
+        do {
+            maxWidth /= 2;
+            numTextureThreshold *= 4;
+        } while (numTextureThreshold < numTextures && maxWidth > MIN_MIP_TEXTURE_WIDTH);
+
+        qCDebug(modelformat) << "Capped square texture width =" << maxWidth << "for model" << url << "with" << numTextures << "textures";
+        for (auto& material : _fbxMaterials) {
+            material.setMaxNumPixelsPerTexture(maxWidth * maxWidth);
+        }
+    }
+
     geometry.materials = _fbxMaterials;
 
     // see if any materials have texture children
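The do/while halves the width and quadruples the texture-count threshold each step, which yields the schedule in the comment: up to 8 textures stay at 2048, up to 32 drop to 1024, up to 128 to 512, and so on, down to a 64-pixel floor. Pulled out as a standalone sketch (hypothetical helper, constants copied from the hunk):

    // returns the capped square texture width for a model with numTextures unique textures
    int cappedTextureWidth(int numTextures) {
        const int MAX_NUM_TEXTURES_AT_MAX_RESOLUTION = 8;
        const int MIN_MIP_TEXTURE_WIDTH = 64;
        int maxWidth = 2048;                    // sqrt(MAX_NUM_PIXELS_FOR_FBX_TEXTURE)
        if (numTextures > MAX_NUM_TEXTURES_AT_MAX_RESOLUTION) {
            int numTextureThreshold = MAX_NUM_TEXTURES_AT_MAX_RESOLUTION;
            do {
                maxWidth /= 2;                  // 2048 -> 1024 -> 512 -> ...
                numTextureThreshold *= 4;       // 8 -> 32 -> 128 -> ...
            } while (numTextureThreshold < numTextures && maxWidth > MIN_MIP_TEXTURE_WIDTH);
        }
        return maxWidth;                        // e.g. 9..32 textures -> 1024
    }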
@@ -13,6 +13,7 @@
 #define hifi_FBXReader_h
 
 #include <QMetaType>
+#include <QSet>
 #include <QUrl>
 #include <QVarLengthArray>
 #include <QVariant>
@@ -100,22 +101,25 @@ public:
 /// A single binding to a joint in an FBX document.
 class FBXCluster {
 public:
 
     int jointIndex;
     glm::mat4 inverseBindMatrix;
 };
 
+const int MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048;
+
 /// A texture map in an FBX document.
 class FBXTexture {
 public:
     QString name;
     QByteArray filename;
     QByteArray content;
 
     Transform transform;
+    int maxNumPixels { MAX_NUM_PIXELS_FOR_FBX_TEXTURE };
     int texcoordSet;
     QString texcoordSetName;
 
     bool isBumpmap{ false };
 
     bool isNull() const { return name.isEmpty() && filename.isEmpty() && content.isEmpty(); }
@@ -143,6 +147,9 @@ public:
         shininess(shininess),
         opacity(opacity) {}
 
+    void getTextureNames(QSet<QString>& textureList) const;
+    void setMaxNumPixelsPerTexture(int maxNumPixels);
+
     glm::vec3 diffuseColor{ 1.0f };
     float diffuseFactor{ 1.0f };
     glm::vec3 specularColor{ 0.02f };
@@ -27,6 +27,56 @@
 
 #include "ModelFormatLogging.h"
 
+void FBXMaterial::getTextureNames(QSet<QString>& textureList) const {
+    if (!normalTexture.isNull()) {
+        textureList.insert(normalTexture.name);
+    }
+    if (!albedoTexture.isNull()) {
+        textureList.insert(albedoTexture.name);
+    }
+    if (!opacityTexture.isNull()) {
+        textureList.insert(opacityTexture.name);
+    }
+    if (!glossTexture.isNull()) {
+        textureList.insert(glossTexture.name);
+    }
+    if (!roughnessTexture.isNull()) {
+        textureList.insert(roughnessTexture.name);
+    }
+    if (!specularTexture.isNull()) {
+        textureList.insert(specularTexture.name);
+    }
+    if (!metallicTexture.isNull()) {
+        textureList.insert(metallicTexture.name);
+    }
+    if (!emissiveTexture.isNull()) {
+        textureList.insert(emissiveTexture.name);
+    }
+    if (!occlusionTexture.isNull()) {
+        textureList.insert(occlusionTexture.name);
+    }
+    if (!scatteringTexture.isNull()) {
+        textureList.insert(scatteringTexture.name);
+    }
+    if (!lightmapTexture.isNull()) {
+        textureList.insert(lightmapTexture.name);
+    }
+}
+
+void FBXMaterial::setMaxNumPixelsPerTexture(int maxNumPixels) {
+    normalTexture.maxNumPixels = maxNumPixels;
+    albedoTexture.maxNumPixels = maxNumPixels;
+    opacityTexture.maxNumPixels = maxNumPixels;
+    glossTexture.maxNumPixels = maxNumPixels;
+    roughnessTexture.maxNumPixels = maxNumPixels;
+    specularTexture.maxNumPixels = maxNumPixels;
+    metallicTexture.maxNumPixels = maxNumPixels;
+    emissiveTexture.maxNumPixels = maxNumPixels;
+    occlusionTexture.maxNumPixels = maxNumPixels;
+    scatteringTexture.maxNumPixels = maxNumPixels;
+    lightmapTexture.maxNumPixels = maxNumPixels;
+}
+
 bool FBXMaterial::needTangentSpace() const {
     return !normalTexture.isNull();
 }
@@ -47,10 +97,10 @@ FBXTexture FBXReader::getTexture(const QString& textureID) {
     texture.texcoordSet = 0;
     if (_textureParams.contains(textureID)) {
         auto p = _textureParams.value(textureID);
 
         texture.transform.setTranslation(p.translation);
         texture.transform.setRotation(glm::quat(glm::radians(p.rotation)));
 
         auto scaling = p.scaling;
         // Protect from bad scaling which should never happen
         if (scaling.x == 0.0f) {
@@ -63,7 +113,7 @@ FBXTexture FBXReader::getTexture(const QString& textureID) {
             scaling.z = 1.0f;
         }
         texture.transform.setScale(scaling);
 
         if ((p.UVSet != "map1") && (p.UVSet != "UVSet0")) {
             texture.texcoordSet = 1;
         }
@@ -78,11 +128,10 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
     QJsonDocument materialMapDocument = QJsonDocument::fromJson(materialMapString.toUtf8());
     QJsonObject materialMap = materialMapDocument.object();
 
-    // foreach (const QString& materialID, materials) {
     for (QHash<QString, FBXMaterial>::iterator it = _fbxMaterials.begin(); it != _fbxMaterials.end(); it++) {
         FBXMaterial& material = (*it);
 
-        // Maya is the exporting the shading model and we aretrying to use it
+        // Maya is the exporting the shading model and we are trying to use it
         bool isMaterialLambert = (material.shadingModel.toLower() == "lambert");
 
         // the pure material associated with this part
@@ -127,21 +176,19 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
             detectDifferentUVs |= (transparentTexture.texcoordSet != 0) || (!transparentTexture.transform.isIdentity());
         }
 
         FBXTexture normalTexture;
         QString bumpTextureID = bumpTextures.value(material.materialID);
         QString normalTextureID = normalTextures.value(material.materialID);
         if (!normalTextureID.isNull()) {
             normalTexture = getTexture(normalTextureID);
             normalTexture.isBumpmap = false;
-
             material.normalTexture = normalTexture;
             detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity());
         } else if (!bumpTextureID.isNull()) {
             normalTexture = getTexture(bumpTextureID);
             normalTexture.isBumpmap = true;
-
             material.normalTexture = normalTexture;
             detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity());
         }
@@ -151,7 +198,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
         if (!specularTextureID.isNull()) {
             specularTexture = getTexture(specularTextureID);
             detectDifferentUVs |= (specularTexture.texcoordSet != 0) || (!specularTexture.transform.isIdentity());
-        material.specularTexture = specularTexture;
+            material.specularTexture = specularTexture;
         }
 
         FBXTexture metallicTexture;
@@ -222,7 +269,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
                 ambientTextureID = ambientFactorTextures.value(material.materialID);
             }
         }
 
         if (_loadLightmaps && !ambientTextureID.isNull()) {
             ambientTexture = getTexture(ambientTextureID);
             detectDifferentUVs |= (ambientTexture.texcoordSet != 0) || (!ambientTexture.transform.isIdentity());
@@ -472,7 +472,7 @@ QUrl NetworkMaterial::getTextureUrl(const QUrl& baseUrl, const FBXTexture& textu
 model::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const FBXTexture& fbxTexture,
                                                           TextureType type, MapChannel channel) {
     const auto url = getTextureUrl(baseUrl, fbxTexture);
-    const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, fbxTexture.content);
+    const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, fbxTexture.content, fbxTexture.maxNumPixels);
     _textures[channel] = Texture { fbxTexture.name, texture };
 
     auto map = std::make_shared<model::TextureMap>();
@@ -159,7 +159,7 @@ protected:
     class Texture {
     public:
         QString name;
-        QSharedPointer<NetworkTexture> texture;
+        NetworkTexturePointer texture;
     };
     using Textures = std::vector<Texture>;
 
@@ -167,16 +167,17 @@ class TextureExtra {
 public:
     NetworkTexture::Type type;
     const QByteArray& content;
+    int maxNumPixels;
 };
 
-ScriptableResource* TextureCache::prefetch(const QUrl& url, int type) {
+ScriptableResource* TextureCache::prefetch(const QUrl& url, int type, int maxNumPixels) {
     auto byteArray = QByteArray();
-    TextureExtra extra = { (Type)type, byteArray };
+    TextureExtra extra = { (Type)type, byteArray, maxNumPixels };
     return ResourceCache::prefetch(url, &extra);
 }
 
-NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const QByteArray& content) {
-    TextureExtra extra = { type, content };
+NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) {
+    TextureExtra extra = { type, content, maxNumPixels };
     return ResourceCache::getResource(url, QUrl(), &extra).staticCast<NetworkTexture>();
 }
 
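With the extra field plumbed through, a caller can cap a texture at fetch time; a hypothetical call site (assuming the enum is reachable as NetworkTexture::DEFAULT_TEXTURE, other arguments left at their defaults):

    // cap this texture at 512 * 512 pixels; omitted, it stays at ABSOLUTE_MAX_TEXTURE_NUM_PIXELS
    auto texture = DependencyManager::get<TextureCache>()->getTexture(
        url, NetworkTexture::DEFAULT_TEXTURE, QByteArray(), 512 * 512);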
@@ -251,13 +252,15 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSh
     const TextureExtra* textureExtra = static_cast<const TextureExtra*>(extra);
     auto type = textureExtra ? textureExtra->type : Type::DEFAULT_TEXTURE;
     auto content = textureExtra ? textureExtra->content : QByteArray();
-    return QSharedPointer<Resource>(new NetworkTexture(url, type, content),
+    auto maxNumPixels = textureExtra ? textureExtra->maxNumPixels : ABSOLUTE_MAX_TEXTURE_NUM_PIXELS;
+    return QSharedPointer<Resource>(new NetworkTexture(url, type, content, maxNumPixels),
         &Resource::deleter);
 }
 
-NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content) :
+NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) :
     Resource(url),
-    _type(type)
+    _type(type),
+    _maxNumPixels(maxNumPixels)
 {
     _textureSource = std::make_shared<gpu::TextureSource>();
 
@@ -274,7 +277,7 @@ NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& con
 }
 
 NetworkTexture::NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content) :
-    NetworkTexture(url, CUSTOM_TEXTURE, content)
+    NetworkTexture(url, CUSTOM_TEXTURE, content, ABSOLUTE_MAX_TEXTURE_NUM_PIXELS)
 {
     _textureLoader = textureLoader;
 }
@@ -290,7 +293,8 @@ NetworkTexture::TextureLoaderFunc NetworkTexture::getTextureLoader() const {
 class ImageReader : public QRunnable {
 public:
 
-    ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data, const QUrl& url = QUrl());
+    ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data,
+        const QUrl& url = QUrl(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
 
     virtual void run() override;
 
@@ -300,6 +304,7 @@ private:
     QWeakPointer<Resource> _resource;
     QUrl _url;
     QByteArray _content;
+    int _maxNumPixels;
 };
 
 void NetworkTexture::downloadFinished(const QByteArray& data) {
@@ -308,14 +313,15 @@ void NetworkTexture::downloadFinished(const QByteArray& data) {
 }
 
 void NetworkTexture::loadContent(const QByteArray& content) {
-    QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url));
+    QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url, _maxNumPixels));
 }
 
 ImageReader::ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data,
-    const QUrl& url) :
+    const QUrl& url, int maxNumPixels) :
     _resource(resource),
     _url(url),
-    _content(data)
+    _content(data),
+    _maxNumPixels(maxNumPixels)
 {
 #if DEBUG_DUMP_TEXTURE_LOADS
     static auto start = usecTimestampNow() / USECS_PER_MSEC;
@@ -375,10 +381,10 @@ void ImageReader::run() {
 
     // Note that QImage.format is the pixel format which is different from the "format" of the image file...
     auto imageFormat = image.format();
-    int originalWidth = image.width();
-    int originalHeight = image.height();
+    int imageWidth = image.width();
+    int imageHeight = image.height();
 
-    if (originalWidth == 0 || originalHeight == 0 || imageFormat == QImage::Format_Invalid) {
+    if (imageWidth == 0 || imageHeight == 0 || imageFormat == QImage::Format_Invalid) {
         if (filenameExtension.empty()) {
             qCDebug(modelnetworking) << "QImage failed to create from content, no file extension:" << _url;
         } else {
@@ -386,6 +392,20 @@ void ImageReader::run() {
         }
         return;
     }
+
+    if (imageWidth * imageHeight > _maxNumPixels) {
+        float scaleFactor = sqrtf(_maxNumPixels / (float)(imageWidth * imageHeight));
+        int originalWidth = imageWidth;
+        int originalHeight = imageHeight;
+        imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f);
+        imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f);
+        QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio);
+        image.swap(newImage);
+        qCDebug(modelnetworking) << "Downscale image" << _url
+            << "from" << originalWidth << "x" << originalHeight
+            << "to" << imageWidth << "x" << imageHeight;
+    }
 
     gpu::TexturePointer texture = nullptr;
     {
         // Double-check the resource still exists between long operations.
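Because both dimensions are scaled by the same factor sqrt(maxNumPixels / currentPixels), the result keeps the source aspect ratio and lands at roughly maxNumPixels (w' * h' = s^2 * w * h), which is why Qt::IgnoreAspectRatio is safe here. The same math in isolation (illustrative helper):

    #include <cmath>

    void capImageSize(int& width, int& height, int maxNumPixels) {
        if (width * height > maxNumPixels) {
            float s = sqrtf(maxNumPixels / (float)(width * height));   // s < 1
            width = (int)(s * (float)width + 0.5f);                    // round to nearest
            height = (int)(s * (float)height + 0.5f);
        }
    }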
@@ -408,7 +428,7 @@ void ImageReader::run() {
         } else {
             QMetaObject::invokeMethod(resource.data(), "setImage",
                 Q_ARG(gpu::TexturePointer, texture),
-                Q_ARG(int, originalWidth), Q_ARG(int, originalHeight));
+                Q_ARG(int, imageWidth), Q_ARG(int, imageHeight));
         }
     }
 
@@ -23,6 +23,8 @@
 #include <ResourceCache.h>
 #include <model/TextureMap.h>
 
+const int ABSOLUTE_MAX_TEXTURE_NUM_PIXELS = 8192 * 8192;
+
 namespace gpu {
 class Batch;
 }
@@ -60,7 +62,7 @@ public:
     typedef gpu::Texture* TextureLoader(const QImage& image, const std::string& srcImageName);
     using TextureLoaderFunc = std::function<TextureLoader>;
 
-    NetworkTexture(const QUrl& url, Type type, const QByteArray& content);
+    NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels);
     NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content);
 
     QString getType() const override { return "NetworkTexture"; }
@@ -70,7 +72,7 @@ public:
     int getWidth() const { return _width; }
     int getHeight() const { return _height; }
     Type getTextureType() const { return _type; }
 
     TextureLoaderFunc getTextureLoader() const;
 
 signals:
@@ -81,7 +83,7 @@ protected:
     virtual bool isCacheable() const override { return _loaded; }
 
     virtual void downloadFinished(const QByteArray& data) override;
 
     Q_INVOKABLE void loadContent(const QByteArray& content);
     Q_INVOKABLE void setImage(gpu::TexturePointer texture, int originalWidth, int originalHeight);
 
@@ -92,6 +94,7 @@ private:
     int _originalHeight { 0 };
     int _width { 0 };
     int _height { 0 };
+    int _maxNumPixels { ABSOLUTE_MAX_TEXTURE_NUM_PIXELS };
 };
 
 using NetworkTexturePointer = QSharedPointer<NetworkTexture>;
@@ -129,11 +132,11 @@ public:
 
     /// Loads a texture from the specified URL.
     NetworkTexturePointer getTexture(const QUrl& url, Type type = Type::DEFAULT_TEXTURE,
-        const QByteArray& content = QByteArray());
+        const QByteArray& content = QByteArray(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
 
 protected:
     // Overload ResourceCache::prefetch to allow specifying texture type for loads
-    Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type);
+    Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type, int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
 
     virtual QSharedPointer<Resource> createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
         const void* extra) override;
@@ -827,18 +827,26 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) {
         });
 
         if (ignoreEnabled) {
-            QReadLocker ignoredSetLocker{ &_ignoredSetLock }; // read lock for insert
-            QReadLocker personalMutedSetLocker{ &_personalMutedSetLock }; // read lock for insert
-            // add this nodeID to our set of ignored IDs
-            _ignoredNodeIDs.insert(nodeID);
-            // add this nodeID to our set of personal muted IDs
-            _personalMutedNodeIDs.insert(nodeID);
+            {
+                QReadLocker ignoredSetLocker{ &_ignoredSetLock }; // read lock for insert
+                // add this nodeID to our set of ignored IDs
+                _ignoredNodeIDs.insert(nodeID);
+            }
+            {
+                QReadLocker personalMutedSetLocker{ &_personalMutedSetLock }; // read lock for insert
+                // add this nodeID to our set of personal muted IDs
+                _personalMutedNodeIDs.insert(nodeID);
+            }
             emit ignoredNode(nodeID, true);
         } else {
-            QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; // write lock for unsafe_erase
-            QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; // write lock for unsafe_erase
-            _ignoredNodeIDs.unsafe_erase(nodeID);
-            _personalMutedNodeIDs.unsafe_erase(nodeID);
+            {
+                QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; // write lock for unsafe_erase
+                _ignoredNodeIDs.unsafe_erase(nodeID);
+            }
+            {
+                QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; // write lock for unsafe_erase
+                _personalMutedNodeIDs.unsafe_erase(nodeID);
+            }
             emit ignoredNode(nodeID, false);
         }
 
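The restructuring above is not cosmetic: each locker now lives in its own block, so only one of _ignoredSetLock and _personalMutedSetLock is held at a time and each is released as soon as its set is updated, instead of both being held to the end of the branch. That shortens the critical sections and removes a lock-ordering hazard against code that acquires the two locks in the opposite order.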
@@ -850,10 +858,14 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) {
 void NodeList::removeFromIgnoreMuteSets(const QUuid& nodeID) {
     // don't remove yourself, or nobody
     if (!nodeID.isNull() && _sessionUUID != nodeID) {
-        QWriteLocker ignoredSetLocker{ &_ignoredSetLock };
-        QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock };
-        _ignoredNodeIDs.unsafe_erase(nodeID);
-        _personalMutedNodeIDs.unsafe_erase(nodeID);
+        {
+            QWriteLocker ignoredSetLocker{ &_ignoredSetLock };
+            _ignoredNodeIDs.unsafe_erase(nodeID);
+        }
+        {
+            QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock };
+            _personalMutedNodeIDs.unsafe_erase(nodeID);
+        }
     }
 }
 
@@ -86,7 +86,7 @@ private:
 /// Wrapper to expose resources to JS/QML
 class ScriptableResource : public QObject {
     Q_OBJECT
-    Q_PROPERTY(QUrl url READ getUrl)
+    Q_PROPERTY(QUrl url READ getURL)
     Q_PROPERTY(int state READ getState NOTIFY stateChanged)
 
     /**jsdoc
@@ -125,7 +125,7 @@ public:
     */
     Q_INVOKABLE void release();
 
-    const QUrl& getUrl() const { return _url; }
+    const QUrl& getURL() const { return _url; }
     int getState() const { return (int)_state; }
     const QSharedPointer<Resource>& getResource() const { return _resource; }
 
@@ -16,7 +16,7 @@
 
 var MESSAGE_CHANNEL = "io.highfidelity.summon-crowd";
 
-print('crowd-agent version 4');
+print('crowd-agent version 5');
 
 /* Observations:
 - File urls for AC scripts silently fail. Use a local server (e.g., python SimpleHTTPServer) for development.
@@ -84,6 +84,9 @@ function startAgent(parameters) { // Can also be used to update.
     clearStopper();
     var wasOff = !Agent.isAvatar;
     Agent.isAvatar = true;
+    if (parameters.displayName !== undefined) {
+        Avatar.displayName = parameters.displayName;
+    }
     if (parameters.position) {
         Avatar.position = parameters.position;
     }
@@ -13,7 +13,7 @@
 //
 // See crowd-agent.js
 
-var version = 2;
+var version = 3;
 var label = "summon";
 function debug() {
     print.apply(null, [].concat.apply([label, version], [].map.call(arguments, JSON.stringify)));
@@ -23,6 +23,9 @@ var MINIMUM_AVATARS = 25; // We will summon agents to produce this many total. (
 var N_LISTENING = MINIMUM_AVATARS - 1;
 var AVATARS_CHATTERING_AT_ONCE = 4; // How many of the agents should we request to play SOUND_DATA at once.
 
+var initialBubble = Users.getIgnoreRadiusEnabled();
+debug('startup seeking:', MINIMUM_AVATARS, 'listening:', N_LISTENING, 'chattering:', AVATARS_CHATTERING_AT_ONCE, 'had bubble:', initialBubble);
+
 // If we add or remove things too quickly, we get problems (e.g., audio, fogbugz 2095).
 // For now, spread them out this timing apart.
 var SPREAD_TIME_MS = 500;
@@ -66,7 +69,7 @@ function messageHandler(channel, messageString, senderID) {
     if (MyAvatar.sessionUUID === senderID) { // ignore my own
         return;
     }
-    var message = {}, avatarIdentifiers;
+    var message = {};
     try {
         message = JSON.parse(messageString);
     } catch (e) {
@@ -76,9 +79,10 @@ function messageHandler(channel, messageString, senderID) {
     case "hello":
         Script.setTimeout(function () {
             // There can be avatars we've summoned that do not yet appear in the AvatarList.
-            avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents);
+            var avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents);
+            var nSummoned = summonedAgents.length;
             debug('present', avatarIdentifiers, summonedAgents);
-            if ((summonedAgents.length + avatarIdentifiers.length) < MINIMUM_AVATARS ) {
+            if ((nSummoned + avatarIdentifiers.length) < MINIMUM_AVATARS ) {
                 var chatter = chattering.length < AVATARS_CHATTERING_AT_ONCE;
                 var listen = nListening < N_LISTENING;
                 if (chatter) {
@@ -91,6 +95,7 @@ function messageHandler(channel, messageString, senderID) {
                     messageSend({
                         key: 'SUMMON',
                         rcpt: senderID,
+                        displayName: "crowd " + nSummoned + " " + senderID,
                         position: Vec3.sum(MyAvatar.position, {x: coord(), y: 0, z: coord()}),
                         orientation: Quat.fromPitchYawRollDegrees(0, Quat.safeEulerAngles(MyAvatar.orientation).y + (turnSpread * (Math.random() - 0.5)), 0),
                         soundData: chatter && SOUND_DATA,
@@ -100,7 +105,7 @@ function messageHandler(channel, messageString, senderID) {
             });
         }
     }, accumulatedDelay);
-    accumulatedDelay += SPREAD_TIME_MS; // assume we'll get all the hello respsponses more or less together.
+    accumulatedDelay += SPREAD_TIME_MS; // assume we'll get all the hello responses more or less together.
     break;
 case "finishedSound": // Give someone else a chance.
     chattering = without(chattering, [senderID]);
@@ -123,6 +128,8 @@ Messages.subscribe(MESSAGE_CHANNEL);
 Messages.messageReceived.connect(messageHandler);
 Script.scriptEnding.connect(function () {
     debug('stopping agents', summonedAgents);
+    Users.requestsDomainListData = false;
+    if (initialBubble && !Users.getIgnoreRadiusEnabled()) { Users.toggleIgnoreRadius(); }
     Messages.messageReceived.disconnect(messageHandler); // don't respond to any messages during shutdown
     accumulatedDelay = 0;
     summonedAgents.forEach(function (id) {
@@ -134,14 +141,17 @@ Script.scriptEnding.connect(function () {
     debug('unsubscribed');
 });
 
+Users.requestsDomainListData = true; // Get avatar data for the whole domain, even if not in our view.
+if (initialBubble) { Users.toggleIgnoreRadius(); }
 messageSend({key: 'HELO'}); // Ask agents to report in now.
 Script.setTimeout(function () {
+    var total = AvatarList.getAvatarIdentifiers().length;
     if (0 === summonedAgents.length) {
         Window.alert("No agents reported.\n\Please run " + MINIMUM_AVATARS + " instances of\n\
-http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js\n\
+http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js?v=someDate\n\
 on your domain server.");
     } else if (total < MINIMUM_AVATARS) {
         Window.alert("Only " + summonedAgents.length + " agents reported. Now missing " + (MINIMUM_AVATARS - total) + " avatars, total.");
     }
+    Users.requestsDomainListData = false;
 }, MINIMUM_AVATARS * SPREAD_TIME_MS )
@@ -12,6 +12,7 @@
 
 (function() { // BEGIN LOCAL_SCOPE
 var USERS_URL = "https://hifi-content.s3.amazonaws.com/faye/tablet-dev/users.html";
+var HOME_BUTTON_TEXTURE = Script.resourcesPath() + "meshes/tablet-with-home-button.fbx/tablet-with-home-button.fbm/button-root.png";
 
 var FRIENDS_WINDOW_URL = "https://metaverse.highfidelity.com/user/friends";
 var FRIENDS_WINDOW_WIDTH = 290;
@@ -40,6 +41,10 @@
     });
 
     function onClicked() {
+        var tabletEntity = HMD.tabletID;
+        if (tabletEntity) {
+            Entities.editEntity(tabletEntity, {textures: JSON.stringify({"tex.close" : HOME_BUTTON_TEXTURE})});
+        }
         tablet.gotoWebScreen(USERS_URL);
     }
 