diff --git a/assignment-client/src/audio/AudioMixer.cpp b/assignment-client/src/audio/AudioMixer.cpp index 04acae6f05..4f123a6a8f 100644 --- a/assignment-client/src/audio/AudioMixer.cpp +++ b/assignment-client/src/audio/AudioMixer.cpp @@ -316,6 +316,10 @@ void AudioMixer::sendStatsPacket() { addTiming(_mixTiming, "mix"); addTiming(_eventsTiming, "events"); +#ifdef HIFI_AUDIO_THROTTLE_DEBUG + timingStats["ns_per_throttle"] = (_stats.totalMixes > 0) ? (float)(_stats.throttleTime / _stats.totalMixes) : 0; +#endif + // call it "avg_..." to keep it higher in the display, sorted alphabetically statsObject["avg_timing_stats"] = timingStats; diff --git a/assignment-client/src/audio/AudioMixerSlave.cpp b/assignment-client/src/audio/AudioMixerSlave.cpp index 4b02ca1567..adc6413316 100644 --- a/assignment-client/src/audio/AudioMixerSlave.cpp +++ b/assignment-client/src/audio/AudioMixerSlave.cpp @@ -46,10 +46,12 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&); void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data); // mix helpers -bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node); -float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, +inline bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node); +inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, + const glm::vec3& relativePosition); +inline float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, bool isEcho); -float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, +inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition); void 
AudioMixerSlave::configure(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) { @@ -126,9 +128,10 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) { AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&); auto allStreams = [&](const SharedNodePointer& node, MixFunctor mixFunctor) { AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData()); + auto nodeID = node->getUUID(); for (auto& streamPair : nodeData->getAudioStreams()) { auto nodeStream = streamPair.second; - (this->*mixFunctor)(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream); + (this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream); } }; @@ -147,14 +150,28 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) { if (!isThrottling) { allStreams(node, &AudioMixerSlave::mixStream); } else { +#ifdef HIFI_AUDIO_THROTTLE_DEBUG + auto throttleStart = p_high_resolution_clock::now(); +#endif + AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData()); + auto nodeID = node->getUUID(); // compute the node's max relative volume - float nodeVolume; + float nodeVolume = 0.0f; for (auto& streamPair : nodeData->getAudioStreams()) { auto nodeStream = streamPair.second; - float distance = glm::length(nodeStream->getPosition() - listenerAudioStream->getPosition()); - nodeVolume = std::max(nodeStream->getLastPopOutputTrailingLoudness() / distance, nodeVolume); + + // approximate the gain + glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition(); + float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition); + + // modify by hrtf gain adjustment + auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier()); + gain *= hrtf.getGainAdjustment(); + + auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain; + nodeVolume = std::max(streamVolume, nodeVolume); } // max-heapify the nodes by relative volume @@ 
-162,6 +179,13 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) { if (!throttledNodes.empty()) { std::push_heap(throttledNodes.begin(), throttledNodes.end()); } + +#ifdef HIFI_AUDIO_THROTTLE_DEBUG + auto throttleEnd = p_high_resolution_clock::now(); + uint64_t throttleTime = + std::chrono::duration_cast<std::chrono::nanoseconds>(throttleEnd - throttleStart).count(); + stats.throttleTime += throttleTime; +#endif } } }); @@ -227,9 +251,9 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition(); float distance = glm::max(glm::length(relativePosition), EPSILON); - float gain = gainForSource(listeningNodeStream, streamToAdd, relativePosition, isEcho); - float azimuth = isEcho ? 0.0f : azimuthForSource(listeningNodeStream, listeningNodeStream, relativePosition); - static const int HRTF_DATASET_INDEX = 1; + float gain = computeGain(listeningNodeStream, streamToAdd, relativePosition, isEcho); + float azimuth = isEcho ? 
0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition); + const int HRTF_DATASET_INDEX = 1; if (!streamToAdd.lastPopSucceeded()) { bool forceSilentBlock = true; @@ -330,7 +354,7 @@ std::unique_ptr createAudioPacket(PacketType type, int size, quint16 s } void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QByteArray& buffer) { - static const int MIX_PACKET_SIZE = + const int MIX_PACKET_SIZE = sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE + AudioConstants::NETWORK_FRAME_BYTES_STEREO; quint16 sequence = data.getOutgoingSequenceNumber(); QString codec = data.getCodecName(); @@ -345,7 +369,7 @@ void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QB } void sendSilentPacket(const SharedNodePointer& node, AudioMixerClientData& data) { - static const int SILENT_PACKET_SIZE = + const int SILENT_PACKET_SIZE = sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE + sizeof(quint16); quint16 sequence = data.getOutgoingSequenceNumber(); QString codec = data.getCodecName(); @@ -475,40 +499,54 @@ bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer return ignore; } -float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, - const glm::vec3& relativePosition, bool isEcho) { +static const float ATTENUATION_START_DISTANCE = 1.0f; + +float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, + const glm::vec3& relativePosition) { float gain = 1.0f; - float distanceBetween = glm::length(relativePosition); - - if (distanceBetween < EPSILON) { - distanceBetween = EPSILON; - } - + // injector: apply attenuation if (streamToAdd.getType() == PositionalAudioStream::Injector) { gain *= reinterpret_cast(&streamToAdd)->getAttenuationRatio(); } - if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) { - // source is another avatar, apply 
fixed off-axis attenuation to make them quieter as they turn away from listener - glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition; + // avatar: skip attenuation - it is too costly to approximate + // distance attenuation: approximate, ignore zone-specific attenuations + // this is a good approximation for streams further than ATTENUATION_START_DISTANCE + // those streams closer will be amplified; amplifying close streams is acceptable + // when throttling, as close streams are expected to be heard by a user + float distance = glm::length(relativePosition); + return gain / distance; +} + +float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, + const glm::vec3& relativePosition, bool isEcho) { + float gain = 1.0f; + + // injector: apply attenuation + if (streamToAdd.getType() == PositionalAudioStream::Injector) { + gain *= reinterpret_cast(&streamToAdd)->getAttenuationRatio(); + + // avatar: apply fixed off-axis attenuation to make them quieter as they turn away + } else if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) { + glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition; float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f), glm::normalize(rotatedListenerPosition)); const float MAX_OFF_AXIS_ATTENUATION = 0.2f; - const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f; - + const float OFF_AXIS_ATTENUATION_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f; float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION + - (OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / PI_OVER_TWO)); + (angleOfDelivery * (OFF_AXIS_ATTENUATION_STEP / PI_OVER_TWO)); - // multiply the current attenuation coefficient by the calculated off axis coefficient gain *= offAxisCoefficient; } - float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance(); - 
auto& zoneSettings = AudioMixer::getZoneSettings(); auto& audioZones = AudioMixer::getAudioZones(); + auto& zoneSettings = AudioMixer::getZoneSettings(); + + // find distance attenuation coefficient + float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance(); for (int i = 0; i < zoneSettings.length(); ++i) { if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) && audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) { @@ -517,16 +555,17 @@ float gainForSource(const AvatarAudioStream& listeningNodeStream, const Position } } - const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f; - if (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE) { + // distance attenuation + float distance = glm::length(relativePosition); + assert(ATTENUATION_START_DISTANCE > EPSILON); + if (distance >= ATTENUATION_START_DISTANCE) { // translate the zone setting to gain per log2(distance) float g = 1.0f - attenuationPerDoublingInDistance; - g = (g < EPSILON) ? EPSILON : g; - g = (g > 1.0f) ? 
1.0f : g; + g = glm::clamp(g, EPSILON, 1.0f); // calculate the distance coefficient using the distance to this node - float distanceCoefficient = fastExp2f(fastLog2f(g) * fastLog2f(distanceBetween/ATTENUATION_BEGINS_AT_DISTANCE)); + float distanceCoefficient = fastExp2f(fastLog2f(g) * fastLog2f(distance/ATTENUATION_START_DISTANCE)); // multiply the current attenuation coefficient by the distance coefficient gain *= distanceCoefficient; @@ -535,7 +574,7 @@ float gainForSource(const AvatarAudioStream& listeningNodeStream, const Position return gain; } -float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, +float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition) { glm::quat inverseOrientation = glm::inverse(listeningNodeStream.getOrientation()); diff --git a/assignment-client/src/audio/AudioMixerStats.cpp b/assignment-client/src/audio/AudioMixerStats.cpp index a50c0d26c1..a3a3a215bc 100644 --- a/assignment-client/src/audio/AudioMixerStats.cpp +++ b/assignment-client/src/audio/AudioMixerStats.cpp @@ -20,6 +20,9 @@ void AudioMixerStats::reset() { hrtfThrottleRenders = 0; manualStereoMixes = 0; manualEchoMixes = 0; +#ifdef HIFI_AUDIO_THROTTLE_DEBUG + throttleTime = 0; +#endif } void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) { @@ -31,4 +34,7 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) { hrtfThrottleRenders += otherStats.hrtfThrottleRenders; manualStereoMixes += otherStats.manualStereoMixes; manualEchoMixes += otherStats.manualEchoMixes; +#ifdef HIFI_AUDIO_THROTTLE_DEBUG + throttleTime += otherStats.throttleTime; +#endif } diff --git a/assignment-client/src/audio/AudioMixerStats.h b/assignment-client/src/audio/AudioMixerStats.h index cb85006061..f7e3ed1525 100644 --- a/assignment-client/src/audio/AudioMixerStats.h +++ b/assignment-client/src/audio/AudioMixerStats.h @@ -12,6 +12,10 @@ 
#ifndef hifi_AudioMixerStats_h #define hifi_AudioMixerStats_h +#ifdef HIFI_AUDIO_THROTTLE_DEBUG +#include <cstdint> +#endif + struct AudioMixerStats { int sumStreams { 0 }; int sumListeners { 0 }; @@ -25,6 +29,10 @@ struct AudioMixerStats { int manualStereoMixes { 0 }; int manualEchoMixes { 0 }; +#ifdef HIFI_AUDIO_THROTTLE_DEBUG + uint64_t throttleTime { 0 }; +#endif + void reset(); void accumulate(const AudioMixerStats& otherStats); }; diff --git a/assignment-client/src/entities/EntityServer.cpp b/assignment-client/src/entities/EntityServer.cpp index dc1a693590..02dc552dae 100644 --- a/assignment-client/src/entities/EntityServer.cpp +++ b/assignment-client/src/entities/EntityServer.cpp @@ -34,7 +34,7 @@ EntityServer::EntityServer(ReceivedMessage& message) : DependencyManager::set(); auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver(); - packetReceiver.registerListenerForTypes({ PacketType::EntityAdd, PacketType::EntityEdit, PacketType::EntityErase }, + packetReceiver.registerListenerForTypes({ PacketType::EntityAdd, PacketType::EntityEdit, PacketType::EntityErase, PacketType::EntityPhysics }, this, "handleEntityPacket"); } diff --git a/domain-server/resources/describe-settings.json b/domain-server/resources/describe-settings.json index 20d2711743..d27d068f84 100644 --- a/domain-server/resources/describe-settings.json +++ b/domain-server/resources/describe-settings.json @@ -141,218 +141,6 @@ "can_set": true } ] - }, - { - "label": "Operating Hours", - "help": "\"Open\" domains can be searched using their operating hours. 
Hours are entered in the local timezone, selected below.", - - "name": "weekday_hours", - "caption": "Weekday Hours (Monday-Friday)", - "type": "table", - "can_add_new_rows": false, - "columns": [ - { - "name": "open", - "label": "Opening Time", - "type": "time", - "default": "00:00", - "editable": true - }, - { - "name": "close", - "label": "Closing Time", - "type": "time", - "default": "23:59", - "editable": true - } - ] - }, - { - "name": "weekend_hours", - "label": "Weekend Hours (Saturday/Sunday)", - "type": "table", - "can_add_new_rows": false, - "columns": [ - { - "name": "open", - "label": "Opening Time", - "type": "time", - "default": "00:00", - "editable": true - }, - { - "name": "close", - "label": "Closing Time", - "type": "time", - "default": "23:59", - "editable": true - } - ] - }, - { - "label": "Time Zone", - "name": "utc_offset", - "caption": "Time Zone", - "help": "This server's time zone. Used to define your server's operating hours.", - "type": "select", - "options": [ - { - "value": "-12", - "label": "UTC-12:00" - }, - { - "value": "-11", - "label": "UTC-11:00" - }, - { - "value": "-10", - "label": "UTC-10:00" - }, - { - "value": "-9.5", - "label": "UTC-09:30" - }, - { - "value": "-9", - "label": "UTC-09:00" - }, - { - "value": "-8", - "label": "UTC-08:00" - }, - { - "value": "-7", - "label": "UTC-07:00" - }, - { - "value": "-6", - "label": "UTC-06:00" - }, - { - "value": "-5", - "label": "UTC-05:00" - }, - { - "value": "-4", - "label": "UTC-04:00" - }, - { - "value": "-3.5", - "label": "UTC-03:30" - }, - { - "value": "-3", - "label": "UTC-03:00" - }, - { - "value": "-2", - "label": "UTC-02:00" - }, - { - "value": "-1", - "label": "UTC-01:00" - }, - { - "value": "", - "label": "UTC±00:00" - }, - { - "value": "1", - "label": "UTC+01:00" - }, - { - "value": "2", - "label": "UTC+02:00" - }, - { - "value": "3", - "label": "UTC+03:00" - }, - { - "value": "3.5", - "label": "UTC+03:30" - }, - { - "value": "4", - "label": "UTC+04:00" - }, - { - 
"value": "4.5", - "label": "UTC+04:30" - }, - { - "value": "5", - "label": "UTC+05:00" - }, - { - "value": "5.5", - "label": "UTC+05:30" - }, - { - "value": "5.75", - "label": "UTC+05:45" - }, - { - "value": "6", - "label": "UTC+06:00" - }, - { - "value": "6.5", - "label": "UTC+06:30" - }, - { - "value": "7", - "label": "UTC+07:00" - }, - { - "value": "8", - "label": "UTC+08:00" - }, - { - "value": "8.5", - "label": "UTC+08:30" - }, - { - "value": "8.75", - "label": "UTC+08:45" - }, - { - "value": "9", - "label": "UTC+09:00" - }, - { - "value": "9.5", - "label": "UTC+09:30" - }, - { - "value": "10", - "label": "UTC+10:00" - }, - { - "value": "10.5", - "label": "UTC+10:30" - }, - { - "value": "11", - "label": "UTC+11:00" - }, - { - "value": "12", - "label": "UTC+12:00" - }, - { - "value": "12.75", - "label": "UTC+12:45" - }, - { - "value": "13", - "label": "UTC+13:00" - }, - { - "value": "14", - "label": "UTC+14:00" - } - ] } ] }, diff --git a/domain-server/resources/web/settings/js/settings.js b/domain-server/resources/web/settings/js/settings.js index 3ed7d02364..c5f055bed0 100644 --- a/domain-server/resources/web/settings/js/settings.js +++ b/domain-server/resources/web/settings/js/settings.js @@ -1306,7 +1306,9 @@ function badgeSidebarForDifferences(changedElement) { var isGrouped = $('#' + panelParentID).hasClass('grouped'); if (isGrouped) { - var initialPanelJSON = Settings.initialValues[panelParentID]; + var initialPanelJSON = Settings.initialValues[panelParentID] + ? 
Settings.initialValues[panelParentID] + : {}; // get a JSON representation of that section var panelJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID]; @@ -1417,7 +1419,7 @@ function addTableRow(row) { input_clone.children('td').each(function () { if ($(this).attr("name") !== keepField) { - $(this).find("input").val($(this).attr('data-default')); + $(this).find("input").val($(this).children('input').attr('data-default')); } }); @@ -1595,7 +1597,11 @@ function updateDataChangedForSiblingRows(row, forceTrue) { // get a JSON representation of that section var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName] - var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName] + if (Settings.initialValues[panelParentID]) { + var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName] + } else { + var initialPanelSettingJSON = {}; + } // if they are equal, we don't need data-changed isTrue = !_.isEqual(panelSettingJSON, initialPanelSettingJSON) diff --git a/domain-server/src/DomainMetadata.cpp b/domain-server/src/DomainMetadata.cpp index d614b1bbd3..c19cefa397 100644 --- a/domain-server/src/DomainMetadata.cpp +++ b/domain-server/src/DomainMetadata.cpp @@ -35,12 +35,6 @@ const QString DomainMetadata::Descriptors::RESTRICTION = "restriction"; // parse const QString DomainMetadata::Descriptors::MATURITY = "maturity"; const QString DomainMetadata::Descriptors::HOSTS = "hosts"; const QString DomainMetadata::Descriptors::TAGS = "tags"; -const QString DomainMetadata::Descriptors::HOURS = "hours"; -const QString DomainMetadata::Descriptors::Hours::WEEKDAY = "weekday"; -const QString DomainMetadata::Descriptors::Hours::WEEKEND = "weekend"; -const QString DomainMetadata::Descriptors::Hours::UTC_OFFSET = "utc_offset"; -const QString DomainMetadata::Descriptors::Hours::OPEN = "open"; -const QString DomainMetadata::Descriptors::Hours::CLOSE = "close"; 
// descriptors metadata will appear as (JSON): // { "description": String, // capped description // "capacity": Number, @@ -48,11 +42,6 @@ const QString DomainMetadata::Descriptors::Hours::CLOSE = "close"; // "maturity": String, // enum corresponding to ESRB ratings // "hosts": [ String ], // capped list of usernames // "tags": [ String ], // capped list of tags -// "hours": { -// "utc_offset": Number, -// "weekday": [ [ Time, Time ] ], -// "weekend": [ [ Time, Time ] ], -// } // } // metadata will appear as (JSON): @@ -60,52 +49,10 @@ const QString DomainMetadata::Descriptors::Hours::CLOSE = "close"; // // it is meant to be sent to and consumed by an external API -// merge delta into target -// target should be of the form [ OpenTime, CloseTime ], -// delta should be of the form [ { open: Time, close: Time } ] -void parseHours(QVariant delta, QVariant& target) { - using Hours = DomainMetadata::Descriptors::Hours; - static const QVariantList DEFAULT_HOURS{ - { QVariantList{ "00:00", "23:59" } } - }; - target.setValue(DEFAULT_HOURS); - - if (!delta.canConvert()) { - return; - } - - auto& deltaList = *static_cast(delta.data()); - if (deltaList.isEmpty()) { - return; - } - - auto& deltaHours = *static_cast(deltaList.first().data()); - auto open = deltaHours.find(Hours::OPEN); - auto close = deltaHours.find(Hours::CLOSE); - if (open == deltaHours.end() || close == deltaHours.end()) { - return; - } - - // merge delta into new hours - static const int OPEN_INDEX = 0; - static const int CLOSE_INDEX = 1; - auto& hours = *static_cast(static_cast(target.data())->first().data()); - hours[OPEN_INDEX] = open.value(); - hours[CLOSE_INDEX] = close.value(); - - assert(hours[OPEN_INDEX].canConvert()); - assert(hours[CLOSE_INDEX].canConvert()); -} - DomainMetadata::DomainMetadata(QObject* domainServer) : QObject(domainServer) { - // set up the structure necessary for casting during parsing (see parseHours, esp.) 
+ // set up the structure necessary for casting during parsing _metadata[USERS] = QVariantMap {}; - _metadata[DESCRIPTORS] = QVariantMap { { - Descriptors::HOURS, QVariantMap { - { Descriptors::Hours::WEEKDAY, QVariant{} }, - { Descriptors::Hours::WEEKEND, QVariant{} } - } - } }; + _metadata[DESCRIPTORS] = QVariantMap {}; assert(dynamic_cast(domainServer)); DomainServer* server = static_cast(domainServer); @@ -154,16 +101,6 @@ void DomainMetadata::descriptorsChanged() { unsigned int capacity = capacityVariant ? capacityVariant->toUInt() : 0; state[Descriptors::CAPACITY] = capacity; - // parse operating hours - static const QString WEEKDAY_HOURS = "weekday_hours"; - static const QString WEEKEND_HOURS = "weekend_hours"; - static const QString UTC_OFFSET = "utc_offset"; - assert(state[Descriptors::HOURS].canConvert()); - auto& hours = *static_cast(state[Descriptors::HOURS].data()); - hours[Descriptors::Hours::UTC_OFFSET] = descriptors.take(UTC_OFFSET); - parseHours(descriptors[WEEKDAY_HOURS], hours[Descriptors::Hours::WEEKDAY]); - parseHours(descriptors[WEEKEND_HOURS], hours[Descriptors::Hours::WEEKEND]); - #if DEV_BUILD || PR_BUILD qDebug() << "Domain metadata descriptors set:" << QJsonObject::fromVariantMap(_metadata[DESCRIPTORS].toMap()); #endif diff --git a/domain-server/src/DomainMetadata.h b/domain-server/src/DomainMetadata.h index 41f3a60832..ed4e324464 100644 --- a/domain-server/src/DomainMetadata.h +++ b/domain-server/src/DomainMetadata.h @@ -39,15 +39,6 @@ public: static const QString MATURITY; static const QString HOSTS; static const QString TAGS; - static const QString HOURS; - class Hours { - public: - static const QString WEEKDAY; - static const QString WEEKEND; - static const QString UTC_OFFSET; - static const QString OPEN; - static const QString CLOSE; - }; }; DomainMetadata(QObject* domainServer); diff --git a/domain-server/src/DomainServerSettingsManager.cpp b/domain-server/src/DomainServerSettingsManager.cpp index a0b80875b0..31d6845972 100644 --- 
a/domain-server/src/DomainServerSettingsManager.cpp +++ b/domain-server/src/DomainServerSettingsManager.cpp @@ -21,7 +21,6 @@ #include #include #include -#include #include #include @@ -270,11 +269,6 @@ void DomainServerSettingsManager::setupConfigMap(const QStringList& argumentList _agentPermissions.clear(); } - if (oldVersion < 1.5) { - // This was prior to operating hours, so add default hours - validateDescriptorsMap(); - } - if (oldVersion < 1.6) { unpackPermissions(); @@ -305,46 +299,10 @@ void DomainServerSettingsManager::setupConfigMap(const QStringList& argumentList } QVariantMap& DomainServerSettingsManager::getDescriptorsMap() { - validateDescriptorsMap(); - static const QString DESCRIPTORS{ "descriptors" }; return *static_cast(getSettingsMap()[DESCRIPTORS].data()); } -void DomainServerSettingsManager::validateDescriptorsMap() { - static const QString WEEKDAY_HOURS{ "descriptors.weekday_hours" }; - static const QString WEEKEND_HOURS{ "descriptors.weekend_hours" }; - static const QString UTC_OFFSET{ "descriptors.utc_offset" }; - - QVariant* weekdayHours = _configMap.valueForKeyPath(WEEKDAY_HOURS, true); - QVariant* weekendHours = _configMap.valueForKeyPath(WEEKEND_HOURS, true); - QVariant* utcOffset = _configMap.valueForKeyPath(UTC_OFFSET, true); - - static const QString OPEN{ "open" }; - static const QString CLOSE{ "close" }; - static const QString DEFAULT_OPEN{ "00:00" }; - static const QString DEFAULT_CLOSE{ "23:59" }; - bool wasMalformed = false; - if (weekdayHours->isNull()) { - *weekdayHours = QVariantList{ QVariantMap{ { OPEN, QVariant(DEFAULT_OPEN) }, { CLOSE, QVariant(DEFAULT_CLOSE) } } }; - wasMalformed = true; - } - if (weekendHours->isNull()) { - *weekendHours = QVariantList{ QVariantMap{ { OPEN, QVariant(DEFAULT_OPEN) }, { CLOSE, QVariant(DEFAULT_CLOSE) } } }; - wasMalformed = true; - } - if (utcOffset->isNull()) { - *utcOffset = QVariant(QTimeZone::systemTimeZone().offsetFromUtc(QDateTime::currentDateTime()) / (float)SECS_PER_HOUR); - 
wasMalformed = true; - } - - if (wasMalformed) { - // write the new settings to file - persistToFile(); - } -} - - void DomainServerSettingsManager::initializeGroupPermissions(NodePermissionsMap& permissionsRows, QString groupName, NodePermissionsPointer perms) { // this is called when someone has used the domain-settings webpage to add a group. They type the group's name diff --git a/domain-server/src/DomainServerSettingsManager.h b/domain-server/src/DomainServerSettingsManager.h index 2b5f9518a0..d56a786d4b 100644 --- a/domain-server/src/DomainServerSettingsManager.h +++ b/domain-server/src/DomainServerSettingsManager.h @@ -138,8 +138,6 @@ private: friend class DomainServer; - void validateDescriptorsMap(); - // these cause calls to metaverse's group api void apiGetGroupID(const QString& groupName); void apiGetGroupRanks(const QUuid& groupID); diff --git a/interface/resources/controllers/standard.json b/interface/resources/controllers/standard.json index c9e91c8666..04a3f560b6 100644 --- a/interface/resources/controllers/standard.json +++ b/interface/resources/controllers/standard.json @@ -22,7 +22,7 @@ "to": "Actions.Up", "filters": [ - { "type": "deadZone", "min": 0.95 }, + { "type": "deadZone", "min": 0.6 }, "invert" ] }, diff --git a/interface/resources/qml/hifi/dialogs/AvatarPreferencesDialog.qml b/interface/resources/qml/hifi/dialogs/AvatarPreferencesDialog.qml index 45414cfaf8..86f195612c 100644 --- a/interface/resources/qml/hifi/dialogs/AvatarPreferencesDialog.qml +++ b/interface/resources/qml/hifi/dialogs/AvatarPreferencesDialog.qml @@ -7,7 +7,7 @@ PreferencesDialog { id: root objectName: "AvatarPreferencesDialog" title: "Avatar Settings" - showCategories: [ "Avatar Basics", "Snapshots", "Avatar Tuning", "Avatar Camera" ] + showCategories: [ "Avatar Basics", "Avatar Tuning", "Avatar Camera" ] property var settings: Settings { category: root.objectName property alias x: root.x diff --git a/interface/resources/qml/hifi/tablet/TabletMouseHandler.qml 
b/interface/resources/qml/hifi/tablet/TabletMouseHandler.qml index 32e34e279b..17a00eccde 100644 --- a/interface/resources/qml/hifi/tablet/TabletMouseHandler.qml +++ b/interface/resources/qml/hifi/tablet/TabletMouseHandler.qml @@ -87,7 +87,11 @@ Item { if (topMenu.objectName === "") { breadcrumbText.text = "Menu"; } else { - breadcrumbText.text = topMenu.objectName; + if (menuStack.length === 1) { + breadcrumbText.text = "Menu"; + } else { + breadcrumbText.text = topMenu.objectName; + } } } else { breadcrumbText.text = "Menu"; diff --git a/interface/src/main.cpp b/interface/src/main.cpp index d33dba535e..39b37e3d19 100644 --- a/interface/src/main.cpp +++ b/interface/src/main.cpp @@ -74,6 +74,11 @@ int main(int argc, const char* argv[]) { instanceMightBeRunning = !sharedMemory.create(1, QSharedMemory::ReadOnly); #endif + // allow multiple interfaces to run if this environment variable is set. + if (QProcessEnvironment::systemEnvironment().contains("HIFI_ALLOW_MULTIPLE_INSTANCES")) { + instanceMightBeRunning = false; + } + if (instanceMightBeRunning) { // Try to connect and send message to existing interface instance QLocalSocket socket; diff --git a/interface/src/ui/PreferencesDialog.cpp b/interface/src/ui/PreferencesDialog.cpp index d68d9b4531..6377cda281 100644 --- a/interface/src/ui/PreferencesDialog.cpp +++ b/interface/src/ui/PreferencesDialog.cpp @@ -104,7 +104,7 @@ void setupPreferences() { { auto getter = []()->bool { return SnapshotAnimated::alsoTakeAnimatedSnapshot.get(); }; auto setter = [](bool value) { SnapshotAnimated::alsoTakeAnimatedSnapshot.set(value); }; - preferences->addPreference(new CheckPreference(SNAPSHOTS, "Take Animated GIF Snapshot with HUD Button", getter, setter)); + preferences->addPreference(new CheckPreference(SNAPSHOTS, "Take Animated GIF Snapshot", getter, setter)); } { auto getter = []()->float { return SnapshotAnimated::snapshotAnimatedDuration.get(); }; diff --git a/libraries/audio/src/AudioHRTF.h b/libraries/audio/src/AudioHRTF.h 
index 6a17a2d3cc..a197264994 100644 --- a/libraries/audio/src/AudioHRTF.h +++ b/libraries/audio/src/AudioHRTF.h @@ -48,6 +48,7 @@ public: // HRTF local gain adjustment in amplitude (1.0 == unity) // void setGainAdjustment(float gain) { _gainAdjust = HRTF_GAIN * gain; }; + float getGainAdjustment() { return _gainAdjust; } private: AudioHRTF(const AudioHRTF&) = delete; diff --git a/libraries/audio/src/InboundAudioStream.cpp b/libraries/audio/src/InboundAudioStream.cpp index 36a6079546..57c344adaf 100644 --- a/libraries/audio/src/InboundAudioStream.cpp +++ b/libraries/audio/src/InboundAudioStream.cpp @@ -131,12 +131,16 @@ int InboundAudioStream::parseData(ReceivedMessage& message) { // handle this packet based on its arrival status. switch (arrivalInfo._status) { + case SequenceNumberStats::Unreasonable: { + lostAudioData(1); + break; + } case SequenceNumberStats::Early: { // Packet is early; write droppable silent samples for each of the skipped packets. // NOTE: we assume that each dropped packet contains the same number of samples // as the packet we just received. 
int packetsDropped = arrivalInfo._seqDiffFromExpected; - writeFramesForDroppedPackets(packetsDropped * networkFrames); + lostAudioData(packetsDropped); // fall through to OnTime case } @@ -208,6 +212,21 @@ int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& } } +int InboundAudioStream::lostAudioData(int numPackets) { + QByteArray decodedBuffer; + + while (numPackets--) { + if (_decoder) { + _decoder->lostFrame(decodedBuffer); + } else { + decodedBuffer.resize(AudioConstants::NETWORK_FRAME_BYTES_STEREO); + memset(decodedBuffer.data(), 0, decodedBuffer.size()); + } + _ringBuffer.writeData(decodedBuffer.data(), decodedBuffer.size()); + } + return 0; +} + int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) { QByteArray decodedBuffer; if (_decoder) { @@ -220,9 +239,6 @@ int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packet } int InboundAudioStream::writeDroppableSilentFrames(int silentFrames) { - if (_decoder) { - _decoder->trackLostFrames(silentFrames); - } // calculate how many silent frames we should drop. 
int silentSamples = silentFrames * _numChannels; @@ -416,29 +432,6 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() { _lastPacketReceivedTime = now; } -int InboundAudioStream::writeFramesForDroppedPackets(int networkFrames) { - return writeLastFrameRepeatedWithFade(networkFrames); -} - -int InboundAudioStream::writeLastFrameRepeatedWithFade(int frames) { - AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten(); - int frameSize = _ringBuffer.getNumFrameSamples(); - int samplesToWrite = frames * _numChannels; - int indexOfRepeat = 0; - do { - int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize); - float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat); - if (fade == 1.0f) { - samplesToWrite -= _ringBuffer.writeSamples(frameToRepeat, samplesToWriteThisIteration); - } else { - samplesToWrite -= _ringBuffer.writeSamplesWithFade(frameToRepeat, samplesToWriteThisIteration, fade); - } - indexOfRepeat++; - } while (samplesToWrite > 0); - - return frames; -} - AudioStreamStats InboundAudioStream::getAudioStreamStats() const { AudioStreamStats streamStats; diff --git a/libraries/audio/src/InboundAudioStream.h b/libraries/audio/src/InboundAudioStream.h index f7b79ab136..9494b2f204 100644 --- a/libraries/audio/src/InboundAudioStream.h +++ b/libraries/audio/src/InboundAudioStream.h @@ -115,8 +115,6 @@ public slots: private: void packetReceivedUpdateTimingStats(); - int writeFramesForDroppedPackets(int networkFrames); - void popSamplesNoCheck(int samples); void framesAvailableChanged(); @@ -134,12 +132,11 @@ protected: /// default implementation assumes packet contains raw audio samples after stream properties virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties); + /// produces audio data for lost network packets. 
+ virtual int lostAudioData(int numPackets); + /// writes silent frames to the buffer that may be dropped to reduce latency caused by the buffer virtual int writeDroppableSilentFrames(int silentFrames); - - /// writes the last written frame repeatedly, gradually fading to silence. - /// used for writing samples for dropped packets. - virtual int writeLastFrameRepeatedWithFade(int frames); protected: diff --git a/libraries/audio/src/MixedProcessedAudioStream.cpp b/libraries/audio/src/MixedProcessedAudioStream.cpp index 671d3a9d60..082977246b 100644 --- a/libraries/audio/src/MixedProcessedAudioStream.cpp +++ b/libraries/audio/src/MixedProcessedAudioStream.cpp @@ -31,11 +31,26 @@ int MixedProcessedAudioStream::writeDroppableSilentFrames(int silentFrames) { return deviceSilentFramesWritten; } -int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int frames) { - int deviceFrames = networkToDeviceFrames(frames); - int deviceFramesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(deviceFrames); - emit addedLastFrameRepeatedWithFade(deviceToNetworkFrames(deviceFramesWritten)); - return deviceFramesWritten; +int MixedProcessedAudioStream::lostAudioData(int numPackets) { + QByteArray decodedBuffer; + QByteArray outputBuffer; + + while (numPackets--) { + if (_decoder) { + _decoder->lostFrame(decodedBuffer); + } else { + decodedBuffer.resize(AudioConstants::NETWORK_FRAME_BYTES_STEREO); + memset(decodedBuffer.data(), 0, decodedBuffer.size()); + } + + emit addedStereoSamples(decodedBuffer); + + emit processSamples(decodedBuffer, outputBuffer); + + _ringBuffer.writeData(outputBuffer.data(), outputBuffer.size()); + qCDebug(audiostream, "Wrote %d samples to buffer (%d available)", outputBuffer.size() / (int)sizeof(int16_t), getSamplesAvailable()); + } + return 0; } int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) { diff --git a/libraries/audio/src/MixedProcessedAudioStream.h 
b/libraries/audio/src/MixedProcessedAudioStream.h index d536163d2d..14da1d45af 100644 --- a/libraries/audio/src/MixedProcessedAudioStream.h +++ b/libraries/audio/src/MixedProcessedAudioStream.h @@ -34,8 +34,8 @@ public: protected: int writeDroppableSilentFrames(int silentFrames) override; - int writeLastFrameRepeatedWithFade(int frames) override; int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) override; + int lostAudioData(int numPackets) override; private: int networkToDeviceFrames(int networkFrames); diff --git a/libraries/entities/src/EntityEditPacketSender.cpp b/libraries/entities/src/EntityEditPacketSender.cpp index e05db07d0d..c8a14c40be 100644 --- a/libraries/entities/src/EntityEditPacketSender.cpp +++ b/libraries/entities/src/EntityEditPacketSender.cpp @@ -29,7 +29,7 @@ void EntityEditPacketSender::processEntityEditNackPacket(QSharedPointer(); + const QUuid myNodeID = nodeList->getSessionUUID(); + propertiesCopy.setParentID(myNodeID); + success = EntityItemProperties::encodeEntityEditPacket(type, entityItemID, propertiesCopy, bufferOut); + } else { + success = EntityItemProperties::encodeEntityEditPacket(type, entityItemID, properties, bufferOut); + } + + if (success) { #ifdef WANT_DEBUG qCDebug(entities) << "calling queueOctreeEditMessage()..."; qCDebug(entities) << " id:" << entityItemID; diff --git a/libraries/entities/src/EntityItem.cpp b/libraries/entities/src/EntityItem.cpp index 233ce7d88e..6543af5355 100644 --- a/libraries/entities/src/EntityItem.cpp +++ b/libraries/entities/src/EntityItem.cpp @@ -828,7 +828,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef { // parentID and parentJointIndex are also protected by simulation ownership bool oldOverwrite = overwriteLocalData; overwriteLocalData = overwriteLocalData && !weOwnSimulation; - READ_ENTITY_PROPERTY(PROP_PARENT_ID, QUuid, setParentID); + READ_ENTITY_PROPERTY(PROP_PARENT_ID, QUuid, updateParentID); 
READ_ENTITY_PROPERTY(PROP_PARENT_JOINT_INDEX, quint16, setParentJointIndex); overwriteLocalData = oldOverwrite; } @@ -1823,28 +1823,6 @@ void EntityItem::computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask } uint8_t userMask = getCollisionMask(); - if (userMask & USER_COLLISION_GROUP_MY_AVATAR) { - // if this entity is a descendant of MyAvatar, don't collide with MyAvatar. This avoids the - // "bootstrapping" problem where you can shoot yourself across the room by grabbing something - // and holding it against your own avatar. - QUuid ancestorID = findAncestorOfType(NestableType::Avatar); - if (!ancestorID.isNull() && ancestorID == Physics::getSessionUUID()) { - userMask &= ~USER_COLLISION_GROUP_MY_AVATAR; - } - } - if (userMask & USER_COLLISION_GROUP_MY_AVATAR) { - // also, don't bootstrap our own avatar with a hold action - QList holdActions = getActionsOfType(ACTION_TYPE_HOLD); - QList::const_iterator i = holdActions.begin(); - while (i != holdActions.end()) { - EntityActionPointer action = *i; - if (action->isMine()) { - userMask &= ~USER_COLLISION_GROUP_MY_AVATAR; - break; - } - i++; - } - } if ((bool)(userMask & USER_COLLISION_GROUP_MY_AVATAR) != (bool)(userMask & USER_COLLISION_GROUP_OTHER_AVATAR)) { @@ -1854,6 +1832,33 @@ void EntityItem::computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask userMask ^= USER_COLLISION_MASK_AVATARS | ~userMask; } } + + if (userMask & USER_COLLISION_GROUP_MY_AVATAR) { + bool iAmHoldingThis = false; + // if this entity is a descendant of MyAvatar, don't collide with MyAvatar. This avoids the + // "bootstrapping" problem where you can shoot yourself across the room by grabbing something + // and holding it against your own avatar. 
+ QUuid ancestorID = findAncestorOfType(NestableType::Avatar); + if (!ancestorID.isNull() && + (ancestorID == Physics::getSessionUUID() || ancestorID == AVATAR_SELF_ID)) { + iAmHoldingThis = true; + } + // also, don't bootstrap our own avatar with a hold action + QList holdActions = getActionsOfType(ACTION_TYPE_HOLD); + QList::const_iterator i = holdActions.begin(); + while (i != holdActions.end()) { + EntityActionPointer action = *i; + if (action->isMine()) { + iAmHoldingThis = true; + break; + } + i++; + } + + if (iAmHoldingThis) { + userMask &= ~USER_COLLISION_GROUP_MY_AVATAR; + } + } mask = Physics::getDefaultCollisionMask(group) & (int16_t)(userMask); } } diff --git a/libraries/entities/src/EntityTree.cpp b/libraries/entities/src/EntityTree.cpp index b7a8841772..4e92b2a572 100644 --- a/libraries/entities/src/EntityTree.cpp +++ b/libraries/entities/src/EntityTree.cpp @@ -104,6 +104,7 @@ bool EntityTree::handlesEditPacketType(PacketType packetType) const { case PacketType::EntityAdd: case PacketType::EntityEdit: case PacketType::EntityErase: + case PacketType::EntityPhysics: return true; default: return false; @@ -931,10 +932,15 @@ void EntityTree::initEntityEditFilterEngine(QScriptEngine* engine, std::function qCDebug(entities) << "Filter function specified but not found. Will reject all edits."; _entityEditFilterEngine = nullptr; // So that we don't try to call it. See filterProperties. 
} + auto entitiesObject = _entityEditFilterEngine->newObject(); + entitiesObject.setProperty("ADD_FILTER_TYPE", FilterType::Add); + entitiesObject.setProperty("EDIT_FILTER_TYPE", FilterType::Edit); + entitiesObject.setProperty("PHYSICS_FILTER_TYPE", FilterType::Physics); + global.setProperty("Entities", entitiesObject); _hasEntityEditFilter = true; } -bool EntityTree::filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, bool isAdd) { +bool EntityTree::filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, FilterType filterType) { if (!_entityEditFilterEngine) { propertiesOut = propertiesIn; wasChanged = false; // not changed @@ -953,7 +959,7 @@ bool EntityTree::filterProperties(EntityItemProperties& propertiesIn, EntityItem auto in = QJsonValue::fromVariant(inputValues.toVariant()); // grab json copy now, because the inputValues might be side effected by the filter. QScriptValueList args; args << inputValues; - args << isAdd; + args << filterType; QScriptValue result = _entityEditFilterFunction.call(_nullObjectForFilter, args); if (_entityEditFilterHadUncaughtExceptions()) { @@ -1001,6 +1007,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c case PacketType::EntityAdd: isAdd = true; // fall through to next case + case PacketType::EntityPhysics: case PacketType::EntityEdit: { quint64 startDecode = 0, endDecode = 0; quint64 startLookup = 0, endLookup = 0; @@ -1010,6 +1017,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c quint64 startLogging = 0, endLogging = 0; bool suppressDisallowedScript = false; + bool isPhysics = message.getType() == PacketType::EntityPhysics; _totalEditMessages++; @@ -1021,6 +1029,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c entityItemID, properties); endDecode = usecTimestampNow(); + if (validEditPacket && 
!_entityScriptSourceWhitelist.isEmpty() && !properties.getScript().isEmpty()) { bool passedWhiteList = false; @@ -1053,8 +1062,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c } } - if ((isAdd || - (message.getType() == PacketType::EntityEdit && properties.lifetimeChanged())) && + if ((isAdd || properties.lifetimeChanged()) && !senderNode->getCanRez() && senderNode->getCanRezTmp()) { // this node is only allowed to rez temporary entities. if need be, cap the lifetime. if (properties.getLifetime() == ENTITY_ITEM_IMMORTAL_LIFETIME || @@ -1070,8 +1078,9 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c startFilter = usecTimestampNow(); bool wasChanged = false; - // Having (un)lock rights bypasses the filter. - bool allowed = senderNode->isAllowedEditor() || filterProperties(properties, properties, wasChanged, isAdd); + // Having (un)lock rights bypasses the filter, unless it's a physics result. + FilterType filterType = isPhysics ? FilterType::Physics : (isAdd ? 
FilterType::Add : FilterType::Edit); + bool allowed = (!isPhysics && senderNode->isAllowedEditor()) || filterProperties(properties, properties, wasChanged, filterType); if (!allowed) { auto timestamp = properties.getLastEdited(); properties = EntityItemProperties(); @@ -1088,7 +1097,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c startLookup = usecTimestampNow(); EntityItemPointer existingEntity = findEntityByEntityItemID(entityItemID); endLookup = usecTimestampNow(); - if (existingEntity && message.getType() == PacketType::EntityEdit) { + if (existingEntity && !isAdd) { if (suppressDisallowedScript) { bumpTimestamp(properties); diff --git a/libraries/entities/src/EntityTree.h b/libraries/entities/src/EntityTree.h index 9b30096be5..5dad282d3b 100644 --- a/libraries/entities/src/EntityTree.h +++ b/libraries/entities/src/EntityTree.h @@ -60,6 +60,11 @@ public: class EntityTree : public Octree, public SpatialParentTree { Q_OBJECT public: + enum FilterType { + Add, + Edit, + Physics + }; EntityTree(bool shouldReaverage = false); virtual ~EntityTree(); @@ -357,7 +362,7 @@ protected: float _maxTmpEntityLifetime { DEFAULT_MAX_TMP_ENTITY_LIFETIME }; - bool filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, bool isAdd); + bool filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, FilterType filterType); bool _hasEntityEditFilter{ false }; QScriptEngine* _entityEditFilterEngine{}; QScriptValue _entityEditFilterFunction{}; diff --git a/libraries/fbx/src/FBXReader.cpp b/libraries/fbx/src/FBXReader.cpp index 71a8419091..fcaef90527 100644 --- a/libraries/fbx/src/FBXReader.cpp +++ b/libraries/fbx/src/FBXReader.cpp @@ -1467,6 +1467,34 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS // Create the Material Library consolidateFBXMaterials(mapping); + + // HACK: until we get proper LOD management we're 
going to cap model textures + // according to how many unique textures the model uses: + // 1 - 8 textures --> 2048 + // 8 - 32 textures --> 1024 + // 33 - 128 textures --> 512 + // etc... + QSet uniqueTextures; + for (auto& material : _fbxMaterials) { + material.getTextureNames(uniqueTextures); + } + int numTextures = uniqueTextures.size(); + const int MAX_NUM_TEXTURES_AT_MAX_RESOLUTION = 8; + int maxWidth = sqrt(MAX_NUM_PIXELS_FOR_FBX_TEXTURE); + if (numTextures > MAX_NUM_TEXTURES_AT_MAX_RESOLUTION) { + int numTextureThreshold = MAX_NUM_TEXTURES_AT_MAX_RESOLUTION; + const int MIN_MIP_TEXTURE_WIDTH = 64; + do { + maxWidth /= 2; + numTextureThreshold *= 4; + } while (numTextureThreshold < numTextures && maxWidth > MIN_MIP_TEXTURE_WIDTH); + + qCDebug(modelformat) << "Capped square texture width =" << maxWidth << "for model" << url << "with" << numTextures << "textures"; + for (auto& material : _fbxMaterials) { + material.setMaxNumPixelsPerTexture(maxWidth * maxWidth); + } + } + geometry.materials = _fbxMaterials; // see if any materials have texture children diff --git a/libraries/fbx/src/FBXReader.h b/libraries/fbx/src/FBXReader.h index e2e6a8c004..5910b8d312 100644 --- a/libraries/fbx/src/FBXReader.h +++ b/libraries/fbx/src/FBXReader.h @@ -13,6 +13,7 @@ #define hifi_FBXReader_h #include +#include #include #include #include @@ -100,22 +101,25 @@ public: /// A single binding to a joint in an FBX document. class FBXCluster { public: - + int jointIndex; glm::mat4 inverseBindMatrix; }; +const int MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048; + /// A texture map in an FBX document. 
class FBXTexture { public: QString name; QByteArray filename; QByteArray content; - + Transform transform; + int maxNumPixels { MAX_NUM_PIXELS_FOR_FBX_TEXTURE }; int texcoordSet; QString texcoordSetName; - + bool isBumpmap{ false }; bool isNull() const { return name.isEmpty() && filename.isEmpty() && content.isEmpty(); } @@ -143,6 +147,9 @@ public: shininess(shininess), opacity(opacity) {} + void getTextureNames(QSet& textureList) const; + void setMaxNumPixelsPerTexture(int maxNumPixels); + glm::vec3 diffuseColor{ 1.0f }; float diffuseFactor{ 1.0f }; glm::vec3 specularColor{ 0.02f }; diff --git a/libraries/fbx/src/FBXReader_Material.cpp b/libraries/fbx/src/FBXReader_Material.cpp index 8c0f4b34ac..ca2ec557b4 100644 --- a/libraries/fbx/src/FBXReader_Material.cpp +++ b/libraries/fbx/src/FBXReader_Material.cpp @@ -27,6 +27,56 @@ #include "ModelFormatLogging.h" +void FBXMaterial::getTextureNames(QSet& textureList) const { + if (!normalTexture.isNull()) { + textureList.insert(normalTexture.name); + } + if (!albedoTexture.isNull()) { + textureList.insert(albedoTexture.name); + } + if (!opacityTexture.isNull()) { + textureList.insert(opacityTexture.name); + } + if (!glossTexture.isNull()) { + textureList.insert(glossTexture.name); + } + if (!roughnessTexture.isNull()) { + textureList.insert(roughnessTexture.name); + } + if (!specularTexture.isNull()) { + textureList.insert(specularTexture.name); + } + if (!metallicTexture.isNull()) { + textureList.insert(metallicTexture.name); + } + if (!emissiveTexture.isNull()) { + textureList.insert(emissiveTexture.name); + } + if (!occlusionTexture.isNull()) { + textureList.insert(occlusionTexture.name); + } + if (!scatteringTexture.isNull()) { + textureList.insert(scatteringTexture.name); + } + if (!lightmapTexture.isNull()) { + textureList.insert(lightmapTexture.name); + } +} + +void FBXMaterial::setMaxNumPixelsPerTexture(int maxNumPixels) { + normalTexture.maxNumPixels = maxNumPixels; + albedoTexture.maxNumPixels = maxNumPixels; + 
opacityTexture.maxNumPixels = maxNumPixels; + glossTexture.maxNumPixels = maxNumPixels; + roughnessTexture.maxNumPixels = maxNumPixels; + specularTexture.maxNumPixels = maxNumPixels; + metallicTexture.maxNumPixels = maxNumPixels; + emissiveTexture.maxNumPixels = maxNumPixels; + occlusionTexture.maxNumPixels = maxNumPixels; + scatteringTexture.maxNumPixels = maxNumPixels; + lightmapTexture.maxNumPixels = maxNumPixels; +} + bool FBXMaterial::needTangentSpace() const { return !normalTexture.isNull(); } @@ -47,10 +97,10 @@ FBXTexture FBXReader::getTexture(const QString& textureID) { texture.texcoordSet = 0; if (_textureParams.contains(textureID)) { auto p = _textureParams.value(textureID); - + texture.transform.setTranslation(p.translation); texture.transform.setRotation(glm::quat(glm::radians(p.rotation))); - + auto scaling = p.scaling; // Protect from bad scaling which should never happen if (scaling.x == 0.0f) { @@ -63,7 +113,7 @@ FBXTexture FBXReader::getTexture(const QString& textureID) { scaling.z = 1.0f; } texture.transform.setScale(scaling); - + if ((p.UVSet != "map1") && (p.UVSet != "UVSet0")) { texture.texcoordSet = 1; } @@ -78,11 +128,10 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) { QJsonDocument materialMapDocument = QJsonDocument::fromJson(materialMapString.toUtf8()); QJsonObject materialMap = materialMapDocument.object(); - // foreach (const QString& materialID, materials) { for (QHash::iterator it = _fbxMaterials.begin(); it != _fbxMaterials.end(); it++) { FBXMaterial& material = (*it); - // Maya is the exporting the shading model and we aretrying to use it + // Maya is the exporting the shading model and we are trying to use it bool isMaterialLambert = (material.shadingModel.toLower() == "lambert"); // the pure material associated with this part @@ -127,21 +176,19 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) { detectDifferentUVs |= (transparentTexture.texcoordSet != 0) || 
(!transparentTexture.transform.isIdentity()); } - - FBXTexture normalTexture; QString bumpTextureID = bumpTextures.value(material.materialID); QString normalTextureID = normalTextures.value(material.materialID); if (!normalTextureID.isNull()) { normalTexture = getTexture(normalTextureID); normalTexture.isBumpmap = false; - + material.normalTexture = normalTexture; detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity()); } else if (!bumpTextureID.isNull()) { normalTexture = getTexture(bumpTextureID); normalTexture.isBumpmap = true; - + material.normalTexture = normalTexture; detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity()); } @@ -151,7 +198,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) { if (!specularTextureID.isNull()) { specularTexture = getTexture(specularTextureID); detectDifferentUVs |= (specularTexture.texcoordSet != 0) || (!specularTexture.transform.isIdentity()); - material.specularTexture = specularTexture; + material.specularTexture = specularTexture; } FBXTexture metallicTexture; @@ -222,7 +269,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) { ambientTextureID = ambientFactorTextures.value(material.materialID); } } - + if (_loadLightmaps && !ambientTextureID.isNull()) { ambientTexture = getTexture(ambientTextureID); detectDifferentUVs |= (ambientTexture.texcoordSet != 0) || (!ambientTexture.transform.isIdentity()); diff --git a/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.cpp b/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.cpp index 3308ba36ab..ddb2f482a1 100755 --- a/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.cpp +++ b/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.cpp @@ -25,6 +25,9 @@ void KeyboardMouseDevice::pluginUpdate(float deltaTime, const controller::InputC auto userInputMapper = DependencyManager::get(); userInputMapper->withLock([&, 
this]() { _inputDevice->update(deltaTime, inputCalibrationData); + + _inputDevice->_axisStateMap[MOUSE_AXIS_X] = _lastCursor.x(); + _inputDevice->_axisStateMap[MOUSE_AXIS_Y] = _lastCursor.y(); }); // For touch event, we need to check that the last event is not too long ago @@ -249,6 +252,9 @@ controller::Input::NamedVector KeyboardMouseDevice::InputDevice::getAvailableInp availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y_POS), "MouseMoveUp")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y_NEG), "MouseMoveDown")); + availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_X), "MouseX")); + availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y), "MouseY")); + availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_Y_POS), "MouseWheelRight")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_Y_NEG), "MouseWheelLeft")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_X_POS), "MouseWheelUp")); diff --git a/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.h b/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.h index f38b43c107..3570ec7193 100644 --- a/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.h +++ b/libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.h @@ -47,6 +47,8 @@ public: MOUSE_AXIS_X_NEG, MOUSE_AXIS_Y_POS, MOUSE_AXIS_Y_NEG, + MOUSE_AXIS_X, + MOUSE_AXIS_Y, MOUSE_AXIS_WHEEL_Y_POS, MOUSE_AXIS_WHEEL_Y_NEG, MOUSE_AXIS_WHEEL_X_POS, diff --git a/libraries/model-networking/src/model-networking/ModelCache.cpp b/libraries/model-networking/src/model-networking/ModelCache.cpp index 2a02540821..dd3193073d 100644 --- a/libraries/model-networking/src/model-networking/ModelCache.cpp +++ b/libraries/model-networking/src/model-networking/ModelCache.cpp @@ -472,7 +472,7 @@ QUrl NetworkMaterial::getTextureUrl(const QUrl& baseUrl, const FBXTexture& textu model::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const 
FBXTexture& fbxTexture, TextureType type, MapChannel channel) { const auto url = getTextureUrl(baseUrl, fbxTexture); - const auto texture = DependencyManager::get()->getTexture(url, type, fbxTexture.content); + const auto texture = DependencyManager::get()->getTexture(url, type, fbxTexture.content, fbxTexture.maxNumPixels); _textures[channel] = Texture { fbxTexture.name, texture }; auto map = std::make_shared(); diff --git a/libraries/model-networking/src/model-networking/ModelCache.h b/libraries/model-networking/src/model-networking/ModelCache.h index bcca846625..adef3ce2b5 100644 --- a/libraries/model-networking/src/model-networking/ModelCache.h +++ b/libraries/model-networking/src/model-networking/ModelCache.h @@ -159,7 +159,7 @@ protected: class Texture { public: QString name; - QSharedPointer texture; + NetworkTexturePointer texture; }; using Textures = std::vector; diff --git a/libraries/model-networking/src/model-networking/TextureCache.cpp b/libraries/model-networking/src/model-networking/TextureCache.cpp index 6a4edd60ed..447a1b93c8 100644 --- a/libraries/model-networking/src/model-networking/TextureCache.cpp +++ b/libraries/model-networking/src/model-networking/TextureCache.cpp @@ -167,16 +167,17 @@ class TextureExtra { public: NetworkTexture::Type type; const QByteArray& content; + int maxNumPixels; }; -ScriptableResource* TextureCache::prefetch(const QUrl& url, int type) { +ScriptableResource* TextureCache::prefetch(const QUrl& url, int type, int maxNumPixels) { auto byteArray = QByteArray(); - TextureExtra extra = { (Type)type, byteArray }; + TextureExtra extra = { (Type)type, byteArray, maxNumPixels }; return ResourceCache::prefetch(url, &extra); } -NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const QByteArray& content) { - TextureExtra extra = { type, content }; +NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) { + TextureExtra extra = { type, 
content, maxNumPixels }; return ResourceCache::getResource(url, QUrl(), &extra).staticCast(); } @@ -251,13 +252,15 @@ QSharedPointer TextureCache::createResource(const QUrl& url, const QSh const TextureExtra* textureExtra = static_cast(extra); auto type = textureExtra ? textureExtra->type : Type::DEFAULT_TEXTURE; auto content = textureExtra ? textureExtra->content : QByteArray(); - return QSharedPointer(new NetworkTexture(url, type, content), + auto maxNumPixels = textureExtra ? textureExtra->maxNumPixels : ABSOLUTE_MAX_TEXTURE_NUM_PIXELS; + return QSharedPointer(new NetworkTexture(url, type, content, maxNumPixels), &Resource::deleter); } -NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content) : +NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) : Resource(url), - _type(type) + _type(type), + _maxNumPixels(maxNumPixels) { _textureSource = std::make_shared(); @@ -274,7 +277,7 @@ NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& con } NetworkTexture::NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content) : - NetworkTexture(url, CUSTOM_TEXTURE, content) + NetworkTexture(url, CUSTOM_TEXTURE, content, ABSOLUTE_MAX_TEXTURE_NUM_PIXELS) { _textureLoader = textureLoader; } @@ -290,7 +293,8 @@ NetworkTexture::TextureLoaderFunc NetworkTexture::getTextureLoader() const { class ImageReader : public QRunnable { public: - ImageReader(const QWeakPointer& resource, const QByteArray& data, const QUrl& url = QUrl()); + ImageReader(const QWeakPointer& resource, const QByteArray& data, + const QUrl& url = QUrl(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS); virtual void run() override; @@ -300,6 +304,7 @@ private: QWeakPointer _resource; QUrl _url; QByteArray _content; + int _maxNumPixels; }; void NetworkTexture::downloadFinished(const QByteArray& data) { @@ -308,14 +313,15 @@ void NetworkTexture::downloadFinished(const 
QByteArray& data) { } void NetworkTexture::loadContent(const QByteArray& content) { - QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url)); + QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url, _maxNumPixels)); } ImageReader::ImageReader(const QWeakPointer& resource, const QByteArray& data, - const QUrl& url) : + const QUrl& url, int maxNumPixels) : _resource(resource), _url(url), - _content(data) + _content(data), + _maxNumPixels(maxNumPixels) { #if DEBUG_DUMP_TEXTURE_LOADS static auto start = usecTimestampNow() / USECS_PER_MSEC; @@ -375,10 +381,10 @@ void ImageReader::run() { // Note that QImage.format is the pixel format which is different from the "format" of the image file... auto imageFormat = image.format(); - int originalWidth = image.width(); - int originalHeight = image.height(); + int imageWidth = image.width(); + int imageHeight = image.height(); - if (originalWidth == 0 || originalHeight == 0 || imageFormat == QImage::Format_Invalid) { + if (imageWidth == 0 || imageHeight == 0 || imageFormat == QImage::Format_Invalid) { if (filenameExtension.empty()) { qCDebug(modelnetworking) << "QImage failed to create from content, no file extension:" << _url; } else { @@ -386,6 +392,20 @@ void ImageReader::run() { } return; } + + if (imageWidth * imageHeight > _maxNumPixels) { + float scaleFactor = sqrtf(_maxNumPixels / (float)(imageWidth * imageHeight)); + int originalWidth = imageWidth; + int originalHeight = imageHeight; + imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f); + imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f); + QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio); + image.swap(newImage); + qCDebug(modelnetworking) << "Downscale image" << _url + << "from" << originalWidth << "x" << originalHeight + << "to" << imageWidth << "x" << imageHeight; + } + gpu::TexturePointer texture = nullptr; { // Double-check the resource still exists between long 
operations. @@ -408,7 +428,7 @@ void ImageReader::run() { } else { QMetaObject::invokeMethod(resource.data(), "setImage", Q_ARG(gpu::TexturePointer, texture), - Q_ARG(int, originalWidth), Q_ARG(int, originalHeight)); + Q_ARG(int, imageWidth), Q_ARG(int, imageHeight)); } } diff --git a/libraries/model-networking/src/model-networking/TextureCache.h b/libraries/model-networking/src/model-networking/TextureCache.h index 8ccfe5364c..cb509490c6 100644 --- a/libraries/model-networking/src/model-networking/TextureCache.h +++ b/libraries/model-networking/src/model-networking/TextureCache.h @@ -23,6 +23,8 @@ #include #include +const int ABSOLUTE_MAX_TEXTURE_NUM_PIXELS = 8192 * 8192; + namespace gpu { class Batch; } @@ -60,7 +62,7 @@ public: typedef gpu::Texture* TextureLoader(const QImage& image, const std::string& srcImageName); using TextureLoaderFunc = std::function; - NetworkTexture(const QUrl& url, Type type, const QByteArray& content); + NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels); NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content); QString getType() const override { return "NetworkTexture"; } @@ -70,7 +72,7 @@ public: int getWidth() const { return _width; } int getHeight() const { return _height; } Type getTextureType() const { return _type; } - + TextureLoaderFunc getTextureLoader() const; signals: @@ -81,7 +83,7 @@ protected: virtual bool isCacheable() const override { return _loaded; } virtual void downloadFinished(const QByteArray& data) override; - + Q_INVOKABLE void loadContent(const QByteArray& content); Q_INVOKABLE void setImage(gpu::TexturePointer texture, int originalWidth, int originalHeight); @@ -92,6 +94,7 @@ private: int _originalHeight { 0 }; int _width { 0 }; int _height { 0 }; + int _maxNumPixels { ABSOLUTE_MAX_TEXTURE_NUM_PIXELS }; }; using NetworkTexturePointer = QSharedPointer; @@ -129,11 +132,11 @@ public: /// Loads a texture from the specified URL. 
NetworkTexturePointer getTexture(const QUrl& url, Type type = Type::DEFAULT_TEXTURE, - const QByteArray& content = QByteArray()); + const QByteArray& content = QByteArray(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS); protected: // Overload ResourceCache::prefetch to allow specifying texture type for loads - Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type); + Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type, int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS); virtual QSharedPointer createResource(const QUrl& url, const QSharedPointer& fallback, const void* extra) override; diff --git a/libraries/networking/src/NodeList.cpp b/libraries/networking/src/NodeList.cpp index 7bfe1d1845..f4a02ad805 100644 --- a/libraries/networking/src/NodeList.cpp +++ b/libraries/networking/src/NodeList.cpp @@ -827,18 +827,26 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) { }); if (ignoreEnabled) { - QReadLocker ignoredSetLocker{ &_ignoredSetLock }; // read lock for insert - QReadLocker personalMutedSetLocker{ &_personalMutedSetLock }; // read lock for insert - // add this nodeID to our set of ignored IDs - _ignoredNodeIDs.insert(nodeID); - // add this nodeID to our set of personal muted IDs - _personalMutedNodeIDs.insert(nodeID); + { + QReadLocker ignoredSetLocker{ &_ignoredSetLock }; // read lock for insert + // add this nodeID to our set of ignored IDs + _ignoredNodeIDs.insert(nodeID); + } + { + QReadLocker personalMutedSetLocker{ &_personalMutedSetLock }; // read lock for insert + // add this nodeID to our set of personal muted IDs + _personalMutedNodeIDs.insert(nodeID); + } emit ignoredNode(nodeID, true); } else { - QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; // write lock for unsafe_erase - QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; // write lock for unsafe_erase - _ignoredNodeIDs.unsafe_erase(nodeID); - _personalMutedNodeIDs.unsafe_erase(nodeID); + { + QWriteLocker 
ignoredSetLocker{ &_ignoredSetLock }; // write lock for unsafe_erase + _ignoredNodeIDs.unsafe_erase(nodeID); + } + { + QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; // write lock for unsafe_erase + _personalMutedNodeIDs.unsafe_erase(nodeID); + } emit ignoredNode(nodeID, false); } @@ -850,10 +858,14 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) { void NodeList::removeFromIgnoreMuteSets(const QUuid& nodeID) { // don't remove yourself, or nobody if (!nodeID.isNull() && _sessionUUID != nodeID) { - QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; - QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; - _ignoredNodeIDs.unsafe_erase(nodeID); - _personalMutedNodeIDs.unsafe_erase(nodeID); + { + QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; + _ignoredNodeIDs.unsafe_erase(nodeID); + } + { + QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; + _personalMutedNodeIDs.unsafe_erase(nodeID); + } } } diff --git a/libraries/networking/src/ResourceCache.h b/libraries/networking/src/ResourceCache.h index 03d37bc9ac..a369416ebe 100644 --- a/libraries/networking/src/ResourceCache.h +++ b/libraries/networking/src/ResourceCache.h @@ -86,7 +86,7 @@ private: /// Wrapper to expose resources to JS/QML class ScriptableResource : public QObject { Q_OBJECT - Q_PROPERTY(QUrl url READ getUrl) + Q_PROPERTY(QUrl url READ getURL) Q_PROPERTY(int state READ getState NOTIFY stateChanged) /**jsdoc @@ -125,7 +125,7 @@ public: */ Q_INVOKABLE void release(); - const QUrl& getUrl() const { return _url; } + const QUrl& getURL() const { return _url; } int getState() const { return (int)_state; } const QSharedPointer& getResource() const { return _resource; } diff --git a/libraries/networking/src/UserActivityLoggerScriptingInterface.cpp b/libraries/networking/src/UserActivityLoggerScriptingInterface.cpp index 02d1711230..f38d24c31f 100644 --- a/libraries/networking/src/UserActivityLoggerScriptingInterface.cpp +++ 
b/libraries/networking/src/UserActivityLoggerScriptingInterface.cpp @@ -16,6 +16,14 @@ void UserActivityLoggerScriptingInterface::enabledEdit() { logAction("enabled_edit"); } +void UserActivityLoggerScriptingInterface::openedTablet() { + logAction("opened_tablet"); +} + +void UserActivityLoggerScriptingInterface::closedTablet() { + logAction("closed_tablet"); +} + void UserActivityLoggerScriptingInterface::openedMarketplace() { logAction("opened_marketplace"); } diff --git a/libraries/networking/src/UserActivityLoggerScriptingInterface.h b/libraries/networking/src/UserActivityLoggerScriptingInterface.h index a202858a1c..b827b2262a 100644 --- a/libraries/networking/src/UserActivityLoggerScriptingInterface.h +++ b/libraries/networking/src/UserActivityLoggerScriptingInterface.h @@ -21,6 +21,8 @@ class UserActivityLoggerScriptingInterface : public QObject, public Dependency { Q_OBJECT public: Q_INVOKABLE void enabledEdit(); + Q_INVOKABLE void openedTablet(); + Q_INVOKABLE void closedTablet(); Q_INVOKABLE void openedMarketplace(); Q_INVOKABLE void toggledAway(bool isAway); Q_INVOKABLE void tutorialProgress(QString stepName, int stepNumber, float secondsToComplete, diff --git a/libraries/networking/src/udt/PacketHeaders.cpp b/libraries/networking/src/udt/PacketHeaders.cpp index 7cb02010f8..e2dc8d73e6 100644 --- a/libraries/networking/src/udt/PacketHeaders.cpp +++ b/libraries/networking/src/udt/PacketHeaders.cpp @@ -48,7 +48,8 @@ PacketVersion versionForPacketType(PacketType packetType) { case PacketType::EntityAdd: case PacketType::EntityEdit: case PacketType::EntityData: - return VERSION_ENTITIES_SERVER_SCRIPTS; + case PacketType::EntityPhysics: + return VERSION_ENTITIES_PHYSICS_PACKET; case PacketType::EntityQuery: return static_cast(EntityQueryPacketVersion::JsonFilter); case PacketType::AvatarIdentity: diff --git a/libraries/networking/src/udt/PacketHeaders.h b/libraries/networking/src/udt/PacketHeaders.h index d695bde62a..88b5ec19ad 100644 --- 
a/libraries/networking/src/udt/PacketHeaders.h +++ b/libraries/networking/src/udt/PacketHeaders.h @@ -110,7 +110,8 @@ public: EntityScriptGetStatus, EntityScriptGetStatusReply, ReloadEntityServerScript, - LAST_PACKET_TYPE = ReloadEntityServerScript + EntityPhysics, + LAST_PACKET_TYPE = EntityPhysics }; }; @@ -201,6 +202,7 @@ const PacketVersion VERSION_WEB_ENTITIES_SUPPORT_DPI = 63; const PacketVersion VERSION_ENTITIES_ARROW_ACTION = 64; const PacketVersion VERSION_ENTITIES_LAST_EDITED_BY = 65; const PacketVersion VERSION_ENTITIES_SERVER_SCRIPTS = 66; +const PacketVersion VERSION_ENTITIES_PHYSICS_PACKET = 67; enum class EntityQueryPacketVersion: PacketVersion { JsonFilter = 18 diff --git a/libraries/physics/src/EntityMotionState.cpp b/libraries/physics/src/EntityMotionState.cpp index e9891020b3..c175a836cc 100644 --- a/libraries/physics/src/EntityMotionState.cpp +++ b/libraries/physics/src/EntityMotionState.cpp @@ -199,15 +199,12 @@ void EntityMotionState::getWorldTransform(btTransform& worldTrans) const { return; } assert(entityTreeIsLocked()); - if (_motionType == MOTION_TYPE_KINEMATIC) { + if (_motionType == MOTION_TYPE_KINEMATIC && !_entity->hasAncestorOfType(NestableType::Avatar)) { BT_PROFILE("kinematicIntegration"); // This is physical kinematic motion which steps strictly by the subframe count // of the physics simulation and uses full gravity for acceleration. 
- if (_entity->hasAncestorOfType(NestableType::Avatar)) { - _entity->setAcceleration(glm::vec3(0.0f)); - } else { - _entity->setAcceleration(_entity->getGravity()); - } + _entity->setAcceleration(_entity->getGravity()); + uint32_t thisStep = ObjectMotionState::getWorldSimulationStep(); float dt = (thisStep - _lastKinematicStep) * PHYSICS_ENGINE_FIXED_SUBSTEP; _entity->stepKinematicMotion(dt); @@ -614,7 +611,7 @@ void EntityMotionState::sendUpdate(OctreeEditPacketSender* packetSender, uint32_ properties.setClientOnly(_entity->getClientOnly()); properties.setOwningAvatarID(_entity->getOwningAvatarID()); - entityPacketSender->queueEditEntityMessage(PacketType::EntityEdit, tree, id, properties); + entityPacketSender->queueEditEntityMessage(PacketType::EntityPhysics, tree, id, properties); _entity->setLastBroadcast(now); // if we've moved an entity with children, check/update the queryAACube of all descendents and tell the server @@ -630,7 +627,7 @@ void EntityMotionState::sendUpdate(OctreeEditPacketSender* packetSender, uint32_ newQueryCubeProperties.setClientOnly(entityDescendant->getClientOnly()); newQueryCubeProperties.setOwningAvatarID(entityDescendant->getOwningAvatarID()); - entityPacketSender->queueEditEntityMessage(PacketType::EntityEdit, tree, + entityPacketSender->queueEditEntityMessage(PacketType::EntityPhysics, tree, descendant->getID(), newQueryCubeProperties); entityDescendant->setLastBroadcast(now); } diff --git a/libraries/plugins/src/plugins/CodecPlugin.h b/libraries/plugins/src/plugins/CodecPlugin.h index 404f05e860..cb5b857be8 100644 --- a/libraries/plugins/src/plugins/CodecPlugin.h +++ b/libraries/plugins/src/plugins/CodecPlugin.h @@ -23,8 +23,7 @@ public: virtual ~Decoder() { } virtual void decode(const QByteArray& encodedBuffer, QByteArray& decodedBuffer) = 0; - // numFrames - number of samples (mono) or sample-pairs (stereo) - virtual void trackLostFrames(int numFrames) = 0; + virtual void lostFrame(QByteArray& decodedBuffer) = 0; }; class 
CodecPlugin : public Plugin { diff --git a/libraries/script-engine/src/Mat4.cpp b/libraries/script-engine/src/Mat4.cpp index bb65cb1e26..52b9690321 100644 --- a/libraries/script-engine/src/Mat4.cpp +++ b/libraries/script-engine/src/Mat4.cpp @@ -26,6 +26,10 @@ glm::mat4 Mat4::createFromScaleRotAndTrans(const glm::vec3& scale, const glm::qu return createMatFromScaleQuatAndPos(scale, rot, trans); } +glm::mat4 Mat4::createFromColumns(const glm::vec4& col0, const glm::vec4& col1, const glm::vec4& col2, const glm::vec4& col3) const { + return glm::mat4(col0, col1, col2, col3); +} + glm::vec3 Mat4::extractTranslation(const glm::mat4& m) const { return ::extractTranslation(m); } diff --git a/libraries/script-engine/src/Mat4.h b/libraries/script-engine/src/Mat4.h index 047bf56079..8b2a8aa8c1 100644 --- a/libraries/script-engine/src/Mat4.h +++ b/libraries/script-engine/src/Mat4.h @@ -23,8 +23,10 @@ class Mat4 : public QObject { public slots: glm::mat4 multiply(const glm::mat4& m1, const glm::mat4& m2) const; + glm::mat4 createFromRotAndTrans(const glm::quat& rot, const glm::vec3& trans) const; glm::mat4 createFromScaleRotAndTrans(const glm::vec3& scale, const glm::quat& rot, const glm::vec3& trans) const; + glm::mat4 createFromColumns(const glm::vec4& col0, const glm::vec4& col1, const glm::vec4& col2, const glm::vec4& col3) const; glm::vec3 extractTranslation(const glm::mat4& m) const; glm::quat extractRotation(const glm::mat4& m) const; diff --git a/libraries/shared/src/AABox.cpp b/libraries/shared/src/AABox.cpp index 4a74fb4033..89d5ce709d 100644 --- a/libraries/shared/src/AABox.cpp +++ b/libraries/shared/src/AABox.cpp @@ -360,7 +360,7 @@ glm::vec3 AABox::getClosestPointOnFace(const glm::vec3& point, BoxFace face) con case MIN_Z_FACE: return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z), - glm::vec3(_corner.x + _scale.z, _corner.y + _scale.y, _corner.z)); + glm::vec3(_corner.x + _scale.x, _corner.y + _scale.y, _corner.z)); default: //quiet windows warnings 
case MAX_Z_FACE: diff --git a/libraries/shared/src/SpatiallyNestable.cpp b/libraries/shared/src/SpatiallyNestable.cpp index 35e574bf06..ddc3f416e0 100644 --- a/libraries/shared/src/SpatiallyNestable.cpp +++ b/libraries/shared/src/SpatiallyNestable.cpp @@ -1034,6 +1034,13 @@ AACube SpatiallyNestable::getQueryAACube() const { bool SpatiallyNestable::hasAncestorOfType(NestableType nestableType) const { bool success; + if (nestableType == NestableType::Avatar) { + QUuid parentID = getParentID(); + if (parentID == AVATAR_SELF_ID) { + return true; + } + } + SpatiallyNestablePointer parent = getParentPointer(success); if (!success || !parent) { return false; @@ -1048,6 +1055,14 @@ bool SpatiallyNestable::hasAncestorOfType(NestableType nestableType) const { const QUuid SpatiallyNestable::findAncestorOfType(NestableType nestableType) const { bool success; + + if (nestableType == NestableType::Avatar) { + QUuid parentID = getParentID(); + if (parentID == AVATAR_SELF_ID) { + return AVATAR_SELF_ID; // TODO -- can we put nodeID here? 
+ } + } + SpatiallyNestablePointer parent = getParentPointer(success); if (!success || !parent) { return QUuid(); diff --git a/plugins/hifiCodec/src/HiFiCodec.cpp b/plugins/hifiCodec/src/HiFiCodec.cpp index 77c369dcae..2c7151fe59 100644 --- a/plugins/hifiCodec/src/HiFiCodec.cpp +++ b/plugins/hifiCodec/src/HiFiCodec.cpp @@ -65,12 +65,10 @@ public: AudioDecoder::process((const int16_t*)encodedBuffer.constData(), (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, true); } - virtual void trackLostFrames(int numFrames) override { - QByteArray encodedBuffer; - QByteArray decodedBuffer; + virtual void lostFrame(QByteArray& decodedBuffer) override { decodedBuffer.resize(_decodedSize); - // NOTE: we don't actually use the results of this decode, we just do it to keep the state of the codec clean - AudioDecoder::process((const int16_t*)encodedBuffer.constData(), (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, false); + // this performs packet loss interpolation + AudioDecoder::process(nullptr, (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, false); } private: int _decodedSize; diff --git a/plugins/pcmCodec/src/PCMCodecManager.h b/plugins/pcmCodec/src/PCMCodecManager.h index d58a219fef..608e9a1556 100644 --- a/plugins/pcmCodec/src/PCMCodecManager.h +++ b/plugins/pcmCodec/src/PCMCodecManager.h @@ -38,11 +38,14 @@ public: virtual void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) override { encodedBuffer = decodedBuffer; } + virtual void decode(const QByteArray& encodedBuffer, QByteArray& decodedBuffer) override { decodedBuffer = encodedBuffer; } - virtual void trackLostFrames(int numFrames) override { } + virtual void lostFrame(QByteArray& decodedBuffer) override { + memset(decodedBuffer.data(), 0, decodedBuffer.size()); + } private: static const char* NAME; @@ -77,7 +80,9 @@ public: decodedBuffer = qUncompress(encodedBuffer); } - virtual void 
trackLostFrames(int numFrames) override { } + virtual void lostFrame(QByteArray& decodedBuffer) override { + memset(decodedBuffer.data(), 0, decodedBuffer.size()); + } private: static const char* NAME; diff --git a/scripts/developer/tests/performance/crowd-agent.js b/scripts/developer/tests/performance/crowd-agent.js index b87d418643..9db4a112f3 100644 --- a/scripts/developer/tests/performance/crowd-agent.js +++ b/scripts/developer/tests/performance/crowd-agent.js @@ -16,7 +16,7 @@ var MESSAGE_CHANNEL = "io.highfidelity.summon-crowd"; -print('crowd-agent version 4'); +print('crowd-agent version 5'); /* Observations: - File urls for AC scripts silently fail. Use a local server (e.g., python SimpleHTTPServer) for development. @@ -84,6 +84,9 @@ function startAgent(parameters) { // Can also be used to update. clearStopper(); var wasOff = !Agent.isAvatar; Agent.isAvatar = true; + if (parameters.displayName !== undefined) { + Avatar.displayName = parameters.displayName; + } if (parameters.position) { Avatar.position = parameters.position; } diff --git a/scripts/developer/tests/performance/summon.js b/scripts/developer/tests/performance/summon.js index 69bf0860ae..8e888fe9bc 100644 --- a/scripts/developer/tests/performance/summon.js +++ b/scripts/developer/tests/performance/summon.js @@ -13,7 +13,7 @@ // // See crowd-agent.js -var version = 2; +var version = 3; var label = "summon"; function debug() { print.apply(null, [].concat.apply([label, version], [].map.call(arguments, JSON.stringify))); @@ -23,6 +23,9 @@ var MINIMUM_AVATARS = 25; // We will summon agents to produce this many total. ( var N_LISTENING = MINIMUM_AVATARS - 1; var AVATARS_CHATTERING_AT_ONCE = 4; // How many of the agents should we request to play SOUND_DATA at once. 
+var initialBubble = Users.getIgnoreRadiusEnabled(); +debug('startup seeking:', MINIMUM_AVATARS, 'listening:', N_LISTENING, 'chattering:', AVATARS_CHATTERING_AT_ONCE, 'had bubble:', initialBubble); + // If we add or remove things too quickly, we get problems (e.g., audio, fogbugz 2095). // For now, spread them out this timing apart. var SPREAD_TIME_MS = 500; @@ -66,7 +69,7 @@ function messageHandler(channel, messageString, senderID) { if (MyAvatar.sessionUUID === senderID) { // ignore my own return; } - var message = {}, avatarIdentifiers; + var message = {}; try { message = JSON.parse(messageString); } catch (e) { @@ -76,9 +79,10 @@ function messageHandler(channel, messageString, senderID) { case "hello": Script.setTimeout(function () { // There can be avatars we've summoned that do not yet appear in the AvatarList. - avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents); + var avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents); + var nSummoned = summonedAgents.length; debug('present', avatarIdentifiers, summonedAgents); - if ((summonedAgents.length + avatarIdentifiers.length) < MINIMUM_AVATARS ) { + if ((nSummoned + avatarIdentifiers.length) < MINIMUM_AVATARS ) { var chatter = chattering.length < AVATARS_CHATTERING_AT_ONCE; var listen = nListening < N_LISTENING; if (chatter) { @@ -91,6 +95,7 @@ function messageHandler(channel, messageString, senderID) { messageSend({ key: 'SUMMON', rcpt: senderID, + displayName: "crowd " + nSummoned + " " + senderID, position: Vec3.sum(MyAvatar.position, {x: coord(), y: 0, z: coord()}), orientation: Quat.fromPitchYawRollDegrees(0, Quat.safeEulerAngles(MyAvatar.orientation).y + (turnSpread * (Math.random() - 0.5)), 0), soundData: chatter && SOUND_DATA, @@ -100,7 +105,7 @@ function messageHandler(channel, messageString, senderID) { }); } }, accumulatedDelay); - accumulatedDelay += SPREAD_TIME_MS; // assume we'll get all the hello respsponses more or less together. 
+ accumulatedDelay += SPREAD_TIME_MS; // assume we'll get all the hello responses more or less together. break; case "finishedSound": // Give someone else a chance. chattering = without(chattering, [senderID]); @@ -123,6 +128,8 @@ Messages.subscribe(MESSAGE_CHANNEL); Messages.messageReceived.connect(messageHandler); Script.scriptEnding.connect(function () { debug('stopping agents', summonedAgents); + Users.requestsDomainListData = false; + if (initialBubble && !Users.getIgnoreRadiusEnabled()) { Users.toggleIgnoreRadius(); } Messages.messageReceived.disconnect(messageHandler); // don't respond to any messages during shutdown accumulatedDelay = 0; summonedAgents.forEach(function (id) { @@ -134,14 +141,17 @@ Script.scriptEnding.connect(function () { debug('unsubscribed'); }); +Users.requestsDomainListData = true; // Get avatar data for the whole domain, even if not in our view. +if (initialBubble) { Users.toggleIgnoreRadius(); } messageSend({key: 'HELO'}); // Ask agents to report in now. Script.setTimeout(function () { var total = AvatarList.getAvatarIdentifiers().length; if (0 === summonedAgents.length) { Window.alert("No agents reported.\n\Please run " + MINIMUM_AVATARS + " instances of\n\ -http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js\n\ +http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js?v=someDate\n\ on your domain server."); } else if (total < MINIMUM_AVATARS) { Window.alert("Only " + summonedAgents.length + " agents reported. 
Now missing " + (MINIMUM_AVATARS - total) + " avatars, total."); } + Users.requestsDomainListData = false; }, MINIMUM_AVATARS * SPREAD_TIME_MS ) diff --git a/scripts/system/controllers/handControllerGrab.js b/scripts/system/controllers/handControllerGrab.js index d2c9fdc05a..087460deab 100644 --- a/scripts/system/controllers/handControllerGrab.js +++ b/scripts/system/controllers/handControllerGrab.js @@ -53,6 +53,13 @@ var HAPTIC_TEXTURE_DISTANCE = 0.002; var HAPTIC_DEQUIP_STRENGTH = 0.75; var HAPTIC_DEQUIP_DURATION = 50.0; +// triggered when stylus presses a web overlay/entity +var HAPTIC_STYLUS_STRENGTH = 1.0; +var HAPTIC_STYLUS_DURATION = 20.0; + +// triggerd when ui laser presses a web overlay/entity +var HAPTIC_LASER_UI_STRENGTH = 1.0; +var HAPTIC_LASER_UI_DURATION = 20.0; var HAND_HEAD_MIX_RATIO = 0.0; // 0 = only use hands for search/move. 1 = only use head for search/move. @@ -122,7 +129,6 @@ var GRAB_POINT_SPHERE_RADIUS = NEAR_GRAB_RADIUS; var GRAB_POINT_SPHERE_COLOR = { red: 240, green: 240, blue: 240 }; var GRAB_POINT_SPHERE_ALPHA = 0.85; - // // other constants // @@ -1248,7 +1254,7 @@ function MyController(hand) { if (homeButton === hmdHomeButton) { if (this.homeButtonTouched === false) { this.homeButtonTouched = true; - Controller.triggerHapticPulse(1, 20, this.hand); + Controller.triggerHapticPulse(HAPTIC_STYLUS_STRENGTH, HAPTIC_STYLUS_DURATION, this.hand); Messages.sendLocalMessage("home", homeButton); } } else { @@ -1266,7 +1272,7 @@ function MyController(hand) { if (homeButton === hmdHomeButton) { if (this.homeButtonTouched === false) { this.homeButtonTouched = true; - Controller.triggerHapticPulse(1, 20, this.hand); + Controller.triggerHapticPulse(HAPTIC_LASER_UI_STRENGTH, HAPTIC_LASER_UI_DURATION, this.hand); Messages.sendLocalMessage("home", homeButton); } } else { @@ -1754,7 +1760,6 @@ function MyController(hand) { Entities.sendHoverOverEntity(entity, pointerEvent); } - this.grabbedEntity = entity; this.setState(STATE_ENTITY_STYLUS_TOUCHING, 
"begin touching entity '" + name + "'"); return true; @@ -1775,11 +1780,6 @@ function MyController(hand) { var pointerEvent; if (rayPickInfo.overlayID) { var overlay = rayPickInfo.overlayID; - - if (!this.homeButtonTouched) { - Controller.triggerHapticPulse(1, 20, this.hand); - } - if (Overlays.keyboardFocusOverlay != overlay) { Entities.keyboardFocusEntity = null; Overlays.keyboardFocusOverlay = overlay; @@ -2710,6 +2710,12 @@ function MyController(hand) { var theta = this.state === STATE_ENTITY_STYLUS_TOUCHING ? STYLUS_PRESS_TO_MOVE_DEADSPOT_ANGLE : LASER_PRESS_TO_MOVE_DEADSPOT_ANGLE; this.deadspotRadius = Math.tan(theta) * intersectInfo.distance; // dead spot radius in meters } + + if (this.state == STATE_ENTITY_STYLUS_TOUCHING) { + Controller.triggerHapticPulse(HAPTIC_STYLUS_STRENGTH, HAPTIC_STYLUS_DURATION, this.hand); + } else if (this.state == STATE_ENTITY_LASER_TOUCHING) { + Controller.triggerHapticPulse(HAPTIC_LASER_UI_STRENGTH, HAPTIC_LASER_UI_DURATION, this.hand); + } }; this.entityTouchingExit = function() { @@ -2829,6 +2835,12 @@ function MyController(hand) { var theta = this.state === STATE_OVERLAY_STYLUS_TOUCHING ? 
STYLUS_PRESS_TO_MOVE_DEADSPOT_ANGLE : LASER_PRESS_TO_MOVE_DEADSPOT_ANGLE; this.deadspotRadius = Math.tan(theta) * intersectInfo.distance; // dead spot radius in meters } + + if (this.state == STATE_OVERLAY_STYLUS_TOUCHING) { + Controller.triggerHapticPulse(HAPTIC_STYLUS_STRENGTH, HAPTIC_STYLUS_DURATION, this.hand); + } else if (this.state == STATE_OVERLAY_LASER_TOUCHING) { + Controller.triggerHapticPulse(HAPTIC_LASER_UI_STRENGTH, HAPTIC_LASER_UI_DURATION, this.hand); + } }; this.overlayTouchingExit = function () { @@ -2882,7 +2894,6 @@ function MyController(hand) { this.touchingEnterTimer += dt; if (this.state == STATE_OVERLAY_STYLUS_TOUCHING && this.triggerSmoothedSqueezed()) { - this.setState(STATE_OFF, "trigger squeezed"); return; } diff --git a/scripts/system/libraries/WebTablet.js b/scripts/system/libraries/WebTablet.js index 0cd7d26854..74bbd788be 100644 --- a/scripts/system/libraries/WebTablet.js +++ b/scripts/system/libraries/WebTablet.js @@ -8,7 +8,7 @@ // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // /* global getControllerWorldLocation, setEntityCustomData, Tablet, WebTablet:true, HMD, Settings, Script, - Vec3, Quat, MyAvatar, Entities, Overlays, Camera, Messages, Xform */ + Vec3, Quat, MyAvatar, Entities, Overlays, Camera, Messages, Xform, clamp */ Script.include(Script.resolvePath("../libraries/utils.js")); Script.include(Script.resolvePath("../libraries/controllers.js")); @@ -118,7 +118,7 @@ WebTablet = function (url, width, dpi, hand, clientOnly) { }; // compute position, rotation & parentJointIndex of the tablet - this.calculateTabletAttachmentProperties(hand, tabletProperties); + this.calculateTabletAttachmentProperties(hand, true, tabletProperties); this.cleanUpOldTablets(); this.tabletEntityID = Entities.addEntity(tabletProperties, clientOnly); @@ -252,31 +252,78 @@ WebTablet.prototype.destroy = function () { WebTablet.prototype.geometryChanged = function (geometry) { if (!HMD.active) { var 
tabletProperties = {}; + // compute position, rotation & parentJointIndex of the tablet - this.calculateTabletAttachmentProperties(NO_HANDS, tabletProperties); + this.calculateTabletAttachmentProperties(NO_HANDS, false, tabletProperties); Entities.editEntity(this.tabletEntityID, tabletProperties); } }; +function gluPerspective(fovy, aspect, zNear, zFar) { + var cotan = 1 / Math.tan(fovy / 2); + var alpha = -(zFar + zNear) / (zFar - zNear); + var beta = -(2 * zFar * zNear) / (zFar - zNear); + var col0 = {x: cotan / aspect, y: 0, z: 0, w: 0}; + var col1 = {x: 0, y: cotan, z: 0, w: 0}; + var col2 = {x: 0, y: 0, z: alpha, w: -1}; + var col3 = {x: 0, y: 0, z: beta, w: 0}; + return Mat4.createFromColumns(col0, col1, col2, col3); +} + // calclulate the appropriate position of the tablet in world space, such that it fits in the center of the screen. // with a bit of padding on the top and bottom. -WebTablet.prototype.calculateWorldAttitudeRelativeToCamera = function () { +// windowPos is used to position the center of the tablet at the given position. +WebTablet.prototype.calculateWorldAttitudeRelativeToCamera = function (windowPos) { + var DEFAULT_DESKTOP_TABLET_SCALE = 75; var DESKTOP_TABLET_SCALE = Settings.getValue("desktopTabletScale") || DEFAULT_DESKTOP_TABLET_SCALE; + + // clamp window pos so 2d tablet is not off-screen. 
+ var TABLET_TEXEL_PADDING = {x: 60, y: 90}; + var X_CLAMP = (DESKTOP_TABLET_SCALE / 100) * ((TABLET_TEXTURE_RESOLUTION.x / 2) + TABLET_TEXEL_PADDING.x); + var Y_CLAMP = (DESKTOP_TABLET_SCALE / 100) * ((TABLET_TEXTURE_RESOLUTION.y / 2) + TABLET_TEXEL_PADDING.y); + windowPos.x = clamp(windowPos.x, X_CLAMP, Window.innerWidth - X_CLAMP); + windowPos.y = clamp(windowPos.y, Y_CLAMP, Window.innerHeight - Y_CLAMP); + var fov = (Settings.getValue('fieldOfView') || DEFAULT_VERTICAL_FIELD_OF_VIEW) * (Math.PI / 180); var MAX_PADDING_FACTOR = 2.2; var PADDING_FACTOR = Math.min(Window.innerHeight / TABLET_TEXTURE_RESOLUTION.y, MAX_PADDING_FACTOR); var TABLET_HEIGHT = (TABLET_TEXTURE_RESOLUTION.y / this.dpi) * INCHES_TO_METERS; var WEB_ENTITY_Z_OFFSET = (this.depth / 2); + + // calcualte distance from camera var dist = (PADDING_FACTOR * TABLET_HEIGHT) / (2 * Math.tan(fov / 2) * (DESKTOP_TABLET_SCALE / 100)) - WEB_ENTITY_Z_OFFSET; + + var Z_NEAR = 0.01; + var Z_FAR = 100.0; + + // calculate mouse position in clip space + var alpha = -(Z_FAR + Z_NEAR) / (Z_FAR - Z_NEAR); + var beta = -(2 * Z_FAR * Z_NEAR) / (Z_FAR - Z_NEAR); + var clipZ = (beta / dist) - alpha; + var clipMousePosition = {x: (2 * windowPos.x / Window.innerWidth) - 1, + y: (2 * ((Window.innerHeight - windowPos.y) / Window.innerHeight)) - 1, + z: clipZ}; + + // calculate projection matrix + var aspect = Window.innerWidth / Window.innerHeight; + var projMatrix = gluPerspective(fov, aspect, Z_NEAR, Z_FAR); + + // transform mouse clip position into view coordinates. + var viewMousePosition = Mat4.transformPoint(Mat4.inverse(projMatrix), clipMousePosition); + + // transform view mouse position into world coordinates. 
+ var viewToWorldMatrix = Mat4.createFromRotAndTrans(Camera.orientation, Camera.position); + var worldMousePosition = Mat4.transformPoint(viewToWorldMatrix, viewMousePosition); + return { - position: Vec3.sum(Camera.position, Vec3.multiply(dist, Quat.getFront(Camera.orientation))), + position: worldMousePosition, rotation: Quat.multiply(Camera.orientation, ROT_Y_180) }; }; // compute position, rotation & parentJointIndex of the tablet -WebTablet.prototype.calculateTabletAttachmentProperties = function (hand, tabletProperties) { +WebTablet.prototype.calculateTabletAttachmentProperties = function (hand, useMouse, tabletProperties) { if (HMD.active) { // in HMD mode, the tablet should be relative to the sensor to world matrix. tabletProperties.parentJointIndex = SENSOR_TO_ROOM_MATRIX; @@ -289,8 +336,16 @@ WebTablet.prototype.calculateTabletAttachmentProperties = function (hand, tablet // in desktop mode, the tablet should be relative to the camera tabletProperties.parentJointIndex = CAMERA_MATRIX; - // compute the appropriate postion of the tablet such that it fits in the center of the screen nicely. - var attitude = this.calculateWorldAttitudeRelativeToCamera(); + var windowPos; + if (useMouse) { + // compute the appropriate postion of the tablet such that it fits in the center of the screen nicely. 
+ windowPos = {x: Controller.getValue(Controller.Hardware.Keyboard.MouseX), + y: Controller.getValue(Controller.Hardware.Keyboard.MouseY)}; + } else { + windowPos = {x: Window.innerWidth / 2, + y: Window.innerHeight / 2}; + } + var attitude = this.calculateWorldAttitudeRelativeToCamera(windowPos); tabletProperties.position = attitude.position; tabletProperties.rotation = attitude.rotation; } @@ -310,7 +365,7 @@ WebTablet.prototype.onHmdChanged = function () { var tabletProperties = {}; // compute position, rotation & parentJointIndex of the tablet - this.calculateTabletAttachmentProperties(NO_HANDS, tabletProperties); + this.calculateTabletAttachmentProperties(NO_HANDS, false, tabletProperties); Entities.editEntity(this.tabletEntityID, tabletProperties); // Full scene FXAA should be disabled on the overlay when the tablet in desktop mode. @@ -398,7 +453,7 @@ WebTablet.prototype.cameraModeChanged = function (newMode) { var self = this; var tabletProperties = {}; // compute position, rotation & parentJointIndex of the tablet - self.calculateTabletAttachmentProperties(NO_HANDS, tabletProperties); + self.calculateTabletAttachmentProperties(NO_HANDS, false, tabletProperties); Entities.editEntity(self.tabletEntityID, tabletProperties); } }; diff --git a/scripts/system/nameTag.js b/scripts/system/nameTag.js new file mode 100644 index 0000000000..e25db69064 --- /dev/null +++ b/scripts/system/nameTag.js @@ -0,0 +1,112 @@ +"use strict"; + +/*jslint vars: true, plusplus: true*/ +/*global Entities, Script, Quat, Vec3, MyAvatar, print*/ +// nameTag.js +// +// Created by Triplelexx on 17/01/31 +// Copyright 2017 High Fidelity, Inc. +// +// Running the script creates a text entity that will hover over the user's head showing their display name. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html + +const CLIENTONLY = false; +const NULL_UUID = "{00000000-0000-0000-0000-000000000000}"; +const ENTITY_CHECK_INTERVAL = 5000; // ms = 5 seconds +const STARTUP_DELAY = 2000; // ms = 2 second +const OLD_AGE = 3500; // we recreate the entity if older than this time in seconds +const TTL = 2; // time to live in seconds if script is not running +const HEIGHT_ABOVE_HEAD = 0.2; +const HEAD_OFFSET = -0.025; +const SIZE_Y = 0.075; +const LETTER_OFFSET = 0.03; // arbitrary value to dynamically change width, could be more accurate by detecting characters +const LINE_HEIGHT = 0.05; + +var nameTagEntityID = NULL_UUID; +var lastCheckForEntity = 0; + +// create the name tag entity after a brief delay +Script.setTimeout(function() { + addNameTag(); +}, STARTUP_DELAY); + +function addNameTag() { + var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getFront(MyAvatar.orientation))); + nameTagPosition.y += HEIGHT_ABOVE_HEAD; + var nameTagProperties = { + name: MyAvatar.displayName + ' Name Tag', + type: 'Text', + text: MyAvatar.displayName, + lineHeight: LINE_HEIGHT, + parentID: MyAvatar.sessionUUID, + dimensions: dimensionsFromName(), + position: nameTagPosition + } + nameTagEntityID = Entities.addEntity(nameTagProperties, CLIENTONLY); +} + +function updateNameTag() { + var nameTagProps = Entities.getEntityProperties(nameTagEntityID); + var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getFront(MyAvatar.orientation))); + nameTagPosition.y += HEIGHT_ABOVE_HEAD; + + Entities.editEntity(nameTagEntityID, { + position: nameTagPosition, + dimensions: dimensionsFromName(), + // lifetime is in seconds we add TTL on top of the next poll time + lifetime: Math.round(nameTagProps.age) + (ENTITY_CHECK_INTERVAL / 1000) + TTL, + text: MyAvatar.displayName + }); +}; + +function deleteNameTag() { + if(nameTagEntityID !== NULL_UUID) 
{ + Entities.deleteEntity(nameTagEntityID); + nameTagEntityID = NULL_UUID; + } +} + +function dimensionsFromName() { + return { + x: LETTER_OFFSET * MyAvatar.displayName.length, + y: SIZE_Y, + z: 0.0 + } +}; + +// cleanup on ending +Script.scriptEnding.connect(cleanup); +function cleanup() { + deleteNameTag(); +} + +Script.update.connect(update); +function update() { + // if no entity we return + if(nameTagEntityID == NULL_UUID) { + return; + } + + if(Date.now() - lastCheckForEntity > ENTITY_CHECK_INTERVAL) { + checkForEntity(); + lastCheckForEntity = Date.now(); + } +} + +function checkForEntity() { + var nameTagProps = Entities.getEntityProperties(nameTagEntityID); + // it is possible for the age to not be a valid number, we check for this and return accordingly + if(nameTagProps.age == -1) { + return; + } + + // it's too old or we receive undefined make a new one, otherwise update + if(nameTagProps.age > OLD_AGE || nameTagProps.age == undefined) { + deleteNameTag(); + addNameTag(); + } else { + updateNameTag(); + } +} diff --git a/scripts/system/pal.js b/scripts/system/pal.js index 9e9c49b1a0..adbde0ef5c 100644 --- a/scripts/system/pal.js +++ b/scripts/system/pal.js @@ -11,6 +11,8 @@ // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // +(function() { // BEGIN LOCAL_SCOPE + // hardcoding these as it appears we cannot traverse the originalTextures in overlays??? Maybe I've missed // something, will revisit as this is sorta horrible. 
const UNSELECTED_TEXTURES = {"idle-D": Script.resolvePath("./assets/models/Avatar-Overlay-v1.fbx/Avatar-Overlay-v1.fbm/avatar-overlay-idle.png"), @@ -27,7 +29,7 @@ const UNSELECTED_COLOR = { red: 0x1F, green: 0xC6, blue: 0xA6}; const SELECTED_COLOR = {red: 0xF3, green: 0x91, blue: 0x29}; const HOVER_COLOR = {red: 0xD0, green: 0xD0, blue: 0xD0}; // almost white for now -(function() { // BEGIN LOCAL_SCOPE +var conserveResources = true; Script.include("/~/system/libraries/controllers.js"); @@ -265,15 +267,16 @@ pal.fromQml.connect(function (message) { // messages are {method, params}, like function addAvatarNode(id) { var selected = ExtendedOverlay.isSelected(id); return new ExtendedOverlay(id, "sphere", { - drawInFront: true, - solid: true, - alpha: 0.8, - color: color(selected, false, 0.0), - ignoreRayIntersection: false}, selected, true); + drawInFront: true, + solid: true, + alpha: 0.8, + color: color(selected, false, 0.0), + ignoreRayIntersection: false}, selected, !conserveResources); } function populateUserList(selectData) { - var data = []; - AvatarList.getAvatarIdentifiers().sort().forEach(function (id) { // sorting the identifiers is just an aid for debugging + var data = [], avatars = AvatarList.getAvatarIdentifiers(); + conserveResources = avatars.length > 20; + avatars.forEach(function (id) { // sorting the identifiers is just an aid for debugging var avatar = AvatarList.getAvatar(id); var avatarPalDatum = { displayName: avatar.sessionDisplayName, @@ -498,6 +501,9 @@ if (Settings.getValue("HUDUIEnabled")) { }); } var isWired = false; +var audioTimer; +var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too) +var AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS = 300; function off() { if (isWired) { // It is not ok to disconnect these twice, hence guard. 
Script.update.disconnect(updateOverlays); @@ -505,6 +511,7 @@ function off() { Controller.mouseMoveEvent.disconnect(handleMouseMoveEvent); isWired = false; } + if (audioTimer) { Script.clearInterval(audioTimer); } triggerMapping.disable(); // It's ok if we disable twice. triggerPressMapping.disable(); // see above removeOverlays(); @@ -521,7 +528,7 @@ function onClicked() { Controller.mouseMoveEvent.connect(handleMouseMoveEvent); triggerMapping.enable(); triggerPressMapping.enable(); - createAudioInterval(); + audioTimer = createAudioInterval(conserveResources ? AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS : AUDIO_LEVEL_UPDATE_INTERVAL_MS); } else { off(); } @@ -557,9 +564,7 @@ var AVERAGING_RATIO = 0.05; var LOUDNESS_FLOOR = 11.0; var LOUDNESS_SCALE = 2.8 / 5.0; var LOG2 = Math.log(2.0); -var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too) var myData = {}; // we're not includied in ExtendedOverlay.get. -var audioInterval; function getAudioLevel(id) { // the VU meter should work similarly to the one in AvatarInputs: log scale, exponentially averaged @@ -591,21 +596,19 @@ function getAudioLevel(id) { return audioLevel; } -function createAudioInterval() { +function createAudioInterval(interval) { // we will update the audioLevels periodically // TODO: tune for efficiency - expecially with large numbers of avatars return Script.setInterval(function () { - if (pal.visible) { - var param = {}; - AvatarList.getAvatarIdentifiers().forEach(function (id) { - var level = getAudioLevel(id); - // qml didn't like an object with null/empty string for a key, so... - var userId = id || 0; - param[userId] = level; - }); - pal.sendToQml({method: 'updateAudioLevel', params: param}); - } - }, AUDIO_LEVEL_UPDATE_INTERVAL_MS); + var param = {}; + AvatarList.getAvatarIdentifiers().forEach(function (id) { + var level = getAudioLevel(id); + // qml didn't like an object with null/empty string for a key, so... 
+ var userId = id || 0; + param[userId] = level; + }); + pal.sendToQml({method: 'updateAudioLevel', params: param}); + }, interval); } function avatarDisconnected(nodeID) { diff --git a/scripts/system/tablet-ui/tabletUI.js b/scripts/system/tablet-ui/tabletUI.js index eab3d85adc..dc1d71f402 100644 --- a/scripts/system/tablet-ui/tabletUI.js +++ b/scripts/system/tablet-ui/tabletUI.js @@ -12,7 +12,7 @@ // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // -/* global Script, HMD, WebTablet, UIWebTablet */ +/* global Script, HMD, WebTablet, UIWebTablet, UserActivityLogger, Settings, Entities, Messages, Tablet, Overlays, MyAvatar */ (function() { // BEGIN LOCAL_SCOPE var tabletShown = false; @@ -65,8 +65,10 @@ hideTabletUI(); HMD.closeTablet(); } else if (HMD.showTablet && !tabletShown) { + UserActivityLogger.openedTablet(); showTabletUI(); } else if (!HMD.showTablet && tabletShown) { + UserActivityLogger.closedTablet(); hideTabletUI(); } } @@ -86,7 +88,6 @@ var accumulatedLevel = 0.0; // Note: Might have to tweak the following two based on the rate we're getting the data var AVERAGING_RATIO = 0.05; - var MIC_LEVEL_UPDATE_INTERVAL_MS = 100; // Calculate microphone level with the same scaling equation (log scale, exponentially averaged) in AvatarInputs and pal.js function getMicLevel() { diff --git a/scripts/system/tablet-users.js b/scripts/system/tablet-users.js index f832fa304a..f4682ec75d 100644 --- a/scripts/system/tablet-users.js +++ b/scripts/system/tablet-users.js @@ -12,6 +12,7 @@ (function() { // BEGIN LOCAL_SCOPE var USERS_URL = "https://hifi-content.s3.amazonaws.com/faye/tablet-dev/users.html"; + var HOME_BUTTON_TEXTURE = Script.resourcesPath() + "meshes/tablet-with-home-button.fbx/tablet-with-home-button.fbm/button-root.png"; var FRIENDS_WINDOW_URL = "https://metaverse.highfidelity.com/user/friends"; var FRIENDS_WINDOW_WIDTH = 290; @@ -40,6 +41,10 @@ }); function onClicked() { + var tabletEntity = HMD.tabletID; + if 
(tabletEntity) { + Entities.editEntity(tabletEntity, {textures: JSON.stringify({"tex.close" : HOME_BUTTON_TEXTURE})}); + } tablet.gotoWebScreen(USERS_URL); } diff --git a/tests/shared/src/AABoxTests.cpp b/tests/shared/src/AABoxTests.cpp index b9ab95bb09..2e9dfab497 100644 --- a/tests/shared/src/AABoxTests.cpp +++ b/tests/shared/src/AABoxTests.cpp @@ -169,3 +169,17 @@ void AABoxTests::testScale() { box3 += glm::vec3(-1.0f, -1.0f, -1.0f); QCOMPARE(box3.contains(glm::vec3(0.5f, 0.5f, 0.5f)), true); } + +void AABoxTests::testFindSpherePenetration() { + vec3 searchPosition(-0.0141186f, 0.0640736f, -0.116081f); + float searchRadius = 0.5f; + + vec3 boxMin(-0.800014f, -0.450025f, -0.00503815f); + vec3 boxDim(1.60003f, 0.900049f, 0.0100763f); + AABox testBox(boxMin, boxDim); + + vec3 penetration; + bool hit = testBox.findSpherePenetration(searchPosition, searchRadius, penetration); + QCOMPARE(hit, true); +} + diff --git a/tests/shared/src/AABoxTests.h b/tests/shared/src/AABoxTests.h index c777f8e94f..605db7d3ca 100644 --- a/tests/shared/src/AABoxTests.h +++ b/tests/shared/src/AABoxTests.h @@ -24,6 +24,7 @@ private slots: void testContainsPoint(); void testTouchesSphere(); void testScale(); + void testFindSpherePenetration(); }; #endif // hifi_AABoxTests_h