Merge branch 'master' of https://github.com/highfidelity/hifi into betterBots

Brad Hefta-Gaub 2017-02-08 13:41:33 -08:00
commit 07cf87749f
58 changed files with 722 additions and 569 deletions

View file

@@ -316,6 +316,10 @@ void AudioMixer::sendStatsPacket() {
     addTiming(_mixTiming, "mix");
     addTiming(_eventsTiming, "events");
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    timingStats["ns_per_throttle"] = (_stats.totalMixes > 0) ? (float)(_stats.throttleTime / _stats.totalMixes) : 0;
+#endif
     // call it "avg_..." to keep it higher in the display, sorted alphabetically
     statsObject["avg_timing_stats"] = timingStats;
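The new ns_per_throttle stat is an integer-divided average guarded against a zero denominator. A minimal standalone sketch of the same pattern, using hypothetical values:

    #include <cstdint>
    #include <iostream>

    int main() {
        uint64_t throttleTime = 1200000; // accumulated throttle time in ns (hypothetical)
        int totalMixes = 400;            // mixes performed so far (hypothetical)
        // report 0 rather than dividing by zero before any mix has run
        float nsPerThrottle = (totalMixes > 0) ? (float)(throttleTime / totalMixes) : 0.0f;
        std::cout << nsPerThrottle << "\n"; // 3000
        return 0;
    }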

View file

@@ -46,10 +46,12 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
 void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
 // mix helpers
-bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node);
-float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-        const glm::vec3& relativePosition, bool isEcho);
-float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-        const glm::vec3& relativePosition);
+inline bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer& node);
+inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition);
+inline float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition, bool isEcho);
+inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition);
 void AudioMixerSlave::configure(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
@@ -126,9 +128,10 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
         AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
     auto allStreams = [&](const SharedNodePointer& node, MixFunctor mixFunctor) {
         AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
+        auto nodeID = node->getUUID();
         for (auto& streamPair : nodeData->getAudioStreams()) {
             auto nodeStream = streamPair.second;
-            (this->*mixFunctor)(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
+            (this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream);
         }
     };
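Hoisting node->getUUID() out of the stream loop avoids recomputing the UUID per stream; the call itself uses pointer-to-member-function syntax, which is easy to misread. A self-contained sketch of the (this->*mixFunctor)(...) dispatch with simplified stand-in types:

    #include <initializer_list>
    #include <iostream>

    struct Mixer {
        typedef void (Mixer::*MixFunctor)(int streamId);

        void mixStream(int streamId) { std::cout << "mix " << streamId << "\n"; }
        void throttleStream(int streamId) { std::cout << "throttle " << streamId << "\n"; }

        // dispatch a member-function pointer against this instance per stream
        void allStreams(MixFunctor mixFunctor) {
            for (int streamId : {1, 2, 3}) {
                (this->*mixFunctor)(streamId);
            }
        }
    };

    int main() {
        Mixer mixer;
        mixer.allStreams(&Mixer::mixStream);      // full mix path
        mixer.allStreams(&Mixer::throttleStream); // throttled path
        return 0;
    }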
@@ -147,14 +150,28 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
         if (!isThrottling) {
             allStreams(node, &AudioMixerSlave::mixStream);
         } else {
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+            auto throttleStart = p_high_resolution_clock::now();
+#endif
             AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
+            auto nodeID = node->getUUID();
             // compute the node's max relative volume
             float nodeVolume;
             for (auto& streamPair : nodeData->getAudioStreams()) {
                 auto nodeStream = streamPair.second;
-                float distance = glm::length(nodeStream->getPosition() - listenerAudioStream->getPosition());
-                nodeVolume = std::max(nodeStream->getLastPopOutputTrailingLoudness() / distance, nodeVolume);
+                // approximate the gain
+                glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition();
+                float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition);
+                // modify by hrtf gain adjustment
+                auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier());
+                gain *= hrtf.getGainAdjustment();
+                auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain;
+                nodeVolume = std::max(streamVolume, nodeVolume);
             }
             // max-heapify the nodes by relative volume
@@ -162,6 +179,13 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
             if (!throttledNodes.empty()) {
                 std::push_heap(throttledNodes.begin(), throttledNodes.end());
             }
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+            auto throttleEnd = p_high_resolution_clock::now();
+            uint64_t throttleTime =
+                std::chrono::duration_cast<std::chrono::nanoseconds>(throttleEnd - throttleStart).count();
+            stats.throttleTime += throttleTime;
+#endif
         }
     }
 });
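When throttling, each node's streams get an approximate loudness (trailing loudness times the approximate gain, scaled by the HRTF gain adjustment), and nodes are kept in a max-heap so the loudest are mixed first. A minimal sketch of that heap discipline with std::make_heap/std::pop_heap and hypothetical volumes:

    #include <algorithm>
    #include <iostream>
    #include <utility>
    #include <vector>

    int main() {
        // (volume, nodeID) pairs; std::pair orders by first element, so the
        // default max-heap keeps the loudest node on top
        std::vector<std::pair<float, int>> throttledNodes = {
            {0.2f, 1}, {0.9f, 2}, {0.5f, 3}
        };
        std::make_heap(throttledNodes.begin(), throttledNodes.end());

        int budget = 2; // hypothetical number of nodes we can afford to mix
        while (budget-- > 0 && !throttledNodes.empty()) {
            std::pop_heap(throttledNodes.begin(), throttledNodes.end());
            auto loudest = throttledNodes.back();
            throttledNodes.pop_back();
            std::cout << "mix node " << loudest.second
                      << " (volume " << loudest.first << ")\n";
        }
        // whatever remains in throttledNodes is skipped this frame
        return 0;
    }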
@@ -227,9 +251,9 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
     glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
     float distance = glm::max(glm::length(relativePosition), EPSILON);
-    float gain = gainForSource(listeningNodeStream, streamToAdd, relativePosition, isEcho);
-    float azimuth = isEcho ? 0.0f : azimuthForSource(listeningNodeStream, listeningNodeStream, relativePosition);
-    static const int HRTF_DATASET_INDEX = 1;
+    float gain = computeGain(listeningNodeStream, streamToAdd, relativePosition, isEcho);
+    float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
+    const int HRTF_DATASET_INDEX = 1;
     if (!streamToAdd.lastPopSucceeded()) {
         bool forceSilentBlock = true;
@@ -330,7 +354,7 @@ std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 s
 }
 void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QByteArray& buffer) {
-    static const int MIX_PACKET_SIZE =
+    const int MIX_PACKET_SIZE =
         sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE + AudioConstants::NETWORK_FRAME_BYTES_STEREO;
     quint16 sequence = data.getOutgoingSequenceNumber();
     QString codec = data.getCodecName();
@@ -345,7 +369,7 @@ void sendMixPacket(const SharedNodePointer& node, AudioMixerClientData& data, QB
 }
 void sendSilentPacket(const SharedNodePointer& node, AudioMixerClientData& data) {
-    static const int SILENT_PACKET_SIZE =
+    const int SILENT_PACKET_SIZE =
         sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE + sizeof(quint16);
     quint16 sequence = data.getOutgoingSequenceNumber();
     QString codec = data.getCodecName();
@@ -475,40 +499,54 @@ bool shouldIgnoreNode(const SharedNodePointer& listener, const SharedNodePointer
     return ignore;
 }
-float gainForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-        const glm::vec3& relativePosition, bool isEcho) {
+static const float ATTENUATION_START_DISTANCE = 1.0f;
+
+float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition) {
     float gain = 1.0f;
-    float distanceBetween = glm::length(relativePosition);
-    if (distanceBetween < EPSILON) {
-        distanceBetween = EPSILON;
-    }
+    // injector: apply attenuation
     if (streamToAdd.getType() == PositionalAudioStream::Injector) {
         gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
     }
-    if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) {
-        // source is another avatar, apply fixed off-axis attenuation to make them quieter as they turn away from listener
-        glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition;
+    // avatar: skip attenuation - it is too costly to approximate
+
+    // distance attenuation: approximate, ignore zone-specific attenuations
+    // this is a good approximation for streams further than ATTENUATION_START_DISTANCE
+    // those streams closer will be amplified; amplifying close streams is acceptable
+    // when throttling, as close streams are expected to be heard by a user
+    float distance = glm::length(relativePosition);
+    return gain / distance;
+}
+
+float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+        const glm::vec3& relativePosition, bool isEcho) {
+    float gain = 1.0f;
+
+    // injector: apply attenuation
+    if (streamToAdd.getType() == PositionalAudioStream::Injector) {
+        gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
+
+    // avatar: apply fixed off-axis attenuation to make them quieter as they turn away
+    } else if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) {
+        glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition;
         float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
                                            glm::normalize(rotatedListenerPosition));
         const float MAX_OFF_AXIS_ATTENUATION = 0.2f;
-        const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
+        const float OFF_AXIS_ATTENUATION_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
         float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION +
-            (OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / PI_OVER_TWO));
+            (angleOfDelivery * (OFF_AXIS_ATTENUATION_STEP / PI_OVER_TWO));
-        // multiply the current attenuation coefficient by the calculated off axis coefficient
         gain *= offAxisCoefficient;
     }
-    float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
-    auto& zoneSettings = AudioMixer::getZoneSettings();
     auto& audioZones = AudioMixer::getAudioZones();
+    auto& zoneSettings = AudioMixer::getZoneSettings();
+
+    // find distance attenuation coefficient
+    float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
     for (int i = 0; i < zoneSettings.length(); ++i) {
         if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) &&
             audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) {
@@ -517,16 +555,17 @@ float gainForSource(const AvatarAudioStream& listeningNodeStream, const Position
         }
     }
-    const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
-    if (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE) {
+    // distance attenuation
+    float distance = glm::length(relativePosition);
+    assert(ATTENUATION_START_DISTANCE > EPSILON);
+    if (distance >= ATTENUATION_START_DISTANCE) {
         // translate the zone setting to gain per log2(distance)
         float g = 1.0f - attenuationPerDoublingInDistance;
-        g = (g < EPSILON) ? EPSILON : g;
-        g = (g > 1.0f) ? 1.0f : g;
+        g = glm::clamp(g, EPSILON, 1.0f);
         // calculate the distance coefficient using the distance to this node
-        float distanceCoefficient = fastExp2f(fastLog2f(g) * fastLog2f(distanceBetween/ATTENUATION_BEGINS_AT_DISTANCE));
+        float distanceCoefficient = fastExp2f(fastLog2f(g) * fastLog2f(distance/ATTENUATION_START_DISTANCE));
         // multiply the current attenuation coefficient by the distance coefficient
         gain *= distanceCoefficient;
@@ -535,7 +574,7 @@ float gainForSource(const AvatarAudioStream& listeningNodeStream, const Position
     return gain;
 }
-float azimuthForSource(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
+float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
         const glm::vec3& relativePosition) {
     glm::quat inverseOrientation = glm::inverse(listeningNodeStream.getOrientation());
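The renamed computeGain keeps the log-domain distance term: fastExp2f(fastLog2f(g) * fastLog2f(d/d0)) is g^log2(d/d0), so the gain is multiplied by g for every doubling of distance, while approximateGain collapses all of this to a plain 1/d falloff. A small sketch verifying that identity, with std::exp2/std::log2 standing in for the fast approximations:

    #include <cmath>
    #include <initializer_list>
    #include <iostream>

    int main() {
        const float ATTENUATION_START_DISTANCE = 1.0f;
        float attenuationPerDoubling = 0.5f;     // hypothetical zone setting
        float g = 1.0f - attenuationPerDoubling; // gain retained per doubling

        for (float distance : {1.0f, 2.0f, 4.0f, 8.0f}) {
            // g^log2(d/d0), written as exp2(log2(g) * log2(d/d0))
            float coeff = std::exp2(std::log2(g) * std::log2(distance / ATTENUATION_START_DISTANCE));
            std::cout << "d=" << distance << " coeff=" << coeff << "\n";
        }
        // prints 1, 0.5, 0.25, 0.125: each doubling multiplies the gain by g
        return 0;
    }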

View file

@@ -20,6 +20,9 @@ void AudioMixerStats::reset() {
     hrtfThrottleRenders = 0;
     manualStereoMixes = 0;
     manualEchoMixes = 0;
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    throttleTime = 0;
+#endif
 }
 void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
@@ -31,4 +34,7 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
     hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
     manualStereoMixes += otherStats.manualStereoMixes;
     manualEchoMixes += otherStats.manualEchoMixes;
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    throttleTime += otherStats.throttleTime;
+#endif
 }

View file

@@ -12,6 +12,10 @@
 #ifndef hifi_AudioMixerStats_h
 #define hifi_AudioMixerStats_h
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+#include <cstdint>
+#endif
 struct AudioMixerStats {
     int sumStreams { 0 };
     int sumListeners { 0 };
@@ -25,6 +29,10 @@ struct AudioMixerStats {
     int manualStereoMixes { 0 };
     int manualEchoMixes { 0 };
+#ifdef HIFI_AUDIO_THROTTLE_DEBUG
+    uint64_t throttleTime { 0 };
+#endif
     void reset();
     void accumulate(const AudioMixerStats& otherStats);
 };

View file

@@ -34,7 +34,7 @@ EntityServer::EntityServer(ReceivedMessage& message) :
     DependencyManager::set<ScriptCache>();
     auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
-    packetReceiver.registerListenerForTypes({ PacketType::EntityAdd, PacketType::EntityEdit, PacketType::EntityErase },
+    packetReceiver.registerListenerForTypes({ PacketType::EntityAdd, PacketType::EntityEdit, PacketType::EntityErase, PacketType::EntityPhysics },
         this, "handleEntityPacket");
 }

View file

@@ -141,218 +141,6 @@
                 "can_set": true
             }
         ]
-    },
-    {
-        "label": "Operating Hours",
-        "help": "\"Open\" domains can be searched using their operating hours. Hours are entered in the local timezone, selected below.",
-        "name": "weekday_hours",
-        "caption": "Weekday Hours (Monday-Friday)",
-        "type": "table",
-        "can_add_new_rows": false,
-        "columns": [
-            { "name": "open", "label": "Opening Time", "type": "time", "default": "00:00", "editable": true },
-            { "name": "close", "label": "Closing Time", "type": "time", "default": "23:59", "editable": true }
-        ]
-    },
-    {
-        "name": "weekend_hours",
-        "label": "Weekend Hours (Saturday/Sunday)",
-        "type": "table",
-        "can_add_new_rows": false,
-        "columns": [
-            { "name": "open", "label": "Opening Time", "type": "time", "default": "00:00", "editable": true },
-            { "name": "close", "label": "Closing Time", "type": "time", "default": "23:59", "editable": true }
-        ]
-    },
-    {
-        "label": "Time Zone",
-        "name": "utc_offset",
-        "caption": "Time Zone",
-        "help": "This server's time zone. Used to define your server's operating hours.",
-        "type": "select",
-        "options": [
-            { "value": "-12", "label": "UTC-12:00" },
-            { "value": "-11", "label": "UTC-11:00" },
-            { "value": "-10", "label": "UTC-10:00" },
-            { "value": "-9.5", "label": "UTC-09:30" },
-            { "value": "-9", "label": "UTC-09:00" },
-            { "value": "-8", "label": "UTC-08:00" },
-            { "value": "-7", "label": "UTC-07:00" },
-            { "value": "-6", "label": "UTC-06:00" },
-            { "value": "-5", "label": "UTC-05:00" },
-            { "value": "-4", "label": "UTC-04:00" },
-            { "value": "-3.5", "label": "UTC-03:30" },
-            { "value": "-3", "label": "UTC-03:00" },
-            { "value": "-2", "label": "UTC-02:00" },
-            { "value": "-1", "label": "UTC-01:00" },
-            { "value": "", "label": "UTC±00:00" },
-            { "value": "1", "label": "UTC+01:00" },
-            { "value": "2", "label": "UTC+02:00" },
-            { "value": "3", "label": "UTC+03:00" },
-            { "value": "3.5", "label": "UTC+03:30" },
-            { "value": "4", "label": "UTC+04:00" },
-            { "value": "4.5", "label": "UTC+04:30" },
-            { "value": "5", "label": "UTC+05:00" },
-            { "value": "5.5", "label": "UTC+05:30" },
-            { "value": "5.75", "label": "UTC+05:45" },
-            { "value": "6", "label": "UTC+06:00" },
-            { "value": "6.5", "label": "UTC+06:30" },
-            { "value": "7", "label": "UTC+07:00" },
-            { "value": "8", "label": "UTC+08:00" },
-            { "value": "8.5", "label": "UTC+08:30" },
-            { "value": "8.75", "label": "UTC+08:45" },
-            { "value": "9", "label": "UTC+09:00" },
-            { "value": "9.5", "label": "UTC+09:30" },
-            { "value": "10", "label": "UTC+10:00" },
-            { "value": "10.5", "label": "UTC+10:30" },
-            { "value": "11", "label": "UTC+11:00" },
-            { "value": "12", "label": "UTC+12:00" },
-            { "value": "12.75", "label": "UTC+12:45" },
-            { "value": "13", "label": "UTC+13:00" },
-            { "value": "14", "label": "UTC+14:00" }
-        ]
     }
 ]
 },

View file

@@ -1306,7 +1306,9 @@ function badgeSidebarForDifferences(changedElement) {
     var isGrouped = $('#' + panelParentID).hasClass('grouped');
     if (isGrouped) {
-        var initialPanelJSON = Settings.initialValues[panelParentID];
+        var initialPanelJSON = Settings.initialValues[panelParentID]
+            ? Settings.initialValues[panelParentID]
+            : {};
         // get a JSON representation of that section
         var panelJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID];
@@ -1417,7 +1419,7 @@ function addTableRow(row) {
     input_clone.children('td').each(function () {
         if ($(this).attr("name") !== keepField) {
-            $(this).find("input").val($(this).attr('data-default'));
+            $(this).find("input").val($(this).children('input').attr('data-default'));
         }
     });
@@ -1595,7 +1597,11 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
     // get a JSON representation of that section
     var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName]
-    var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
+    if (Settings.initialValues[panelParentID]) {
+        var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
+    } else {
+        var initialPanelSettingJSON = {};
+    }
     // if they are equal, we don't need data-changed
     isTrue = !_.isEqual(panelSettingJSON, initialPanelSettingJSON)

View file

@@ -35,12 +35,6 @@ const QString DomainMetadata::Descriptors::RESTRICTION = "restriction"; // parse
 const QString DomainMetadata::Descriptors::MATURITY = "maturity";
 const QString DomainMetadata::Descriptors::HOSTS = "hosts";
 const QString DomainMetadata::Descriptors::TAGS = "tags";
-const QString DomainMetadata::Descriptors::HOURS = "hours";
-const QString DomainMetadata::Descriptors::Hours::WEEKDAY = "weekday";
-const QString DomainMetadata::Descriptors::Hours::WEEKEND = "weekend";
-const QString DomainMetadata::Descriptors::Hours::UTC_OFFSET = "utc_offset";
-const QString DomainMetadata::Descriptors::Hours::OPEN = "open";
-const QString DomainMetadata::Descriptors::Hours::CLOSE = "close";
 // descriptors metadata will appear as (JSON):
 // { "description": String, // capped description
 //   "capacity": Number,
@@ -48,11 +42,6 @@ const QString DomainMetadata::Descriptors::Hours::CLOSE = "close";
 //   "maturity": String, // enum corresponding to ESRB ratings
 //   "hosts": [ String ], // capped list of usernames
 //   "tags": [ String ], // capped list of tags
-//   "hours": {
-//     "utc_offset": Number,
-//     "weekday": [ [ Time, Time ] ],
-//     "weekend": [ [ Time, Time ] ],
-//   }
 // }
 // metadata will appear as (JSON):
@@ -60,52 +49,10 @@ const QString DomainMetadata::Descriptors::Hours::CLOSE = "close";
 //
 // it is meant to be sent to and consumed by an external API
-// merge delta into target
-// target should be of the form [ OpenTime, CloseTime ],
-// delta should be of the form [ { open: Time, close: Time } ]
-void parseHours(QVariant delta, QVariant& target) {
-    using Hours = DomainMetadata::Descriptors::Hours;
-    static const QVariantList DEFAULT_HOURS{
-        { QVariantList{ "00:00", "23:59" } }
-    };
-    target.setValue(DEFAULT_HOURS);
-    if (!delta.canConvert<QVariantList>()) {
-        return;
-    }
-    auto& deltaList = *static_cast<QVariantList*>(delta.data());
-    if (deltaList.isEmpty()) {
-        return;
-    }
-    auto& deltaHours = *static_cast<QVariantMap*>(deltaList.first().data());
-    auto open = deltaHours.find(Hours::OPEN);
-    auto close = deltaHours.find(Hours::CLOSE);
-    if (open == deltaHours.end() || close == deltaHours.end()) {
-        return;
-    }
-    // merge delta into new hours
-    static const int OPEN_INDEX = 0;
-    static const int CLOSE_INDEX = 1;
-    auto& hours = *static_cast<QVariantList*>(static_cast<QVariantList*>(target.data())->first().data());
-    hours[OPEN_INDEX] = open.value();
-    hours[CLOSE_INDEX] = close.value();
-    assert(hours[OPEN_INDEX].canConvert<QString>());
-    assert(hours[CLOSE_INDEX].canConvert<QString>());
-}
 DomainMetadata::DomainMetadata(QObject* domainServer) : QObject(domainServer) {
-    // set up the structure necessary for casting during parsing (see parseHours, esp.)
+    // set up the structure necessary for casting during parsing
     _metadata[USERS] = QVariantMap {};
-    _metadata[DESCRIPTORS] = QVariantMap { {
-        Descriptors::HOURS, QVariantMap {
-            { Descriptors::Hours::WEEKDAY, QVariant{} },
-            { Descriptors::Hours::WEEKEND, QVariant{} }
-        }
-    } };
+    _metadata[DESCRIPTORS] = QVariantMap {};
     assert(dynamic_cast<DomainServer*>(domainServer));
     DomainServer* server = static_cast<DomainServer*>(domainServer);
@@ -154,16 +101,6 @@ void DomainMetadata::descriptorsChanged() {
     unsigned int capacity = capacityVariant ? capacityVariant->toUInt() : 0;
     state[Descriptors::CAPACITY] = capacity;
-    // parse operating hours
-    static const QString WEEKDAY_HOURS = "weekday_hours";
-    static const QString WEEKEND_HOURS = "weekend_hours";
-    static const QString UTC_OFFSET = "utc_offset";
-    assert(state[Descriptors::HOURS].canConvert<QVariantMap>());
-    auto& hours = *static_cast<QVariantMap*>(state[Descriptors::HOURS].data());
-    hours[Descriptors::Hours::UTC_OFFSET] = descriptors.take(UTC_OFFSET);
-    parseHours(descriptors[WEEKDAY_HOURS], hours[Descriptors::Hours::WEEKDAY]);
-    parseHours(descriptors[WEEKEND_HOURS], hours[Descriptors::Hours::WEEKEND]);
 #if DEV_BUILD || PR_BUILD
     qDebug() << "Domain metadata descriptors set:" << QJsonObject::fromVariantMap(_metadata[DESCRIPTORS].toMap());
 #endif

View file

@@ -39,15 +39,6 @@ public:
         static const QString MATURITY;
         static const QString HOSTS;
         static const QString TAGS;
-        static const QString HOURS;
-        class Hours {
-        public:
-            static const QString WEEKDAY;
-            static const QString WEEKEND;
-            static const QString UTC_OFFSET;
-            static const QString OPEN;
-            static const QString CLOSE;
-        };
     };
     DomainMetadata(QObject* domainServer);

View file

@@ -21,7 +21,6 @@
 #include <QtCore/QStandardPaths>
 #include <QtCore/QUrl>
 #include <QtCore/QUrlQuery>
-#include <QTimeZone>
 #include <AccountManager.h>
 #include <Assignment.h>
@@ -270,11 +269,6 @@ void DomainServerSettingsManager::setupConfigMap(const QStringList& argumentList
         _agentPermissions.clear();
     }
-    if (oldVersion < 1.5) {
-        // This was prior to operating hours, so add default hours
-        validateDescriptorsMap();
-    }
     if (oldVersion < 1.6) {
         unpackPermissions();
@@ -305,46 +299,10 @@ void DomainServerSettingsManager::setupConfigMap(const QStringList& argumentList
 }
 QVariantMap& DomainServerSettingsManager::getDescriptorsMap() {
-    validateDescriptorsMap();
     static const QString DESCRIPTORS{ "descriptors" };
     return *static_cast<QVariantMap*>(getSettingsMap()[DESCRIPTORS].data());
 }
-void DomainServerSettingsManager::validateDescriptorsMap() {
-    static const QString WEEKDAY_HOURS{ "descriptors.weekday_hours" };
-    static const QString WEEKEND_HOURS{ "descriptors.weekend_hours" };
-    static const QString UTC_OFFSET{ "descriptors.utc_offset" };
-    QVariant* weekdayHours = _configMap.valueForKeyPath(WEEKDAY_HOURS, true);
-    QVariant* weekendHours = _configMap.valueForKeyPath(WEEKEND_HOURS, true);
-    QVariant* utcOffset = _configMap.valueForKeyPath(UTC_OFFSET, true);
-    static const QString OPEN{ "open" };
-    static const QString CLOSE{ "close" };
-    static const QString DEFAULT_OPEN{ "00:00" };
-    static const QString DEFAULT_CLOSE{ "23:59" };
-    bool wasMalformed = false;
-    if (weekdayHours->isNull()) {
-        *weekdayHours = QVariantList{ QVariantMap{ { OPEN, QVariant(DEFAULT_OPEN) }, { CLOSE, QVariant(DEFAULT_CLOSE) } } };
-        wasMalformed = true;
-    }
-    if (weekendHours->isNull()) {
-        *weekendHours = QVariantList{ QVariantMap{ { OPEN, QVariant(DEFAULT_OPEN) }, { CLOSE, QVariant(DEFAULT_CLOSE) } } };
-        wasMalformed = true;
-    }
-    if (utcOffset->isNull()) {
-        *utcOffset = QVariant(QTimeZone::systemTimeZone().offsetFromUtc(QDateTime::currentDateTime()) / (float)SECS_PER_HOUR);
-        wasMalformed = true;
-    }
-    if (wasMalformed) {
-        // write the new settings to file
-        persistToFile();
-    }
-}
 void DomainServerSettingsManager::initializeGroupPermissions(NodePermissionsMap& permissionsRows,
     QString groupName, NodePermissionsPointer perms) {
     // this is called when someone has used the domain-settings webpage to add a group. They type the group's name

View file

@@ -138,8 +138,6 @@ private:
     friend class DomainServer;
-    void validateDescriptorsMap();
     // these cause calls to metaverse's group api
     void apiGetGroupID(const QString& groupName);
     void apiGetGroupRanks(const QUuid& groupID);

View file

@@ -22,7 +22,7 @@
     "to": "Actions.Up",
     "filters":
     [
-        { "type": "deadZone", "min": 0.95 },
+        { "type": "deadZone", "min": 0.6 },
         "invert"
     ]
 },
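Lowering min from 0.95 to 0.6 makes Actions.Up reachable without a nearly full stick throw. A sketch of one common dead-zone formulation (an assumption for illustration; the engine's exact filter may differ):

    #include <cmath>
    #include <initializer_list>
    #include <iostream>

    // zero the axis below |min|, then rescale the remaining range back to
    // [0, 1] so the output stays continuous at the threshold
    float deadZone(float value, float min) {
        float magnitude = std::fabs(value);
        if (magnitude < min) {
            return 0.0f;
        }
        return std::copysign((magnitude - min) / (1.0f - min), value);
    }

    int main() {
        // with min = 0.95 almost the whole stick range was ignored;
        // min = 0.6 leaves the upper 40% of the throw usable
        for (float v : {0.5f, 0.7f, 1.0f}) {
            std::cout << v << " -> " << deadZone(v, 0.6f) << "\n";
        }
        return 0;
    }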

View file

@@ -7,7 +7,7 @@ PreferencesDialog {
     id: root
     objectName: "AvatarPreferencesDialog"
     title: "Avatar Settings"
-    showCategories: [ "Avatar Basics", "Snapshots", "Avatar Tuning", "Avatar Camera" ]
+    showCategories: [ "Avatar Basics", "Avatar Tuning", "Avatar Camera" ]
     property var settings: Settings {
         category: root.objectName
         property alias x: root.x

View file

@@ -87,7 +87,11 @@ Item {
             if (topMenu.objectName === "") {
                 breadcrumbText.text = "Menu";
             } else {
-                breadcrumbText.text = topMenu.objectName;
+                if (menuStack.length === 1) {
+                    breadcrumbText.text = "Menu";
+                } else {
+                    breadcrumbText.text = topMenu.objectName;
+                }
             }
         } else {
             breadcrumbText.text = "Menu";

View file

@@ -74,6 +74,11 @@ int main(int argc, const char* argv[]) {
     instanceMightBeRunning = !sharedMemory.create(1, QSharedMemory::ReadOnly);
 #endif
+    // allow multiple interfaces to run if this environment variable is set.
+    if (QProcessEnvironment::systemEnvironment().contains("HIFI_ALLOW_MULTIPLE_INSTANCES")) {
+        instanceMightBeRunning = false;
+    }
     if (instanceMightBeRunning) {
         // Try to connect and send message to existing interface instance
         QLocalSocket socket;
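Note the escape hatch checks only for the variable's presence, not its value, so any assignment (even an empty one) disables the single-instance guard. A minimal sketch of the probe in isolation (the surrounding logic is hypothetical):

    #include <QDebug>
    #include <QProcessEnvironment>

    int main() {
        bool instanceMightBeRunning = true; // stand-in for the shared-memory probe result
        if (QProcessEnvironment::systemEnvironment().contains("HIFI_ALLOW_MULTIPLE_INSTANCES")) {
            instanceMightBeRunning = false; // e.g. launched with HIFI_ALLOW_MULTIPLE_INSTANCES=1
        }
        qDebug() << "single-instance check active:" << instanceMightBeRunning;
        return 0;
    }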

View file

@@ -104,7 +104,7 @@ void setupPreferences() {
     {
         auto getter = []()->bool { return SnapshotAnimated::alsoTakeAnimatedSnapshot.get(); };
         auto setter = [](bool value) { SnapshotAnimated::alsoTakeAnimatedSnapshot.set(value); };
-        preferences->addPreference(new CheckPreference(SNAPSHOTS, "Take Animated GIF Snapshot with HUD Button", getter, setter));
+        preferences->addPreference(new CheckPreference(SNAPSHOTS, "Take Animated GIF Snapshot", getter, setter));
     }
     {
         auto getter = []()->float { return SnapshotAnimated::snapshotAnimatedDuration.get(); };

View file

@@ -48,6 +48,7 @@ public:
     // HRTF local gain adjustment in amplitude (1.0 == unity)
     //
     void setGainAdjustment(float gain) { _gainAdjust = HRTF_GAIN * gain; };
+    float getGainAdjustment() { return _gainAdjust; }
 private:
     AudioHRTF(const AudioHRTF&) = delete;

View file

@@ -131,12 +131,16 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
     // handle this packet based on its arrival status.
     switch (arrivalInfo._status) {
+        case SequenceNumberStats::Unreasonable: {
+            lostAudioData(1);
+            break;
+        }
         case SequenceNumberStats::Early: {
             // Packet is early; write droppable silent samples for each of the skipped packets.
             // NOTE: we assume that each dropped packet contains the same number of samples
             // as the packet we just received.
             int packetsDropped = arrivalInfo._seqDiffFromExpected;
-            writeFramesForDroppedPackets(packetsDropped * networkFrames);
+            lostAudioData(packetsDropped);
             // fall through to OnTime case
         }
@@ -208,6 +212,21 @@ int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray&
     }
 }
+int InboundAudioStream::lostAudioData(int numPackets) {
+    QByteArray decodedBuffer;
+    while (numPackets--) {
+        if (_decoder) {
+            _decoder->lostFrame(decodedBuffer);
+        } else {
+            decodedBuffer.resize(AudioConstants::NETWORK_FRAME_BYTES_STEREO);
+            memset(decodedBuffer.data(), 0, decodedBuffer.size());
+        }
+        _ringBuffer.writeData(decodedBuffer.data(), decodedBuffer.size());
+    }
+    return 0;
+}
 int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) {
     QByteArray decodedBuffer;
     if (_decoder) {
@@ -220,9 +239,6 @@ int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packet
 }
 int InboundAudioStream::writeDroppableSilentFrames(int silentFrames) {
-    if (_decoder) {
-        _decoder->trackLostFrames(silentFrames);
-    }
     // calculate how many silent frames we should drop.
     int silentSamples = silentFrames * _numChannels;
@@ -416,29 +432,6 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() {
     _lastPacketReceivedTime = now;
 }
-int InboundAudioStream::writeFramesForDroppedPackets(int networkFrames) {
-    return writeLastFrameRepeatedWithFade(networkFrames);
-}
-int InboundAudioStream::writeLastFrameRepeatedWithFade(int frames) {
-    AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten();
-    int frameSize = _ringBuffer.getNumFrameSamples();
-    int samplesToWrite = frames * _numChannels;
-    int indexOfRepeat = 0;
-    do {
-        int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize);
-        float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
-        if (fade == 1.0f) {
-            samplesToWrite -= _ringBuffer.writeSamples(frameToRepeat, samplesToWriteThisIteration);
-        } else {
-            samplesToWrite -= _ringBuffer.writeSamplesWithFade(frameToRepeat, samplesToWriteThisIteration, fade);
-        }
-        indexOfRepeat++;
-    } while (samplesToWrite > 0);
-    return frames;
-}
 AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
     AudioStreamStats streamStats;
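The removed repeat-with-fade path is superseded by lostAudioData, which asks the codec to conceal the gap (the decoder's lostFrame) and falls back to one silent network frame per lost packet when no decoder is set. A compilable sketch of that dispatch with stand-in types; the frame-size constant and Decoder interface are illustrative (the real ones are AudioConstants::NETWORK_FRAME_BYTES_STEREO and the codec plugin's decoder):

    #include <QByteArray>
    #include <cstring>

    static const int NETWORK_FRAME_BYTES_STEREO = 960; // illustrative size

    struct Decoder {
        // codec synthesizes plausible audio for a lost frame (e.g. Opus PLC)
        virtual void lostFrame(QByteArray& decodedBuffer) = 0;
        virtual ~Decoder() {}
    };

    void concealLostPackets(Decoder* decoder, QByteArray& ringBufferStandIn, int numPackets) {
        QByteArray decodedBuffer;
        while (numPackets--) {
            if (decoder) {
                decoder->lostFrame(decodedBuffer);   // codec-side concealment
            } else {
                // no codec: substitute one silent stereo network frame
                decodedBuffer.resize(NETWORK_FRAME_BYTES_STEREO);
                memset(decodedBuffer.data(), 0, decodedBuffer.size());
            }
            ringBufferStandIn.append(decodedBuffer);
        }
    }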

View file

@@ -115,8 +115,6 @@ public slots:
 private:
     void packetReceivedUpdateTimingStats();
-    int writeFramesForDroppedPackets(int networkFrames);
     void popSamplesNoCheck(int samples);
     void framesAvailableChanged();
@@ -134,12 +132,11 @@ protected:
     /// default implementation assumes packet contains raw audio samples after stream properties
     virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties);
+    /// produces audio data for lost network packets.
+    virtual int lostAudioData(int numPackets);
     /// writes silent frames to the buffer that may be dropped to reduce latency caused by the buffer
     virtual int writeDroppableSilentFrames(int silentFrames);
-    /// writes the last written frame repeatedly, gradually fading to silence.
-    /// used for writing samples for dropped packets.
-    virtual int writeLastFrameRepeatedWithFade(int frames);
 protected:

View file

@@ -31,11 +31,26 @@ int MixedProcessedAudioStream::writeDroppableSilentFrames(int silentFrames) {
     return deviceSilentFramesWritten;
 }
-int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int frames) {
-    int deviceFrames = networkToDeviceFrames(frames);
-    int deviceFramesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(deviceFrames);
-    emit addedLastFrameRepeatedWithFade(deviceToNetworkFrames(deviceFramesWritten));
-    return deviceFramesWritten;
+int MixedProcessedAudioStream::lostAudioData(int numPackets) {
+    QByteArray decodedBuffer;
+    QByteArray outputBuffer;
+    while (numPackets--) {
+        if (_decoder) {
+            _decoder->lostFrame(decodedBuffer);
+        } else {
+            decodedBuffer.resize(AudioConstants::NETWORK_FRAME_BYTES_STEREO);
+            memset(decodedBuffer.data(), 0, decodedBuffer.size());
+        }
+        emit addedStereoSamples(decodedBuffer);
+        emit processSamples(decodedBuffer, outputBuffer);
+        _ringBuffer.writeData(outputBuffer.data(), outputBuffer.size());
+        qCDebug(audiostream, "Wrote %d samples to buffer (%d available)", outputBuffer.size() / (int)sizeof(int16_t), getSamplesAvailable());
+    }
+    return 0;
 }
 int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) {

View file

@@ -34,8 +34,8 @@ public:
 protected:
     int writeDroppableSilentFrames(int silentFrames) override;
-    int writeLastFrameRepeatedWithFade(int frames) override;
     int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) override;
+    int lostAudioData(int numPackets) override;
 private:
     int networkToDeviceFrames(int networkFrames);

View file

@@ -29,7 +29,7 @@ void EntityEditPacketSender::processEntityEditNackPacket(QSharedPointer<Received
 }
 void EntityEditPacketSender::adjustEditPacketForClockSkew(PacketType type, QByteArray& buffer, qint64 clockSkew) {
-    if (type == PacketType::EntityAdd || type == PacketType::EntityEdit) {
+    if (type == PacketType::EntityAdd || type == PacketType::EntityEdit || type == PacketType::EntityPhysics) {
         EntityItem::adjustEditPacketForClockSkew(buffer, clockSkew);
     }
 }
@@ -100,7 +100,18 @@ void EntityEditPacketSender::queueEditEntityMessage(PacketType type,
     QByteArray bufferOut(NLPacket::maxPayloadSize(type), 0);
-    if (EntityItemProperties::encodeEntityEditPacket(type, entityItemID, properties, bufferOut)) {
+    bool success;
+    if (properties.parentIDChanged() && properties.getParentID() == AVATAR_SELF_ID) {
+        EntityItemProperties propertiesCopy = properties;
+        auto nodeList = DependencyManager::get<NodeList>();
+        const QUuid myNodeID = nodeList->getSessionUUID();
+        propertiesCopy.setParentID(myNodeID);
+        success = EntityItemProperties::encodeEntityEditPacket(type, entityItemID, propertiesCopy, bufferOut);
+    } else {
+        success = EntityItemProperties::encodeEntityEditPacket(type, entityItemID, properties, bufferOut);
+    }
+    if (success) {
 #ifdef WANT_DEBUG
         qCDebug(entities) << "calling queueOctreeEditMessage()...";
         qCDebug(entities) << "    id:" << entityItemID;
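AVATAR_SELF_ID is a sentinel that only has meaning on the local client, so outgoing edits rewrite it to the sender's real session UUID on a copy of the properties before serialization. A small sketch of the substitution; the sentinel value shown is illustrative:

    #include <QDebug>
    #include <QUuid>

    // fixed sentinel meaning "my own avatar" (value shown for illustration)
    const QUuid AVATAR_SELF_ID = QUuid("{00000000-0000-0000-0000-000000000001}");

    // swap the local-only sentinel for the network-visible session UUID
    QUuid resolveParentID(const QUuid& parentID, const QUuid& sessionID) {
        return (parentID == AVATAR_SELF_ID) ? sessionID : parentID;
    }

    int main() {
        QUuid session = QUuid::createUuid();
        qDebug() << resolveParentID(AVATAR_SELF_ID, session); // the session UUID
        qDebug() << resolveParentID(QUuid(), session);        // unchanged (null)
        return 0;
    }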

View file

@@ -828,7 +828,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
     {   // parentID and parentJointIndex are also protected by simulation ownership
         bool oldOverwrite = overwriteLocalData;
         overwriteLocalData = overwriteLocalData && !weOwnSimulation;
-        READ_ENTITY_PROPERTY(PROP_PARENT_ID, QUuid, setParentID);
+        READ_ENTITY_PROPERTY(PROP_PARENT_ID, QUuid, updateParentID);
         READ_ENTITY_PROPERTY(PROP_PARENT_JOINT_INDEX, quint16, setParentJointIndex);
         overwriteLocalData = oldOverwrite;
     }
@@ -1823,28 +1823,6 @@ void EntityItem::computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask
     }
     uint8_t userMask = getCollisionMask();
-    if (userMask & USER_COLLISION_GROUP_MY_AVATAR) {
-        // if this entity is a descendant of MyAvatar, don't collide with MyAvatar. This avoids the
-        // "bootstrapping" problem where you can shoot yourself across the room by grabbing something
-        // and holding it against your own avatar.
-        QUuid ancestorID = findAncestorOfType(NestableType::Avatar);
-        if (!ancestorID.isNull() && ancestorID == Physics::getSessionUUID()) {
-            userMask &= ~USER_COLLISION_GROUP_MY_AVATAR;
-        }
-    }
-    if (userMask & USER_COLLISION_GROUP_MY_AVATAR) {
-        // also, don't bootstrap our own avatar with a hold action
-        QList<EntityActionPointer> holdActions = getActionsOfType(ACTION_TYPE_HOLD);
-        QList<EntityActionPointer>::const_iterator i = holdActions.begin();
-        while (i != holdActions.end()) {
-            EntityActionPointer action = *i;
-            if (action->isMine()) {
-                userMask &= ~USER_COLLISION_GROUP_MY_AVATAR;
-                break;
-            }
-            i++;
-        }
-    }
     if ((bool)(userMask & USER_COLLISION_GROUP_MY_AVATAR) !=
             (bool)(userMask & USER_COLLISION_GROUP_OTHER_AVATAR)) {
@@ -1854,6 +1832,33 @@ void EntityItem::computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask
             userMask ^= USER_COLLISION_MASK_AVATARS | ~userMask;
         }
     }
+    if (userMask & USER_COLLISION_GROUP_MY_AVATAR) {
+        bool iAmHoldingThis = false;
+        // if this entity is a descendant of MyAvatar, don't collide with MyAvatar. This avoids the
+        // "bootstrapping" problem where you can shoot yourself across the room by grabbing something
+        // and holding it against your own avatar.
+        QUuid ancestorID = findAncestorOfType(NestableType::Avatar);
+        if (!ancestorID.isNull() &&
+                (ancestorID == Physics::getSessionUUID() || ancestorID == AVATAR_SELF_ID)) {
+            iAmHoldingThis = true;
+        }
+        // also, don't bootstrap our own avatar with a hold action
+        QList<EntityActionPointer> holdActions = getActionsOfType(ACTION_TYPE_HOLD);
+        QList<EntityActionPointer>::const_iterator i = holdActions.begin();
+        while (i != holdActions.end()) {
+            EntityActionPointer action = *i;
+            if (action->isMine()) {
+                iAmHoldingThis = true;
+                break;
+            }
+            i++;
+        }
+        if (iAmHoldingThis) {
+            userMask &= ~USER_COLLISION_GROUP_MY_AVATAR;
+        }
+    }
     mask = Physics::getDefaultCollisionMask(group) & (int16_t)(userMask);
     }
 }
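Reordering matters here: the MY_AVATAR/OTHER_AVATAR reconciliation now runs before the hold/descendant check clears the MY_AVATAR bit, so a held entity keeps colliding with other avatars while no longer pushing the holder around. A standalone sketch of the bit arithmetic with hypothetical group constants (the real ones are the USER_COLLISION_GROUP_* values):

    #include <cstdint>
    #include <iostream>

    const uint8_t GROUP_MY_AVATAR    = 1 << 0; // illustrative bit assignments
    const uint8_t GROUP_OTHER_AVATAR = 1 << 1;

    int main() {
        uint8_t userMask = GROUP_MY_AVATAR | GROUP_OTHER_AVATAR;

        bool iAmHoldingThis = true; // e.g. a hold action of mine is attached
        if (iAmHoldingThis) {
            // clear only the MY_AVATAR bit: no "bootstrapping" force on myself
            userMask &= ~GROUP_MY_AVATAR;
        }

        std::cout << "collides with me: "     << bool(userMask & GROUP_MY_AVATAR) << "\n"
                  << "collides with others: " << bool(userMask & GROUP_OTHER_AVATAR) << "\n";
        return 0;
    }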

View file

@@ -104,6 +104,7 @@ bool EntityTree::handlesEditPacketType(PacketType packetType) const {
         case PacketType::EntityAdd:
         case PacketType::EntityEdit:
         case PacketType::EntityErase:
+        case PacketType::EntityPhysics:
             return true;
         default:
             return false;
@@ -931,10 +932,15 @@ void EntityTree::initEntityEditFilterEngine(QScriptEngine* engine, std::function
         qCDebug(entities) << "Filter function specified but not found. Will reject all edits.";
         _entityEditFilterEngine = nullptr; // So that we don't try to call it. See filterProperties.
     }
+    auto entitiesObject = _entityEditFilterEngine->newObject();
+    entitiesObject.setProperty("ADD_FILTER_TYPE", FilterType::Add);
+    entitiesObject.setProperty("EDIT_FILTER_TYPE", FilterType::Edit);
+    entitiesObject.setProperty("PHYSICS_FILTER_TYPE", FilterType::Physics);
+    global.setProperty("Entities", entitiesObject);
     _hasEntityEditFilter = true;
 }
-bool EntityTree::filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, bool isAdd) {
+bool EntityTree::filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, FilterType filterType) {
     if (!_entityEditFilterEngine) {
         propertiesOut = propertiesIn;
         wasChanged = false; // not changed
@@ -953,7 +959,7 @@ bool EntityTree::filterProperties(EntityItemProperties& propertiesIn, EntityItem
     auto in = QJsonValue::fromVariant(inputValues.toVariant()); // grab json copy now, because the inputValues might be side effected by the filter.
     QScriptValueList args;
     args << inputValues;
-    args << isAdd;
+    args << filterType;
     QScriptValue result = _entityEditFilterFunction.call(_nullObjectForFilter, args);
     if (_entityEditFilterHadUncaughtExceptions()) {
@@ -1001,6 +1007,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
         case PacketType::EntityAdd:
             isAdd = true; // fall through to next case
+        case PacketType::EntityPhysics:
         case PacketType::EntityEdit: {
             quint64 startDecode = 0, endDecode = 0;
             quint64 startLookup = 0, endLookup = 0;
@@ -1010,6 +1017,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
             quint64 startLogging = 0, endLogging = 0;
             bool suppressDisallowedScript = false;
+            bool isPhysics = message.getType() == PacketType::EntityPhysics;
             _totalEditMessages++;
@@ -1021,6 +1029,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
                 entityItemID, properties);
             endDecode = usecTimestampNow();
             if (validEditPacket && !_entityScriptSourceWhitelist.isEmpty() && !properties.getScript().isEmpty()) {
                 bool passedWhiteList = false;
@@ -1053,8 +1062,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
                 }
             }
-            if ((isAdd ||
-                 (message.getType() == PacketType::EntityEdit && properties.lifetimeChanged())) &&
+            if ((isAdd || properties.lifetimeChanged()) &&
                 !senderNode->getCanRez() && senderNode->getCanRezTmp()) {
                 // this node is only allowed to rez temporary entities. if need be, cap the lifetime.
                 if (properties.getLifetime() == ENTITY_ITEM_IMMORTAL_LIFETIME ||
@@ -1070,8 +1078,9 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
             startFilter = usecTimestampNow();
             bool wasChanged = false;
-            // Having (un)lock rights bypasses the filter.
-            bool allowed = senderNode->isAllowedEditor() || filterProperties(properties, properties, wasChanged, isAdd);
+            // Having (un)lock rights bypasses the filter, unless it's a physics result.
+            FilterType filterType = isPhysics ? FilterType::Physics : (isAdd ? FilterType::Add : FilterType::Edit);
+            bool allowed = (!isPhysics && senderNode->isAllowedEditor()) || filterProperties(properties, properties, wasChanged, filterType);
             if (!allowed) {
                 auto timestamp = properties.getLastEdited();
                 properties = EntityItemProperties();
@@ -1088,7 +1097,7 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
             startLookup = usecTimestampNow();
             EntityItemPointer existingEntity = findEntityByEntityItemID(entityItemID);
             endLookup = usecTimestampNow();
-            if (existingEntity && message.getType() == PacketType::EntityEdit) {
+            if (existingEntity && !isAdd) {
                 if (suppressDisallowedScript) {
                     bumpTimestamp(properties);
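The filter script now receives a filter type as its second argument, with the constants published to the script context as an Entities object. A rough Qt Script sketch of that wiring, trimmed down from the surrounding code (names simplified; the filter body is a hypothetical example that rejects physics-originated edits):

    #include <QDebug>
    #include <QtScript/QScriptEngine>

    enum FilterType { Add, Edit, Physics };

    int main() {
        QScriptEngine engine;

        // publish the filter-type constants, as the tree does for its engine
        QScriptValue entities = engine.newObject();
        entities.setProperty("ADD_FILTER_TYPE", FilterType::Add);
        entities.setProperty("EDIT_FILTER_TYPE", FilterType::Edit);
        entities.setProperty("PHYSICS_FILTER_TYPE", FilterType::Physics);
        engine.globalObject().setProperty("Entities", entities);

        // hypothetical filter: veto physics results, pass everything else
        QScriptValue filter = engine.evaluate(
            "(function(properties, filterType) {"
            "    if (filterType === Entities.PHYSICS_FILTER_TYPE) { return false; }"
            "    return properties;"
            "})");

        QScriptValueList args;
        args << engine.newObject() << FilterType::Physics;
        QScriptValue result = filter.call(QScriptValue(), args);
        qDebug() << "allowed:" << result.toBool(); // false
        return 0;
    }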

View file

@@ -60,6 +60,11 @@ public:
 class EntityTree : public Octree, public SpatialParentTree {
     Q_OBJECT
 public:
+    enum FilterType {
+        Add,
+        Edit,
+        Physics
+    };
     EntityTree(bool shouldReaverage = false);
     virtual ~EntityTree();
@@ -357,7 +362,7 @@ protected:
     float _maxTmpEntityLifetime { DEFAULT_MAX_TMP_ENTITY_LIFETIME };
-    bool filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, bool isAdd);
+    bool filterProperties(EntityItemProperties& propertiesIn, EntityItemProperties& propertiesOut, bool& wasChanged, FilterType filterType);
     bool _hasEntityEditFilter{ false };
     QScriptEngine* _entityEditFilterEngine{};
     QScriptValue _entityEditFilterFunction{};

View file

@@ -1467,6 +1467,34 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
     // Create the Material Library
     consolidateFBXMaterials(mapping);
+    // HACK: until we get proper LOD management we're going to cap model textures
+    // according to how many unique textures the model uses:
+    //   1 - 8 textures --> 2048
+    //   9 - 32 textures --> 1024
+    //   33 - 128 textures --> 512
+    // etc...
+    QSet<QString> uniqueTextures;
+    for (auto& material : _fbxMaterials) {
+        material.getTextureNames(uniqueTextures);
+    }
+    int numTextures = uniqueTextures.size();
+    const int MAX_NUM_TEXTURES_AT_MAX_RESOLUTION = 8;
+    int maxWidth = sqrt(MAX_NUM_PIXELS_FOR_FBX_TEXTURE);
+    if (numTextures > MAX_NUM_TEXTURES_AT_MAX_RESOLUTION) {
+        int numTextureThreshold = MAX_NUM_TEXTURES_AT_MAX_RESOLUTION;
+        const int MIN_MIP_TEXTURE_WIDTH = 64;
+        do {
+            maxWidth /= 2;
+            numTextureThreshold *= 4;
+        } while (numTextureThreshold < numTextures && maxWidth > MIN_MIP_TEXTURE_WIDTH);
+        qCDebug(modelformat) << "Capped square texture width =" << maxWidth << "for model" << url << "with" << numTextures << "textures";
+        for (auto& material : _fbxMaterials) {
+            material.setMaxNumPixelsPerTexture(maxWidth * maxWidth);
+        }
+    }
     geometry.materials = _fbxMaterials;
     // see if any materials have texture children
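The cap halves the square texture width each time the unique-texture budget quadruples, since halving the width quarters the pixel count. A standalone sketch of the threshold loop with a hypothetical texture count:

    #include <cmath>
    #include <iostream>

    int main() {
        const int MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048;
        const int MAX_NUM_TEXTURES_AT_MAX_RESOLUTION = 8;
        const int MIN_MIP_TEXTURE_WIDTH = 64;

        int numTextures = 50; // hypothetical unique-texture count for a model
        int maxWidth = (int)std::sqrt((double)MAX_NUM_PIXELS_FOR_FBX_TEXTURE); // 2048

        if (numTextures > MAX_NUM_TEXTURES_AT_MAX_RESOLUTION) {
            int numTextureThreshold = MAX_NUM_TEXTURES_AT_MAX_RESOLUTION;
            do {
                maxWidth /= 2;            // halve width: quarter the pixels
                numTextureThreshold *= 4; // so 4x as many textures fit the budget
            } while (numTextureThreshold < numTextures && maxWidth > MIN_MIP_TEXTURE_WIDTH);
        }
        // 50 textures: 2048 -> 1024 (threshold 32) -> 512 (threshold 128 >= 50), stop
        std::cout << "capped width: " << maxWidth << "\n"; // 512
        return 0;
    }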

View file

@@ -13,6 +13,7 @@
 #define hifi_FBXReader_h
 #include <QMetaType>
+#include <QSet>
 #include <QUrl>
 #include <QVarLengthArray>
 #include <QVariant>
@@ -100,22 +101,25 @@ public:
 /// A single binding to a joint in an FBX document.
 class FBXCluster {
 public:
     int jointIndex;
     glm::mat4 inverseBindMatrix;
 };
+const int MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048;
 /// A texture map in an FBX document.
 class FBXTexture {
 public:
     QString name;
     QByteArray filename;
     QByteArray content;
     Transform transform;
+    int maxNumPixels { MAX_NUM_PIXELS_FOR_FBX_TEXTURE };
     int texcoordSet;
     QString texcoordSetName;
     bool isBumpmap{ false };
     bool isNull() const { return name.isEmpty() && filename.isEmpty() && content.isEmpty(); }
@@ -143,6 +147,9 @@ public:
         shininess(shininess),
         opacity(opacity) {}
+    void getTextureNames(QSet<QString>& textureList) const;
+    void setMaxNumPixelsPerTexture(int maxNumPixels);
     glm::vec3 diffuseColor{ 1.0f };
     float diffuseFactor{ 1.0f };
     glm::vec3 specularColor{ 0.02f };

View file

@@ -27,6 +27,56 @@
 #include "ModelFormatLogging.h"
+void FBXMaterial::getTextureNames(QSet<QString>& textureList) const {
+    if (!normalTexture.isNull()) {
+        textureList.insert(normalTexture.name);
+    }
+    if (!albedoTexture.isNull()) {
+        textureList.insert(albedoTexture.name);
+    }
+    if (!opacityTexture.isNull()) {
+        textureList.insert(opacityTexture.name);
+    }
+    if (!glossTexture.isNull()) {
+        textureList.insert(glossTexture.name);
+    }
+    if (!roughnessTexture.isNull()) {
+        textureList.insert(roughnessTexture.name);
+    }
+    if (!specularTexture.isNull()) {
+        textureList.insert(specularTexture.name);
+    }
+    if (!metallicTexture.isNull()) {
+        textureList.insert(metallicTexture.name);
+    }
+    if (!emissiveTexture.isNull()) {
+        textureList.insert(emissiveTexture.name);
+    }
+    if (!occlusionTexture.isNull()) {
+        textureList.insert(occlusionTexture.name);
+    }
+    if (!scatteringTexture.isNull()) {
+        textureList.insert(scatteringTexture.name);
+    }
+    if (!lightmapTexture.isNull()) {
+        textureList.insert(lightmapTexture.name);
+    }
+}
+void FBXMaterial::setMaxNumPixelsPerTexture(int maxNumPixels) {
+    normalTexture.maxNumPixels = maxNumPixels;
+    albedoTexture.maxNumPixels = maxNumPixels;
+    opacityTexture.maxNumPixels = maxNumPixels;
+    glossTexture.maxNumPixels = maxNumPixels;
+    roughnessTexture.maxNumPixels = maxNumPixels;
+    specularTexture.maxNumPixels = maxNumPixels;
+    metallicTexture.maxNumPixels = maxNumPixels;
+    emissiveTexture.maxNumPixels = maxNumPixels;
+    occlusionTexture.maxNumPixels = maxNumPixels;
+    scatteringTexture.maxNumPixels = maxNumPixels;
+    lightmapTexture.maxNumPixels = maxNumPixels;
+}
 bool FBXMaterial::needTangentSpace() const {
     return !normalTexture.isNull();
 }
@ -47,10 +97,10 @@ FBXTexture FBXReader::getTexture(const QString& textureID) {
texture.texcoordSet = 0; texture.texcoordSet = 0;
if (_textureParams.contains(textureID)) { if (_textureParams.contains(textureID)) {
auto p = _textureParams.value(textureID); auto p = _textureParams.value(textureID);
texture.transform.setTranslation(p.translation); texture.transform.setTranslation(p.translation);
texture.transform.setRotation(glm::quat(glm::radians(p.rotation))); texture.transform.setRotation(glm::quat(glm::radians(p.rotation)));
auto scaling = p.scaling; auto scaling = p.scaling;
// Protect from bad scaling which should never happen // Protect from bad scaling which should never happen
if (scaling.x == 0.0f) { if (scaling.x == 0.0f) {
@ -63,7 +113,7 @@ FBXTexture FBXReader::getTexture(const QString& textureID) {
scaling.z = 1.0f; scaling.z = 1.0f;
} }
texture.transform.setScale(scaling); texture.transform.setScale(scaling);
if ((p.UVSet != "map1") && (p.UVSet != "UVSet0")) { if ((p.UVSet != "map1") && (p.UVSet != "UVSet0")) {
texture.texcoordSet = 1; texture.texcoordSet = 1;
} }
@ -78,11 +128,10 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
QJsonDocument materialMapDocument = QJsonDocument::fromJson(materialMapString.toUtf8()); QJsonDocument materialMapDocument = QJsonDocument::fromJson(materialMapString.toUtf8());
QJsonObject materialMap = materialMapDocument.object(); QJsonObject materialMap = materialMapDocument.object();
// foreach (const QString& materialID, materials) {
for (QHash<QString, FBXMaterial>::iterator it = _fbxMaterials.begin(); it != _fbxMaterials.end(); it++) { for (QHash<QString, FBXMaterial>::iterator it = _fbxMaterials.begin(); it != _fbxMaterials.end(); it++) {
FBXMaterial& material = (*it); FBXMaterial& material = (*it);
// Maya is the exporting the shading model and we aretrying to use it // Maya is exporting the shading model and we are trying to use it
bool isMaterialLambert = (material.shadingModel.toLower() == "lambert"); bool isMaterialLambert = (material.shadingModel.toLower() == "lambert");
// the pure material associated with this part // the pure material associated with this part
@ -127,21 +176,19 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
detectDifferentUVs |= (transparentTexture.texcoordSet != 0) || (!transparentTexture.transform.isIdentity()); detectDifferentUVs |= (transparentTexture.texcoordSet != 0) || (!transparentTexture.transform.isIdentity());
} }
FBXTexture normalTexture; FBXTexture normalTexture;
QString bumpTextureID = bumpTextures.value(material.materialID); QString bumpTextureID = bumpTextures.value(material.materialID);
QString normalTextureID = normalTextures.value(material.materialID); QString normalTextureID = normalTextures.value(material.materialID);
if (!normalTextureID.isNull()) { if (!normalTextureID.isNull()) {
normalTexture = getTexture(normalTextureID); normalTexture = getTexture(normalTextureID);
normalTexture.isBumpmap = false; normalTexture.isBumpmap = false;
material.normalTexture = normalTexture; material.normalTexture = normalTexture;
detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity()); detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity());
} else if (!bumpTextureID.isNull()) { } else if (!bumpTextureID.isNull()) {
normalTexture = getTexture(bumpTextureID); normalTexture = getTexture(bumpTextureID);
normalTexture.isBumpmap = true; normalTexture.isBumpmap = true;
material.normalTexture = normalTexture; material.normalTexture = normalTexture;
detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity()); detectDifferentUVs |= (normalTexture.texcoordSet != 0) || (!normalTexture.transform.isIdentity());
} }
@ -151,7 +198,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
if (!specularTextureID.isNull()) { if (!specularTextureID.isNull()) {
specularTexture = getTexture(specularTextureID); specularTexture = getTexture(specularTextureID);
detectDifferentUVs |= (specularTexture.texcoordSet != 0) || (!specularTexture.transform.isIdentity()); detectDifferentUVs |= (specularTexture.texcoordSet != 0) || (!specularTexture.transform.isIdentity());
material.specularTexture = specularTexture; material.specularTexture = specularTexture;
} }
FBXTexture metallicTexture; FBXTexture metallicTexture;
@ -222,7 +269,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
ambientTextureID = ambientFactorTextures.value(material.materialID); ambientTextureID = ambientFactorTextures.value(material.materialID);
} }
} }
if (_loadLightmaps && !ambientTextureID.isNull()) { if (_loadLightmaps && !ambientTextureID.isNull()) {
ambientTexture = getTexture(ambientTextureID); ambientTexture = getTexture(ambientTextureID);
detectDifferentUVs |= (ambientTexture.texcoordSet != 0) || (!ambientTexture.transform.isIdentity()); detectDifferentUVs |= (ambientTexture.texcoordSet != 0) || (!ambientTexture.transform.isIdentity());

View file

@ -25,6 +25,9 @@ void KeyboardMouseDevice::pluginUpdate(float deltaTime, const controller::InputC
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>(); auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->withLock([&, this]() { userInputMapper->withLock([&, this]() {
_inputDevice->update(deltaTime, inputCalibrationData); _inputDevice->update(deltaTime, inputCalibrationData);
_inputDevice->_axisStateMap[MOUSE_AXIS_X] = _lastCursor.x();
_inputDevice->_axisStateMap[MOUSE_AXIS_Y] = _lastCursor.y();
}); });
// For touch event, we need to check that the last event is not too long ago // For touch event, we need to check that the last event is not too long ago
@ -249,6 +252,9 @@ controller::Input::NamedVector KeyboardMouseDevice::InputDevice::getAvailableInp
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y_POS), "MouseMoveUp")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y_POS), "MouseMoveUp"));
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y_NEG), "MouseMoveDown")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y_NEG), "MouseMoveDown"));
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_X), "MouseX"));
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_Y), "MouseY"));
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_Y_POS), "MouseWheelRight")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_Y_POS), "MouseWheelRight"));
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_Y_NEG), "MouseWheelLeft")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_Y_NEG), "MouseWheelLeft"));
availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_X_POS), "MouseWheelUp")); availableInputs.append(Input::NamedPair(makeInput(MOUSE_AXIS_WHEEL_X_POS), "MouseWheelUp"));

View file

@ -47,6 +47,8 @@ public:
MOUSE_AXIS_X_NEG, MOUSE_AXIS_X_NEG,
MOUSE_AXIS_Y_POS, MOUSE_AXIS_Y_POS,
MOUSE_AXIS_Y_NEG, MOUSE_AXIS_Y_NEG,
MOUSE_AXIS_X,
MOUSE_AXIS_Y,
MOUSE_AXIS_WHEEL_Y_POS, MOUSE_AXIS_WHEEL_Y_POS,
MOUSE_AXIS_WHEEL_Y_NEG, MOUSE_AXIS_WHEEL_Y_NEG,
MOUSE_AXIS_WHEEL_X_POS, MOUSE_AXIS_WHEEL_X_POS,

View file

@ -472,7 +472,7 @@ QUrl NetworkMaterial::getTextureUrl(const QUrl& baseUrl, const FBXTexture& textu
model::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const FBXTexture& fbxTexture, model::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const FBXTexture& fbxTexture,
TextureType type, MapChannel channel) { TextureType type, MapChannel channel) {
const auto url = getTextureUrl(baseUrl, fbxTexture); const auto url = getTextureUrl(baseUrl, fbxTexture);
const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, fbxTexture.content); const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, fbxTexture.content, fbxTexture.maxNumPixels);
_textures[channel] = Texture { fbxTexture.name, texture }; _textures[channel] = Texture { fbxTexture.name, texture };
auto map = std::make_shared<model::TextureMap>(); auto map = std::make_shared<model::TextureMap>();

View file

@ -159,7 +159,7 @@ protected:
class Texture { class Texture {
public: public:
QString name; QString name;
QSharedPointer<NetworkTexture> texture; NetworkTexturePointer texture;
}; };
using Textures = std::vector<Texture>; using Textures = std::vector<Texture>;

View file

@ -167,16 +167,17 @@ class TextureExtra {
public: public:
NetworkTexture::Type type; NetworkTexture::Type type;
const QByteArray& content; const QByteArray& content;
int maxNumPixels;
}; };
ScriptableResource* TextureCache::prefetch(const QUrl& url, int type) { ScriptableResource* TextureCache::prefetch(const QUrl& url, int type, int maxNumPixels) {
auto byteArray = QByteArray(); auto byteArray = QByteArray();
TextureExtra extra = { (Type)type, byteArray }; TextureExtra extra = { (Type)type, byteArray, maxNumPixels };
return ResourceCache::prefetch(url, &extra); return ResourceCache::prefetch(url, &extra);
} }
NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const QByteArray& content) { NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) {
TextureExtra extra = { type, content }; TextureExtra extra = { type, content, maxNumPixels };
return ResourceCache::getResource(url, QUrl(), &extra).staticCast<NetworkTexture>(); return ResourceCache::getResource(url, QUrl(), &extra).staticCast<NetworkTexture>();
} }
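A sketch of a call site under the widened signature; the URL and the 512 * 512 cap are illustrative, and the enum spelling is assumed from the header above:

    // hypothetical call: fetch a texture, downscaled at load time if it
    // carries more pixels than a 512 x 512 image
    auto textureCache = DependencyManager::get<TextureCache>();
    auto texture = textureCache->getTexture(QUrl("http://example.com/albedo.png"),
                                            NetworkTexture::Type::DEFAULT_TEXTURE,
                                            QByteArray(),    // no inline content
                                            512 * 512);      // per-request pixel cap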
@ -251,13 +252,15 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSh
const TextureExtra* textureExtra = static_cast<const TextureExtra*>(extra); const TextureExtra* textureExtra = static_cast<const TextureExtra*>(extra);
auto type = textureExtra ? textureExtra->type : Type::DEFAULT_TEXTURE; auto type = textureExtra ? textureExtra->type : Type::DEFAULT_TEXTURE;
auto content = textureExtra ? textureExtra->content : QByteArray(); auto content = textureExtra ? textureExtra->content : QByteArray();
return QSharedPointer<Resource>(new NetworkTexture(url, type, content), auto maxNumPixels = textureExtra ? textureExtra->maxNumPixels : ABSOLUTE_MAX_TEXTURE_NUM_PIXELS;
return QSharedPointer<Resource>(new NetworkTexture(url, type, content, maxNumPixels),
&Resource::deleter); &Resource::deleter);
} }
NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content) : NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) :
Resource(url), Resource(url),
_type(type) _type(type),
_maxNumPixels(maxNumPixels)
{ {
_textureSource = std::make_shared<gpu::TextureSource>(); _textureSource = std::make_shared<gpu::TextureSource>();
@ -274,7 +277,7 @@ NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& con
} }
NetworkTexture::NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content) : NetworkTexture::NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content) :
NetworkTexture(url, CUSTOM_TEXTURE, content) NetworkTexture(url, CUSTOM_TEXTURE, content, ABSOLUTE_MAX_TEXTURE_NUM_PIXELS)
{ {
_textureLoader = textureLoader; _textureLoader = textureLoader;
} }
@ -290,7 +293,8 @@ NetworkTexture::TextureLoaderFunc NetworkTexture::getTextureLoader() const {
class ImageReader : public QRunnable { class ImageReader : public QRunnable {
public: public:
ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data, const QUrl& url = QUrl()); ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data,
const QUrl& url = QUrl(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
virtual void run() override; virtual void run() override;
@ -300,6 +304,7 @@ private:
QWeakPointer<Resource> _resource; QWeakPointer<Resource> _resource;
QUrl _url; QUrl _url;
QByteArray _content; QByteArray _content;
int _maxNumPixels;
}; };
void NetworkTexture::downloadFinished(const QByteArray& data) { void NetworkTexture::downloadFinished(const QByteArray& data) {
@ -308,14 +313,15 @@ void NetworkTexture::downloadFinished(const QByteArray& data) {
} }
void NetworkTexture::loadContent(const QByteArray& content) { void NetworkTexture::loadContent(const QByteArray& content) {
QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url)); QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url, _maxNumPixels));
} }
ImageReader::ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data, ImageReader::ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data,
const QUrl& url) : const QUrl& url, int maxNumPixels) :
_resource(resource), _resource(resource),
_url(url), _url(url),
_content(data) _content(data),
_maxNumPixels(maxNumPixels)
{ {
#if DEBUG_DUMP_TEXTURE_LOADS #if DEBUG_DUMP_TEXTURE_LOADS
static auto start = usecTimestampNow() / USECS_PER_MSEC; static auto start = usecTimestampNow() / USECS_PER_MSEC;
@ -375,10 +381,10 @@ void ImageReader::run() {
// Note that QImage.format is the pixel format which is different from the "format" of the image file... // Note that QImage.format is the pixel format which is different from the "format" of the image file...
auto imageFormat = image.format(); auto imageFormat = image.format();
int originalWidth = image.width(); int imageWidth = image.width();
int originalHeight = image.height(); int imageHeight = image.height();
if (originalWidth == 0 || originalHeight == 0 || imageFormat == QImage::Format_Invalid) { if (imageWidth == 0 || imageHeight == 0 || imageFormat == QImage::Format_Invalid) {
if (filenameExtension.empty()) { if (filenameExtension.empty()) {
qCDebug(modelnetworking) << "QImage failed to create from content, no file extension:" << _url; qCDebug(modelnetworking) << "QImage failed to create from content, no file extension:" << _url;
} else { } else {
@ -386,6 +392,20 @@ void ImageReader::run() {
} }
return; return;
} }
if (imageWidth * imageHeight > _maxNumPixels) {
float scaleFactor = sqrtf(_maxNumPixels / (float)(imageWidth * imageHeight));
int originalWidth = imageWidth;
int originalHeight = imageHeight;
imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f);
imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f);
QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio);
image.swap(newImage);
qCDebug(modelnetworking) << "Downscale image" << _url
<< "from" << originalWidth << "x" << originalHeight
<< "to" << imageWidth << "x" << imageHeight;
}
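    // Worked example: with MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048 (4,194,304
    // pixels) as _maxNumPixels, a 4096 x 4096 source carries 16,777,216 pixels, so
    // scaleFactor = sqrtf(4194304 / 16777216.0f) = 0.5 and the image lands at
    // exactly 2048 x 2048. Both axes shrink by the same factor, so the aspect
    // ratio survives despite Qt::IgnoreAspectRatio; the factor only exists to
    // bring imageWidth * imageHeight down to (about) _maxNumPixels.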
gpu::TexturePointer texture = nullptr; gpu::TexturePointer texture = nullptr;
{ {
// Double-check the resource still exists between long operations. // Double-check the resource still exists between long operations.
@ -408,7 +428,7 @@ void ImageReader::run() {
} else { } else {
QMetaObject::invokeMethod(resource.data(), "setImage", QMetaObject::invokeMethod(resource.data(), "setImage",
Q_ARG(gpu::TexturePointer, texture), Q_ARG(gpu::TexturePointer, texture),
Q_ARG(int, originalWidth), Q_ARG(int, originalHeight)); Q_ARG(int, imageWidth), Q_ARG(int, imageHeight));
} }
} }

View file

@ -23,6 +23,8 @@
#include <ResourceCache.h> #include <ResourceCache.h>
#include <model/TextureMap.h> #include <model/TextureMap.h>
const int ABSOLUTE_MAX_TEXTURE_NUM_PIXELS = 8192 * 8192;
namespace gpu { namespace gpu {
class Batch; class Batch;
} }
@ -60,7 +62,7 @@ public:
typedef gpu::Texture* TextureLoader(const QImage& image, const std::string& srcImageName); typedef gpu::Texture* TextureLoader(const QImage& image, const std::string& srcImageName);
using TextureLoaderFunc = std::function<TextureLoader>; using TextureLoaderFunc = std::function<TextureLoader>;
NetworkTexture(const QUrl& url, Type type, const QByteArray& content); NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels);
NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content); NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content);
QString getType() const override { return "NetworkTexture"; } QString getType() const override { return "NetworkTexture"; }
@ -70,7 +72,7 @@ public:
int getWidth() const { return _width; } int getWidth() const { return _width; }
int getHeight() const { return _height; } int getHeight() const { return _height; }
Type getTextureType() const { return _type; } Type getTextureType() const { return _type; }
TextureLoaderFunc getTextureLoader() const; TextureLoaderFunc getTextureLoader() const;
signals: signals:
@ -81,7 +83,7 @@ protected:
virtual bool isCacheable() const override { return _loaded; } virtual bool isCacheable() const override { return _loaded; }
virtual void downloadFinished(const QByteArray& data) override; virtual void downloadFinished(const QByteArray& data) override;
Q_INVOKABLE void loadContent(const QByteArray& content); Q_INVOKABLE void loadContent(const QByteArray& content);
Q_INVOKABLE void setImage(gpu::TexturePointer texture, int originalWidth, int originalHeight); Q_INVOKABLE void setImage(gpu::TexturePointer texture, int originalWidth, int originalHeight);
@ -92,6 +94,7 @@ private:
int _originalHeight { 0 }; int _originalHeight { 0 };
int _width { 0 }; int _width { 0 };
int _height { 0 }; int _height { 0 };
int _maxNumPixels { ABSOLUTE_MAX_TEXTURE_NUM_PIXELS };
}; };
using NetworkTexturePointer = QSharedPointer<NetworkTexture>; using NetworkTexturePointer = QSharedPointer<NetworkTexture>;
@ -129,11 +132,11 @@ public:
/// Loads a texture from the specified URL. /// Loads a texture from the specified URL.
NetworkTexturePointer getTexture(const QUrl& url, Type type = Type::DEFAULT_TEXTURE, NetworkTexturePointer getTexture(const QUrl& url, Type type = Type::DEFAULT_TEXTURE,
const QByteArray& content = QByteArray()); const QByteArray& content = QByteArray(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
protected: protected:
// Overload ResourceCache::prefetch to allow specifying texture type for loads // Overload ResourceCache::prefetch to allow specifying texture type for loads
Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type); Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type, int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
virtual QSharedPointer<Resource> createResource(const QUrl& url, const QSharedPointer<Resource>& fallback, virtual QSharedPointer<Resource> createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
const void* extra) override; const void* extra) override;

View file

@ -827,18 +827,26 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) {
}); });
if (ignoreEnabled) { if (ignoreEnabled) {
QReadLocker ignoredSetLocker{ &_ignoredSetLock }; // read lock for insert {
QReadLocker personalMutedSetLocker{ &_personalMutedSetLock }; // read lock for insert QReadLocker ignoredSetLocker{ &_ignoredSetLock }; // read lock for insert
// add this nodeID to our set of ignored IDs // add this nodeID to our set of ignored IDs
_ignoredNodeIDs.insert(nodeID); _ignoredNodeIDs.insert(nodeID);
// add this nodeID to our set of personal muted IDs }
_personalMutedNodeIDs.insert(nodeID); {
QReadLocker personalMutedSetLocker{ &_personalMutedSetLock }; // read lock for insert
// add this nodeID to our set of personal muted IDs
_personalMutedNodeIDs.insert(nodeID);
}
emit ignoredNode(nodeID, true); emit ignoredNode(nodeID, true);
} else { } else {
QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; // write lock for unsafe_erase {
QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; // write lock for unsafe_erase QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; // write lock for unsafe_erase
_ignoredNodeIDs.unsafe_erase(nodeID); _ignoredNodeIDs.unsafe_erase(nodeID);
_personalMutedNodeIDs.unsafe_erase(nodeID); }
{
QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; // write lock for unsafe_erase
_personalMutedNodeIDs.unsafe_erase(nodeID);
}
emit ignoredNode(nodeID, false); emit ignoredNode(nodeID, false);
} }
@ -850,10 +858,14 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) {
void NodeList::removeFromIgnoreMuteSets(const QUuid& nodeID) { void NodeList::removeFromIgnoreMuteSets(const QUuid& nodeID) {
// don't remove yourself, or nobody // don't remove yourself, or nobody
if (!nodeID.isNull() && _sessionUUID != nodeID) { if (!nodeID.isNull() && _sessionUUID != nodeID) {
QWriteLocker ignoredSetLocker{ &_ignoredSetLock }; {
QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock }; QWriteLocker ignoredSetLocker{ &_ignoredSetLock };
_ignoredNodeIDs.unsafe_erase(nodeID); _ignoredNodeIDs.unsafe_erase(nodeID);
_personalMutedNodeIDs.unsafe_erase(nodeID); }
{
QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock };
_personalMutedNodeIDs.unsafe_erase(nodeID);
}
} }
} }
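Both sites now take the two set locks one at a time instead of nesting them, so no thread ever holds _ignoredSetLock and _personalMutedSetLock together and no lock-ordering discipline is needed between them (nested acquisition deadlocks as soon as two code paths nest in opposite orders). A minimal sketch of the pattern, with std::unordered_set standing in for the concurrent set:

    #include <QReadWriteLock>
    #include <string>
    #include <unordered_set>

    QReadWriteLock lockA, lockB;
    std::unordered_set<std::string> setA, setB;

    void eraseFromBoth(const std::string& id) {
        {
            QWriteLocker lockerA{ &lockA };   // released at the closing brace
            setA.erase(id);
        }
        {
            QWriteLocker lockerB{ &lockB };   // never held together with lockA
            setB.erase(id);
        }
    }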

View file

@ -86,7 +86,7 @@ private:
/// Wrapper to expose resources to JS/QML /// Wrapper to expose resources to JS/QML
class ScriptableResource : public QObject { class ScriptableResource : public QObject {
Q_OBJECT Q_OBJECT
Q_PROPERTY(QUrl url READ getUrl) Q_PROPERTY(QUrl url READ getURL)
Q_PROPERTY(int state READ getState NOTIFY stateChanged) Q_PROPERTY(int state READ getState NOTIFY stateChanged)
/**jsdoc /**jsdoc
@ -125,7 +125,7 @@ public:
*/ */
Q_INVOKABLE void release(); Q_INVOKABLE void release();
const QUrl& getUrl() const { return _url; } const QUrl& getURL() const { return _url; }
int getState() const { return (int)_state; } int getState() const { return (int)_state; }
const QSharedPointer<Resource>& getResource() const { return _resource; } const QSharedPointer<Resource>& getResource() const { return _resource; }

View file

@ -16,6 +16,14 @@ void UserActivityLoggerScriptingInterface::enabledEdit() {
logAction("enabled_edit"); logAction("enabled_edit");
} }
void UserActivityLoggerScriptingInterface::openedTablet() {
logAction("opened_tablet");
}
void UserActivityLoggerScriptingInterface::closedTablet() {
logAction("closed_tablet");
}
void UserActivityLoggerScriptingInterface::openedMarketplace() { void UserActivityLoggerScriptingInterface::openedMarketplace() {
logAction("opened_marketplace"); logAction("opened_marketplace");
} }

View file

@ -21,6 +21,8 @@ class UserActivityLoggerScriptingInterface : public QObject, public Dependency {
Q_OBJECT Q_OBJECT
public: public:
Q_INVOKABLE void enabledEdit(); Q_INVOKABLE void enabledEdit();
Q_INVOKABLE void openedTablet();
Q_INVOKABLE void closedTablet();
Q_INVOKABLE void openedMarketplace(); Q_INVOKABLE void openedMarketplace();
Q_INVOKABLE void toggledAway(bool isAway); Q_INVOKABLE void toggledAway(bool isAway);
Q_INVOKABLE void tutorialProgress(QString stepName, int stepNumber, float secondsToComplete, Q_INVOKABLE void tutorialProgress(QString stepName, int stepNumber, float secondsToComplete,

View file

@ -48,7 +48,8 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::EntityAdd: case PacketType::EntityAdd:
case PacketType::EntityEdit: case PacketType::EntityEdit:
case PacketType::EntityData: case PacketType::EntityData:
return VERSION_ENTITIES_SERVER_SCRIPTS; case PacketType::EntityPhysics:
return VERSION_ENTITIES_PHYSICS_PACKET;
case PacketType::EntityQuery: case PacketType::EntityQuery:
return static_cast<PacketVersion>(EntityQueryPacketVersion::JsonFilter); return static_cast<PacketVersion>(EntityQueryPacketVersion::JsonFilter);
case PacketType::AvatarIdentity: case PacketType::AvatarIdentity:

View file

@ -110,7 +110,8 @@ public:
EntityScriptGetStatus, EntityScriptGetStatus,
EntityScriptGetStatusReply, EntityScriptGetStatusReply,
ReloadEntityServerScript, ReloadEntityServerScript,
LAST_PACKET_TYPE = ReloadEntityServerScript EntityPhysics,
LAST_PACKET_TYPE = EntityPhysics
}; };
}; };
@ -201,6 +202,7 @@ const PacketVersion VERSION_WEB_ENTITIES_SUPPORT_DPI = 63;
const PacketVersion VERSION_ENTITIES_ARROW_ACTION = 64; const PacketVersion VERSION_ENTITIES_ARROW_ACTION = 64;
const PacketVersion VERSION_ENTITIES_LAST_EDITED_BY = 65; const PacketVersion VERSION_ENTITIES_LAST_EDITED_BY = 65;
const PacketVersion VERSION_ENTITIES_SERVER_SCRIPTS = 66; const PacketVersion VERSION_ENTITIES_SERVER_SCRIPTS = 66;
const PacketVersion VERSION_ENTITIES_PHYSICS_PACKET = 67;
enum class EntityQueryPacketVersion: PacketVersion { enum class EntityQueryPacketVersion: PacketVersion {
JsonFilter = 18 JsonFilter = 18

View file

@ -199,15 +199,12 @@ void EntityMotionState::getWorldTransform(btTransform& worldTrans) const {
return; return;
} }
assert(entityTreeIsLocked()); assert(entityTreeIsLocked());
if (_motionType == MOTION_TYPE_KINEMATIC) { if (_motionType == MOTION_TYPE_KINEMATIC && !_entity->hasAncestorOfType(NestableType::Avatar)) {
BT_PROFILE("kinematicIntegration"); BT_PROFILE("kinematicIntegration");
// This is physical kinematic motion which steps strictly by the subframe count // This is physical kinematic motion which steps strictly by the subframe count
// of the physics simulation and uses full gravity for acceleration. // of the physics simulation and uses full gravity for acceleration.
if (_entity->hasAncestorOfType(NestableType::Avatar)) { _entity->setAcceleration(_entity->getGravity());
_entity->setAcceleration(glm::vec3(0.0f));
} else {
_entity->setAcceleration(_entity->getGravity());
}
uint32_t thisStep = ObjectMotionState::getWorldSimulationStep(); uint32_t thisStep = ObjectMotionState::getWorldSimulationStep();
float dt = (thisStep - _lastKinematicStep) * PHYSICS_ENGINE_FIXED_SUBSTEP; float dt = (thisStep - _lastKinematicStep) * PHYSICS_ENGINE_FIXED_SUBSTEP;
_entity->stepKinematicMotion(dt); _entity->stepKinematicMotion(dt);
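Since dt is derived from the substep counter, kinematic integration advances in exact multiples of the fixed physics substep; assuming the engine's usual 90 Hz substep (PHYSICS_ENGINE_FIXED_SUBSTEP = 1/90 s, an assumption not shown in this hunk), a state last stepped three substeps ago integrates with dt = 3 * 1/90 ≈ 33.3 ms, tied to the simulation clock rather than render frame timing.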
@ -614,7 +611,7 @@ void EntityMotionState::sendUpdate(OctreeEditPacketSender* packetSender, uint32_
properties.setClientOnly(_entity->getClientOnly()); properties.setClientOnly(_entity->getClientOnly());
properties.setOwningAvatarID(_entity->getOwningAvatarID()); properties.setOwningAvatarID(_entity->getOwningAvatarID());
entityPacketSender->queueEditEntityMessage(PacketType::EntityEdit, tree, id, properties); entityPacketSender->queueEditEntityMessage(PacketType::EntityPhysics, tree, id, properties);
_entity->setLastBroadcast(now); _entity->setLastBroadcast(now);
// if we've moved an entity with children, check/update the queryAACube of all descendents and tell the server // if we've moved an entity with children, check/update the queryAACube of all descendents and tell the server
@ -630,7 +627,7 @@ void EntityMotionState::sendUpdate(OctreeEditPacketSender* packetSender, uint32_
newQueryCubeProperties.setClientOnly(entityDescendant->getClientOnly()); newQueryCubeProperties.setClientOnly(entityDescendant->getClientOnly());
newQueryCubeProperties.setOwningAvatarID(entityDescendant->getOwningAvatarID()); newQueryCubeProperties.setOwningAvatarID(entityDescendant->getOwningAvatarID());
entityPacketSender->queueEditEntityMessage(PacketType::EntityEdit, tree, entityPacketSender->queueEditEntityMessage(PacketType::EntityPhysics, tree,
descendant->getID(), newQueryCubeProperties); descendant->getID(), newQueryCubeProperties);
entityDescendant->setLastBroadcast(now); entityDescendant->setLastBroadcast(now);
} }

View file

@ -23,8 +23,7 @@ public:
virtual ~Decoder() { } virtual ~Decoder() { }
virtual void decode(const QByteArray& encodedBuffer, QByteArray& decodedBuffer) = 0; virtual void decode(const QByteArray& encodedBuffer, QByteArray& decodedBuffer) = 0;
// numFrames - number of samples (mono) or sample-pairs (stereo) virtual void lostFrame(QByteArray& decodedBuffer) = 0;
virtual void trackLostFrames(int numFrames) = 0;
}; };
class CodecPlugin : public Plugin { class CodecPlugin : public Plugin {
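The interface change turns frame loss from something the codec is told about into something it must repair: the caller hands lostFrame a buffer to fill for each missing frame. A sketch of the receive side under the new API; the function and flag names are illustrative:

    // hypothetical jitter-buffer pop: synthesize audio when a packet never arrived
    void produceFrame(Decoder* decoder, bool packetLost,
                      const QByteArray& encoded, QByteArray& decoded) {
        if (packetLost) {
            decoder->lostFrame(decoded);     // PLC in real codecs; zeros in pass-through
        } else {
            decoder->decode(encoded, decoded);
        }
    }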

View file

@ -26,6 +26,10 @@ glm::mat4 Mat4::createFromScaleRotAndTrans(const glm::vec3& scale, const glm::qu
return createMatFromScaleQuatAndPos(scale, rot, trans); return createMatFromScaleQuatAndPos(scale, rot, trans);
} }
glm::mat4 Mat4::createFromColumns(const glm::vec4& col0, const glm::vec4& col1, const glm::vec4& col2, const glm::vec4& col3) const {
return glm::mat4(col0, col1, col2, col3);
}
glm::vec3 Mat4::extractTranslation(const glm::mat4& m) const { glm::vec3 Mat4::extractTranslation(const glm::mat4& m) const {
return ::extractTranslation(m); return ::extractTranslation(m);
} }

View file

@ -23,8 +23,10 @@ class Mat4 : public QObject {
public slots: public slots:
glm::mat4 multiply(const glm::mat4& m1, const glm::mat4& m2) const; glm::mat4 multiply(const glm::mat4& m1, const glm::mat4& m2) const;
glm::mat4 createFromRotAndTrans(const glm::quat& rot, const glm::vec3& trans) const; glm::mat4 createFromRotAndTrans(const glm::quat& rot, const glm::vec3& trans) const;
glm::mat4 createFromScaleRotAndTrans(const glm::vec3& scale, const glm::quat& rot, const glm::vec3& trans) const; glm::mat4 createFromScaleRotAndTrans(const glm::vec3& scale, const glm::quat& rot, const glm::vec3& trans) const;
glm::mat4 createFromColumns(const glm::vec4& col0, const glm::vec4& col1, const glm::vec4& col2, const glm::vec4& col3) const;
glm::vec3 extractTranslation(const glm::mat4& m) const; glm::vec3 extractTranslation(const glm::mat4& m) const;
glm::quat extractRotation(const glm::mat4& m) const; glm::quat extractRotation(const glm::mat4& m) const;

View file

@ -360,7 +360,7 @@ glm::vec3 AABox::getClosestPointOnFace(const glm::vec3& point, BoxFace face) con
case MIN_Z_FACE: case MIN_Z_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z), return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale.z, _corner.y + _scale.y, _corner.z)); glm::vec3(_corner.x + _scale.x, _corner.y + _scale.y, _corner.z));
default: //quiet windows warnings default: //quiet windows warnings
case MAX_Z_FACE: case MAX_Z_FACE:
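The one-character fix matters for non-cubic boxes: the MIN_Z_FACE clamp was bounding the face's x extent with _corner.x + _scale.z, so whenever a box's x and z dimensions differed, the returned point could land off the true face.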

View file

@ -1034,6 +1034,13 @@ AACube SpatiallyNestable::getQueryAACube() const {
bool SpatiallyNestable::hasAncestorOfType(NestableType nestableType) const { bool SpatiallyNestable::hasAncestorOfType(NestableType nestableType) const {
bool success; bool success;
if (nestableType == NestableType::Avatar) {
QUuid parentID = getParentID();
if (parentID == AVATAR_SELF_ID) {
return true;
}
}
SpatiallyNestablePointer parent = getParentPointer(success); SpatiallyNestablePointer parent = getParentPointer(success);
if (!success || !parent) { if (!success || !parent) {
return false; return false;
@ -1048,6 +1055,14 @@ bool SpatiallyNestable::hasAncestorOfType(NestableType nestableType) const {
const QUuid SpatiallyNestable::findAncestorOfType(NestableType nestableType) const { const QUuid SpatiallyNestable::findAncestorOfType(NestableType nestableType) const {
bool success; bool success;
if (nestableType == NestableType::Avatar) {
QUuid parentID = getParentID();
if (parentID == AVATAR_SELF_ID) {
return AVATAR_SELF_ID; // TODO -- can we put nodeID here?
}
}
SpatiallyNestablePointer parent = getParentPointer(success); SpatiallyNestablePointer parent = getParentPointer(success);
if (!success || !parent) { if (!success || !parent) {
return QUuid(); return QUuid();

View file

@ -65,12 +65,10 @@ public:
AudioDecoder::process((const int16_t*)encodedBuffer.constData(), (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, true); AudioDecoder::process((const int16_t*)encodedBuffer.constData(), (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, true);
} }
virtual void trackLostFrames(int numFrames) override { virtual void lostFrame(QByteArray& decodedBuffer) override {
QByteArray encodedBuffer;
QByteArray decodedBuffer;
decodedBuffer.resize(_decodedSize); decodedBuffer.resize(_decodedSize);
// NOTE: we don't actually use the results of this decode, we just do it to keep the state of the codec clean // this performs packet loss interpolation
AudioDecoder::process((const int16_t*)encodedBuffer.constData(), (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, false); AudioDecoder::process(nullptr, (int16_t*)decodedBuffer.data(), AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL, false);
} }
private: private:
int _decodedSize; int _decodedSize;

View file

@ -38,11 +38,14 @@ public:
virtual void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) override { virtual void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) override {
encodedBuffer = decodedBuffer; encodedBuffer = decodedBuffer;
} }
virtual void decode(const QByteArray& encodedBuffer, QByteArray& decodedBuffer) override { virtual void decode(const QByteArray& encodedBuffer, QByteArray& decodedBuffer) override {
decodedBuffer = encodedBuffer; decodedBuffer = encodedBuffer;
} }
virtual void trackLostFrames(int numFrames) override { } virtual void lostFrame(QByteArray& decodedBuffer) override {
memset(decodedBuffer.data(), 0, decodedBuffer.size());
}
private: private:
static const char* NAME; static const char* NAME;
@ -77,7 +80,9 @@ public:
decodedBuffer = qUncompress(encodedBuffer); decodedBuffer = qUncompress(encodedBuffer);
} }
virtual void trackLostFrames(int numFrames) override { } virtual void lostFrame(QByteArray& decodedBuffer) override {
memset(decodedBuffer.data(), 0, decodedBuffer.size());
}
private: private:
static const char* NAME; static const char* NAME;

View file

@ -16,7 +16,7 @@
var MESSAGE_CHANNEL = "io.highfidelity.summon-crowd"; var MESSAGE_CHANNEL = "io.highfidelity.summon-crowd";
print('crowd-agent version 4'); print('crowd-agent version 5');
/* Observations: /* Observations:
- File urls for AC scripts silently fail. Use a local server (e.g., python SimpleHTTPServer) for development. - File urls for AC scripts silently fail. Use a local server (e.g., python SimpleHTTPServer) for development.
@ -84,6 +84,9 @@ function startAgent(parameters) { // Can also be used to update.
clearStopper(); clearStopper();
var wasOff = !Agent.isAvatar; var wasOff = !Agent.isAvatar;
Agent.isAvatar = true; Agent.isAvatar = true;
if (parameters.displayName !== undefined) {
Avatar.displayName = parameters.displayName;
}
if (parameters.position) { if (parameters.position) {
Avatar.position = parameters.position; Avatar.position = parameters.position;
} }

View file

@ -13,7 +13,7 @@
// //
// See crowd-agent.js // See crowd-agent.js
var version = 2; var version = 3;
var label = "summon"; var label = "summon";
function debug() { function debug() {
print.apply(null, [].concat.apply([label, version], [].map.call(arguments, JSON.stringify))); print.apply(null, [].concat.apply([label, version], [].map.call(arguments, JSON.stringify)));
@ -23,6 +23,9 @@ var MINIMUM_AVATARS = 25; // We will summon agents to produce this many total. (
var N_LISTENING = MINIMUM_AVATARS - 1; var N_LISTENING = MINIMUM_AVATARS - 1;
var AVATARS_CHATTERING_AT_ONCE = 4; // How many of the agents should we request to play SOUND_DATA at once. var AVATARS_CHATTERING_AT_ONCE = 4; // How many of the agents should we request to play SOUND_DATA at once.
var initialBubble = Users.getIgnoreRadiusEnabled();
debug('startup seeking:', MINIMUM_AVATARS, 'listening:', N_LISTENING, 'chattering:', AVATARS_CHATTERING_AT_ONCE, 'had bubble:', initialBubble);
// If we add or remove things too quickly, we get problems (e.g., audio, fogbugz 2095). // If we add or remove things too quickly, we get problems (e.g., audio, fogbugz 2095).
// For now, spread them out this timing apart. // For now, spread them out this timing apart.
var SPREAD_TIME_MS = 500; var SPREAD_TIME_MS = 500;
@ -66,7 +69,7 @@ function messageHandler(channel, messageString, senderID) {
if (MyAvatar.sessionUUID === senderID) { // ignore my own if (MyAvatar.sessionUUID === senderID) { // ignore my own
return; return;
} }
var message = {}, avatarIdentifiers; var message = {};
try { try {
message = JSON.parse(messageString); message = JSON.parse(messageString);
} catch (e) { } catch (e) {
@ -76,9 +79,10 @@ function messageHandler(channel, messageString, senderID) {
case "hello": case "hello":
Script.setTimeout(function () { Script.setTimeout(function () {
// There can be avatars we've summoned that do not yet appear in the AvatarList. // There can be avatars we've summoned that do not yet appear in the AvatarList.
avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents); var avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents);
var nSummoned = summonedAgents.length;
debug('present', avatarIdentifiers, summonedAgents); debug('present', avatarIdentifiers, summonedAgents);
if ((summonedAgents.length + avatarIdentifiers.length) < MINIMUM_AVATARS ) { if ((nSummoned + avatarIdentifiers.length) < MINIMUM_AVATARS ) {
var chatter = chattering.length < AVATARS_CHATTERING_AT_ONCE; var chatter = chattering.length < AVATARS_CHATTERING_AT_ONCE;
var listen = nListening < N_LISTENING; var listen = nListening < N_LISTENING;
if (chatter) { if (chatter) {
@ -91,6 +95,7 @@ function messageHandler(channel, messageString, senderID) {
messageSend({ messageSend({
key: 'SUMMON', key: 'SUMMON',
rcpt: senderID, rcpt: senderID,
displayName: "crowd " + nSummoned + " " + senderID,
position: Vec3.sum(MyAvatar.position, {x: coord(), y: 0, z: coord()}), position: Vec3.sum(MyAvatar.position, {x: coord(), y: 0, z: coord()}),
orientation: Quat.fromPitchYawRollDegrees(0, Quat.safeEulerAngles(MyAvatar.orientation).y + (turnSpread * (Math.random() - 0.5)), 0), orientation: Quat.fromPitchYawRollDegrees(0, Quat.safeEulerAngles(MyAvatar.orientation).y + (turnSpread * (Math.random() - 0.5)), 0),
soundData: chatter && SOUND_DATA, soundData: chatter && SOUND_DATA,
@ -100,7 +105,7 @@ function messageHandler(channel, messageString, senderID) {
}); });
} }
}, accumulatedDelay); }, accumulatedDelay);
accumulatedDelay += SPREAD_TIME_MS; // assume we'll get all the hello respsponses more or less together. accumulatedDelay += SPREAD_TIME_MS; // assume we'll get all the hello responses more or less together.
break; break;
case "finishedSound": // Give someone else a chance. case "finishedSound": // Give someone else a chance.
chattering = without(chattering, [senderID]); chattering = without(chattering, [senderID]);
@ -123,6 +128,8 @@ Messages.subscribe(MESSAGE_CHANNEL);
Messages.messageReceived.connect(messageHandler); Messages.messageReceived.connect(messageHandler);
Script.scriptEnding.connect(function () { Script.scriptEnding.connect(function () {
debug('stopping agents', summonedAgents); debug('stopping agents', summonedAgents);
Users.requestsDomainListData = false;
if (initialBubble && !Users.getIgnoreRadiusEnabled()) { Users.toggleIgnoreRadius(); }
Messages.messageReceived.disconnect(messageHandler); // don't respond to any messages during shutdown Messages.messageReceived.disconnect(messageHandler); // don't respond to any messages during shutdown
accumulatedDelay = 0; accumulatedDelay = 0;
summonedAgents.forEach(function (id) { summonedAgents.forEach(function (id) {
@ -134,14 +141,17 @@ Script.scriptEnding.connect(function () {
debug('unsubscribed'); debug('unsubscribed');
}); });
Users.requestsDomainListData = true; // Get avatar data for the whole domain, even if not in our view.
if (initialBubble) { Users.toggleIgnoreRadius(); }
messageSend({key: 'HELO'}); // Ask agents to report in now. messageSend({key: 'HELO'}); // Ask agents to report in now.
Script.setTimeout(function () { Script.setTimeout(function () {
var total = AvatarList.getAvatarIdentifiers().length; var total = AvatarList.getAvatarIdentifiers().length;
if (0 === summonedAgents.length) { if (0 === summonedAgents.length) {
Window.alert("No agents reported.\n\Please run " + MINIMUM_AVATARS + " instances of\n\ Window.alert("No agents reported.\n\Please run " + MINIMUM_AVATARS + " instances of\n\
http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js\n\ http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js?v=someDate\n\
on your domain server."); on your domain server.");
} else if (total < MINIMUM_AVATARS) { } else if (total < MINIMUM_AVATARS) {
Window.alert("Only " + summonedAgents.length + " agents reported. Now missing " + (MINIMUM_AVATARS - total) + " avatars, total."); Window.alert("Only " + summonedAgents.length + " agents reported. Now missing " + (MINIMUM_AVATARS - total) + " avatars, total.");
} }
Users.requestsDomainListData = false;
}, MINIMUM_AVATARS * SPREAD_TIME_MS ) }, MINIMUM_AVATARS * SPREAD_TIME_MS )

View file

@ -53,6 +53,13 @@ var HAPTIC_TEXTURE_DISTANCE = 0.002;
var HAPTIC_DEQUIP_STRENGTH = 0.75; var HAPTIC_DEQUIP_STRENGTH = 0.75;
var HAPTIC_DEQUIP_DURATION = 50.0; var HAPTIC_DEQUIP_DURATION = 50.0;
// triggered when stylus presses a web overlay/entity
var HAPTIC_STYLUS_STRENGTH = 1.0;
var HAPTIC_STYLUS_DURATION = 20.0;
// triggered when a ui laser presses a web overlay/entity
var HAPTIC_LASER_UI_STRENGTH = 1.0;
var HAPTIC_LASER_UI_DURATION = 20.0;
var HAND_HEAD_MIX_RATIO = 0.0; // 0 = only use hands for search/move. 1 = only use head for search/move. var HAND_HEAD_MIX_RATIO = 0.0; // 0 = only use hands for search/move. 1 = only use head for search/move.
@ -122,7 +129,6 @@ var GRAB_POINT_SPHERE_RADIUS = NEAR_GRAB_RADIUS;
var GRAB_POINT_SPHERE_COLOR = { red: 240, green: 240, blue: 240 }; var GRAB_POINT_SPHERE_COLOR = { red: 240, green: 240, blue: 240 };
var GRAB_POINT_SPHERE_ALPHA = 0.85; var GRAB_POINT_SPHERE_ALPHA = 0.85;
// //
// other constants // other constants
// //
@ -1248,7 +1254,7 @@ function MyController(hand) {
if (homeButton === hmdHomeButton) { if (homeButton === hmdHomeButton) {
if (this.homeButtonTouched === false) { if (this.homeButtonTouched === false) {
this.homeButtonTouched = true; this.homeButtonTouched = true;
Controller.triggerHapticPulse(1, 20, this.hand); Controller.triggerHapticPulse(HAPTIC_STYLUS_STRENGTH, HAPTIC_STYLUS_DURATION, this.hand);
Messages.sendLocalMessage("home", homeButton); Messages.sendLocalMessage("home", homeButton);
} }
} else { } else {
@ -1266,7 +1272,7 @@ function MyController(hand) {
if (homeButton === hmdHomeButton) { if (homeButton === hmdHomeButton) {
if (this.homeButtonTouched === false) { if (this.homeButtonTouched === false) {
this.homeButtonTouched = true; this.homeButtonTouched = true;
Controller.triggerHapticPulse(1, 20, this.hand); Controller.triggerHapticPulse(HAPTIC_LASER_UI_STRENGTH, HAPTIC_LASER_UI_DURATION, this.hand);
Messages.sendLocalMessage("home", homeButton); Messages.sendLocalMessage("home", homeButton);
} }
} else { } else {
@ -1754,7 +1760,6 @@ function MyController(hand) {
Entities.sendHoverOverEntity(entity, pointerEvent); Entities.sendHoverOverEntity(entity, pointerEvent);
} }
this.grabbedEntity = entity; this.grabbedEntity = entity;
this.setState(STATE_ENTITY_STYLUS_TOUCHING, "begin touching entity '" + name + "'"); this.setState(STATE_ENTITY_STYLUS_TOUCHING, "begin touching entity '" + name + "'");
return true; return true;
@ -1775,11 +1780,6 @@ function MyController(hand) {
var pointerEvent; var pointerEvent;
if (rayPickInfo.overlayID) { if (rayPickInfo.overlayID) {
var overlay = rayPickInfo.overlayID; var overlay = rayPickInfo.overlayID;
if (!this.homeButtonTouched) {
Controller.triggerHapticPulse(1, 20, this.hand);
}
if (Overlays.keyboardFocusOverlay != overlay) { if (Overlays.keyboardFocusOverlay != overlay) {
Entities.keyboardFocusEntity = null; Entities.keyboardFocusEntity = null;
Overlays.keyboardFocusOverlay = overlay; Overlays.keyboardFocusOverlay = overlay;
@ -2710,6 +2710,12 @@ function MyController(hand) {
var theta = this.state === STATE_ENTITY_STYLUS_TOUCHING ? STYLUS_PRESS_TO_MOVE_DEADSPOT_ANGLE : LASER_PRESS_TO_MOVE_DEADSPOT_ANGLE; var theta = this.state === STATE_ENTITY_STYLUS_TOUCHING ? STYLUS_PRESS_TO_MOVE_DEADSPOT_ANGLE : LASER_PRESS_TO_MOVE_DEADSPOT_ANGLE;
this.deadspotRadius = Math.tan(theta) * intersectInfo.distance; // dead spot radius in meters this.deadspotRadius = Math.tan(theta) * intersectInfo.distance; // dead spot radius in meters
} }
if (this.state == STATE_ENTITY_STYLUS_TOUCHING) {
Controller.triggerHapticPulse(HAPTIC_STYLUS_STRENGTH, HAPTIC_STYLUS_DURATION, this.hand);
} else if (this.state == STATE_ENTITY_LASER_TOUCHING) {
Controller.triggerHapticPulse(HAPTIC_LASER_UI_STRENGTH, HAPTIC_LASER_UI_DURATION, this.hand);
}
}; };
this.entityTouchingExit = function() { this.entityTouchingExit = function() {
@ -2829,6 +2835,12 @@ function MyController(hand) {
var theta = this.state === STATE_OVERLAY_STYLUS_TOUCHING ? STYLUS_PRESS_TO_MOVE_DEADSPOT_ANGLE : LASER_PRESS_TO_MOVE_DEADSPOT_ANGLE; var theta = this.state === STATE_OVERLAY_STYLUS_TOUCHING ? STYLUS_PRESS_TO_MOVE_DEADSPOT_ANGLE : LASER_PRESS_TO_MOVE_DEADSPOT_ANGLE;
this.deadspotRadius = Math.tan(theta) * intersectInfo.distance; // dead spot radius in meters this.deadspotRadius = Math.tan(theta) * intersectInfo.distance; // dead spot radius in meters
} }
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING) {
Controller.triggerHapticPulse(HAPTIC_STYLUS_STRENGTH, HAPTIC_STYLUS_DURATION, this.hand);
} else if (this.state == STATE_OVERLAY_LASER_TOUCHING) {
Controller.triggerHapticPulse(HAPTIC_LASER_UI_STRENGTH, HAPTIC_LASER_UI_DURATION, this.hand);
}
}; };
this.overlayTouchingExit = function () { this.overlayTouchingExit = function () {
@ -2882,7 +2894,6 @@ function MyController(hand) {
this.touchingEnterTimer += dt; this.touchingEnterTimer += dt;
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING && this.triggerSmoothedSqueezed()) { if (this.state == STATE_OVERLAY_STYLUS_TOUCHING && this.triggerSmoothedSqueezed()) {
this.setState(STATE_OFF, "trigger squeezed");
return; return;
} }

View file

@ -8,7 +8,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// //
/* global getControllerWorldLocation, setEntityCustomData, Tablet, WebTablet:true, HMD, Settings, Script, /* global getControllerWorldLocation, setEntityCustomData, Tablet, WebTablet:true, HMD, Settings, Script,
Vec3, Quat, MyAvatar, Entities, Overlays, Camera, Messages, Xform */ Vec3, Quat, MyAvatar, Entities, Overlays, Camera, Messages, Xform, clamp */
Script.include(Script.resolvePath("../libraries/utils.js")); Script.include(Script.resolvePath("../libraries/utils.js"));
Script.include(Script.resolvePath("../libraries/controllers.js")); Script.include(Script.resolvePath("../libraries/controllers.js"));
@ -118,7 +118,7 @@ WebTablet = function (url, width, dpi, hand, clientOnly) {
}; };
// compute position, rotation & parentJointIndex of the tablet // compute position, rotation & parentJointIndex of the tablet
this.calculateTabletAttachmentProperties(hand, tabletProperties); this.calculateTabletAttachmentProperties(hand, true, tabletProperties);
this.cleanUpOldTablets(); this.cleanUpOldTablets();
this.tabletEntityID = Entities.addEntity(tabletProperties, clientOnly); this.tabletEntityID = Entities.addEntity(tabletProperties, clientOnly);
@ -252,31 +252,78 @@ WebTablet.prototype.destroy = function () {
WebTablet.prototype.geometryChanged = function (geometry) { WebTablet.prototype.geometryChanged = function (geometry) {
if (!HMD.active) { if (!HMD.active) {
var tabletProperties = {}; var tabletProperties = {};
// compute position, rotation & parentJointIndex of the tablet // compute position, rotation & parentJointIndex of the tablet
this.calculateTabletAttachmentProperties(NO_HANDS, tabletProperties); this.calculateTabletAttachmentProperties(NO_HANDS, false, tabletProperties);
Entities.editEntity(this.tabletEntityID, tabletProperties); Entities.editEntity(this.tabletEntityID, tabletProperties);
} }
}; };
function gluPerspective(fovy, aspect, zNear, zFar) {
var cotan = 1 / Math.tan(fovy / 2);
var alpha = -(zFar + zNear) / (zFar - zNear);
var beta = -(2 * zFar * zNear) / (zFar - zNear);
var col0 = {x: cotan / aspect, y: 0, z: 0, w: 0};
var col1 = {x: 0, y: cotan, z: 0, w: 0};
var col2 = {x: 0, y: 0, z: alpha, w: -1};
var col3 = {x: 0, y: 0, z: beta, w: 0};
return Mat4.createFromColumns(col0, col1, col2, col3);
}
// calculate the appropriate position of the tablet in world space, such that it fits in the center of the screen. // calculate the appropriate position of the tablet in world space, such that it fits in the center of the screen.
// with a bit of padding on the top and bottom. // with a bit of padding on the top and bottom.
WebTablet.prototype.calculateWorldAttitudeRelativeToCamera = function () { // windowPos positions the center of the tablet at the given window coordinates.
WebTablet.prototype.calculateWorldAttitudeRelativeToCamera = function (windowPos) {
var DEFAULT_DESKTOP_TABLET_SCALE = 75; var DEFAULT_DESKTOP_TABLET_SCALE = 75;
var DESKTOP_TABLET_SCALE = Settings.getValue("desktopTabletScale") || DEFAULT_DESKTOP_TABLET_SCALE; var DESKTOP_TABLET_SCALE = Settings.getValue("desktopTabletScale") || DEFAULT_DESKTOP_TABLET_SCALE;
// clamp window pos so 2d tablet is not off-screen.
var TABLET_TEXEL_PADDING = {x: 60, y: 90};
var X_CLAMP = (DESKTOP_TABLET_SCALE / 100) * ((TABLET_TEXTURE_RESOLUTION.x / 2) + TABLET_TEXEL_PADDING.x);
var Y_CLAMP = (DESKTOP_TABLET_SCALE / 100) * ((TABLET_TEXTURE_RESOLUTION.y / 2) + TABLET_TEXEL_PADDING.y);
windowPos.x = clamp(windowPos.x, X_CLAMP, Window.innerWidth - X_CLAMP);
windowPos.y = clamp(windowPos.y, Y_CLAMP, Window.innerHeight - Y_CLAMP);
var fov = (Settings.getValue('fieldOfView') || DEFAULT_VERTICAL_FIELD_OF_VIEW) * (Math.PI / 180); var fov = (Settings.getValue('fieldOfView') || DEFAULT_VERTICAL_FIELD_OF_VIEW) * (Math.PI / 180);
var MAX_PADDING_FACTOR = 2.2; var MAX_PADDING_FACTOR = 2.2;
var PADDING_FACTOR = Math.min(Window.innerHeight / TABLET_TEXTURE_RESOLUTION.y, MAX_PADDING_FACTOR); var PADDING_FACTOR = Math.min(Window.innerHeight / TABLET_TEXTURE_RESOLUTION.y, MAX_PADDING_FACTOR);
var TABLET_HEIGHT = (TABLET_TEXTURE_RESOLUTION.y / this.dpi) * INCHES_TO_METERS; var TABLET_HEIGHT = (TABLET_TEXTURE_RESOLUTION.y / this.dpi) * INCHES_TO_METERS;
var WEB_ENTITY_Z_OFFSET = (this.depth / 2); var WEB_ENTITY_Z_OFFSET = (this.depth / 2);
// calculate distance from camera
var dist = (PADDING_FACTOR * TABLET_HEIGHT) / (2 * Math.tan(fov / 2) * (DESKTOP_TABLET_SCALE / 100)) - WEB_ENTITY_Z_OFFSET; var dist = (PADDING_FACTOR * TABLET_HEIGHT) / (2 * Math.tan(fov / 2) * (DESKTOP_TABLET_SCALE / 100)) - WEB_ENTITY_Z_OFFSET;
var Z_NEAR = 0.01;
var Z_FAR = 100.0;
// calculate mouse position in clip space
var alpha = -(Z_FAR + Z_NEAR) / (Z_FAR - Z_NEAR);
var beta = -(2 * Z_FAR * Z_NEAR) / (Z_FAR - Z_NEAR);
var clipZ = (beta / dist) - alpha;
var clipMousePosition = {x: (2 * windowPos.x / Window.innerWidth) - 1,
y: (2 * ((Window.innerHeight - windowPos.y) / Window.innerHeight)) - 1,
z: clipZ};
// calculate projection matrix
var aspect = Window.innerWidth / Window.innerHeight;
var projMatrix = gluPerspective(fov, aspect, Z_NEAR, Z_FAR);
// transform mouse clip position into view coordinates.
var viewMousePosition = Mat4.transformPoint(Mat4.inverse(projMatrix), clipMousePosition);
// transform view mouse position into world coordinates.
var viewToWorldMatrix = Mat4.createFromRotAndTrans(Camera.orientation, Camera.position);
var worldMousePosition = Mat4.transformPoint(viewToWorldMatrix, viewMousePosition);
return { return {
position: Vec3.sum(Camera.position, Vec3.multiply(dist, Quat.getFront(Camera.orientation))), position: worldMousePosition,
rotation: Quat.multiply(Camera.orientation, ROT_Y_180) rotation: Quat.multiply(Camera.orientation, ROT_Y_180)
}; };
}; };
// compute position, rotation & parentJointIndex of the tablet // compute position, rotation & parentJointIndex of the tablet
WebTablet.prototype.calculateTabletAttachmentProperties = function (hand, tabletProperties) { WebTablet.prototype.calculateTabletAttachmentProperties = function (hand, useMouse, tabletProperties) {
if (HMD.active) { if (HMD.active) {
// in HMD mode, the tablet should be relative to the sensor to world matrix. // in HMD mode, the tablet should be relative to the sensor to world matrix.
tabletProperties.parentJointIndex = SENSOR_TO_ROOM_MATRIX; tabletProperties.parentJointIndex = SENSOR_TO_ROOM_MATRIX;
@ -289,8 +336,16 @@ WebTablet.prototype.calculateTabletAttachmentProperties = function (hand, tablet
// in desktop mode, the tablet should be relative to the camera // in desktop mode, the tablet should be relative to the camera
tabletProperties.parentJointIndex = CAMERA_MATRIX; tabletProperties.parentJointIndex = CAMERA_MATRIX;
// compute the appropriate postion of the tablet such that it fits in the center of the screen nicely. var windowPos;
var attitude = this.calculateWorldAttitudeRelativeToCamera(); if (useMouse) {
// compute the appropriate position of the tablet such that it fits in the center of the screen nicely.
windowPos = {x: Controller.getValue(Controller.Hardware.Keyboard.MouseX),
y: Controller.getValue(Controller.Hardware.Keyboard.MouseY)};
} else {
windowPos = {x: Window.innerWidth / 2,
y: Window.innerHeight / 2};
}
var attitude = this.calculateWorldAttitudeRelativeToCamera(windowPos);
tabletProperties.position = attitude.position; tabletProperties.position = attitude.position;
tabletProperties.rotation = attitude.rotation; tabletProperties.rotation = attitude.rotation;
} }
@ -310,7 +365,7 @@ WebTablet.prototype.onHmdChanged = function () {
var tabletProperties = {}; var tabletProperties = {};
// compute position, rotation & parentJointIndex of the tablet // compute position, rotation & parentJointIndex of the tablet
this.calculateTabletAttachmentProperties(NO_HANDS, tabletProperties); this.calculateTabletAttachmentProperties(NO_HANDS, false, tabletProperties);
Entities.editEntity(this.tabletEntityID, tabletProperties); Entities.editEntity(this.tabletEntityID, tabletProperties);
// Full scene FXAA should be disabled on the overlay when the tablet in desktop mode. // Full scene FXAA should be disabled on the overlay when the tablet in desktop mode.
@ -398,7 +453,7 @@ WebTablet.prototype.cameraModeChanged = function (newMode) {
var self = this; var self = this;
var tabletProperties = {}; var tabletProperties = {};
// compute position, rotation & parentJointIndex of the tablet // compute position, rotation & parentJointIndex of the tablet
self.calculateTabletAttachmentProperties(NO_HANDS, tabletProperties); self.calculateTabletAttachmentProperties(NO_HANDS, false, tabletProperties);
Entities.editEntity(self.tabletEntityID, tabletProperties); Entities.editEntity(self.tabletEntityID, tabletProperties);
} }
}; };
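For reference, the unprojection above is just the inverse of the matrix gluPerspective builds. With \alpha = -(z_f + z_n)/(z_f - z_n) and \beta = -2 z_f z_n/(z_f - z_n), a view-space point at depth z = -d gets clip coordinates z_clip = \alpha z + \beta and w_clip = -z = d, so its normalized depth is

    z_{ndc} = \frac{\alpha(-d) + \beta}{d} = \frac{\beta}{d} - \alpha,

which is exactly the clipZ the script computes from dist; running the mouse's NDC coordinates through Mat4.inverse(projMatrix) and then the camera's rotation/translation yields the world point under the cursor at the tablet's distance.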

scripts/system/nameTag.js (new file, 112 lines)
View file

@ -0,0 +1,112 @@
"use strict";
/*jslint vars: true, plusplus: true*/
/*global Entities, Script, Quat, Vec3, MyAvatar, print*/
// nameTag.js
//
// Created by Triplelexx on 17/01/31
// Copyright 2017 High Fidelity, Inc.
//
// Running the script creates a text entity that will hover over the user's head showing their display name.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
const CLIENTONLY = false;
const NULL_UUID = "{00000000-0000-0000-0000-000000000000}";
const ENTITY_CHECK_INTERVAL = 5000; // ms = 5 seconds
const STARTUP_DELAY = 2000; // ms = 2 seconds
const OLD_AGE = 3500; // we recreate the entity if older than this time in seconds
const TTL = 2; // time to live in seconds if script is not running
const HEIGHT_ABOVE_HEAD = 0.2;
const HEAD_OFFSET = -0.025;
const SIZE_Y = 0.075;
const LETTER_OFFSET = 0.03; // arbitrary value to dynamically change width, could be more accurate by detecting characters
const LINE_HEIGHT = 0.05;
var nameTagEntityID = NULL_UUID;
var lastCheckForEntity = 0;
// create the name tag entity after a brief delay
Script.setTimeout(function() {
addNameTag();
}, STARTUP_DELAY);
function addNameTag() {
var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getFront(MyAvatar.orientation)));
nameTagPosition.y += HEIGHT_ABOVE_HEAD;
var nameTagProperties = {
name: MyAvatar.displayName + ' Name Tag',
type: 'Text',
text: MyAvatar.displayName,
lineHeight: LINE_HEIGHT,
parentID: MyAvatar.sessionUUID,
dimensions: dimensionsFromName(),
position: nameTagPosition
}
nameTagEntityID = Entities.addEntity(nameTagProperties, CLIENTONLY);
}
function updateNameTag() {
var nameTagProps = Entities.getEntityProperties(nameTagEntityID);
var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getFront(MyAvatar.orientation)));
nameTagPosition.y += HEIGHT_ABOVE_HEAD;
Entities.editEntity(nameTagEntityID, {
position: nameTagPosition,
dimensions: dimensionsFromName(),
// lifetime is in seconds; we add TTL on top of the next poll time
lifetime: Math.round(nameTagProps.age) + (ENTITY_CHECK_INTERVAL / 1000) + TTL,
text: MyAvatar.displayName
});
};
function deleteNameTag() {
if(nameTagEntityID !== NULL_UUID) {
Entities.deleteEntity(nameTagEntityID);
nameTagEntityID = NULL_UUID;
}
}
function dimensionsFromName() {
return {
x: LETTER_OFFSET * MyAvatar.displayName.length,
y: SIZE_Y,
z: 0.0
}
};
// cleanup on ending
Script.scriptEnding.connect(cleanup);
function cleanup() {
deleteNameTag();
}
Script.update.connect(update);
function update() {
// if there is no entity, return
if(nameTagEntityID == NULL_UUID) {
return;
}
if(Date.now() - lastCheckForEntity > ENTITY_CHECK_INTERVAL) {
checkForEntity();
lastCheckForEntity = Date.now();
}
}
function checkForEntity() {
var nameTagProps = Entities.getEntityProperties(nameTagEntityID);
// it is possible for the age to not be a valid number, we check for this and return accordingly
if(nameTagProps.age == -1) {
return;
}
// it's too old or we receive undefined make a new one, otherwise update
if(nameTagProps.age > OLD_AGE || nameTagProps.age == undefined) {
deleteNameTag();
addNameTag();
} else {
updateNameTag();
}
}
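The poll-and-extend idiom above generalizes to any script-owned entity that should disappear if its script stops running. A minimal sketch of the reusable pattern, using the same Entities calls as the script (the keepAlive helper name and its parameters are hypothetical, not part of the script):

function keepAlive(entityID, pollIntervalMs, graceSeconds) {
    var props = Entities.getEntityProperties(entityID);
    if (props.age === undefined || props.age === -1) {
        return false; // the entity is gone; the caller should recreate it
    }
    // re-lease the entity until just past the expected next poll
    Entities.editEntity(entityID, {
        lifetime: Math.round(props.age) + (pollIntervalMs / 1000) + graceSeconds
    });
    return true;
}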
scripts/system/pal.js
@ -11,6 +11,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
+(function() { // BEGIN LOCAL_SCOPE
// hardcoding these as it appears we cannot traverse the originalTextures in overlays??? Maybe I've missed
// something, will revisit as this is sorta horrible.
const UNSELECTED_TEXTURES = {"idle-D": Script.resolvePath("./assets/models/Avatar-Overlay-v1.fbx/Avatar-Overlay-v1.fbm/avatar-overlay-idle.png"),
@ -27,7 +29,7 @@ const UNSELECTED_COLOR = { red: 0x1F, green: 0xC6, blue: 0xA6};
const SELECTED_COLOR = {red: 0xF3, green: 0x91, blue: 0x29};
const HOVER_COLOR = {red: 0xD0, green: 0xD0, blue: 0xD0}; // almost white for now
-(function() { // BEGIN LOCAL_SCOPE
+var conserveResources = true;
Script.include("/~/system/libraries/controllers.js");
@ -265,15 +267,16 @@ pal.fromQml.connect(function (message) { // messages are {method, params}, like
function addAvatarNode(id) {
    var selected = ExtendedOverlay.isSelected(id);
    return new ExtendedOverlay(id, "sphere", {
        drawInFront: true,
        solid: true,
        alpha: 0.8,
        color: color(selected, false, 0.0),
-       ignoreRayIntersection: false}, selected, true);
+       ignoreRayIntersection: false}, selected, !conserveResources);
}
function populateUserList(selectData) {
-   var data = [];
-   AvatarList.getAvatarIdentifiers().sort().forEach(function (id) { // sorting the identifiers is just an aid for debugging
+   var data = [], avatars = AvatarList.getAvatarIdentifiers();
+   conserveResources = avatars.length > 20;
+   avatars.forEach(function (id) { // sorting the identifiers is just an aid for debugging
        var avatar = AvatarList.getAvatar(id);
        var avatarPalDatum = {
            displayName: avatar.sessionDisplayName,
@ -498,6 +501,9 @@ if (Settings.getValue("HUDUIEnabled")) {
});
}
var isWired = false;
+var audioTimer;
+var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too)
+var AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS = 300;
function off() {
    if (isWired) { // It is not ok to disconnect these twice, hence guard.
        Script.update.disconnect(updateOverlays);
@ -505,6 +511,7 @@ function off() {
        Controller.mouseMoveEvent.disconnect(handleMouseMoveEvent);
        isWired = false;
    }
+   if (audioTimer) { Script.clearInterval(audioTimer); }
    triggerMapping.disable(); // It's ok if we disable twice.
    triggerPressMapping.disable(); // see above
    removeOverlays();
@ -521,7 +528,7 @@ function onClicked() {
        Controller.mouseMoveEvent.connect(handleMouseMoveEvent);
        triggerMapping.enable();
        triggerPressMapping.enable();
-       createAudioInterval();
+       audioTimer = createAudioInterval(conserveResources ? AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS : AUDIO_LEVEL_UPDATE_INTERVAL_MS);
    } else {
        off();
    }
@ -557,9 +564,7 @@ var AVERAGING_RATIO = 0.05;
var LOUDNESS_FLOOR = 11.0;
var LOUDNESS_SCALE = 2.8 / 5.0;
var LOG2 = Math.log(2.0);
-var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too)
var myData = {}; // we're not included in ExtendedOverlay.get.
-var audioInterval;
function getAudioLevel(id) {
    // the VU meter should work similarly to the one in AvatarInputs: log scale, exponentially averaged
@ -591,21 +596,19 @@ function getAudioLevel(id) {
    return audioLevel;
}
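The getAudioLevel body is elided by the diff; per the comment it mirrors the AvatarInputs meter using the constants above. A simplified sketch of that shaping, assuming an exponentially averaged per-avatar accumulator (the field and variable names here are assumptions, and the script's exact floor handling may differ):

function shapeAudioLevel(data, rawLoudness) {
    // exponential moving average of the avatar's raw loudness
    data.accumulatedLevel = AVERAGING_RATIO * (data.accumulatedLevel || 0) + (1 - AVERAGING_RATIO) * rawLoudness;
    // +1 so the log never hits -infinity; divide by ln(2) to get log base 2
    var logLevel = Math.log(data.accumulatedLevel + 1) / LOG2;
    // shift past the loudness floor, rescale, and clamp to [0, 1]
    var audioLevel = (logLevel - LOUDNESS_FLOOR) * LOUDNESS_SCALE;
    return Math.min(Math.max(audioLevel, 0.0), 1.0);
}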
-function createAudioInterval() {
+function createAudioInterval(interval) {
    // we will update the audioLevels periodically
    // TODO: tune for efficiency - especially with large numbers of avatars
    return Script.setInterval(function () {
-       if (pal.visible) {
-           var param = {};
-           AvatarList.getAvatarIdentifiers().forEach(function (id) {
-               var level = getAudioLevel(id);
-               // qml didn't like an object with null/empty string for a key, so...
-               var userId = id || 0;
-               param[userId] = level;
-           });
-           pal.sendToQml({method: 'updateAudioLevel', params: param});
-       }
-   }, AUDIO_LEVEL_UPDATE_INTERVAL_MS);
+       var param = {};
+       AvatarList.getAvatarIdentifiers().forEach(function (id) {
+           var level = getAudioLevel(id);
+           // qml didn't like an object with null/empty string for a key, so...
+           var userId = id || 0;
+           param[userId] = level;
+       });
+       pal.sendToQml({method: 'updateAudioLevel', params: param});
+   }, interval);
}
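Read together, these changes adapt the audio polling rate to crowd size instead of a fixed 10 Hz: populateUserList() sets conserveResources, onClicked() picks the interval, and off() tears the timer down. Consolidated here for illustration only, with the values from this diff:

var conserveResources = AvatarList.getAvatarIdentifiers().length > 20;
// 300 ms between updates in crowds, 100 ms otherwise
audioTimer = createAudioInterval(conserveResources ? AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS
                                                   : AUDIO_LEVEL_UPDATE_INTERVAL_MS);
// ... and when the PAL closes:
if (audioTimer) {
    Script.clearInterval(audioTimer);
}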
function avatarDisconnected(nodeID) {
scripts/system/tablet-ui/tabletUI.js
@ -12,7 +12,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
-/* global Script, HMD, WebTablet, UIWebTablet */
+/* global Script, HMD, WebTablet, UIWebTablet, UserActivityLogger, Settings, Entities, Messages, Tablet, Overlays, MyAvatar */
(function() { // BEGIN LOCAL_SCOPE
var tabletShown = false;
@ -65,8 +65,10 @@
        hideTabletUI();
        HMD.closeTablet();
    } else if (HMD.showTablet && !tabletShown) {
+       UserActivityLogger.openedTablet();
        showTabletUI();
    } else if (!HMD.showTablet && tabletShown) {
+       UserActivityLogger.closedTablet();
        hideTabletUI();
    }
}
@ -86,7 +88,6 @@
var accumulatedLevel = 0.0;
// Note: Might have to tweak the following two based on the rate we're getting the data
var AVERAGING_RATIO = 0.05;
-var MIC_LEVEL_UPDATE_INTERVAL_MS = 100;
// Calculate microphone level with the same scaling equation (log scale, exponentially averaged) as in AvatarInputs and pal.js
function getMicLevel() {
scripts/system/users.js
@ -12,6 +12,7 @@
(function() { // BEGIN LOCAL_SCOPE
var USERS_URL = "https://hifi-content.s3.amazonaws.com/faye/tablet-dev/users.html";
+var HOME_BUTTON_TEXTURE = Script.resourcesPath() + "meshes/tablet-with-home-button.fbx/tablet-with-home-button.fbm/button-root.png";
var FRIENDS_WINDOW_URL = "https://metaverse.highfidelity.com/user/friends";
var FRIENDS_WINDOW_WIDTH = 290;
@ -40,6 +41,10 @@
});
function onClicked() {
+   var tabletEntity = HMD.tabletID;
+   if (tabletEntity) {
+       Entities.editEntity(tabletEntity, {textures: JSON.stringify({"tex.close" : HOME_BUTTON_TEXTURE})});
+   }
    tablet.gotoWebScreen(USERS_URL);
}
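For context, an entity's textures property takes a JSON-encoded map from texture-slot name to image URL, so a single editEntity call can override several slots at once; a sketch of the idea (the second slot name and its constant are purely hypothetical):

Entities.editEntity(HMD.tabletID, {
    textures: JSON.stringify({
        "tex.close": HOME_BUTTON_TEXTURE,   // slot used in this diff
        "tex.screen": SOME_OTHER_TEXTURE    // hypothetical additional slot
    })
});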
tests/shared/src/AABoxTests.cpp
@ -169,3 +169,17 @@ void AABoxTests::testScale() {
    box3 += glm::vec3(-1.0f, -1.0f, -1.0f);
    QCOMPARE(box3.contains(glm::vec3(0.5f, 0.5f, 0.5f)), true);
}
+void AABoxTests::testFindSpherePenetration() {
+    vec3 searchPosition(-0.0141186f, 0.0640736f, -0.116081f);
+    float searchRadius = 0.5f;
+    vec3 boxMin(-0.800014f, -0.450025f, -0.00503815f);
+    vec3 boxDim(1.60003f, 0.900049f, 0.0100763f);
+    AABox testBox(boxMin, boxDim);
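+    // regression case: the box is a thin wall (~1 cm deep along z) and the sphere's center
+    // sits only ~0.11 m outside its large face, well within the 0.5 m search radius,
+    // so a penetration must be reported (only the hit flag is asserted below)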
+    vec3 penetration;
+    bool hit = testBox.findSpherePenetration(searchPosition, searchRadius, penetration);
+    QCOMPARE(hit, true);
+}
tests/shared/src/AABoxTests.h
@ -24,6 +24,7 @@ private slots:
    void testContainsPoint();
    void testTouchesSphere();
    void testScale();
+   void testFindSpherePenetration();
};
#endif // hifi_AABoxTests_h