Merge branch 'master' of https://github.com/highfidelity/hifi into project-freeloco

r3tk0n 2019-04-01 14:52:21 -07:00
commit 4fb7bbabe1
118 changed files with 3618 additions and 1431 deletions

View file

@ -18,16 +18,25 @@
#include "Agent.h"
/**jsdoc
* The <code>Agent</code> API enables an assignment client to emulate an avatar. Setting <code>isAvatar = true</code> connects
* the assignment client to the avatar and audio mixers, and enables the {@link Avatar} API to be used.
*
* @namespace Agent
*
* @hifi-assignment-client
*
* @property {boolean} isAvatar
* @property {boolean} isPlayingAvatarSound <em>Read-only.</em>
* @property {boolean} isListeningToAudioStream
* @property {boolean} isNoiseGateEnabled
* @property {number} lastReceivedAudioLoudness <em>Read-only.</em>
* @property {Uuid} sessionUUID <em>Read-only.</em>
* @property {boolean} isAvatar - <code>true</code> if the assignment client script is emulating an avatar, otherwise
* <code>false</code>.
* @property {boolean} isPlayingAvatarSound - <code>true</code> if the script has a sound to play, otherwise <code>false</code>.
* Sounds are played when <code>isAvatar</code> is <code>true</code>, from the position and with the orientation of the
* scripted avatar's head. <em>Read-only.</em>
* @property {boolean} isListeningToAudioStream - <code>true</code> if the agent is "listening" to the audio stream from the
* domain, otherwise <code>false</code>.
* @property {boolean} isNoiseGateEnabled - <code>true</code> if the noise gate is enabled, otherwise <code>false</code>. When
* enabled, the input audio stream is blocked (fully attenuated) if it falls below an adaptive threshold.
* @property {number} lastReceivedAudioLoudness - The current loudness of the audio input. Nominal range [<code>0.0</code> (no
* sound) &ndash; <code>1.0</code> (the onset of clipping)]. <em>Read-only.</em>
* @property {Uuid} sessionUUID - The unique ID associated with the agent's current session in the domain. <em>Read-only.</em>
*/
class AgentScriptingInterface : public QObject {
Q_OBJECT
@ -54,20 +63,43 @@ public:
public slots:
/**jsdoc
* Sets whether the script should emulate an avatar.
* @function Agent.setIsAvatar
* @param {boolean} isAvatar
* @param {boolean} isAvatar - <code>true</code> if the script emulates an avatar, otherwise <code>false</code>.
* @example <caption>Make an assignment client script emulate an avatar.</caption>
* (function () {
* Agent.setIsAvatar(true);
* Avatar.displayName = "AC avatar";
* print("Position: " + JSON.stringify(Avatar.position)); // 0, 0, 0
* }());
*/
void setIsAvatar(bool isAvatar) const { _agent->setIsAvatar(isAvatar); }
/**jsdoc
* Checks whether the script is emulating an avatar.
* @function Agent.isAvatar
* @returns {boolean}
* @returns {boolean} <code>true</code> if the script is emulating an avatar, otherwise <code>false</code>.
* @example <caption>Check whether the agent is emulating an avatar.</caption>
* (function () {
* print("Agent is avatar: " + Agent.isAvatar());
* print("Agent is avatar: " + Agent.isAvatar); // Same result.
* }());
*/
bool isAvatar() const { return _agent->isAvatar(); }
/**jsdoc
* Plays a sound from the position and with the orientation of the emulated avatar's head. No sound is played unless
* <code>isAvatar == true</code>.
* @function Agent.playAvatarSound
* @param {object} avatarSound
* @param {SoundObject} avatarSound - The sound played.
* @example <caption>Play a sound from an emulated avatar.</caption>
* (function () {
* Agent.isAvatar = true;
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* Script.setTimeout(function () { // Give the sound time to load.
* Agent.playAvatarSound(sound);
* }, 1000);
* }());
*/
void playAvatarSound(SharedSoundPointer avatarSound) const { _agent->playAvatarSound(avatarSound); }
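Taken together, the updated Agent JSDoc describes the standard assignment-client pattern: enable avatar emulation, configure the scripted avatar, then play sounds from its head. Below is a minimal sketch that combines the calls documented above into one script; the sound path reuses the sample file from the JSDoc example, and the 10-second repeat interval is an arbitrary illustrative choice.

// Minimal assignment-client sketch combining the Agent calls documented above.
// The sound path comes from the JSDoc example; the 10-second interval is illustrative.
(function () {
    Agent.setIsAvatar(true);                 // connect to the avatar and audio mixers
    Avatar.displayName = "AC avatar";
    print("Agent is avatar: " + Agent.isAvatar);

    var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
    Script.setTimeout(function () {          // give the sound time to load
        Script.setInterval(function () {
            Agent.playAvatarSound(sound);    // played from the scripted avatar's head
        }, 10000);
    }, 1000);
}());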

View file

@ -97,6 +97,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
PacketType::RadiusIgnoreRequest,
PacketType::RequestsDomainListData,
PacketType::PerAvatarGainSet,
PacketType::InjectorGainSet,
PacketType::AudioSoloRequest },
this, "queueAudioPacket");

View file

@ -92,6 +92,9 @@ int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
case PacketType::PerAvatarGainSet:
parsePerAvatarGainSet(*packet, node);
break;
case PacketType::InjectorGainSet:
parseInjectorGainSet(*packet, node);
break;
case PacketType::NodeIgnoreRequest:
parseNodeIgnoreRequest(packet, node);
break;
@ -197,14 +200,25 @@ void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const
if (avatarUUID.isNull()) {
// set the MASTER avatar gain
setMasterAvatarGain(gain);
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
qCDebug(audio) << "Setting MASTER avatar gain for" << uuid << "to" << gain;
} else {
// set the per-source avatar gain
setGainForAvatar(avatarUUID, gain);
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to" << gain;
}
}
void AudioMixerClientData::parseInjectorGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
QUuid uuid = node->getUUID();
uint8_t packedGain;
message.readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
setMasterInjectorGain(gain);
qCDebug(audio) << "Setting MASTER injector gain for" << uuid << "to" << gain;
}
void AudioMixerClientData::setGainForAvatar(QUuid nodeID, float gain) {
auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();
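The new InjectorGainSet packet carries the listener's master injector gain as a single byte, decoded here with unpackFloatGainFromByte(). The exact encoding lives in the audio helpers and is not part of this diff; the sketch below only illustrates the general idea of packing a decibel gain into one byte, and its [-60 dB, +20 dB] range is an assumption borrowed from the client sliders later in this commit, not the library's actual format.

// Illustrative only: one way to pack a dB gain into a single byte.
// The real packFloatGainToByte()/unpackFloatGainFromByte() encoding may differ.
var MIN_DB = -60.0;  // assumed range, matching the Audio.qml sliders in this commit
var MAX_DB = 20.0;

function packGainDbToByte(gainDb) {
    var clamped = Math.min(Math.max(gainDb, MIN_DB), MAX_DB);
    return Math.round((clamped - MIN_DB) / (MAX_DB - MIN_DB) * 255);
}

function unpackGainDbFromByte(packedGain) {
    return MIN_DB + (packedGain / 255) * (MAX_DB - MIN_DB);
}

// A 0 dB setting survives the round trip with a fraction of a dB of error.
print(unpackGainDbFromByte(packGainDbToByte(0.0)).toFixed(2));  // about -0.08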

View file

@ -63,6 +63,7 @@ public:
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
void parseRequestsDomainListData(ReceivedMessage& message);
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
void parseInjectorGainSet(ReceivedMessage& message, const SharedNodePointer& node);
void parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
@ -84,6 +85,8 @@ public:
float getMasterAvatarGain() const { return _masterAvatarGain; }
void setMasterAvatarGain(float gain) { _masterAvatarGain = gain; }
float getMasterInjectorGain() const { return _masterInjectorGain; }
void setMasterInjectorGain(float gain) { _masterInjectorGain = gain; }
AudioLimiter audioLimiter;
@ -189,6 +192,7 @@ private:
int _frameToSendStats { 0 };
float _masterAvatarGain { 1.0f }; // per-listener mixing gain, applied only to avatars
float _masterInjectorGain { 1.0f }; // per-listener mixing gain, applied only to injectors
CodecPluginPointer _codec;
QString _selectedCodecName;

View file

@ -50,8 +50,8 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
// mix helpers
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
inline float computeGain(float masterAvatarGain, float masterInjectorGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance);
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition);
@ -338,8 +338,8 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
}
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
updateHRTFParameters(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
listenerData->getMasterInjectorGain());
}
return false;
});
@ -363,8 +363,8 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
}
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
updateHRTFParameters(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
listenerData->getMasterInjectorGain());
}
return false;
});
@ -381,13 +381,13 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
} else {
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
addStream(stream, *listenerAudioStream, 0.0f, isSoloing);
addStream(stream, *listenerAudioStream, 0.0f, 0.0f, isSoloing);
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(), listenerData->getMasterInjectorGain(),
isSoloing);
if (shouldBeInactive(stream)) {
@ -423,7 +423,7 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(), listenerData->getMasterInjectorGain(),
isSoloing);
if (shouldBeInactive(stream)) {
@ -491,7 +491,9 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain, bool isSoloing) {
float masterAvatarGain,
float masterInjectorGain,
bool isSoloing) {
++stats.totalMixes;
auto streamToAdd = mixableStream.positionalStream;
@ -502,13 +504,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStre
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = isEcho ? 1.0f
: (isSoloing ? masterAvatarGain
: computeGain(masterAvatarGain, masterInjectorGain, listeningNodeStream, *streamToAdd,
relativePosition, distance));
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
float gain = masterListenerGain;
if (!isSoloing) {
gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
}
const int HRTF_DATASET_INDEX = 1;
if (!streamToAdd->lastPopSucceeded()) {
@ -585,8 +586,9 @@ void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStre
}
void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
AvatarAudioStream& listeningNodeStream,
float masterAvatarGain,
float masterInjectorGain) {
auto streamToAdd = mixableStream.positionalStream;
// check if this is a server echo of a source back to itself
@ -595,7 +597,8 @@ void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream&
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float gain = isEcho ? 1.0f : computeGain(masterAvatarGain, masterInjectorGain, listeningNodeStream, *streamToAdd,
relativePosition, distance);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);
@ -720,6 +723,7 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
// injector: apply attenuation
if (streamToAdd.getType() == PositionalAudioStream::Injector) {
gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
// injector: skip master gain
}
// avatar: skip attenuation - it is too costly to approximate
@ -729,19 +733,25 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
float distance = glm::length(relativePosition);
return gain / distance;
// avatar: skip master gain - it is constant for all streams
// avatar: skip master gain
}
float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
float computeGain(float masterAvatarGain,
float masterInjectorGain,
const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition,
float distance) {
float gain = 1.0f;
// injector: apply attenuation
if (streamToAdd.getType() == PositionalAudioStream::Injector) {
gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
// apply master gain
gain *= masterInjectorGain;
// avatar: apply fixed off-axis attenuation to make them quieter as they turn away
} else if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) {
} else if (streamToAdd.getType() == PositionalAudioStream::Microphone) {
glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition;
// source directivity is based on angle of emission, in local coordinates
@ -754,8 +764,8 @@ float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNo
gain *= offAxisCoefficient;
// apply master gain, only to avatars
gain *= masterListenerGain;
// apply master gain
gain *= masterAvatarGain;
}
auto& audioZones = AudioMixer::getAudioZones();
@ -797,8 +807,9 @@ float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNo
return gain;
}
float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition) {
float computeAzimuth(const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition) {
glm::quat inverseOrientation = glm::inverse(listeningNodeStream.getOrientation());
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
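The computeGain() rework above splits the old single master listener gain into separate avatar and injector master gains and drops the isEcho parameter, which the callers now handle themselves. The sketch below mirrors that pipeline in JavaScript; the two helper stubs stand in for the off-axis and zone/distance attenuation logic that is unchanged and not shown in this diff.

// Sketch of the per-stream gain pipeline after this change (mirrors the C++ above).
// The two stubs are placeholders for logic that is unchanged and not shown here.
function offAxisCoefficient(stream, relativePosition) {
    return 1.0;  // placeholder: fixed off-axis attenuation for avatars facing away
}
function zoneAttenuation(distance) {
    return 1.0;  // placeholder: audio-zone and distance attenuation
}

function computeGain(masterAvatarGain, masterInjectorGain, stream, relativePosition, distance) {
    var gain = 1.0;
    if (stream.type === "Injector") {
        gain *= stream.attenuationRatio;    // injector: per-injector attenuation,
        gain *= masterInjectorGain;         // then the listener's master injector gain
    } else if (stream.type === "Microphone") {
        gain *= offAxisCoefficient(stream, relativePosition);  // avatar: quieter when facing away,
        gain *= masterAvatarGain;           // then the listener's master avatar gain
    }
    gain *= zoneAttenuation(distance);      // unchanged zone/distance attenuation
    return gain;
}

// Echo and soloing are decided by the callers, as in addStream() above:
//   gain = isEcho ? 1.0 : (isSoloing ? masterAvatarGain : computeGain(...));
var injectorStream = { type: "Injector", attenuationRatio: 0.5 };
print(computeGain(1.0, 0.25, injectorStream, { x: 0, y: 0, z: 1 }, 1.0));  // 0.125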

View file

@ -57,10 +57,13 @@ private:
bool prepareMix(const SharedNodePointer& listener);
void addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain, bool isSoloing);
float masterAvatarGain,
float masterInjectorGain,
bool isSoloing);
void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
float masterAvatarGain,
float masterInjectorGain);
void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);
void addStreams(Node& listener, AudioMixerClientData& listenerData);

View file

@ -253,10 +253,29 @@ void AvatarMixer::start() {
int lockWait, nodeTransform, functor;
// Set our query each frame
{
_entityViewer.queryOctree();
}
// Dirty the hero status if there's been an entity change.
{
if (_dirtyHeroStatus) {
_dirtyHeroStatus = false;
nodeList->nestedEach([](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
std::for_each(cbegin, cend, [](const SharedNodePointer& node) {
if (node->getType() == NodeType::Agent) {
NodeData* nodeData = node->getLinkedData();
if (nodeData) {
auto& avatar = static_cast<AvatarMixerClientData*>(nodeData)->getAvatar();
avatar.setNeedsHeroCheck();
}
}
});
});
}
}
// Allow nodes to process any pending/queued packets across our worker threads
{
auto start = usecTimestampNow();
@ -827,7 +846,7 @@ void AvatarMixer::sendStatsPacket() {
QJsonObject avatarsObject;
auto nodeList = DependencyManager::get<NodeList>();
// add stats for each listerner
// add stats for each listener
nodeList->eachNode([&](const SharedNodePointer& node) {
QJsonObject avatarStats;
@ -851,6 +870,12 @@ void AvatarMixer::sendStatsPacket() {
avatarStats["delta_full_vs_avatar_data_kbps"] =
(double)outboundAvatarDataKbps - avatarStats[OUTBOUND_AVATAR_DATA_STATS_KEY].toDouble();
}
if (node->getType() != NodeType::Agent) { // Nodes that aren't avatars
const QString displayName
{ node->getType() == NodeType::EntityScriptServer ? "ENTITY SCRIPT SERVER" : "ENTITY SERVER" };
avatarStats["display_name"] = displayName;
}
}
avatarsObject[uuidStringWithoutCurlyBraces(node->getUUID())] = avatarStats;
@ -973,19 +998,30 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
{
const QString CONNECTION_RATE = "connection_rate";
auto nodeList = DependencyManager::get<NodeList>();
auto defaultConnectionRate = nodeList->getMaxConnectionRate();
int connectionRate = avatarMixerGroupObject[CONNECTION_RATE].toInt((int)defaultConnectionRate);
nodeList->setMaxConnectionRate(connectionRate);
bool success;
int connectionRate = avatarMixerGroupObject[CONNECTION_RATE].toString().toInt(&success);
if (success) {
nodeList->setMaxConnectionRate(connectionRate);
}
}
{ // Fraction of downstream bandwidth reserved for 'hero' avatars:
static const QString PRIORITY_FRACTION_KEY = "priority_fraction";
if (avatarMixerGroupObject.contains(PRIORITY_FRACTION_KEY)) {
float priorityFraction = float(avatarMixerGroupObject[PRIORITY_FRACTION_KEY].toDouble());
_slavePool.setPriorityReservedFraction(std::min(std::max(0.0f, priorityFraction), 1.0f));
qCDebug(avatars) << "Avatar mixer reserving" << priorityFraction << "of bandwidth for priority avatars";
}
}
const QString AVATARS_SETTINGS_KEY = "avatars";
static const QString MIN_HEIGHT_OPTION = "min_avatar_height";
float settingMinHeight = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MIN_HEIGHT_OPTION].toDouble(MIN_AVATAR_HEIGHT);
float settingMinHeight = avatarMixerGroupObject[MIN_HEIGHT_OPTION].toDouble(MIN_AVATAR_HEIGHT);
_domainMinimumHeight = glm::clamp(settingMinHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
static const QString MAX_HEIGHT_OPTION = "max_avatar_height";
float settingMaxHeight = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MAX_HEIGHT_OPTION].toDouble(MAX_AVATAR_HEIGHT);
float settingMaxHeight = avatarMixerGroupObject[MAX_HEIGHT_OPTION].toDouble(MAX_AVATAR_HEIGHT);
_domainMaximumHeight = glm::clamp(settingMaxHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
// make sure that the domain owner didn't flip min and max
@ -997,11 +1033,11 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
<< "and a maximum avatar height of" << _domainMaximumHeight;
static const QString AVATAR_WHITELIST_OPTION = "avatar_whitelist";
_slaveSharedData.skeletonURLWhitelist = domainSettings[AVATARS_SETTINGS_KEY].toObject()[AVATAR_WHITELIST_OPTION]
_slaveSharedData.skeletonURLWhitelist = avatarMixerGroupObject[AVATAR_WHITELIST_OPTION]
.toString().split(',', QString::KeepEmptyParts);
static const QString REPLACEMENT_AVATAR_OPTION = "replacement_avatar";
_slaveSharedData.skeletonReplacementURL = domainSettings[AVATARS_SETTINGS_KEY].toObject()[REPLACEMENT_AVATAR_OPTION]
_slaveSharedData.skeletonReplacementURL = avatarMixerGroupObject[REPLACEMENT_AVATAR_OPTION]
.toString();
if (_slaveSharedData.skeletonURLWhitelist.count() == 1 && _slaveSharedData.skeletonURLWhitelist[0].isEmpty()) {
@ -1018,9 +1054,12 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
void AvatarMixer::setupEntityQuery() {
_entityViewer.init();
EntityTreePointer entityTree = _entityViewer.getTree();
DependencyManager::registerInheritance<SpatialParentFinder, AssignmentParentFinder>();
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
_slaveSharedData.entityTree = _entityViewer.getTree();
DependencyManager::set<AssignmentParentFinder>(entityTree);
connect(entityTree.get(), &EntityTree::addingEntityPointer, this, &AvatarMixer::entityAdded);
connect(entityTree.get(), &EntityTree::deletingEntityPointer, this, &AvatarMixer::entityChange);
// ES query: {"avatarPriority": true, "type": "Zone"}
QJsonObject priorityZoneQuery;
@ -1028,6 +1067,7 @@ void AvatarMixer::setupEntityQuery() {
priorityZoneQuery["type"] = "Zone";
_entityViewer.getOctreeQuery().setJSONParameters(priorityZoneQuery);
_slaveSharedData.entityTree = entityTree;
}
void AvatarMixer::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
@ -1064,6 +1104,25 @@ void AvatarMixer::handleOctreePacket(QSharedPointer<ReceivedMessage> message, Sh
}
}
void AvatarMixer::entityAdded(EntityItem* entity) {
if (entity->getType() == EntityTypes::Zone) {
_dirtyHeroStatus = true;
entity->registerChangeHandler([this](const EntityItemID& entityItemID) {
entityChange();
});
}
}
void AvatarMixer::entityRemoved(EntityItem * entity) {
if (entity->getType() == EntityTypes::Zone) {
_dirtyHeroStatus = true;
}
}
void AvatarMixer::entityChange() {
_dirtyHeroStatus = true;
}
void AvatarMixer::aboutToFinish() {
DependencyManager::destroy<ResourceManager>();
DependencyManager::destroy<ResourceCacheSharedItems>();
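The mixer now watches its entity tree for Zone entities matching the query {"avatarPriority": true, "type": "Zone"} and marks every avatar for a fresh hero check whenever such a zone is added, removed, or changed. For reference, a hedged client-side sketch of creating a zone that this query would match is below; the exact shape of the entity's avatarPriority property is an assumption taken from the mixer's JSON query, and the position and dimensions are arbitrary.

// Hypothetical client script: create a zone the avatar mixer's
// {"avatarPriority": true, "type": "Zone"} query would match.
// The boolean avatarPriority property is assumed from the query above.
var heroZone = Entities.addEntity({
    type: "Zone",
    name: "Hero zone (sketch)",
    avatarPriority: true,                                          // assumption
    position: Vec3.sum(MyAvatar.position, { x: 0, y: 0, z: -5 }),
    dimensions: { x: 10, y: 10, z: 10 }
});
print("Created hero zone: " + heroZone);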

View file

@ -34,8 +34,8 @@ public:
static bool shouldReplicateTo(const Node& from, const Node& to) {
return to.getType() == NodeType::DownstreamAvatarMixer &&
to.getPublicSocket() != from.getPublicSocket() &&
to.getLocalSocket() != from.getLocalSocket();
to.getPublicSocket() != from.getPublicSocket() &&
to.getLocalSocket() != from.getLocalSocket();
}
public slots:
@ -46,6 +46,11 @@ public slots:
void sendStatsPacket() override;
// Avatar zone possibly changed
void entityAdded(EntityItem* entity);
void entityRemoved(EntityItem* entity);
void entityChange();
private slots:
void queueIncomingPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node);
void handleAdjustAvatarSorting(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
@ -80,6 +85,7 @@ private:
// Attach to entity tree for avatar-priority zone info.
EntityTreeHeadlessViewer _entityViewer;
bool _dirtyHeroStatus { true }; // Set when hero zones have changed and avatars need a fresh hero check
// FIXME - new throttling - use these values somehow
float _trailingMixRatio { 0.0f };

View file

@ -129,7 +129,7 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message, const SlaveShared
incrementNumOutOfOrderSends();
}
_lastReceivedSequenceNumber = sequenceNumber;
glm::vec3 oldPosition = getPosition();
glm::vec3 oldPosition = _avatar->getClientGlobalPosition();
bool oldHasPriority = _avatar->getHasPriority();
// compute the offset to the data payload
@ -140,23 +140,13 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message, const SlaveShared
// Regardless of what the client says, restore the priority as we know it without triggering any update.
_avatar->setHasPriorityWithoutTimestampReset(oldHasPriority);
auto newPosition = getPosition();
if (newPosition != oldPosition) {
//#define AVATAR_HERO_TEST_HACK
#ifdef AVATAR_HERO_TEST_HACK
{
const static QString heroKey { "HERO" };
_avatar->setPriorityAvatar(_avatar->getDisplayName().contains(heroKey));
}
#else
auto newPosition = _avatar->getClientGlobalPosition();
if (newPosition != oldPosition || _avatar->getNeedsHeroCheck()) {
EntityTree& entityTree = *slaveSharedData.entityTree;
FindPriorityZone findPriorityZone { newPosition, false } ;
FindPriorityZone findPriorityZone { newPosition } ;
entityTree.recurseTreeWithOperation(&FindPriorityZone::operation, &findPriorityZone);
_avatar->setHasPriority(findPriorityZone.isInPriorityZone);
//if (findPriorityZone.isInPriorityZone) {
// qCWarning(avatars) << "Avatar" << _avatar->getSessionDisplayName() << "in hero zone";
//}
#endif
_avatar->setNeedsHeroCheck(false);
}
return true;
@ -341,7 +331,7 @@ void AvatarMixerClientData::checkSkeletonURLAgainstWhitelist(const SlaveSharedDa
// the returned set traits packet uses the trait version from the incoming packet
// so the client knows they should not overwrite if they have since changed the trait
_avatar->packTrait(AvatarTraits::SkeletonModelURL, *packet, traitVersion);
AvatarTraits::packVersionedTrait(AvatarTraits::SkeletonModelURL, *packet, traitVersion, *_avatar);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->sendPacket(std::move(packet), sendingNode);

View file

@ -43,12 +43,14 @@ void AvatarMixerSlave::configure(ConstIter begin, ConstIter end) {
void AvatarMixerSlave::configureBroadcast(ConstIter begin, ConstIter end,
p_high_resolution_clock::time_point lastFrameTimestamp,
float maxKbpsPerNode, float throttlingRatio) {
float maxKbpsPerNode, float throttlingRatio,
float priorityReservedFraction) {
_begin = begin;
_end = end;
_lastFrameTimestamp = lastFrameTimestamp;
_maxKbpsPerNode = maxKbpsPerNode;
_throttlingRatio = throttlingRatio;
_avatarHeroFraction = priorityReservedFraction;
}
void AvatarMixerSlave::harvestStats(AvatarMixerSlaveStats& stats) {
@ -139,7 +141,8 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
if (lastReceivedVersion > lastSentVersionRef) {
bytesWritten += addTraitsNodeHeader(listeningNodeData, sendingNodeData, traitsPacketList, bytesWritten);
// there is an update to this trait, add it to the traits packet
bytesWritten += sendingAvatar->packTrait(traitType, traitsPacketList, lastReceivedVersion);
bytesWritten += AvatarTraits::packVersionedTrait(traitType, traitsPacketList,
lastReceivedVersion, *sendingAvatar);
// update the last sent version
lastSentVersionRef = lastReceivedVersion;
// Remember which versions we sent in this particular packet
@ -194,7 +197,8 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
bytesWritten += addTraitsNodeHeader(listeningNodeData, sendingNodeData, traitsPacketList, bytesWritten);
// this instance version exists and has never been sent or is newer so we need to send it
bytesWritten += sendingAvatar->packTraitInstance(traitType, instanceID, traitsPacketList, receivedVersion);
bytesWritten += AvatarTraits::packVersionedTraitInstance(traitType, instanceID, traitsPacketList,
receivedVersion, *sendingAvatar);
if (sentInstanceIt != sentIDValuePairs.end()) {
sentInstanceIt->value = receivedVersion;
@ -308,7 +312,6 @@ namespace {
} // Close anonymous namespace.
void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node) {
const float AVATAR_HERO_FRACTION { 0.4f };
const Node* destinationNode = node.data();
auto nodeList = DependencyManager::get<NodeList>();
@ -343,7 +346,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// max number of avatarBytes per frame (13 900, typical)
const int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);
const int maxHeroBytesPerFrame = int(maxAvatarBytesPerFrame * AVATAR_HERO_FRACTION); // 5555, typical
const int maxHeroBytesPerFrame = int(maxAvatarBytesPerFrame * _avatarHeroFraction); // 5555, typical
// keep track of the number of other avatars held back in this frame
int numAvatarsHeldBack = 0;
@ -469,8 +472,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
SortableAvatar(avatarNodeData, sourceAvatarNode, lastEncodeTime));
}
// If Avatar A's PAL WAS open but is no longer open, AND
// Avatar A is ignoring Avatar B OR Avatar B is ignoring Avatar A...
// If Node A's PAL WAS open but is no longer open, AND
// Node A is ignoring Avatar B OR Node B is ignoring Avatar A...
//
// This is a bit heavy-handed still - there are cases where a kill packet
// will be sent when it doesn't need to be (but where it _should_ be OK to send).
@ -539,7 +542,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
const MixerAvatar* sourceAvatar = sourceNodeData->getConstAvatarData();
// Typically all out-of-view avatars but such avatars' priorities will rise with time:
bool isLowerPriority = currentVariant != kHero && sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD; // XXX: hero handling?
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;
if (isLowerPriority) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
@ -548,8 +551,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
destinationNodeData->incrementAvatarInView();
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
// If the time that the mixer sent AVATAR DATA about Avatar B to Node A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Node A.
if (sourceAvatar->hasProcessedFirstIdentity()
&& destinationNodeData->getLastBroadcastTime(sourceNode->getLocalID()) <= sourceNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(*identityPacketList, sourceNodeData, *destinationNode);

View file

@ -110,7 +110,8 @@ public:
void configure(ConstIter begin, ConstIter end);
void configureBroadcast(ConstIter begin, ConstIter end,
p_high_resolution_clock::time_point lastFrameTimestamp,
float maxKbpsPerNode, float throttlingRatio);
float maxKbpsPerNode, float throttlingRatio,
float priorityReservedFraction);
void processIncomingPackets(const SharedNodePointer& node);
void broadcastAvatarData(const SharedNodePointer& node);
@ -140,6 +141,7 @@ private:
p_high_resolution_clock::time_point _lastFrameTimestamp;
float _maxKbpsPerNode { 0.0f };
float _throttlingRatio { 0.0f };
float _avatarHeroFraction { 0.4f };
AvatarMixerSlaveStats _stats;
SlaveSharedData* _sharedData;

View file

@ -76,7 +76,8 @@ void AvatarMixerSlavePool::broadcastAvatarData(ConstIter begin, ConstIter end,
float maxKbpsPerNode, float throttlingRatio) {
_function = &AvatarMixerSlave::broadcastAvatarData;
_configure = [=](AvatarMixerSlave& slave) {
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio);
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio,
_priorityReservedFraction);
};
run(begin, end);
}

View file

@ -73,7 +73,10 @@ public:
void each(std::function<void(AvatarMixerSlave& slave)> functor);
void setNumThreads(int numThreads);
int numThreads() { return _numThreads; }
int numThreads() const { return _numThreads; }
void setPriorityReservedFraction(float fraction) { _priorityReservedFraction = fraction; }
float getPriorityReservedFraction() const { return _priorityReservedFraction; }
private:
void run(ConstIter begin, ConstIter end);
@ -91,7 +94,11 @@ private:
ConditionVariable _poolCondition;
void (AvatarMixerSlave::*_function)(const SharedNodePointer& node);
std::function<void(AvatarMixerSlave&)> _configure;
// Set from Domain Settings:
float _priorityReservedFraction { 0.4f };
int _numThreads { 0 };
int _numStarted { 0 }; // guarded by _mutex
int _numFinished { 0 }; // guarded by _mutex
int _numStopped { 0 }; // guarded by _mutex

View file

@ -19,8 +19,12 @@
class MixerAvatar : public AvatarData {
public:
bool getNeedsHeroCheck() const { return _needsHeroCheck; }
void setNeedsHeroCheck(bool needsHeroCheck = true)
{ _needsHeroCheck = needsHeroCheck; }
private:
bool _needsHeroCheck { false };
};
using MixerAvatarSharedPointer = std::shared_ptr<MixerAvatar>;

View file

@ -20,25 +20,29 @@
/**jsdoc
* The <code>Avatar</code> API is used to manipulate scriptable avatars on the domain. This API is a subset of the
* {@link MyAvatar} API.
* {@link MyAvatar} API. To enable this API, set {@link Agent|Agent.isAvatar} to <code>true</code>.
*
* <p>For Interface, client entity, and avatar scripts, see {@link MyAvatar}.</p>
*
* <p><strong>Note:</strong> In the examples, use "<code>Avatar</code>" instead of "<code>MyAvatar</code>".</p>
*
* @namespace Avatar
*
* @hifi-assignment-client
*
* @property {Vec3} position
* @property {number} scale
* @property {number} density <em>Read-only.</em>
* @property {Vec3} handPosition
* @property {number} bodyYaw - The rotation left or right about an axis running from the head to the feet of the avatar.
* @comment IMPORTANT: This group of properties is copied from AvatarData.h; they should NOT be edited here.
* @property {Vec3} position - The position of the avatar.
* @property {number} scale=1.0 - The scale of the avatar. The value can be set to anything between <code>0.005</code> and
* <code>1000.0</code>. When the scale value is fetched, it may temporarily be further limited by the domain's settings.
* @property {number} density - The density of the avatar in kg/m<sup>3</sup>. The density is used to work out its mass in
* the application of physics. <em>Read-only.</em>
* @property {Vec3} handPosition - A user-defined hand position, in world coordinates. The position moves with the avatar
* but is otherwise not used or changed by Interface.
* @property {number} bodyYaw - The left or right rotation about an axis running from the head to the feet of the avatar.
* Yaw is sometimes called "heading".
* @property {number} bodyPitch - The rotation about an axis running from shoulder to shoulder of the avatar. Pitch is
* sometimes called "elevation".
* @property {number} bodyRoll - The rotation about an axis running from the chest to the back of the avatar. Roll is
* sometimes called "bank".
* @property {Quat} orientation
* @property {Quat} orientation - The orientation of the avatar.
* @property {Quat} headOrientation - The orientation of the avatar's head.
* @property {number} headPitch - The rotation about an axis running from ear to ear of the avatar's head. Pitch is
* sometimes called "elevation".
@ -46,79 +50,37 @@
* head. Yaw is sometimes called "heading".
* @property {number} headRoll - The rotation about an axis running from the nose to the back of the avatar's head. Roll is
* sometimes called "bank".
* @property {Vec3} velocity
* @property {Vec3} angularVelocity
* @property {number} audioLoudness
* @property {number} audioAverageLoudness
* @property {string} displayName
* @property {string} sessionDisplayName - Sanitized, defaulted version displayName that is defined by the AvatarMixer
* rather than by Interface clients. The result is unique among all avatars present at the time.
* @property {boolean} lookAtSnappingEnabled
* @property {string} skeletonModelURL
* @property {AttachmentData[]} attachmentData
* @property {Vec3} velocity - The current velocity of the avatar.
* @property {Vec3} angularVelocity - The current angular velocity of the avatar.
* @property {number} audioLoudness - The instantaneous loudness of the audio input that the avatar is injecting into the
* domain.
* @property {number} audioAverageLoudness - The rolling average loudness of the audio input that the avatar is injecting
* into the domain.
* @property {string} displayName - The avatar's display name.
* @property {string} sessionDisplayName - A sanitized, defaulted version of <code>displayName</code> defined by the avatar mixer
* rather than by Interface clients. The result is unique among all avatars present in the domain at the time.
* @property {boolean} lookAtSnappingEnabled=true - <code>true</code> if the avatar's eyes snap to look at another avatar's
* eyes when the other avatar is in the line of sight and also has <code>lookAtSnappingEnabled == true</code>.
* @property {string} skeletonModelURL - The avatar's FST file.
* @property {AttachmentData[]} attachmentData - Information on the avatar's attachments.<br />
* <strong>Deprecated:</strong> Use avatar entities instead.
* @property {string[]} jointNames - The list of joints in the current avatar model. <em>Read-only.</em>
* @property {Uuid} sessionUUID <em>Read-only.</em>
* @property {Mat4} sensorToWorldMatrix <em>Read-only.</em>
* @property {Mat4} controllerLeftHandMatrix <em>Read-only.</em>
* @property {Mat4} controllerRightHandMatrix <em>Read-only.</em>
* @property {number} sensorToWorldScale <em>Read-only.</em>
* @property {Uuid} sessionUUID - Unique ID of the avatar in the domain. <em>Read-only.</em>
* @property {Mat4} sensorToWorldMatrix - The scale, rotation, and translation transform from the user's real world to the
* avatar's size, orientation, and position in the virtual world. <em>Read-only.</em>
* @property {Mat4} controllerLeftHandMatrix - The rotation and translation of the left hand controller relative to the
* avatar. <em>Read-only.</em>
* @property {Mat4} controllerRightHandMatrix - The rotation and translation of the right hand controller relative to the
* avatar. <em>Read-only.</em>
* @property {number} sensorToWorldScale - The scale that transforms dimensions in the user's real world to the avatar's
* size in the virtual world. <em>Read-only.</em>
* @property {boolean} hasPriority - <code>true</code> if the avatar is in a Hero zone, otherwise <code>false</code>. <em>Read-only.</em>
*
* @borrows MyAvatar.getDomainMinScale as getDomainMinScale
* @borrows MyAvatar.getDomainMaxScale as getDomainMaxScale
* @borrows MyAvatar.canMeasureEyeHeight as canMeasureEyeHeight
* @borrows MyAvatar.getEyeHeight as getEyeHeight
* @borrows MyAvatar.getHeight as getHeight
* @borrows MyAvatar.setHandState as setHandState
* @borrows MyAvatar.getHandState as getHandState
* @borrows MyAvatar.setRawJointData as setRawJointData
* @borrows MyAvatar.setJointData as setJointData
* @borrows MyAvatar.setJointRotation as setJointRotation
* @borrows MyAvatar.setJointTranslation as setJointTranslation
* @borrows MyAvatar.clearJointData as clearJointData
* @borrows MyAvatar.isJointDataValid as isJointDataValid
* @borrows MyAvatar.getJointRotation as getJointRotation
* @borrows MyAvatar.getJointTranslation as getJointTranslation
* @borrows MyAvatar.getJointRotations as getJointRotations
* @borrows MyAvatar.getJointTranslations as getJointTranslations
* @borrows MyAvatar.setJointRotations as setJointRotations
* @borrows MyAvatar.setJointTranslations as setJointTranslations
* @borrows MyAvatar.clearJointsData as clearJointsData
* @borrows MyAvatar.getJointIndex as getJointIndex
* @borrows MyAvatar.getJointNames as getJointNames
* @borrows MyAvatar.setBlendshape as setBlendshape
* @borrows MyAvatar.getAttachmentsVariant as getAttachmentsVariant
* @borrows MyAvatar.setAttachmentsVariant as setAttachmentsVariant
* @borrows MyAvatar.updateAvatarEntity as updateAvatarEntity
* @borrows MyAvatar.clearAvatarEntity as clearAvatarEntity
* @borrows MyAvatar.setForceFaceTrackerConnected as setForceFaceTrackerConnected
* @borrows MyAvatar.getAttachmentData as getAttachmentData
* @borrows MyAvatar.setAttachmentData as setAttachmentData
* @borrows MyAvatar.attach as attach
* @borrows MyAvatar.detachOne as detachOne
* @borrows MyAvatar.detachAll as detachAll
* @borrows MyAvatar.getAvatarEntityData as getAvatarEntityData
* @borrows MyAvatar.setAvatarEntityData as setAvatarEntityData
* @borrows MyAvatar.getSensorToWorldMatrix as getSensorToWorldMatrix
* @borrows MyAvatar.getSensorToWorldScale as getSensorToWorldScale
* @borrows MyAvatar.getControllerLeftHandMatrix as getControllerLeftHandMatrix
* @borrows MyAvatar.getControllerRightHandMatrix as getControllerRightHandMatrix
* @borrows MyAvatar.getDataRate as getDataRate
* @borrows MyAvatar.getUpdateRate as getUpdateRate
* @borrows MyAvatar.displayNameChanged as displayNameChanged
* @borrows MyAvatar.sessionDisplayNameChanged as sessionDisplayNameChanged
* @borrows MyAvatar.skeletonModelURLChanged as skeletonModelURLChanged
* @borrows MyAvatar.lookAtSnappingChanged as lookAtSnappingChanged
* @borrows MyAvatar.sessionUUIDChanged as sessionUUIDChanged
* @borrows MyAvatar.sendAvatarDataPacket as sendAvatarDataPacket
* @borrows MyAvatar.sendIdentityPacket as sendIdentityPacket
* @borrows MyAvatar.setJointMappingsFromNetworkReply as setJointMappingsFromNetworkReply
* @borrows MyAvatar.setSessionUUID as setSessionUUID
* @borrows MyAvatar.getAbsoluteJointRotationInObjectFrame as getAbsoluteJointRotationInObjectFrame
* @borrows MyAvatar.getAbsoluteJointTranslationInObjectFrame as getAbsoluteJointTranslationInObjectFrame
* @borrows MyAvatar.setAbsoluteJointRotationInObjectFrame as setAbsoluteJointRotationInObjectFrame
* @borrows MyAvatar.setAbsoluteJointTranslationInObjectFrame as setAbsoluteJointTranslationInObjectFrame
* @borrows MyAvatar.getTargetScale as getTargetScale
* @borrows MyAvatar.resetLastSent as resetLastSent
* @example <caption>Create a scriptable avatar.</caption>
* (function () {
* Agent.setIsAvatar(true);
* print("Position: " + JSON.stringify(Avatar.position)); // 0, 0, 0
* }());
*/
class ScriptableAvatar : public AvatarData, public Dependency {
@ -132,15 +94,17 @@ public:
ScriptableAvatar();
/**jsdoc
* Starts playing an animation on the avatar.
* @function Avatar.startAnimation
* @param {string} url
* @param {number} [fps=30]
* @param {number} [priority=1]
* @param {boolean} [loop=false]
* @param {boolean} [hold=false]
* @param {number} [firstFrame=0]
* @param {number} [lastFrame=3.403e+38]
* @param {string[]} [maskedJoints=[]]
* @param {string} url - The animation file's URL. Animation files need to be in the FBX format but only need to contain
* the avatar skeleton and animation data.
* @param {number} [fps=30] - The frames per second (FPS) rate for the animation playback. 30 FPS is normal speed.
* @param {number} [priority=1] - <em>Not used.</em>
* @param {boolean} [loop=false] - <code>true</code> if the animation should loop, <code>false</code> if it shouldn't.
* @param {boolean} [hold=false] - <em>Not used.</em>
* @param {number} [firstFrame=0] - The frame at which the animation starts.
* @param {number} [lastFrame=3.403e+38] - The frame at which the animation stops.
* @param {string[]} [maskedJoints=[]] - The names of joints that should not be animated.
*/
/// Allows scripts to run animations.
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
@ -148,39 +112,37 @@ public:
const QStringList& maskedJoints = QStringList());
/**jsdoc
* Stops playing the current animation.
* @function Avatar.stopAnimation
*/
Q_INVOKABLE void stopAnimation();
/**jsdoc
* Gets the details of the current avatar animation that is being or was recently played.
* @function Avatar.getAnimationDetails
* @returns {Avatar.AnimationDetails}
* @returns {Avatar.AnimationDetails} The current or recent avatar animation.
* @example <caption>Report the current animation details.</caption>
* var animationDetails = Avatar.getAnimationDetails();
* print("Animation details: " + JSON.stringify(animationDetails));
*/
Q_INVOKABLE AnimationDetails getAnimationDetails();
/**jsdoc
* Get the names of all the joints in the current avatar.
* @function MyAvatar.getJointNames
* @returns {string[]} The joint names.
* @example <caption>Report the names of all the joints in your current avatar.</caption>
* print(JSON.stringify(MyAvatar.getJointNames()));
*/
* @comment Uses the base class's JSDoc.
*/
Q_INVOKABLE virtual QStringList getJointNames() const override;
/**jsdoc
* Get the joint index for a named joint. The joint index value is the position of the joint in the array returned by
* {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}.
* @function MyAvatar.getJointIndex
* @param {string} name - The name of the joint.
* @returns {number} The index of the joint.
* @example <caption>Report the index of your avatar's left arm joint.</caption>
* print(JSON.stringify(MyAvatar.getJointIndex("LeftArm"));
*/
* @comment Uses the base class's JSDoc.
*/
/// Returns the index of the joint with the specified name, or -1 if not found/unknown.
Q_INVOKABLE virtual int getJointIndex(const QString& name) const override;
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
/**jsdoc
* @comment Uses the base class's JSDoc.
*/
int sendAvatarDataPacket(bool sendAll = false) override;
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
@ -192,32 +154,42 @@ public:
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
/**jsdoc
* Potentially Very Expensive. Do not use.
/**jsdoc
* Gets details of all avatar entities.
* <p><strong>Warning:</strong> Potentially an expensive call. Do not use if possible.</p>
* @function Avatar.getAvatarEntityData
* @returns {object}
* @returns {AvatarEntityMap} Details of the avatar entities.
* @example <caption>Report the current avatar entities.</caption>
* var avatarEntityData = Avatar.getAvatarEntityData();
* print("Avatar entities: " + JSON.stringify(avatarEntityData));
*/
Q_INVOKABLE AvatarEntityMap getAvatarEntityData() const override;
/**jsdoc
* @function MyAvatar.setAvatarEntityData
* @param {object} avatarEntityData
*/
* Sets all avatar entities from an object.
* <p><strong>Warning:</strong> Potentially an expensive call. Do not use if possible.</p>
* @function Avatar.setAvatarEntityData
* @param {AvatarEntityMap} avatarEntityData - Details of the avatar entities.
*/
Q_INVOKABLE void setAvatarEntityData(const AvatarEntityMap& avatarEntityData) override;
/**jsdoc
* @function MyAvatar.updateAvatarEntity
* @param {Uuid} entityID
* @param {string} entityData
* @comment Uses the base class's JSDoc.
*/
Q_INVOKABLE void updateAvatarEntity(const QUuid& entityID, const QByteArray& entityData) override;
public slots:
/**jsdoc
* @function Avatar.update
* @param {number} deltaTime - Delta time.
* @deprecated This function is deprecated and will be removed.
*/
void update(float deltatime);
/**jsdoc
* @function MyAvatar.setJointMappingsFromNetworkReply
*/
* @function Avatar.setJointMappingsFromNetworkReply
* @deprecated This function is deprecated and will be removed.
*/
void setJointMappingsFromNetworkReply();
private:
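The reworked Avatar.startAnimation() documentation above now spells out each parameter. A short assignment-client sketch that uses them follows; both URLs are hypothetical placeholders, not real assets.

// Assignment-client sketch: emulate an avatar and loop an animation on it.
// Both URLs are hypothetical placeholders.
(function () {
    Agent.setIsAvatar(true);
    Avatar.skeletonModelURL = "https://example.com/avatars/robot.fst";  // placeholder
    Avatar.startAnimation(
        "https://example.com/animations/wave.fbx",  // placeholder FBX animation
        30,     // fps: normal speed
        1,      // priority: not used
        true,   // loop
        false,  // hold: not used
        0,      // firstFrame
        100     // lastFrame
    );
    print("Animation details: " + JSON.stringify(Avatar.getAnimationDetails()));
}());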

View file

@ -1310,6 +1310,15 @@
"placeholder": "50",
"default": "50",
"advanced": true
},
{
"name": "priority_fraction",
"type": "double",
"label": "Hero Bandwidth",
"help": "Fraction of downstream bandwidth reserved for avatars in 'Hero' zones",
"placeholder": "0.40",
"default": "0.40",
"advanced": true
}
]
},
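This new priority_fraction setting replaces the hard-coded AVATAR_HERO_FRACTION of 0.4 removed from AvatarMixerSlave.cpp earlier in this commit; the mixer clamps it to [0, 1] and uses it to split each listener's per-frame avatar byte budget. Using the "typical" figures quoted in the code comments, the split works out roughly as follows.

// Worked example of the per-frame bandwidth split, using the "typical"
// 13 900 avatar bytes per frame quoted in the AvatarMixerSlave comments.
var maxAvatarBytesPerFrame = 13900;
var priorityFraction = Math.min(Math.max(0.0, 0.40), 1.0);  // clamped; default 0.40

var maxHeroBytesPerFrame = Math.floor(maxAvatarBytesPerFrame * priorityFraction);
print("Hero budget: " + maxHeroBytesPerFrame + " bytes/frame");  // 5560, close to the 5555 noted in the code
print("Everyone else: " + (maxAvatarBytesPerFrame - maxHeroBytesPerFrame) + " bytes/frame");  // 8340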

View file

@ -1766,14 +1766,14 @@ void DomainServer::processOctreeDataRequestMessage(QSharedPointer<ReceivedMessag
bool remoteHasExistingData { false };
QUuid id;
int version;
int dataVersion;
message->readPrimitive(&remoteHasExistingData);
if (remoteHasExistingData) {
constexpr size_t UUID_SIZE_BYTES = 16;
auto idData = message->read(UUID_SIZE_BYTES);
id = QUuid::fromRfc4122(idData);
message->readPrimitive(&version);
qCDebug(domain_server) << "Entity server does have existing data: ID(" << id << ") DataVersion(" << version << ")";
message->readPrimitive(&dataVersion);
qCDebug(domain_server) << "Entity server does have existing data: ID(" << id << ") DataVersion(" << dataVersion << ")";
} else {
qCDebug(domain_server) << "Entity server does not have existing data";
}
@ -1782,11 +1782,11 @@ void DomainServer::processOctreeDataRequestMessage(QSharedPointer<ReceivedMessag
auto reply = NLPacketList::create(PacketType::OctreeDataFileReply, QByteArray(), true, true);
OctreeUtils::RawEntityData data;
if (data.readOctreeDataInfoFromFile(entityFilePath)) {
if (data.id == id && data.version <= version) {
if (data.id == id && data.dataVersion <= dataVersion) {
qCDebug(domain_server) << "ES has sufficient octree data, not sending data";
reply->writePrimitive(false);
} else {
qCDebug(domain_server) << "Sending newer octree data to ES: ID(" << data.id << ") DataVersion(" << data.version << ")";
qCDebug(domain_server) << "Sending newer octree data to ES: ID(" << data.id << ") DataVersion(" << data.dataVersion << ")";
QFile file(entityFilePath);
if (file.open(QIODevice::ReadOnly)) {
reply->writePrimitive(true);

View file

@ -9,7 +9,7 @@
//
import QtQuick 2.7
import Qt.labs.folderlistmodel 2.1
import Qt.labs.folderlistmodel 2.2
import Qt.labs.settings 1.0
import QtQuick.Dialogs 1.2 as OriginalDialogs
import QtQuick.Controls 1.4 as QQC1
@ -320,6 +320,7 @@ ModalWindow {
FolderListModel {
id: folderListModel
nameFilters: selectionType.currentFilter
caseSensitive: false
showDirsFirst: true
showDotAndDotDot: false
showFiles: !root.selectDirectory

View file

@ -9,7 +9,7 @@
//
import QtQuick 2.7
import Qt.labs.folderlistmodel 2.1
import Qt.labs.folderlistmodel 2.2
import Qt.labs.settings 1.0
import QtQuick.Dialogs 1.2 as OriginalDialogs
import QtQuick.Controls 1.4 as QQC1
@ -285,6 +285,7 @@ TabletModalWindow {
FolderListModel {
id: folderListModel
nameFilters: selectionType.currentFilter
caseSensitive: false
showDirsFirst: true
showDotAndDotDot: false
showFiles: !root.selectDirectory

View file

@ -16,6 +16,8 @@ Rectangle {
property bool keyboardRaised: false
property bool punctuationMode: false
HifiConstants { id: hifi }
HifiControls.Keyboard {
id: keyboard
z: 1000
@ -48,6 +50,7 @@ Rectangle {
property var jointNames: []
property var currentAvatarSettings;
property bool wearablesFrozen;
function fetchAvatarModelName(marketId, avatar) {
var xmlhttp = new XMLHttpRequest();
@ -187,6 +190,8 @@ Rectangle {
updateCurrentAvatarInBookmarks(currentAvatar);
} else if (message.method === 'selectAvatarEntity') {
adjustWearables.selectWearableByID(message.entityID);
} else if (message.method === 'wearablesFrozenChanged') {
wearablesFrozen = message.wearablesFrozen;
}
}
@ -507,6 +512,7 @@ Rectangle {
}
SquareLabel {
id: adjustLabel
anchors.right: parent.right
anchors.verticalCenter: wearablesLabel.verticalCenter
glyphText: "\ue02e"
@ -515,6 +521,17 @@ Rectangle {
adjustWearables.open(currentAvatar);
}
}
SquareLabel {
anchors.right: adjustLabel.left
anchors.verticalCenter: wearablesLabel.verticalCenter
anchors.rightMargin: 15
glyphText: wearablesFrozen ? hifi.glyphs.lock : hifi.glyphs.unlock;
onClicked: {
emitSendToScript({'method' : 'toggleWearablesFrozen'});
}
}
}
Rectangle {

View file

@ -129,6 +129,7 @@ Item {
height: 40
// Anchors
anchors.top: avatarImage.top
anchors.topMargin: avatarImage.visible ? 18 : 0;
anchors.left: avatarImage.right
anchors.leftMargin: avatarImage.visible ? 5 : 0;
anchors.rightMargin: 5;

View file

@ -87,8 +87,19 @@ Rectangle {
}
function updateMyAvatarGainFromQML(sliderValue, isReleased) {
if (Users.getAvatarGain(myAvatarUuid) != sliderValue) {
Users.setAvatarGain(myAvatarUuid, sliderValue);
if (AudioScriptingInterface.getAvatarGain() != sliderValue) {
AudioScriptingInterface.setAvatarGain(sliderValue);
}
}
function updateInjectorGainFromQML(sliderValue, isReleased) {
if (AudioScriptingInterface.getInjectorGain() != sliderValue) {
AudioScriptingInterface.setInjectorGain(sliderValue); // server side
AudioScriptingInterface.setLocalInjectorGain(sliderValue); // client side
}
}
function updateSystemInjectorGainFromQML(sliderValue, isReleased) {
if (AudioScriptingInterface.getSystemInjectorGain() != sliderValue) {
AudioScriptingInterface.setSystemInjectorGain(sliderValue);
}
}
@ -382,6 +393,7 @@ Rectangle {
}
}
}
AudioControls.LoopbackAudio {
id: loopbackAudio
x: margins.paddings
@ -462,22 +474,22 @@ Rectangle {
}
Item {
id: gainContainer
id: avatarGainContainer
x: margins.paddings;
anchors.top: outputView.bottom;
anchors.topMargin: 10;
width: parent.width - margins.paddings*2
height: gainSliderTextMetrics.height
height: avatarGainSliderTextMetrics.height
HifiControlsUit.Slider {
id: gainSlider
id: avatarGainSlider
anchors.right: parent.right
height: parent.height
width: 200
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
value: Users.getAvatarGain(myAvatarUuid)
value: AudioScriptingInterface.getAvatarGain()
onValueChanged: {
updateMyAvatarGainFromQML(value, false);
}
@ -493,7 +505,7 @@ Rectangle {
// Do nothing.
}
onDoubleClicked: {
gainSlider.value = 0.0
avatarGainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
@ -507,13 +519,13 @@ Rectangle {
}
}
TextMetrics {
id: gainSliderTextMetrics
text: gainSliderText.text
font: gainSliderText.font
id: avatarGainSliderTextMetrics
text: avatarGainSliderText.text
font: avatarGainSliderText.font
}
RalewayRegular {
// The slider for my card is special, it controls the master gain
id: gainSliderText;
id: avatarGainSliderText;
text: "Avatar volume";
size: 16;
anchors.left: parent.left;
@ -523,15 +535,133 @@ Rectangle {
}
}
Item {
id: injectorGainContainer
x: margins.paddings;
width: parent.width - margins.paddings*2
height: injectorGainSliderTextMetrics.height
anchors.top: avatarGainContainer.bottom;
anchors.topMargin: 10;
HifiControlsUit.Slider {
id: injectorGainSlider
anchors.right: parent.right
height: parent.height
width: 200
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
value: AudioScriptingInterface.getInjectorGain()
onValueChanged: {
updateInjectorGainFromQML(value, false);
}
onPressedChanged: {
if (!pressed) {
updateInjectorGainFromQML(value, false);
}
}
MouseArea {
anchors.fill: parent
onWheel: {
// Do nothing.
}
onDoubleClicked: {
injectorGainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
mouse.accepted = false
}
onReleased: {
// the above mouse.accepted seems to make this
// never get called, nonetheless...
mouse.accepted = false
}
}
}
TextMetrics {
id: injectorGainSliderTextMetrics
text: injectorGainSliderText.text
font: injectorGainSliderText.font
}
RalewayRegular {
id: injectorGainSliderText;
text: "Environment volume";
size: 16;
anchors.left: parent.left;
color: hifi.colors.white;
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
}
Item {
id: systemInjectorGainContainer
x: margins.paddings;
width: parent.width - margins.paddings*2
height: systemInjectorGainSliderTextMetrics.height
anchors.top: injectorGainContainer.bottom;
anchors.topMargin: 10;
HifiControlsUit.Slider {
id: systemInjectorGainSlider
anchors.right: parent.right
height: parent.height
width: 200
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
value: AudioScriptingInterface.getSystemInjectorGain()
onValueChanged: {
updateSystemInjectorGainFromQML(value, false);
}
onPressedChanged: {
if (!pressed) {
updateSystemInjectorGainFromQML(value, false);
}
}
MouseArea {
anchors.fill: parent
onWheel: {
// Do nothing.
}
onDoubleClicked: {
systemInjectorGainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
mouse.accepted = false
}
onReleased: {
// the above mouse.accepted seems to make this
// never get called, nonetheless...
mouse.accepted = false
}
}
}
TextMetrics {
id: systemInjectorGainSliderTextMetrics
text: systemInjectorGainSliderText.text
font: systemInjectorGainSliderText.font
}
RalewayRegular {
id: systemInjectorGainSliderText;
text: "System Sound volume";
size: 16;
anchors.left: parent.left;
color: hifi.colors.white;
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
}
AudioControls.PlaySampleSound {
id: playSampleSound
x: margins.paddings
anchors.top: gainContainer.bottom;
anchors.top: systemInjectorGainContainer.bottom;
anchors.topMargin: 10;
visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
anchors { left: parent.left; leftMargin: margins.paddings }
}
}
}
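The audio settings panel now exposes three separate sliders (avatar volume, environment/injector volume, and system sound volume), each covering -60 to +20 in decibels and each backed by the AudioScriptingInterface getters and setters shown above. A hedged sketch of the same calls from script follows (in Interface client scripts this interface is exposed as Audio); note that, like the QML, the environment control sets both the server-side and the local injector gain.

// Sketch of the three gain controls behind the updated settings panel.
// Values are in dB, matching the sliders' -60..+20 range.
var avatarGainDb = 0.0;
var environmentGainDb = -6.0;
var systemGainDb = -12.0;

AudioScriptingInterface.setAvatarGain(avatarGainDb);              // other avatars (master avatar gain)
AudioScriptingInterface.setInjectorGain(environmentGainDb);       // injected sounds, server side
AudioScriptingInterface.setLocalInjectorGain(environmentGainDb);  // injected sounds, client side
AudioScriptingInterface.setSystemInjectorGain(systemGainDb);      // system sounds

// For reference, a dB setting corresponds to a linear gain ratio of 10^(dB / 20):
function dbToLinear(db) { return Math.pow(10.0, db / 20.0); }
print("-6 dB is a linear gain of about " + dbToLinear(-6.0).toFixed(2));  // 0.50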

View file

@ -17,17 +17,17 @@ import stylesUit 1.0
import controlsUit 1.0 as HifiControlsUit
RowLayout {
property bool audioLoopedBack: AudioScriptingInterface.getServerEcho();
property bool audioLoopedBack: AudioScriptingInterface.getLocalEcho();
function startAudioLoopback() {
if (!audioLoopedBack) {
audioLoopedBack = true;
AudioScriptingInterface.setServerEcho(true);
AudioScriptingInterface.setLocalEcho(true);
}
}
function stopAudioLoopback() {
if (audioLoopedBack) {
audioLoopedBack = false;
AudioScriptingInterface.setServerEcho(false);
AudioScriptingInterface.setLocalEcho(false);
}
}
@ -44,8 +44,11 @@ RowLayout {
}
HifiControlsUit.Button {
text: audioLoopedBack ? qsTr("STOP TESTING YOUR VOICE") : qsTr("TEST YOUR VOICE");
text: audioLoopedBack ? qsTr("STOP TESTING VOICE") : qsTr("TEST YOUR VOICE");
color: audioLoopedBack ? hifi.buttons.red : hifi.buttons.blue;
fontSize: 15;
width: 200;
height: 32;
onClicked: {
if (audioLoopedBack) {
loopbackTimer.stop();
@ -57,11 +60,11 @@ RowLayout {
}
}
RalewayRegular {
Layout.leftMargin: 2;
size: 14;
color: "white";
font.italic: true
text: audioLoopedBack ? qsTr("Speak in your input") : "";
}
// RalewayRegular {
// Layout.leftMargin: 2;
// size: 14;
// color: "white";
// font.italic: true
// text: audioLoopedBack ? qsTr("Speak in your input") : "";
// }
}

View file

@ -18,12 +18,29 @@ import TabletScriptingInterface 1.0
Rectangle {
HifiConstants { id: hifi; }
property var muted: AudioScriptingInterface.muted;
readonly property var level: AudioScriptingInterface.inputLevel;
property var pushToTalk: AudioScriptingInterface.pushToTalk;
property var pushingToTalk: AudioScriptingInterface.pushingToTalk;
property bool gated: false;
Component.onCompleted: {
AudioScriptingInterface.noiseGateOpened.connect(function() { gated = false; });
AudioScriptingInterface.noiseGateClosed.connect(function() { gated = true; });
HMD.displayModeChanged.connect(function() {
muted = AudioScriptingInterface.muted;
pushToTalk = AudioScriptingInterface.pushToTalk;
});
AudioScriptingInterface.mutedChanged.connect(function() {
muted = AudioScriptingInterface.muted;
});
AudioScriptingInterface.pushToTalkChanged.connect(function() {
pushToTalk = AudioScriptingInterface.pushToTalk;
});
AudioScriptingInterface.pushingToTalkChanged.connect(function() {
pushingToTalk = AudioScriptingInterface.pushingToTalk;
});
}
property bool standalone: false;
@ -67,10 +84,10 @@ Rectangle {
hoverEnabled: true;
scrollGestureEnabled: false;
onClicked: {
if (AudioScriptingInterface.pushToTalk) {
if (pushToTalk) {
return;
}
AudioScriptingInterface.muted = !AudioScriptingInterface.muted;
muted = !muted;
Tablet.playSound(TabletEnums.ButtonClick);
}
drag.target: dragTarget;
@ -84,16 +101,16 @@ Rectangle {
QtObject {
id: colors;
readonly property string unmuted: "#FFF";
readonly property string muted: "#E2334D";
readonly property string unmutedColor: "#FFF";
readonly property string mutedColor: "#E2334D";
readonly property string gutter: "#575757";
readonly property string greenStart: "#39A38F";
readonly property string greenEnd: "#1FC6A6";
readonly property string yellow: "#C0C000";
readonly property string red: colors.muted;
readonly property string red: colors.mutedColor;
readonly property string fill: "#55000000";
readonly property string border: standalone ? "#80FFFFFF" : "#55FFFFFF";
readonly property string icon: AudioScriptingInterface.muted ? muted : unmuted;
readonly property string icon: muted ? colors.mutedColor : unmutedColor;
}
Item {
@ -115,7 +132,7 @@ Rectangle {
readonly property string pushToTalkIcon: "../../../icons/tablet-icons/mic-ptt-i.svg";
id: image;
source: (AudioScriptingInterface.pushToTalk && !AudioScriptingInterface.pushingToTalk) ? pushToTalkIcon : AudioScriptingInterface.muted ? mutedIcon : unmutedIcon;
source: (pushToTalk && !pushingToTalk) ? pushToTalkIcon : muted ? mutedIcon : unmutedIcon;
width: 30;
height: 30;
@ -138,9 +155,7 @@ Rectangle {
Item {
id: status;
readonly property string color: AudioScriptingInterface.muted ? colors.muted : colors.unmuted;
visible: (AudioScriptingInterface.pushToTalk && !AudioScriptingInterface.pushingToTalk) || AudioScriptingInterface.muted;
visible: (pushToTalk && !pushingToTalk) || muted;
anchors {
left: parent.left;
@ -157,9 +172,9 @@ Rectangle {
verticalCenter: parent.verticalCenter;
}
color: parent.color;
color: colors.icon;
text: (AudioScriptingInterface.pushToTalk && !AudioScriptingInterface.pushingToTalk) ? (HMD.active ? "MUTED PTT" : "MUTED PTT-(T)") : (AudioScriptingInterface.muted ? "MUTED" : "MUTE");
text: (pushToTalk && !pushingToTalk) ? (HMD.active ? "MUTED PTT" : "MUTED PTT-(T)") : (muted ? "MUTED" : "MUTE");
font.pointSize: 12;
}
@ -169,9 +184,9 @@ Rectangle {
verticalCenter: parent.verticalCenter;
}
width: AudioScriptingInterface.pushToTalk && !AudioScriptingInterface.pushingToTalk ? (HMD.active ? 27 : 25) : 50;
width: pushToTalk && !pushingToTalk ? (HMD.active ? 27 : 25) : 50;
height: 4;
color: parent.color;
color: colors.icon;
}
Rectangle {
@ -180,9 +195,9 @@ Rectangle {
verticalCenter: parent.verticalCenter;
}
width: AudioScriptingInterface.pushToTalk && !AudioScriptingInterface.pushingToTalk ? (HMD.active ? 27 : 25) : 50;
width: pushToTalk && !pushingToTalk ? (HMD.active ? 27 : 25) : 50;
height: 4;
color: parent.color;
color: colors.icon;
}
}

View file

@ -56,16 +56,19 @@ RowLayout {
HifiConstants { id: hifi; }
HifiControlsUit.Button {
text: isPlaying ? qsTr("STOP TESTING YOUR SOUND") : qsTr("TEST YOUR SOUND");
text: isPlaying ? qsTr("STOP TESTING") : qsTr("TEST YOUR SOUND");
color: isPlaying ? hifi.buttons.red : hifi.buttons.blue;
onClicked: isPlaying ? stopSound() : playSound();
fontSize: 15;
width: 200;
height: 32;
}
RalewayRegular {
Layout.leftMargin: 2;
size: 14;
color: "white";
font.italic: true
text: isPlaying ? qsTr("Listen to your output") : "";
}
// RalewayRegular {
// Layout.leftMargin: 2;
// size: 14;
// color: "white";
// font.italic: true
// text: isPlaying ? qsTr("Listen to your output") : "";
// }
}

View file

@ -133,7 +133,7 @@ Item {
states: [
State {
name: AvatarPackagerState.main
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; videoEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerMain; visible: true }
PropertyChanges { target: avatarPackagerFooter; content: avatarPackagerMain.footer }
},
@ -229,7 +229,11 @@ Item {
}
function openDocs() {
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/create-avatars#how-to-package-your-avatar");
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/package-avatar.html");
}
function openVideo() {
Qt.openUrlExternally("https://youtu.be/zrkEowu_yps");
}
AvatarPackagerHeader {
@ -243,6 +247,9 @@ Item {
onDocsButtonClicked: {
avatarPackager.openDocs();
}
onVideoButtonClicked: {
avatarPackager.openVideo();
}
}
Item {

View file

@ -13,6 +13,7 @@ ShadowRectangle {
property string title: qsTr("Avatar Packager")
property alias docsEnabled: docs.visible
property alias videoEnabled: video.visible
property bool backButtonVisible: true // If false, is not visible and does not take up space
property bool backButtonEnabled: true // If false, is not visible but does not affect space
property bool canRename: false
@ -24,6 +25,7 @@ ShadowRectangle {
signal backButtonClicked
signal docsButtonClicked
signal videoButtonClicked
RalewayButton {
id: back
@ -126,6 +128,20 @@ ShadowRectangle {
}
}
RalewayButton {
id: video
visible: false
size: 28
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: docs.left
anchors.rightMargin: 16
text: qsTr("Video")
onClicked: videoButtonClicked()
}
RalewayButton {
id: docs
visible: false
@ -137,8 +153,6 @@ ShadowRectangle {
text: qsTr("Docs")
onClicked: {
docsButtonClicked();
}
onClicked: docsButtonClicked()
}
}

View file

@ -339,8 +339,8 @@ Item {
visible: AvatarPackagerCore.currentAvatarProject && AvatarPackagerCore.currentAvatarProject.hasErrors
anchors {
top: notForSaleMessage.bottom
topMargin: 16
top: notForSaleMessage.visible ? notForSaleMessage.bottom : infoMessage.bottom
bottom: showFilesText.top
horizontalCenter: parent.horizontalCenter
}

View file

@ -113,6 +113,7 @@ Rectangle {
} else if (prop === 'dimensions') {
scalespinner.set(wearable[prop].x / wearable.naturalDimensions.x);
}
modified = true;
}
}

View file

@ -2248,6 +2248,7 @@ Item {
if (sendAssetStep.selectedRecipientUserName === "") {
console.log("SendAsset: Script didn't specify a recipient username!");
sendAssetHome.visible = false;
root.nextActiveView = 'paymentFailure';
return;
}

View file

@ -664,7 +664,7 @@ Rectangle {
text: "LOG IN"
onClicked: {
sendToScript({method: 'needsLogIn_loginClicked'});
sendToScript({method: 'marketplace_loginClicked'});
}
}

View file

@ -9,7 +9,7 @@
//
import QtQuick 2.7
import Qt.labs.folderlistmodel 2.1
import Qt.labs.folderlistmodel 2.2
import Qt.labs.settings 1.0
import QtQuick.Dialogs 1.2 as OriginalDialogs
import QtQuick.Controls 1.4 as QQC1
@ -279,6 +279,7 @@ Rectangle {
FolderListModel {
id: folderListModel
nameFilters: selectionType.currentFilter
caseSensitive: false
showDirsFirst: true
showDotAndDotDot: false
showFiles: !root.selectDirectory

View file

@ -344,6 +344,7 @@ Item {
readonly property string stop_square: "\ue01e"
readonly property string avatarTPose: "\ue01f"
readonly property string lock: "\ue006"
readonly property string unlock: "\ue039"
readonly property string checkmark: "\ue020"
readonly property string leftRightArrows: "\ue021"
readonly property string hfc: "\ue022"

View file

@ -330,6 +330,7 @@ QtObject {
readonly property string stop_square: "\ue01e"
readonly property string avatarTPose: "\ue01f"
readonly property string lock: "\ue006"
readonly property string unlock: "\ue039"
readonly property string checkmark: "\ue020"
readonly property string leftRightArrows: "\ue021"
readonly property string hfc: "\ue022"

View file

@ -1211,10 +1211,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(&domainHandler, SIGNAL(connectedToDomain(QUrl)), SLOT(updateWindowTitle()));
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle()));
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, [this]() {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
if (tabletScriptingInterface) {
tabletScriptingInterface->setQmlTabletRoot(SYSTEM_TABLET, nullptr);
}
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
entityScriptingInterface->deleteEntity(getTabletScreenID());
entityScriptingInterface->deleteEntity(getTabletHomeButtonID());
@ -2896,11 +2892,19 @@ void Application::initializeGL() {
}
#if !defined(DISABLE_QML)
QStringList chromiumFlags;
// Bug 21993: disable microphone and camera input
chromiumFlags << "--use-fake-device-for-media-stream";
// Disable signed distance field font rendering on ATI/AMD GPUs, due to
// https://highfidelity.manuscript.com/f/cases/13677/Text-showing-up-white-on-Marketplace-app
std::string vendor{ (const char*)glGetString(GL_VENDOR) };
if ((vendor.find("AMD") != std::string::npos) || (vendor.find("ATI") != std::string::npos)) {
qputenv("QTWEBENGINE_CHROMIUM_FLAGS", QByteArray("--disable-distance-field-text"));
chromiumFlags << "--disable-distance-field-text";
}
// Ensure all Qt webengine processes launched from us have the appropriate command line flags
if (!chromiumFlags.empty()) {
qputenv("QTWEBENGINE_CHROMIUM_FLAGS", chromiumFlags.join(' ').toLocal8Bit());
}
#endif

View file

@ -55,7 +55,7 @@ static QStringList HAND_MAPPING_SUFFIXES = {
"HandThumb1",
};
const QUrl DEFAULT_DOCS_URL = QUrl("https://docs.highfidelity.com/create/avatars/create-avatars.html#create-your-own-avatar");
const QUrl PACKAGE_AVATAR_DOCS_BASE_URL = QUrl("https://docs.highfidelity.com/create/avatars/package-avatar.html");
AvatarDoctor::AvatarDoctor(const QUrl& avatarFSTFileUrl) :
_avatarFSTFileUrl(avatarFSTFileUrl) {
@ -85,53 +85,53 @@ void AvatarDoctor::startDiagnosing() {
const auto resourceLoaded = [this, resource](bool success) {
// MODEL
if (!success) {
_errors.push_back({ "Model file cannot be opened.", DEFAULT_DOCS_URL });
addError("Model file cannot be opened.", "missing-file");
emit complete(getErrors());
return;
}
_model = resource;
const auto model = resource.data();
const auto avatarModel = resource.data()->getHFMModel();
if (!avatarModel.originalURL.endsWith(".fbx")) {
_errors.push_back({ "Unsupported avatar model format.", DEFAULT_DOCS_URL });
if (!avatarModel.originalURL.toLower().endsWith(".fbx")) {
addError("Unsupported avatar model format.", "unsupported-format");
emit complete(getErrors());
return;
}
// RIG
if (avatarModel.joints.isEmpty()) {
_errors.push_back({ "Avatar has no rig.", DEFAULT_DOCS_URL });
addError("Avatar has no rig.", "no-rig");
} else {
auto jointNames = avatarModel.getJointNames();
if (avatarModel.joints.length() > NETWORKED_JOINTS_LIMIT) {
_errors.push_back({tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), DEFAULT_DOCS_URL });
addError(tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), "maximum-bone-limit");
}
// Avatar does not have Hips bone mapped
if (!jointNames.contains("Hips")) {
_errors.push_back({ "Hips are not mapped.", DEFAULT_DOCS_URL });
addError("Hips are not mapped.", "hips-not-mapped");
}
if (!jointNames.contains("Spine")) {
_errors.push_back({ "Spine is not mapped.", DEFAULT_DOCS_URL });
addError("Spine is not mapped.", "spine-not-mapped");
}
if (!jointNames.contains("Spine1")) {
_errors.push_back({ "Chest (Spine1) is not mapped.", DEFAULT_DOCS_URL });
addError("Chest (Spine1) is not mapped.", "chest-not-mapped");
}
if (!jointNames.contains("Neck")) {
_errors.push_back({ "Neck is not mapped.", DEFAULT_DOCS_URL });
addError("Neck is not mapped.", "neck-not-mapped");
}
if (!jointNames.contains("Head")) {
_errors.push_back({ "Head is not mapped.", DEFAULT_DOCS_URL });
addError("Head is not mapped.", "head-not-mapped");
}
if (!jointNames.contains("LeftEye")) {
if (jointNames.contains("RightEye")) {
_errors.push_back({ "LeftEye is not mapped.", DEFAULT_DOCS_URL });
addError("LeftEye is not mapped.", "eye-not-mapped");
} else {
_errors.push_back({ "Eyes are not mapped.", DEFAULT_DOCS_URL });
addError("Eyes are not mapped.", "eye-not-mapped");
}
} else if (!jointNames.contains("RightEye")) {
_errors.push_back({ "RightEye is not mapped.", DEFAULT_DOCS_URL });
addError("RightEye is not mapped.", "eye-not-mapped");
}
const auto checkJointAsymmetry = [jointNames] (const QStringList& jointMappingSuffixes) {
@ -159,13 +159,13 @@ void AvatarDoctor::startDiagnosing() {
};
if (checkJointAsymmetry(ARM_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical arm bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical arm bones.", "asymmetrical-bones");
}
if (checkJointAsymmetry(HAND_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical hand bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical hand bones.", "asymmetrical-bones");
}
if (checkJointAsymmetry(LEG_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical leg bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical leg bones.", "asymmetrical-bones");
}
// Multiple skeleton root joints checkup
@ -177,7 +177,7 @@ void AvatarDoctor::startDiagnosing() {
}
if (skeletonRootJoints > 1) {
_errors.push_back({ "Multiple top-level joints found.", DEFAULT_DOCS_URL });
addError("Multiple top-level joints found.", "multiple-top-level-joints");
}
Rig rig;
@ -191,9 +191,9 @@ void AvatarDoctor::startDiagnosing() {
const float RECOMMENDED_MAX_HEIGHT = DEFAULT_AVATAR_HEIGHT * 1.5f;
if (avatarHeight < RECOMMENDED_MIN_HEIGHT) {
_errors.push_back({ "Avatar is possibly too short.", DEFAULT_DOCS_URL });
addError("Avatar is possibly too short.", "short-avatar");
} else if (avatarHeight > RECOMMENDED_MAX_HEIGHT) {
_errors.push_back({ "Avatar is possibly too tall.", DEFAULT_DOCS_URL });
addError("Avatar is possibly too tall.", "tall-avatar");
}
// HipsNotOnGround
@ -204,7 +204,7 @@ void AvatarDoctor::startDiagnosing() {
const auto hipJoint = avatarModel.joints.at(avatarModel.getJointIndex("Hips"));
if (hipsPosition.y < HIPS_GROUND_MIN_Y) {
_errors.push_back({ "Hips are on ground.", DEFAULT_DOCS_URL });
addError("Hips are on ground.", "hips-on-ground");
}
}
}
@ -223,7 +223,7 @@ void AvatarDoctor::startDiagnosing() {
const auto hipsToSpine = glm::length(hipsPosition - spinePosition);
const auto spineToChest = glm::length(spinePosition - chestPosition);
if (hipsToSpine < HIPS_SPINE_CHEST_MIN_SEPARATION && spineToChest < HIPS_SPINE_CHEST_MIN_SEPARATION) {
_errors.push_back({ "Hips/Spine/Chest overlap.", DEFAULT_DOCS_URL });
addError("Hips/Spine/Chest overlap.", "overlap-error");
}
}
}
@ -240,21 +240,21 @@ void AvatarDoctor::startDiagnosing() {
const auto& uniqueJointValues = jointValues.toSet();
for (const auto& jointName: uniqueJointValues) {
if (jointValues.count(jointName) > 1) {
_errors.push_back({ tr("%1 is mapped multiple times.").arg(jointName), DEFAULT_DOCS_URL });
addError(tr("%1 is mapped multiple times.").arg(jointName), "mapped-multiple-times");
}
}
}
if (!isDescendantOfJointWhenJointsExist("Spine", "Hips")) {
_errors.push_back({ "Spine is not a child of Hips.", DEFAULT_DOCS_URL });
addError("Spine is not a child of Hips.", "spine-not-child");
}
if (!isDescendantOfJointWhenJointsExist("Spine1", "Spine")) {
_errors.push_back({ "Spine1 is not a child of Spine.", DEFAULT_DOCS_URL });
addError("Spine1 is not a child of Spine.", "spine1-not-child");
}
if (!isDescendantOfJointWhenJointsExist("Head", "Spine1")) {
_errors.push_back({ "Head is not a child of Spine1.", DEFAULT_DOCS_URL });
addError("Head is not a child of Spine1.", "head-not-child");
}
}
@ -300,7 +300,7 @@ void AvatarDoctor::startDiagnosing() {
connect(resource.data(), &GeometryResource::finished, this, resourceLoaded);
}
} else {
_errors.push_back({ "Model file cannot be opened", DEFAULT_DOCS_URL });
addError("Model file cannot be opened", "missing-file");
emit complete(getErrors());
}
}
@ -345,7 +345,7 @@ void AvatarDoctor::diagnoseTextures() {
QUrl(avatarModel.originalURL)).resolved(QUrl("textures"));
if (texturesFound == 0) {
_errors.push_back({ tr("No textures assigned."), DEFAULT_DOCS_URL });
addError(tr("No textures assigned."), "no-textures-assigned");
}
if (!externalTextures.empty()) {
@ -356,11 +356,10 @@ void AvatarDoctor::diagnoseTextures() {
auto checkTextureLoadingComplete = [this]() mutable {
if (_checkedTextureCount == _externalTextureCount) {
if (_missingTextureCount > 0) {
_errors.push_back({ tr("Missing %n texture(s).","", _missingTextureCount), DEFAULT_DOCS_URL });
addError(tr("Missing %n texture(s).","", _missingTextureCount), "missing-textures");
}
if (_unsupportedTextureCount > 0) {
_errors.push_back({ tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount),
DEFAULT_DOCS_URL });
addError(tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount), "unsupported-textures");
}
emit complete(getErrors());
@ -411,6 +410,12 @@ void AvatarDoctor::diagnoseTextures() {
}
}
void AvatarDoctor::addError(const QString& errorMessage, const QString& docFragment) {
QUrl documentationURL = PACKAGE_AVATAR_DOCS_BASE_URL;
documentationURL.setFragment(docFragment);
_errors.push_back({ errorMessage, documentationURL });
}
QVariantList AvatarDoctor::getErrors() const {
QVariantList result;
for (const auto& error : _errors) {

View file

@ -40,6 +40,8 @@ signals:
private:
void diagnoseTextures();
void addError(const QString& errorMessage, const QString& docFragment);
QUrl _avatarFSTFileUrl;
QVector<AvatarDiagnosticResult> _errors;

View file

@ -342,8 +342,11 @@ bool MyAvatar::getStrafeEnabled() const {
void MyAvatar::setDominantHand(const QString& hand) {
if (hand == DOMINANT_LEFT_HAND || hand == DOMINANT_RIGHT_HAND) {
_dominantHand.set(hand);
emit dominantHandChanged(hand);
bool changed = (hand != _dominantHand.get());
if (changed) {
_dominantHand.set(hand);
emit dominantHandChanged(hand);
}
}
}
@ -2423,7 +2426,19 @@ void MyAvatar::clearWornAvatarEntities() {
}
}
/**jsdoc
* Information about an avatar entity.
* <table>
* <thead>
* <tr><th>Property</th><th>Type</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>id</code></td><td>Uuid</td><td>Entity ID.</td></tr>
* <tr><td><code>properties</code></td><td>{@link Entities.EntityProperties}</td><td>Entity properties.</td></tr>
* </tbody>
* </table>
* @typedef {object} MyAvatar.AvatarEntityData
*/
QVariantList MyAvatar::getAvatarEntitiesVariant() {
// NOTE: this method is NOT efficient
QVariantList avatarEntitiesData;
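A minimal script-side sketch of reading this data, assuming the method is exposed to scripts as MyAvatar.getAvatarEntitiesVariant() and returns an array of MyAvatar.AvatarEntityData objects:
// Hypothetical usage; the script-facing name is assumed from the C++ declaration above.
var avatarEntities = MyAvatar.getAvatarEntitiesVariant();
avatarEntities.forEach(function (entry) {
    print("Avatar entity " + entry.id + " is of type " + entry.properties.type);
});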
@ -3606,11 +3621,6 @@ void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
QUuid oldSessionID = getSessionUUID();
Avatar::setSessionUUID(sessionUUID);
QUuid newSessionID = getSessionUUID();
if (DependencyManager::get<NodeList>()->getSessionUUID().isNull()) {
// we don't actually have a connection to a domain right now
// so there is no need to queue AvatarEntity messages --> bail early
return;
}
if (newSessionID != oldSessionID) {
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
@ -3619,6 +3629,7 @@ void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
_avatarEntitiesLock.withReadLock([&] {
avatarEntityIDs = _packedAvatarEntityData.keys();
});
bool sendPackets = !DependencyManager::get<NodeList>()->getSessionUUID().isNull();
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
entityTree->withWriteLock([&] {
for (const auto& entityID : avatarEntityIDs) {
@ -3626,12 +3637,14 @@ void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
if (!entity) {
continue;
}
// update OwningAvatarID so entity can be identified as "ours" later
entity->setOwningAvatarID(newSessionID);
// NOTE: each attached AvatarEntity should already have the correct updated parentID
// via magic in SpatiallyNestable, but when an AvatarEntity IS parented to MyAvatar
// we need to update the "packedAvatarEntityData" we send to the avatar-mixer
// so that others will get the updated state.
if (entity->getParentID() == newSessionID) {
// NOTE: each attached AvatarEntity already has the correct updated parentID
// via magic in SpatiallyNestable, hence we check against newSessionID
if (sendPackets && entity->getParentID() == newSessionID) {
// but when we have a real session and the AvatarEntity is parented to MyAvatar
// we need to update the "packedAvatarEntityData" sent to the avatar-mixer
// because it contains a stale parentID somewhere deep inside
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
}
}
@ -3717,6 +3730,12 @@ void MyAvatar::clearScaleRestriction() {
_haveReceivedHeightLimitsFromDomain = false;
}
/**jsdoc
* A teleport target.
* @typedef {object} MyAvatar.GoToProperties
* @property {Vec3} position - The avatar's new position.
* @property {Quat} [orientation] - The avatar's new orientation.
*/
void MyAvatar::goToLocation(const QVariant& propertiesVar) {
qCDebug(interfaceapp, "MyAvatar QML goToLocation");
auto properties = propertiesVar.toMap();
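A minimal sketch of the scripted call this documents, passing a MyAvatar.GoToProperties object (position required, orientation optional):
// Teleport the avatar to a new position, keeping the current orientation.
MyAvatar.goToLocation({ position: { x: 10, y: 0, z: 5 } });
// Or supply an explicit orientation as well.
MyAvatar.goToLocation({ position: { x: 10, y: 0, z: 5 }, orientation: MyAvatar.orientation });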
@ -4203,6 +4222,13 @@ void MyAvatar::setCollisionWithOtherAvatarsFlags() {
_characterController.setPendingFlagsUpdateCollisionMask();
}
/**jsdoc
* A collision capsule is a cylinder with hemispherical ends. It is often used to approximate the extents of an avatar.
* @typedef {object} MyAvatar.CollisionCapsule
* @property {Vec3} start - The bottom end of the cylinder, excluding the bottom hemisphere.
* @property {Vec3} end - The top end of the cylinder, excluding the top hemisphere.
* @property {number} radius - The radius of the cylinder and the hemispheres.
*/
void MyAvatar::updateCollisionCapsuleCache() {
glm::vec3 start, end;
float radius;
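A hedged sketch of inspecting the capsule described by MyAvatar.CollisionCapsule, assuming it is exposed to scripts as MyAvatar.getCollisionCapsule():
// Assumed accessor; prints the capsule's radius and end points.
var capsule = MyAvatar.getCollisionCapsule();
print("Capsule radius: " + capsule.radius);
print("Capsule start: " + JSON.stringify(capsule.start) + ", end: " + JSON.stringify(capsule.end));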
@ -5760,6 +5786,24 @@ void MyAvatar::addAvatarHandsToFlow(const std::shared_ptr<Avatar>& otherAvatar)
}
}
/**jsdoc
* Physics options to use in the flow simulation of a joint.
* @typedef {object} MyAvatar.FlowPhysicsOptions
* @property {boolean} [active=true] - <code>true</code> to enable flow on the joint, otherwise <code>false</code>.
* @property {number} [radius=0.01] - The thickness of segments and knots (needed for collisions).
* @property {number} [gravity=-0.0096] - Y-value of the gravity vector.
* @property {number} [inertia=0.8] - Rotational inertia multiplier.
* @property {number} [damping=0.85] - The amount of damping on joint oscillation.
* @property {number} [stiffness=0.0] - The stiffness of each thread.
* @property {number} [delta=0.55] - Delta time for every integration step.
*/
/**jsdoc
* Collision options to use in the flow simulation of a joint.
* @typedef {object} MyAvatar.FlowCollisionsOptions
* @property {string} [type="sphere"] - Currently, only <code>"sphere"</code> is supported.
* @property {number} [radius=0.05] - Collision sphere radius.
* @property {number} [offset=Vec3.ZERO] - Offset of the collision sphere from the joint.
*/
void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& physicsConfig, const QVariantMap& collisionsConfig) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "useFlow",
@ -5809,7 +5853,7 @@ void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& phys
}
auto collisionJoints = collisionsConfig.keys();
if (collisionJoints.size() > 0) {
collisionSystem.resetCollisions();
collisionSystem.clearSelfCollisions();
for (auto &jointName : collisionJoints) {
int jointIndex = getJointIndex(jointName);
FlowCollisionSettings collisionsSettings;
@ -5824,9 +5868,43 @@ void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& phys
collisionSystem.addCollisionSphere(jointIndex, collisionsSettings);
}
}
flow.updateScale();
}
}
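A minimal sketch of enabling flow from a script with the options documented above; the group name "hair" and joint name "HeadTop_End" are illustrative assumptions, not values required by the API:
// Enable flow with collisions. Physics options are keyed by joint-group name,
// collision options by joint name (both names below are assumptions).
MyAvatar.useFlow(true, true,
    { "hair": { active: true, stiffness: 0.0, damping: 0.85, radius: 0.01 } },
    { "HeadTop_End": { type: "sphere", radius: 0.05, offset: { x: 0, y: 0, z: 0 } } });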
/**jsdoc
* Flow options currently used in flow simulation.
* @typedef {object} MyAvatar.FlowData
* @property {boolean} initialized - <code>true</code> if flow has been initialized for the current avatar, <code>false</code>
* if it hasn't.
* @property {boolean} active - <code>true</code> if flow is enabled, <code>false</code> if it isn't.
* @property {boolean} colliding - <code>true</code> if collisions are enabled, <code>false</code> if they aren't.
* @property {Object<GroupName, MyAvatar.FlowPhysicsData>} physicsData - The physics configuration for each group of joints
* that has been configured.
* @property {Object<JointName, MyAvatar.FlowCollisionsData>} collisions - The collisions configuration for each joint that
* has collisions configured.
* @property {Object<ThreadName, number[]>} threads - The threads that have been configured, with the first joint's name as the
* <code>ThreadName</code> and the value being an array of the indexes of all the joints in the thread.
*/
/**jsdoc
* A set of physics options currently used in flow simulation.
* @typedef {object} MyAvatar.FlowPhysicsData
* @property {boolean} active - <code>true</code> to enable flow on the joint, otherwise <code>false</code>.
* @property {number} radius - The thickness of segments and knots. (Needed for collisions.)
* @property {number} gravity - Y-value of the gravity vector.
* @property {number} inertia - Rotational inertia multiplier.
* @property {number} damping - The amount of damping on joint oscillation.
* @property {number} stiffness - The stiffness of each thread.
* @property {number} delta - Delta time for every integration step.
* @property {number[]} jointIndices - The indexes of the joints the options are applied to.
*/
/**jsdoc
* A set of collision options currently used in flow simulation.
* @typedef {object} MyAvatar.FlowCollisionsData
* @property {number} radius - Collision sphere radius.
* @property {number} offset - Offset of the collision sphere from the joint.
* @property {number} jointIndex - The index of the joint the options are applied to.
*/
QVariantMap MyAvatar::getFlowData() {
QVariantMap result;
if (QThread::currentThread() != thread()) {

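A short companion sketch for reading the state documented by MyAvatar.FlowData, assuming MyAvatar.getFlowData() is the script-facing accessor:
var flowData = MyAvatar.getFlowData();
print("Flow active: " + flowData.active + ", colliding: " + flowData.colliding);
print("Configured groups: " + JSON.stringify(Object.keys(flowData.physicsData)));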
File diff suppressed because it is too large Load diff

View file

@ -365,7 +365,7 @@ void OtherAvatar::handleChangedAvatarEntityData() {
// AVATAR ENTITY UPDATE FLOW
// - if queueEditEntityMessage() sees "AvatarEntity" HostType it calls _myAvatar->storeAvatarEntityDataPayload()
// - storeAvatarEntityDataPayload() saves the payload and flags the trait instance for the entity as updated,
// - ClientTraitsHandler::sendChangedTraitsToMixea() sends the entity bytes to the mixer which relays them to other interfaces
// - ClientTraitsHandler::sendChangedTraitsToMixer() sends the entity bytes to the mixer which relays them to other interfaces
// - AvatarHashMap::processBulkAvatarTraits() on other interfaces calls avatar->processTraitInstance()
// - AvatarData::processTraitInstance() calls storeAvatarEntityDataPayload(), which sets _avatarEntityDataChanged = true
// - (My)Avatar::simulate() calls handleChangedAvatarEntityData() every frame which checks _avatarEntityDataChanged
@ -495,6 +495,18 @@ void OtherAvatar::handleChangedAvatarEntityData() {
const QUuid NULL_ID = QUuid("{00000000-0000-0000-0000-000000000005}");
entity->setParentID(NULL_ID);
entity->setParentID(oldParentID);
if (entity->stillHasMyGrabAction()) {
// For this case: we want to ignore transform and velocity updates coming from the authoritative
// OtherAvatar because MyAvatar is grabbing the entity, and we expect the local grab state
// to have enough information to prevent simulation drift.
//
// Clever readers might realize this could cause problems. For example,
// if an ignored OtherAvatar were to simultaneously grab the object then there would be
// a noticeable discrepancy between participants in the distributed physics simulation;
// however, the difference would be stable and would not drift.
properties.clearTransformOrVelocityChanges();
}
if (entityTree->updateEntity(entityID, properties)) {
entity->updateLastEditedFromRemote();
} else {

View file

@ -224,10 +224,10 @@ void Audio::saveData() {
}
void Audio::loadData() {
_desktopMuted = _desktopMutedSetting.get();
_hmdMuted = _hmdMutedSetting.get();
_pttDesktop = _pttDesktopSetting.get();
_pttHMD = _pttHMDSetting.get();
setMutedDesktop(_desktopMutedSetting.get());
setMutedHMD(_hmdMutedSetting.get());
setPTTDesktop(_pttDesktopSetting.get());
setPTTHMD(_pttHMDSetting.get());
auto client = DependencyManager::get<AudioClient>().data();
QMetaObject::invokeMethod(client, "setMuted", Q_ARG(bool, isMuted()), Q_ARG(bool, false));
@ -377,6 +377,18 @@ void Audio::handlePushedToTalk(bool enabled) {
}
}
void Audio::setInputDevice(const QAudioDeviceInfo& device, bool isHMD) {
withWriteLock([&] {
_devices.chooseInputDevice(device, isHMD);
});
}
void Audio::setOutputDevice(const QAudioDeviceInfo& device, bool isHMD) {
withWriteLock([&] {
_devices.chooseOutputDevice(device, isHMD);
});
}
void Audio::setReverb(bool enable) {
withWriteLock([&] {
DependencyManager::get<AudioClient>()->setReverb(enable);
@ -389,14 +401,66 @@ void Audio::setReverbOptions(const AudioEffectOptions* options) {
});
}
void Audio::setInputDevice(const QAudioDeviceInfo& device, bool isHMD) {
void Audio::setAvatarGain(float gain) {
withWriteLock([&] {
_devices.chooseInputDevice(device, isHMD);
// ask the NodeList to set the master avatar gain
DependencyManager::get<NodeList>()->setAvatarGain(QUuid(), gain);
});
}
void Audio::setOutputDevice(const QAudioDeviceInfo& device, bool isHMD) {
withWriteLock([&] {
_devices.chooseOutputDevice(device, isHMD);
float Audio::getAvatarGain() {
return resultWithReadLock<float>([&] {
return DependencyManager::get<NodeList>()->getAvatarGain(QUuid());
});
}
void Audio::setInjectorGain(float gain) {
withWriteLock([&] {
// ask the NodeList to set the audio injector gain
DependencyManager::get<NodeList>()->setInjectorGain(gain);
});
}
float Audio::getInjectorGain() {
return resultWithReadLock<float>([&] {
return DependencyManager::get<NodeList>()->getInjectorGain();
});
}
void Audio::setLocalInjectorGain(float gain) {
withWriteLock([&] {
if (_localInjectorGain != gain) {
_localInjectorGain = gain;
// convert dB to amplitude
gain = fastExp2f(gain / 6.02059991f);
// quantize and limit to match NodeList::setInjectorGain()
gain = unpackFloatGainFromByte(packFloatGainToByte(gain));
DependencyManager::get<AudioClient>()->setLocalInjectorGain(gain);
}
});
}
float Audio::getLocalInjectorGain() {
return resultWithReadLock<float>([&] {
return _localInjectorGain;
});
}
void Audio::setSystemInjectorGain(float gain) {
withWriteLock([&] {
if (_systemInjectorGain != gain) {
_systemInjectorGain = gain;
// convert dB to amplitude
gain = fastExp2f(gain / 6.02059991f);
// quantize and limit to match NodeList::setInjectorGain()
gain = unpackFloatGainFromByte(packFloatGainToByte(gain));
DependencyManager::get<AudioClient>()->setSystemInjectorGain(gain);
}
});
}
float Audio::getSystemInjectorGain() {
return resultWithReadLock<float>([&] {
return _systemInjectorGain;
});
}
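The conversion above maps decibels to linear amplitude via 2^(dB / 6.0206), which is equivalent to 10^(dB / 20); a quick sketch of the same arithmetic in script form:
// dB-to-amplitude conversion equivalent to fastExp2f(gain / 6.02059991f) above.
function dbToAmplitude(db) {
    return Math.pow(10, db / 20); // e.g. -6 dB -> ~0.501, 0 dB -> 1.0, +20 dB -> 10.0
}
print(dbToAmplitude(-6).toFixed(3)); // "0.501"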

View file

@ -170,6 +170,66 @@ public:
*/
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
/**jsdoc
* Sets the avatar gain at the server. Units are decibels (dB).
* @function Audio.setAvatarGain
* @param {number} gain - The avatar gain, in dB.
*/
Q_INVOKABLE void setAvatarGain(float gain);
/**jsdoc
* Gets the avatar gain at the server.
* @function Audio.getAvatarGain
* @returns {number} The avatar gain, in dB.
*/
Q_INVOKABLE float getAvatarGain();
/**jsdoc
* Sets the injector gain at the server. Units are decibels (dB).
* @function Audio.setInjectorGain
* @param {number} gain - The injector gain, in dB.
*/
Q_INVOKABLE void setInjectorGain(float gain);
/**jsdoc
* Gets the injector gain at the server.
* @function Audio.getInjectorGain
* @returns {number} The injector gain, in dB.
*/
Q_INVOKABLE float getInjectorGain();
/**jsdoc
* Sets the local injector gain in the client. Units are decibels (dB).
* @function Audio.setLocalInjectorGain
* @param {number} gain - The local injector gain, in dB.
*/
Q_INVOKABLE void setLocalInjectorGain(float gain);
/**jsdoc
* Gets the local injector gain in the client.
* @function Audio.getLocalInjectorGain
* @returns {number} The local injector gain, in dB.
*/
Q_INVOKABLE float getLocalInjectorGain();
/**jsdoc
* Sets the injector gain for system sounds. Units are decibels (dB).
* @function Audio.setSystemInjectorGain
* @param {number} gain - The system injector gain, in dB.
*/
Q_INVOKABLE void setSystemInjectorGain(float gain);
/**jsdoc
* Gets the injector gain for system sounds.
* @function Audio.getSystemInjectorGain
* @returns {number} The system injector gain, in dB.
*/
Q_INVOKABLE float getSystemInjectorGain();
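A hedged usage sketch of the gain controls documented above, from a client script (the values are illustrative):
// Lower other avatars by 6 dB and quiet locally played injectors by 12 dB.
Audio.setAvatarGain(-6.0);
Audio.setLocalInjectorGain(-12.0);
print("System sound gain (dB): " + Audio.getSystemInjectorGain());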
/**jsdoc
* Starts making an audio recording of the audio being played in-world (i.e., not local-only audio) to a file in WAV format.
* @function Audio.startRecording
@ -350,6 +410,8 @@ private:
float _inputVolume { 1.0f };
float _inputLevel { 0.0f };
float _localInjectorGain { 0.0f }; // in dB
float _systemInjectorGain { 0.0f }; // in dB
bool _isClipping { false };
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _enableWarnWhenMuted { true };

View file

@ -1711,9 +1711,9 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
*
* @property {boolean} isFacingAvatar - If <code>true< / code>, the overlay is rotated to face the user's camera about an axis
* @property {boolean} isFacingAvatar - If <code>true</code>, the overlay is rotated to face the user's camera about an axis
* parallel to the user's avatar's "up" direction.
* @property {string} text="" - The text to display.Text does not automatically wrap; use <code>\n< / code> for a line break.
* @property {string} text="" - The text to display.Text does not automatically wrap; use <code>\n</code> for a line break.
* @property {number} textAlpha=1 - The text alpha value.
* @property {Color} backgroundColor=0,0,0 - The background color.
* @property {number} backgroundAlpha=0.7 - The background alpha value.
@ -1876,7 +1876,7 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {Vec3} localPosition - The local position of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>position</code>.
* @property {Quat} localRotation - The orientation of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* @property {boolean} ignorePickIntersection=false - If <code>true</code>, picks ignore the overlay. <code>ignoreRayIntersection</code> is a synonym.
* @property {boolean} drawInFront=false - If <code>true</code>, the overlay is rendered in front of objects in the world, but behind the HUD.
* @property {boolean} drawHUDLayer=false - If <code>true</code>, the overlay is rendered in front of everything, including the HUD.
@ -1916,7 +1916,7 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {Vec3} localPosition - The local position of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>position</code>.
* @property {Quat} localRotation - The orientation of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* @property {boolean} isSolid=false - Synonyms: <code>solid</code>, <code>isFilled</code>, and <code>filled</code>.
* Antonyms: <code>isWire</code> and <code>wire</code>.
* @property {boolean} ignorePickIntersection=false - If <code>true</code>, picks ignore the overlay. <code>ignoreRayIntersection</code> is a synonym.
@ -1927,46 +1927,46 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
*
* @property {number} startAt = 0 - The counter - clockwise angle from the overlay's x-axis that drawing starts at, in degrees.
* @property {number} endAt = 360 - The counter - clockwise angle from the overlay's x-axis that drawing ends at, in degrees.
* @property {number} outerRadius = 1 - The outer radius of the overlay, in meters.Synonym: <code>radius< / code>.
* @property {number} innerRadius = 0 - The inner radius of the overlay, in meters.
* @property {Color} color = 255, 255, 255 - The color of the overlay.Setting this value also sets the values of
* <code>innerStartColor< / code>, <code>innerEndColor< / code>, <code>outerStartColor< / code>, and <code>outerEndColor< / code>.
* @property {Color} startColor - Sets the values of <code>innerStartColor< / code> and <code>outerStartColor< / code>.
* <em>Write - only.< / em>
* @property {Color} endColor - Sets the values of <code>innerEndColor< / code> and <code>outerEndColor< / code>.
* <em>Write - only.< / em>
* @property {Color} innerColor - Sets the values of <code>innerStartColor< / code> and <code>innerEndColor< / code>.
* <em>Write - only.< / em>
* @property {Color} outerColor - Sets the values of <code>outerStartColor< / code> and <code>outerEndColor< / code>.
* <em>Write - only.< / em>
* @property {number} startAt = 0 - The counter-clockwise angle from the overlay's x-axis that drawing starts at, in degrees.
* @property {number} endAt = 360 - The counter-clockwise angle from the overlay's x-axis that drawing ends at, in degrees.
* @property {number} outerRadius = 1 - The outer radius of the overlay in meters. Synonym: <code>radius</code>.
* @property {number} innerRadius = 0 - The inner radius of the overlay in meters.
* @property {Color} color = 255, 255, 255 - The color of the overlay. Setting this value also sets the values of
* <code>innerStartColor</code>, <code>innerEndColor</code>, <code>outerStartColor</code>, and <code>outerEndColor</code>.
* @property {Color} startColor - Sets the values of <code>innerStartColor</code> and <code>outerStartColor</code>.
* <em>Write-only.</em>
* @property {Color} endColor - Sets the values of <code>innerEndColor</code> and <code>outerEndColor</code>.
* <em>Write-only.</em>
* @property {Color} innerColor - Sets the values of <code>innerStartColor</code> and <code>innerEndColor</code>.
* <em>Write-only.</em>
* @property {Color} outerColor - Sets the values of <code>outerStartColor</code> and <code>outerEndColor</code>.
* <em>Write-only.</em>
* @property {Color} innerStartColor - The color at the inner start point of the overlay.
* @property {Color} innerEndColor - The color at the inner end point of the overlay.
* @property {Color} outerStartColor - The color at the outer start point of the overlay.
* @property {Color} outerEndColor - The color at the outer end point of the overlay.
* @property {number} alpha = 0.5 - The opacity of the overlay, <code>0.0< / code> -<code>1.0< / code>.Setting this value also sets
* the values of <code>innerStartAlpha< / code>, <code>innerEndAlpha< / code>, <code>outerStartAlpha< / code>, and
* <code>outerEndAlpha< / code>.Synonym: <code>Alpha< / code>; <em>write - only< / em>.
* @property {number} startAlpha - Sets the values of <code>innerStartAlpha< / code> and <code>outerStartAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} endAlpha - Sets the values of <code>innerEndAlpha< / code> and <code>outerEndAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} innerAlpha - Sets the values of <code>innerStartAlpha< / code> and <code>innerEndAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} outerAlpha - Sets the values of <code>outerStartAlpha< / code> and <code>outerEndAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} alpha = 0.5 - The opacity of the overlay, <code>0.0</code> &ndash; <code>1.0</code>. Setting this value also sets
* the values of <code>innerStartAlpha</code>, <code>innerEndAlpha</code>, <code>outerStartAlpha</code>, and
* <code>outerEndAlpha</code>. Synonym: <code>Alpha</code>; <em>write-only</em>.
* @property {number} startAlpha - Sets the values of <code>innerStartAlpha</code> and <code>outerStartAlpha</code>.
* <em>Write-only.</em>
* @property {number} endAlpha - Sets the values of <code>innerEndAlpha</code> and <code>outerEndAlpha</code>.
* <em>Write-only.</em>
* @property {number} innerAlpha - Sets the values of <code>innerStartAlpha</code> and <code>innerEndAlpha</code>.
* <em>Write-only.</em>
* @property {number} outerAlpha - Sets the values of <code>outerStartAlpha</code> and <code>outerEndAlpha</code>.
* <em>Write-only.</em>
* @property {number} innerStartAlpha = 0 - The alpha at the inner start point of the overlay.
* @property {number} innerEndAlpha = 0 - The alpha at the inner end point of the overlay.
* @property {number} outerStartAlpha = 0 - The alpha at the outer start point of the overlay.
* @property {number} outerEndAlpha = 0 - The alpha at the outer end point of the overlay.
*
* @property {boolean} hasTickMarks = false - If <code>true< / code>, tick marks are drawn.
* @property {boolean} hasTickMarks = false - If <code>true</code>, tick marks are drawn.
* @property {number} majorTickMarksAngle = 0 - The angle between major tick marks, in degrees.
* @property {number} minorTickMarksAngle = 0 - The angle between minor tick marks, in degrees.
* @property {number} majorTickMarksLength = 0 - The length of the major tick marks, in meters.A positive value draws tick marks
* @property {number} majorTickMarksLength = 0 - The length of the major tick marks, in meters. A positive value draws tick marks
* outwards from the inner radius; a negative value draws tick marks inwards from the outer radius.
* @property {number} minorTickMarksLength = 0 - The length of the minor tick marks, in meters.A positive value draws tick marks
* @property {number} minorTickMarksLength = 0 - The length of the minor tick marks, in meters. A positive value draws tick marks
* outwards from the inner radius; a negative value draws tick marks inwards from the outer radius.
* @property {Color} majorTickMarksColor = 0, 0, 0 - The color of the major tick marks.
* @property {Color} minorTickMarksColor = 0, 0, 0 - The color of the minor tick marks.

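A minimal sketch exercising a few of the ring properties documented above, assuming the overlay type name is "circle3d"; the position math and values are illustrative:
// Draw a partial ring 2 m in front of the avatar.
var ring = Overlays.addOverlay("circle3d", {
    position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -2 })),
    startAt: 0,
    endAt: 270,
    outerRadius: 0.5,
    innerRadius: 0.25,
    color: { red: 255, green: 255, blue: 255 },
    alpha: 0.5
});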
View file

@ -59,6 +59,46 @@ public:
float getMaxErrorOnLastSolve() { return _maxErrorOnLastSolve; }
/**jsdoc
* <p>Specifies the initial conditions of the IK solver.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>RelaxToUnderPoses</td><td>This is a blend: it is 15/16 <code>PreviousSolution</code>
* and 1/16 <code>UnderPoses</code>. This provides some of the benefits of using <code>UnderPoses</code> so that the
* underlying animation is still visible, while at the same time converging faster than using the
* <code>UnderPoses</code> as the only initial solution.</td></tr>
* <tr><td><code>1</code></td><td>RelaxToLimitCenterPoses</td><td>This is a blend: it is 15/16
* <code>PreviousSolution</code> and 1/16 <code>LimitCenterPoses</code>. This should converge quickly because it is
* close to the previous solution, but still provides the benefits of avoiding limb locking.</td></tr>
* <tr><td><code>2</code></td><td>PreviousSolution</td><td>The IK system will begin to solve from the position and
* orientation of each joint that resulted from the previous frame.<br />
* Pros: As the end effectors typically do not move much from frame to frame, this is likely to converge quickly
* to a valid solution.<br />
* Cons: If the previous solution resulted in an awkward or uncomfortable posture, the next frame will also be
* awkward and uncomfortable. It can also result in locked elbows and knees.</td></tr>
* <tr><td><code>3</code></td><td>UnderPoses</td><td>The IK occurs at one of the top-most layers. It has access to the
* full posture that was computed via canned animations and blends. We call this animated set of poses the "under
* pose". The under poses are what would be visible if IK was completely disabled. Using the under poses as the
* initial conditions of the CCD solve will cause some of the animated motion to be blended into the result of the
* IK. This can result in very natural results, especially if there are only a few IK targets enabled. On the other
* hand, because the under poses might be quite far from the desired end effector, it can converge slowly in some
* cases, causing it to never reach the IK target in the allotted number of iterations. Also, in situations where all
* of the IK targets are being controlled by external sensors, sometimes starting from the under poses can cause
* awkward motions from the underlying animations to leak into the IK result.</td></tr>
* <tr><td><code>4</code></td><td>LimitCenterPoses</td><td>This pose is taken to be the center of all the joint
* constraints. This can prevent the IK solution from getting locked or stuck at a particular constraint. For
* example, if the arm is pointing straight outward from the body, as the end effector moves towards the body, at
* some point the elbow should bend to accommodate. However, because the CCD solver is stuck at a local maximum, it
* will not rotate the elbow, unless the initial conditions already have the elbow bent, which is the case for
* <code>LimitCenterPoses</code>. When all the IK targets are enabled, this result will provide a consistent starting
* point for each IK solve, hopefully resulting in a consistent, natural result.</td></tr>
* </tbody>
* </table>
* @typedef {number} MyAvatar.AnimIKSolutionSource
*/
enum class SolutionSource {
RelaxToUnderPoses = 0,
RelaxToLimitCenterPoses,

View file
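For reference, the numeric values tabulated above, expressed as a plain script lookup (illustrative only, not an API object):
// MyAvatar.AnimIKSolutionSource values, as documented in the table above.
var IK_SOLUTION_SOURCE = {
    RelaxToUnderPoses: 0,
    RelaxToLimitCenterPoses: 1,
    PreviousSolution: 2,
    UnderPoses: 3,
    LimitCenterPoses: 4
};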

@ -24,6 +24,37 @@ class AnimOverlay : public AnimNode {
public:
friend class AnimTests;
/**jsdoc
* <p>Specifies sets of joints.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>FullBodyBoneSet</td><td>All joints.</td></tr>
* <tr><td><code>1</code></td><td>UpperBodyBoneSet</td><td>Only the "Spine" joint and its children.</td></tr>
* <tr><td><code>2</code></td><td>LowerBodyBoneSet</td><td>Only the leg joints and their children.</td></tr>
* <tr><td><code>3</code></td><td>LeftArmBoneSet</td><td>Joints that are the children of the "LeftShoulder"
* joint.</td></tr>
* <tr><td><code>4</code></td><td>RightArmBoneSet</td><td>Joints that are the children of the "RightShoulder"
* joint.</td></tr>
* <tr><td><code>5</code></td><td>AboveTheHeadBoneSet</td><td>Joints that are the children of the "Head"
* joint.</td></tr>
* <tr><td><code>6</code></td><td>BelowTheHeadBoneSet</td><td>Joints that are NOT the children of the "Head"
* joint.</td></tr>
* <tr><td><code>7</code></td><td>HeadOnlyBoneSet</td><td>The "Head" joint.</td></tr>
* <tr><td><code>8</code></td><td>SpineOnlyBoneSet</td><td>The "Spine" joint.</td></tr>
* <tr><td><code>9</code></td><td>EmptyBoneSet</td><td>No joints.</td></tr>
* <tr><td><code>10</code></td><td>LeftHandBoneSet</td><td>Joints that are the children of the "LeftHand"
* joint.</td></tr>
* <tr><td><code>11</code></td><td>RightHandBoneSet</td><td>Joints that are the children of the "RightHand"
* joint.</td></tr>
* <tr><td><code>12</code></td><td>HipsOnlyBoneSet</td><td>The "Hips" joint.</td></tr>
* <tr><td><code>13</code></td><td>BothFeetBoneSet</td><td>The "LeftFoot" and "RightFoot" joints.</td></tr>
* </tbody>
* </table>
* @typedef {number} MyAvatar.AnimOverlayBoneSet
*/
enum BoneSet {
FullBodyBoneSet = 0,
UpperBodyBoneSet,

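For reference, the bone set values tabulated above, expressed as a plain script lookup (illustrative only, not an API object):
// MyAvatar.AnimOverlayBoneSet values, as documented in the table above.
var BONE_SET = {
    FullBodyBoneSet: 0, UpperBodyBoneSet: 1, LowerBodyBoneSet: 2, LeftArmBoneSet: 3,
    RightArmBoneSet: 4, AboveTheHeadBoneSet: 5, BelowTheHeadBoneSet: 6, HeadOnlyBoneSet: 7,
    SpineOnlyBoneSet: 8, EmptyBoneSet: 9, LeftHandBoneSet: 10, RightHandBoneSet: 11,
    HipsOnlyBoneSet: 12, BothFeetBoneSet: 13
};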
View file

@ -67,17 +67,23 @@ void FlowCollisionSystem::addCollisionSphere(int jointIndex, const FlowCollision
auto collision = FlowCollisionSphere(jointIndex, settings, isTouch);
collision.setPosition(position);
if (isSelfCollision) {
_selfCollisions.push_back(collision);
if (!isTouch) {
_selfCollisions.push_back(collision);
} else {
_selfTouchCollisions.push_back(collision);
}
} else {
_othersCollisions.push_back(collision);
}
};
void FlowCollisionSystem::resetCollisions() {
_allCollisions.clear();
_othersCollisions.clear();
_selfTouchCollisions.clear();
_selfCollisions.clear();
}
FlowCollisionResult FlowCollisionSystem::computeCollision(const std::vector<FlowCollisionResult> collisions) {
FlowCollisionResult result;
if (collisions.size() > 1) {
@ -106,6 +112,10 @@ void FlowCollisionSystem::setScale(float scale) {
_selfCollisions[j]._radius = _selfCollisions[j]._initialRadius * scale;
_selfCollisions[j]._offset = _selfCollisions[j]._initialOffset * scale;
}
for (size_t j = 0; j < _selfTouchCollisions.size(); j++) {
_selfTouchCollisions[j]._radius = _selfTouchCollisions[j]._initialRadius * scale;
_selfTouchCollisions[j]._offset = _selfTouchCollisions[j]._initialOffset * scale;
}
};
std::vector<FlowCollisionResult> FlowCollisionSystem::checkFlowThreadCollisions(FlowThread* flowThread) {
@ -178,9 +188,9 @@ void FlowCollisionSystem::setCollisionSettingsByJoint(int jointIndex, const Flow
}
void FlowCollisionSystem::prepareCollisions() {
_allCollisions.clear();
_allCollisions.resize(_selfCollisions.size() + _othersCollisions.size());
std::copy(_selfCollisions.begin(), _selfCollisions.begin() + _selfCollisions.size(), _allCollisions.begin());
std::copy(_othersCollisions.begin(), _othersCollisions.begin() + _othersCollisions.size(), _allCollisions.begin() + _selfCollisions.size());
_allCollisions.insert(_allCollisions.end(), _selfCollisions.begin(), _selfCollisions.end());
_allCollisions.insert(_allCollisions.end(), _othersCollisions.begin(), _othersCollisions.end());
_allCollisions.insert(_allCollisions.end(), _selfTouchCollisions.begin(), _selfTouchCollisions.end());
_othersCollisions.clear();
}
@ -273,18 +283,20 @@ void FlowJoint::setRecoveryPosition(const glm::vec3& recoveryPosition) {
}
void FlowJoint::update(float deltaTime) {
glm::vec3 accelerationOffset = glm::vec3(0.0f);
if (_settings._stiffness > 0.0f) {
glm::vec3 recoveryVector = _recoveryPosition - _currentPosition;
float recoveryFactor = powf(_settings._stiffness, 3.0f);
accelerationOffset = recoveryVector * recoveryFactor;
}
FlowNode::update(deltaTime, accelerationOffset);
if (_anchored) {
if (!_isHelper) {
_currentPosition = _updatedPosition;
} else {
_currentPosition = _parentPosition;
if (_settings._active) {
glm::vec3 accelerationOffset = glm::vec3(0.0f);
if (_settings._stiffness > 0.0f) {
glm::vec3 recoveryVector = _recoveryPosition - _currentPosition;
float recoveryFactor = powf(_settings._stiffness, 3.0f);
accelerationOffset = recoveryVector * recoveryFactor;
}
FlowNode::update(deltaTime, accelerationOffset);
if (_anchored) {
if (!_isHelper) {
_currentPosition = _updatedPosition;
} else {
_currentPosition = _parentPosition;
}
}
}
};
@ -674,6 +686,14 @@ bool Flow::updateRootFramePositions(const AnimPoseVec& absolutePoses, size_t thr
return true;
}
void Flow::updateCollisionJoint(FlowCollisionSphere& collision, AnimPoseVec& absolutePoses) {
glm::quat jointRotation;
getJointPositionInWorldFrame(absolutePoses, collision._jointIndex, collision._position, _entityPosition, _entityRotation);
getJointRotationInWorldFrame(absolutePoses, collision._jointIndex, jointRotation, _entityRotation);
glm::vec3 worldOffset = jointRotation * collision._offset;
collision._position = collision._position + worldOffset;
}
void Flow::updateJoints(AnimPoseVec& relativePoses, AnimPoseVec& absolutePoses) {
updateAbsolutePoses(relativePoses, absolutePoses);
for (auto &jointData : _flowJointData) {
@ -695,11 +715,11 @@ void Flow::updateJoints(AnimPoseVec& relativePoses, AnimPoseVec& absolutePoses)
}
auto &selfCollisions = _collisionSystem.getSelfCollisions();
for (auto &collision : selfCollisions) {
glm::quat jointRotation;
getJointPositionInWorldFrame(absolutePoses, collision._jointIndex, collision._position, _entityPosition, _entityRotation);
getJointRotationInWorldFrame(absolutePoses, collision._jointIndex, jointRotation, _entityRotation);
glm::vec3 worldOffset = jointRotation * collision._offset;
collision._position = collision._position + worldOffset;
updateCollisionJoint(collision, absolutePoses);
}
auto &selfTouchCollisions = _collisionSystem.getSelfTouchCollisions();
for (auto &collision : selfTouchCollisions) {
updateCollisionJoint(collision, absolutePoses);
}
_collisionSystem.prepareCollisions();
}
@ -710,7 +730,7 @@ void Flow::setJoints(AnimPoseVec& relativePoses, const std::vector<bool>& overri
for (int jointIndex : joints) {
auto &joint = _flowJointData[jointIndex];
if (jointIndex >= 0 && jointIndex < (int)relativePoses.size() && !overrideFlags[jointIndex]) {
relativePoses[jointIndex].rot() = joint.getCurrentRotation();
relativePoses[jointIndex].rot() = joint.getSettings()._active ? joint.getCurrentRotation() : joint.getInitialRotation();
}
}
}

View file

@ -140,6 +140,7 @@ public:
std::vector<FlowCollisionResult> checkFlowThreadCollisions(FlowThread* flowThread);
std::vector<FlowCollisionSphere>& getSelfCollisions() { return _selfCollisions; };
std::vector<FlowCollisionSphere>& getSelfTouchCollisions() { return _selfTouchCollisions; };
void setOthersCollisions(const std::vector<FlowCollisionSphere>& othersCollisions) { _othersCollisions = othersCollisions; }
void prepareCollisions();
void resetCollisions();
@ -150,9 +151,11 @@ public:
void setActive(bool active) { _active = active; }
bool getActive() const { return _active; }
const std::vector<FlowCollisionSphere>& getCollisions() const { return _selfCollisions; }
void clearSelfCollisions() { _selfCollisions.clear(); }
protected:
std::vector<FlowCollisionSphere> _selfCollisions;
std::vector<FlowCollisionSphere> _othersCollisions;
std::vector<FlowCollisionSphere> _selfTouchCollisions;
std::vector<FlowCollisionSphere> _allCollisions;
float _scale { 1.0f };
bool _active { false };
@ -210,7 +213,7 @@ public:
bool isHelper() const { return _isHelper; }
const FlowPhysicsSettings& getSettings() { return _settings; }
void setSettings(const FlowPhysicsSettings& settings) { _settings = settings; }
void setSettings(const FlowPhysicsSettings& settings) { _settings = settings; _initialRadius = _settings._radius; }
const glm::vec3& getCurrentPosition() const { return _currentPosition; }
int getIndex() const { return _index; }
@ -222,6 +225,7 @@ public:
const glm::quat& getCurrentRotation() const { return _currentRotation; }
const glm::vec3& getCurrentTranslation() const { return _initialTranslation; }
const glm::vec3& getInitialPosition() const { return _initialPosition; }
const glm::quat& getInitialRotation() const { return _initialRotation; }
bool isColliding() const { return _colliding; }
protected:
@ -297,6 +301,7 @@ public:
void setPhysicsSettingsForGroup(const QString& group, const FlowPhysicsSettings& settings);
const std::map<QString, FlowPhysicsSettings>& getGroupSettings() const { return _groupSettings; }
void cleanUp();
void updateScale() { setScale(_scale); }
signals:
void onCleanup();
@ -311,6 +316,7 @@ private:
void setJoints(AnimPoseVec& relativePoses, const std::vector<bool>& overrideFlags);
void updateJoints(AnimPoseVec& relativePoses, AnimPoseVec& absolutePoses);
void updateCollisionJoint(FlowCollisionSphere& collision, AnimPoseVec& absolutePoses);
bool updateRootFramePositions(const AnimPoseVec& absolutePoses, size_t threadIndex);
void updateGroupSettings(const QString& group, const FlowPhysicsSettings& settings);
void setScale(float scale);

View file

@ -16,6 +16,27 @@ const float HACK_HMD_TARGET_WEIGHT = 8.0f;
class IKTarget {
public:
/**jsdoc
* <p>An IK target type.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>RotationAndPosition</td><td>Attempt to reach the rotation and position end
* effector.</td></tr>
* <tr><td><code>1</code></td><td>RotationOnly</td><td>Attempt to reach the end effector rotation only.</td></tr>
* <tr><td><code>2</code></td><td>HmdHead</td><td><strong>Deprecated:</strong> A special mode of IK that would attempt
* to prevent unnecessary bending of the spine.</td></tr>
* <tr><td><code>3</code></td><td>HipsRelativeRotationAndPosition</td><td>Attempt to reach a rotation and position end
* effector that is not in absolute rig coordinates but is offset by the avatar hips translation.</td></tr>
* <tr><td><code>4</code></td><td>Spline</td><td>Use a cubic Hermite spline to model the human spine. This prevents
* kinks in the spine and allows for a small amount of stretch and squash.</td></tr>
* <tr><td><code>5</code></td><td>Unknown</td><td>IK is disabled.</td></tr>
* </tbody>
* </table>
* @typedef {number} MyAvatar.IKTargetType
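* @example <caption>A minimal sketch (not from the original source): read the IK target type currently used for the head
* via an animation state handler; the <code>headType</code> property is described in
* {@link MyAvatar.AnimStateDictionary|AnimStateDictionary}.</caption>
* var handler = MyAvatar.addAnimationStateHandler(function (props) {
*     print("Head IK target type: " + props.headType);
*     return props;  // Return the dictionary unchanged.
* }, ["headType"]);
* Script.setTimeout(function () {
*     MyAvatar.removeAnimationStateHandler(handler);
* }, 5000);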
*/
enum class Type {
RotationAndPosition,
RotationOnly,

View file

@ -88,6 +88,218 @@ static const QString MAIN_STATE_MACHINE_RIGHT_HAND_ROTATION("mainStateMachineRig
static const QString MAIN_STATE_MACHINE_RIGHT_HAND_POSITION("mainStateMachineRightHandPosition");
/**jsdoc
* <p>An <code>AnimStateDictionary</code> object may have the following properties. It may also have other properties, set by
* scripts.</p>
* <p><strong>Warning:</strong> These properties are subject to change.</p>
* <table>
* <thead>
* <tr><th>Name</th><th>Type</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>userAnimNone</code></td><td>boolean</td><td><code>true</code> when no user overrideAnimation is
* playing.</td></tr>
* <tr><td><code>userAnimA</code></td><td>boolean</td><td><code>true</code> when a user overrideAnimation is
* playing.</td></tr>
* <tr><td><code>userAnimB</code></td><td>boolean</td><td><code>true</code> when a user overrideAnimation is
* playing.</td></tr>
*
* <tr><td><code>sine</code></td><td>number</td><td>Oscillating sine wave.</td></tr>
* <tr><td><code>moveForwardSpeed</code></td><td>number</td><td>Controls the blend between the various forward walking
* &amp; running animations.</td></tr>
* <tr><td><code>moveBackwardSpeed</code></td><td>number</td><td>Controls the blend between the various backward walking
* &amp; running animations.</td></tr>
* <tr><td><code>moveLateralSpeed</code></td><td>number</td><td>Controls the blend between the various sidestep walking
* &amp; running animations.</td></tr>
*
* <tr><td><code>isMovingForward</code></td><td>boolean</td><td><code>true</code> if the avatar is moving
* forward.</td></tr>
* <tr><td><code>isMovingBackward</code></td><td>boolean</td><td><code>true</code> if the avatar is moving
* backward.</td></tr>
* <tr><td><code>isMovingRight</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the
* right.</td></tr>
* <tr><td><code>isMovingLeft</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the
* left.</td></tr>
* <tr><td><code>isMovingRightHmd</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the right
* while the user is in HMD mode.</td></tr>
* <tr><td><code>isMovingLeftHmd</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the left while
* the user is in HMD mode.</td></tr>
* <tr><td><code>isNotMoving</code></td><td>boolean</td><td><code>true</code> if the avatar is stationary.</td></tr>
*
* <tr><td><code>isTurningRight</code></td><td>boolean</td><td><code>true</code> if the avatar is turning
* clockwise.</td></tr>
* <tr><td><code>isTurningLeft</code></td><td>boolean</td><td><code>true</code> if the avatar is turning
* counter-clockwise.</td></tr>
* <tr><td><code>isNotTurning</code></td><td>boolean</td><td><code>true</code> if the avatar is not turning.</td></tr>
* <tr><td><code>isFlying</code></td><td>boolean</td><td><code>true</code> if the avatar is flying.</td></tr>
* <tr><td><code>isNotFlying</code></td><td>boolean</td><td><code>true</code> if the avatar is not flying.</td></tr>
* <tr><td><code>isTakeoffStand</code></td><td>boolean</td><td><code>true</code> if the avatar is about to execute a
* standing jump.</td></tr>
* <tr><td><code>isTakeoffRun</code></td><td>boolean</td><td><code>true</code> if the avatar is about to execute a running
* jump.</td></tr>
* <tr><td><code>isNotTakeoff</code></td><td>boolean</td><td><code>true</code> if the avatar is not jumping.</td></tr>
* <tr><td><code>isInAirStand</code></td><td>boolean</td><td><code>true</code> if the avatar is in the air after a standing
* jump.</td></tr>
* <tr><td><code>isInAirRun</code></td><td>boolean</td><td><code>true</code> if the avatar is in the air after a running
* jump.</td></tr>
* <tr><td><code>isNotInAir</code></td><td>boolean</td><td><code>true</code> if the avatar is on the ground.</td></tr>
*
* <tr><td><code>inAirAlpha</code></td><td>number</td><td>Used to interpolate between the up, apex, and down in-air
* animations.</td></tr>
* <tr><td><code>ikOverlayAlpha</code></td><td>number</td><td>The blend between upper body and spline IK versus the
* underlying animation.</td></tr>
*
* <tr><td><code>headPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Head</code> joint in
* rig coordinates.</td></tr>
* <tr><td><code>headRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Head</code> joint in
* rig coordinates.</td></tr>
* <tr><td><code>headType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* head.</td></tr>
* <tr><td><code>headWeight</code></td><td>number</td><td>How strongly the head chain blends with the other IK
* chains.</td></tr>
*
* <tr><td><code>leftHandPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>LeftHand</code>
* joint in rig coordinates.</td></tr>
* <tr><td><code>leftHandRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>LeftHand</code>
* joint in rig coordinates.</td></tr>
* <tr><td><code>leftHandType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* left arm.</td></tr>
* <tr><td><code>leftHandPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the elbow angle is
* controlled by the <code>leftHandPoleVector</code> property value. Otherwise the elbow direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>leftHandPoleReferenceVector</code></td><td>{@link Vec3}</td><td>The direction of the elbow in the local
* coordinate system of the elbow.</td></tr>
* <tr><td><code>leftHandPoleVector</code></td><td>{@link Vec3}</td><td>The direction the elbow should point in rig
* coordinates.</td></tr>
*
* <tr><td><code>rightHandPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>RightHand</code>
* joint in rig coordinates.</td></tr>
* <tr><td><code>rightHandRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the
* <code>RightHand</code> joint in rig coordinates.</td></tr>
* <tr><td><code>rightHandType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for
* the right arm.</td></tr>
* <tr><td><code>rightHandPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the elbow angle is
* controlled by the <code>rightHandPoleVector</code> property value. Otherwise the elbow direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>rightHandPoleReferenceVector</code></td><td>{@link Vec3}</td><td>The direction of the elbow in the local
* coordinate system of the elbow.</td></tr>
* <tr><td><code>rightHandPoleVector</code></td><td>{@link Vec3}</td><td>The direction the elbow should point in rig
* coordinates.</td></tr>
*
* <tr><td><code>leftFootIKEnabled</code></td><td>boolean</td><td><code>true</code> if IK is enabled for the left
* foot.</td></tr>
* <tr><td><code>rightFootIKEnabled</code></td><td>boolean</td><td><code>true</code> if IK is enabled for the right
* foot.</td></tr>
*
* <tr><td><code>leftFootIKPositionVar</code></td><td>string</td><td>The name of the source for the desired position
* of the <code>LeftFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
* <tr><td><code>leftFootIKRotationVar</code></td><td>string</td><td>The name of the source for the desired rotation
* of the <code>LeftFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
* <tr><td><code>leftFootPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the knee angle is
* controlled by the <code>leftFootPoleVector</code> property value. Otherwise the knee direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>leftFootPoleVector</code></td><td>{@link Vec3}</td><td>The direction the knee should face in rig
* coordinates.</td></tr>
* <tr><td><code>rightFootIKPositionVar</code></td><td>string</td><td>The name of the source for the desired position
* of the <code>RightFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
* <tr><td><code>rightFootIKRotationVar</code></td><td>string</td><td>The name of the source for the desired rotation
* of the <code>RightFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
* <tr><td><code>rightFootPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the knee angle is
* controlled by the <code>rightFootPoleVector</code> property value. Otherwise the knee direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>rightFootPoleVector</code></td><td>{@link Vec3}</td><td>The direction the knee should face in rig
* coordinates.</td></tr>
*
* <tr><td><code>isTalking</code></td><td>boolean</td><td><code>true</code> if the avatar is talking.</td></tr>
* <tr><td><code>notIsTalking</code></td><td>boolean</td><td><code>true</code> if the avatar is not talking.</td></tr>
*
* <tr><td><code>solutionSource</code></td><td>{@link MyAvatar.AnimIKSolutionSource|AnimIKSolutionSource}</td>
* <td>Determines the initial conditions of the IK solver.</td></tr>
* <tr><td><code>defaultPoseOverlayAlpha</code></td><td>number</td><td>Controls the blend between the main animation state
* machine and the default pose. Mostly used during full body tracking so that walking &amp; jumping animations do not
* affect the IK of the figure.</td></tr>
* <tr><td><code>defaultPoseOverlayBoneSet</code></td><td>{@link MyAvatar.AnimOverlayBoneSet|AnimOverlayBoneSet}</td>
* <td>Specifies which bones will be replaced by the source overlay.</td></tr>
* <tr><td><code>hipsType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* hips.</td></tr>
* <tr><td><code>hipsPosition</code></td><td>{@link Vec3}</td><td>The desired position of <code>Hips</code> joint in rig
* coordinates.</td></tr>
* <tr><td><code>hipsRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Hips</code> joint in
* rig coordinates.</td></tr>
* <tr><td><code>spine2Type</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* <code>Spine2</code> joint.</td></tr>
* <tr><td><code>spine2Position</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Spine2</code> joint
* in rig coordinates.</td></tr>
* <tr><td><code>spine2Rotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Spine2</code>
* joint in rig coordinates.</td></tr>
*
* <tr><td><code>leftFootIKAlpha</code></td><td>number</td><td>Blends between full IK for the leg and the underlying
* animation.</td></tr>
* <tr><td><code>rightFootIKAlpha</code></td><td>number</td><td>Blends between full IK for the leg and the underlying
* animation.</td></tr>
* <tr><td><code>hipsWeight</code></td><td>number</td><td>How strongly the hips target blends with the IK solution for
* other IK chains.</td></tr>
* <tr><td><code>leftHandWeight</code></td><td>number</td><td>How strongly the left hand blends with IK solution of other
* IK chains.</td></tr>
* <tr><td><code>rightHandWeight</code></td><td>number</td><td>How strongly the right hand blends with IK solution of other
* IK chains.</td></tr>
* <tr><td><code>spine2Weight</code></td><td>number</td><td>How strongly the spine2 chain blends with the rest of the IK
* solution.</td></tr>
*
* <tr><td><code>leftHandOverlayAlpha</code></td><td>number</td><td>Used to blend in the animated hand gesture poses, such
* as point and thumbs up.</td></tr>
* <tr><td><code>leftHandGraspAlpha</code></td><td>number</td><td>Used to blend between an open hand and a closed hand.
* Usually changed as you squeeze the trigger of the hand controller.</td></tr>
* <tr><td><code>rightHandOverlayAlpha</code></td><td>number</td><td>Used to blend in the animated hand gesture poses,
* such as point and thumbs up.</td></tr>
* <tr><td><code>rightHandGraspAlpha</code></td><td>number</td><td>Used to blend between an open hand and a closed hand.
* Usually changed as you squeeze the trigger of the hand controller.</td></tr>
* <tr><td><code>isLeftIndexPoint</code></td><td>boolean</td><td><code>true</code> if the left hand should be
* pointing.</td></tr>
* <tr><td><code>isLeftThumbRaise</code></td><td>boolean</td><td><code>true</code> if the left hand should be
* thumbs-up.</td></tr>
* <tr><td><code>isLeftIndexPointAndThumbRaise</code></td><td>boolean</td><td><code>true</code> if the left hand should be
* pointing and thumbs-up.</td></tr>
* <tr><td><code>isLeftHandGrasp</code></td><td>boolean</td><td><code>true</code> if the left hand should be at rest,
* grasping the controller.</td></tr>
* <tr><td><code>isRightIndexPoint</code></td><td>boolean</td><td><code>true</code> if the right hand should be
* pointing.</td></tr>
* <tr><td><code>isRightThumbRaise</code></td><td>boolean</td><td><code>true</code> if the right hand should be
* thumbs-up.</td></tr>
* <tr><td><code>isRightIndexPointAndThumbRaise</code></td><td>boolean</td><td><code>true</code> if the right hand should
* be pointing and thumbs-up.</td></tr>
* <tr><td><code>isRightHandGrasp</code></td><td>boolean</td><td><code>true</code> if the right hand should be at rest,
* grasping the controller.</td></tr>
*
* </tbody>
* </table>
* <p>Note: Rig coordinates are <code>+z</code> forward and <code>+y</code> up.</p>
* @typedef {object} MyAvatar.AnimStateDictionary
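* @example <caption>A minimal sketch (not from the original source): log a couple of animation state values while the
* avatar moves. Only the properties listed in the second argument are passed to the handler.</caption>
* var handler = MyAvatar.addAnimationStateHandler(function (props) {
*     print("isMovingForward: " + props.isMovingForward + ", moveForwardSpeed: " + props.moveForwardSpeed);
*     return props;  // Return the dictionary unchanged.
* }, ["isMovingForward", "moveForwardSpeed"]);
* Script.setTimeout(function () {
*     MyAvatar.removeAnimationStateHandler(handler);
* }, 5000);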
*/
// Note: The following animVars are intentionally not documented:
// - leftFootPosition
// - leftFootRotation
// - rightFootPosition
// - rightFootRotation
// Note: The following items aren't set in the code below but are still intentionally documented:
// - leftFootIKAlpha
// - rightFootIKAlpha
// - hipsWeight
// - leftHandWeight
// - rightHandWeight
// - spine2Weight
// - rightHandOverlayAlpha
// - rightHandGraspAlpha
// - leftHandOverlayAlpha
// - leftHandGraspAlpha
// - isRightIndexPoint
// - isRightThumbRaise
// - isRightIndexPointAndThumbRaise
// - isRightHandGrasp
// - isLeftIndexPoint
// - isLeftThumbRaise
// - isLeftIndexPointAndThumbRaise
// - isLeftHandGrasp
Rig::Rig() {
// Ensure thread-safe access to the rigRegistry.
std::lock_guard<std::mutex> guard(rigRegistryMutex);
@ -1210,7 +1422,8 @@ void Rig::updateAnimations(float deltaTime, const glm::mat4& rootTransform, cons
_networkAnimState.blendTime += deltaTime;
alpha = _computeNetworkAnimation ? (_networkAnimState.blendTime / TOTAL_BLEND_TIME) : (1.0f - (_networkAnimState.blendTime / TOTAL_BLEND_TIME));
alpha = glm::clamp(alpha, 0.0f, 1.0f);
for (size_t i = 0; i < _networkPoseSet._relativePoses.size(); i++) {
size_t numJoints = std::min(_networkPoseSet._relativePoses.size(), _internalPoseSet._relativePoses.size());
for (size_t i = 0; i < numJoints; i++) {
_networkPoseSet._relativePoses[i].blend(_internalPoseSet._relativePoses[i], alpha);
}
}

View file

@ -1052,7 +1052,7 @@ void AudioClient::setReverbOptions(const AudioEffectOptions* options) {
void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
// If there is server echo, reverb will be applied to the received audio stream so no need to have it here.
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
if (_muted || !_audioOutput || (!_shouldEchoLocally && !hasReverb)) {
if ((_muted && !_shouldEchoLocally) || !_audioOutput || (!_shouldEchoLocally && !hasReverb)) {
return;
}
@ -1368,7 +1368,9 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
memset(_localScratchBuffer, 0, bytesToRead);
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
float gain = options.volume;
bool isSystemSound = !options.positionSet && !options.ambisonic;
float gain = options.volume * (isSystemSound ? _systemInjectorGain : _localInjectorGain);
if (options.ambisonic) {

View file

@ -241,6 +241,8 @@ public slots:
void setInputVolume(float volume, bool emitSignal = true);
void setReverb(bool reverb);
void setReverbOptions(const AudioEffectOptions* options);
void setLocalInjectorGain(float gain) { _localInjectorGain = gain; };
void setSystemInjectorGain(float gain) { _systemInjectorGain = gain; };
void outputNotify();
@ -395,6 +397,8 @@ private:
int16_t* _outputScratchBuffer { NULL };
// for local audio (used by audio injectors thread)
std::atomic<float> _localInjectorGain { 1.0f };
std::atomic<float> _systemInjectorGain { 1.0f };
float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };

View file

@ -372,13 +372,6 @@ bool Avatar::applyGrabChanges() {
target->removeGrab(grab);
_avatarGrabs.erase(itr);
grabAddedOrRemoved = true;
if (isMyAvatar()) {
const EntityItemPointer& entity = std::dynamic_pointer_cast<EntityItem>(target);
if (entity && entity->getEntityHostType() == entity::HostType::AVATAR && entity->getSimulationOwner().getID() == getID()) {
EntityItemProperties properties = entity->getProperties();
sendPacket(entity->getID());
}
}
} else {
undeleted.push_back(id);
}

View file

@ -127,7 +127,12 @@ private:
class Avatar : public AvatarData, public scriptable::ModelProvider, public MetaModelPayload {
Q_OBJECT
// This property has JSDoc in MyAvatar.h.
/*jsdoc
* @comment IMPORTANT: The JSDoc for the following properties should be copied to MyAvatar.h.
*
* @property {Vec3} skeletonOffset - Can be used to apply a translation offset between the avatar's position and the
* registration point of the 3D model.
*/
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
public:
@ -175,7 +180,6 @@ public:
/// Returns the distance to use as a LOD parameter.
float getLODDistance() const;
virtual bool isMyAvatar() const override { return false; }
virtual void createOrb() { }
enum class LoadingStatus {
@ -196,36 +200,52 @@ public:
virtual QStringList getJointNames() const override;
/**jsdoc
* Gets the default rotation of a joint (in the current avatar) relative to its parent.
* <p>For information on the joint hierarchy used, see
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
* @function MyAvatar.getDefaultJointRotation
* @param {number} index
* @returns {Quat}
* @param {number} index - The joint index.
* @returns {Quat} The default rotation of the joint if the joint index is valid, otherwise {@link Quat(0)|Quat.IDENTITY}.
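* @example <caption>Sketch (assumes the avatar skeleton has a "Head" joint): report the default rotation of your avatar's
* head joint relative to its parent joint.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var defaultHeadRotation = MyAvatar.getDefaultJointRotation(headIndex);
* print("Default head rotation: " + JSON.stringify(Quat.safeEulerAngles(defaultHeadRotation)));  // Degrees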
*/
Q_INVOKABLE virtual glm::quat getDefaultJointRotation(int index) const;
/**jsdoc
* Gets the default translation of a joint (in the current avatar) relative to its parent, in model coordinates.
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
* <p>For information on the joint hierarchy used, see
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
* @function MyAvatar.getDefaultJointTranslation
* @param {number} index
* @returns {Vec3}
* @param {number} index - The joint index.
* @returns {Vec3} The default translation of the joint (in model coordinates) if the joint index is valid, otherwise
* {@link Vec3(0)|Vec3.ZERO}.
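* @example <caption>Sketch (assumes the avatar skeleton has a "Head" joint): report the default translation of your
* avatar's head joint relative to its parent joint, in model coordinates.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var defaultHeadTranslation = MyAvatar.getDefaultJointTranslation(headIndex);
* print("Default head translation: " + JSON.stringify(defaultHeadTranslation));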
*/
Q_INVOKABLE virtual glm::vec3 getDefaultJointTranslation(int index) const;
/**jsdoc
* Provides read only access to the default joint rotations in avatar coordinates.
* Gets the default joint rotations in avatar coordinates.
* The default pose of the avatar is defined by the position and orientation of all bones
* in the avatar's model file. Typically this is a T-pose.
* @function MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame
* @param index {number} index number
* @returns {Quat} The rotation of this joint in avatar coordinates.
* @param {number} index - The joint index.
* @returns {Quat} The default rotation of the joint in avatar coordinates.
* @example <caption>Report the default rotation of your avatar's head joint relative to your avatar.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var defaultHeadRotation = MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(headIndex);
* print("Default head rotation: " + JSON.stringify(Quat.safeEulerAngles(defaultHeadRotation))); // Degrees
*/
Q_INVOKABLE virtual glm::quat getAbsoluteDefaultJointRotationInObjectFrame(int index) const;
/**jsdoc
* Provides read only access to the default joint translations in avatar coordinates.
* Gets the default joint translations in avatar coordinates.
* The default pose of the avatar is defined by the position and orientation of all bones
* in the avatar's model file. Typically this is a T-pose.
* @function MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame
* @param index {number} index number
* @returns {Vec3} The position of this joint in avatar coordinates.
* @param {number} index - The joint index.
* @returns {Vec3} The default position of the joint in avatar coordinates.
* @example <caption>Report the default translation of your avatar's head joint relative to your avatar.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var defaultHeadTranslation = MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(headIndex);
* print("Default head translation: " + JSON.stringify(defaultHeadTranslation));
*/
Q_INVOKABLE virtual glm::vec3 getAbsoluteDefaultJointTranslationInObjectFrame(int index) const;
@ -233,59 +253,88 @@ public:
virtual glm::vec3 getAbsoluteJointScaleInObjectFrame(int index) const override;
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
/**jsdoc
* Sets the rotation of a joint relative to the avatar.
* <p><strong>Warning:</strong> Not able to be used in the <code>MyAvatar</code> API.</p>
* @function MyAvatar.setAbsoluteJointRotationInObjectFrame
* @param {number} index - The index of the joint. <em>Not used.</em>
* @param {Quat} rotation - The rotation of the joint relative to the avatar. <em>Not used.</em>
* @returns {boolean} <code>false</code>.
*/
virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override { return false; }
/**jsdoc
* Sets the translation of a joint relative to the avatar.
* <p><strong>Warning:</strong> Not able to be used in the <code>MyAvatar</code> API.</p>
* @function MyAvatar.setAbsoluteJointTranslationInObjectFrame
* @param {number} index - The index of the joint. <em>Not used.</em>
* @param {Vec3} translation - The translation of the joint relative to the avatar. <em>Not used.</em>
* @returns {boolean} <code>false</code>.
*/
virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override { return false; }
virtual glm::vec3 getSpine2SplineOffset() const { return _spine2SplineOffset; }
virtual float getSpine2SplineRatio() const { return _spine2SplineRatio; }
// world-space to avatar-space rig conversion functions
/**jsdoc
* @function MyAvatar.worldToJointPoint
* @param {Vec3} position
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a position in world coordinates to a position in a joint's coordinates, or avatar coordinates if no joint is
* specified.
* @function MyAvatar.worldToJointPoint
* @param {Vec3} position - The position in world coordinates.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The position in the joint's coordinate system, or avatar coordinate system if no joint is specified.
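* @example <caption>Sketch (assumes the avatar skeleton has a "Head" joint): express the avatar's world position in the
* head joint's coordinate system.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var pointInHeadFrame = MyAvatar.worldToJointPoint(MyAvatar.position, headIndex);
* print("Avatar position in head-joint coordinates: " + JSON.stringify(pointInHeadFrame));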
*/
Q_INVOKABLE glm::vec3 worldToJointPoint(const glm::vec3& position, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.worldToJointDirection
* @param {Vec3} direction
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a direction in world coordinates to a direction in a joint's coordinates, or avatar coordinates if no joint
* is specified.
* @function MyAvatar.worldToJointDirection
* @param {Vec3} direction - The direction in world coordinates.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The direction in the joint's coordinate system, or avatar coordinate system if no joint is specified.
*/
Q_INVOKABLE glm::vec3 worldToJointDirection(const glm::vec3& direction, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.worldToJointRotation
* @param {Quat} rotation
* @param {number} [jointIndex=-1]
* @returns {Quat}
* Transforms a rotation in world coordinates to a rotation in a joint's coordinates, or avatar coordinates if no joint is
* specified.
* @function MyAvatar.worldToJointRotation
* @param {Quat} rotation - The rotation in world coordinates.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Quat} The rotation in the joint's coordinate system, or avatar coordinate system if no joint is specified.
*/
Q_INVOKABLE glm::quat worldToJointRotation(const glm::quat& rotation, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.jointToWorldPoint
* @param {vec3} position
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a position in a joint's coordinates, or avatar coordinates if no joint is specified, to a position in world
* coordinates.
* @function MyAvatar.jointToWorldPoint
* @param {Vec3} position - The position in joint coordinates, or avatar coordinates if no joint is specified.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The position in world coordinates.
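* @example <caption>Sketch (assumes the avatar skeleton has a "Head" joint): find the world position of a point 0.1m above
* the head joint's origin.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var worldPoint = MyAvatar.jointToWorldPoint({ x: 0, y: 0.1, z: 0 }, headIndex);
* print("World position: " + JSON.stringify(worldPoint));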
*/
Q_INVOKABLE glm::vec3 jointToWorldPoint(const glm::vec3& position, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.jointToWorldDirection
* @param {Vec3} direction
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a direction in a joint's coordinates, or avatar coordinates if no joint is specified, to a direction in world
* coordinates.
* @function MyAvatar.jointToWorldDirection
* @param {Vec3} direction - The direction in joint coordinates, or avatar coordinates if no joint is specified.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The direction in world coordinates.
*/
Q_INVOKABLE glm::vec3 jointToWorldDirection(const glm::vec3& direction, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.jointToWorldRotation
* @param {Quat} rotation
* @param {number} [jointIndex=-1]
* @returns {Quat}
*/
* Transforms a rotation in a joint's coordinates, or avatar coordinates if no joint is specified, to a rotation in world
* coordinates.
* @function MyAvatar.jointToWorldRotation
* @param {Quat} rotation - The rotation in joint coordinates, or avatar coordinates if no joint is specified.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Quat} The rotation in world coordinates.
*/
Q_INVOKABLE glm::quat jointToWorldRotation(const glm::quat& rotation, const int jointIndex = -1) const;
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
@ -297,7 +346,7 @@ public:
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
/**jsdoc
* Set the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* Sets the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
* @function MyAvatar.setSkeletonOffset
* @param {Vec3} offset - The skeleton offset to set.
@ -313,7 +362,7 @@ public:
Q_INVOKABLE void setSkeletonOffset(const glm::vec3& offset);
/**jsdoc
* Get the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* Gets the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
* @function MyAvatar.getSkeletonOffset
* @returns {Vec3} The current skeleton offset.
@ -325,7 +374,7 @@ public:
virtual glm::vec3 getSkeletonPosition() const;
/**jsdoc
* Get the position of a joint in the current avatar.
* Gets the position of a joint in the current avatar.
* @function MyAvatar.getJointPosition
* @param {number} index - The index of the joint.
* @returns {Vec3} The position of the joint in world coordinates.
@ -333,7 +382,7 @@ public:
Q_INVOKABLE glm::vec3 getJointPosition(int index) const;
/**jsdoc
* Get the position of a joint in the current avatar.
* Gets the position of a joint in the current avatar.
* @function MyAvatar.getJointPosition
* @param {string} name - The name of the joint.
* @returns {Vec3} The position of the joint in world coordinates.
@ -343,7 +392,7 @@ public:
Q_INVOKABLE glm::vec3 getJointPosition(const QString& name) const;
/**jsdoc
* Get the position of the current avatar's neck in world coordinates.
* Gets the position of the current avatar's neck in world coordinates.
* @function MyAvatar.getNeckPosition
* @returns {Vec3} The position of the neck in world coordinates.
* @example <caption>Report the position of your avatar's neck.</caption>
@ -352,8 +401,9 @@ public:
Q_INVOKABLE glm::vec3 getNeckPosition() const;
/**jsdoc
* Gets the current acceleration of the avatar.
* @function MyAvatar.getAcceleration
* @returns {Vec3}
* @returns {Vec3} The current acceleration of the avatar.
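* @example <caption>Sketch: report your avatar's current acceleration.</caption>
* print("Acceleration: " + JSON.stringify(MyAvatar.getAcceleration()));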
*/
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
@ -377,47 +427,55 @@ public:
void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
float computeMass();
/**jsdoc
* Get the position of the current avatar's feet (or rather, bottom of its collision capsule) in world coordinates.
* Gets the position of the current avatar's feet (or rather, bottom of its collision capsule) in world coordinates.
* @function MyAvatar.getWorldFeetPosition
* @returns {Vec3} The position of the avatar's feet in world coordinates.
*/
*/
Q_INVOKABLE glm::vec3 getWorldFeetPosition();
void setPositionViaScript(const glm::vec3& position) override;
void setOrientationViaScript(const glm::quat& orientation) override;
/**jsdoc
* Gets the ID of the entity or avatar that the avatar is parented to.
* @function MyAvatar.getParentID
* @returns {Uuid}
* @returns {Uuid} The ID of the entity or avatar that the avatar is parented to. {@link Uuid|Uuid.NULL} if not parented.
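* @example <caption>Sketch: report what your avatar is parented to, if anything.</caption>
* var parentID = MyAvatar.getParentID();
* print("Parent ID: " + parentID);  // Uuid.NULL if the avatar is not parented.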
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual const QUuid getParentID() const override { return SpatiallyNestable::getParentID(); }
/**jsdoc
* Sets the ID of the entity or avatar that the avatar is parented to.
* @function MyAvatar.setParentID
* @param {Uuid} parentID
* @param {Uuid} parentID - The ID of the entity or avatar that the avatar should be parented to. Set to
* {@link Uuid|Uuid.NULL} to unparent.
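* @example <caption>Sketch: unparent your avatar.</caption>
* MyAvatar.setParentID(Uuid.NULL);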
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual void setParentID(const QUuid& parentID) override;
/**jsdoc
* Gets the joint of the entity or avatar that the avatar is parented to.
* @function MyAvatar.getParentJointIndex
* @returns {number}
* @returns {number} The joint of the entity or avatar that the avatar is parented to. <code>65535</code> or
* <code>-1</code> if parented to the entity or avatar's position and orientation rather than a joint.
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual quint16 getParentJointIndex() const override { return SpatiallyNestable::getParentJointIndex(); }
/**jsdoc
* Sets the joint of the entity or avatar that the avatar is parented to.
* @function MyAvatar.setParentJointIndex
* @param {number} parentJointIndex
* @param {number} parentJointIndex - The joint of the entity or avatar that the avatar should be parented to. Use
* <code>65535</code> or <code>-1</code> to parent to the entity or avatar's position and orientation rather than a
* joint.
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual void setParentJointIndex(quint16 parentJointIndex) override;
/**jsdoc
* Returns an array of joints, where each joint is an object containing name, index, and parentIndex fields.
* Gets information on all the joints in the avatar's skeleton.
* @function MyAvatar.getSkeleton
* @returns {MyAvatar.SkeletonJoint[]} A list of information about each joint in this avatar's skeleton.
* @returns {MyAvatar.SkeletonJoint[]} Information about each joint in the avatar's skeleton.
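* @example <caption>Sketch: list the joints in your avatar's skeleton.</caption>
* var skeleton = MyAvatar.getSkeleton();
* for (var i = 0; i < skeleton.length; i++) {
*     print(skeleton[i].index + ": " + skeleton[i].name + " (parent " + skeleton[i].parentIndex + ")");
* }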
*/
/**jsdoc
* Information about a single joint in an Avatar's skeleton hierarchy.
@ -443,8 +501,9 @@ public:
/**jsdoc
* @function MyAvatar.getSimulationRate
* @param {string} [rateName=""]
* @returns {number}
* @param {string} [rateName=""] - Rate name.
* @returns {number} Simulation rate.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE float getSimulationRate(const QString& rateName = QString("")) const;
@ -500,6 +559,13 @@ public:
uint32_t appendSubMetaItems(render::ItemIDs& subItems);
signals:
/**jsdoc
* Triggered when the avatar's target scale is changed. The target scale is the desired scale of the avatar without any
* restrictions on permissible scale values imposed by the domain.
* @function MyAvatar.targetScaleChanged
* @param {number} targetScale - The avatar's target scale.
* @returns {Signal}
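* @example <caption>Sketch: report when your avatar's target scale changes.</caption>
* MyAvatar.targetScaleChanged.connect(function (targetScale) {
*     print("Target scale changed to: " + targetScale);
* });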
*/
void targetScaleChanged(float targetScale);
public slots:
@ -508,7 +574,7 @@ public slots:
// thread safe, will return last valid palm from cache
/**jsdoc
* Get the position of the left palm in world coordinates.
* Gets the position of the left palm in world coordinates.
* @function MyAvatar.getLeftPalmPosition
* @returns {Vec3} The position of the left palm in world coordinates.
* @example <caption>Report the position of your avatar's left palm.</caption>
@ -517,15 +583,16 @@ public slots:
glm::vec3 getLeftPalmPosition() const;
/**jsdoc
* Get the rotation of the left palm in world coordinates.
* Gets the rotation of the left palm in world coordinates.
* @function MyAvatar.getLeftPalmRotation
* @returns {Quat} The rotation of the left palm in world coordinates.
* @example <caption>Report the rotation of your avatar's left palm.</caption>
* print(JSON.stringify(MyAvatar.getLeftPalmRotation()));
*/
glm::quat getLeftPalmRotation() const;
/**jsdoc
* Get the position of the right palm in world coordinates.
* Gets the position of the right palm in world coordinates.
* @function MyAvatar.getRightPalmPosition
* @returns {Vec3} The position of the right palm in world coordinates.
* @example <caption>Report the position of your avatar's right palm.</caption>
@ -542,21 +609,26 @@ public slots:
*/
glm::quat getRightPalmRotation() const;
/**jsdoc
* @function MyAvatar.setModelURLFinished
* @param {boolean} success
* @deprecated This function is deprecated and will be removed.
*/
// hooked up to Model::setURLFinished signal
void setModelURLFinished(bool success);
/**jsdoc
* @function MyAvatar.rigReady
* @returns {Signal}
* @deprecated This function is deprecated and will be removed.
*/
// Hooked up to Model::rigReady signal
void rigReady();
/**jsdoc
* @function MyAvatar.rigReset
* @returns {Signal}
* @deprecated This function is deprecated and will be removed.
*/
// Jooked up to Model::rigReset signal
// Hooked up to Model::rigReset signal
void rigReset();
protected:

View file

@ -28,9 +28,10 @@
namespace AvatarTraits {
template<typename T, T defaultValue>
class AssociatedTraitValues {
using SimpleTypesArray = std::array<T, NUM_SIMPLE_TRAITS>;
public:
// constructor that pre-fills _simpleTypes with the default value specified by the template
AssociatedTraitValues() : _simpleTypes(FirstInstancedTrait, defaultValue) {}
AssociatedTraitValues() { std::fill(_simpleTypes.begin(), _simpleTypes.end(), defaultValue); }
/// inserts the given value for the given simple trait type
void insert(TraitType type, T value) { _simpleTypes[type] = value; }
@ -71,12 +72,12 @@ namespace AvatarTraits {
}
/// const iterators for the vector of simple type values
typename std::vector<T>::const_iterator simpleCBegin() const { return _simpleTypes.cbegin(); }
typename std::vector<T>::const_iterator simpleCEnd() const { return _simpleTypes.cend(); }
typename SimpleTypesArray::const_iterator simpleCBegin() const { return _simpleTypes.cbegin(); }
typename SimpleTypesArray::const_iterator simpleCEnd() const { return _simpleTypes.cend(); }
/// non-const iterators for the vector of simple type values
typename std::vector<T>::iterator simpleBegin() { return _simpleTypes.begin(); }
typename std::vector<T>::iterator simpleEnd() { return _simpleTypes.end(); }
typename SimpleTypesArray::iterator simpleBegin() { return _simpleTypes.begin(); }
typename SimpleTypesArray::iterator simpleEnd() { return _simpleTypes.end(); }
struct TraitWithInstances {
TraitType traitType;
@ -96,7 +97,7 @@ namespace AvatarTraits {
typename std::vector<TraitWithInstances>::iterator instancedEnd() { return _instancedTypes.end(); }
private:
std::vector<T> _simpleTypes;
SimpleTypesArray _simpleTypes;
/// return the iterator to the matching TraitWithInstances object for a given instanced trait type
typename std::vector<TraitWithInstances>::iterator instancesForTrait(TraitType traitType) {

View file

@ -1143,10 +1143,11 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
// we store the hand state as well as other items in a shared bitset. The hand state is an octal, but is split
// into two sections to maintain backward compatibility. The bits are ordered as such (0-7 left to right).
// AA 6/1/18 added three more flags bits 8,9, and 10 for procedural audio, blink, and eye saccade enabled
// +---+-----+-----+--+--+--+--+-----+
// |x,x|H0,H1|x,x,x|H2|Au|Bl|Ey|xxxxx|
// +---+-----+-----+--+--+--+--+-----+
// +---+-----+-----+--+--+--+--+--+----+
// |x,x|H0,H1|x,x,x|H2|Au|Bl|Ey|He|xxxx|
// +---+-----+-----+--+--+--+--+--+----+
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
// Hero-avatar status (He) - 12th bit
auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
+ (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
@ -1434,6 +1435,47 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
return numBytesRead;
}
/**jsdoc
* The avatar mixer data comprises different types of data, with the data rates of each being tracked in kbps.
*
* <table>
* <thead>
* <tr><th>Rate Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"globalPosition"</code></td><td>Incoming global position.</td></tr>
* <tr><td><code>"localPosition"</code></td><td>Incoming local position.</td></tr>
* <tr><td><code>"avatarBoundingBox"</code></td><td>Incoming avatar bounding box.</td></tr>
* <tr><td><code>"avatarOrientation"</code></td><td>Incoming avatar orientation.</td></tr>
* <tr><td><code>"avatarScale"</code></td><td>Incoming avatar scale.</td></tr>
* <tr><td><code>"lookAtPosition"</code></td><td>Incoming look-at position.</td></tr>
* <tr><td><code>"audioLoudness"</code></td><td>Incoming audio loudness.</td></tr>
* <tr><td><code>"sensorToWorkMatrix"</code></td><td>Incoming sensor-to-world matrix.</td></tr>
* <tr><td><code>"additionalFlags"</code></td><td>Incoming additional avatar flags.</td></tr>
* <tr><td><code>"parentInfo"</code></td><td>Incoming parent information.</td></tr>
* <tr><td><code>"faceTracker"</code></td><td>Incoming face tracker data.</td></tr>
* <tr><td><code>"jointData"</code></td><td>Incoming joint data.</td></tr>
* <tr><td><code>"jointDefaultPoseFlagsRate"</code></td><td>Incoming joint default pose flags.</td></tr>
* <tr><td><code>"farGrabJointRate"</code></td><td>Incoming far grab joint.</td></tr>
* <tr><td><code>"globalPositionOutbound"</code></td><td>Outgoing global position.</td></tr>
* <tr><td><code>"localPositionOutbound"</code></td><td>Outgoing local position.</td></tr>
* <tr><td><code>"avatarBoundingBoxOutbound"</code></td><td>Outgoing avatar bounding box.</td></tr>
* <tr><td><code>"avatarOrientationOutbound"</code></td><td>Outgoing avatar orientation.</td></tr>
* <tr><td><code>"avatarScaleOutbound"</code></td><td>Outgoing avatar scale.</td></tr>
* <tr><td><code>"lookAtPositionOutbound"</code></td><td>Outgoing look-at position.</td></tr>
* <tr><td><code>"audioLoudnessOutbound"</code></td><td>Outgoing audio loudness.</td></tr>
* <tr><td><code>"sensorToWorkMatrixOutbound"</code></td><td>Outgoing sensor-to-world matrix.</td></tr>
* <tr><td><code>"additionalFlagsOutbound"</code></td><td>Outgoing additional avatar flags.</td></tr>
* <tr><td><code>"parentInfoOutbound"</code></td><td>Outgoing parent information.</td></tr>
* <tr><td><code>"faceTrackerOutbound"</code></td><td>Outgoing face tracker data.</td></tr>
* <tr><td><code>"jointDataOutbound"</code></td><td>Outgoing joint data.</td></tr>
* <tr><td><code>"jointDefaultPoseFlagsOutbound"</code></td><td>Outgoing joint default pose flags.</td></tr>
* <tr><td><code>""</code></td><td>When no rate name is specified, the total incoming data rate is provided.</td></tr>
* </tbody>
* </table>
*
* @typedef {string} AvatarDataRate
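* @example <caption>Sketch: report the incoming joint data rate for your avatar.</caption>
* print("Joint data rate: " + MyAvatar.getDataRate("jointData") + " kbps");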
*/
float AvatarData::getDataRate(const QString& rateName) const {
if (rateName == "") {
return _parseBufferRate.rate() / BYTES_PER_KILOBIT;
@ -1495,6 +1537,35 @@ float AvatarData::getDataRate(const QString& rateName) const {
return 0.0f;
}
/**jsdoc
* The avatar mixer data comprises different types of data updated at different rates, in Hz.
*
* <table>
* <thead>
* <tr><th>Rate Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"globalPosition"</code></td><td>Global position.</td></tr>
* <tr><td><code>"localPosition"</code></td><td>Local position.</td></tr>
* <tr><td><code>"avatarBoundingBox"</code></td><td>Avatar bounding box.</td></tr>
* <tr><td><code>"avatarOrientation"</code></td><td>Avatar orientation.</td></tr>
* <tr><td><code>"avatarScale"</code></td><td>Avatar scale.</td></tr>
* <tr><td><code>"lookAtPosition"</code></td><td>Look-at position.</td></tr>
* <tr><td><code>"audioLoudness"</code></td><td>Audio loudness.</td></tr>
* <tr><td><code>"sensorToWorkMatrix"</code></td><td>Sensor-to-world matrix.</td></tr>
* <tr><td><code>"additionalFlags"</code></td><td>Additional avatar flags.</td></tr>
* <tr><td><code>"parentInfo"</code></td><td>Parent information.</td></tr>
* <tr><td><code>"faceTracker"</code></td><td>Face tracker data.</td></tr>
* <tr><td><code>"jointData"</code></td><td>Joint data.</td></tr>
* <tr><td><code>"farGrabJointData"</code></td><td>Far grab joint data.</td></tr>
* <tr><td><code>""</code></td><td>When no rate name is specified, the overall update rate is provided.</td></tr>
* </tbody>
* </table>
*
* @typedef {string} AvatarUpdateRate
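* @example <caption>Sketch: report the overall avatar update rate.</caption>
* print("Avatar update rate: " + MyAvatar.getUpdateRate() + " Hz");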
*/
float AvatarData::getUpdateRate(const QString& rateName) const {
if (rateName == "") {
return _parseBufferUpdateRate.rate();
@ -1920,42 +1991,16 @@ QUrl AvatarData::getWireSafeSkeletonModelURL() const {
}
}
qint64 AvatarData::packTrait(AvatarTraits::TraitType traitType, ExtendedIODevice& destination,
AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
if (traitType == AvatarTraits::SkeletonModelURL) {
QByteArray encodedSkeletonURL = getWireSafeSkeletonModelURL().toEncoded();
if (encodedSkeletonURL.size() > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack simple trait" << traitType << "of size" << encodedSkeletonURL.size()
<< "bytes since it exceeds the maximum size" << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > AvatarTraits::DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
AvatarTraits::TraitWireSize encodedURLSize = encodedSkeletonURL.size();
bytesWritten += destination.writePrimitive(encodedURLSize);
bytesWritten += destination.write(encodedSkeletonURL);
}
return bytesWritten;
QByteArray AvatarData::packSkeletonModelURL() const {
return getWireSafeSkeletonModelURL().toEncoded();
}
void AvatarData::unpackSkeletonModelURL(const QByteArray& data) {
auto skeletonModelURL = QUrl::fromEncoded(data);
setSkeletonModelURL(skeletonModelURL);
}
qint64 AvatarData::packAvatarEntityTraitInstance(AvatarTraits::TraitType traitType,
AvatarTraits::TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
QByteArray AvatarData::packAvatarEntityTraitInstance(AvatarTraits::TraitInstanceID traitInstanceID) {
// grab a read lock on the avatar entities and check for entity data for the given ID
QByteArray entityBinaryData;
_avatarEntitiesLock.withReadLock([this, &entityBinaryData, &traitInstanceID] {
@ -1964,104 +2009,48 @@ qint64 AvatarData::packAvatarEntityTraitInstance(AvatarTraits::TraitType traitTy
}
});
if (entityBinaryData.size() > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << entityBinaryData.size()
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > AvatarTraits::DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!entityBinaryData.isNull()) {
AvatarTraits::TraitWireSize entityBinarySize = entityBinaryData.size();
bytesWritten += destination.writePrimitive(entityBinarySize);
bytesWritten += destination.write(entityBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
return entityBinaryData;
}
qint64 AvatarData::packGrabTraitInstance(AvatarTraits::TraitType traitType,
AvatarTraits::TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
QByteArray AvatarData::packGrabTraitInstance(AvatarTraits::TraitInstanceID traitInstanceID) {
// grab a read lock on the avatar grabs and check for grab data for the given ID
QByteArray grabBinaryData;
_avatarGrabsLock.withReadLock([this, &grabBinaryData, &traitInstanceID] {
if (_avatarGrabData.contains(traitInstanceID)) {
grabBinaryData = _avatarGrabData[traitInstanceID];
}
});
if (grabBinaryData.size() > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << grabBinaryData.size()
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > AvatarTraits::DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!grabBinaryData.isNull()) {
AvatarTraits::TraitWireSize grabBinarySize = grabBinaryData.size();
bytesWritten += destination.writePrimitive(grabBinarySize);
bytesWritten += destination.write(grabBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
return grabBinaryData;
}
qint64 AvatarData::packTraitInstance(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
QByteArray AvatarData::packTrait(AvatarTraits::TraitType traitType) const {
QByteArray traitBinaryData;
// Call packer function
if (traitType == AvatarTraits::SkeletonModelURL) {
traitBinaryData = packSkeletonModelURL();
}
return traitBinaryData;
}
QByteArray AvatarData::packTraitInstance(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID traitInstanceID) {
QByteArray traitBinaryData;
// Call packer function
if (traitType == AvatarTraits::AvatarEntity) {
bytesWritten += packAvatarEntityTraitInstance(traitType, traitInstanceID, destination, traitVersion);
traitBinaryData = packAvatarEntityTraitInstance(traitInstanceID);
} else if (traitType == AvatarTraits::Grab) {
bytesWritten += packGrabTraitInstance(traitType, traitInstanceID, destination, traitVersion);
traitBinaryData = packGrabTraitInstance(traitInstanceID);
}
return bytesWritten;
}
void AvatarData::prepareResetTraitInstances() {
if (_clientTraitsHandler) {
_avatarEntitiesLock.withReadLock([this]{
foreach (auto entityID, _packedAvatarEntityData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::AvatarEntity, entityID);
}
foreach (auto grabID, _avatarGrabData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::Grab, grabID);
}
});
}
return traitBinaryData;
}
void AvatarData::processTrait(AvatarTraits::TraitType traitType, QByteArray traitBinaryData) {
if (traitType == AvatarTraits::SkeletonModelURL) {
// get the URL from the binary data
auto skeletonModelURL = QUrl::fromEncoded(traitBinaryData);
setSkeletonModelURL(skeletonModelURL);
unpackSkeletonModelURL(traitBinaryData);
}
}
@ -2082,6 +2071,19 @@ void AvatarData::processDeletedTraitInstance(AvatarTraits::TraitType traitType,
}
}
void AvatarData::prepareResetTraitInstances() {
if (_clientTraitsHandler) {
_avatarEntitiesLock.withReadLock([this]{
foreach (auto entityID, _packedAvatarEntityData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::AvatarEntity, entityID);
}
foreach (auto grabID, _avatarGrabData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::Grab, grabID);
}
});
}
}
QByteArray AvatarData::identityByteArray(bool setIsReplicated) const {
QByteArray identityData;
QDataStream identityStream(&identityData, QIODevice::Append);
@ -2730,13 +2732,16 @@ glm::vec3 AvatarData::getAbsoluteJointTranslationInObjectFrame(int index) const
}
/**jsdoc
* Information on an attachment worn by the avatar.
* @typedef {object} AttachmentData
* @property {string} modelUrl
* @property {string} jointName
* @property {Vec3} translation
* @property {Vec3} rotation
* @property {number} scale
* @property {boolean} soft
* @property {string} modelUrl - The URL of the model file. Models can be FBX or OBJ format.
* @property {string} jointName - The name of the joint that the attachment is attached to.
* @property {Vec3} translation - The offset from the joint that the attachment is positioned at.
* @property {Vec3} rotation - The rotation applied to the model relative to the joint orientation.
* @property {number} scale - The scale applied to the attachment model.
* @property {boolean} soft - If <code>true</code> and the model has a skeleton, the bones of the attached model's skeleton are
* rotated to fit the avatar's current pose. If <code>true</code>, the <code>translation</code>, <code>rotation</code>, and
* <code>scale</code> parameters are ignored.
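* @example <caption>Sketch: report your avatar's current attachments.</caption>
* var attachments = MyAvatar.getAttachmentData();
* print("Attachments: " + JSON.stringify(attachments));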
*/
QVariant AttachmentData::toVariant() const {
QVariantMap result;
@ -2942,6 +2947,10 @@ float AvatarData::_avatarSortCoefficientSize { 8.0f };
float AvatarData::_avatarSortCoefficientCenter { 0.25f };
float AvatarData::_avatarSortCoefficientAge { 1.0f };
/**jsdoc
* An object with the UUIDs of avatar entities as keys and avatar entity properties objects as values.
* @typedef {Object.<Uuid, Entities.EntityProperties>} AvatarEntityMap
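* @example <caption>Sketch: list the IDs of your avatar's avatar entities.</caption>
* var avatarEntities = MyAvatar.getAvatarEntityData();
* for (var entityID in avatarEntities) {
*     print("Avatar entity: " + entityID);
* }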
*/
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
QScriptValue obj = engine->newObject();
for (auto entityID : value.keys()) {

File diff suppressed because it is too large

View file

@ -0,0 +1,135 @@
//
// AvatarTraits.cpp
// libraries/avatars/src
//
// Created by Clement Brisset on 3/19/19.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AvatarTraits.h"
#include <ExtendedIODevice.h>
#include "AvatarData.h"
namespace AvatarTraits {
qint64 packTrait(TraitType traitType, ExtendedIODevice& destination, const AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTrait(traitType);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack simple trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size" << MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
return bytesWritten;
}
qint64 packVersionedTrait(TraitType traitType, ExtendedIODevice& destination,
TraitVersion traitVersion, const AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTrait(traitType);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack simple trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size" << MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.writePrimitive((TraitVersion)traitVersion);
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
return bytesWritten;
}
qint64 packTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTraitInstance(traitType, traitInstanceID);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!traitBinaryData.isNull()) {
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
}
qint64 packVersionedTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, TraitVersion traitVersion,
AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTraitInstance(traitType, traitInstanceID);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.writePrimitive((TraitVersion)traitVersion);
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!traitBinaryData.isNull()) {
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
}
qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
TraitVersion traitVersion) {
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
bytesWritten += destination.write(instanceID.toRfc4122());
bytesWritten += destination.writePrimitive(DELETED_TRAIT_SIZE);
return bytesWritten;
}
};

View file

@ -14,20 +14,35 @@
#include <algorithm>
#include <cstdint>
#include <array>
#include <vector>
#include <QtCore/QUuid>
class ExtendedIODevice;
class AvatarData;
namespace AvatarTraits {
enum TraitType : int8_t {
// Null trait
NullTrait = -1,
SkeletonModelURL,
// Simple traits
SkeletonModelURL = 0,
// Instanced traits
FirstInstancedTrait,
AvatarEntity = FirstInstancedTrait,
Grab,
// Traits count
TotalTraitTypes
};
const int NUM_SIMPLE_TRAITS = (int)FirstInstancedTrait;
const int NUM_INSTANCED_TRAITS = (int)TotalTraitTypes - (int)FirstInstancedTrait;
const int NUM_TRAITS = (int)TotalTraitTypes;
using TraitInstanceID = QUuid;
inline bool isSimpleTrait(TraitType traitType) {
@ -46,22 +61,19 @@ namespace AvatarTraits {
const TraitMessageSequence FIRST_TRAIT_SEQUENCE = 0;
const TraitMessageSequence MAX_TRAIT_SEQUENCE = INT64_MAX;
inline qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
TraitVersion traitVersion = NULL_TRAIT_VERSION) {
qint64 bytesWritten = 0;
qint64 packTrait(TraitType traitType, ExtendedIODevice& destination, const AvatarData& avatar);
qint64 packVersionedTrait(TraitType traitType, ExtendedIODevice& destination,
TraitVersion traitVersion, const AvatarData& avatar);
bytesWritten += destination.writePrimitive(traitType);
qint64 packTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarData& avatar);
qint64 packVersionedTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, TraitVersion traitVersion,
AvatarData& avatar);
if (traitVersion > DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
TraitVersion traitVersion = NULL_TRAIT_VERSION);
bytesWritten += destination.write(instanceID.toRfc4122());
bytesWritten += destination.writePrimitive(DELETED_TRAIT_SIZE);
return bytesWritten;
}
};
#endif // hifi_AvatarTraits_h
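As a sanity check on the reordered enum, the derived counts work out as follows; a minimal standalone mirror of the values shown above (not the actual header):

#include <cstdint>

// Mirror of the TraitType ordering shown in the diff above, for checking the derived counts.
enum TraitType : int8_t {
    NullTrait = -1,
    SkeletonModelURL = 0,              // simple traits occupy [0, FirstInstancedTrait)
    FirstInstancedTrait,               // == 1
    AvatarEntity = FirstInstancedTrait,
    Grab,                              // == 2
    TotalTraitTypes                    // == 3
};

static_assert((int)FirstInstancedTrait == 1, "NUM_SIMPLE_TRAITS == 1 (SkeletonModelURL)");
static_assert((int)TotalTraitTypes - (int)FirstInstancedTrait == 2, "NUM_INSTANCED_TRAITS == 2 (AvatarEntity, Grab)");
static_assert((int)TotalTraitTypes == 3, "NUM_TRAITS == 3");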

View file

@ -106,9 +106,10 @@ int ClientTraitsHandler::sendChangedTraitsToMixer() {
auto traitType = static_cast<AvatarTraits::TraitType>(std::distance(traitStatusesCopy.simpleCBegin(), simpleIt));
if (initialSend || *simpleIt == Updated) {
if (traitType == AvatarTraits::SkeletonModelURL) {
bytesWritten += _owningAvatar->packTrait(traitType, *traitsPacketList);
bytesWritten += AvatarTraits::packTrait(traitType, *traitsPacketList, *_owningAvatar);
if (traitType == AvatarTraits::SkeletonModelURL) {
// keep track of our skeleton version in case we get an override back
_currentSkeletonVersion = _currentTraitVersion;
}
@ -124,7 +125,9 @@ int ClientTraitsHandler::sendChangedTraitsToMixer() {
|| instanceIDValuePair.value == Updated) {
// this is a changed trait we need to send, or we haven't sent out trait information yet
// ask the owning avatar to pack it
bytesWritten += _owningAvatar->packTraitInstance(instancedIt->traitType, instanceIDValuePair.id, *traitsPacketList);
bytesWritten += AvatarTraits::packTraitInstance(instancedIt->traitType, instanceIDValuePair.id,
*traitsPacketList, *_owningAvatar);
} else if (!initialSend && instanceIDValuePair.value == Deleted) {
// pack delete for this trait instance
bytesWritten += AvatarTraits::packInstancedTraitDelete(instancedIt->traitType, instanceIDValuePair.id,
@ -162,11 +165,11 @@ void ClientTraitsHandler::processTraitOverride(QSharedPointer<ReceivedMessage> m
// override the skeleton URL but do not mark the trait as having changed
// so that we don't unnecessarily send a new trait packet to the mixer with the overridden URL
auto encodedSkeletonURL = QUrl::fromEncoded(message->readWithoutCopy(traitBinarySize));
auto hasChangesBefore = _hasChangedTraits;
_owningAvatar->setSkeletonModelURL(encodedSkeletonURL);
auto traitBinaryData = message->readWithoutCopy(traitBinarySize);
_owningAvatar->processTrait(traitType, traitBinaryData);
// setSkeletonModelURL will flag us for changes to the SkeletonModelURL so we reset some state here to
// avoid unnecessarily sending the overridden skeleton model URL back to the mixer

View file

@ -343,6 +343,14 @@ glm::mat4 ScriptAvatarData::getControllerRightHandMatrix() const {
// END
//
bool ScriptAvatarData::getHasPriority() const {
if (AvatarSharedPointer sharedAvatarData = _avatarData.lock()) {
return sharedAvatarData->getHasPriority();
} else {
return false;
}
}
glm::quat ScriptAvatarData::getAbsoluteJointRotationInObjectFrame(int index) const {
if (AvatarSharedPointer sharedAvatarData = _avatarData.lock()) {
return sharedAvatarData->getAbsoluteJointRotationInObjectFrame(index);

View file

@ -68,6 +68,8 @@ class ScriptAvatarData : public QObject {
Q_PROPERTY(glm::mat4 controllerLeftHandMatrix READ getControllerLeftHandMatrix)
Q_PROPERTY(glm::mat4 controllerRightHandMatrix READ getControllerRightHandMatrix)
Q_PROPERTY(bool hasPriority READ getHasPriority)
public:
ScriptAvatarData(AvatarSharedPointer avatarData);
@ -133,6 +135,8 @@ public:
glm::mat4 getControllerLeftHandMatrix() const;
glm::mat4 getControllerRightHandMatrix() const;
bool getHasPriority() const;
signals:
void displayNameChanged();
void sessionDisplayNameChanged();

View file

@ -169,7 +169,7 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (urlChanged && !usingMaterialData) {
_networkMaterial = MaterialCache::instance().getMaterial(_materialURL);
auto onMaterialRequestFinished = [&, oldParentID, oldParentMaterialName, newCurrentMaterialName](bool success) {
auto onMaterialRequestFinished = [this, oldParentID, oldParentMaterialName, newCurrentMaterialName](bool success) {
if (success) {
deleteMaterial(oldParentID, oldParentMaterialName);
_texturesLoaded = false;
@ -186,7 +186,11 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (_networkMaterial->isLoaded()) {
onMaterialRequestFinished(!_networkMaterial->isFailed());
} else {
connect(_networkMaterial.data(), &Resource::finished, this, onMaterialRequestFinished);
connect(_networkMaterial.data(), &Resource::finished, this, [this, onMaterialRequestFinished](bool success) {
withWriteLock([&] {
onMaterialRequestFinished(success);
});
});
}
}
} else if (materialDataChanged && usingMaterialData) {
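The capture-list change above matters because onMaterialRequestFinished may run later, from Resource::finished, after the locals of the enclosing call have gone out of scope; capturing those locals by value (and this explicitly) avoids dangling references. A minimal sketch of the hazard, with made-up names:

#include <functional>
#include <iostream>
#include <string>

// A callback stored for later, e.g. run when a network resource finishes loading.
static std::function<void()> g_onFinished;

void scheduleByReference() {
    std::string oldParentName = "material0";            // made-up local
    // Default reference capture: 'oldParentName' is destroyed when this function
    // returns, so running g_onFinished later would read a dangling reference.
    g_onFinished = [&] { std::cout << oldParentName << "\n"; };
}

void scheduleByValue() {
    std::string oldParentName = "material0";
    // Value capture: the callback owns its own copy and stays valid after return.
    g_onFinished = [oldParentName] { std::cout << oldParentName << "\n"; };
}

int main() {
    scheduleByValue();
    g_onFinished();   // safe: prints "material0"
    return 0;
}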

View file

@ -95,19 +95,18 @@ bool PolyLineEntityRenderer::needsRenderUpdate() const {
}
bool PolyLineEntityRenderer::needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const {
return (
entity->pointsChanged() ||
entity->widthsChanged() ||
entity->normalsChanged() ||
entity->texturesChanged() ||
entity->colorsChanged() ||
_isUVModeStretch != entity->getIsUVModeStretch() ||
_glow != entity->getGlow() ||
_faceCamera != entity->getFaceCamera()
);
if (entity->pointsChanged() || entity->widthsChanged() || entity->normalsChanged() || entity->texturesChanged() || entity->colorsChanged()) {
return true;
}
if (_isUVModeStretch != entity->getIsUVModeStretch() || _glow != entity->getGlow() || _faceCamera != entity->getFaceCamera()) {
return true;
}
return Parent::needsRenderUpdateFromTypedEntity(entity);
}
void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
void PolyLineEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
auto pointsChanged = entity->pointsChanged();
auto widthsChanged = entity->widthsChanged();
auto normalsChanged = entity->normalsChanged();
@ -119,10 +118,6 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
entity->resetPolyLineChanged();
// Transform
updateModelTransformAndBound();
_renderTransform = getModelTransform();
// Textures
if (entity->texturesChanged()) {
entity->resetTexturesChanged();
@ -131,7 +126,9 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (!textures.isEmpty()) {
entityTextures = QUrl(textures);
}
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
withWriteLock([&] {
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
});
_textureAspectRatio = 1.0f;
_textureLoaded = false;
}
@ -145,11 +142,13 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
// Data
bool faceCameraChanged = faceCamera != _faceCamera;
if (faceCameraChanged || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
withWriteLock([&] {
if (faceCameraChanged || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
});
// Geometry
if (pointsChanged) {
@ -165,10 +164,23 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
_colors = entity->getStrokeColors();
_color = toGlm(entity->getColor());
}
if (_isUVModeStretch != isUVModeStretch || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged || faceCameraChanged) {
_isUVModeStretch = isUVModeStretch;
updateGeometry();
}
bool uvModeStretchChanged = _isUVModeStretch != isUVModeStretch;
_isUVModeStretch = isUVModeStretch;
bool geometryChanged = uvModeStretchChanged || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged || faceCameraChanged;
void* key = (void*)this;
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [this, geometryChanged] () {
withWriteLock([&] {
updateModelTransformAndBound();
_renderTransform = getModelTransform();
if (geometryChanged) {
updateGeometry();
}
});
});
}
void PolyLineEntityRenderer::updateGeometry() {
@ -267,22 +279,32 @@ void PolyLineEntityRenderer::updateData() {
}
void PolyLineEntityRenderer::doRender(RenderArgs* args) {
if (_numVertices < 2) {
return;
}
PerformanceTimer perfTimer("RenderablePolyLineEntityItem::render");
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
if (!_pipeline || !_glowPipeline) {
size_t numVertices;
Transform transform;
gpu::TexturePointer texture;
withReadLock([&] {
numVertices = _numVertices;
transform = _renderTransform;
texture = _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture();
batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
});
if (numVertices < 2) {
return;
}
if (!_pipeline) {
buildPipeline();
}
batch.setPipeline(_glow ? _glowPipeline : _pipeline);
batch.setModelTransform(_renderTransform);
batch.setResourceTexture(0, _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * _numVertices), 0);
batch.setModelTransform(transform);
batch.setResourceTexture(0, texture);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * numVertices), 0);
}

View file

@ -31,7 +31,7 @@ public:
protected:
virtual bool needsRenderUpdate() const override;
virtual bool needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const override;
virtual void doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) override;
virtual void doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) override;
virtual ItemKey getKey() override;
virtual ShapeKey getShapeKey() override;

View file

@ -249,10 +249,14 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
graphics::MultiMaterial materials;
auto geometryCache = DependencyManager::get<GeometryCache>();
GeometryCache::Shape geometryShape;
PrimitiveMode primitiveMode;
RenderLayer renderLayer;
bool proceduralRender = false;
glm::vec4 outColor;
withReadLock([&] {
geometryShape = geometryCache->getShapeForEntityShape(_shape);
primitiveMode = _primitiveMode;
renderLayer = _renderLayer;
batch.setModelTransform(_renderTransform); // use a transform with scale, rotation, registration point and translation
materials = _materials["0"];
auto& schema = materials.getSchemaBuffer().get<graphics::MultiMaterial::Schema>();
@ -261,13 +265,13 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
if (_procedural.isReady()) {
outColor = _procedural.getColor(outColor);
outColor.a *= _procedural.isFading() ? Interpolate::calculateFadeRatio(_procedural.getFadeStartTime()) : 1.0f;
_procedural.prepare(batch, _position, _dimensions, _orientation, ProceduralProgramKey(outColor.a < 1.0f));
_procedural.prepare(batch, _position, _dimensions, _orientation, _created, ProceduralProgramKey(outColor.a < 1.0f));
proceduralRender = true;
}
});
if (proceduralRender) {
if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShape(batch, geometryShape, outColor);
} else {
geometryCache->renderShape(batch, geometryShape, outColor);
@ -275,10 +279,16 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
} else if (!useMaterialPipeline(materials)) {
// FIXME, support instanced multi-shape rendering using multidraw indirect
outColor.a *= _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || _primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
render::ShapePipelinePointer pipeline;
if (renderLayer == RenderLayer::WORLD) {
pipeline = outColor.a < 1.0f ? geometryCache->getTransparentShapePipeline() : geometryCache->getOpaqueShapePipeline();
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
pipeline = outColor.a < 1.0f ? geometryCache->getForwardTransparentShapePipeline() : geometryCache->getForwardOpaqueShapePipeline();
}
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, pipeline);
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, pipeline);
}
} else {
if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {

View file

@ -162,10 +162,12 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec4 backgroundColor;
Transform modelTransform;
glm::vec3 dimensions;
BillboardMode billboardMode;
bool layered;
withReadLock([&] {
modelTransform = _renderTransform;
dimensions = _dimensions;
billboardMode = _billboardMode;
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
textColor = glm::vec4(_textColor, fadeRatio * _textAlpha);
@ -190,7 +192,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
}
auto transformToTopLeft = modelTransform;
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), _billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.postTranslate(dimensions * glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
@ -210,10 +212,6 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec2 bounds = glm::vec2(dimensions.x - (_leftMargin + _rightMargin), dimensions.y - (_topMargin + _bottomMargin));
_textRenderer->draw(batch, _leftMargin / scale, -_topMargin / scale, _text, textColor, bounds / scale, layered);
}
if (layered) {
DependencyManager::get<DeferredLightingEffect>()->unsetKeyLightBatch(batch);
}
}
QSizeF TextEntityRenderer::textSize(const QString& text) const {

View file

@ -33,7 +33,7 @@ using namespace render::entities;
ZoneEntityRenderer::ZoneEntityRenderer(const EntityItemPointer& entity)
: Parent(entity) {
_background->setSkybox(std::make_shared<ProceduralSkybox>());
_background->setSkybox(std::make_shared<ProceduralSkybox>(entity->getCreated()));
}
void ZoneEntityRenderer::onRemoveFromSceneTyped(const TypedEntityPointer& entity) {

View file

@ -789,8 +789,10 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
auto lastEdited = lastEditedFromBufferAdjusted;
bool otherOverwrites = overwriteLocalData && !weOwnSimulation;
auto shouldUpdate = [this, lastEdited, otherOverwrites, filterRejection](quint64 updatedTimestamp, bool valueChanged) {
if (stillHasGrabActions()) {
// calculate hasGrab once outside the lambda rather than calling it every time inside
bool hasGrab = stillHasGrabAction();
auto shouldUpdate = [this, lastEdited, otherOverwrites, filterRejection, hasGrab](quint64 updatedTimestamp, bool valueChanged) {
if (hasGrab) {
return false;
}
bool simulationChanged = lastEdited > updatedTimestamp;
@ -957,12 +959,18 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
// by doing this parsing here... but it's not likely going to fully recover the content.
//
if (overwriteLocalData && (getDirtyFlags() & (Simulation::DIRTY_TRANSFORM | Simulation::DIRTY_VELOCITIES))) {
if (overwriteLocalData &&
!hasGrab &&
(getDirtyFlags() & (Simulation::DIRTY_TRANSFORM | Simulation::DIRTY_VELOCITIES))) {
// NOTE: This code is attempting to "repair" the old data we just got from the server to make it more
// closely match where the entities should be if they'd stepped forward in time to "now". The server
// is sending us data with a known "last simulated" time. That time is likely in the past, and therefore
// this "new" data is actually slightly out of date. We calculate the time we need to skip forward and
// use our simulation helper routine to get a best estimate of where the entity should be.
//
// NOTE: We don't want to do this in the hasGrab case because grabs "know best"
// (e.g. grabs will prevent drift between distributed physics simulations).
//
float skipTimeForward = (float)(now - lastSimulatedFromBufferAdjusted) / (float)(USECS_PER_SECOND);
// we want to extrapolate the motion forward to compensate for packet travel time, but
@ -1426,7 +1434,7 @@ void EntityItem::getTransformAndVelocityProperties(EntityItemProperties& propert
void EntityItem::upgradeScriptSimulationPriority(uint8_t priority) {
uint8_t newPriority = glm::max(priority, _scriptSimulationPriority);
if (newPriority < SCRIPT_GRAB_SIMULATION_PRIORITY && stillHasGrabActions()) {
if (newPriority < SCRIPT_GRAB_SIMULATION_PRIORITY && stillHasMyGrabAction()) {
newPriority = SCRIPT_GRAB_SIMULATION_PRIORITY;
}
if (newPriority != _scriptSimulationPriority) {
@ -1439,7 +1447,7 @@ void EntityItem::upgradeScriptSimulationPriority(uint8_t priority) {
void EntityItem::clearScriptSimulationPriority() {
// DO NOT markDirtyFlags(Simulation::DIRTY_SIMULATION_OWNERSHIP_PRIORITY) here, because this
// is only ever called from the code that actually handles the dirty flags, and it knows best.
_scriptSimulationPriority = stillHasGrabActions() ? SCRIPT_GRAB_SIMULATION_PRIORITY : 0;
_scriptSimulationPriority = stillHasMyGrabAction() ? SCRIPT_GRAB_SIMULATION_PRIORITY : 0;
}
void EntityItem::setPendingOwnershipPriority(uint8_t priority) {
@ -2186,7 +2194,7 @@ void EntityItem::enableNoBootstrap() {
}
void EntityItem::disableNoBootstrap() {
if (!stillHasGrabActions()) {
if (!stillHasMyGrabAction()) {
_flags &= ~Simulation::SPECIAL_FLAGS_NO_BOOTSTRAPPING;
_flags |= Simulation::DIRTY_COLLISION_GROUP; // may need to not collide with own avatar
@ -2272,7 +2280,13 @@ bool EntityItem::removeAction(EntitySimulationPointer simulation, const QUuid& a
return success;
}
bool EntityItem::stillHasGrabActions() const {
bool EntityItem::stillHasGrabAction() const {
return !_grabActions.empty();
}
// returns 'true' if there exists an action that returns 'true' for EntityActionInterface::isMine()
// (e.g. the action belongs to the MyAvatar instance)
bool EntityItem::stillHasMyGrabAction() const {
QList<EntityDynamicPointer> holdActions = getActionsOfType(DYNAMIC_TYPE_HOLD);
QList<EntityDynamicPointer>::const_iterator i = holdActions.begin();
while (i != holdActions.end()) {
@ -2700,20 +2714,6 @@ void EntityItem::setLastEdited(quint64 lastEdited) {
});
}
quint64 EntityItem::getLastBroadcast() const {
quint64 result;
withReadLock([&] {
result = _lastBroadcast;
});
return result;
}
void EntityItem::setLastBroadcast(quint64 lastBroadcast) {
withWriteLock([&] {
_lastBroadcast = lastBroadcast;
});
}
void EntityItem::markAsChangedOnServer() {
withWriteLock([&] {
_changedOnServer = usecTimestampNow();
@ -3479,6 +3479,9 @@ void EntityItem::addGrab(GrabPointer grab) {
simulation->addDynamic(action);
markDirtyFlags(Simulation::DIRTY_MOTION_TYPE);
simulation->changeEntity(getThisPointer());
// don't forget to set isMine() for locally-created grabs
action->setIsMine(grab->getOwnerID() == Physics::getSessionUUID());
}
}

View file

@ -124,8 +124,8 @@ public:
{ return (float)(usecTimestampNow() - getLastEdited()) / (float)USECS_PER_SECOND; }
/// Last time we sent out an edit packet for this entity
quint64 getLastBroadcast() const;
void setLastBroadcast(quint64 lastBroadcast);
quint64 getLastBroadcast() const { return _lastBroadcast; }
void setLastBroadcast(quint64 lastBroadcast) { _lastBroadcast = lastBroadcast; }
void markAsChangedOnServer();
quint64 getLastChangedOnServer() const;
@ -562,6 +562,8 @@ public:
static void setPrimaryViewFrustumPositionOperator(std::function<glm::vec3()> getPrimaryViewFrustumPositionOperator) { _getPrimaryViewFrustumPositionOperator = getPrimaryViewFrustumPositionOperator; }
static glm::vec3 getPrimaryViewFrustumPosition() { return _getPrimaryViewFrustumPositionOperator(); }
bool stillHasMyGrabAction() const;
signals:
void requestRenderUpdate();
void spaceUpdate(std::pair<int32_t, glm::vec4> data);
@ -574,7 +576,7 @@ protected:
void setSimulated(bool simulated) { _simulated = simulated; }
const QByteArray getDynamicDataInternal() const;
bool stillHasGrabActions() const;
bool stillHasGrabAction() const;
void setDynamicDataInternal(QByteArray dynamicData);
virtual void dimensionsChanged() override;

View file

@ -1101,13 +1101,13 @@ void EntityScriptingInterface::handleEntityScriptCallMethodPacket(QSharedPointer
void EntityScriptingInterface::onAddingEntity(EntityItem* entity) {
if (entity->isWearable()) {
emit addingWearable(entity->getEntityItemID());
QMetaObject::invokeMethod(this, "addingWearable", Q_ARG(QUuid, entity->getEntityItemID()));
}
}
void EntityScriptingInterface::onDeletingEntity(EntityItem* entity) {
if (entity->isWearable()) {
emit deletingWearable(entity->getEntityItemID());
QMetaObject::invokeMethod(this, "deletingWearable", Q_ARG(QUuid, entity->getEntityItemID()));
}
}

View file

@ -443,6 +443,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
QString hifiGlobalNodeID;
unsigned int meshIndex = 0;
haveReportedUnhandledRotationOrder = false;
int fbxVersionNumber = -1;
foreach (const FBXNode& child, node.children) {
if (child.name == "FBXHeaderExtension") {
@ -465,6 +466,8 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
}
}
}
} else if (object.name == "FBXVersion") {
fbxVersionNumber = object.properties.at(0).toInt();
}
}
} else if (child.name == "GlobalSettings") {
@ -1161,8 +1164,14 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
counter++;
}
}
_connectionParentMap.insert(getID(connection.properties, 1), getID(connection.properties, 2));
_connectionChildMap.insert(getID(connection.properties, 2), getID(connection.properties, 1));
if (_connectionParentMap.value(getID(connection.properties, 1)) == "0") {
// don't assign the new parent
qCDebug(modelformat) << "root node " << getID(connection.properties, 1) << " has discarded parent " << getID(connection.properties, 2);
_connectionChildMap.insert(getID(connection.properties, 2), getID(connection.properties, 1));
} else {
_connectionParentMap.insert(getID(connection.properties, 1), getID(connection.properties, 2));
_connectionChildMap.insert(getID(connection.properties, 2), getID(connection.properties, 1));
}
}
}
}
@ -1311,8 +1320,6 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
joint.bindTransformFoundInCluster = false;
hfmModel.joints.append(joint);
QString rotationID = localRotations.value(modelID);
AnimationCurve xRotCurve = animationCurves.value(xComponents.value(rotationID));
AnimationCurve yRotCurve = animationCurves.value(yComponents.value(rotationID));
@ -1335,7 +1342,13 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
xPosCurve.values.isEmpty() ? defaultPosValues.x : xPosCurve.values.at(i % xPosCurve.values.size()),
yPosCurve.values.isEmpty() ? defaultPosValues.y : yPosCurve.values.at(i % yPosCurve.values.size()),
zPosCurve.values.isEmpty() ? defaultPosValues.z : zPosCurve.values.at(i % zPosCurve.values.size()));
if ((fbxVersionNumber < 7500) && (i == 0)) {
joint.translation = hfmModel.animationFrames[i].translations[jointIndex];
joint.rotation = hfmModel.animationFrames[i].rotations[jointIndex];
}
}
hfmModel.joints.append(joint);
}
// NOTE: shapeVertices are in joint-frame

View file

@ -40,7 +40,7 @@ public:
auto lastSlash = filename.rfind('/');
result = filename.substr(0, lastSlash + 1);
} else {
std::string result = QFileInfo(filename.c_str()).absoluteDir().canonicalPath().toStdString();
result = QFileInfo(filename.c_str()).absoluteDir().canonicalPath().toStdString();
if (*result.rbegin() != '/') {
result += '/';
}

View file

@ -536,7 +536,7 @@ void AccountManager::requestAccessToken(const QString& login, const QString& pas
QByteArray postData;
postData.append("grant_type=password&");
postData.append("username=" + login + "&");
postData.append("username=" + QUrl::toPercentEncoding(login) + "&");
postData.append("password=" + QUrl::toPercentEncoding(password) + "&");
postData.append("scope=" + ACCOUNT_MANAGER_REQUESTED_SCOPE);
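Percent-encoding the login matters because characters such as '+' and '@', which are common in email-style logins, are significant in a form-urlencoded body. A small standalone sketch of what QUrl::toPercentEncoding produces (the sample login is made up):

#include <QByteArray>
#include <QDebug>
#include <QString>
#include <QUrl>

int main() {
    QString login = QStringLiteral("first.last+test@example.com");   // hypothetical login
    QByteArray encoded = QUrl::toPercentEncoding(login);
    // Everything outside [A-Za-z0-9-._~] is escaped, so '+' becomes %2B and '@' becomes %40:
    // "first.last%2Btest%40example.com"
    qDebug() << encoded;
    return 0;
}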

View file

@ -677,6 +677,9 @@ SharedNodePointer LimitedNodeList::addOrUpdateNode(const QUuid& uuid, NodeType_t
// If there is a new node with the same socket, this is a reconnection, kill the old node
removeOldNode(findNodeWithAddr(publicSocket));
removeOldNode(findNodeWithAddr(localSocket));
// If there is an old Connection to the new node's address kill it
_nodeSocket.cleanupConnection(publicSocket);
_nodeSocket.cleanupConnection(localSocket);
auto it = _connectionIDs.find(uuid);
if (it == _connectionIDs.end()) {

View file

@ -1016,6 +1016,14 @@ void NodeList::maybeSendIgnoreSetToNode(SharedNodePointer newNode) {
// also send them the current ignore radius state.
sendIgnoreRadiusStateToNode(newNode);
// also send the current avatar and injector gains
if (_avatarGain != 0.0f) {
setAvatarGain(QUuid(), _avatarGain);
}
if (_injectorGain != 0.0f) {
setInjectorGain(_injectorGain);
}
}
if (newNode->getType() == NodeType::AvatarMixer) {
// this is a mixer that we just added - it's unlikely it knows who we were previously ignoring in this session,
@ -1062,13 +1070,17 @@ void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {
if (nodeID.isNull()) {
qCDebug(networking) << "Sending Set MASTER Avatar Gain packet with Gain:" << gain;
} else {
qCDebug(networking) << "Sending Set Avatar Gain packet with UUID: " << uuidStringWithoutCurlyBraces(nodeID) << "Gain:" << gain;
}
sendPacket(std::move(setAvatarGainPacket), *audioMixer);
QWriteLocker lock{ &_avatarGainMapLock };
_avatarGainMap[nodeID] = gain;
sendPacket(std::move(setAvatarGainPacket), *audioMixer);
_avatarGain = gain;
} else {
qCDebug(networking) << "Sending Set Avatar Gain packet with UUID:" << uuidStringWithoutCurlyBraces(nodeID) << "Gain:" << gain;
sendPacket(std::move(setAvatarGainPacket), *audioMixer);
QWriteLocker lock{ &_avatarGainMapLock };
_avatarGainMap[nodeID] = gain;
}
} else {
qWarning() << "Couldn't find audio mixer to send set gain request";
@ -1079,14 +1091,41 @@ void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {
}
float NodeList::getAvatarGain(const QUuid& nodeID) {
QReadLocker lock{ &_avatarGainMapLock };
auto it = _avatarGainMap.find(nodeID);
if (it != _avatarGainMap.cend()) {
return it->second;
if (nodeID.isNull()) {
return _avatarGain;
} else {
QReadLocker lock{ &_avatarGainMapLock };
auto it = _avatarGainMap.find(nodeID);
if (it != _avatarGainMap.cend()) {
return it->second;
}
}
return 0.0f;
}
void NodeList::setInjectorGain(float gain) {
auto audioMixer = soloNodeOfType(NodeType::AudioMixer);
if (audioMixer) {
// setup the packet
auto setInjectorGainPacket = NLPacket::create(PacketType::InjectorGainSet, sizeof(float), true);
// We need to convert the gain in dB (from the script) to an amplitude before packing it.
setInjectorGainPacket->writePrimitive(packFloatGainToByte(fastExp2f(gain / 6.02059991f)));
qCDebug(networking) << "Sending Set Injector Gain packet with Gain:" << gain;
sendPacket(std::move(setInjectorGainPacket), *audioMixer);
_injectorGain = gain;
} else {
qWarning() << "Couldn't find audio mixer to send set gain request";
}
}
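The conversion above works because 20·log10(2) ≈ 6.02059991, so 2^(dB / 6.02059991) equals the familiar 10^(dB / 20). A standalone check of the equivalence (std::exp2 standing in for fastExp2f; dbToAmplitude is a made-up name):

#include <cassert>
#include <cmath>
#include <initializer_list>

// Convert a gain in decibels to a linear amplitude: 2^(dB / 6.02059991) == 10^(dB / 20),
// since 20 * log10(2) ~= 6.02059991.
static float dbToAmplitude(float gainDb) {
    return std::exp2(gainDb / 6.02059991f);
}

int main() {
    for (float db : { -60.0f, -6.0f, 0.0f, 6.0f, 20.0f }) {
        float viaExp2 = dbToAmplitude(db);
        float viaPow10 = std::pow(10.0f, db / 20.0f);
        assert(std::fabs(viaExp2 - viaPow10) <= 1e-4f * (1.0f + viaPow10));
    }
    return 0;
}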
float NodeList::getInjectorGain() {
return _injectorGain;
}
void NodeList::kickNodeBySessionID(const QUuid& nodeID) {
// send a request to domain-server to kick the node with the given session ID
// the domain-server will handle the persistence of the kick (via username or IP)

View file

@ -83,6 +83,8 @@ public:
bool isPersonalMutingNode(const QUuid& nodeID) const;
void setAvatarGain(const QUuid& nodeID, float gain);
float getAvatarGain(const QUuid& nodeID);
void setInjectorGain(float gain);
float getInjectorGain();
void kickNodeBySessionID(const QUuid& nodeID);
void muteNodeBySessionID(const QUuid& nodeID);
@ -181,6 +183,9 @@ private:
mutable QReadWriteLock _avatarGainMapLock;
tbb::concurrent_unordered_map<QUuid, float, UUIDHasher> _avatarGainMap;
std::atomic<float> _avatarGain { 0.0f }; // in dB
std::atomic<float> _injectorGain { 0.0f }; // in dB
void sendIgnoreRadiusStateToNode(const SharedNodePointer& destinationNode);
#if defined(Q_OS_ANDROID)
Setting::Handle<bool> _ignoreRadiusEnabled { "IgnoreRadiusEnabled", false };

View file

@ -57,7 +57,7 @@ public:
ICEServerQuery,
OctreeStats,
SetAvatarTraits,
UNUSED_PACKET_TYPE,
InjectorGainSet,
AssignmentClientStatus,
NoisyMute,
AvatarIdentity,
@ -266,6 +266,7 @@ enum class EntityVersion : PacketVersion {
ModelScale,
ReOrderParentIDProperties,
CertificateTypeProperty,
DisableWebMedia,
// Add new versions above here
NUM_PACKET_TYPE,

View file

@ -82,11 +82,11 @@ void OctreePersistThread::start() {
}
if (data.readOctreeDataInfoFromData(_cachedJSONData)) {
qCDebug(octree) << "Current octree data: ID(" << data.id << ") DataVersion(" << data.version << ")";
qCDebug(octree) << "Current octree data: ID(" << data.id << ") DataVersion(" << data.dataVersion << ")";
packet->writePrimitive(true);
auto id = data.id.toRfc4122();
packet->write(id);
packet->writePrimitive(data.version);
packet->writePrimitive(data.dataVersion);
} else {
_cachedJSONData.clear();
qCWarning(octree) << "No octree data found";
@ -144,8 +144,8 @@ void OctreePersistThread::handleOctreeDataFileReply(QSharedPointer<ReceivedMessa
quint64 loadStarted = usecTimestampNow();
if (hasValidOctreeData) {
qDebug() << "Setting entity version info to: " << data.id << data.version;
_tree->setOctreeVersionInfo(data.id, data.version);
qDebug() << "Setting entity version info to: " << data.id << data.dataVersion;
_tree->setOctreeVersionInfo(data.id, data.dataVersion);
}
bool persistentFileRead;

View file

@ -225,13 +225,15 @@ void Procedural::prepare(gpu::Batch& batch,
const glm::vec3& position,
const glm::vec3& size,
const glm::quat& orientation,
const uint64_t& created,
const ProceduralProgramKey key) {
std::lock_guard<std::mutex> lock(_mutex);
_entityDimensions = size;
_entityPosition = position;
_entityOrientation = glm::mat3_cast(orientation);
_entityCreated = created;
if (!_shaderPath.isEmpty()) {
auto lastModified = (quint64)QFileInfo(_shaderPath).lastModified().toMSecsSinceEpoch();
auto lastModified = (uint64_t)QFileInfo(_shaderPath).lastModified().toMSecsSinceEpoch();
if (lastModified > _shaderModified) {
QFile file(_shaderPath);
file.open(QIODevice::ReadOnly);
@ -278,7 +280,10 @@ void Procedural::prepare(gpu::Batch& batch,
_proceduralPipelines[key] = gpu::Pipeline::create(program, key.isTransparent() ? _transparentState : _opaqueState);
_start = usecTimestampNow();
_lastCompile = usecTimestampNow();
if (_firstCompile == 0) {
_firstCompile = _lastCompile;
}
_frameCount = 0;
recompiledShader = true;
}
@ -371,7 +376,11 @@ void Procedural::setupUniforms() {
_uniforms.push_back([=](gpu::Batch& batch) {
_standardInputs.position = vec4(_entityPosition, 1.0f);
// Minimize floating point error by doing an integer division to milliseconds, before the floating point division to seconds
_standardInputs.time = (float)((usecTimestampNow() - _start) / USECS_PER_MSEC) / MSECS_PER_SECOND;
auto now = usecTimestampNow();
_standardInputs.timeSinceLastCompile = (float)((now - _lastCompile) / USECS_PER_MSEC) / MSECS_PER_SECOND;
_standardInputs.timeSinceFirstCompile = (float)((now - _firstCompile) / USECS_PER_MSEC) / MSECS_PER_SECOND;
_standardInputs.timeSinceEntityCreation = (float)((now - _entityCreated) / USECS_PER_MSEC) / MSECS_PER_SECOND;
// Date
{

View file

@ -82,10 +82,11 @@ public:
bool isReady() const;
bool isEnabled() const { return _enabled; }
void prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size, const glm::quat& orientation, const ProceduralProgramKey key = ProceduralProgramKey());
void prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size, const glm::quat& orientation,
const uint64_t& created, const ProceduralProgramKey key = ProceduralProgramKey());
glm::vec4 getColor(const glm::vec4& entityColor) const;
quint64 getFadeStartTime() const { return _fadeStartTime; }
uint64_t getFadeStartTime() const { return _fadeStartTime; }
bool isFading() const { return _doesFade && _isFading; }
void setIsFading(bool isFading) { _isFading = isFading; }
void setDoesFade(bool doesFade) { _doesFade = doesFade; }
@ -106,9 +107,10 @@ protected:
vec4 date;
vec4 position;
vec4 scale;
float time;
float timeSinceLastCompile;
float timeSinceFirstCompile;
float timeSinceEntityCreation;
int frameCount;
vec2 _spare1;
vec4 resolution[4];
mat4 orientation;
};
@ -116,9 +118,10 @@ protected:
static_assert(0 == offsetof(StandardInputs, date), "ProceduralOffsets");
static_assert(16 == offsetof(StandardInputs, position), "ProceduralOffsets");
static_assert(32 == offsetof(StandardInputs, scale), "ProceduralOffsets");
static_assert(48 == offsetof(StandardInputs, time), "ProceduralOffsets");
static_assert(52 == offsetof(StandardInputs, frameCount), "ProceduralOffsets");
static_assert(56 == offsetof(StandardInputs, _spare1), "ProceduralOffsets");
static_assert(48 == offsetof(StandardInputs, timeSinceLastCompile), "ProceduralOffsets");
static_assert(52 == offsetof(StandardInputs, timeSinceFirstCompile), "ProceduralOffsets");
static_assert(56 == offsetof(StandardInputs, timeSinceEntityCreation), "ProceduralOffsets");
static_assert(60 == offsetof(StandardInputs, frameCount), "ProceduralOffsets");
static_assert(64 == offsetof(StandardInputs, resolution), "ProceduralOffsets");
static_assert(128 == offsetof(StandardInputs, orientation), "ProceduralOffsets");
@ -126,13 +129,14 @@ protected:
ProceduralData _data;
bool _enabled { false };
uint64_t _start { 0 };
uint64_t _lastCompile { 0 };
uint64_t _firstCompile { 0 };
int32_t _frameCount { 0 };
// Rendering object descriptions, from userData
QString _shaderSource;
QString _shaderPath;
quint64 _shaderModified { 0 };
uint64_t _shaderModified { 0 };
NetworkShaderPointer _networkShader;
bool _shaderDirty { true };
bool _uniformsDirty { true };
@ -152,11 +156,12 @@ protected:
glm::vec3 _entityDimensions;
glm::vec3 _entityPosition;
glm::mat3 _entityOrientation;
uint64_t _entityCreated;
private:
void setupUniforms();
mutable quint64 _fadeStartTime { 0 };
mutable uint64_t _fadeStartTime { 0 };
mutable bool _hasStartedFade { false };
mutable bool _isFading { false };
bool _doesFade { true };

View file

@ -36,9 +36,11 @@ LAYOUT_STD140(binding=0) uniform standardInputsBuffer {
// Offset 48
float globalTime;
// Offset 52
int frameCount;
float localCreatedTime;
// Offset 56
vec2 _spare1;
float entityTime;
// Offset 60
int frameCount;
// Offset 64, acts as vec4[4] for alignment purposes
vec3 channelResolution[4];
// Offset 128, acts as vec4[3] for alignment purposes
@ -52,6 +54,8 @@ LAYOUT_STD140(binding=0) uniform standardInputsBuffer {
#define iWorldPosition standardInputs.worldPosition
#define iWorldScale standardInputs.worldScale
#define iGlobalTime standardInputs.globalTime
#define iLocalCreatedTime standardInputs.localCreatedTime
#define iEntityTime standardInputs.entityTime
#define iFrameCount standardInputs.frameCount
#define iChannelResolution standardInputs.channelResolution
#define iWorldOrientation standardInputs.worldOrientation

View file

@ -17,7 +17,7 @@
#include <ViewFrustum.h>
#include <shaders/Shaders.h>
ProceduralSkybox::ProceduralSkybox() : graphics::Skybox() {
ProceduralSkybox::ProceduralSkybox(uint64_t created) : graphics::Skybox(), _created(created) {
_procedural._vertexSource = gpu::Shader::createVertex(shader::graphics::vertex::skybox)->getSource();
_procedural._opaqueFragmentSource = shader::Source::get(shader::procedural::fragment::proceduralSkybox);
// Adjust the pipeline state for background using the stencil test
@ -59,7 +59,7 @@ void ProceduralSkybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum,
batch.setModelTransform(Transform()); // only for Mac
auto& procedural = skybox._procedural;
procedural.prepare(batch, glm::vec3(0), glm::vec3(1), glm::quat());
procedural.prepare(batch, glm::vec3(0), glm::vec3(1), glm::quat(), skybox.getCreated());
skybox.prepare(batch);
batch.draw(gpu::TRIANGLE_STRIP, 4);
}

View file

@ -19,7 +19,7 @@
class ProceduralSkybox: public graphics::Skybox {
public:
ProceduralSkybox();
ProceduralSkybox(uint64_t created = 0);
void parse(const QString& userData) { _procedural.setProceduralData(ProceduralData::parse(userData)); }
@ -29,8 +29,11 @@ public:
void render(gpu::Batch& batch, const ViewFrustum& frustum) const override;
static void render(gpu::Batch& batch, const ViewFrustum& frustum, const ProceduralSkybox& skybox);
uint64_t getCreated() const { return _created; }
protected:
mutable Procedural _procedural;
uint64_t _created;
};
typedef std::shared_ptr< ProceduralSkybox > ProceduralSkyboxPointer;

View file

@ -178,7 +178,6 @@ void CauterizedModel::updateClusterMatrices() {
}
}
}
computeMeshPartLocalBounds();
// post the blender if we're not currently waiting for one to finish
auto modelBlender = DependencyManager::get<ModelBlender>();

View file

@ -722,6 +722,8 @@ gpu::ShaderPointer GeometryCache::_unlitFadeShader;
render::ShapePipelinePointer GeometryCache::_simpleOpaquePipeline;
render::ShapePipelinePointer GeometryCache::_simpleTransparentPipeline;
render::ShapePipelinePointer GeometryCache::_forwardSimpleOpaquePipeline;
render::ShapePipelinePointer GeometryCache::_forwardSimpleTransparentPipeline;
render::ShapePipelinePointer GeometryCache::_simpleOpaqueFadePipeline;
render::ShapePipelinePointer GeometryCache::_simpleTransparentFadePipeline;
render::ShapePipelinePointer GeometryCache::_simpleWirePipeline;
@ -801,6 +803,8 @@ void GeometryCache::initializeShapePipelines() {
if (!_simpleOpaquePipeline) {
_simpleOpaquePipeline = getShapePipeline(false, false, true, false);
_simpleTransparentPipeline = getShapePipeline(false, true, true, false);
_forwardSimpleOpaquePipeline = getShapePipeline(false, false, true, false, false, true);
_forwardSimpleTransparentPipeline = getShapePipeline(false, true, true, false, false, true);
_simpleOpaqueFadePipeline = getFadingShapePipeline(false, false, false, false, false);
_simpleTransparentFadePipeline = getFadingShapePipeline(false, true, false, false, false);
_simpleWirePipeline = getShapePipeline(false, false, true, true);

View file

@ -181,6 +181,11 @@ public:
static void initializeShapePipelines();
render::ShapePipelinePointer getOpaqueShapePipeline() { assert(_simpleOpaquePipeline != nullptr); return _simpleOpaquePipeline; }
render::ShapePipelinePointer getTransparentShapePipeline() { assert(_simpleTransparentPipeline != nullptr); return _simpleTransparentPipeline; }
render::ShapePipelinePointer getForwardOpaqueShapePipeline() { assert(_forwardSimpleOpaquePipeline != nullptr); return _forwardSimpleOpaquePipeline; }
render::ShapePipelinePointer getForwardTransparentShapePipeline() { assert(_forwardSimpleTransparentPipeline != nullptr); return _forwardSimpleTransparentPipeline; }
// Static (instanced) geometry
void renderShapeInstances(gpu::Batch& batch, Shape shape, size_t count, gpu::BufferPointer& colorBuffer);
void renderWireShapeInstances(gpu::Batch& batch, Shape shape, size_t count, gpu::BufferPointer& colorBuffer);
@ -368,6 +373,7 @@ public:
const ShapeData * getShapeData(Shape shape) const;
graphics::MeshPointer meshFromShape(Shape geometryShape, glm::vec3 color);
private:
GeometryCache();
@ -461,6 +467,8 @@ private:
static gpu::ShaderPointer _unlitFadeShader;
static render::ShapePipelinePointer _simpleOpaquePipeline;
static render::ShapePipelinePointer _simpleTransparentPipeline;
static render::ShapePipelinePointer _forwardSimpleOpaquePipeline;
static render::ShapePipelinePointer _forwardSimpleTransparentPipeline;
static render::ShapePipelinePointer _simpleOpaqueFadePipeline;
static render::ShapePipelinePointer _simpleTransparentFadePipeline;
static render::ShapePipelinePointer _simpleWirePipeline;

View file

@ -1346,19 +1346,14 @@ void Model::updateRig(float deltaTime, glm::mat4 parentTransform) {
}
void Model::computeMeshPartLocalBounds() {
render::Transaction transaction;
auto meshStates = _meshStates;
for (auto renderItem : _modelMeshRenderItemIDs) {
transaction.updateItem<ModelMeshPartPayload>(renderItem, [this, meshStates](ModelMeshPartPayload& data) {
const Model::MeshState& state = meshStates.at(data._meshIndex);
if (_useDualQuaternionSkinning) {
data.computeAdjustedLocalBound(state.clusterDualQuaternions);
} else {
data.computeAdjustedLocalBound(state.clusterMatrices);
}
});
for (auto& part : _modelMeshRenderItems) {
const Model::MeshState& state = _meshStates.at(part->_meshIndex);
if (_useDualQuaternionSkinning) {
part->computeAdjustedLocalBound(state.clusterDualQuaternions);
} else {
part->computeAdjustedLocalBound(state.clusterMatrices);
}
}
AbstractViewStateInterface::instance()->getMain3DScene()->enqueueTransaction(transaction);
}
// virtual
@ -1391,7 +1386,6 @@ void Model::updateClusterMatrices() {
}
}
}
computeMeshPartLocalBounds();
// post the blender if we're not currently waiting for one to finish
auto modelBlender = DependencyManager::get<ModelBlender>();
@ -1648,7 +1642,7 @@ using packBlendshapeOffsetTo = void(glm::uvec4& packed, const BlendshapeOffsetUn
void packBlendshapeOffsetTo_Pos_F32_3xSN10_Nor_3xSN10_Tan_3xSN10(glm::uvec4& packed, const BlendshapeOffsetUnpacked& unpacked) {
float len = glm::compMax(glm::abs(unpacked.positionOffset));
glm::vec3 normalizedPos(unpacked.positionOffset);
if (len > 1.0f) {
if (len > 0.0f) {
normalizedPos /= len;
} else {
len = 1.0f;

View file

@ -95,7 +95,11 @@ void DrawLayered3D::run(const RenderContextPointer& renderContext, const Inputs&
// Setup lighting model for all items;
batch.setUniformBuffer(ru::Buffer::LightModel, lightingModel->getParametersBuffer());
renderShapes(renderContext, _shapePlumber, inItems, _maxDrawn);
if (_opaquePass) {
renderStateSortShapes(renderContext, _shapePlumber, inItems, _maxDrawn);
} else {
renderShapes(renderContext, _shapePlumber, inItems, _maxDrawn);
}
args->_batch = nullptr;
});
}

View file

@ -216,8 +216,8 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
task.addJob<DrawHaze>("DrawHazeDeferred", drawHazeInputs);
// Render transparent objects forward in LightingBuffer
const auto transparentsInputs = DrawDeferred::Inputs(transparents, hazeFrame, lightFrame, lightingModel, lightClusters, shadowFrame, jitter).asVarying();
task.addJob<DrawDeferred>("DrawTransparentDeferred", transparentsInputs, shapePlumber);
const auto transparentsInputs = RenderTransparentDeferred::Inputs(transparents, hazeFrame, lightFrame, lightingModel, lightClusters, shadowFrame, jitter).asVarying();
task.addJob<RenderTransparentDeferred>("DrawTransparentDeferred", transparentsInputs, shapePlumber);
const auto outlineRangeTimer = task.addJob<BeginGPURangeTimer>("BeginHighlightRangeTimer", "Highlight");
@ -436,7 +436,7 @@ void RenderDeferredTaskDebug::build(JobModel& task, const render::Varying& input
}
void DrawDeferred::run(const RenderContextPointer& renderContext, const Inputs& inputs) {
void RenderTransparentDeferred::run(const RenderContextPointer& renderContext, const Inputs& inputs) {
assert(renderContext->args);
assert(renderContext->args->hasViewFrustum());
@ -453,7 +453,7 @@ void DrawDeferred::run(const RenderContextPointer& renderContext, const Inputs&
RenderArgs* args = renderContext->args;
gpu::doInBatch("DrawDeferred::run", args->_context, [&](gpu::Batch& batch) {
gpu::doInBatch("RenderTransparentDeferred::run", args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
// Setup camera, projection and viewport for all items

View file

@ -19,7 +19,7 @@
#include "LightClusters.h"
#include "RenderShadowTask.h"
class DrawDeferredConfig : public render::Job::Config {
class RenderTransparentDeferredConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY newStats)
Q_PROPERTY(int maxDrawn MEMBER maxDrawn NOTIFY dirty)
@ -41,13 +41,13 @@ protected:
int _numDrawn{ 0 };
};
class DrawDeferred {
class RenderTransparentDeferred {
public:
using Inputs = render::VaryingSet7<render::ItemBounds, HazeStage::FramePointer, LightStage::FramePointer, LightingModelPointer, LightClustersPointer, LightStage::ShadowFramePointer, glm::vec2>;
using Config = DrawDeferredConfig;
using JobModel = render::Job::ModelI<DrawDeferred, Inputs, Config>;
using Config = RenderTransparentDeferredConfig;
using JobModel = render::Job::ModelI<RenderTransparentDeferred, Inputs, Config>;
DrawDeferred(render::ShapePlumberPointer shapePlumber)
RenderTransparentDeferred(render::ShapePlumberPointer shapePlumber)
: _shapePlumber{ shapePlumber } {}
void configure(const Config& config) { _maxDrawn = config.maxDrawn; }

View file

@ -98,7 +98,7 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
// Draw opaques forward
const auto opaqueInputs = DrawForward::Inputs(opaques, lightingModel).asVarying();
task.addJob<DrawForward>("DrawOpaques", opaqueInputs, shapePlumber);
task.addJob<DrawForward>("DrawOpaques", opaqueInputs, shapePlumber, true);
// Similar to light stage, background stage has been filled by several potential render items and resolved for the frame in this job
const auto backgroundInputs = DrawBackgroundStage::Inputs(lightingModel, backgroundFrame).asVarying();
@ -106,7 +106,7 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
// Draw transparent objects forward
const auto transparentInputs = DrawForward::Inputs(transparents, lightingModel).asVarying();
task.addJob<DrawForward>("DrawTransparents", transparentInputs, shapePlumber);
task.addJob<DrawForward>("DrawTransparents", transparentInputs, shapePlumber, false);
// Layered
const auto nullJitter = Varying(glm::vec2(0.0f, 0.0f));
@ -261,7 +261,11 @@ void DrawForward::run(const RenderContextPointer& renderContext, const Inputs& i
args->_globalShapeKey = globalKey._flags.to_ulong();
// Render items
renderStateSortShapes(renderContext, _shapePlumber, inItems, -1, globalKey);
if (_opaquePass) {
renderStateSortShapes(renderContext, _shapePlumber, inItems, -1, globalKey);
} else {
renderShapes(renderContext, _shapePlumber, inItems, -1, globalKey);
}
args->_batch = nullptr;
args->_globalShapeKey = 0;

View file

@ -76,12 +76,13 @@ public:
using Inputs = render::VaryingSet2<render::ItemBounds, LightingModelPointer>;
using JobModel = render::Job::ModelI<DrawForward, Inputs>;
DrawForward(const render::ShapePlumberPointer& shapePlumber) : _shapePlumber(shapePlumber) {}
DrawForward(const render::ShapePlumberPointer& shapePlumber, bool opaquePass) : _shapePlumber(shapePlumber), _opaquePass(opaquePass) {}
void run(const render::RenderContextPointer& renderContext,
const Inputs& inputs);
private:
render::ShapePlumberPointer _shapePlumber;
bool _opaquePass;
};
#endif // hifi_RenderForwardTask_h

View file

@ -16,8 +16,8 @@
<@include gpu/Color.slh@>
<@include render-utils/ShaderConstants.h@>
<@include ForwardGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlended()$>
<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlendedWithHaze()$>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>

View file

@ -23,6 +23,7 @@
/**jsdoc
* @namespace Mat4
* @variation 0
*
* @hifi-interface
* @hifi-client-entity
@ -38,7 +39,7 @@ class Mat4 : public QObject, protected QScriptable {
public slots:
/**jsdoc
* @function Mat4.multiply
* @function Mat4(0).multiply
* @param {Mat4} m1
* @param {Mat4} m2
* @returns {Mat4}
@ -47,7 +48,7 @@ public slots:
/**jsdoc
* @function Mat4.createFromRotAndTrans
* @function Mat4(0).createFromRotAndTrans
* @param {Quat} rot
* @param {Vec3} trans
* @returns {Mat4}
@ -55,7 +56,7 @@ public slots:
glm::mat4 createFromRotAndTrans(const glm::quat& rot, const glm::vec3& trans) const;
/**jsdoc
* @function Mat4.createFromScaleRotAndTrans
* @function Mat4(0).createFromScaleRotAndTrans
* @param {Vec3} scale
* @param {Quat} rot
* @param {Vec3} trans
@ -64,7 +65,7 @@ public slots:
glm::mat4 createFromScaleRotAndTrans(const glm::vec3& scale, const glm::quat& rot, const glm::vec3& trans) const;
/**jsdoc
* @function Mat4.createFromColumns
* @function Mat4(0).createFromColumns
* @param {Vec4} col0
* @param {Vec4} col1
* @param {Vec4} col2
@ -74,7 +75,7 @@ public slots:
glm::mat4 createFromColumns(const glm::vec4& col0, const glm::vec4& col1, const glm::vec4& col2, const glm::vec4& col3) const;
/**jsdoc
* @function Mat4.createFromArray
* @function Mat4(0).createFromArray
* @param {number[]} numbers
* @returns {Mat4}
*/
@ -82,21 +83,21 @@ public slots:
/**jsdoc
* @function Mat4.extractTranslation
* @function Mat4(0).extractTranslation
* @param {Mat4} m
* @returns {Vec3}
*/
glm::vec3 extractTranslation(const glm::mat4& m) const;
/**jsdoc
* @function Mat4.extractRotation
* @function Mat4(0).extractRotation
* @param {Mat4} m
* @returns {Quat}
*/
glm::quat extractRotation(const glm::mat4& m) const;
/**jsdoc
* @function Mat4.extractScale
* @function Mat4(0).extractScale
* @param {Mat4} m
* @returns {Vec3}
*/
@ -104,7 +105,7 @@ public slots:
/**jsdoc
* @function Mat4.transformPoint
* @function Mat4(0).transformPoint
* @param {Mat4} m
* @param {Vec3} point
* @returns {Vec3}
@ -112,7 +113,7 @@ public slots:
glm::vec3 transformPoint(const glm::mat4& m, const glm::vec3& point) const;
/**jsdoc
* @function Mat4.transformVector
* @function Mat4(0).transformVector
* @param {Mat4} m
* @param {Vec3} vector
* @returns {Vec3}
@ -121,7 +122,7 @@ public slots:
/**jsdoc
* @function Mat4.inverse
* @function Mat4(0).inverse
* @param {Mat4} m
* @returns {Mat4}
*/
@ -129,7 +130,7 @@ public slots:
/**jsdoc
* @function Mat4.getFront
* @function Mat4(0).getFront
* @param {Mat4} m
* @returns {Vec3}
*/
@ -137,28 +138,28 @@ public slots:
glm::vec3 getFront(const glm::mat4& m) const { return getForward(m); }
/**jsdoc
* @function Mat4.getForward
* @function Mat4(0).getForward
* @param {Mat4} m
* @returns {Vec3}
*/
glm::vec3 getForward(const glm::mat4& m) const;
/**jsdoc
* @function Mat4.getRight
* @function Mat4(0).getRight
* @param {Mat4} m
* @returns {Vec3}
*/
glm::vec3 getRight(const glm::mat4& m) const;
/**jsdoc
* @function Mat4.getUp
* @function Mat4(0).getUp
* @param {Mat4} m
* @returns {Vec3}
*/
glm::vec3 getUp(const glm::mat4& m) const;
/**jsdoc
* @function Mat4.print
* @function Mat4(0).print
* @param {string} label
* @param {Mat4} m
* @param {boolean} [transpose=false]

View file

@ -573,11 +573,12 @@ public slots:
/**jsdoc
* @function Script.callAnimationStateHandler
* @param {function} callback
* @param {object} parameters
* @param {string[]} names
* @param {boolean} useNames
* @param {object} resultHandler
* @param {function} callback - Callback.
* @param {object} parameters - Parameters.
* @param {string[]} names - Names.
* @param {boolean} useNames - Use names.
* @param {function} resultHandler - Result handler.
* @deprecated This function is deprecated and will be removed.
*/
void callAnimationStateHandler(QScriptValue callback, AnimVariantMap parameters, QStringList names, bool useNames, AnimVariantResultHandler resultHandler);

View file

@ -18,9 +18,11 @@
* <tr><th>Value</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>none</code></td><td>The entity will not be billboarded.</td></tr>
* <tr><td><code>yaw</code></td><td>The entity will yaw, but not pitch, to face the camera. Its actual rotation will be ignored.</td></tr>
* <tr><td><code>full</code></td><td>The entity will be billboarded to face the camera. Its actual rotation will be ignored.</td></tr>
* <tr><td><code>"none"</code></td><td>The entity will not be billboarded.</td></tr>
* <tr><td><code>"yaw"</code></td><td>The entity will yaw, but not pitch, to face the camera. Its actual rotation will be
* ignored.</td></tr>
* <tr><td><code>"full"</code></td><td>The entity will be billboarded to face the camera. Its actual rotation will be
* ignored.</td></tr>
* </tbody>
* </table>
* @typedef {string} BillboardMode

View file

@ -1069,13 +1069,14 @@ void pickRayFromScriptValue(const QScriptValue& object, PickRay& pickRay) {
}
/**jsdoc
* Details of a collision between avatars and entities.
* @typedef {object} Collision
* @property {ContactEventType} type - The contact type of the collision event.
* @property {Uuid} idA - The ID of one of the entities in the collision.
* @property {Uuid} idB - The ID of the other of the entities in the collision.
* @property {Vec3} penetration - The amount of penetration between the two entities.
* @property {Uuid} idA - The ID of one of the avatars or entities in the collision.
* @property {Uuid} idB - The ID of the other of the avatars or entities in the collision.
* @property {Vec3} penetration - The amount of penetration between the two items.
* @property {Vec3} contactPoint - The point of contact.
* @property {Vec3} velocityChange - The change in relative velocity of the two entities, in m/s.
* @property {Vec3} velocityChange - The change in relative velocity of the two items, in m/s.
*/
QScriptValue collisionToScriptValue(QScriptEngine* engine, const Collision& collision) {
QScriptValue obj = engine->newObject();
@ -1147,19 +1148,21 @@ AnimationDetails::AnimationDetails(QString role, QUrl url, float fps, float prio
}
/**jsdoc
* The details of an animation that is playing.
* @typedef {object} Avatar.AnimationDetails
* @property {string} role
* @property {string} url
* @property {number} fps
* @property {number} priority
* @property {boolean} loop
* @property {boolean} hold
* @property {boolean} startAutomatically
* @property {number} firstFrame
* @property {number} lastFrame
* @property {boolean} running
* @property {number} currentFrame
* @property {boolean} allowTranslation
* @property {string} role - <em>Not used.</em>
* @property {string} url - The URL to the animation file. Animation files need to be in .FBX format but only need to contain
* the avatar skeleton and animation data.
* @property {number} fps - The frames per second (FPS) rate for the animation playback. 30 FPS is normal speed.
* @property {number} priority - <em>Not used.</em>
* @property {boolean} loop - <code>true</code> if the animation should loop, <code>false</code> if it shouldn't.
* @property {boolean} hold - <em>Not used.</em>
* @property {number} firstFrame - The frame the animation should start at.
* @property {number} lastFrame - The frame the animation should stop at.
* @property {boolean} running - <em>Not used.</em>
* @property {number} currentFrame - The current frame being played.
* @property {boolean} startAutomatically - <em>Not used.</em>
* @property {boolean} allowTranslation - <em>Not used.</em>
*/
QScriptValue animationDetailsToScriptValue(QScriptEngine* engine, const AnimationDetails& details) {
QScriptValue obj = engine->newObject();

View file

@ -43,6 +43,27 @@ Q_DECLARE_METATYPE(std::function<QVariant()>);
void registerMetaTypes(QScriptEngine* engine);
// Mat4
/**jsdoc
* A 4 x 4 matrix, typically containing a scale, rotation, and translation transform. See also the {@link Mat4(0)|Mat4} object.
*
* @typedef {object} Mat4
* @property {number} r0c0 - Row 0, column 0 value.
* @property {number} r1c0 - Row 1, column 0 value.
* @property {number} r2c0 - Row 2, column 0 value.
* @property {number} r3c0 - Row 3, column 0 value.
* @property {number} r0c1 - Row 0, column 1 value.
* @property {number} r1c1 - Row 1, column 1 value.
* @property {number} r2c1 - Row 2, column 1 value.
* @property {number} r3c1 - Row 3, column 1 value.
* @property {number} r0c2 - Row 0, column 2 value.
* @property {number} r1c2 - Row 1, column 2 value.
* @property {number} r2c2 - Row 2, column 2 value.
* @property {number} r3c2 - Row 3, column 2 value.
* @property {number} r0c3 - Row 0, column 3 value.
* @property {number} r1c3 - Row 1, column 3 value.
* @property {number} r2c3 - Row 2, column 3 value.
* @property {number} r3c3 - Row 3, column 3 value.
*/
QScriptValue mat4toScriptValue(QScriptEngine* engine, const glm::mat4& mat4);
void mat4FromScriptValue(const QScriptValue& object, glm::mat4& mat4);

View file

@ -138,7 +138,7 @@ public slots:
* var pickRay = Camera.computePickRay(event.x, event.y);
* var intersection = Entities.findRayIntersection(pickRay);
* if (intersection.intersects) {
* print ("You clicked on entity " + intersection.entityID);
* print("You clicked on entity " + intersection.entityID);
* }
* }
*

View file

@ -353,10 +353,11 @@ function AppUi(properties) {
// Close if necessary, clean up any remaining handlers, and remove the button.
GlobalServices.myUsernameChanged.disconnect(restartNotificationPoll);
GlobalServices.findableByChanged.disconnect(restartNotificationPoll);
that.tablet.screenChanged.disconnect(that.onScreenChanged);
if (that.isOpen) {
that.close();
that.onScreenChanged("", "");
}
that.tablet.screenChanged.disconnect(that.onScreenChanged);
if (that.button) {
if (that.onClicked) {
that.button.clicked.disconnect(that.onClicked);

Some files were not shown because too many files have changed in this diff Show more