Merge branch 'master' of https://github.com/highfidelity/hifi into amc

Olivier Prat 2019-04-01 10:04:41 +02:00
commit 20f53f5de3
229 changed files with 7343 additions and 2971 deletions

View file

@ -0,0 +1,39 @@
#version 320 es
precision highp float;
precision highp sampler2D;
layout(location = 0) in vec4 vTexCoordLR;
layout(location = 0) out vec4 FragColorL;
layout(location = 1) out vec4 FragColorR;
uniform sampler2D sampler;
// https://software.intel.com/en-us/node/503873
// sRGB ====> Linear
vec3 color_sRGBToLinear(vec3 srgb) {
return mix(pow((srgb + vec3(0.055)) / vec3(1.055), vec3(2.4)), srgb / vec3(12.92), vec3(lessThanEqual(srgb, vec3(0.04045))));
}
vec4 color_sRGBAToLinear(vec4 srgba) {
return vec4(color_sRGBToLinear(srgba.xyz), srgba.w);
}
// Linear ====> sRGB
vec3 color_LinearTosRGB(vec3 lrgb) {
return mix(vec3(1.055) * pow(vec3(lrgb), vec3(0.41666)) - vec3(0.055), vec3(lrgb) * vec3(12.92), vec3(lessThan(lrgb, vec3(0.0031308))));
}
vec4 color_LinearTosRGBA(vec4 lrgba) {
return vec4(color_LinearTosRGB(lrgba.xyz), lrgba.w);
}
// FIXME switch to texelfetch for getting from the source texture?
void main() {
//FragColorL = color_LinearTosRGBA(texture(sampler, vTexCoordLR.xy));
//FragColorR = color_LinearTosRGBA(texture(sampler, vTexCoordLR.zw));
FragColorL = texture(sampler, vTexCoordLR.xy);
FragColorR = texture(sampler, vTexCoordLR.zw);
}
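
Editor's note: the fragment shader above carries the standard piecewise sRGB transfer functions (the 0.41666 exponent approximates 1/2.4), even though the final lines currently pass the texture through unconverted. A minimal CPU-side sketch of the same per-channel math, for reference only (not part of the commit):

#include <cmath>

// Piecewise sRGB decode/encode for a single channel, mirroring the GLSL above.
float sRGBToLinear(float srgb) {
    return (srgb <= 0.04045f) ? srgb / 12.92f
                              : std::pow((srgb + 0.055f) / 1.055f, 2.4f);
}

float linearTosRGB(float lrgb) {
    return (lrgb < 0.0031308f) ? lrgb * 12.92f
                               : 1.055f * std::pow(lrgb, 1.0f / 2.4f) - 0.055f;
}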

View file

@ -0,0 +1,21 @@
#version 320 es
layout(location = 0) out vec4 vTexCoordLR;
void main(void) {
const float depth = 0.0;
const vec4 UNIT_QUAD[4] = vec4[4](
vec4(-1.0, -1.0, depth, 1.0),
vec4(1.0, -1.0, depth, 1.0),
vec4(-1.0, 1.0, depth, 1.0),
vec4(1.0, 1.0, depth, 1.0)
);
vec4 pos = UNIT_QUAD[gl_VertexID];
gl_Position = pos;
vTexCoordLR.xy = pos.xy;
vTexCoordLR.xy += 1.0;
vTexCoordLR.y *= 0.5;
vTexCoordLR.x *= 0.25;
vTexCoordLR.zw = vTexCoordLR.xy;
vTexCoordLR.z += 0.5;
}
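
Editor's note: this vertex shader remaps the full-screen quad's clip-space coordinates so that .xy samples the left half of a side-by-side stereo source (u in [0, 0.5]) and .zw the right half (u in [0.5, 1.0]). A small sketch of the corner values under that mapping, for reference only (not part of the commit):

#include <cstdio>

// Remap one clip-space corner to left/right-eye UVs, mirroring the shader math.
void remapCorner(float x, float y) {
    float u = (x + 1.0f) * 0.25f;   // left-eye u lands in [0, 0.5]
    float v = (y + 1.0f) * 0.5f;    // v lands in [0, 1]
    std::printf("L(%.2f, %.2f)  R(%.2f, %.2f)\n", u, v, u + 0.5f, v);
}

int main() {
    remapCorner(-1.0f, -1.0f);  // L(0.00, 0.00)  R(0.50, 0.00)
    remapCorner( 1.0f,  1.0f);  // L(0.50, 1.00)  R(1.00, 1.00)
    return 0;
}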

View file

@ -7,6 +7,7 @@
//
package io.highfidelity.oculus;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
@ -24,7 +25,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
private static final String TAG = OculusMobileActivity.class.getSimpleName();
static { System.loadLibrary("oculusMobile"); }
private native void nativeOnCreate();
private native void nativeOnCreate(AssetManager assetManager);
private native static void nativeOnResume();
private native static void nativeOnPause();
private native static void nativeOnSurfaceChanged(Surface s);
@ -53,7 +54,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
mView = new SurfaceView(this);
mView.getHolder().addCallback(this);
nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnCreate();
}
@ -81,7 +82,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
Log.w(TAG, "QQQ onResume");
super.onResume();
//Reconnect the global reference back to handler
nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnResume();
nativeOnResume();
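
Editor's note: the Java changes pass the activity's AssetManager down to the native layer. The native implementation is not shown in this commit; a hedged sketch of what the receiving JNI function might look like, using the NDK's AAssetManager_fromJava (the function name below follows standard JNI mangling, but the body is purely illustrative):

#include <jni.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>

// Hypothetical native counterpart of nativeOnCreate(AssetManager); the real
// implementation lives elsewhere in the oculusMobile library.
extern "C" JNIEXPORT void JNICALL
Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject obj,
                                                                jobject assetManager) {
    // Keep a native handle to the app's assets for the lifetime of the activity.
    AAssetManager* nativeAssets = AAssetManager_fromJava(env, assetManager);
    (void) nativeAssets; // stored and used by the Oculus mobile plugin in the real code
}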

View file

@ -52,6 +52,8 @@
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
#include <hfm/ModelFormatRegistry.h>
#include "entities/AssignmentParentFinder.h"
#include "AssignmentDynamicFactory.h"
#include "RecordingScriptingInterface.h"
@ -99,6 +101,9 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();
DependencyManager::set<ModelFormatRegistry>();
DependencyManager::set<ModelCache>();
// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();
@ -819,6 +824,9 @@ void Agent::aboutToFinish() {
DependencyManager::get<ResourceManager>()->cleanup();
DependencyManager::destroy<ModelFormatRegistry>();
DependencyManager::destroy<ModelCache>();
DependencyManager::destroy<PluginManager>();
// cleanup the AudioInjectorManager (and any still running injectors)

View file

@ -18,16 +18,25 @@
#include "Agent.h"
/**jsdoc
* The <code>Agent</code> API enables an assignment client to emulate an avatar. Setting <code>isAvatar = true</code> connects
* the assignment client to the avatar and audio mixers, and enables the {@link Avatar} API to be used.
*
* @namespace Agent
*
* @hifi-assignment-client
*
* @property {boolean} isAvatar
* @property {boolean} isPlayingAvatarSound <em>Read-only.</em>
* @property {boolean} isListeningToAudioStream
* @property {boolean} isNoiseGateEnabled
* @property {number} lastReceivedAudioLoudness <em>Read-only.</em>
* @property {Uuid} sessionUUID <em>Read-only.</em>
* @property {boolean} isAvatar - <code>true</code> if the assignment client script is emulating an avatar, otherwise
* <code>false</code>.
* @property {boolean} isPlayingAvatarSound - <code>true</code> if the script has a sound to play, otherwise <code>false</code>.
* Sounds are played when <code>isAvatar</code> is <code>true</code>, from the position and with the orientation of the
* scripted avatar's head. <em>Read-only.</em>
* @property {boolean} isListeningToAudioStream - <code>true</code> if the agent is "listening" to the audio stream from the
* domain, otherwise <code>false</code>.
* @property {boolean} isNoiseGateEnabled - <code>true</code> if the noise gate is enabled, otherwise <code>false</code>. When
* enabled, the input audio stream is blocked (fully attenuated) if it falls below an adaptive threshold.
* @property {number} lastReceivedAudioLoudness - The current loudness of the audio input. Nominal range [<code>0.0</code> (no
* sound) &ndash; <code>1.0</code> (the onset of clipping)]. <em>Read-only.</em>
* @property {Uuid} sessionUUID - The unique ID associated with the agent's current session in the domain. <em>Read-only.</em>
*/
class AgentScriptingInterface : public QObject {
Q_OBJECT
@ -54,20 +63,43 @@ public:
public slots:
/**jsdoc
* Sets whether the script should emulate an avatar.
* @function Agent.setIsAvatar
* @param {boolean} isAvatar
* @param {boolean} isAvatar - <code>true</code> if the script emulates an avatar, otherwise <code>false</code>.
* @example <caption>Make an assignment client script emulate an avatar.</caption>
* (function () {
* Agent.setIsAvatar(true);
* Avatar.displayName = "AC avatar";
* print("Position: " + JSON.stringify(Avatar.position)); // 0, 0, 0
* }());
*/
void setIsAvatar(bool isAvatar) const { _agent->setIsAvatar(isAvatar); }
/**jsdoc
* Checks whether the script is emulating an avatar.
* @function Agent.isAvatar
* @returns {boolean}
* @returns {boolean} <code>true</code> if the script is emulating an avatar, otherwise <code>false</code>.
* @example <caption>Check whether the agent is emulating an avatar.</caption>
* (function () {
* print("Agent is avatar: " + Agent.isAvatar());
* print("Agent is avatar: " + Agent.isAvatar); // Same result.
* }());
*/
bool isAvatar() const { return _agent->isAvatar(); }
/**jsdoc
* Plays a sound from the position and with the orientation of the emulated avatar's head. No sound is played unless
* <code>isAvatar == true</code>.
* @function Agent.playAvatarSound
* @param {object} avatarSound
* @param {SoundObject} avatarSound - The sound played.
* @example <caption>Play a sound from an emulated avatar.</caption>
* (function () {
* Agent.isAvatar = true;
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* Script.setTimeout(function () { // Give the sound time to load.
* Agent.playAvatarSound(sound);
* }, 1000);
* }());
*/
void playAvatarSound(SharedSoundPointer avatarSound) const { _agent->playAvatarSound(avatarSound); }

View file

@ -97,6 +97,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
PacketType::RadiusIgnoreRequest,
PacketType::RequestsDomainListData,
PacketType::PerAvatarGainSet,
PacketType::InjectorGainSet,
PacketType::AudioSoloRequest },
this, "queueAudioPacket");

View file

@ -92,6 +92,9 @@ int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
case PacketType::PerAvatarGainSet:
parsePerAvatarGainSet(*packet, node);
break;
case PacketType::InjectorGainSet:
parseInjectorGainSet(*packet, node);
break;
case PacketType::NodeIgnoreRequest:
parseNodeIgnoreRequest(packet, node);
break;
@ -197,14 +200,25 @@ void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const
if (avatarUUID.isNull()) {
// set the MASTER avatar gain
setMasterAvatarGain(gain);
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
qCDebug(audio) << "Setting MASTER avatar gain for" << uuid << "to" << gain;
} else {
// set the per-source avatar gain
setGainForAvatar(avatarUUID, gain);
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to" << gain;
}
}
void AudioMixerClientData::parseInjectorGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
QUuid uuid = node->getUUID();
uint8_t packedGain;
message.readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
setMasterInjectorGain(gain);
qCDebug(audio) << "Setting MASTER injector gain for" << uuid << "to" << gain;
}
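
Editor's note: parseInjectorGainSet reads the injector gain as a single packed byte and expands it with unpackFloatGainFromByte, whose implementation is not part of this diff. Purely as an illustration of byte-packed gains (the real mapping may differ), a dB-scale unpacking could look like:

#include <cmath>
#include <cstdint>

// Hypothetical packing: map the byte range [0, 255] onto roughly [-60 dB, +20 dB].
// The real unpackFloatGainFromByte() may use a different range or curve.
float unpackGainFromByteExample(std::uint8_t packed) {
    const float MIN_DB = -60.0f;
    const float MAX_DB = 20.0f;
    float db = MIN_DB + (MAX_DB - MIN_DB) * (packed / 255.0f);
    return std::pow(10.0f, db / 20.0f);   // dB -> linear amplitude multiplier
}
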
void AudioMixerClientData::setGainForAvatar(QUuid nodeID, float gain) {
auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();

View file

@ -63,6 +63,7 @@ public:
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
void parseRequestsDomainListData(ReceivedMessage& message);
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
void parseInjectorGainSet(ReceivedMessage& message, const SharedNodePointer& node);
void parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
@ -84,6 +85,8 @@ public:
float getMasterAvatarGain() const { return _masterAvatarGain; }
void setMasterAvatarGain(float gain) { _masterAvatarGain = gain; }
float getMasterInjectorGain() const { return _masterInjectorGain; }
void setMasterInjectorGain(float gain) { _masterInjectorGain = gain; }
AudioLimiter audioLimiter;
@ -189,6 +192,7 @@ private:
int _frameToSendStats { 0 };
float _masterAvatarGain { 1.0f }; // per-listener mixing gain, applied only to avatars
float _masterInjectorGain { 1.0f }; // per-listener mixing gain, applied only to injectors
CodecPluginPointer _codec;
QString _selectedCodecName;

View file

@ -50,8 +50,8 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
// mix helpers
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
inline float computeGain(float masterAvatarGain, float masterInjectorGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance);
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition);
@ -338,8 +338,8 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
}
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
updateHRTFParameters(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
listenerData->getMasterInjectorGain());
}
return false;
});
@ -363,8 +363,8 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
}
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
updateHRTFParameters(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
listenerData->getMasterInjectorGain());
}
return false;
});
@ -381,13 +381,13 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
} else {
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
addStream(stream, *listenerAudioStream, 0.0f, isSoloing);
addStream(stream, *listenerAudioStream, 0.0f, 0.0f, isSoloing);
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(), listenerData->getMasterInjectorGain(),
isSoloing);
if (shouldBeInactive(stream)) {
@ -423,7 +423,7 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(), listenerData->getMasterInjectorGain(),
isSoloing);
if (shouldBeInactive(stream)) {
@ -491,7 +491,9 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain, bool isSoloing) {
float masterAvatarGain,
float masterInjectorGain,
bool isSoloing) {
++stats.totalMixes;
auto streamToAdd = mixableStream.positionalStream;
@ -502,13 +504,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStre
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = isEcho ? 1.0f
: (isSoloing ? masterAvatarGain
: computeGain(masterAvatarGain, masterInjectorGain, listeningNodeStream, *streamToAdd,
relativePosition, distance));
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
float gain = masterListenerGain;
if (!isSoloing) {
gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
}
const int HRTF_DATASET_INDEX = 1;
if (!streamToAdd->lastPopSucceeded()) {
@ -585,8 +586,9 @@ void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStre
}
void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
AvatarAudioStream& listeningNodeStream,
float masterAvatarGain,
float masterInjectorGain) {
auto streamToAdd = mixableStream.positionalStream;
// check if this is a server echo of a source back to itself
@ -595,7 +597,8 @@ void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream&
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float gain = isEcho ? 1.0f : computeGain(masterAvatarGain, masterInjectorGain, listeningNodeStream, *streamToAdd,
relativePosition, distance);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);
@ -720,6 +723,7 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
// injector: apply attenuation
if (streamToAdd.getType() == PositionalAudioStream::Injector) {
gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
// injector: skip master gain
}
// avatar: skip attenuation - it is too costly to approximate
@ -729,19 +733,25 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
float distance = glm::length(relativePosition);
return gain / distance;
// avatar: skip master gain - it is constant for all streams
// avatar: skip master gain
}
float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
float computeGain(float masterAvatarGain,
float masterInjectorGain,
const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition,
float distance) {
float gain = 1.0f;
// injector: apply attenuation
if (streamToAdd.getType() == PositionalAudioStream::Injector) {
gain *= reinterpret_cast<const InjectedAudioStream*>(&streamToAdd)->getAttenuationRatio();
// apply master gain
gain *= masterInjectorGain;
// avatar: apply fixed off-axis attenuation to make them quieter as they turn away
} else if (!isEcho && (streamToAdd.getType() == PositionalAudioStream::Microphone)) {
} else if (streamToAdd.getType() == PositionalAudioStream::Microphone) {
glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd.getOrientation()) * relativePosition;
// source directivity is based on angle of emission, in local coordinates
@ -754,8 +764,8 @@ float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNo
gain *= offAxisCoefficient;
// apply master gain, only to avatars
gain *= masterListenerGain;
// apply master gain
gain *= masterAvatarGain;
}
auto& audioZones = AudioMixer::getAudioZones();
@ -797,8 +807,9 @@ float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNo
return gain;
}
float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition) {
float computeAzimuth(const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition) {
glm::quat inverseOrientation = glm::inverse(listeningNodeStream.getOrientation());
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
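
Editor's note: the upshot of the computeGain/addStream signature changes is that the two master gains now target different stream types: injected streams are scaled by their own attenuation ratio and the listener's master injector gain, avatar (microphone) streams by the off-axis coefficient and the master avatar gain, while echo streams bypass the computation (gain 1.0) and soloed streams take the master avatar gain directly. A condensed, illustrative sketch of that branching (zone and distance attenuation omitted, helper parameters hypothetical):

// Condensed model of the gain selection after this change; zone and distance attenuation
// are omitted, and the boolean/ratio inputs stand in for the real stream queries.
float selectMasterGain(bool isEcho, bool isSoloing, bool isInjector,
                       float attenuationRatio, float offAxisCoefficient,
                       float masterAvatarGain, float masterInjectorGain) {
    if (isEcho) {
        return 1.0f;                                   // server echo of a source back to itself
    }
    if (isSoloing) {
        return masterAvatarGain;                       // soloed streams use the avatar master gain directly
    }
    if (isInjector) {
        return attenuationRatio * masterInjectorGain;  // injectors: own attenuation ratio * injector master gain
    }
    return offAxisCoefficient * masterAvatarGain;      // avatars: off-axis attenuation * avatar master gain
}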

View file

@ -57,10 +57,13 @@ private:
bool prepareMix(const SharedNodePointer& listener);
void addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain, bool isSoloing);
float masterAvatarGain,
float masterInjectorGain,
bool isSoloing);
void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
float masterAvatarGain,
float masterInjectorGain);
void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);
void addStreams(Node& listener, AudioMixerClientData& listenerData);

View file

@ -253,10 +253,29 @@ void AvatarMixer::start() {
int lockWait, nodeTransform, functor;
// Set our query each frame
{
_entityViewer.queryOctree();
}
// Dirty the hero status if there's been an entity change.
{
if (_dirtyHeroStatus) {
_dirtyHeroStatus = false;
nodeList->nestedEach([](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
std::for_each(cbegin, cend, [](const SharedNodePointer& node) {
if (node->getType() == NodeType::Agent) {
NodeData* nodeData = node->getLinkedData();
if (nodeData) {
auto& avatar = static_cast<AvatarMixerClientData*>(nodeData)->getAvatar();
avatar.setNeedsHeroCheck();
}
}
});
});
}
}
// Allow nodes to process any pending/queued packets across our worker threads
{
auto start = usecTimestampNow();
@ -827,7 +846,7 @@ void AvatarMixer::sendStatsPacket() {
QJsonObject avatarsObject;
auto nodeList = DependencyManager::get<NodeList>();
// add stats for each listerner
// add stats for each listener
nodeList->eachNode([&](const SharedNodePointer& node) {
QJsonObject avatarStats;
@ -851,6 +870,12 @@ void AvatarMixer::sendStatsPacket() {
avatarStats["delta_full_vs_avatar_data_kbps"] =
(double)outboundAvatarDataKbps - avatarStats[OUTBOUND_AVATAR_DATA_STATS_KEY].toDouble();
}
if (node->getType() != NodeType::Agent) { // Nodes that aren't avatars
const QString displayName
{ node->getType() == NodeType::EntityScriptServer ? "ENTITY SCRIPT SERVER" : "ENTITY SERVER" };
avatarStats["display_name"] = displayName;
}
}
avatarsObject[uuidStringWithoutCurlyBraces(node->getUUID())] = avatarStats;
@ -973,19 +998,30 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
{
const QString CONNECTION_RATE = "connection_rate";
auto nodeList = DependencyManager::get<NodeList>();
auto defaultConnectionRate = nodeList->getMaxConnectionRate();
int connectionRate = avatarMixerGroupObject[CONNECTION_RATE].toInt((int)defaultConnectionRate);
nodeList->setMaxConnectionRate(connectionRate);
bool success;
int connectionRate = avatarMixerGroupObject[CONNECTION_RATE].toString().toInt(&success);
if (success) {
nodeList->setMaxConnectionRate(connectionRate);
}
}
{ // Fraction of downstream bandwidth reserved for 'hero' avatars:
static const QString PRIORITY_FRACTION_KEY = "priority_fraction";
if (avatarMixerGroupObject.contains(PRIORITY_FRACTION_KEY)) {
float priorityFraction = float(avatarMixerGroupObject[PRIORITY_FRACTION_KEY].toDouble());
_slavePool.setPriorityReservedFraction(std::min(std::max(0.0f, priorityFraction), 1.0f));
qCDebug(avatars) << "Avatar mixer reserving" << priorityFraction << "of bandwidth for priority avatars";
}
}
const QString AVATARS_SETTINGS_KEY = "avatars";
static const QString MIN_HEIGHT_OPTION = "min_avatar_height";
float settingMinHeight = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MIN_HEIGHT_OPTION].toDouble(MIN_AVATAR_HEIGHT);
float settingMinHeight = avatarMixerGroupObject[MIN_HEIGHT_OPTION].toDouble(MIN_AVATAR_HEIGHT);
_domainMinimumHeight = glm::clamp(settingMinHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
static const QString MAX_HEIGHT_OPTION = "max_avatar_height";
float settingMaxHeight = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MAX_HEIGHT_OPTION].toDouble(MAX_AVATAR_HEIGHT);
float settingMaxHeight = avatarMixerGroupObject[MAX_HEIGHT_OPTION].toDouble(MAX_AVATAR_HEIGHT);
_domainMaximumHeight = glm::clamp(settingMaxHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
// make sure that the domain owner didn't flip min and max
@ -997,11 +1033,11 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
<< "and a maximum avatar height of" << _domainMaximumHeight;
static const QString AVATAR_WHITELIST_OPTION = "avatar_whitelist";
_slaveSharedData.skeletonURLWhitelist = domainSettings[AVATARS_SETTINGS_KEY].toObject()[AVATAR_WHITELIST_OPTION]
_slaveSharedData.skeletonURLWhitelist = avatarMixerGroupObject[AVATAR_WHITELIST_OPTION]
.toString().split(',', QString::KeepEmptyParts);
static const QString REPLACEMENT_AVATAR_OPTION = "replacement_avatar";
_slaveSharedData.skeletonReplacementURL = domainSettings[AVATARS_SETTINGS_KEY].toObject()[REPLACEMENT_AVATAR_OPTION]
_slaveSharedData.skeletonReplacementURL = avatarMixerGroupObject[REPLACEMENT_AVATAR_OPTION]
.toString();
if (_slaveSharedData.skeletonURLWhitelist.count() == 1 && _slaveSharedData.skeletonURLWhitelist[0].isEmpty()) {
@ -1018,9 +1054,12 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
void AvatarMixer::setupEntityQuery() {
_entityViewer.init();
EntityTreePointer entityTree = _entityViewer.getTree();
DependencyManager::registerInheritance<SpatialParentFinder, AssignmentParentFinder>();
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
_slaveSharedData.entityTree = _entityViewer.getTree();
DependencyManager::set<AssignmentParentFinder>(entityTree);
connect(entityTree.get(), &EntityTree::addingEntityPointer, this, &AvatarMixer::entityAdded);
connect(entityTree.get(), &EntityTree::deletingEntityPointer, this, &AvatarMixer::entityChange);
// ES query: {"avatarPriority": true, "type": "Zone"}
QJsonObject priorityZoneQuery;
@ -1028,6 +1067,7 @@ void AvatarMixer::setupEntityQuery() {
priorityZoneQuery["type"] = "Zone";
_entityViewer.getOctreeQuery().setJSONParameters(priorityZoneQuery);
_slaveSharedData.entityTree = entityTree;
}
void AvatarMixer::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
@ -1064,6 +1104,25 @@ void AvatarMixer::handleOctreePacket(QSharedPointer<ReceivedMessage> message, Sh
}
}
void AvatarMixer::entityAdded(EntityItem* entity) {
if (entity->getType() == EntityTypes::Zone) {
_dirtyHeroStatus = true;
entity->registerChangeHandler([this](const EntityItemID& entityItemID) {
entityChange();
});
}
}
void AvatarMixer::entityRemoved(EntityItem * entity) {
if (entity->getType() == EntityTypes::Zone) {
_dirtyHeroStatus = true;
}
}
void AvatarMixer::entityChange() {
_dirtyHeroStatus = true;
}
void AvatarMixer::aboutToFinish() {
DependencyManager::destroy<ResourceManager>();
DependencyManager::destroy<ResourceCacheSharedItems>();

View file

@ -34,8 +34,8 @@ public:
static bool shouldReplicateTo(const Node& from, const Node& to) {
return to.getType() == NodeType::DownstreamAvatarMixer &&
to.getPublicSocket() != from.getPublicSocket() &&
to.getLocalSocket() != from.getLocalSocket();
to.getPublicSocket() != from.getPublicSocket() &&
to.getLocalSocket() != from.getLocalSocket();
}
public slots:
@ -46,6 +46,11 @@ public slots:
void sendStatsPacket() override;
// Avatar zone possibly changed
void entityAdded(EntityItem* entity);
void entityRemoved(EntityItem* entity);
void entityChange();
private slots:
void queueIncomingPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node);
void handleAdjustAvatarSorting(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
@ -80,6 +85,7 @@ private:
// Attach to entity tree for avatar-priority zone info.
EntityTreeHeadlessViewer _entityViewer;
bool _dirtyHeroStatus { true }; // Dirty the needs-hero-update
// FIXME - new throttling - use these values somehow
float _trailingMixRatio { 0.0f };

View file

@ -129,7 +129,7 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message, const SlaveShared
incrementNumOutOfOrderSends();
}
_lastReceivedSequenceNumber = sequenceNumber;
glm::vec3 oldPosition = getPosition();
glm::vec3 oldPosition = _avatar->getClientGlobalPosition();
bool oldHasPriority = _avatar->getHasPriority();
// compute the offset to the data payload
@ -140,23 +140,13 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message, const SlaveShared
// Regardless of what the client says, restore the priority as we know it without triggering any update.
_avatar->setHasPriorityWithoutTimestampReset(oldHasPriority);
auto newPosition = getPosition();
if (newPosition != oldPosition) {
//#define AVATAR_HERO_TEST_HACK
#ifdef AVATAR_HERO_TEST_HACK
{
const static QString heroKey { "HERO" };
_avatar->setPriorityAvatar(_avatar->getDisplayName().contains(heroKey));
}
#else
auto newPosition = _avatar->getClientGlobalPosition();
if (newPosition != oldPosition || _avatar->getNeedsHeroCheck()) {
EntityTree& entityTree = *slaveSharedData.entityTree;
FindPriorityZone findPriorityZone { newPosition, false } ;
FindPriorityZone findPriorityZone { newPosition } ;
entityTree.recurseTreeWithOperation(&FindPriorityZone::operation, &findPriorityZone);
_avatar->setHasPriority(findPriorityZone.isInPriorityZone);
//if (findPriorityZone.isInPriorityZone) {
// qCWarning(avatars) << "Avatar" << _avatar->getSessionDisplayName() << "in hero zone";
//}
#endif
_avatar->setNeedsHeroCheck(false);
}
return true;
@ -341,7 +331,7 @@ void AvatarMixerClientData::checkSkeletonURLAgainstWhitelist(const SlaveSharedDa
// the returned set traits packet uses the trait version from the incoming packet
// so the client knows they should not overwrite if they have since changed the trait
_avatar->packTrait(AvatarTraits::SkeletonModelURL, *packet, traitVersion);
AvatarTraits::packVersionedTrait(AvatarTraits::SkeletonModelURL, *packet, traitVersion, *_avatar);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->sendPacket(std::move(packet), sendingNode);

View file

@ -43,12 +43,14 @@ void AvatarMixerSlave::configure(ConstIter begin, ConstIter end) {
void AvatarMixerSlave::configureBroadcast(ConstIter begin, ConstIter end,
p_high_resolution_clock::time_point lastFrameTimestamp,
float maxKbpsPerNode, float throttlingRatio) {
float maxKbpsPerNode, float throttlingRatio,
float priorityReservedFraction) {
_begin = begin;
_end = end;
_lastFrameTimestamp = lastFrameTimestamp;
_maxKbpsPerNode = maxKbpsPerNode;
_throttlingRatio = throttlingRatio;
_avatarHeroFraction = priorityReservedFraction;
}
void AvatarMixerSlave::harvestStats(AvatarMixerSlaveStats& stats) {
@ -139,7 +141,8 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
if (lastReceivedVersion > lastSentVersionRef) {
bytesWritten += addTraitsNodeHeader(listeningNodeData, sendingNodeData, traitsPacketList, bytesWritten);
// there is an update to this trait, add it to the traits packet
bytesWritten += sendingAvatar->packTrait(traitType, traitsPacketList, lastReceivedVersion);
bytesWritten += AvatarTraits::packVersionedTrait(traitType, traitsPacketList,
lastReceivedVersion, *sendingAvatar);
// update the last sent version
lastSentVersionRef = lastReceivedVersion;
// Remember which versions we sent in this particular packet
@ -194,7 +197,8 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
bytesWritten += addTraitsNodeHeader(listeningNodeData, sendingNodeData, traitsPacketList, bytesWritten);
// this instance version exists and has never been sent or is newer so we need to send it
bytesWritten += sendingAvatar->packTraitInstance(traitType, instanceID, traitsPacketList, receivedVersion);
bytesWritten += AvatarTraits::packVersionedTraitInstance(traitType, instanceID, traitsPacketList,
receivedVersion, *sendingAvatar);
if (sentInstanceIt != sentIDValuePairs.end()) {
sentInstanceIt->value = receivedVersion;
@ -308,7 +312,6 @@ namespace {
} // Close anonymous namespace.
void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node) {
const float AVATAR_HERO_FRACTION { 0.4f };
const Node* destinationNode = node.data();
auto nodeList = DependencyManager::get<NodeList>();
@ -343,7 +346,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// max number of avatarBytes per frame (13 900, typical)
const int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);
const int maxHeroBytesPerFrame = int(maxAvatarBytesPerFrame * AVATAR_HERO_FRACTION); // 5555, typical
const int maxHeroBytesPerFrame = int(maxAvatarBytesPerFrame * _avatarHeroFraction); // 5555, typical
// keep track of the number of other avatars held back in this frame
int numAvatarsHeldBack = 0;
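
Editor's note: with the hard-coded AVATAR_HERO_FRACTION replaced by the configurable _avatarHeroFraction, the hero byte budget is simply the per-frame avatar budget scaled by that fraction; using the typical figures in the comments, 13,900 bytes at the default 0.4 fraction yields roughly the 5,555 bytes noted above. A trivial sketch of that split:

// Per-frame hero budget using the typical figures quoted in the comments above.
int computeHeroBudget(int maxAvatarBytesPerFrame, float heroFraction) {
    return int(maxAvatarBytesPerFrame * heroFraction);
}
// computeHeroBudget(13900, 0.4f) -> 5560, in line with the "5555, typical" comment.
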
@ -469,8 +472,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
SortableAvatar(avatarNodeData, sourceAvatarNode, lastEncodeTime));
}
// If Avatar A's PAL WAS open but is no longer open, AND
// Avatar A is ignoring Avatar B OR Avatar B is ignoring Avatar A...
// If Node A's PAL WAS open but is no longer open, AND
// Node A is ignoring Avatar B OR Node B is ignoring Avatar A...
//
// This is a bit heavy-handed still - there are cases where a kill packet
// will be sent when it doesn't need to be (but where it _should_ be OK to send).
@ -539,7 +542,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
const MixerAvatar* sourceAvatar = sourceNodeData->getConstAvatarData();
// Typically all out-of-view avatars but such avatars' priorities will rise with time:
bool isLowerPriority = currentVariant != kHero && sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD; // XXX: hero handling?
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;
if (isLowerPriority) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
@ -548,8 +551,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
destinationNodeData->incrementAvatarInView();
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
// If the time that the mixer sent AVATAR DATA about Avatar B to Node A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Node A.
if (sourceAvatar->hasProcessedFirstIdentity()
&& destinationNodeData->getLastBroadcastTime(sourceNode->getLocalID()) <= sourceNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(*identityPacketList, sourceNodeData, *destinationNode);

View file

@ -110,7 +110,8 @@ public:
void configure(ConstIter begin, ConstIter end);
void configureBroadcast(ConstIter begin, ConstIter end,
p_high_resolution_clock::time_point lastFrameTimestamp,
float maxKbpsPerNode, float throttlingRatio);
float maxKbpsPerNode, float throttlingRatio,
float priorityReservedFraction);
void processIncomingPackets(const SharedNodePointer& node);
void broadcastAvatarData(const SharedNodePointer& node);
@ -140,6 +141,7 @@ private:
p_high_resolution_clock::time_point _lastFrameTimestamp;
float _maxKbpsPerNode { 0.0f };
float _throttlingRatio { 0.0f };
float _avatarHeroFraction { 0.4f };
AvatarMixerSlaveStats _stats;
SlaveSharedData* _sharedData;

View file

@ -76,7 +76,8 @@ void AvatarMixerSlavePool::broadcastAvatarData(ConstIter begin, ConstIter end,
float maxKbpsPerNode, float throttlingRatio) {
_function = &AvatarMixerSlave::broadcastAvatarData;
_configure = [=](AvatarMixerSlave& slave) {
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio);
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio,
_priorityReservedFraction);
};
run(begin, end);
}

View file

@ -73,7 +73,10 @@ public:
void each(std::function<void(AvatarMixerSlave& slave)> functor);
void setNumThreads(int numThreads);
int numThreads() { return _numThreads; }
int numThreads() const { return _numThreads; }
void setPriorityReservedFraction(float fraction) { _priorityReservedFraction = fraction; }
float getPriorityReservedFraction() const { return _priorityReservedFraction; }
private:
void run(ConstIter begin, ConstIter end);
@ -91,7 +94,11 @@ private:
ConditionVariable _poolCondition;
void (AvatarMixerSlave::*_function)(const SharedNodePointer& node);
std::function<void(AvatarMixerSlave&)> _configure;
// Set from Domain Settings:
float _priorityReservedFraction { 0.4f };
int _numThreads { 0 };
int _numStarted { 0 }; // guarded by _mutex
int _numFinished { 0 }; // guarded by _mutex
int _numStopped { 0 }; // guarded by _mutex

View file

@ -19,8 +19,12 @@
class MixerAvatar : public AvatarData {
public:
bool getNeedsHeroCheck() const { return _needsHeroCheck; }
void setNeedsHeroCheck(bool needsHeroCheck = true)
{ _needsHeroCheck = needsHeroCheck; }
private:
bool _needsHeroCheck { false };
};
using MixerAvatarSharedPointer = std::shared_ptr<MixerAvatar>;

View file

@ -20,25 +20,29 @@
/**jsdoc
* The <code>Avatar</code> API is used to manipulate scriptable avatars on the domain. This API is a subset of the
* {@link MyAvatar} API.
* {@link MyAvatar} API. To enable this API, set {@link Agent|Agent.isAvatar} to <code>true</code>.
*
* <p>For Interface, client entity, and avatar scripts, see {@link MyAvatar}.</p>
*
* <p><strong>Note:</strong> In the examples, use "<code>Avatar</code>" instead of "<code>MyAvatar</code>".</p>
*
* @namespace Avatar
*
* @hifi-assignment-client
*
* @property {Vec3} position
* @property {number} scale
* @property {number} density <em>Read-only.</em>
* @property {Vec3} handPosition
* @property {number} bodyYaw - The rotation left or right about an axis running from the head to the feet of the avatar.
* @comment IMPORTANT: This group of properties is copied from AvatarData.h; they should NOT be edited here.
* @property {Vec3} position - The position of the avatar.
* @property {number} scale=1.0 - The scale of the avatar. The value can be set to anything between <code>0.005</code> and
* <code>1000.0</code>. When the scale value is fetched, it may temporarily be further limited by the domain's settings.
* @property {number} density - The density of the avatar in kg/m<sup>3</sup>. The density is used to work out its mass in
* the application of physics. <em>Read-only.</em>
* @property {Vec3} handPosition - A user-defined hand position, in world coordinates. The position moves with the avatar
* but is otherwise not used or changed by Interface.
* @property {number} bodyYaw - The left or right rotation about an axis running from the head to the feet of the avatar.
* Yaw is sometimes called "heading".
* @property {number} bodyPitch - The rotation about an axis running from shoulder to shoulder of the avatar. Pitch is
* sometimes called "elevation".
* @property {number} bodyRoll - The rotation about an axis running from the chest to the back of the avatar. Roll is
* sometimes called "bank".
* @property {Quat} orientation
* @property {Quat} orientation - The orientation of the avatar.
* @property {Quat} headOrientation - The orientation of the avatar's head.
* @property {number} headPitch - The rotation about an axis running from ear to ear of the avatar's head. Pitch is
* sometimes called "elevation".
@ -46,79 +50,37 @@
* head. Yaw is sometimes called "heading".
* @property {number} headRoll - The rotation about an axis running from the nose to the back of the avatar's head. Roll is
* sometimes called "bank".
* @property {Vec3} velocity
* @property {Vec3} angularVelocity
* @property {number} audioLoudness
* @property {number} audioAverageLoudness
* @property {string} displayName
* @property {string} sessionDisplayName - Sanitized, defaulted version displayName that is defined by the AvatarMixer
* rather than by Interface clients. The result is unique among all avatars present at the time.
* @property {boolean} lookAtSnappingEnabled
* @property {string} skeletonModelURL
* @property {AttachmentData[]} attachmentData
* @property {Vec3} velocity - The current velocity of the avatar.
* @property {Vec3} angularVelocity - The current angular velocity of the avatar.
* @property {number} audioLoudness - The instantaneous loudness of the audio input that the avatar is injecting into the
* domain.
* @property {number} audioAverageLoudness - The rolling average loudness of the audio input that the avatar is injecting
* into the domain.
* @property {string} displayName - The avatar's display name.
* @property {string} sessionDisplayName - A sanitized, defaulted version of <code>displayName</code> defined by the avatar
* mixer rather than by Interface clients. The result is unique among all avatars present in the domain at the time.
* @property {boolean} lookAtSnappingEnabled=true - <code>true</code> if the avatar's eyes snap to look at another avatar's
* eyes when the other avatar is in the line of sight and also has <code>lookAtSnappingEnabled == true</code>.
* @property {string} skeletonModelURL - The avatar's FST file.
* @property {AttachmentData[]} attachmentData - Information on the avatar's attachments.<br />
* <strong>Deprecated:</strong> Use avatar entities instead.
* @property {string[]} jointNames - The list of joints in the current avatar model. <em>Read-only.</em>
* @property {Uuid} sessionUUID <em>Read-only.</em>
* @property {Mat4} sensorToWorldMatrix <em>Read-only.</em>
* @property {Mat4} controllerLeftHandMatrix <em>Read-only.</em>
* @property {Mat4} controllerRightHandMatrix <em>Read-only.</em>
* @property {number} sensorToWorldScale <em>Read-only.</em>
* @property {Uuid} sessionUUID - Unique ID of the avatar in the domain. <em>Read-only.</em>
* @property {Mat4} sensorToWorldMatrix - The scale, rotation, and translation transform from the user's real world to the
* avatar's size, orientation, and position in the virtual world. <em>Read-only.</em>
* @property {Mat4} controllerLeftHandMatrix - The rotation and translation of the left hand controller relative to the
* avatar. <em>Read-only.</em>
* @property {Mat4} controllerRightHandMatrix - The rotation and translation of the right hand controller relative to the
* avatar. <em>Read-only.</em>
* @property {number} sensorToWorldScale - The scale that transforms dimensions in the user's real world to the avatar's
* size in the virtual world. <em>Read-only.</em>
* @property {boolean} hasPriority - <code>true</code> if the avatar is in a "Hero" zone, otherwise <code>false</code>. <em>Read-only.</em>
*
* @borrows MyAvatar.getDomainMinScale as getDomainMinScale
* @borrows MyAvatar.getDomainMaxScale as getDomainMaxScale
* @borrows MyAvatar.canMeasureEyeHeight as canMeasureEyeHeight
* @borrows MyAvatar.getEyeHeight as getEyeHeight
* @borrows MyAvatar.getHeight as getHeight
* @borrows MyAvatar.setHandState as setHandState
* @borrows MyAvatar.getHandState as getHandState
* @borrows MyAvatar.setRawJointData as setRawJointData
* @borrows MyAvatar.setJointData as setJointData
* @borrows MyAvatar.setJointRotation as setJointRotation
* @borrows MyAvatar.setJointTranslation as setJointTranslation
* @borrows MyAvatar.clearJointData as clearJointData
* @borrows MyAvatar.isJointDataValid as isJointDataValid
* @borrows MyAvatar.getJointRotation as getJointRotation
* @borrows MyAvatar.getJointTranslation as getJointTranslation
* @borrows MyAvatar.getJointRotations as getJointRotations
* @borrows MyAvatar.getJointTranslations as getJointTranslations
* @borrows MyAvatar.setJointRotations as setJointRotations
* @borrows MyAvatar.setJointTranslations as setJointTranslations
* @borrows MyAvatar.clearJointsData as clearJointsData
* @borrows MyAvatar.getJointIndex as getJointIndex
* @borrows MyAvatar.getJointNames as getJointNames
* @borrows MyAvatar.setBlendshape as setBlendshape
* @borrows MyAvatar.getAttachmentsVariant as getAttachmentsVariant
* @borrows MyAvatar.setAttachmentsVariant as setAttachmentsVariant
* @borrows MyAvatar.updateAvatarEntity as updateAvatarEntity
* @borrows MyAvatar.clearAvatarEntity as clearAvatarEntity
* @borrows MyAvatar.setForceFaceTrackerConnected as setForceFaceTrackerConnected
* @borrows MyAvatar.getAttachmentData as getAttachmentData
* @borrows MyAvatar.setAttachmentData as setAttachmentData
* @borrows MyAvatar.attach as attach
* @borrows MyAvatar.detachOne as detachOne
* @borrows MyAvatar.detachAll as detachAll
* @borrows MyAvatar.getAvatarEntityData as getAvatarEntityData
* @borrows MyAvatar.setAvatarEntityData as setAvatarEntityData
* @borrows MyAvatar.getSensorToWorldMatrix as getSensorToWorldMatrix
* @borrows MyAvatar.getSensorToWorldScale as getSensorToWorldScale
* @borrows MyAvatar.getControllerLeftHandMatrix as getControllerLeftHandMatrix
* @borrows MyAvatar.getControllerRightHandMatrix as getControllerRightHandMatrix
* @borrows MyAvatar.getDataRate as getDataRate
* @borrows MyAvatar.getUpdateRate as getUpdateRate
* @borrows MyAvatar.displayNameChanged as displayNameChanged
* @borrows MyAvatar.sessionDisplayNameChanged as sessionDisplayNameChanged
* @borrows MyAvatar.skeletonModelURLChanged as skeletonModelURLChanged
* @borrows MyAvatar.lookAtSnappingChanged as lookAtSnappingChanged
* @borrows MyAvatar.sessionUUIDChanged as sessionUUIDChanged
* @borrows MyAvatar.sendAvatarDataPacket as sendAvatarDataPacket
* @borrows MyAvatar.sendIdentityPacket as sendIdentityPacket
* @borrows MyAvatar.setJointMappingsFromNetworkReply as setJointMappingsFromNetworkReply
* @borrows MyAvatar.setSessionUUID as setSessionUUID
* @borrows MyAvatar.getAbsoluteJointRotationInObjectFrame as getAbsoluteJointRotationInObjectFrame
* @borrows MyAvatar.getAbsoluteJointTranslationInObjectFrame as getAbsoluteJointTranslationInObjectFrame
* @borrows MyAvatar.setAbsoluteJointRotationInObjectFrame as setAbsoluteJointRotationInObjectFrame
* @borrows MyAvatar.setAbsoluteJointTranslationInObjectFrame as setAbsoluteJointTranslationInObjectFrame
* @borrows MyAvatar.getTargetScale as getTargetScale
* @borrows MyAvatar.resetLastSent as resetLastSent
* @example <caption>Create a scriptable avatar.</caption>
* (function () {
* Agent.setIsAvatar(true);
* print("Position: " + JSON.stringify(Avatar.position)); // 0, 0, 0
* }());
*/
class ScriptableAvatar : public AvatarData, public Dependency {
@ -132,15 +94,17 @@ public:
ScriptableAvatar();
/**jsdoc
* Starts playing an animation on the avatar.
* @function Avatar.startAnimation
* @param {string} url
* @param {number} [fps=30]
* @param {number} [priority=1]
* @param {boolean} [loop=false]
* @param {boolean} [hold=false]
* @param {number} [firstFrame=0]
* @param {number} [lastFrame=3.403e+38]
* @param {string[]} [maskedJoints=[]]
* @param {string} url - The animation file's URL. Animation files need to be in the FBX format but only need to contain
* the avatar skeleton and animation data.
* @param {number} [fps=30] - The frames per second (FPS) rate for the animation playback. 30 FPS is normal speed.
* @param {number} [priority=1] - <em>Not used.</em>
* @param {boolean} [loop=false] - <code>true</code> if the animation should loop, <code>false</code> if it shouldn't.
* @param {boolean} [hold=false] - <em>Not used.</em>
* @param {number} [firstFrame=0] - The frame at which the animation starts.
* @param {number} [lastFrame=3.403e+38] - The frame at which the animation stops.
* @param {string[]} [maskedJoints=[]] - The names of joints that should not be animated.
*/
/// Allows scripts to run animations.
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
@ -148,39 +112,37 @@ public:
const QStringList& maskedJoints = QStringList());
/**jsdoc
* Stops playing the current animation.
* @function Avatar.stopAnimation
*/
Q_INVOKABLE void stopAnimation();
/**jsdoc
* Gets the details of the current avatar animation that is being or was recently played.
* @function Avatar.getAnimationDetails
* @returns {Avatar.AnimationDetails}
* @returns {Avatar.AnimationDetails} The current or recent avatar animation.
* @example <caption>Report the current animation details.</caption>
* var animationDetails = Avatar.getAnimationDetails();
* print("Animation details: " + JSON.stringify(animationDetails));
*/
Q_INVOKABLE AnimationDetails getAnimationDetails();
/**jsdoc
* Get the names of all the joints in the current avatar.
* @function MyAvatar.getJointNames
* @returns {string[]} The joint names.
* @example <caption>Report the names of all the joints in your current avatar.</caption>
* print(JSON.stringify(MyAvatar.getJointNames()));
*/
* @comment Uses the base class's JSDoc.
*/
Q_INVOKABLE virtual QStringList getJointNames() const override;
/**jsdoc
* Get the joint index for a named joint. The joint index value is the position of the joint in the array returned by
* {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}.
* @function MyAvatar.getJointIndex
* @param {string} name - The name of the joint.
* @returns {number} The index of the joint.
* @example <caption>Report the index of your avatar's left arm joint.</caption>
* print(JSON.stringify(MyAvatar.getJointIndex("LeftArm"));
*/
* @comment Uses the base class's JSDoc.
*/
/// Returns the index of the joint with the specified name, or -1 if not found/unknown.
Q_INVOKABLE virtual int getJointIndex(const QString& name) const override;
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
/**jsdoc
* @comment Uses the base class's JSDoc.
*/
int sendAvatarDataPacket(bool sendAll = false) override;
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
@ -192,32 +154,42 @@ public:
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
/**jsdoc
* Potentially Very Expensive. Do not use.
/**jsdoc
* Gets details of all avatar entities.
* <p><strong>Warning:</strong> Potentially an expensive call. Do not use if possible.</p>
* @function Avatar.getAvatarEntityData
* @returns {object}
* @returns {AvatarEntityMap} Details of the avatar entities.
* @example <caption>Report the current avatar entities.</caption>
* var avatarEntityData = Avatar.getAvatarEntityData();
* print("Avatar entities: " + JSON.stringify(avatarEntityData));
*/
Q_INVOKABLE AvatarEntityMap getAvatarEntityData() const override;
/**jsdoc
* @function MyAvatar.setAvatarEntityData
* @param {object} avatarEntityData
*/
* Sets all avatar entities from an object.
* <p><strong>Warning:</strong> Potentially an expensive call. Do not use if possible.</p>
* @function Avatar.setAvatarEntityData
* @param {AvatarEntityMap} avatarEntityData - Details of the avatar entities.
*/
Q_INVOKABLE void setAvatarEntityData(const AvatarEntityMap& avatarEntityData) override;
/**jsdoc
* @function MyAvatar.updateAvatarEntity
* @param {Uuid} entityID
* @param {string} entityData
* @comment Uses the base class's JSDoc.
*/
Q_INVOKABLE void updateAvatarEntity(const QUuid& entityID, const QByteArray& entityData) override;
public slots:
/**jsdoc
* @function Avatar.update
* @param {number} deltaTime - Delta time.
* @deprecated This function is deprecated and will be removed.
*/
void update(float deltatime);
/**jsdoc
* @function MyAvatar.setJointMappingsFromNetworkReply
*/
* @function Avatar.setJointMappingsFromNetworkReply
* @deprecated This function is deprecated and will be removed.
*/
void setJointMappingsFromNetworkReply();
private:

View file

@ -1,7 +1,7 @@
macro(TARGET_PYTHON)
if (NOT HIFI_PYTHON_EXEC)
# Find the python interpreter
if (CAME_VERSION VERSION_LESS 3.12)
if (CMAKE_VERSION VERSION_LESS 3.12)
# this logic is deprecated in CMake after 3.12
# FIXME eventually we should make 3.12 the min cmake verion and just use the Python3 find_package path
set(Python_ADDITIONAL_VERSIONS 3)

View file

@ -1310,6 +1310,15 @@
"placeholder": "50",
"default": "50",
"advanced": true
},
{
"name": "priority_fraction",
"type": "double",
"label": "Hero Bandwidth",
"help": "Fraction of downstream bandwidth reserved for avatars in 'Hero' zones",
"placeholder": "0.40",
"default": "0.40",
"advanced": true
}
]
},

View file

@ -1766,14 +1766,14 @@ void DomainServer::processOctreeDataRequestMessage(QSharedPointer<ReceivedMessag
bool remoteHasExistingData { false };
QUuid id;
int version;
int dataVersion;
message->readPrimitive(&remoteHasExistingData);
if (remoteHasExistingData) {
constexpr size_t UUID_SIZE_BYTES = 16;
auto idData = message->read(UUID_SIZE_BYTES);
id = QUuid::fromRfc4122(idData);
message->readPrimitive(&version);
qCDebug(domain_server) << "Entity server does have existing data: ID(" << id << ") DataVersion(" << version << ")";
message->readPrimitive(&dataVersion);
qCDebug(domain_server) << "Entity server does have existing data: ID(" << id << ") DataVersion(" << dataVersion << ")";
} else {
qCDebug(domain_server) << "Entity server does not have existing data";
}
@ -1782,11 +1782,11 @@ void DomainServer::processOctreeDataRequestMessage(QSharedPointer<ReceivedMessag
auto reply = NLPacketList::create(PacketType::OctreeDataFileReply, QByteArray(), true, true);
OctreeUtils::RawEntityData data;
if (data.readOctreeDataInfoFromFile(entityFilePath)) {
if (data.id == id && data.version <= version) {
if (data.id == id && data.dataVersion <= dataVersion) {
qCDebug(domain_server) << "ES has sufficient octree data, not sending data";
reply->writePrimitive(false);
} else {
qCDebug(domain_server) << "Sending newer octree data to ES: ID(" << data.id << ") DataVersion(" << data.version << ")";
qCDebug(domain_server) << "Sending newer octree data to ES: ID(" << data.id << ") DataVersion(" << data.dataVersion << ")";
QFile file(entityFilePath);
if (file.open(QIODevice::ReadOnly)) {
reply->writePrimitive(true);

View file

@ -379,9 +379,9 @@ Item {
Component.onCompleted: {
// with the link.
if (completeProfileBody.withOculus) {
termsText.text = qsTr("By signing up, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By signing up, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
} else {
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}
}

View file

@ -395,7 +395,7 @@ Item {
text: signUpBody.termsContainerText
Component.onCompleted: {
// with the link.
termsText.text = qsTr("By signing up, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By signing up, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}

View file

@ -218,7 +218,7 @@ Item {
text: usernameCollisionBody.termsContainerText
Component.onCompleted: {
// with the link.
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}

View file

@ -232,6 +232,10 @@ Item {
text: "Audio Codec: " + root.audioCodec + " Noise Gate: " +
root.audioNoiseGate;
}
StatText {
visible: root.expanded;
text: "Injectors (Local/NonLocal): " + root.audioInjectors.x + "/" + root.audioInjectors.y;
}
StatText {
visible: root.expanded;
text: "Entity Servers In: " + root.entityPacketsInKbps + " kbps";

View file

@ -9,7 +9,7 @@
//
import QtQuick 2.7
import Qt.labs.folderlistmodel 2.1
import Qt.labs.folderlistmodel 2.2
import Qt.labs.settings 1.0
import QtQuick.Dialogs 1.2 as OriginalDialogs
import QtQuick.Controls 1.4 as QQC1
@ -320,6 +320,7 @@ ModalWindow {
FolderListModel {
id: folderListModel
nameFilters: selectionType.currentFilter
caseSensitive: false
showDirsFirst: true
showDotAndDotDot: false
showFiles: !root.selectDirectory

View file

@ -9,7 +9,7 @@
//
import QtQuick 2.7
import Qt.labs.folderlistmodel 2.1
import Qt.labs.folderlistmodel 2.2
import Qt.labs.settings 1.0
import QtQuick.Dialogs 1.2 as OriginalDialogs
import QtQuick.Controls 1.4 as QQC1
@ -285,6 +285,7 @@ TabletModalWindow {
FolderListModel {
id: folderListModel
nameFilters: selectionType.currentFilter
caseSensitive: false
showDirsFirst: true
showDotAndDotDot: false
showFiles: !root.selectDirectory

View file

@ -28,7 +28,7 @@ TabletModalWindow {
id: mouse;
anchors.fill: parent
}
function click(button) {
clickedButton = button;
selected(button);

View file

@ -16,6 +16,8 @@ Rectangle {
property bool keyboardRaised: false
property bool punctuationMode: false
HifiConstants { id: hifi }
HifiControls.Keyboard {
id: keyboard
z: 1000
@ -48,6 +50,7 @@ Rectangle {
property var jointNames: []
property var currentAvatarSettings;
property bool wearablesFrozen;
function fetchAvatarModelName(marketId, avatar) {
var xmlhttp = new XMLHttpRequest();
@ -187,6 +190,8 @@ Rectangle {
updateCurrentAvatarInBookmarks(currentAvatar);
} else if (message.method === 'selectAvatarEntity') {
adjustWearables.selectWearableByID(message.entityID);
} else if (message.method === 'wearablesFrozenChanged') {
wearablesFrozen = message.wearablesFrozen;
}
}
@ -507,6 +512,7 @@ Rectangle {
}
SquareLabel {
id: adjustLabel
anchors.right: parent.right
anchors.verticalCenter: wearablesLabel.verticalCenter
glyphText: "\ue02e"
@ -515,6 +521,17 @@ Rectangle {
adjustWearables.open(currentAvatar);
}
}
SquareLabel {
anchors.right: adjustLabel.left
anchors.verticalCenter: wearablesLabel.verticalCenter
anchors.rightMargin: 15
glyphText: wearablesFrozen ? hifi.glyphs.lock : hifi.glyphs.unlock;
onClicked: {
emitSendToScript({'method' : 'toggleWearablesFrozen'});
}
}
}
Rectangle {

View file

@ -40,6 +40,7 @@ Item {
property bool isConcurrency: action === 'concurrency';
property bool isAnnouncement: action === 'announcement';
property bool isStacked: !isConcurrency && drillDownToPlace;
property bool has3DHTML: PlatformInfo.has3DHTML();
property int textPadding: 10;
@ -298,7 +299,7 @@ Item {
StateImage {
id: actionIcon;
visible: !isAnnouncement;
visible: !isAnnouncement && has3DHTML;
imageURL: "../../images/info-icon-2-state.svg";
size: 30;
buttonState: messageArea.containsMouse ? 1 : 0;
@ -315,7 +316,7 @@ Item {
}
MouseArea {
id: messageArea;
visible: !isAnnouncement;
visible: !isAnnouncement && has3DHTML;
width: parent.width;
height: messageHeight;
anchors.top: lobby.bottom;

View file

@ -46,6 +46,8 @@ Item {
property string placeName: ""
property string profilePicBorderColor: (connectionStatus == "connection" ? hifi.colors.indigoAccent : (connectionStatus == "friend" ? hifi.colors.greenHighlight : "transparent"))
property alias avImage: avatarImage
property bool has3DHTML: PlatformInfo.has3DHTML();
Item {
id: avatarImage
visible: profileUrl !== "" && userName !== "";
@ -94,10 +96,12 @@ Item {
enabled: (selected && activeTab == "nearbyTab") || isMyCard;
hoverEnabled: enabled
onClicked: {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
}
}
onEntered: infoHoverImage.visible = true;
onEntered: infoHoverImage.visible = has3DHTML;
onExited: infoHoverImage.visible = false;
}
}
@ -352,7 +356,7 @@ Item {
}
StateImage {
id: nameCardConnectionInfoImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && has3DHTML
imageURL: "../../images/info-icon-2-state.svg" // PLACEHOLDER!!!
size: 32;
buttonState: 0;
@ -364,8 +368,10 @@ Item {
enabled: selected
hoverEnabled: true
onClicked: {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
}
}
onEntered: {
nameCardConnectionInfoImage.buttonState = 1;
@ -376,8 +382,7 @@ Item {
}
FiraSansRegular {
id: nameCardConnectionInfoText
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
width: parent.width
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && PlatformInfo.has3DHTML()
height: displayNameTextPixelSize
size: displayNameTextPixelSize - 4
anchors.left: nameCardConnectionInfoImage.right
@ -391,9 +396,10 @@ Item {
id: nameCardRemoveConnectionImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
text: hifi.glyphs.close
size: 28;
size: 24;
x: 120
anchors.verticalCenter: nameCardConnectionInfoImage.verticalCenter
anchors.left: has3DHTML ? nameCardConnectionInfoText.right + 10 : avatarImage.right
}
MouseArea {
anchors.fill:nameCardRemoveConnectionImage

View file

@ -1261,6 +1261,14 @@ Rectangle {
case 'refreshConnections':
refreshConnections();
break;
case 'connectionRemoved':
for (var i=0; i<connectionsUserModel.count; ++i) {
if (connectionsUserModel.get(i).userName === message.params) {
connectionsUserModel.remove(i);
break;
}
}
break;
case 'avatarDisconnected':
var sessionID = message.params[0];
delete ignored[sessionID];

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.7
import QtQuick 2.10
import QtQuick.Controls 2.2
import QtQuick.Layouts 1.3
@ -31,6 +31,8 @@ Rectangle {
property string title: "Audio Settings"
property int switchHeight: 16
property int switchWidth: 40
readonly property real verticalScrollWidth: 10
readonly property real verticalScrollShaft: 8
signal sendToScript(var message);
color: hifi.colors.baseGray;
@ -42,7 +44,7 @@ Rectangle {
property bool isVR: AudioScriptingInterface.context === "VR"
property real rightMostInputLevelPos: 450
property real rightMostInputLevelPos: 440
//placeholder for control sizes and paddings
//recalculates dynamically in case of UI size is changed
QtObject {
@ -60,8 +62,8 @@ Rectangle {
id: bar
spacing: 0
width: parent.width
height: 42
currentIndex: isVR ? 1 : 0
height: 28;
currentIndex: isVR ? 1 : 0;
AudioControls.AudioTabButton {
height: parent.height
@ -85,32 +87,92 @@ Rectangle {
}
function updateMyAvatarGainFromQML(sliderValue, isReleased) {
if (Users.getAvatarGain(myAvatarUuid) != sliderValue) {
Users.setAvatarGain(myAvatarUuid, sliderValue);
if (AudioScriptingInterface.getAvatarGain() != sliderValue) {
AudioScriptingInterface.setAvatarGain(sliderValue);
}
}
function updateInjectorGainFromQML(sliderValue, isReleased) {
if (AudioScriptingInterface.getInjectorGain() != sliderValue) {
AudioScriptingInterface.setInjectorGain(sliderValue); // server side
AudioScriptingInterface.setLocalInjectorGain(sliderValue); // client side
}
}
function updateSystemInjectorGainFromQML(sliderValue, isReleased) {
if (AudioScriptingInterface.getSystemInjectorGain() != sliderValue) {
AudioScriptingInterface.setSystemInjectorGain(sliderValue);
}
}
Component.onCompleted: enablePeakValues();
Column {
id: column
spacing: 12;
anchors.top: bar.bottom
anchors.bottom: parent.bottom
anchors.bottomMargin: 5
Flickable {
id: flickView;
anchors.top: bar.bottom;
anchors.left: parent.left;
anchors.bottom: parent.bottom;
width: parent.width;
contentWidth: parent.width;
contentHeight: contentItem.childrenRect.height;
boundsBehavior: Flickable.DragOverBounds;
flickableDirection: Flickable.VerticalFlick;
property bool isScrolling: (contentHeight - height) > 10 ? true : false;
clip: true;
Separator { }
ScrollBar.vertical: ScrollBar {
policy: flickView.isScrolling ? ScrollBar.AlwaysOn : ScrollBar.AlwaysOff;
parent: flickView.parent;
anchors.top: flickView.top;
anchors.right: flickView.right;
anchors.bottom: flickView.bottom;
anchors.rightMargin: -verticalScrollWidth; //compensate flickView's right margin
background: Item {
implicitWidth: verticalScrollWidth;
Rectangle {
color: hifi.colors.darkGray30;
radius: 4;
anchors {
fill: parent;
topMargin: -1; // Finesse size
bottomMargin: -2;
}
}
}
contentItem: Item {
implicitWidth: verticalScrollShaft;
Rectangle {
radius: verticalScrollShaft/2;
color: hifi.colors.white30;
anchors {
fill: parent;
leftMargin: 2; // Finesse size and position.
topMargin: 1;
bottomMargin: 1;
}
}
}
}
RowLayout {
Separator {
id: firstSeparator;
anchors.top: parent.top;
}
Item {
id: switchesContainer;
x: 2 * margins.paddings;
width: parent.width;
// switch heights + 2 * top margins
height: (root.switchHeight) * 3 + 48;
anchors.top: firstSeparator.bottom;
anchors.topMargin: 10;
// mute is in its own row
ColumnLayout {
id: columnOne
spacing: 24;
x: margins.paddings
Item {
id: switchContainer;
x: margins.paddings;
width: parent.width / 2;
height: parent.height;
anchors.left: parent.left;
HifiControlsUit.Switch {
id: muteMic;
height: root.switchHeight;
@ -129,8 +191,12 @@ Rectangle {
}
HifiControlsUit.Switch {
id: noiseReductionSwitch;
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: muteMic.bottom;
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: "Noise Reduction";
backgroundOnColor: "#E3E3E3";
checked: AudioScriptingInterface.noiseReduction;
@ -144,6 +210,9 @@ Rectangle {
id: pttSwitch
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: noiseReductionSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: qsTr("Push To Talk (T)");
backgroundOnColor: "#E3E3E3";
checked: (bar.currentIndex === 0) ? AudioScriptingInterface.pushToTalkDesktop : AudioScriptingInterface.pushToTalkHMD;
@ -164,12 +233,18 @@ Rectangle {
}
}
ColumnLayout {
spacing: 24;
Item {
id: additionalSwitchContainer
width: switchContainer.width - margins.paddings;
height: parent.height;
anchors.top: parent.top
anchors.left: switchContainer.right;
HifiControlsUit.Switch {
id: warnMutedSwitch
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: parent.top
anchors.left: parent.left
labelTextOn: qsTr("Warn when muted");
backgroundOnColor: "#E3E3E3";
checked: AudioScriptingInterface.warnWhenMuted;
@ -184,6 +259,9 @@ Rectangle {
id: audioLevelSwitch
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: warnMutedSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: qsTr("Audio Level Meter");
backgroundOnColor: "#E3E3E3";
checked: AvatarInputs.showAudioTools;
@ -197,6 +275,9 @@ Rectangle {
id: stereoInput;
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: audioLevelSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: qsTr("Stereo input");
backgroundOnColor: "#E3E3E3";
checked: AudioScriptingInterface.isStereoInput;
@ -210,17 +291,20 @@ Rectangle {
}
Item {
anchors.left: parent.left
id: pttTextContainer
anchors.top: switchesContainer.bottom;
anchors.topMargin: 10;
anchors.left: parent.left;
width: rightMostInputLevelPos;
height: pttText.height;
RalewayRegular {
id: pttText
id: pttText;
x: margins.paddings;
color: hifi.colors.white;
width: rightMostInputLevelPos;
height: paintedHeight;
wrapMode: Text.WordWrap;
font.italic: true
font.italic: true;
size: 16;
text: (bar.currentIndex === 0) ? qsTr("Press and hold the button \"T\" to talk.") :
@ -228,28 +312,35 @@ Rectangle {
}
}
Separator { }
Separator {
id: secondSeparator;
anchors.top: pttTextContainer.bottom;
anchors.topMargin: 10;
}
Item {
id: inputDeviceHeader
x: margins.paddings;
width: parent.width - margins.paddings*2
height: 36
width: parent.width - margins.paddings*2;
height: 36;
anchors.top: secondSeparator.bottom;
anchors.topMargin: 10;
HiFiGlyphs {
width: margins.sizeCheckBox
width: margins.sizeCheckBox;
text: hifi.glyphs.mic;
color: hifi.colors.white;
anchors.left: parent.left
anchors.leftMargin: -size/4 //the glyph has empty space at left about 25%
anchors.left: parent.left;
anchors.leftMargin: -size/4; // the glyph has about 25% empty space on its left
anchors.verticalCenter: parent.verticalCenter;
size: 30;
}
RalewayRegular {
anchors.verticalCenter: parent.verticalCenter;
width: margins.sizeText + margins.sizeLevel
anchors.left: parent.left
anchors.leftMargin: margins.sizeCheckBox
width: margins.sizeText + margins.sizeLevel;
anchors.left: parent.left;
anchors.leftMargin: margins.sizeCheckBox;
size: 16;
color: hifi.colors.white;
text: qsTr("Choose input device");
@ -257,12 +348,13 @@ Rectangle {
}
ListView {
id: inputView
width: parent.width - margins.paddings*2
id: inputView;
width: parent.width - margins.paddings*2;
anchors.top: inputDeviceHeader.bottom;
anchors.topMargin: 10;
x: margins.paddings
height: Math.min(150, contentHeight);
height: contentHeight;
spacing: 4;
snapMode: ListView.SnapToItem;
clip: true;
model: AudioScriptingInterface.devices.input;
delegate: Item {
@ -301,17 +393,28 @@ Rectangle {
}
}
}
AudioControls.LoopbackAudio {
id: loopbackAudio
x: margins.paddings
anchors.top: inputView.bottom;
anchors.topMargin: 10;
visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
anchors { left: parent.left; leftMargin: margins.paddings }
}
Separator {}
Separator {
id: thirdSeparator;
anchors.top: loopbackAudio.bottom;
anchors.topMargin: 10;
}
Item {
id: outputDeviceHeader;
anchors.topMargin: 10;
anchors.top: thirdSeparator.bottom;
x: margins.paddings;
width: parent.width - margins.paddings*2
height: 36
@ -341,9 +444,10 @@ Rectangle {
id: outputView
width: parent.width - margins.paddings*2
x: margins.paddings
height: Math.min(360 - inputView.height, contentHeight);
height: contentHeight;
anchors.top: outputDeviceHeader.bottom;
anchors.topMargin: 10;
spacing: 4;
snapMode: ListView.SnapToItem;
clip: true;
model: AudioScriptingInterface.devices.output;
delegate: Item {
@ -370,20 +474,22 @@ Rectangle {
}
Item {
id: gainContainer
id: avatarGainContainer
x: margins.paddings;
anchors.top: outputView.bottom;
anchors.topMargin: 10;
width: parent.width - margins.paddings*2
height: gainSliderTextMetrics.height
height: avatarGainSliderTextMetrics.height
HifiControlsUit.Slider {
id: gainSlider
id: avatarGainSlider
anchors.right: parent.right
height: parent.height
width: 200
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
value: Users.getAvatarGain(myAvatarUuid)
value: AudioScriptingInterface.getAvatarGain()
onValueChanged: {
updateMyAvatarGainFromQML(value, false);
}
@ -399,7 +505,7 @@ Rectangle {
// Do nothing.
}
onDoubleClicked: {
gainSlider.value = 0.0
avatarGainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
@ -413,13 +519,13 @@ Rectangle {
}
}
TextMetrics {
id: gainSliderTextMetrics
text: gainSliderText.text
font: gainSliderText.font
id: avatarGainSliderTextMetrics
text: avatarGainSliderText.text
font: avatarGainSliderText.font
}
RalewayRegular {
// The slider for my card is special, it controls the master gain
id: gainSliderText;
id: avatarGainSliderText;
text: "Avatar volume";
size: 16;
anchors.left: parent.left;
@ -429,12 +535,133 @@ Rectangle {
}
}
AudioControls.PlaySampleSound {
x: margins.paddings
Item {
id: injectorGainContainer
x: margins.paddings;
width: parent.width - margins.paddings*2
height: injectorGainSliderTextMetrics.height
anchors.top: avatarGainContainer.bottom;
anchors.topMargin: 10;
visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
anchors { left: parent.left; leftMargin: margins.paddings }
HifiControlsUit.Slider {
id: injectorGainSlider
anchors.right: parent.right
height: parent.height
width: 200
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
value: AudioScriptingInterface.getInjectorGain()
onValueChanged: {
updateInjectorGainFromQML(value, false);
}
onPressedChanged: {
if (!pressed) {
updateInjectorGainFromQML(value, false);
}
}
MouseArea {
anchors.fill: parent
onWheel: {
// Do nothing.
}
onDoubleClicked: {
injectorGainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
mouse.accepted = false
}
onReleased: {
// the above mouse.accepted seems to make this
// never get called, nonetheless...
mouse.accepted = false
}
}
}
TextMetrics {
id: injectorGainSliderTextMetrics
text: injectorGainSliderText.text
font: injectorGainSliderText.font
}
RalewayRegular {
id: injectorGainSliderText;
text: "Environment volume";
size: 16;
anchors.left: parent.left;
color: hifi.colors.white;
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
}
Item {
id: systemInjectorGainContainer
x: margins.paddings;
width: parent.width - margins.paddings*2
height: systemInjectorGainSliderTextMetrics.height
anchors.top: injectorGainContainer.bottom;
anchors.topMargin: 10;
HifiControlsUit.Slider {
id: systemInjectorGainSlider
anchors.right: parent.right
height: parent.height
width: 200
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
value: AudioScriptingInterface.getSystemInjectorGain()
onValueChanged: {
updateSystemInjectorGainFromQML(value, false);
}
onPressedChanged: {
if (!pressed) {
updateSystemInjectorGainFromQML(value, false);
}
}
MouseArea {
anchors.fill: parent
onWheel: {
// Do nothing.
}
onDoubleClicked: {
systemInjectorGainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
mouse.accepted = false
}
onReleased: {
// the above mouse.accepted seems to make this
// never get called, nonetheless...
mouse.accepted = false
}
}
}
TextMetrics {
id: systemInjectorGainSliderTextMetrics
text: systemInjectorGainSliderText.text
font: systemInjectorGainSliderText.font
}
RalewayRegular {
id: systemInjectorGainSliderText;
text: "System Sound volume";
size: 16;
anchors.left: parent.left;
color: hifi.colors.white;
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
}
AudioControls.PlaySampleSound {
id: playSampleSound
x: margins.paddings
anchors.top: systemInjectorGainContainer.bottom;
anchors.topMargin: 10;
}
}
}

View file

@ -16,7 +16,7 @@ import stylesUit 1.0
TabButton {
id: control
font.pixelSize: height / 2
font.pixelSize: 14
HifiConstants { id: hifi; }

View file

@ -17,17 +17,17 @@ import stylesUit 1.0
import controlsUit 1.0 as HifiControlsUit
RowLayout {
property bool audioLoopedBack: AudioScriptingInterface.getServerEcho();
property bool audioLoopedBack: AudioScriptingInterface.getLocalEcho();
function startAudioLoopback() {
if (!audioLoopedBack) {
audioLoopedBack = true;
AudioScriptingInterface.setServerEcho(true);
AudioScriptingInterface.setLocalEcho(true);
}
}
function stopAudioLoopback() {
if (audioLoopedBack) {
audioLoopedBack = false;
AudioScriptingInterface.setServerEcho(false);
AudioScriptingInterface.setLocalEcho(false);
}
}
@ -44,8 +44,11 @@ RowLayout {
}
HifiControlsUit.Button {
text: audioLoopedBack ? qsTr("STOP TESTING YOUR VOICE") : qsTr("TEST YOUR VOICE");
text: audioLoopedBack ? qsTr("STOP TESTING VOICE") : qsTr("TEST YOUR VOICE");
color: audioLoopedBack ? hifi.buttons.red : hifi.buttons.blue;
fontSize: 15;
width: 200;
height: 32;
onClicked: {
if (audioLoopedBack) {
loopbackTimer.stop();
@ -57,11 +60,11 @@ RowLayout {
}
}
RalewayRegular {
Layout.leftMargin: 2;
size: 14;
color: "white";
font.italic: true
text: audioLoopedBack ? qsTr("Speak in your input") : "";
}
// RalewayRegular {
// Layout.leftMargin: 2;
// size: 14;
// color: "white";
// font.italic: true
// text: audioLoopedBack ? qsTr("Speak in your input") : "";
// }
}

View file

@ -56,16 +56,19 @@ RowLayout {
HifiConstants { id: hifi; }
HifiControlsUit.Button {
text: isPlaying ? qsTr("STOP TESTING YOUR SOUND") : qsTr("TEST YOUR SOUND");
text: isPlaying ? qsTr("STOP TESTING") : qsTr("TEST YOUR SOUND");
color: isPlaying ? hifi.buttons.red : hifi.buttons.blue;
onClicked: isPlaying ? stopSound() : playSound();
fontSize: 15;
width: 200;
height: 32;
}
RalewayRegular {
Layout.leftMargin: 2;
size: 14;
color: "white";
font.italic: true
text: isPlaying ? qsTr("Listen to your output") : "";
}
// RalewayRegular {
// Layout.leftMargin: 2;
// size: 14;
// color: "white";
// font.italic: true
// text: isPlaying ? qsTr("Listen to your output") : "";
// }
}

View file

@ -133,7 +133,7 @@ Item {
states: [
State {
name: AvatarPackagerState.main
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; videoEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerMain; visible: true }
PropertyChanges { target: avatarPackagerFooter; content: avatarPackagerMain.footer }
},
@ -229,7 +229,11 @@ Item {
}
function openDocs() {
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/create-avatars#how-to-package-your-avatar");
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/package-avatar.html");
}
function openVideo() {
Qt.openUrlExternally("https://youtu.be/zrkEowu_yps");
}
AvatarPackagerHeader {
@ -243,6 +247,9 @@ Item {
onDocsButtonClicked: {
avatarPackager.openDocs();
}
onVideoButtonClicked: {
avatarPackager.openVideo();
}
}
Item {

View file

@ -13,6 +13,7 @@ ShadowRectangle {
property string title: qsTr("Avatar Packager")
property alias docsEnabled: docs.visible
property alias videoEnabled: video.visible
property bool backButtonVisible: true // If false, is not visible and does not take up space
property bool backButtonEnabled: true // If false, is not visible but does not affect space
property bool canRename: false
@ -24,6 +25,7 @@ ShadowRectangle {
signal backButtonClicked
signal docsButtonClicked
signal videoButtonClicked
RalewayButton {
id: back
@ -126,6 +128,20 @@ ShadowRectangle {
}
}
RalewayButton {
id: video
visible: false
size: 28
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: docs.left
anchors.rightMargin: 16
text: qsTr("Video")
onClicked: videoButtonClicked()
}
RalewayButton {
id: docs
visible: false
@ -137,8 +153,6 @@ ShadowRectangle {
text: qsTr("Docs")
onClicked: {
docsButtonClicked();
}
onClicked: docsButtonClicked()
}
}

View file

@ -339,8 +339,8 @@ Item {
visible: AvatarPackagerCore.currentAvatarProject && AvatarPackagerCore.currentAvatarProject.hasErrors
anchors {
top: notForSaleMessage.bottom
topMargin: 16
top: notForSaleMessage.visible ? notForSaleMessage.bottom : infoMessage.bottom
bottom: showFilesText.top
horizontalCenter: parent.horizontalCenter
}

View file

@ -113,6 +113,7 @@ Rectangle {
} else if (prop === 'dimensions') {
scalespinner.set(wearable[prop].x / wearable.naturalDimensions.x);
}
modified = true;
}
}

View file

@ -2248,6 +2248,7 @@ Item {
if (sendAssetStep.selectedRecipientUserName === "") {
console.log("SendAsset: Script didn't specify a recipient username!");
sendAssetHome.visible = false;
root.nextActiveView = 'paymentFailure';
return;
}

View file

@ -348,9 +348,11 @@ Rectangle {
}
onAccepted: {
root.searchString = searchField.text;
getMarketplaceItems();
searchField.forceActiveFocus();
if (root.searchString !== searchField.text) {
root.searchString = searchField.text;
getMarketplaceItems();
searchField.forceActiveFocus();
}
}
onActiveFocusChanged: {
@ -662,7 +664,7 @@ Rectangle {
text: "LOG IN"
onClicked: {
sendToScript({method: 'needsLogIn_loginClicked'});
sendToScript({method: 'marketplace_loginClicked'});
}
}

View file

@ -32,6 +32,7 @@ Rectangle {
property string initialActiveViewAfterStatus5: "walletInventory";
property bool keyboardRaised: false;
property bool isPassword: false;
property bool has3DHTML: PlatformInfo.has3DHTML();
anchors.fill: (typeof parent === undefined) ? undefined : parent;
@ -335,8 +336,10 @@ Rectangle {
Connections {
onSendSignalToWallet: {
if (msg.method === 'transactionHistory_usernameLinkClicked') {
userInfoViewer.url = msg.usernameLink;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = msg.usernameLink;
userInfoViewer.visible = true;
}
} else {
sendToScript(msg);
}

View file

@ -24,6 +24,8 @@ Item {
HifiConstants { id: hifi; }
id: root;
property bool has3DHTML: PlatformInfo.has3DHTML();
onVisibleChanged: {
if (visible) {
@ -333,7 +335,9 @@ Item {
onLinkActivated: {
if (link.indexOf("users/") !== -1) {
sendSignalToWallet({method: 'transactionHistory_usernameLinkClicked', usernameLink: link});
if (has3DHTML) {
sendSignalToWallet({method: 'transactionHistory_usernameLinkClicked', usernameLink: link});
}
} else {
sendSignalToWallet({method: 'transactionHistory_linkClicked', itemId: model.marketplace_item});
}

View file

@ -1,27 +0,0 @@
//
// Audio.qml
//
// Created by Zach Pomerantz on 6/12/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import "../../windows"
import "../audio"
ScrollingWindow {
id: root;
resizable: true;
destroyOnHidden: true;
width: 400;
height: 577;
minSize: Qt.vector2d(400, 500);
Audio { id: audio; width: root.width }
objectName: "AudioDialog";
title: audio.title;
}

View file

@ -35,6 +35,7 @@ StackView {
property int cardWidth: 212;
property int cardHeight: 152;
property var tablet: null;
property bool has3DHTML: PlatformInfo.has3DHTML();
RootHttpRequest { id: http; }
signal sendToScript(var message);
@ -75,8 +76,10 @@ StackView {
}
function goCard(targetString, standaloneOptimized) {
if (0 !== targetString.indexOf('hifi://')) {
var card = tabletWebView.createObject();
card.url = addressBarDialog.metaverseServerUrl + targetString;
if(has3DHTML) {
var card = tabletWebView.createObject();
card.url = addressBarDialog.metaverseServerUrl + targetString;
}
card.parentStackItem = root;
root.push(card);
return;

View file

@ -117,7 +117,6 @@ Rectangle {
if (loader.item.hasOwnProperty("gotoPreviousApp")) {
loader.item.gotoPreviousApp = true;
}
screenChanged("Web", url)
});
}

View file

@ -9,7 +9,7 @@
//
import QtQuick 2.7
import Qt.labs.folderlistmodel 2.1
import Qt.labs.folderlistmodel 2.2
import Qt.labs.settings 1.0
import QtQuick.Dialogs 1.2 as OriginalDialogs
import QtQuick.Controls 1.4 as QQC1
@ -279,6 +279,7 @@ Rectangle {
FolderListModel {
id: folderListModel
nameFilters: selectionType.currentFilter
caseSensitive: false
showDirsFirst: true
showDotAndDotDot: false
showFiles: !root.selectDirectory

View file

@ -344,6 +344,7 @@ Item {
readonly property string stop_square: "\ue01e"
readonly property string avatarTPose: "\ue01f"
readonly property string lock: "\ue006"
readonly property string unlock: "\ue039"
readonly property string checkmark: "\ue020"
readonly property string leftRightArrows: "\ue021"
readonly property string hfc: "\ue022"

View file

@ -330,6 +330,7 @@ QtObject {
readonly property string stop_square: "\ue01e"
readonly property string avatarTPose: "\ue01f"
readonly property string lock: "\ue006"
readonly property string unlock: "\ue039"
readonly property string checkmark: "\ue020"
readonly property string leftRightArrows: "\ue021"
readonly property string hfc: "\ue022"

View file

@ -1206,10 +1206,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(&domainHandler, SIGNAL(connectedToDomain(QUrl)), SLOT(updateWindowTitle()));
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle()));
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, [this]() {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
if (tabletScriptingInterface) {
tabletScriptingInterface->setQmlTabletRoot(SYSTEM_TABLET, nullptr);
}
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
entityScriptingInterface->deleteEntity(getTabletScreenID());
entityScriptingInterface->deleteEntity(getTabletHomeButtonID());
@ -1985,6 +1981,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return nullptr;
});
EntityTree::setEmitScriptEventOperator([this](const QUuid& id, const QVariant& message) {
auto entities = getEntities();
if (auto entity = entities->renderableForEntityId(id)) {
entity->emitScriptEvent(message);
}
});
EntityTree::setTextSizeOperator([this](const QUuid& id, const QString& text) {
auto entities = getEntities();
if (auto entity = entities->renderableForEntityId(id)) {
@ -2342,6 +2345,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return viewFrustum.getPosition();
});
DependencyManager::get<UsersScriptingInterface>()->setKickConfirmationOperator([this] (const QUuid& nodeID) { userKickConfirmation(nodeID); });
render::entities::WebEntityRenderer::setAcquireWebSurfaceOperator([this](const QString& url, bool htmlContent, QSharedPointer<OffscreenQmlSurface>& webSurface, bool& cachedWebSurface) {
bool isTablet = url == TabletScriptingInterface::QML;
if (htmlContent) {
@ -2704,9 +2709,7 @@ void Application::cleanupBeforeQuit() {
DependencyManager::destroy<OffscreenQmlSurfaceCache>();
if (_snapshotSoundInjector != nullptr) {
_snapshotSoundInjector->stop();
}
_snapshotSoundInjector = nullptr;
// destroy Audio so it and its threads have a chance to go down safely
// this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
@ -3043,6 +3046,9 @@ void Application::initializeUi() {
QUrl{ "hifi/commerce/wallet/Wallet.qml" },
QUrl{ "hifi/commerce/wallet/WalletHome.qml" },
QUrl{ "hifi/tablet/TabletAddressDialog.qml" },
QUrl{ "hifi/Card.qml" },
QUrl{ "hifi/Pal.qml" },
QUrl{ "hifi/NameCard.qml" },
}, platformInfoCallback);
QmlContextCallback ttsCallback = [](QQmlContext* context) {
@ -3287,6 +3293,40 @@ void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
#endif
}
void Application::userKickConfirmation(const QUuid& nodeID) {
auto avatarHashMap = DependencyManager::get<AvatarHashMap>();
auto avatar = avatarHashMap->getAvatarBySessionID(nodeID);
QString userName;
if (avatar) {
userName = avatar->getSessionDisplayName();
} else {
userName = nodeID.toString();
}
QString kickMessage = "Do you wish to kick " + userName + " from your domain?";
ModalDialogListener* dlg = OffscreenUi::asyncQuestion("Kick User", kickMessage,
QMessageBox::Yes | QMessageBox::No);
if (dlg->getDialogItem()) {
QObject::connect(dlg, &ModalDialogListener::response, this, [=] (QVariant answer) {
QObject::disconnect(dlg, &ModalDialogListener::response, this, nullptr);
bool yes = (static_cast<QMessageBox::StandardButton>(answer.toInt()) == QMessageBox::Yes);
// ask the NodeList to kick the user with the given session ID
if (yes) {
DependencyManager::get<NodeList>()->kickNodeBySessionID(nodeID);
}
DependencyManager::get<UsersScriptingInterface>()->setWaitForKickResponse(false);
});
DependencyManager::get<UsersScriptingInterface>()->setWaitForKickResponse(true);
}
}
void Application::setupQmlSurface(QQmlContext* surfaceContext, bool setAdditionalContextProperties) {
surfaceContext->setContextProperty("Users", DependencyManager::get<UsersScriptingInterface>().data());
surfaceContext->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
@ -4225,10 +4265,9 @@ void Application::keyPressEvent(QKeyEvent* event) {
Setting::Handle<bool> notificationSoundSnapshot{ MenuOption::NotificationSoundsSnapshot, true };
if (notificationSounds.get() && notificationSoundSnapshot.get()) {
if (_snapshotSoundInjector) {
_snapshotSoundInjector->setOptions(options);
_snapshotSoundInjector->restart();
DependencyManager::get<AudioInjectorManager>()->setOptionsAndRestart(_snapshotSoundInjector, options);
} else {
_snapshotSoundInjector = AudioInjector::playSound(_snapshotSound, options);
_snapshotSoundInjector = DependencyManager::get<AudioInjectorManager>()->playSound(_snapshotSound, options);
}
}
takeSnapshot(true);

View file

@ -593,6 +593,7 @@ private:
void toggleTabletUI(bool shouldOpen = false) const;
static void setupQmlSurface(QQmlContext* surfaceContext, bool setAdditionalContextProperties);
void userKickConfirmation(const QUuid& nodeID);
MainWindow* _window;
QElapsedTimer& _sessionRunTimer;

View file

@ -149,6 +149,9 @@ void AvatarBookmarks::removeBookmark(const QString& bookmarkName) {
emit bookmarkDeleted(bookmarkName);
}
void AvatarBookmarks::deleteBookmark() {
}
void AvatarBookmarks::updateAvatarEntities(const QVariantList &avatarEntities) {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto currentAvatarEntities = myAvatar->getAvatarEntityData();

View file

@ -76,6 +76,9 @@ protected:
void readFromFile() override;
QVariantMap getAvatarDataToBookmark();
protected slots:
void deleteBookmark() override;
private:
const QString AVATARBOOKMARKS_FILENAME = "avatarbookmarks.json";
const QString ENTRY_AVATAR_URL = "avatarUrl";

View file

@ -51,13 +51,10 @@ protected:
bool _isMenuSorted;
protected slots:
/**jsdoc
* @function AvatarBookmarks.deleteBookmark
*/
/**jsdoc
* @function LocationBookmarks.deleteBookmark
*/
void deleteBookmark();
virtual void deleteBookmark();
private:
static bool sortOrder(QAction* a, QAction* b);

View file

@ -270,10 +270,14 @@ Menu::Menu() {
// Settings > Audio...
action = addActionToQMenuAndActionHash(settingsMenu, "Audio...");
connect(action, &QAction::triggered, [] {
static const QUrl widgetUrl("hifi/dialogs/Audio.qml");
static const QUrl tabletUrl("hifi/audio/Audio.qml");
static const QString name("AudioDialog");
qApp->showDialog(widgetUrl, tabletUrl, name);
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
auto hmd = DependencyManager::get<HMDScriptingInterface>();
tablet->pushOntoStack(tabletUrl);
if (!hmd->getShouldShowTablet()) {
hmd->toggleShouldShowTablet();
}
});
// Settings > Graphics...

View file

@ -55,7 +55,7 @@ static QStringList HAND_MAPPING_SUFFIXES = {
"HandThumb1",
};
const QUrl DEFAULT_DOCS_URL = QUrl("https://docs.highfidelity.com/create/avatars/create-avatars.html#create-your-own-avatar");
const QUrl PACKAGE_AVATAR_DOCS_BASE_URL = QUrl("https://docs.highfidelity.com/create/avatars/package-avatar.html");
AvatarDoctor::AvatarDoctor(const QUrl& avatarFSTFileUrl) :
_avatarFSTFileUrl(avatarFSTFileUrl) {
@ -85,53 +85,53 @@ void AvatarDoctor::startDiagnosing() {
const auto resourceLoaded = [this, resource](bool success) {
// MODEL
if (!success) {
_errors.push_back({ "Model file cannot be opened.", DEFAULT_DOCS_URL });
addError("Model file cannot be opened.", "missing-file");
emit complete(getErrors());
return;
}
_model = resource;
const auto model = resource.data();
const auto avatarModel = resource.data()->getHFMModel();
if (!avatarModel.originalURL.endsWith(".fbx")) {
_errors.push_back({ "Unsupported avatar model format.", DEFAULT_DOCS_URL });
if (!avatarModel.originalURL.toLower().endsWith(".fbx")) {
addError("Unsupported avatar model format.", "unsupported-format");
emit complete(getErrors());
return;
}
// RIG
if (avatarModel.joints.isEmpty()) {
_errors.push_back({ "Avatar has no rig.", DEFAULT_DOCS_URL });
addError("Avatar has no rig.", "no-rig");
} else {
auto jointNames = avatarModel.getJointNames();
if (avatarModel.joints.length() > NETWORKED_JOINTS_LIMIT) {
_errors.push_back({tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), DEFAULT_DOCS_URL });
addError(tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), "maximum-bone-limit");
}
// Avatar does not have Hips bone mapped
if (!jointNames.contains("Hips")) {
_errors.push_back({ "Hips are not mapped.", DEFAULT_DOCS_URL });
addError("Hips are not mapped.", "hips-not-mapped");
}
if (!jointNames.contains("Spine")) {
_errors.push_back({ "Spine is not mapped.", DEFAULT_DOCS_URL });
addError("Spine is not mapped.", "spine-not-mapped");
}
if (!jointNames.contains("Spine1")) {
_errors.push_back({ "Chest (Spine1) is not mapped.", DEFAULT_DOCS_URL });
addError("Chest (Spine1) is not mapped.", "chest-not-mapped");
}
if (!jointNames.contains("Neck")) {
_errors.push_back({ "Neck is not mapped.", DEFAULT_DOCS_URL });
addError("Neck is not mapped.", "neck-not-mapped");
}
if (!jointNames.contains("Head")) {
_errors.push_back({ "Head is not mapped.", DEFAULT_DOCS_URL });
addError("Head is not mapped.", "head-not-mapped");
}
if (!jointNames.contains("LeftEye")) {
if (jointNames.contains("RightEye")) {
_errors.push_back({ "LeftEye is not mapped.", DEFAULT_DOCS_URL });
addError("LeftEye is not mapped.", "eye-not-mapped");
} else {
_errors.push_back({ "Eyes are not mapped.", DEFAULT_DOCS_URL });
addError("Eyes are not mapped.", "eye-not-mapped");
}
} else if (!jointNames.contains("RightEye")) {
_errors.push_back({ "RightEye is not mapped.", DEFAULT_DOCS_URL });
addError("RightEye is not mapped.", "eye-not-mapped");
}
const auto checkJointAsymmetry = [jointNames] (const QStringList& jointMappingSuffixes) {
@ -159,13 +159,13 @@ void AvatarDoctor::startDiagnosing() {
};
if (checkJointAsymmetry(ARM_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical arm bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical arm bones.", "asymmetrical-bones");
}
if (checkJointAsymmetry(HAND_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical hand bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical hand bones.", "asymmetrical-bones");
}
if (checkJointAsymmetry(LEG_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical leg bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical leg bones.", "asymmetrical-bones");
}
// Multiple skeleton root joints checkup
@ -177,7 +177,7 @@ void AvatarDoctor::startDiagnosing() {
}
if (skeletonRootJoints > 1) {
_errors.push_back({ "Multiple top-level joints found.", DEFAULT_DOCS_URL });
addError("Multiple top-level joints found.", "multiple-top-level-joints");
}
Rig rig;
@ -191,9 +191,9 @@ void AvatarDoctor::startDiagnosing() {
const float RECOMMENDED_MAX_HEIGHT = DEFAULT_AVATAR_HEIGHT * 1.5f;
if (avatarHeight < RECOMMENDED_MIN_HEIGHT) {
_errors.push_back({ "Avatar is possibly too short.", DEFAULT_DOCS_URL });
addError("Avatar is possibly too short.", "short-avatar");
} else if (avatarHeight > RECOMMENDED_MAX_HEIGHT) {
_errors.push_back({ "Avatar is possibly too tall.", DEFAULT_DOCS_URL });
addError("Avatar is possibly too tall.", "tall-avatar");
}
// HipsNotOnGround
@ -204,7 +204,7 @@ void AvatarDoctor::startDiagnosing() {
const auto hipJoint = avatarModel.joints.at(avatarModel.getJointIndex("Hips"));
if (hipsPosition.y < HIPS_GROUND_MIN_Y) {
_errors.push_back({ "Hips are on ground.", DEFAULT_DOCS_URL });
addError("Hips are on ground.", "hips-on-ground");
}
}
}
@ -223,7 +223,7 @@ void AvatarDoctor::startDiagnosing() {
const auto hipsToSpine = glm::length(hipsPosition - spinePosition);
const auto spineToChest = glm::length(spinePosition - chestPosition);
if (hipsToSpine < HIPS_SPINE_CHEST_MIN_SEPARATION && spineToChest < HIPS_SPINE_CHEST_MIN_SEPARATION) {
_errors.push_back({ "Hips/Spine/Chest overlap.", DEFAULT_DOCS_URL });
addError("Hips/Spine/Chest overlap.", "overlap-error");
}
}
}
@ -240,21 +240,21 @@ void AvatarDoctor::startDiagnosing() {
const auto& uniqueJointValues = jointValues.toSet();
for (const auto& jointName: uniqueJointValues) {
if (jointValues.count(jointName) > 1) {
_errors.push_back({ tr("%1 is mapped multiple times.").arg(jointName), DEFAULT_DOCS_URL });
addError(tr("%1 is mapped multiple times.").arg(jointName), "mapped-multiple-times");
}
}
}
if (!isDescendantOfJointWhenJointsExist("Spine", "Hips")) {
_errors.push_back({ "Spine is not a child of Hips.", DEFAULT_DOCS_URL });
addError("Spine is not a child of Hips.", "spine-not-child");
}
if (!isDescendantOfJointWhenJointsExist("Spine1", "Spine")) {
_errors.push_back({ "Spine1 is not a child of Spine.", DEFAULT_DOCS_URL });
addError("Spine1 is not a child of Spine.", "spine1-not-child");
}
if (!isDescendantOfJointWhenJointsExist("Head", "Spine1")) {
_errors.push_back({ "Head is not a child of Spine1.", DEFAULT_DOCS_URL });
addError("Head is not a child of Spine1.", "head-not-child");
}
}
@ -300,7 +300,7 @@ void AvatarDoctor::startDiagnosing() {
connect(resource.data(), &GeometryResource::finished, this, resourceLoaded);
}
} else {
_errors.push_back({ "Model file cannot be opened", DEFAULT_DOCS_URL });
addError("Model file cannot be opened", "missing-file");
emit complete(getErrors());
}
}
@ -345,7 +345,7 @@ void AvatarDoctor::diagnoseTextures() {
QUrl(avatarModel.originalURL)).resolved(QUrl("textures"));
if (texturesFound == 0) {
_errors.push_back({ tr("No textures assigned."), DEFAULT_DOCS_URL });
addError(tr("No textures assigned."), "no-textures-assigned");
}
if (!externalTextures.empty()) {
@ -356,11 +356,10 @@ void AvatarDoctor::diagnoseTextures() {
auto checkTextureLoadingComplete = [this]() mutable {
if (_checkedTextureCount == _externalTextureCount) {
if (_missingTextureCount > 0) {
_errors.push_back({ tr("Missing %n texture(s).","", _missingTextureCount), DEFAULT_DOCS_URL });
addError(tr("Missing %n texture(s).","", _missingTextureCount), "missing-textures");
}
if (_unsupportedTextureCount > 0) {
_errors.push_back({ tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount),
DEFAULT_DOCS_URL });
addError(tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount), "unsupported-textures");
}
emit complete(getErrors());
@ -411,6 +410,12 @@ void AvatarDoctor::diagnoseTextures() {
}
}
void AvatarDoctor::addError(const QString& errorMessage, const QString& docFragment) {
QUrl documentationURL = PACKAGE_AVATAR_DOCS_BASE_URL;
documentationURL.setFragment(docFragment);
_errors.push_back({ errorMessage, documentationURL });
}
QVariantList AvatarDoctor::getErrors() const {
QVariantList result;
for (const auto& error : _errors) {

View file

@ -40,6 +40,8 @@ signals:
private:
void diagnoseTextures();
void addError(const QString& errorMessage, const QString& docFragment);
QUrl _avatarFSTFileUrl;
QVector<AvatarDiagnosticResult> _errors;

View file

@ -629,8 +629,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
// but most avatars are roughly the same size, so let's not be so fancy yet.
const float AVATAR_STRETCH_FACTOR = 1.0f;
_collisionInjectors.remove_if(
[](const AudioInjectorPointer& injector) { return !injector || injector->isFinished(); });
_collisionInjectors.remove_if([](const AudioInjectorPointer& injector) { return !injector; });
static const int MAX_INJECTOR_COUNT = 3;
if (_collisionInjectors.size() < MAX_INJECTOR_COUNT) {
@ -640,7 +639,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
options.volume = energyFactorOfFull;
options.pitch = 1.0f / AVATAR_STRETCH_FACTOR;
auto injector = AudioInjector::playSoundAndDelete(collisionSound, options);
auto injector = DependencyManager::get<AudioInjectorManager>()->playSound(collisionSound, options, true);
_collisionInjectors.emplace_back(injector);
}
}

View file

@ -24,7 +24,7 @@
#include <SimpleMovingAverage.h>
#include <shared/RateCounter.h>
#include <avatars-renderer/ScriptAvatar.h>
#include <AudioInjector.h>
#include <AudioInjectorManager.h>
#include <workload/Space.h>
#include <EntitySimulation.h> // for SetOfEntities
@ -239,7 +239,7 @@ private:
std::shared_ptr<MyAvatar> _myAvatar;
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
std::list<AudioInjectorPointer> _collisionInjectors;
std::list<QWeakPointer<AudioInjector>> _collisionInjectors;
RateCounter<> _myAvatarSendRate;
int _numAvatarsUpdated { 0 };

View file

@ -324,8 +324,11 @@ QString MyAvatar::getDominantHand() const {
void MyAvatar::setDominantHand(const QString& hand) {
if (hand == DOMINANT_LEFT_HAND || hand == DOMINANT_RIGHT_HAND) {
_dominantHand.set(hand);
emit dominantHandChanged(hand);
bool changed = (hand != _dominantHand.get());
if (changed) {
_dominantHand.set(hand);
emit dominantHandChanged(hand);
}
}
}
@ -1570,7 +1573,7 @@ void MyAvatar::handleChangedAvatarEntityData() {
entityTree->withWriteLock([&] {
EntityItemPointer entity = entityTree->addEntity(id, properties);
if (entity) {
packetSender->queueEditEntityMessage(PacketType::EntityAdd, entityTree, id, properties);
packetSender->queueEditAvatarEntityMessage(entityTree, id);
}
});
}
@ -2385,7 +2388,19 @@ void MyAvatar::clearWornAvatarEntities() {
}
}
/**jsdoc
* Information about an avatar entity.
* <table>
* <thead>
* <tr><th>Property</th><th>Type</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>id</code></td><td>Uuid</td><td>Entity ID.</td></tr>
* <tr><td><code>properties</code></td><td>{@link Entities.EntityProperties}</td><td>Entity properties.</td></tr>
* </tbody>
* </table>
* @typedef {object} MyAvatar.AvatarEntityData
*/
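A minimal sketch of how an Interface script might read the structure documented above; the property read in the loop is illustrative.

// List the current avatar entities and one property of each.
var avatarEntities = MyAvatar.getAvatarEntitiesVariant();
for (var i = 0; i < avatarEntities.length; i++) {
    print("Avatar entity " + avatarEntities[i].id + " has type " + avatarEntities[i].properties.type);
}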
QVariantList MyAvatar::getAvatarEntitiesVariant() {
// NOTE: this method is NOT efficient
QVariantList avatarEntitiesData;
@ -3451,10 +3466,10 @@ float MyAvatar::getGravity() {
}
void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
QUuid oldID = getSessionUUID();
QUuid oldSessionID = getSessionUUID();
Avatar::setSessionUUID(sessionUUID);
QUuid id = getSessionUUID();
if (id != oldID) {
QUuid newSessionID = getSessionUUID();
if (newSessionID != oldSessionID) {
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (entityTree) {
@ -3462,15 +3477,23 @@ void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
_avatarEntitiesLock.withReadLock([&] {
avatarEntityIDs = _packedAvatarEntityData.keys();
});
bool sendPackets = !DependencyManager::get<NodeList>()->getSessionUUID().isNull();
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
entityTree->withWriteLock([&] {
for (const auto& entityID : avatarEntityIDs) {
auto entity = entityTree->findEntityByID(entityID);
if (!entity) {
continue;
}
entity->setOwningAvatarID(id);
if (entity->getParentID() == oldID) {
entity->setParentID(id);
// update OwningAvatarID so entity can be identified as "ours" later
entity->setOwningAvatarID(newSessionID);
// NOTE: each attached AvatarEntity already have the correct updated parentID
// via magic in SpatiallyNestable, hence we check against newSessionID
if (sendPackets && entity->getParentID() == newSessionID) {
// but when we have a real session and the AvatarEntity is parented to MyAvatar
// we need to update the "packedAvatarEntityData" sent to the avatar-mixer
// because it contains a stale parentID somewhere deep inside
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
}
}
});
@ -3555,6 +3578,12 @@ void MyAvatar::clearScaleRestriction() {
_haveReceivedHeightLimitsFromDomain = false;
}
/**jsdoc
* A teleport target.
* @typedef {object} MyAvatar.GoToProperties
* @property {Vec3} position - The avatar's new position.
* @property {Quat} [orientation] - The avatar's new orientation.
*/
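A minimal usage sketch for the target object documented above, assuming the standard Quat helper is available to scripts; the coordinates are placeholders.

// Teleport the avatar to a position, facing 90 degrees of yaw.
MyAvatar.goToLocation({
    position: { x: 10, y: 1, z: -4 },
    orientation: Quat.fromPitchYawRollDegrees(0, 90, 0)  // optional
});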
void MyAvatar::goToLocation(const QVariant& propertiesVar) {
qCDebug(interfaceapp, "MyAvatar QML goToLocation");
auto properties = propertiesVar.toMap();
@ -3911,6 +3940,13 @@ void MyAvatar::setCollisionWithOtherAvatarsFlags() {
_characterController.setPendingFlagsUpdateCollisionMask();
}
/**jsdoc
* A collision capsule is a cylinder with hemispherical ends. It is often used to approximate the extents of an avatar.
* @typedef {object} MyAvatar.CollisionCapsule
* @property {Vec3} start - The bottom end of the cylinder, excluding the bottom hemisphere.
* @property {Vec3} end - The top end of the cylinder, excluding the top hemisphere.
* @property {number} radius - The radius of the cylinder and the hemispheres.
*/
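A hedged sketch of inspecting this structure, assuming the MyAvatar.getCollisionCapsule() getter and the Vec3 helper are available to scripts.

// Print the capsule approximating the avatar's extents.
var capsule = MyAvatar.getCollisionCapsule();
print("Capsule radius " + capsule.radius +
      ", distance between hemisphere centers " + Vec3.distance(capsule.start, capsule.end));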
void MyAvatar::updateCollisionCapsuleCache() {
glm::vec3 start, end;
float radius;
@ -5360,6 +5396,24 @@ void MyAvatar::addAvatarHandsToFlow(const std::shared_ptr<Avatar>& otherAvatar)
}
}
/**jsdoc
* Physics options to use in the flow simulation of a joint.
* @typedef {object} MyAvatar.FlowPhysicsOptions
* @property {boolean} [active=true] - <code>true</code> to enable flow on the joint, otherwise <code>false</code>.
* @property {number} [radius=0.01] - The thickness of segments and knots (needed for collisions).
* @property {number} [gravity=-0.0096] - Y-value of the gravity vector.
* @property {number} [inertia=0.8] - Rotational inertia multiplier.
* @property {number} [damping=0.85] - The amount of damping on joint oscillation.
* @property {number} [stiffness=0.0] - The stiffness of each thread.
* @property {number} [delta=0.55] - Delta time for every integration step.
*/
/**jsdoc
* Collision options to use in the flow simulation of a joint.
* @typedef {object} MyAvatar.FlowCollisionsOptions
* @property {string} [type="sphere"] - Currently, only <code>"sphere"</code> is supported.
* @property {number} [radius=0.05] - Collision sphere radius.
* @property {number} [offset=Vec3.ZERO] - Offset of the collision sphere from the joint.
*/
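A hedged usage sketch for the options documented above; the "hair" group name and "HeadTop_End" joint name are illustrative and depend on the avatar's skeleton.

// Enable flow on a hair group and give the head a collision sphere.
MyAvatar.useFlow(true, true,
    { "hair": { active: true, radius: 0.01, gravity: -0.0096, damping: 0.85, stiffness: 0.0 } },
    { "HeadTop_End": { type: "sphere", radius: 0.05, offset: { x: 0, y: 0, z: 0 } } });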
void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& physicsConfig, const QVariantMap& collisionsConfig) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "useFlow",
@ -5409,7 +5463,7 @@ void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& phys
}
auto collisionJoints = collisionsConfig.keys();
if (collisionJoints.size() > 0) {
collisionSystem.resetCollisions();
collisionSystem.clearSelfCollisions();
for (auto &jointName : collisionJoints) {
int jointIndex = getJointIndex(jointName);
FlowCollisionSettings collisionsSettings;
@ -5424,9 +5478,43 @@ void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& phys
collisionSystem.addCollisionSphere(jointIndex, collisionsSettings);
}
}
flow.updateScale();
}
}
/**jsdoc
* Flow options currently used in flow simulation.
* @typedef {object} MyAvatar.FlowData
* @property {boolean} initialized - <code>true</code> if flow has been initialized for the current avatar, <code>false</code>
* if it hasn't.
* @property {boolean} active - <code>true</code> if flow is enabled, <code>false</code> if it isn't.
* @property {boolean} colliding - <code>true</code> if collisions are enabled, <code>false</code> if they aren't.
* @property {Object<GroupName, MyAvatar.FlowPhysicsData>} physicsData - The physics configuration for each group of joints
* that has been configured.
* @property {Object<JointName, MyAvatar.FlowCollisionsData>} collisions - The collisions configuration for each joint that
* has collisions configured.
* @property {Object<ThreadName, number[]>} threads - The threads that have been configured, with the first joint's name as the
* <code>ThreadName</code> and value as an array of the indexes of all the joints in the thread.
*/
/**jsdoc
* A set of physics options currently used in flow simulation.
* @typedef {object} MyAvatar.FlowPhysicsData
* @property {boolean} active - <code>true</code> to enable flow on the joint, otherwise <code>false</code>.
* @property {number} radius - The thickness of segments and knots. (Needed for collisions.)
* @property {number} gravity - Y-value of the gravity vector.
* @property {number} inertia - Rotational inertia multiplier.
* @property {number} damping - The amount of damping on joint oscillation.
* @property {number} stiffness - The stiffness of each thread.
* @property {number} delta - Delta time for every integration step.
* @property {number[]} jointIndices - The indexes of the joints the options are applied to.
*/
/**jsdoc
* A set of collision options currently used in flow simulation.
* @typedef {object} MyAvatar.FlowCollisionsData
* @property {number} radius - Collision sphere radius.
* @property {number} offset - Offset of the collision sphere from the joint.
* @property {number} jointIndex - The index of the joint the options are applied to.
*/
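A short sketch of inspecting the data documented above from a script.

// Print whether flow is running and which joint groups are configured.
var flowData = MyAvatar.getFlowData();
print("Flow active: " + flowData.active +
      ", groups: " + JSON.stringify(Object.keys(flowData.physicsData)));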
QVariantMap MyAvatar::getFlowData() {
QVariantMap result;
if (QThread::currentThread() != thread()) {
@ -5523,14 +5611,14 @@ void MyAvatar::initFlowFromFST() {
}
}
void MyAvatar::sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const {
void MyAvatar::sendPacket(const QUuid& entityID) const {
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (entityTree) {
entityTree->withWriteLock([&] {
// force an update packet
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
packetSender->queueEditEntityMessage(PacketType::EntityEdit, entityTree, entityID, properties);
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
});
}
}

File diff suppressed because it is too large

View file

@ -365,7 +365,7 @@ void OtherAvatar::handleChangedAvatarEntityData() {
// AVATAR ENTITY UPDATE FLOW
// - if queueEditEntityMessage() sees "AvatarEntity" HostType it calls _myAvatar->storeAvatarEntityDataPayload()
// - storeAvatarEntityDataPayload() saves the payload and flags the trait instance for the entity as updated,
// - ClientTraitsHandler::sendChangedTraitsToMixea() sends the entity bytes to the mixer which relays them to other interfaces
// - ClientTraitsHandler::sendChangedTraitsToMixer() sends the entity bytes to the mixer which relays them to other interfaces
// - AvatarHashMap::processBulkAvatarTraits() on other interfaces calls avatar->processTraitInstance()
// - AvatarData::processTraitInstance() calls storeAvatarEntityDataPayload(), which sets _avatarEntityDataChanged = true
// - (My)Avatar::simulate() calls handleChangedAvatarEntityData() every frame which checks _avatarEntityDataChanged
@ -495,6 +495,18 @@ void OtherAvatar::handleChangedAvatarEntityData() {
const QUuid NULL_ID = QUuid("{00000000-0000-0000-0000-000000000005}");
entity->setParentID(NULL_ID);
entity->setParentID(oldParentID);
if (entity->stillHasMyGrabAction()) {
// For this case: we want to ignore transform+velocities coming from authoritative OtherAvatar
// because the MyAvatar is grabbing and we expect the local grab state
// to have enough information to prevent simulation drift.
//
// Clever readers might realize this could cause problems. For example,
// if an ignored OtherAvatar were to simultaneously grab the object then there would be
// a noticeable discrepancy between participants in the distributed physics simulation;
// however, the difference would be stable and would not drift.
properties.clearTransformOrVelocityChanges();
}
if (entityTree->updateEntity(entityID, properties)) {
entity->updateLastEditedFromRemote();
} else {

View file

@ -228,6 +228,9 @@ void Audio::loadData() {
_hmdMuted = _hmdMutedSetting.get();
_pttDesktop = _pttDesktopSetting.get();
_pttHMD = _pttHMDSetting.get();
auto client = DependencyManager::get<AudioClient>().data();
QMetaObject::invokeMethod(client, "setMuted", Q_ARG(bool, isMuted()), Q_ARG(bool, false));
}
bool Audio::getPTTHMD() const {
@ -374,6 +377,18 @@ void Audio::handlePushedToTalk(bool enabled) {
}
}
void Audio::setInputDevice(const QAudioDeviceInfo& device, bool isHMD) {
withWriteLock([&] {
_devices.chooseInputDevice(device, isHMD);
});
}
void Audio::setOutputDevice(const QAudioDeviceInfo& device, bool isHMD) {
withWriteLock([&] {
_devices.chooseOutputDevice(device, isHMD);
});
}
void Audio::setReverb(bool enable) {
withWriteLock([&] {
DependencyManager::get<AudioClient>()->setReverb(enable);
@ -386,14 +401,66 @@ void Audio::setReverbOptions(const AudioEffectOptions* options) {
});
}
void Audio::setInputDevice(const QAudioDeviceInfo& device, bool isHMD) {
void Audio::setAvatarGain(float gain) {
withWriteLock([&] {
_devices.chooseInputDevice(device, isHMD);
// ask the NodeList to set the master avatar gain
DependencyManager::get<NodeList>()->setAvatarGain(QUuid(), gain);
});
}
void Audio::setOutputDevice(const QAudioDeviceInfo& device, bool isHMD) {
withWriteLock([&] {
_devices.chooseOutputDevice(device, isHMD);
float Audio::getAvatarGain() {
return resultWithReadLock<float>([&] {
return DependencyManager::get<NodeList>()->getAvatarGain(QUuid());
});
}
void Audio::setInjectorGain(float gain) {
withWriteLock([&] {
// ask the NodeList to set the audio injector gain
DependencyManager::get<NodeList>()->setInjectorGain(gain);
});
}
float Audio::getInjectorGain() {
return resultWithReadLock<float>([&] {
return DependencyManager::get<NodeList>()->getInjectorGain();
});
}
void Audio::setLocalInjectorGain(float gain) {
withWriteLock([&] {
if (_localInjectorGain != gain) {
_localInjectorGain = gain;
// convert dB to amplitude
gain = fastExp2f(gain / 6.02059991f);
// quantize and limit to match NodeList::setInjectorGain()
gain = unpackFloatGainFromByte(packFloatGainToByte(gain));
DependencyManager::get<AudioClient>()->setLocalInjectorGain(gain);
}
});
}
float Audio::getLocalInjectorGain() {
return resultWithReadLock<float>([&] {
return _localInjectorGain;
});
}
void Audio::setSystemInjectorGain(float gain) {
withWriteLock([&] {
if (_systemInjectorGain != gain) {
_systemInjectorGain = gain;
// convert dB to amplitude
gain = fastExp2f(gain / 6.02059991f);
// quantize and limit to match NodeList::setInjectorGain()
gain = unpackFloatGainFromByte(packFloatGainToByte(gain));
DependencyManager::get<AudioClient>()->setSystemInjectorGain(gain);
}
});
}
float Audio::getSystemInjectorGain() {
return resultWithReadLock<float>([&] {
return _systemInjectorGain;
});
}
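The conversions above map decibels to linear amplitude: 6.02059991 dB corresponds to a factor of two, so fastExp2f(gain / 6.02059991f) computes 2^(dB / 6.0206), the same value as the conventional 10^(dB / 20). A quick reference check in plain JavaScript (illustration only, not engine code):

function dbToAmplitude(db) {
    return Math.pow(2, db / 6.02059991);  // identical to Math.pow(10, db / 20)
}
print(dbToAmplitude(-6));   // ~0.5
print(dbToAmplitude(20));   // ~10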

View file

@ -170,6 +170,66 @@ public:
*/
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
/**jsdoc
* Sets the avatar gain at the server.
* Units are Decibels (dB)
* @function Audio.setAvatarGain
* @param {number} gain (in dB)
*/
Q_INVOKABLE void setAvatarGain(float gain);
/**jsdoc
* Gets the avatar gain at the server.
* @function Audio.getAvatarGain
* @returns {number} gain (in dB)
*/
Q_INVOKABLE float getAvatarGain();
/**jsdoc
* Sets the injector gain at the server.
* Units are Decibels (dB)
* @function Audio.setInjectorGain
* @param {number} gain (in dB)
*/
Q_INVOKABLE void setInjectorGain(float gain);
/**jsdoc
* Gets the injector gain at the server.
* @function Audio.getInjectorGain
* @returns {number} gain (in dB)
*/
Q_INVOKABLE float getInjectorGain();
/**jsdoc
* Sets the local injector gain in the client.
* Units are Decibels (dB)
* @function Audio.setLocalInjectorGain
* @param {number} gain (in dB)
*/
Q_INVOKABLE void setLocalInjectorGain(float gain);
/**jsdoc
* Gets the local injector gain in the client.
* @function Audio.getLocalInjectorGain
* @returns {number} gain (in dB)
*/
Q_INVOKABLE float getLocalInjectorGain();
/**jsdoc
* Sets the injector gain for system sounds.
* Units are Decibels (dB)
* @function Audio.setSystemInjectorGain
* @param {number} gain (in dB)
*/
Q_INVOKABLE void setSystemInjectorGain(float gain);
/**jsdoc
* Gets the injector gain for system sounds.
* @function Audio.getSystemInjectorGain
* @returns {number} gain (in dB)
*/
Q_INVOKABLE float getSystemInjectorGain();
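A hedged sketch of driving these controls from a script; the gain values are arbitrary examples.

Audio.setAvatarGain(-6);          // other avatars, mixed at the server
Audio.setInjectorGain(-12);       // environment injectors, server side
Audio.setLocalInjectorGain(-12);  // keep locally rendered injectors in step
Audio.setSystemInjectorGain(0);   // leave system sounds at the default
print("Avatar gain is now " + Audio.getAvatarGain() + " dB");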
/**jsdoc
* Starts making an audio recording of the audio being played in-world (i.e., not local-only audio) to a file in WAV format.
* @function Audio.startRecording
@ -350,6 +410,8 @@ private:
float _inputVolume { 1.0f };
float _inputLevel { 0.0f };
float _localInjectorGain { 0.0f }; // in dB
float _systemInjectorGain { 0.0f }; // in dB
bool _isClipping { false };
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _enableWarnWhenMuted { true };

View file

@ -66,7 +66,7 @@ void TTSScriptingInterface::updateLastSoundAudioInjector() {
if (_lastSoundAudioInjector) {
AudioInjectorOptions options;
options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
_lastSoundAudioInjector->setOptions(options);
DependencyManager::get<AudioInjectorManager>()->setOptions(_lastSoundAudioInjector, options);
_lastSoundAudioInjectorUpdateTimer.start(INJECTOR_INTERVAL_MS);
}
}
@ -143,7 +143,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
if (_lastSoundAudioInjector) {
_lastSoundAudioInjector->stop();
DependencyManager::get<AudioInjectorManager>()->stop(_lastSoundAudioInjector);
_lastSoundAudioInjectorUpdateTimer.stop();
}
@ -151,7 +151,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
uint32_t numSamples = (uint32_t)_lastSoundByteArray.size() / sizeof(AudioData::AudioSample);
auto samples = reinterpret_cast<AudioData::AudioSample*>(_lastSoundByteArray.data());
auto newAudioData = AudioData::make(numSamples, numChannels, samples);
_lastSoundAudioInjector = AudioInjector::playSoundAndDelete(newAudioData, options);
_lastSoundAudioInjector = DependencyManager::get<AudioInjectorManager>()->playSound(newAudioData, options, true);
_lastSoundAudioInjectorUpdateTimer.start(INJECTOR_INTERVAL_MS);
#else
@ -161,7 +161,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
void TTSScriptingInterface::stopLastSpeech() {
if (_lastSoundAudioInjector) {
_lastSoundAudioInjector->stop();
_lastSoundAudioInjector = NULL;
DependencyManager::get<AudioInjectorManager>()->stop(_lastSoundAudioInjector);
_lastSoundAudioInjector = nullptr;
}
}
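// Consolidated sketch of the manager-owned injector lifecycle this file now
// uses; the calls below all appear verbatim above, and the third playSound()
// argument is assumed to mean "clean the injector up when it finishes",
// matching the playSoundAndDelete() call it replaces:
static AudioInjectorPointer speakAndFollowAvatarSketch(AudioDataPointer audioData, AudioInjectorOptions options) {
    auto injectorManager = DependencyManager::get<AudioInjectorManager>();
    auto injector = injectorManager->playSound(audioData, options, true);
    // Later (e.g. from a timer), retarget the still-playing injector...
    options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
    injectorManager->setOptions(injector, options);
    // ...or end playback early.
    injectorManager->stop(injector);
    return injector;
}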

View file

@ -199,13 +199,3 @@ void TestScriptingInterface::setOtherAvatarsReplicaCount(int count) {
int TestScriptingInterface::getOtherAvatarsReplicaCount() {
return qApp->getOtherAvatarsReplicaCount();
}
QString TestScriptingInterface::getOperatingSystemType() {
#ifdef Q_OS_WIN
return "WINDOWS";
#elif defined Q_OS_MAC
return "MACOS";
#else
return "UNKNOWN";
#endif
}

View file

@ -163,13 +163,6 @@ public slots:
*/
Q_INVOKABLE int getOtherAvatarsReplicaCount();
/**jsdoc
* Returns the Operating Sytem type
* @function Test.getOperatingSystemType
* @returns {string} "WINDOWS", "MACOS" or "UNKNOWN"
*/
QString getOperatingSystemType();
private:
bool waitForCondition(qint64 maxWaitMs, std::function<bool()> condition);
QString _testResultsLocation;

View file

@ -29,7 +29,7 @@
#include <PathUtils.h>
#include <ResourceManager.h>
#include <SoundCache.h>
#include <AudioInjector.h>
#include <AudioInjectorManager.h>
#include <RegisteredMetaTypes.h>
#include <ui/TabletScriptingInterface.h>
@ -537,7 +537,7 @@ void Keyboard::handleTriggerBegin(const QUuid& id, const PointerEvent& event) {
audioOptions.position = keyWorldPosition;
audioOptions.volume = 0.05f;
AudioInjector::playSoundAndDelete(_keySound, audioOptions);
DependencyManager::get<AudioInjectorManager>()->playSound(_keySound, audioOptions, true);
int scanCode = key.getScanCode(_capsEnabled);
QString keyString = key.getKeyString(_capsEnabled);

View file

@ -19,9 +19,9 @@
#include <QtCore/QObject>
#include <QTimer>
#include <QHash>
#include <QUuid>
#include <DependencyManager.h>
#include <Sound.h>
#include <AudioInjector.h>
#include <shared/ReadWriteLockable.h>
#include <SettingHandle.h>

View file

@ -266,6 +266,11 @@ void Stats::updateStats(bool force) {
}
STAT_UPDATE(audioCodec, audioClient->getSelectedAudioFormat());
STAT_UPDATE(audioNoiseGate, audioClient->getNoiseGateOpen() ? "Open" : "Closed");
{
int localInjectors = audioClient->getNumLocalInjectors();
size_t nonLocalInjectors = DependencyManager::get<AudioInjectorManager>()->getNumInjectors();
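// Surfaced as the audioInjectors stat (a Vec2): x = injectors this client is
// mixing locally, y = injectors currently tracked by AudioInjectorManager
// (the non-local set).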
STAT_UPDATE(audioInjectors, QVector2D(localInjectors, nonLocalInjectors));
}
STAT_UPDATE(entityPacketsInKbps, octreeServerCount ? totalEntityKbps / octreeServerCount : -1);

View file

@ -87,6 +87,7 @@ private: \
* @property {number} audioPacketLoss - <em>Read-only.</em>
* @property {string} audioCodec - <em>Read-only.</em>
* @property {string} audioNoiseGate - <em>Read-only.</em>
* @property {Vec2} audioInjectors - <em>Read-only.</em>
* @property {number} entityPacketsInKbps - <em>Read-only.</em>
*
* @property {number} downloads - <em>Read-only.</em>
@ -243,6 +244,7 @@ class Stats : public QQuickItem {
STATS_PROPERTY(int, audioPacketLoss, 0)
STATS_PROPERTY(QString, audioCodec, QString())
STATS_PROPERTY(QString, audioNoiseGate, QString())
STATS_PROPERTY(QVector2D, audioInjectors, QVector2D())
STATS_PROPERTY(int, entityPacketsInKbps, 0)
STATS_PROPERTY(int, downloads, 0)
@ -692,6 +694,13 @@ signals:
*/
void audioNoiseGateChanged();
/**jsdoc
* Triggered when the value of the <code>audioInjectors</code> property changes.
* @function Stats.audioInjectorsChanged
* @returns {Signal}
*/
void audioInjectorsChanged();
/**jsdoc
* Triggered when the value of the <code>entityPacketsInKbps</code> property changes.
* @function Stats.entityPacketsInKbpsChanged

View file

@ -1711,9 +1711,9 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
*
* @property {boolean} isFacingAvatar - If <code>true< / code>, the overlay is rotated to face the user's camera about an axis
* @property {boolean} isFacingAvatar - If <code>true</code>, the overlay is rotated to face the user's camera about an axis
* parallel to the user's avatar's "up" direction.
* @property {string} text="" - The text to display.Text does not automatically wrap; use <code>\n< / code> for a line break.
* @property {string} text="" - The text to display. Text does not automatically wrap; use <code>\n</code> for a line break.
* @property {number} textAlpha=1 - The text alpha value.
* @property {Color} backgroundColor=0,0,0 - The background color.
* @property {number} backgroundAlpha=0.7 - The background alpha value.
@ -1876,7 +1876,7 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {Vec3} localPosition - The local position of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>position</code>.
* @property {Quat} localRotation - The orientation of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* @property {boolean} ignorePickIntersection=false - If <code>true</code>, picks ignore the overlay. <code>ignoreRayIntersection</code> is a synonym.
* @property {boolean} drawInFront=false - If <code>true</code>, the overlay is rendered in front of objects in the world, but behind the HUD.
* @property {boolean} drawHUDLayer=false - If <code>true</code>, the overlay is rendered in front of everything, including the HUD.
@ -1916,7 +1916,7 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {Vec3} localPosition - The local position of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>position</code>.
* @property {Quat} localRotation - The orientation of the overlay relative to its parent if the overlay has a
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
* @property {boolean} isSolid=false - Synonyms: <code>solid</code>, <code>isFilled</code>, and <code>filled</code>.
* Antonyms: <code>isWire</code> and <code>wire</code>.
* @property {boolean} ignorePickIntersection=false - If <code>true</code>, picks ignore the overlay. <code>ignoreRayIntersection</code> is a synonym.
@ -1927,46 +1927,46 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
*
* @property {number} startAt = 0 - The counter - clockwise angle from the overlay's x-axis that drawing starts at, in degrees.
* @property {number} endAt = 360 - The counter - clockwise angle from the overlay's x-axis that drawing ends at, in degrees.
* @property {number} outerRadius = 1 - The outer radius of the overlay, in meters.Synonym: <code>radius< / code>.
* @property {number} innerRadius = 0 - The inner radius of the overlay, in meters.
* @property {Color} color = 255, 255, 255 - The color of the overlay.Setting this value also sets the values of
* <code>innerStartColor< / code>, <code>innerEndColor< / code>, <code>outerStartColor< / code>, and <code>outerEndColor< / code>.
* @property {Color} startColor - Sets the values of <code>innerStartColor< / code> and <code>outerStartColor< / code>.
* <em>Write - only.< / em>
* @property {Color} endColor - Sets the values of <code>innerEndColor< / code> and <code>outerEndColor< / code>.
* <em>Write - only.< / em>
* @property {Color} innerColor - Sets the values of <code>innerStartColor< / code> and <code>innerEndColor< / code>.
* <em>Write - only.< / em>
* @property {Color} outerColor - Sets the values of <code>outerStartColor< / code> and <code>outerEndColor< / code>.
* <em>Write - only.< / em>
* @property {number} startAt = 0 - The counter-clockwise angle from the overlay's x-axis that drawing starts at, in degrees.
* @property {number} endAt = 360 - The counter-clockwise angle from the overlay's x-axis that drawing ends at, in degrees.
* @property {number} outerRadius = 1 - The outer radius of the overlay, in meters. Synonym: <code>radius</code>.
* @property {number} innerRadius = 0 - The inner radius of the overlay, in meters.
* @property {Color} color = 255, 255, 255 - The color of the overlay. Setting this value also sets the values of
* <code>innerStartColor</code>, <code>innerEndColor</code>, <code>outerStartColor</code>, and <code>outerEndColor</code>.
* @property {Color} startColor - Sets the values of <code>innerStartColor</code> and <code>outerStartColor</code>.
* <em>Write-only.</em>
* @property {Color} endColor - Sets the values of <code>innerEndColor</code> and <code>outerEndColor</code>.
* <em>Write-only.</em>
* @property {Color} innerColor - Sets the values of <code>innerStartColor</code> and <code>innerEndColor</code>.
* <em>Write-only.</em>
* @property {Color} outerColor - Sets the values of <code>outerStartColor</code> and <code>outerEndColor</code>.
* <em>Write-only.</em>
* @property {Color} innerStartColor - The color at the inner start point of the overlay.
* @property {Color} innerEndColor - The color at the inner end point of the overlay.
* @property {Color} outerStartColor - The color at the outer start point of the overlay.
* @property {Color} outerEndColor - The color at the outer end point of the overlay.
* @property {number} alpha = 0.5 - The opacity of the overlay, <code>0.0< / code> -<code>1.0< / code>.Setting this value also sets
* the values of <code>innerStartAlpha< / code>, <code>innerEndAlpha< / code>, <code>outerStartAlpha< / code>, and
* <code>outerEndAlpha< / code>.Synonym: <code>Alpha< / code>; <em>write - only< / em>.
* @property {number} startAlpha - Sets the values of <code>innerStartAlpha< / code> and <code>outerStartAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} endAlpha - Sets the values of <code>innerEndAlpha< / code> and <code>outerEndAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} innerAlpha - Sets the values of <code>innerStartAlpha< / code> and <code>innerEndAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} outerAlpha - Sets the values of <code>outerStartAlpha< / code> and <code>outerEndAlpha< / code>.
* <em>Write - only.< / em>
* @property {number} alpha = 0.5 - The opacity of the overlay, <code>0.0</code> - <code>1.0</code>. Setting this value also sets
* the values of <code>innerStartAlpha</code>, <code>innerEndAlpha</code>, <code>outerStartAlpha</code>, and
* <code>outerEndAlpha</code>. Synonym: <code>Alpha</code>; <em>write-only</em>.
* @property {number} startAlpha - Sets the values of <code>innerStartAlpha</code> and <code>outerStartAlpha</code>.
* <em>Write-only.</em>
* @property {number} endAlpha - Sets the values of <code>innerEndAlpha</code> and <code>outerEndAlpha</code>.
* <em>Write-only.</em>
* @property {number} innerAlpha - Sets the values of <code>innerStartAlpha</code> and <code>innerEndAlpha</code>.
* <em>Write-only.</em>
* @property {number} outerAlpha - Sets the values of <code>outerStartAlpha</code> and <code>outerEndAlpha</code>.
* <em>Write-only.</em>
* @property {number} innerStartAlpha = 0 - The alpha at the inner start point of the overlay.
* @property {number} innerEndAlpha = 0 - The alpha at the inner end point of the overlay.
* @property {number} outerStartAlpha = 0 - The alpha at the outer start point of the overlay.
* @property {number} outerEndAlpha = 0 - The alpha at the outer end point of the overlay.
*
* @property {boolean} hasTickMarks = false - If <code>true< / code>, tick marks are drawn.
* @property {boolean} hasTickMarks = false - If <code>true</code>, tick marks are drawn.
* @property {number} majorTickMarksAngle = 0 - The angle between major tick marks, in degrees.
* @property {number} minorTickMarksAngle = 0 - The angle between minor tick marks, in degrees.
* @property {number} majorTickMarksLength = 0 - The length of the major tick marks, in meters.A positive value draws tick marks
* @property {number} majorTickMarksLength = 0 - The length of the major tick marks, in meters. A positive value draws tick marks
* outwards from the inner radius; a negative value draws tick marks inwards from the outer radius.
* @property {number} minorTickMarksLength = 0 - The length of the minor tick marks, in meters.A positive value draws tick marks
* @property {number} minorTickMarksLength = 0 - The length of the minor tick marks, in meters. A positive value draws tick marks
* outwards from the inner radius; a negative value draws tick marks inwards from the outer radius.
* @property {Color} majorTickMarksColor = 0, 0, 0 - The color of the major tick marks.
* @property {Color} minorTickMarksColor = 0, 0, 0 - The color of the minor tick marks.

View file

@ -59,6 +59,46 @@ public:
float getMaxErrorOnLastSolve() { return _maxErrorOnLastSolve; }
/**jsdoc
* <p>Specifies the initial conditions of the IK solver.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>RelaxToUnderPoses</td><td>This is a blend: it is 15/16 <code>PreviousSolution</code>
* and 1/16 <code>UnderPoses</code>. This provides some of the benefits of using <code>UnderPoses</code> so that the
* underlying animation is still visible, while at the same time converging faster than using the
* <code>UnderPoses</code> as the only initial solution.</td></tr>
* <tr><td><code>1</code></td><td>RelaxToLimitCenterPoses</td><td>This is a blend: it is 15/16
* <code>PreviousSolution</code> and 1/16 <code>LimitCenterPoses</code>. This should converge quickly because it is
* close to the previous solution, but still provides the benefits of avoiding limb locking.</td></tr>
* <tr><td><code>2</code></td><td>PreviousSolution</td><td>The IK system will begin to solve from the same positions and
* orientations for each joint that resulted from the previous frame.<br />
* Pros: As the end effectors typically do not move much from frame to frame, this is likely to converge quickly
* to a valid solution.<br />
* Cons: If the previous solution resulted in an awkward or uncomfortable posture, the next frame will also be
* awkward and uncomfortable. It can also result in locked elbows and knees.</td></tr>
* <tr><td><code>3</code></td><td>UnderPoses</td><td>The IK occurs at one of the top-most layers. It has access to the
* full posture that was computed via canned animations and blends. We call this animated set of poses the "under
* pose". The under poses are what would be visible if IK was completely disabled. Using the under poses as the
* initial conditions of the CCD solve will cause some of the animated motion to be blended into the result of the
* IK. This can result in very natural results, especially if there are only a few IK targets enabled. On the other
* hand, because the under poses might be quite far from the desired end effector, it can converge slowly in some
* cases, causing it to never reach the IK target in the allotted number of iterations. Also, in situations where all
* of the IK targets are being controlled by external sensors, sometimes starting from the under poses can cause
* awkward motions from the underlying animations to leak into the IK result.</td></tr>
* <tr><td><code>4</code></td><td>LimitCenterPoses</td><td>This pose is taken to be the center of all the joint
* constraints. This can prevent the IK solution from getting locked or stuck at a particular constraint. For
* example, if the arm is pointing straight outward from the body, as the end effector moves towards the body, at
* some point the elbow should bend to accommodate. However, because the CCD solver is stuck at a local maximum, it
* will not rotate the elbow, unless the initial conditions already have the elbow bent, which is the case for
* <code>LimitCenterPoses</code>. When all the IK targets are enabled, this result will provide a consistent starting
* point for each IK solve, hopefully resulting in a consistent, natural result.</td></tr>
* </tbody>
* </table>
* @typedef {number} MyAvatar.AnimIKSolutionSource
*/
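// Illustrative sketch of the "RelaxTo..." initial conditions described above: a
// fixed 15/16 : 1/16 blend from the previous frame's solution toward a reference
// pose set (the under poses or the limit-center poses). The per-pose
// blend(other, alpha) call is the same one Rig uses when mixing pose sets; the
// function and variable names here are placeholders, not engine members.
static void relaxInitialPosesSketch(AnimPoseVec& initialPoses, const AnimPoseVec& previousSolution,
                                    const AnimPoseVec& referencePoses) {
    const float RELAX_ALPHA = 1.0f / 16.0f;  // 1/16 reference, 15/16 previous solution
    size_t numJoints = std::min(previousSolution.size(), referencePoses.size());
    initialPoses.resize(numJoints);
    for (size_t i = 0; i < numJoints; i++) {
        initialPoses[i] = previousSolution[i];
        initialPoses[i].blend(referencePoses[i], RELAX_ALPHA);
    }
}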
enum class SolutionSource {
RelaxToUnderPoses = 0,
RelaxToLimitCenterPoses,

View file

@ -24,6 +24,37 @@ class AnimOverlay : public AnimNode {
public:
friend class AnimTests;
/**jsdoc
* <p>Specifies sets of joints.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>FullBodyBoneSet</td><td>All joints.</td></tr>
* <tr><td><code>1</code></td><td>UpperBodyBoneSet</td><td>Only the "Spine" joint and its children.</td></tr>
* <tr><td><code>2</code></td><td>LowerBodyBoneSet</td><td>Only the leg joints and their children.</td></tr>
* <tr><td><code>3</code></td><td>LeftArmBoneSet</td><td>Joints that are the children of the "LeftShoulder"
* joint.</td></tr>
* <tr><td><code>4</code></td><td>RightArmBoneSet</td><td>Joints that are the children of the "RightShoulder"
* joint.</td></tr>
* <tr><td><code>5</code></td><td>AboveTheHeadBoneSet</td><td>Joints that are the children of the "Head"
* joint.</td></tr>
* <tr><td><code>6</code></td><td>BelowTheHeadBoneSet</td><td>Joints that are NOT the children of the "Head"
* joint.</td></tr>
* <tr><td><code>7</code></td><td>HeadOnlyBoneSet</td><td>The "Head" joint.</td></tr>
* <tr><td><code>8</code></td><td>SpineOnlyBoneSet</td><td>The "Spine" joint.</td></tr>
* <tr><td><code>9</code></td><td>EmptyBoneSet</td><td>No joints.</td></tr>
* <tr><td><code>10</code></td><td>LeftHandBoneSet</td><td>Joints that are the children of the "LeftHand"
* joint.</td></tr>
* <tr><td><code>11</code></td><td>RightHandBoneSet</td><td>Joints that are the children of the "RightHand"
* joint.</td></tr>
* <tr><td><code>12</code></td><td>HipsOnlyBoneSet</td><td>The "Hips" joint.</td></tr>
* <tr><td><code>13</code></td><td>BothFeetBoneSet</td><td>The "LeftFoot" and "RightFoot" joints.</td></tr>
* </tbody>
* </table>
* @typedef {number} MyAvatar.AnimOverlayBoneSet
*/
enum BoneSet {
FullBodyBoneSet = 0,
UpperBodyBoneSet,

View file

@ -67,17 +67,23 @@ void FlowCollisionSystem::addCollisionSphere(int jointIndex, const FlowCollision
auto collision = FlowCollisionSphere(jointIndex, settings, isTouch);
collision.setPosition(position);
if (isSelfCollision) {
_selfCollisions.push_back(collision);
if (!isTouch) {
_selfCollisions.push_back(collision);
} else {
_selfTouchCollisions.push_back(collision);
}
} else {
_othersCollisions.push_back(collision);
}
};
void FlowCollisionSystem::resetCollisions() {
_allCollisions.clear();
_othersCollisions.clear();
_selfTouchCollisions.clear();
_selfCollisions.clear();
}
FlowCollisionResult FlowCollisionSystem::computeCollision(const std::vector<FlowCollisionResult> collisions) {
FlowCollisionResult result;
if (collisions.size() > 1) {
@ -106,6 +112,10 @@ void FlowCollisionSystem::setScale(float scale) {
_selfCollisions[j]._radius = _selfCollisions[j]._initialRadius * scale;
_selfCollisions[j]._offset = _selfCollisions[j]._initialOffset * scale;
}
for (size_t j = 0; j < _selfTouchCollisions.size(); j++) {
_selfTouchCollisions[j]._radius = _selfTouchCollisions[j]._initialRadius * scale;
_selfTouchCollisions[j]._offset = _selfTouchCollisions[j]._initialOffset * scale;
}
};
std::vector<FlowCollisionResult> FlowCollisionSystem::checkFlowThreadCollisions(FlowThread* flowThread) {
@ -178,9 +188,9 @@ void FlowCollisionSystem::setCollisionSettingsByJoint(int jointIndex, const Flow
}
void FlowCollisionSystem::prepareCollisions() {
_allCollisions.clear();
_allCollisions.resize(_selfCollisions.size() + _othersCollisions.size());
std::copy(_selfCollisions.begin(), _selfCollisions.begin() + _selfCollisions.size(), _allCollisions.begin());
std::copy(_othersCollisions.begin(), _othersCollisions.begin() + _othersCollisions.size(), _allCollisions.begin() + _selfCollisions.size());
_allCollisions.insert(_allCollisions.end(), _selfCollisions.begin(), _selfCollisions.end());
_allCollisions.insert(_allCollisions.end(), _othersCollisions.begin(), _othersCollisions.end());
_allCollisions.insert(_allCollisions.end(), _selfTouchCollisions.begin(), _selfTouchCollisions.end());
_othersCollisions.clear();
}
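// The rewrite above swaps resize() + std::copy() for insert(): nothing is
// default-constructed just to be overwritten, and appending the new
// _selfTouchCollisions vector needs no offset bookkeeping. A generic sketch of
// the same append idiom (the reserve() call is an optional extra, not part of
// the change):
template <typename T>
static void appendAllSketch(std::vector<T>& out, const std::vector<T>& a,
                            const std::vector<T>& b, const std::vector<T>& c) {
    out.clear();
    out.reserve(a.size() + b.size() + c.size());
    out.insert(out.end(), a.begin(), a.end());
    out.insert(out.end(), b.begin(), b.end());
    out.insert(out.end(), c.begin(), c.end());
}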
@ -273,18 +283,20 @@ void FlowJoint::setRecoveryPosition(const glm::vec3& recoveryPosition) {
}
void FlowJoint::update(float deltaTime) {
glm::vec3 accelerationOffset = glm::vec3(0.0f);
if (_settings._stiffness > 0.0f) {
glm::vec3 recoveryVector = _recoveryPosition - _currentPosition;
float recoveryFactor = powf(_settings._stiffness, 3.0f);
accelerationOffset = recoveryVector * recoveryFactor;
}
FlowNode::update(deltaTime, accelerationOffset);
if (_anchored) {
if (!_isHelper) {
_currentPosition = _updatedPosition;
} else {
_currentPosition = _parentPosition;
if (_settings._active) {
glm::vec3 accelerationOffset = glm::vec3(0.0f);
if (_settings._stiffness > 0.0f) {
glm::vec3 recoveryVector = _recoveryPosition - _currentPosition;
float recoveryFactor = powf(_settings._stiffness, 3.0f);
accelerationOffset = recoveryVector * recoveryFactor;
}
FlowNode::update(deltaTime, accelerationOffset);
if (_anchored) {
if (!_isHelper) {
_currentPosition = _updatedPosition;
} else {
_currentPosition = _parentPosition;
}
}
}
};
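// Worked illustration of the recovery term above: with stiffness s in [0, 1], an
// active joint is pulled back toward its recovery position by s^3 of the
// remaining offset each update, so the response stays gentle for small s
// (s = 0.5 -> 12.5%, s = 0.8 -> ~51%) and only approaches a rigid snap-back as s
// nears 1.
static glm::vec3 recoveryOffsetSketch(const glm::vec3& recoveryPosition, const glm::vec3& currentPosition,
                                      float stiffness) {
    return (recoveryPosition - currentPosition) * powf(stiffness, 3.0f);
}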
@ -674,6 +686,14 @@ bool Flow::updateRootFramePositions(const AnimPoseVec& absolutePoses, size_t thr
return true;
}
void Flow::updateCollisionJoint(FlowCollisionSphere& collision, AnimPoseVec& absolutePoses) {
glm::quat jointRotation;
getJointPositionInWorldFrame(absolutePoses, collision._jointIndex, collision._position, _entityPosition, _entityRotation);
getJointRotationInWorldFrame(absolutePoses, collision._jointIndex, jointRotation, _entityRotation);
glm::vec3 worldOffset = jointRotation * collision._offset;
collision._position = collision._position + worldOffset;
}
void Flow::updateJoints(AnimPoseVec& relativePoses, AnimPoseVec& absolutePoses) {
updateAbsolutePoses(relativePoses, absolutePoses);
for (auto &jointData : _flowJointData) {
@ -695,11 +715,11 @@ void Flow::updateJoints(AnimPoseVec& relativePoses, AnimPoseVec& absolutePoses)
}
auto &selfCollisions = _collisionSystem.getSelfCollisions();
for (auto &collision : selfCollisions) {
glm::quat jointRotation;
getJointPositionInWorldFrame(absolutePoses, collision._jointIndex, collision._position, _entityPosition, _entityRotation);
getJointRotationInWorldFrame(absolutePoses, collision._jointIndex, jointRotation, _entityRotation);
glm::vec3 worldOffset = jointRotation * collision._offset;
collision._position = collision._position + worldOffset;
updateCollisionJoint(collision, absolutePoses);
}
auto &selfTouchCollisions = _collisionSystem.getSelfTouchCollisions();
for (auto &collision : selfTouchCollisions) {
updateCollisionJoint(collision, absolutePoses);
}
_collisionSystem.prepareCollisions();
}
@ -710,7 +730,7 @@ void Flow::setJoints(AnimPoseVec& relativePoses, const std::vector<bool>& overri
for (int jointIndex : joints) {
auto &joint = _flowJointData[jointIndex];
if (jointIndex >= 0 && jointIndex < (int)relativePoses.size() && !overrideFlags[jointIndex]) {
relativePoses[jointIndex].rot() = joint.getCurrentRotation();
relativePoses[jointIndex].rot() = joint.getSettings()._active ? joint.getCurrentRotation() : joint.getInitialRotation();
}
}
}

View file

@ -140,6 +140,7 @@ public:
std::vector<FlowCollisionResult> checkFlowThreadCollisions(FlowThread* flowThread);
std::vector<FlowCollisionSphere>& getSelfCollisions() { return _selfCollisions; };
std::vector<FlowCollisionSphere>& getSelfTouchCollisions() { return _selfTouchCollisions; };
void setOthersCollisions(const std::vector<FlowCollisionSphere>& othersCollisions) { _othersCollisions = othersCollisions; }
void prepareCollisions();
void resetCollisions();
@ -150,9 +151,11 @@ public:
void setActive(bool active) { _active = active; }
bool getActive() const { return _active; }
const std::vector<FlowCollisionSphere>& getCollisions() const { return _selfCollisions; }
void clearSelfCollisions() { _selfCollisions.clear(); }
protected:
std::vector<FlowCollisionSphere> _selfCollisions;
std::vector<FlowCollisionSphere> _othersCollisions;
std::vector<FlowCollisionSphere> _selfTouchCollisions;
std::vector<FlowCollisionSphere> _allCollisions;
float _scale { 1.0f };
bool _active { false };
@ -210,7 +213,7 @@ public:
bool isHelper() const { return _isHelper; }
const FlowPhysicsSettings& getSettings() { return _settings; }
void setSettings(const FlowPhysicsSettings& settings) { _settings = settings; }
void setSettings(const FlowPhysicsSettings& settings) { _settings = settings; _initialRadius = _settings._radius; }
const glm::vec3& getCurrentPosition() const { return _currentPosition; }
int getIndex() const { return _index; }
@ -222,6 +225,7 @@ public:
const glm::quat& getCurrentRotation() const { return _currentRotation; }
const glm::vec3& getCurrentTranslation() const { return _initialTranslation; }
const glm::vec3& getInitialPosition() const { return _initialPosition; }
const glm::quat& getInitialRotation() const { return _initialRotation; }
bool isColliding() const { return _colliding; }
protected:
@ -297,6 +301,7 @@ public:
void setPhysicsSettingsForGroup(const QString& group, const FlowPhysicsSettings& settings);
const std::map<QString, FlowPhysicsSettings>& getGroupSettings() const { return _groupSettings; }
void cleanUp();
void updateScale() { setScale(_scale); }
signals:
void onCleanup();
@ -311,6 +316,7 @@ private:
void setJoints(AnimPoseVec& relativePoses, const std::vector<bool>& overrideFlags);
void updateJoints(AnimPoseVec& relativePoses, AnimPoseVec& absolutePoses);
void updateCollisionJoint(FlowCollisionSphere& collision, AnimPoseVec& absolutePoses);
bool updateRootFramePositions(const AnimPoseVec& absolutePoses, size_t threadIndex);
void updateGroupSettings(const QString& group, const FlowPhysicsSettings& settings);
void setScale(float scale);

View file

@ -16,6 +16,27 @@ const float HACK_HMD_TARGET_WEIGHT = 8.0f;
class IKTarget {
public:
/**jsdoc
* <p>An IK target type.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>RotationAndPosition</td><td>Attempt to reach the rotation and position end
* effector.</td></tr>
* <tr><td><code>1</code></td><td>RotationOnly</td><td>Attempt to reach the end effector rotation only.</td></tr>
* <tr><td><code>2</code></td><td>HmdHead</td><td><strong>Deprecated:</strong> A special mode of IK that would attempt
* to prevent unnecessary bending of the spine.</td></tr>
* <tr><td><code>3</code></td><td>HipsRelativeRotationAndPosition</td><td>Attempt to reach a rotation and position end
* effector that is not in absolute rig coordinates but is offset by the avatar hips translation.</td></tr>
* <tr><td><code>4</code></td><td>Spline</td><td>Use a cubic Hermite spline to model the human spine. This prevents
* kinks in the spine and allows for a small amount of stretch and squash.</td></tr>
* <tr><td><code>5</code></td><td>Unknown</td><td>IK is disabled.</td></tr>
* </tbody>
* </table>
* @typedef {number} MyAvatar.IKTargetType
*/
enum class Type {
RotationAndPosition,
RotationOnly,

View file

@ -88,6 +88,218 @@ static const QString MAIN_STATE_MACHINE_RIGHT_HAND_ROTATION("mainStateMachineRig
static const QString MAIN_STATE_MACHINE_RIGHT_HAND_POSITION("mainStateMachineRightHandPosition");
/**jsdoc
* <p>An <code>AnimStateDictionary</code> object may have the following properties. It may also have other properties, set by
* scripts.</p>
* <p><strong>Warning:</strong> These properties are subject to change.</p>
* <table>
* <thead>
* <tr><th>Name</th><th>Type</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>userAnimNone</code></td><td>boolean</td><td><code>true</code> when no user overrideAnimation is
* playing.</td></tr>
* <tr><td><code>userAnimA</code></td><td>boolean</td><td><code>true</code> when a user overrideAnimation is
* playing.</td></tr>
* <tr><td><code>userAnimB</code></td><td>boolean</td><td><code>true</code> when a user overrideAnimation is
* playing.</td></tr>
*
* <tr><td><code>sine</code></td><td>number</td><td>Oscillating sine wave.</td></tr>
* <tr><td><code>moveForwardSpeed</code></td><td>number</td><td>Controls the blend between the various forward walking
* &amp; running animations.</td></tr>
* <tr><td><code>moveBackwardSpeed</code></td><td>number</td><td>Controls the blend between the various backward walking
* &amp; running animations.</td></tr>
* <tr><td><code>moveLateralSpeed</code></td><td>number</td><td>Controls the blend between the various sidestep walking
* &amp; running animations.</td></tr>
*
* <tr><td><code>isMovingForward</code></td><td>boolean</td><td><code>true</code> if the avatar is moving
* forward.</td></tr>
* <tr><td><code>isMovingBackward</code></td><td>boolean</td><td><code>true</code> if the avatar is moving
* backward.</td></tr>
* <tr><td><code>isMovingRight</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the
* right.</td></tr>
* <tr><td><code>isMovingLeft</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the
* left.</td></tr>
* <tr><td><code>isMovingRightHmd</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the right
* while the user is in HMD mode.</td></tr>
* <tr><td><code>isMovingLeftHmd</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the left while
* the user is in HMD mode.</td></tr>
* <tr><td><code>isNotMoving</code></td><td>boolean</td><td><code>true</code> if the avatar is stationary.</td></tr>
*
* <tr><td><code>isTurningRight</code></td><td>boolean</td><td><code>true</code> if the avatar is turning
* clockwise.</td></tr>
* <tr><td><code>isTurningLeft</code></td><td>boolean</td><td><code>true</code> if the avatar is turning
* counter-clockwise.</td></tr>
* <tr><td><code>isNotTurning</code></td><td>boolean</td><td><code>true</code> if the avatar is not turning.</td></tr>
* <tr><td><code>isFlying</code></td><td>boolean</td><td><code>true</code> if the avatar is flying.</td></tr>
* <tr><td><code>isNotFlying</code></td><td>boolean</td><td><code>true</code> if the avatar is not flying.</td></tr>
* <tr><td><code>isTakeoffStand</code></td><td>boolean</td><td><code>true</code> if the avatar is about to execute a
* standing jump.</td></tr>
* <tr><td><code>isTakeoffRun</code></td><td>boolean</td><td><code>true</code> if the avatar is about to execute a running
* jump.</td></tr>
* <tr><td><code>isNotTakeoff</code></td><td>boolean</td><td><code>true</code> if the avatar is not jumping.</td></tr>
* <tr><td><code>isInAirStand</code></td><td>boolean</td><td><code>true</code> if the avatar is in the air after a standing
* jump.</td></tr>
* <tr><td><code>isInAirRun</code></td><td>boolean</td><td><code>true</code> if the avatar is in the air after a running
* jump.</td></tr>
* <tr><td><code>isNotInAir</code></td><td>boolean</td><td><code>true</code> if the avatar is on the ground.</td></tr>
*
* <tr><td><code>inAirAlpha</code></td><td>number</td><td>Used to interpolate between the up, apex, and down in-air
* animations.</td></tr>
* <tr><td><code>ikOverlayAlpha</code></td><td>number</td><td>The blend between upper body and spline IK versus the
* underlying animation.</td></tr>
*
* <tr><td><code>headPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Head</code> joint in
* rig coordinates.</td></tr>
* <tr><td><code>headRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Head</code> joint in
* rig coordinates.</td></tr>
* <tr><td><code>headType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* head.</td></tr>
* <tr><td><code>headWeight</code></td><td>number</td><td>How strongly the head chain blends with the other IK
* chains.</td></tr>
*
* <tr><td><code>leftHandPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>LeftHand</code>
* joint in rig coordinates.</td></tr>
* <tr><td><code>leftHandRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>LeftHand</code>
* joint in rig coordinates.</td></tr>
* <tr><td><code>leftHandType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* left arm.</td></tr>
* <tr><td><code>leftHandPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the elbow angle is
* controlled by the <code>leftHandPoleVector</code> property value. Otherwise the elbow direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>leftHandPoleReferenceVector</code></td><td>{@link Vec3}</td><td>The direction of the elbow in the local
* coordinate system of the elbow.</td></tr>
* <tr><td><code>leftHandPoleVector</code></td><td>{@link Vec3}</td><td>The direction the elbow should point in rig
* coordinates.</td></tr>
*
* <tr><td><code>rightHandPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>RightHand</code>
* joint in rig coordinates.</td></tr>
* <tr><td><code>rightHandRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the
* <code>RightHand</code> joint in rig coordinates.</td></tr>
* <tr><td><code>rightHandType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for
* the right arm.</td></tr>
* <tr><td><code>rightHandPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the elbow angle is
* controlled by the <code>rightHandPoleVector</code> property value. Otherwise the elbow direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>rightHandPoleReferenceVector</code></td><td>{@link Vec3}</td><td>The direction of the elbow in the local
* coordinate system of the elbow.</td></tr>
* <tr><td><code>rightHandPoleVector</code></td><td>{@link Vec3}</td><td>The direction the elbow should point in rig
* coordinates.</td></tr>
*
* <tr><td><code>leftFootIKEnabled</code></td><td>boolean</td><td><code>true</code> if IK is enabled for the left
* foot.</td></tr>
* <tr><td><code>rightFootIKEnabled</code></td><td>boolean</td><td><code>true</code> if IK is enabled for the right
* foot.</td></tr>
*
* <tr><td><code>leftFootIKPositionVar</code></td><td>string</td><td>The name of the source for the desired position
* of the <code>LeftFoot</code> joint. If not set, the foot position of the underlying animation will be used.</td></tr>
* <tr><td><code>leftFootIKRotationVar</code></td><td>string</td><td>The name of the source for the desired rotation
* of the <code>LeftFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
* <tr><td><code>leftFootPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the knee angle is
* controlled by the <code>leftFootPoleVector</code> property value. Otherwise the knee direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>leftFootPoleVector</code></td><td>{@link Vec3}</td><td>The direction the knee should face in rig
* coordinates.</td></tr>
* <tr><td><code>rightFootIKPositionVar</code></td><td>string</td><td>The name of the source for the desired position
* of the <code>RightFoot</code> joint. If not set, the foot position of the underlying animation will be used.</td></tr>
* <tr><td><code>rightFootIKRotationVar</code></td><td>string</td><td>The name of the source for the desired rotation
* of the <code>RightFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
* <tr><td><code>rightFootPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the knee angle is
* controlled by the <code>rightFootPoleVector</code> property value. Otherwise the knee direction comes from the
* underlying animation.</td></tr>
* <tr><td><code>rightFootPoleVector</code></td><td>{@link Vec3}</td><td>The direction the knee should face in rig
* coordinates.</td></tr>
*
* <tr><td><code>isTalking</code></td><td>boolean</td><td><code>true</code> if the avatar is talking.</td></tr>
* <tr><td><code>notIsTalking</code></td><td>boolean</td><td><code>true</code> if the avatar is not talking.</td></tr>
*
* <tr><td><code>solutionSource</code></td><td>{@link MyAvatar.AnimIKSolutionSource|AnimIKSolutionSource}</td>
* <td>Determines the initial conditions of the IK solver.</td></tr>
* <tr><td><code>defaultPoseOverlayAlpha</code></td><td>number</td><td>Controls the blend between the main animation state
* machine and the default pose. Mostly used during full body tracking so that walking &amp; jumping animations do not
* affect the IK of the figure.</td></tr>
* <tr><td><code>defaultPoseOverlayBoneSet</code></td><td>{@link MyAvatar.AnimOverlayBoneSet|AnimOverlayBoneSet}</td>
* <td>Specifies which bones will be replaced by the source overlay.</td></tr>
* <tr><td><code>hipsType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* hips.</td></tr>
* <tr><td><code>hipsPosition</code></td><td>{@link Vec3}</td><td>The desired position of <code>Hips</code> joint in rig
* coordinates.</td></tr>
* <tr><td><code>hipsRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Hips</code> joint in
* rig coordinates.</td></tr>
* <tr><td><code>spine2Type</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
* <code>Spine2</code> joint.</td></tr>
* <tr><td><code>spine2Position</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Spine2</code> joint
* in rig coordinates.</td></tr>
* <tr><td><code>spine2Rotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Spine2</code>
* joint in rig coordinates.</td></tr>
*
* <tr><td><code>leftFootIKAlpha</code></td><td>number</td><td>Blends between full IK for the leg and the underlying
* animation.</td></tr>
* <tr><td><code>rightFootIKAlpha</code></td><td>number</td><td>Blends between full IK for the leg and the underlying
* animation.</td></tr>
* <tr><td><code>hipsWeight</code></td><td>number</td><td>How strongly the hips target blends with the IK solution for
* other IK chains.</td></tr>
* <tr><td><code>leftHandWeight</code></td><td>number</td><td>How strongly the left hand blends with IK solution of other
* IK chains.</td></tr>
* <tr><td><code>rightHandWeight</code></td><td>number</td><td>How strongly the right hand blends with IK solution of other
* IK chains.</td></tr>
* <tr><td><code>spine2Weight</code></td><td>number</td><td>How strongly the spine2 chain blends with the rest of the IK
* solution.</td></tr>
*
* <tr><td><code>leftHandOverlayAlpha</code></td><td>number</td><td>Used to blend in the animated hand gesture poses, such
* as point and thumbs up.</td></tr>
* <tr><td><code>leftHandGraspAlpha</code></td><td>number</td><td>Used to blend between an open hand and a closed hand.
* Usually changed as you squeeze the trigger of the hand controller.</td></tr>
* <tr><td><code>rightHandOverlayAlpha</code></td><td>number</td><td>Used to blend in the animated hand gesture poses,
* such as point and thumbs up.</td></tr>
* <tr><td><code>rightHandGraspAlpha</code></td><td>number</td><td>Used to blend between an open hand and a closed hand.
* Usually changed as you squeeze the trigger of the hand controller.</td></tr>
* <tr><td><code>isLeftIndexPoint</code></td><td>boolean</td><td><code>true</code> if the left hand should be
* pointing.</td></tr>
* <tr><td><code>isLeftThumbRaise</code></td><td>boolean</td><td><code>true</code> if the left hand should be
* thumbs-up.</td></tr>
* <tr><td><code>isLeftIndexPointAndThumbRaise</code></td><td>boolean</td><td><code>true</code> if the left hand should be
* pointing and thumbs-up.</td></tr>
* <tr><td><code>isLeftHandGrasp</code></td><td>boolean</td><td><code>true</code> if the left hand should be at rest,
* grasping the controller.</td></tr>
* <tr><td><code>isRightIndexPoint</code></td><td>boolean</td><td><code>true</code> if the right hand should be
* pointing.</td></tr>
* <tr><td><code>isRightThumbRaise</code></td><td>boolean</td><td><code>true</code> if the right hand should be
* thumbs-up.</td></tr>
* <tr><td><code>isRightIndexPointAndThumbRaise</code></td><td>boolean</td><td><code>true</code> if the right hand should
* be pointing and thumbs-up.</td></tr>
* <tr><td><code>isRightHandGrasp</code></td><td>boolean</td><td><code>true</code> if the right hand should be at rest,
* grasping the controller.</td></tr>
*
* </tbody>
* </table>
* <p>Note: Rig coordinates are <code>+z</code> forward and <code>+y</code> up.</p>
* @typedef {object} MyAvatar.AnimStateDictionary
*/
// Note: The following animVars are intentionally not documented:
// - leftFootPosition
// - leftFootRotation
// - rightFootPosition
// - rightFootRotation
// Note: The following items aren't set in the code below but are still intentionally documented:
// - leftFootIKAlpha
// - rightFootIKAlpha
// - hipsWeight
// - leftHandWeight
// - rightHandWeight
// - spine2Weight
// - rightHandOverlayAlpha
// - rightHandGraspAlpha
// - leftHandOverlayAlpha
// - leftHandGraspAlpha
// - isRightIndexPoint
// - isRightThumbRaise
// - isRightIndexPointAndThumbRaise
// - isRightHandGrasp
// - isLeftIndexPoint
// - isLeftThumbRaise
// - isLeftIndexPointAndThumbRaise
// - isLeftHandGrasp
Rig::Rig() {
// Ensure thread-safe access to the rigRegistry.
std::lock_guard<std::mutex> guard(rigRegistryMutex);
@ -1210,7 +1422,8 @@ void Rig::updateAnimations(float deltaTime, const glm::mat4& rootTransform, cons
_networkAnimState.blendTime += deltaTime;
alpha = _computeNetworkAnimation ? (_networkAnimState.blendTime / TOTAL_BLEND_TIME) : (1.0f - (_networkAnimState.blendTime / TOTAL_BLEND_TIME));
alpha = glm::clamp(alpha, 0.0f, 1.0f);
for (size_t i = 0; i < _networkPoseSet._relativePoses.size(); i++) {
size_t numJoints = std::min(_networkPoseSet._relativePoses.size(), _internalPoseSet._relativePoses.size());
for (size_t i = 0; i < numJoints; i++) {
_networkPoseSet._relativePoses[i].blend(_internalPoseSet._relativePoses[i], alpha);
}
}

View file

@ -1052,7 +1052,7 @@ void AudioClient::setReverbOptions(const AudioEffectOptions* options) {
void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
// If there is server echo, reverb will be applied to the received audio stream so no need to have it here.
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
if (_muted || !_audioOutput || (!_shouldEchoLocally && !hasReverb)) {
if ((_muted && !_shouldEchoLocally) || !_audioOutput || (!_shouldEchoLocally && !hasReverb)) {
return;
}
@ -1354,26 +1354,30 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
for (const AudioInjectorPointer& injector : _activeLocalAudioInjectors) {
// the lock guarantees that injectorBuffer, if found, is invariant
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
auto injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
auto options = injector->getOptions();
static const int HRTF_DATASET_INDEX = 1;
int numChannels = injector->isAmbisonic() ? AudioConstants::AMBISONIC : (injector->isStereo() ? AudioConstants::STEREO : AudioConstants::MONO);
int numChannels = options.ambisonic ? AudioConstants::AMBISONIC : (options.stereo ? AudioConstants::STEREO : AudioConstants::MONO);
size_t bytesToRead = numChannels * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
// get one frame from the injector
memset(_localScratchBuffer, 0, bytesToRead);
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
float gain = injector->getVolume();
bool isSystemSound = !options.positionSet && !options.ambisonic;
if (injector->isAmbisonic()) {
float gain = options.volume * (isSystemSound ? _systemInjectorGain : _localInjectorGain);
if (injector->isPositionSet()) {
if (options.ambisonic) {
if (options.positionSet) {
// distance attenuation
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
glm::vec3 relativePosition = options.position - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
gain = gainForSource(distance, gain);
}
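// Consolidated sketch of the per-injector gain path now spread across the hunks
// in this function: "system sounds" are injectors with neither a world position
// nor an ambisonic field and take the system injector gain, every other local
// injector takes the local injector gain, and positioned sounds are then further
// attenuated by distance via gainForSource(), exactly as above. The free-function
// form and parameter names are illustrative only:
static float injectorGainSketch(const AudioInjectorOptions& options, float localInjectorGain,
                                float systemInjectorGain) {
    bool isSystemSound = !options.positionSet && !options.ambisonic;
    return options.volume * (isSystemSound ? systemInjectorGain : localInjectorGain);
}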
@ -1382,7 +1386,7 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// Calculate the soundfield orientation relative to the listener.
// Injector orientation can be used to align a recording to our world coordinates.
//
glm::quat relativeOrientation = injector->getOrientation() * glm::inverse(_orientationGetter());
glm::quat relativeOrientation = options.orientation * glm::inverse(_orientationGetter());
// convert from Y-up (OpenGL) to Z-up (Ambisonic) coordinate system
float qw = relativeOrientation.w;
@ -1394,12 +1398,12 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
injector->getLocalFOA().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
qw, qx, qy, qz, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else if (injector->isStereo()) {
} else if (options.stereo) {
if (injector->isPositionSet()) {
if (options.positionSet) {
// distance attenuation
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
glm::vec3 relativePosition = options.position - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
gain = gainForSource(distance, gain);
}
@ -1412,10 +1416,10 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
} else { // injector is mono
if (injector->isPositionSet()) {
if (options.positionSet) {
// distance attenuation
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
glm::vec3 relativePosition = options.position - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
gain = gainForSource(distance, gain);
@ -1437,21 +1441,21 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
} else {
qCDebug(audioclient) << "injector has no more data, marking finished for removal";
//qCDebug(audioclient) << "injector has no more data, marking finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
} else {
qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
//qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
}
for (const AudioInjectorPointer& injector : injectorsToRemove) {
qCDebug(audioclient) << "removing injector";
//qCDebug(audioclient) << "removing injector";
_activeLocalAudioInjectors.removeOne(injector);
}
@ -1571,15 +1575,13 @@ bool AudioClient::setIsStereoInput(bool isStereoInput) {
}
bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
auto injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
// local injectors are on the AudioInjectorsThread, so we must guard access
Lock lock(_injectorsMutex);
if (!_activeLocalAudioInjectors.contains(injector)) {
qCDebug(audioclient) << "adding new injector";
//qCDebug(audioclient) << "adding new injector";
_activeLocalAudioInjectors.append(injector);
// move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop())
injectorBuffer->setParent(nullptr);
// update the flag
_localInjectorsAvailable.exchange(true, std::memory_order_release);
@ -1595,6 +1597,11 @@ bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
}
}
int AudioClient::getNumLocalInjectors() {
Lock lock(_injectorsMutex);
return _activeLocalAudioInjectors.size();
}
void AudioClient::outputFormatChanged() {
_outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * OUTPUT_CHANNEL_COUNT * _outputFormat.sampleRate()) /
_desiredOutputFormat.sampleRate();

View file

@ -181,6 +181,8 @@ public:
bool isHeadsetPluggedIn() { return _isHeadsetPluggedIn; }
#endif
int getNumLocalInjectors();
public slots:
void start();
void stop();
@ -239,6 +241,8 @@ public slots:
void setInputVolume(float volume, bool emitSignal = true);
void setReverb(bool reverb);
void setReverbOptions(const AudioEffectOptions* options);
void setLocalInjectorGain(float gain) { _localInjectorGain = gain; };
void setSystemInjectorGain(float gain) { _systemInjectorGain = gain; };
void outputNotify();
@ -393,6 +397,8 @@ private:
int16_t* _outputScratchBuffer { NULL };
// for local audio (used by audio injectors thread)
std::atomic<float> _localInjectorGain { 1.0f };
std::atomic<float> _systemInjectorGain { 1.0f };
float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };

View file

@ -24,9 +24,10 @@
#include "AudioRingBuffer.h"
#include "AudioLogging.h"
#include "SoundCache.h"
#include "AudioSRC.h"
#include "AudioHelpers.h"
int metaType = qRegisterMetaType<AudioInjectorPointer>("AudioInjectorPointer");
AbstractAudioInterface* AudioInjector::_localAudioInterface{ nullptr };
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
@ -51,26 +52,30 @@ AudioInjector::AudioInjector(AudioDataPointer audioData, const AudioInjectorOpti
{
}
AudioInjector::~AudioInjector() {
deleteLocalBuffer();
}
AudioInjector::~AudioInjector() {}
bool AudioInjector::stateHas(AudioInjectorState state) const {
return (_state & state) == state;
return resultWithReadLock<bool>([&] {
return (_state & state) == state;
});
}
void AudioInjector::setOptions(const AudioInjectorOptions& options) {
// since options.stereo is computed from the audio stream,
// we need to copy it from existing options just in case.
bool currentlyStereo = _options.stereo;
bool currentlyAmbisonic = _options.ambisonic;
_options = options;
_options.stereo = currentlyStereo;
_options.ambisonic = currentlyAmbisonic;
withWriteLock([&] {
bool currentlyStereo = _options.stereo;
bool currentlyAmbisonic = _options.ambisonic;
_options = options;
_options.stereo = currentlyStereo;
_options.ambisonic = currentlyAmbisonic;
});
}
void AudioInjector::finishNetworkInjection() {
_state |= AudioInjectorState::NetworkInjectionFinished;
withWriteLock([&] {
_state |= AudioInjectorState::NetworkInjectionFinished;
});
// if we are already finished with local
// injection, then we are finished
@ -80,35 +85,31 @@ void AudioInjector::finishNetworkInjection() {
}
void AudioInjector::finishLocalInjection() {
_state |= AudioInjectorState::LocalInjectionFinished;
if(_options.localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "finishLocalInjection");
return;
}
bool localOnly = false;
withWriteLock([&] {
_state |= AudioInjectorState::LocalInjectionFinished;
localOnly = _options.localOnly;
});
if (localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
finish();
}
}
void AudioInjector::finish() {
_state |= AudioInjectorState::Finished;
withWriteLock([&] {
_state |= AudioInjectorState::Finished;
});
emit finished();
deleteLocalBuffer();
_localBuffer = nullptr;
}
void AudioInjector::restart() {
// grab the AudioInjectorManager
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
if (thread() != QThread::currentThread()) {
QMetaObject::invokeMethod(this, "restart");
if (!_options.localOnly) {
// notify the AudioInjectorManager to wake up in case it's waiting for new injectors
injectorManager->notifyInjectorReadyCondition();
}
return;
}
// reset the current send offset to zero
_currentSendOffset = 0;
@ -121,19 +122,23 @@ void AudioInjector::restart() {
// check our state to decide if we need extra handling for the restart request
if (stateHas(AudioInjectorState::Finished)) {
if (!inject(&AudioInjectorManager::restartFinishedInjector)) {
if (!inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::restart failed to thread injector";
}
}
}
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&)) {
_state = AudioInjectorState::NotFinished;
AudioInjectorOptions options;
withWriteLock([&] {
_state = AudioInjectorState::NotFinished;
options = _options;
});
int byteOffset = 0;
if (_options.secondOffset > 0.0f) {
int numChannels = _options.ambisonic ? 4 : (_options.stereo ? 2 : 1);
byteOffset = (int)(AudioConstants::SAMPLE_RATE * _options.secondOffset * numChannels);
if (options.secondOffset > 0.0f) {
int numChannels = options.ambisonic ? 4 : (options.stereo ? 2 : 1);
byteOffset = (int)(AudioConstants::SAMPLE_RATE * options.secondOffset * numChannels);
byteOffset *= AudioConstants::SAMPLE_SIZE;
}
_currentSendOffset = byteOffset;
@ -143,7 +148,7 @@ bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInj
}
bool success = true;
if (!_options.localOnly) {
if (!options.localOnly) {
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
if (!(*injectorManager.*injection)(sharedFromThis())) {
success = false;
@ -158,7 +163,8 @@ bool AudioInjector::injectLocally() {
if (_localAudioInterface) {
if (_audioData->getNumBytes() > 0) {
_localBuffer = new AudioInjectorLocalBuffer(_audioData);
_localBuffer = QSharedPointer<AudioInjectorLocalBuffer>(new AudioInjectorLocalBuffer(_audioData), &AudioInjectorLocalBuffer::deleteLater);
_localBuffer->moveToThread(thread());
_localBuffer->open(QIODevice::ReadOnly);
_localBuffer->setShouldLoop(_options.loop);
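// Likely rationale for the QSharedPointer/deleteLater pairing above (assumed,
// not documented here): when the last reference to the buffer is dropped, from
// whatever thread, QObject::deleteLater() queues destruction on the buffer's own
// event loop rather than deleting a QObject from a foreign thread, which is what
// allows the explicit deleteLocalBuffer() helper to be removed below.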
@ -181,14 +187,6 @@ bool AudioInjector::injectLocally() {
return success;
}
void AudioInjector::deleteLocalBuffer() {
if (_localBuffer) {
_localBuffer->stop();
_localBuffer->deleteLater();
_localBuffer = nullptr;
}
}
const uchar MAX_INJECTOR_VOLUME = packFloatGainToByte(1.0f);
static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;
@ -220,6 +218,10 @@ int64_t AudioInjector::injectNextFrame() {
static int volumeOptionOffset = -1;
static int audioDataOffset = -1;
AudioInjectorOptions options = resultWithReadLock<AudioInjectorOptions>([&] {
return _options;
});
if (!_currentPacket) {
if (_currentSendOffset < 0 ||
_currentSendOffset >= (int)_audioData->getNumBytes()) {
@ -253,7 +255,7 @@ int64_t AudioInjector::injectNextFrame() {
audioPacketStream << QUuid::createUuid();
// pack the stereo/mono type of the stream
audioPacketStream << _options.stereo;
audioPacketStream << options.stereo;
// pack the flag for loopback, if requested
loopbackOptionOffset = _currentPacket->pos();
@ -262,15 +264,16 @@ int64_t AudioInjector::injectNextFrame() {
// pack the position for injected audio
positionOptionOffset = _currentPacket->pos();
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
sizeof(_options.position));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.position),
sizeof(options.position));
// pack our orientation for injected audio
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.orientation),
sizeof(_options.orientation));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.orientation),
sizeof(options.orientation));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.position),
sizeof(options.position));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
sizeof(_options.position));
glm::vec3 boxCorner = glm::vec3(0);
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&boxCorner),
sizeof(glm::vec3));
@ -283,7 +286,7 @@ int64_t AudioInjector::injectNextFrame() {
volumeOptionOffset = _currentPacket->pos();
quint8 volume = MAX_INJECTOR_VOLUME;
audioPacketStream << volume;
audioPacketStream << _options.ignorePenumbra;
audioPacketStream << options.ignorePenumbra;
audioDataOffset = _currentPacket->pos();
@ -313,10 +316,10 @@ int64_t AudioInjector::injectNextFrame() {
_currentPacket->writePrimitive((uchar)(_localAudioInterface && _localAudioInterface->shouldLoopbackInjectors()));
_currentPacket->seek(positionOptionOffset);
_currentPacket->writePrimitive(_options.position);
_currentPacket->writePrimitive(_options.orientation);
_currentPacket->writePrimitive(options.position);
_currentPacket->writePrimitive(options.orientation);
quint8 volume = packFloatGainToByte(_options.volume);
quint8 volume = packFloatGainToByte(options.volume);
_currentPacket->seek(volumeOptionOffset);
_currentPacket->writePrimitive(volume);
@ -326,8 +329,8 @@ int64_t AudioInjector::injectNextFrame() {
// Might be a reasonable place to do the encode step here.
QByteArray decodedAudio;
int totalBytesLeftToCopy = (_options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
if (!_options.loop) {
int totalBytesLeftToCopy = (options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
if (!options.loop) {
// If we aren't looping, let's make sure we don't read past the end
int bytesLeftToRead = _audioData->getNumBytes() - _currentSendOffset;
totalBytesLeftToCopy = std::min(totalBytesLeftToCopy, bytesLeftToRead);
@ -342,14 +345,16 @@ int64_t AudioInjector::injectNextFrame() {
auto samplesOut = reinterpret_cast<AudioSample*>(decodedAudio.data());
// Copy and Measure the loudness of this frame
_loudness = 0.0f;
for (int i = 0; i < samplesLeftToCopy; ++i) {
auto index = (currentSample + i) % _audioData->getNumSamples();
auto sample = samples[index];
samplesOut[i] = sample;
_loudness += abs(sample) / (AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
}
_loudness /= (float)samplesLeftToCopy;
withWriteLock([&] {
_loudness = 0.0f;
for (int i = 0; i < samplesLeftToCopy; ++i) {
auto index = (currentSample + i) % _audioData->getNumSamples();
auto sample = samples[index];
samplesOut[i] = sample;
_loudness += abs(sample) / (AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
}
_loudness /= (float)samplesLeftToCopy;
});
_currentSendOffset = (_currentSendOffset + totalBytesLeftToCopy) %
_audioData->getNumBytes();
@ -371,7 +376,7 @@ int64_t AudioInjector::injectNextFrame() {
_outgoingSequenceNumber++;
}
if (_currentSendOffset == 0 && !_options.loop) {
if (_currentSendOffset == 0 && !options.loop) {
finishNetworkInjection();
return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
}
@ -391,134 +396,10 @@ int64_t AudioInjector::injectNextFrame() {
// If we are falling behind by more frames than our threshold, let's skip the frames ahead
qCDebug(audio) << this << "injectNextFrame() skipping ahead, fell behind by " << (currentFrameBasedOnElapsedTime - _nextFrame) << " frames";
_nextFrame = currentFrameBasedOnElapsedTime;
_currentSendOffset = _nextFrame * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL * (_options.stereo ? 2 : 1) % _audioData->getNumBytes();
_currentSendOffset = _nextFrame * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL * (options.stereo ? 2 : 1) % _audioData->getNumBytes();
}
int64_t playNextFrameAt = ++_nextFrame * AudioConstants::NETWORK_FRAME_USECS;
return std::max(INT64_C(0), playNextFrameAt - currentTime);
}
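// A standalone sketch of the pacing logic in injectNextFrame(): each network frame is
// scheduled at a fixed cadence from the injector's start, and if the sender has fallen more
// than a threshold behind it jumps _nextFrame forward instead of bursting every missed frame.
// The 10 ms frame length and the threshold are assumed here for illustration.
#include <algorithm>
#include <cstdint>
#include <cstdio>

static const int64_t FRAME_USECS = 10000;            // stand-in for NETWORK_FRAME_USECS
static const int64_t CATCH_UP_THRESHOLD_FRAMES = 10; // illustrative threshold

int64_t nextFrameDelay(int64_t& nextFrame, int64_t elapsedUsecs) {
    int64_t currentFrameByTime = elapsedUsecs / FRAME_USECS;
    if (currentFrameByTime - nextFrame > CATCH_UP_THRESHOLD_FRAMES) {
        // fell too far behind: skip ahead rather than sending a burst of stale frames
        nextFrame = currentFrameByTime;
    }
    int64_t playNextFrameAt = ++nextFrame * FRAME_USECS;
    return std::max<int64_t>(0, playNextFrameAt - elapsedUsecs);
}

int main() {
    int64_t frame = 0;
    printf("%lld\n", (long long)nextFrameDelay(frame, 3000));   // ~7 ms until frame 1
    printf("%lld\n", (long long)nextFrameDelay(frame, 250000)); // far behind: skips ahead
    return 0;
}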
void AudioInjector::stop() {
// trigger a call on the injector's thread to change state to finished
QMetaObject::invokeMethod(this, "finish");
}
void AudioInjector::triggerDeleteAfterFinish() {
// make sure this fires on the AudioInjector thread
if (thread() != QThread::currentThread()) {
QMetaObject::invokeMethod(this, "triggerDeleteAfterFinish", Qt::QueuedConnection);
return;
}
if (stateHas(AudioInjectorState::Finished)) {
stop();
} else {
_state |= AudioInjectorState::PendingDelete;
}
}
AudioInjectorPointer AudioInjector::playSoundAndDelete(SharedSoundPointer sound, const AudioInjectorOptions& options) {
AudioInjectorPointer injector = playSound(sound, options);
if (injector) {
injector->_state |= AudioInjectorState::PendingDelete;
}
return injector;
}
AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const AudioInjectorOptions& options) {
if (!sound || !sound->isReady()) {
return AudioInjectorPointer();
}
if (options.pitch == 1.0f) {
AudioInjectorPointer injector = AudioInjectorPointer::create(sound, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread injector";
}
return injector;
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto audioData = sound->getAudioData();
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
AudioInjectorPointer injector = AudioInjectorPointer::create(newAudioData, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread pitch-shifted injector";
}
return injector;
}
}
AudioInjectorPointer AudioInjector::playSoundAndDelete(AudioDataPointer audioData, const AudioInjectorOptions& options) {
AudioInjectorPointer injector = playSound(audioData, options);
if (injector) {
injector->_state |= AudioInjectorState::PendingDelete;
}
return injector;
}
AudioInjectorPointer AudioInjector::playSound(AudioDataPointer audioData, const AudioInjectorOptions& options) {
if (options.pitch == 1.0f) {
AudioInjectorPointer injector = AudioInjectorPointer::create(audioData, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread pitch-shifted injector";
}
return injector;
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
return AudioInjector::playSound(newAudioData, options);
}
}
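// A sketch of the pitch-shift trick used in playSound(): the sound is resampled from the
// standard rate to SAMPLE_RATE / pitch and then played back at the standard rate, which
// shifts pitch and duration together (pitch 2.0 gives half as many frames, one octave up).
// The 24000 Hz rate is assumed for illustration and AudioSRC itself is not used here.
#include <algorithm>
#include <cmath>
#include <cstdio>

static const float SAMPLE_RATE = 24000.0f;

int resampledFrameCount(int numFrames, float requestedPitch) {
    // limit pitch to 4 octaves in either direction, as the injector code does
    float pitch = std::min(std::max(requestedPitch, 1.0f / 16.0f), 16.0f);
    int resampledRate = (int)std::lround(SAMPLE_RATE / pitch);
    // frames scale by the rate ratio; the real code asks AudioSRC::getMaxOutput()
    return (int)std::lround(numFrames * (float)resampledRate / SAMPLE_RATE);
}

int main() {
    printf("%d\n", resampledFrameCount(24000, 2.0f));  // 12000 frames, one octave up
    printf("%d\n", resampledFrameCount(24000, 0.5f));  // 48000 frames, one octave down
    return 0;
}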

View file

@ -19,6 +19,8 @@
#include <QtCore/QSharedPointer>
#include <QtCore/QThread>
#include <shared/ReadWriteLockable.h>
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
@ -49,7 +51,7 @@ AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs)
// In order to make scripting cleaner for the AudioInjector, the script now holds on to the AudioInjector object
// until it dies.
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector> {
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector>, public ReadWriteLockable {
Q_OBJECT
public:
AudioInjector(SharedSoundPointer sound, const AudioInjectorOptions& injectorOptions);
@ -61,40 +63,34 @@ public:
int getCurrentSendOffset() const { return _currentSendOffset; }
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
AudioInjectorLocalBuffer* getLocalBuffer() const { return _localBuffer; }
QSharedPointer<AudioInjectorLocalBuffer> getLocalBuffer() const { return _localBuffer; }
AudioHRTF& getLocalHRTF() { return _localHRTF; }
AudioFOA& getLocalFOA() { return _localFOA; }
bool isLocalOnly() const { return _options.localOnly; }
float getVolume() const { return _options.volume; }
bool isPositionSet() const { return _options.positionSet; }
glm::vec3 getPosition() const { return _options.position; }
glm::quat getOrientation() const { return _options.orientation; }
bool isStereo() const { return _options.stereo; }
bool isAmbisonic() const { return _options.ambisonic; }
float getLoudness() const { return resultWithReadLock<float>([&] { return _loudness; }); }
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
bool isLocalOnly() const { return resultWithReadLock<bool>([&] { return _options.localOnly; }); }
float getVolume() const { return resultWithReadLock<float>([&] { return _options.volume; }); }
bool isPositionSet() const { return resultWithReadLock<bool>([&] { return _options.positionSet; }); }
glm::vec3 getPosition() const { return resultWithReadLock<glm::vec3>([&] { return _options.position; }); }
glm::quat getOrientation() const { return resultWithReadLock<glm::quat>([&] { return _options.orientation; }); }
bool isStereo() const { return resultWithReadLock<bool>([&] { return _options.stereo; }); }
bool isAmbisonic() const { return resultWithReadLock<bool>([&] { return _options.ambisonic; }); }
AudioInjectorOptions getOptions() const { return resultWithReadLock<AudioInjectorOptions>([&] { return _options; }); }
void setOptions(const AudioInjectorOptions& options);
bool stateHas(AudioInjectorState state) const ;
static void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }
static AudioInjectorPointer playSoundAndDelete(SharedSoundPointer sound, const AudioInjectorOptions& options);
static AudioInjectorPointer playSound(SharedSoundPointer sound, const AudioInjectorOptions& options);
static AudioInjectorPointer playSoundAndDelete(AudioDataPointer audioData, const AudioInjectorOptions& options);
static AudioInjectorPointer playSound(AudioDataPointer audioData, const AudioInjectorOptions& options);
void restart();
void finish();
void finishNetworkInjection();
public slots:
void restart();
void stop();
void triggerDeleteAfterFinish();
const AudioInjectorOptions& getOptions() const { return _options; }
void setOptions(const AudioInjectorOptions& options);
float getLoudness() const { return _loudness; }
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
void finish();
void finishLocalInjection();
void finishNetworkInjection();
signals:
void finished();
@ -104,7 +100,6 @@ private:
int64_t injectNextFrame();
bool inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&));
bool injectLocally();
void deleteLocalBuffer();
static AbstractAudioInterface* _localAudioInterface;
@ -116,7 +111,7 @@ private:
float _loudness { 0.0f };
int _currentSendOffset { 0 };
std::unique_ptr<NLPacket> _currentPacket { nullptr };
AudioInjectorLocalBuffer* _localBuffer { nullptr };
QSharedPointer<AudioInjectorLocalBuffer> _localBuffer { nullptr };
int64_t _nextFrame { 0 };
std::unique_ptr<QElapsedTimer> _frameTimer { nullptr };
@ -128,4 +123,6 @@ private:
friend class AudioInjectorManager;
};
Q_DECLARE_METATYPE(AudioInjectorPointer)
#endif // hifi_AudioInjector_h
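// A standalone sketch of the locking pattern the getters above rely on: each accessor copies
// its value out under a shared (read) lock so _options can be replaced from another thread.
// This stand-in uses std::shared_mutex; the real helper comes from shared/ReadWriteLockable.h
// and its exact interface is assumed here.
#include <cstdio>
#include <mutex>
#include <shared_mutex>

class Lockable {
public:
    template <typename T, typename F>
    T resultWithReadLock(F&& criticalSection) const {
        std::shared_lock<std::shared_mutex> lock(_mutex);
        return criticalSection();
    }
    template <typename F>
    void withWriteLock(F&& criticalSection) {
        std::unique_lock<std::shared_mutex> lock(_mutex);
        criticalSection();
    }
private:
    mutable std::shared_mutex _mutex;
};

struct Options { float volume = 1.0f; };

class Injector : public Lockable {
public:
    float getVolume() const { return resultWithReadLock<float>([&] { return _options.volume; }); }
    void setVolume(float v) { withWriteLock([&] { _options.volume = v; }); }
private:
    Options _options;
};

int main() {
    Injector injector;
    injector.setVolume(0.5f);
    printf("%f\n", injector.getVolume());
    return 0;
}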

View file

@ -16,6 +16,10 @@ AudioInjectorLocalBuffer::AudioInjectorLocalBuffer(AudioDataPointer audioData) :
{
}
AudioInjectorLocalBuffer::~AudioInjectorLocalBuffer() {
stop();
}
void AudioInjectorLocalBuffer::stop() {
_isStopped = true;
@ -30,9 +34,8 @@ bool AudioInjectorLocalBuffer::seek(qint64 pos) {
}
}
qint64 AudioInjectorLocalBuffer::readData(char* data, qint64 maxSize) {
if (!_isStopped) {
if (!_isStopped && _audioData) {
// first copy to the end of the raw audio
int bytesToEnd = (int)_audioData->getNumBytes() - _currentOffset;
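// A sketch of the read pattern started above: readData() first copies from the current offset
// to the end of the raw audio and, when looping, wraps to the start of the buffer to fill the
// rest of the request. This is a plain-std stand-in for the QIODevice-based local buffer.
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

int64_t readLooped(const std::vector<char>& audio, int& currentOffset, bool loop,
                   char* data, int64_t maxSize) {
    int bytesToEnd = (int)audio.size() - currentOffset;
    int64_t bytesRead = std::min<int64_t>(maxSize, bytesToEnd);
    memcpy(data, audio.data() + currentOffset, (size_t)bytesRead);
    currentOffset += (int)bytesRead;
    if (loop && bytesRead < maxSize) {
        // wrap to the start of the raw audio and keep filling the request
        int64_t remaining = std::min<int64_t>(maxSize - bytesRead, (int64_t)audio.size());
        memcpy(data + bytesRead, audio.data(), (size_t)remaining);
        currentOffset = (int)remaining;
        bytesRead += remaining;
    }
    return bytesRead;
}

int main() {
    std::vector<char> audio = {'a', 'b', 'c', 'd'};
    char out[6];
    int offset = 2;
    int64_t n = readLooped(audio, offset, true, out, 6);   // reads "cdabcd"
    printf("%lld bytes, offset now %d\n", (long long)n, offset);
    return 0;
}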

View file

@ -22,6 +22,7 @@ class AudioInjectorLocalBuffer : public QIODevice {
Q_OBJECT
public:
AudioInjectorLocalBuffer(AudioDataPointer audioData);
~AudioInjectorLocalBuffer();
void stop();

View file

@ -14,11 +14,14 @@
#include <QtCore/QCoreApplication>
#include <SharedUtil.h>
#include <shared/QtHelpers.h>
#include "AudioConstants.h"
#include "AudioInjector.h"
#include "AudioLogging.h"
#include "AudioSRC.h"
AudioInjectorManager::~AudioInjectorManager() {
_shouldStop = true;
@ -30,7 +33,7 @@ AudioInjectorManager::~AudioInjectorManager() {
auto& timePointerPair = _injectors.top();
// ask it to stop and be deleted
timePointerPair.second->stop();
timePointerPair.second->finish();
_injectors.pop();
}
@ -46,6 +49,8 @@ AudioInjectorManager::~AudioInjectorManager() {
_thread->quit();
_thread->wait();
}
moveToThread(qApp->thread());
}
void AudioInjectorManager::createThread() {
@ -55,6 +60,8 @@ void AudioInjectorManager::createThread() {
// when the thread is started, have it call our run to handle injection of audio
connect(_thread, &QThread::started, this, &AudioInjectorManager::run, Qt::DirectConnection);
moveToThread(_thread);
// start the thread
_thread->start();
}
@ -141,36 +148,7 @@ bool AudioInjectorManager::wouldExceedLimits() { // Should be called inside of a
bool AudioInjectorManager::threadInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
}
// guard the injectors vector with a mutex
Lock lock(_injectorsMutex);
if (wouldExceedLimits()) {
return false;
} else {
if (!_thread) {
createThread();
}
// move the injector to the QThread
injector->moveToThread(_thread);
// add the injector to the queue with a send timestamp of now
_injectors.emplace(usecTimestampNow(), injector);
// notify our wait condition so we can inject two frames for this injector immediately
_injectorReady.notify_one();
return true;
}
}
bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
}
@ -188,3 +166,192 @@ bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& i
}
return true;
}
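// A sketch of the queueing that threadInjector()/restartFinishedInjector() perform: injectors
// sit in a min-heap keyed by their next send time, and enqueueing one notifies the worker's
// wait condition so it is serviced immediately. This is a plain-std stand-in; the real manager
// runs a QThread and timestamps with usecTimestampNow(), and the limit value is assumed.
#include <condition_variable>
#include <cstdint>
#include <cstdio>
#include <mutex>
#include <queue>
#include <utility>
#include <vector>

using TimedJob = std::pair<uint64_t, int>;   // (sendTimeUsecs, injector id)
struct Later { bool operator()(const TimedJob& a, const TimedJob& b) const { return a.first > b.first; } };

std::priority_queue<TimedJob, std::vector<TimedJob>, Later> injectors;
std::mutex injectorsMutex;
std::condition_variable injectorReady;

bool threadInjector(uint64_t nowUsecs, int injectorID, size_t maxInjectors = 50) {
    std::lock_guard<std::mutex> lock(injectorsMutex);
    if (injectors.size() >= maxInjectors) {
        return false;                        // the real code calls wouldExceedLimits()
    }
    injectors.emplace(nowUsecs, injectorID); // due immediately
    injectorReady.notify_one();              // wake the injection thread
    return true;
}

int main() {
    printf("%d\n", threadInjector(0, 1) ? 1 : 0);
    return 0;
}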
AudioInjectorPointer AudioInjectorManager::playSound(const SharedSoundPointer& sound, const AudioInjectorOptions& options, bool setPendingDelete) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return nullptr;
}
AudioInjectorPointer injector = nullptr;
if (sound && sound->isReady()) {
if (options.pitch == 1.0f) {
injector = QSharedPointer<AudioInjector>(new AudioInjector(sound, options), &AudioInjector::deleteLater);
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto audioData = sound->getAudioData();
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
injector = QSharedPointer<AudioInjector>(new AudioInjector(newAudioData, options), &AudioInjector::deleteLater);
}
}
if (!injector) {
return nullptr;
}
if (setPendingDelete) {
injector->_state |= AudioInjectorState::PendingDelete;
}
injector->moveToThread(_thread);
injector->inject(&AudioInjectorManager::threadInjector);
return injector;
}
AudioInjectorPointer AudioInjectorManager::playSound(const AudioDataPointer& audioData, const AudioInjectorOptions& options, bool setPendingDelete) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return nullptr;
}
AudioInjectorPointer injector = nullptr;
if (options.pitch == 1.0f) {
injector = QSharedPointer<AudioInjector>(new AudioInjector(audioData, options), &AudioInjector::deleteLater);
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
injector = QSharedPointer<AudioInjector>(new AudioInjector(newAudioData, options), &AudioInjector::deleteLater);
}
if (!injector) {
return nullptr;
}
if (setPendingDelete) {
injector->_state |= AudioInjectorState::PendingDelete;
}
injector->moveToThread(_thread);
injector->inject(&AudioInjectorManager::threadInjector);
return injector;
}
void AudioInjectorManager::setOptionsAndRestart(const AudioInjectorPointer& injector, const AudioInjectorOptions& options) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "setOptionsAndRestart", Q_ARG(const AudioInjectorPointer&, injector), Q_ARG(const AudioInjectorOptions&, options));
_injectorReady.notify_one();
return;
}
injector->setOptions(options);
injector->restart();
}
void AudioInjectorManager::restart(const AudioInjectorPointer& injector) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "restart", Q_ARG(const AudioInjectorPointer&, injector));
_injectorReady.notify_one();
return;
}
injector->restart();
}
void AudioInjectorManager::setOptions(const AudioInjectorPointer& injector, const AudioInjectorOptions& options) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "setOptions", Q_ARG(const AudioInjectorPointer&, injector), Q_ARG(const AudioInjectorOptions&, options));
_injectorReady.notify_one();
return;
}
injector->setOptions(options);
}
AudioInjectorOptions AudioInjectorManager::getOptions(const AudioInjectorPointer& injector) {
if (!injector) {
return AudioInjectorOptions();
}
return injector->getOptions();
}
float AudioInjectorManager::getLoudness(const AudioInjectorPointer& injector) {
if (!injector) {
return 0.0f;
}
return injector->getLoudness();
}
bool AudioInjectorManager::isPlaying(const AudioInjectorPointer& injector) {
if (!injector) {
return false;
}
return injector->isPlaying();
}
void AudioInjectorManager::stop(const AudioInjectorPointer& injector) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "stop", Q_ARG(const AudioInjectorPointer&, injector));
_injectorReady.notify_one();
return;
}
injector->finish();
}
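// A sketch of the thread-hop pattern shared by setOptions()/restart()/stop() above: when the
// caller is not on the manager's worker thread, the call is re-posted to that thread with
// QMetaObject::invokeMethod and the worker is woken up. This stand-in uses the functor
// overload (Qt 5.10+) so it builds without moc; the real code posts the named slot with
// Q_ARG(...) and then notifies _injectorReady.
#include <QtCore/QCoreApplication>
#include <QtCore/QMetaObject>
#include <QtCore/QObject>
#include <QtCore/QThread>

class Worker : public QObject {
public:
    void stopInjector(int id) {
        if (QThread::currentThread() != thread()) {
            // queue the call onto the thread this Worker lives on, then return
            QMetaObject::invokeMethod(this, [this, id] { stopInjector(id); },
                                      Qt::QueuedConnection);
            return;
        }
        qDebug("stopping injector %d on the worker thread", id);
    }
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    QThread workerThread;
    Worker worker;
    worker.moveToThread(&workerThread);
    workerThread.start();
    worker.stopInjector(7);     // called from the main thread, hops to the worker
    QThread::msleep(50);        // give the worker's event loop time to deliver it
    workerThread.quit();
    workerThread.wait();
    return 0;
}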
size_t AudioInjectorManager::getNumInjectors() {
Lock lock(_injectorsMutex);
return _injectors.size();
}

View file

@ -30,8 +30,27 @@ class AudioInjectorManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
~AudioInjectorManager();
AudioInjectorPointer playSound(const SharedSoundPointer& sound, const AudioInjectorOptions& options, bool setPendingDelete = false);
AudioInjectorPointer playSound(const AudioDataPointer& audioData, const AudioInjectorOptions& options, bool setPendingDelete = false);
size_t getNumInjectors();
public slots:
void setOptionsAndRestart(const AudioInjectorPointer& injector, const AudioInjectorOptions& options);
void restart(const AudioInjectorPointer& injector);
void setOptions(const AudioInjectorPointer& injector, const AudioInjectorOptions& options);
AudioInjectorOptions getOptions(const AudioInjectorPointer& injector);
float getLoudness(const AudioInjectorPointer& injector);
bool isPlaying(const AudioInjectorPointer& injector);
void stop(const AudioInjectorPointer& injector);
private slots:
void run();
private:
using TimeInjectorPointerPair = std::pair<uint64_t, AudioInjectorPointer>;
@ -49,11 +68,10 @@ private:
using Lock = std::unique_lock<Mutex>;
bool threadInjector(const AudioInjectorPointer& injector);
bool restartFinishedInjector(const AudioInjectorPointer& injector);
void notifyInjectorReadyCondition() { _injectorReady.notify_one(); }
bool wouldExceedLimits();
AudioInjectorManager() {};
AudioInjectorManager() { createThread(); }
AudioInjectorManager(const AudioInjectorManager&) = delete;
AudioInjectorManager& operator=(const AudioInjectorManager&) = delete;

View file

@ -372,13 +372,6 @@ bool Avatar::applyGrabChanges() {
target->removeGrab(grab);
_avatarGrabs.erase(itr);
grabAddedOrRemoved = true;
if (isMyAvatar()) {
const EntityItemPointer& entity = std::dynamic_pointer_cast<EntityItem>(target);
if (entity && entity->getEntityHostType() == entity::HostType::AVATAR && entity->getSimulationOwner().getID() == getID()) {
EntityItemProperties properties = entity->getProperties();
sendPacket(entity->getID(), properties);
}
}
} else {
undeleted.push_back(id);
}

View file

@ -127,7 +127,12 @@ private:
class Avatar : public AvatarData, public scriptable::ModelProvider, public MetaModelPayload {
Q_OBJECT
// This property has JSDoc in MyAvatar.h.
/*jsdoc
* @comment IMPORTANT: The JSDoc for the following properties should be copied to MyAvatar.h.
*
* @property {Vec3} skeletonOffset - Can be used to apply a translation offset between the avatar's position and the
* registration point of the 3D model.
*/
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
public:
@ -175,7 +180,6 @@ public:
/// Returns the distance to use as a LOD parameter.
float getLODDistance() const;
virtual bool isMyAvatar() const override { return false; }
virtual void createOrb() { }
enum class LoadingStatus {
@ -196,36 +200,52 @@ public:
virtual QStringList getJointNames() const override;
/**jsdoc
* Gets the default rotation of a joint (in the current avatar) relative to its parent.
* <p>For information on the joint hierarchy used, see
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
* @function MyAvatar.getDefaultJointRotation
* @param {number} index
* @returns {Quat}
* @param {number} index - The joint index.
* @returns {Quat} The default rotation of the joint if the joint index is valid, otherwise {@link Quat(0)|Quat.IDENTITY}.
*/
Q_INVOKABLE virtual glm::quat getDefaultJointRotation(int index) const;
/**jsdoc
* Gets the default translation of a joint (in the current avatar) relative to its parent, in model coordinates.
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
* <p>For information on the joint hierarchy used, see
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
* @function MyAvatar.getDefaultJointTranslation
* @param {number} index
* @returns {Vec3}
* @param {number} index - The joint index.
* @returns {Vec3} The default translation of the joint (in model coordinates) if the joint index is valid, otherwise
* {@link Vec3(0)|Vec3.ZERO}.
*/
Q_INVOKABLE virtual glm::vec3 getDefaultJointTranslation(int index) const;
/**jsdoc
* Provides read only access to the default joint rotations in avatar coordinates.
* Gets the default joint rotations in avatar coordinates.
* The default pose of the avatar is defined by the position and orientation of all bones
* in the avatar's model file. Typically this is a T-pose.
* @function MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame
* @param index {number} index number
* @returns {Quat} The rotation of this joint in avatar coordinates.
* @param index {number} - The joint index.
* @returns {Quat} The default rotation of the joint in avatar coordinates.
* @example <caption>Report the default rotation of your avatar's head joint relative to your avatar.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var defaultHeadRotation = MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(headIndex);
* print("Default head rotation: " + JSON.stringify(Quat.safeEulerAngles(defaultHeadRotation))); // Degrees
*/
Q_INVOKABLE virtual glm::quat getAbsoluteDefaultJointRotationInObjectFrame(int index) const;
/**jsdoc
* Provides read only access to the default joint translations in avatar coordinates.
* Gets the default joint translations in avatar coordinates.
* The default pose of the avatar is defined by the position and orientation of all bones
* in the avatar's model file. Typically this is a T-pose.
* @function MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame
* @param index {number} index number
* @returns {Vec3} The position of this joint in avatar coordinates.
* @param index {number} - The joint index.
* @returns {Vec3} The default position of the joint in avatar coordinates.
* @example <caption>Report the default translation of your avatar's head joint relative to your avatar.</caption>
* var headIndex = MyAvatar.getJointIndex("Head");
* var defaultHeadTranslation = MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(headIndex);
* print("Default head translation: " + JSON.stringify(defaultHeadTranslation));
*/
Q_INVOKABLE virtual glm::vec3 getAbsoluteDefaultJointTranslationInObjectFrame(int index) const;
@ -233,59 +253,88 @@ public:
virtual glm::vec3 getAbsoluteJointScaleInObjectFrame(int index) const override;
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
/**jsdoc
* Sets the rotation of a joint relative to the avatar.
* <p><strong>Warning:</strong> Not able to be used in the <code>MyAvatar</code> API.</p>
* @function MyAvatar.setAbsoluteJointRotationInObjectFrame
* @param {number} index - The index of the joint. <em>Not used.</em>
* @param {Quat} rotation - The rotation of the joint relative to the avatar. <em>Not used.</em>
* @returns {boolean} <code>false</code>.
*/
virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override { return false; }
/**jsdoc
* Sets the translation of a joint relative to the avatar.
* <p><strong>Warning:</strong> Not able to be used in the <code>MyAvatar</code> API.</p>
* @function MyAvatar.setAbsoluteJointTranslationInObjectFrame
* @param {number} index - The index of the joint. <em>Not used.</em>
* @param {Vec3} translation - The translation of the joint relative to the avatar. <em>Not used.</em>
* @returns {boolean} <code>false</code>.
*/
virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override { return false; }
virtual glm::vec3 getSpine2SplineOffset() const { return _spine2SplineOffset; }
virtual float getSpine2SplineRatio() const { return _spine2SplineRatio; }
// world-space to avatar-space rig conversion functions
/**jsdoc
* @function MyAvatar.worldToJointPoint
* @param {Vec3} position
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a position in world coordinates to a position in a joint's coordinates, or avatar coordinates if no joint is
* specified.
* @function MyAvatar.worldToJointPoint
* @param {Vec3} position - The position in world coordinates.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The position in the joint's coordinate system, or avatar coordinate system if no joint is specified.
*/
Q_INVOKABLE glm::vec3 worldToJointPoint(const glm::vec3& position, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.worldToJointDirection
* @param {Vec3} direction
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a direction in world coordinates to a direction in a joint's coordinates, or avatar coordinates if no joint
* is specified.
* @function MyAvatar.worldToJointDirection
* @param {Vec3} direction - The direction in world coordinates.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The direction in the joint's coordinate system, or avatar coordinate system if no joint is specified.
*/
Q_INVOKABLE glm::vec3 worldToJointDirection(const glm::vec3& direction, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.worldToJointRotation
* @param {Quat} rotation
* @param {number} [jointIndex=-1]
* @returns {Quat}
* Transforms a rotation in world coordinates to a rotation in a joint's coordinates, or avatar coordinates if no joint is
* specified.
* @function MyAvatar.worldToJointRotation
* @param {Quat} rotation - The rotation in world coordinates.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Quat} The rotation in the joint's coordinate system, or avatar coordinate system if no joint is specified.
*/
Q_INVOKABLE glm::quat worldToJointRotation(const glm::quat& rotation, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.jointToWorldPoint
* @param {vec3} position
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a position in a joint's coordinates, or avatar coordinates if no joint is specified, to a position in world
* coordinates.
* @function MyAvatar.jointToWorldPoint
* @param {Vec3} position - The position in joint coordinates, or avatar coordinates if no joint is specified.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The position in world coordinates.
*/
Q_INVOKABLE glm::vec3 jointToWorldPoint(const glm::vec3& position, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.jointToWorldDirection
* @param {Vec3} direction
* @param {number} [jointIndex=-1]
* @returns {Vec3}
*/
* Transforms a direction in a joint's coordinates, or avatar coordinates if no joint is specified, to a direction in world
* coordinates.
* @function MyAvatar.jointToWorldDirection
* @param {Vec3} direction - The direction in joint coordinates, or avatar coordinates if no joint is specified.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Vec3} The direction in world coordinates.
*/
Q_INVOKABLE glm::vec3 jointToWorldDirection(const glm::vec3& direction, const int jointIndex = -1) const;
/**jsdoc
* @function MyAvatar.jointToWorldRotation
* @param {Quat} rotation
* @param {number} [jointIndex=-1]
* @returns {Quat}
*/
* Transforms a rotation in a joint's coordinates, or avatar coordinates if no joint is specified, to a rotation in world
* coordinates.
* @function MyAvatar.jointToWorldRotation
* @param {Quat} rotation - The rotation in joint coordinates, or avatar coordinates if no joint is specified.
* @param {number} [jointIndex=-1] - The index of the joint.
* @returns {Quat} The rotation in world coordinates.
*/
Q_INVOKABLE glm::quat jointToWorldRotation(const glm::quat& rotation, const int jointIndex = -1) const;
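// A small sketch of the world<->joint conversions documented above, written directly against
// glm: a joint's world transform (rotation plus position) maps joint-space points into world
// space, and its inverse maps world-space points back. jointWorldRotation and
// jointWorldPosition stand in for whatever the rig reports for a given joint index.
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::vec3 worldToJointPoint(const glm::vec3& worldPoint,
                            const glm::quat& jointWorldRotation,
                            const glm::vec3& jointWorldPosition) {
    return glm::inverse(jointWorldRotation) * (worldPoint - jointWorldPosition);
}

glm::vec3 jointToWorldPoint(const glm::vec3& jointPoint,
                            const glm::quat& jointWorldRotation,
                            const glm::vec3& jointWorldPosition) {
    return jointWorldRotation * jointPoint + jointWorldPosition;
}

int main() {
    glm::quat rot = glm::angleAxis(glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::vec3 pos(1.0f, 0.0f, 0.0f);
    glm::vec3 world(1.0f, 0.0f, -1.0f);
    glm::vec3 local = worldToJointPoint(world, rot, pos);
    glm::vec3 back = jointToWorldPoint(local, rot, pos);  // round-trips to the original point
    printf("local=(%.2f, %.2f, %.2f) back=(%.2f, %.2f, %.2f)\n",
           local.x, local.y, local.z, back.x, back.y, back.z);
    return 0;
}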
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
@ -297,7 +346,7 @@ public:
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
/**jsdoc
* Set the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* Sets the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
* @function MyAvatar.setSkeletonOffset
* @param {Vec3} offset - The skeleton offset to set.
@ -313,7 +362,7 @@ public:
Q_INVOKABLE void setSkeletonOffset(const glm::vec3& offset);
/**jsdoc
* Get the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* Gets the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
* @function MyAvatar.getSkeletonOffset
* @returns {Vec3} The current skeleton offset.
@ -325,7 +374,7 @@ public:
virtual glm::vec3 getSkeletonPosition() const;
/**jsdoc
* Get the position of a joint in the current avatar.
* Gets the position of a joint in the current avatar.
* @function MyAvatar.getJointPosition
* @param {number} index - The index of the joint.
* @returns {Vec3} The position of the joint in world coordinates.
@ -333,7 +382,7 @@ public:
Q_INVOKABLE glm::vec3 getJointPosition(int index) const;
/**jsdoc
* Get the position of a joint in the current avatar.
* Gets the position of a joint in the current avatar.
* @function MyAvatar.getJointPosition
* @param {string} name - The name of the joint.
* @returns {Vec3} The position of the joint in world coordinates.
@ -343,7 +392,7 @@ public:
Q_INVOKABLE glm::vec3 getJointPosition(const QString& name) const;
/**jsdoc
* Get the position of the current avatar's neck in world coordinates.
* Gets the position of the current avatar's neck in world coordinates.
* @function MyAvatar.getNeckPosition
* @returns {Vec3} The position of the neck in world coordinates.
* @example <caption>Report the position of your avatar's neck.</caption>
@ -352,8 +401,9 @@ public:
Q_INVOKABLE glm::vec3 getNeckPosition() const;
/**jsdoc
* Gets the current acceleration of the avatar.
* @function MyAvatar.getAcceleration
* @returns {Vec3}
* @returns {Vec3} The current acceleration of the avatar.
*/
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
@ -377,47 +427,55 @@ public:
void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
float computeMass();
/**jsdoc
* Get the position of the current avatar's feet (or rather, bottom of its collision capsule) in world coordinates.
* Gets the position of the current avatar's feet (or rather, bottom of its collision capsule) in world coordinates.
* @function MyAvatar.getWorldFeetPosition
* @returns {Vec3} The position of the avatar's feet in world coordinates.
*/
*/
Q_INVOKABLE glm::vec3 getWorldFeetPosition();
void setPositionViaScript(const glm::vec3& position) override;
void setOrientationViaScript(const glm::quat& orientation) override;
/**jsdoc
* Gets the ID of the entity or avatar that the avatar is parented to.
* @function MyAvatar.getParentID
* @returns {Uuid}
* @returns {Uuid} The ID of the entity or avatar that the avatar is parented to. {@link Uuid|Uuid.NULL} if not parented.
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual const QUuid getParentID() const override { return SpatiallyNestable::getParentID(); }
/**jsdoc
* Sets the ID of the entity or avatar that the avatar is parented to.
* @function MyAvatar.setParentID
* @param {Uuid} parentID
* @param {Uuid} parentID - The ID of the entity or avatar that the avatar should be parented to. Set to
* {@link Uuid|Uuid.NULL} to unparent.
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual void setParentID(const QUuid& parentID) override;
/**jsdoc
* Gets the joint of the entity or avatar that the avatar is parented to.
* @function MyAvatar.getParentJointIndex
* @returns {number}
* @returns {number} The joint of the entity or avatar that the avatar is parented to. <code>65535</code> or
* <code>-1</code> if parented to the entity or avatar's position and orientation rather than a joint.
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual quint16 getParentJointIndex() const override { return SpatiallyNestable::getParentJointIndex(); }
/**jsdoc
* Sets the joint of the entity or avatar that the avatar is parented to.
* @function MyAvatar.setParentJointIndex
* @param {number} parentJointIndex
* @param {number} parentJointIndex - The joint of the entity or avatar that the avatar should be parented to. Use
* <code>65535</code> or <code>-1</code> to parent to the entity or avatar's position and orientation rather than a
* joint.
*/
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
Q_INVOKABLE virtual void setParentJointIndex(quint16 parentJointIndex) override;
/**jsdoc
* Returns an array of joints, where each joint is an object containing name, index, and parentIndex fields.
* Gets information on all the joints in the avatar's skeleton.
* @function MyAvatar.getSkeleton
* @returns {MyAvatar.SkeletonJoint[]} A list of information about each joint in this avatar's skeleton.
* @returns {MyAvatar.SkeletonJoint[]} Information about each joint in the avatar's skeleton.
*/
/**jsdoc
* Information about a single joint in an Avatar's skeleton hierarchy.
@ -443,8 +501,9 @@ public:
/**jsdoc
* @function MyAvatar.getSimulationRate
* @param {string} [rateName=""]
* @returns {number}
* @param {string} [rateName=""] - Rate name.
* @returns {number} Simulation rate.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE float getSimulationRate(const QString& rateName = QString("")) const;
@ -500,6 +559,13 @@ public:
uint32_t appendSubMetaItems(render::ItemIDs& subItems);
signals:
/**jsdoc
* Triggered when the avatar's target scale is changed. The target scale is the desired scale of the avatar without any
* restrictions on permissible scale values imposed by the domain.
* @function MyAvatar.targetScaleChanged
* @param {number} targetScale - The avatar's target scale.
* @returns {Signal}
*/
void targetScaleChanged(float targetScale);
public slots:
@ -508,7 +574,7 @@ public slots:
// thread safe, will return last valid palm from cache
/**jsdoc
* Get the position of the left palm in world coordinates.
* Gets the position of the left palm in world coordinates.
* @function MyAvatar.getLeftPalmPosition
* @returns {Vec3} The position of the left palm in world coordinates.
* @example <caption>Report the position of your avatar's left palm.</caption>
@ -517,15 +583,16 @@ public slots:
glm::vec3 getLeftPalmPosition() const;
/**jsdoc
* Get the rotation of the left palm in world coordinates.
* Gets the rotation of the left palm in world coordinates.
* @function MyAvatar.getLeftPalmRotation
* @returns {Quat} The rotation of the left palm in world coordinates.
* @example <caption>Report the rotation of your avatar's left palm.</caption>
* print(JSON.stringify(MyAvatar.getLeftPalmRotation()));
*/
glm::quat getLeftPalmRotation() const;
/**jsdoc
* Get the position of the right palm in world coordinates.
* Gets the position of the right palm in world coordinates.
* @function MyAvatar.getRightPalmPosition
* @returns {Vec3} The position of the right palm in world coordinates.
* @example <caption>Report the position of your avatar's right palm.</caption>
@ -542,21 +609,26 @@ public slots:
*/
glm::quat getRightPalmRotation() const;
/**jsdoc
* @function MyAvatar.setModelURLFinished
* @param {boolean} success
* @deprecated This function is deprecated and will be removed.
*/
// hooked up to Model::setURLFinished signal
void setModelURLFinished(bool success);
/**jsdoc
* @function MyAvatar.rigReady
* @returns {Signal}
* @deprecated This function is deprecated and will be removed.
*/
// Hooked up to Model::rigReady signal
void rigReady();
/**jsdoc
* @function MyAvatar.rigReset
* @returns {Signal}
* @deprecated This function is deprecated and will be removed.
*/
// Jooked up to Model::rigReset signal
// Hooked up to Model::rigReset signal
void rigReset();
protected:
@ -605,7 +677,7 @@ protected:
// protected methods...
bool isLookingAtMe(AvatarSharedPointer avatar) const;
virtual void sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const { }
virtual void sendPacket(const QUuid& entityID) const { }
bool applyGrabChanges();
void relayJointDataToChildren();

View file

@ -28,9 +28,10 @@
namespace AvatarTraits {
template<typename T, T defaultValue>
class AssociatedTraitValues {
using SimpleTypesArray = std::array<T, NUM_SIMPLE_TRAITS>;
public:
// constructor that pre-fills _simpleTypes with the default value specified by the template
AssociatedTraitValues() : _simpleTypes(FirstInstancedTrait, defaultValue) {}
AssociatedTraitValues() { std::fill(_simpleTypes.begin(), _simpleTypes.end(), defaultValue); }
/// inserts the given value for the given simple trait type
void insert(TraitType type, T value) { _simpleTypes[type] = value; }
@ -71,12 +72,12 @@ namespace AvatarTraits {
}
/// const iterators for the vector of simple type values
typename std::vector<T>::const_iterator simpleCBegin() const { return _simpleTypes.cbegin(); }
typename std::vector<T>::const_iterator simpleCEnd() const { return _simpleTypes.cend(); }
typename SimpleTypesArray::const_iterator simpleCBegin() const { return _simpleTypes.cbegin(); }
typename SimpleTypesArray::const_iterator simpleCEnd() const { return _simpleTypes.cend(); }
/// non-const iterators for the vector of simple type values
typename std::vector<T>::iterator simpleBegin() { return _simpleTypes.begin(); }
typename std::vector<T>::iterator simpleEnd() { return _simpleTypes.end(); }
typename SimpleTypesArray::iterator simpleBegin() { return _simpleTypes.begin(); }
typename SimpleTypesArray::iterator simpleEnd() { return _simpleTypes.end(); }
struct TraitWithInstances {
TraitType traitType;
@ -96,7 +97,7 @@ namespace AvatarTraits {
typename std::vector<TraitWithInstances>::iterator instancedEnd() { return _instancedTypes.end(); }
private:
std::vector<T> _simpleTypes;
SimpleTypesArray _simpleTypes;
/// return the iterator to the matching TraitWithInstances object for a given instanced trait type
typename std::vector<TraitWithInstances>::iterator instancesForTrait(TraitType traitType) {
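// A standalone sketch of the container change above: the per-trait values move from a
// std::vector sized at construction to a fixed std::array filled with the default value in the
// constructor, so lookups by trait index never reallocate. The trait count and the int payload
// type used here are assumed for illustration only.
#include <algorithm>
#include <array>
#include <cstdio>

static const int NUM_SIMPLE_TRAITS = 2;   // illustrative count of simple trait types

template<typename T, T defaultValue>
class AssociatedTraitValues {
    using SimpleTypesArray = std::array<T, NUM_SIMPLE_TRAITS>;
public:
    AssociatedTraitValues() { std::fill(_simpleTypes.begin(), _simpleTypes.end(), defaultValue); }
    void insert(int traitType, T value) { _simpleTypes[traitType] = value; }
    T operator[](int traitType) const { return _simpleTypes[traitType]; }
private:
    SimpleTypesArray _simpleTypes;
};

int main() {
    AssociatedTraitValues<int, -1> values;   // -1 as the "unset" default
    values.insert(0, 42);
    printf("%d %d\n", values[0], values[1]); // 42 -1
    return 0;
}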

View file

@ -1143,10 +1143,11 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
// we store the hand state as well as other items in a shared bitset. The hand state is an octal, but is split
// into two sections to maintain backward compatibility. The bits are ordered as such (0-7 left to right).
// AA 6/1/18 added three more flags bits 8,9, and 10 for procedural audio, blink, and eye saccade enabled
// +---+-----+-----+--+--+--+--+-----+
// |x,x|H0,H1|x,x,x|H2|Au|Bl|Ey|xxxxx|
// +---+-----+-----+--+--+--+--+-----+
// +---+-----+-----+--+--+--+--+--+----+
// |x,x|H0,H1|x,x,x|H2|Au|Bl|Ey|He|xxxx|
// +---+-----+-----+--+--+--+--+--+----+
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
// Hero-avatar status (He) - 12th bit
auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
+ (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
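// A sketch of how the shared flag word above is unpacked: the hand state is a 3-bit value
// split across a 2-bit "semi-nibble" plus one extra flag bit, and the newer audio/blink/eye/
// hero flags are single bits. The bit indices follow the comment (3rd, 4th, 8th and 12th bits,
// i.e. indices 2, 3, 7 and 11) but are restated here only for illustration; the real constants
// live in AvatarData.
#include <cstdint>
#include <cstdio>

static const int HAND_STATE_START_BIT = 2;            // H0,H1
static const int HAND_STATE_FINGER_POINTING_BIT = 7;  // H2
static const int HERO_BIT = 11;                        // He
static const int IS_FINGER_POINTING_FLAG = 4;          // adds the third hand-state bit

int getSemiNibbleAt(uint16_t bits, int startBit) { return (bits >> startBit) & 0x3; }
bool oneAtBit16(uint16_t bits, int bit) { return ((bits >> bit) & 0x1) != 0; }

int main() {
    uint16_t bitItems = (1 << HAND_STATE_START_BIT)
        | (1 << HAND_STATE_FINGER_POINTING_BIT)
        | (1 << HERO_BIT);
    int handState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
        + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
    printf("handState=%d hero=%d\n", handState, oneAtBit16(bitItems, HERO_BIT) ? 1 : 0);
    return 0;
}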
@ -1434,6 +1435,47 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
return numBytesRead;
}
/**jsdoc
* The avatar mixer data comprises different types of data, with the data rates of each being tracked in kbps.
*
* <table>
* <thead>
* <tr><th>Rate Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"globalPosition"</code></td><td>Incoming global position.</td></tr>
* <tr><td><code>"localPosition"</code></td><td>Incoming local position.</td></tr>
* <tr><td><code>"avatarBoundingBox"</code></td><td>Incoming avatar bounding box.</td></tr>
* <tr><td><code>"avatarOrientation"</code></td><td>Incoming avatar orientation.</td></tr>
* <tr><td><code>"avatarScale"</code></td><td>Incoming avatar scale.</td></tr>
* <tr><td><code>"lookAtPosition"</code></td><td>Incoming look-at position.</td></tr>
* <tr><td><code>"audioLoudness"</code></td><td>Incoming audio loudness.</td></tr>
* <tr><td><code>"sensorToWorkMatrix"</code></td><td>Incoming sensor-to-world matrix.</td></tr>
* <tr><td><code>"additionalFlags"</code></td><td>Incoming additional avatar flags.</td></tr>
* <tr><td><code>"parentInfo"</code></td><td>Incoming parent information.</td></tr>
* <tr><td><code>"faceTracker"</code></td><td>Incoming face tracker data.</td></tr>
* <tr><td><code>"jointData"</code></td><td>Incoming joint data.</td></tr>
* <tr><td><code>"jointDefaultPoseFlagsRate"</code></td><td>Incoming joint default pose flags.</td></tr>
* <tr><td><code>"farGrabJointRate"</code></td><td>Incoming far grab joint.</td></tr>
* <tr><td><code>"globalPositionOutbound"</code></td><td>Outgoing global position.</td></tr>
* <tr><td><code>"localPositionOutbound"</code></td><td>Outgoing local position.</td></tr>
* <tr><td><code>"avatarBoundingBoxOutbound"</code></td><td>Outgoing avatar bounding box.</td></tr>
* <tr><td><code>"avatarOrientationOutbound"</code></td><td>Outgoing avatar orientation.</td></tr>
* <tr><td><code>"avatarScaleOutbound"</code></td><td>Outgoing avatar scale.</td></tr>
* <tr><td><code>"lookAtPositionOutbound"</code></td><td>Outgoing look-at position.</td></tr>
* <tr><td><code>"audioLoudnessOutbound"</code></td><td>Outgoing audio loudness.</td></tr>
* <tr><td><code>"sensorToWorkMatrixOutbound"</code></td><td>Outgoing sensor-to-world matrix.</td></tr>
* <tr><td><code>"additionalFlagsOutbound"</code></td><td>Outgoing additional avatar flags.</td></tr>
* <tr><td><code>"parentInfoOutbound"</code></td><td>Outgoing parent information.</td></tr>
* <tr><td><code>"faceTrackerOutbound"</code></td><td>Outgoing face tracker data.</td></tr>
* <tr><td><code>"jointDataOutbound"</code></td><td>Outgoing joint data.</td></tr>
* <tr><td><code>"jointDefaultPoseFlagsOutbound"</code></td><td>Outgoing joint default pose flags.</td></tr>
* <tr><td><code>""</code></td><td>When no rate name is specified, the total incoming data rate is provided.</td></tr>
* </tbody>
* </table>
*
* @typedef {string} AvatarDataRate
*/
float AvatarData::getDataRate(const QString& rateName) const {
if (rateName == "") {
return _parseBufferRate.rate() / BYTES_PER_KILOBIT;
@ -1495,6 +1537,35 @@ float AvatarData::getDataRate(const QString& rateName) const {
return 0.0f;
}
/**jsdoc
* The avatar mixer data comprises different types of data updated at different rates, in Hz.
*
* <table>
* <thead>
* <tr><th>Rate Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"globalPosition"</code></td><td>Global position.</td></tr>
* <tr><td><code>"localPosition"</code></td><td>Local position.</td></tr>
* <tr><td><code>"avatarBoundingBox"</code></td><td>Avatar bounding box.</td></tr>
* <tr><td><code>"avatarOrientation"</code></td><td>Avatar orientation.</td></tr>
* <tr><td><code>"avatarScale"</code></td><td>Avatar scale.</td></tr>
* <tr><td><code>"lookAtPosition"</code></td><td>Look-at position.</td></tr>
* <tr><td><code>"audioLoudness"</code></td><td>Audio loudness.</td></tr>
* <tr><td><code>"sensorToWorkMatrix"</code></td><td>Sensor-to-world matrix.</td></tr>
* <tr><td><code>"additionalFlags"</code></td><td>Additional avatar flags.</td></tr>
* <tr><td><code>"parentInfo"</code></td><td>Parent information.</td></tr>
* <tr><td><code>"faceTracker"</code></td><td>Face tracker data.</td></tr>
* <tr><td><code>"jointData"</code></td><td>Joint data.</td></tr>
* <tr><td><code>"farGrabJointData"</code></td><td>Far grab joint data.</td></tr>
* <tr><td><code>""</code></td><td>When no rate name is specified, the overall update rate is provided.</td></tr>
* </tbody>
* </table>
*
* @typedef {string} AvatarUpdateRate
*/
float AvatarData::getUpdateRate(const QString& rateName) const {
if (rateName == "") {
return _parseBufferUpdateRate.rate();
@ -1920,42 +1991,16 @@ QUrl AvatarData::getWireSafeSkeletonModelURL() const {
}
}
qint64 AvatarData::packTrait(AvatarTraits::TraitType traitType, ExtendedIODevice& destination,
AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
if (traitType == AvatarTraits::SkeletonModelURL) {
QByteArray encodedSkeletonURL = getWireSafeSkeletonModelURL().toEncoded();
if (encodedSkeletonURL.size() > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack simple trait" << traitType << "of size" << encodedSkeletonURL.size()
<< "bytes since it exceeds the maximum size" << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > AvatarTraits::DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
AvatarTraits::TraitWireSize encodedURLSize = encodedSkeletonURL.size();
bytesWritten += destination.writePrimitive(encodedURLSize);
bytesWritten += destination.write(encodedSkeletonURL);
}
return bytesWritten;
QByteArray AvatarData::packSkeletonModelURL() const {
return getWireSafeSkeletonModelURL().toEncoded();
}
void AvatarData::unpackSkeletonModelURL(const QByteArray& data) {
auto skeletonModelURL = QUrl::fromEncoded(data);
setSkeletonModelURL(skeletonModelURL);
}
qint64 AvatarData::packAvatarEntityTraitInstance(AvatarTraits::TraitType traitType,
AvatarTraits::TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
QByteArray AvatarData::packAvatarEntityTraitInstance(AvatarTraits::TraitInstanceID traitInstanceID) {
// grab a read lock on the avatar entities and check for entity data for the given ID
QByteArray entityBinaryData;
_avatarEntitiesLock.withReadLock([this, &entityBinaryData, &traitInstanceID] {
@ -1964,104 +2009,48 @@ qint64 AvatarData::packAvatarEntityTraitInstance(AvatarTraits::TraitType traitTy
}
});
if (entityBinaryData.size() > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << entityBinaryData.size()
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > AvatarTraits::DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!entityBinaryData.isNull()) {
AvatarTraits::TraitWireSize entityBinarySize = entityBinaryData.size();
bytesWritten += destination.writePrimitive(entityBinarySize);
bytesWritten += destination.write(entityBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
return entityBinaryData;
}
qint64 AvatarData::packGrabTraitInstance(AvatarTraits::TraitType traitType,
AvatarTraits::TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
QByteArray AvatarData::packGrabTraitInstance(AvatarTraits::TraitInstanceID traitInstanceID) {
// grab a read lock on the avatar grabs and check for grab data for the given ID
QByteArray grabBinaryData;
_avatarGrabsLock.withReadLock([this, &grabBinaryData, &traitInstanceID] {
if (_avatarGrabData.contains(traitInstanceID)) {
grabBinaryData = _avatarGrabData[traitInstanceID];
}
});
if (grabBinaryData.size() > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << grabBinaryData.size()
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > AvatarTraits::DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!grabBinaryData.isNull()) {
AvatarTraits::TraitWireSize grabBinarySize = grabBinaryData.size();
bytesWritten += destination.writePrimitive(grabBinarySize);
bytesWritten += destination.write(grabBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
return grabBinaryData;
}
qint64 AvatarData::packTraitInstance(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
qint64 bytesWritten = 0;
QByteArray AvatarData::packTrait(AvatarTraits::TraitType traitType) const {
QByteArray traitBinaryData;
// Call packer function
if (traitType == AvatarTraits::SkeletonModelURL) {
traitBinaryData = packSkeletonModelURL();
}
return traitBinaryData;
}
QByteArray AvatarData::packTraitInstance(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID traitInstanceID) {
QByteArray traitBinaryData;
// Call packer function
if (traitType == AvatarTraits::AvatarEntity) {
bytesWritten += packAvatarEntityTraitInstance(traitType, traitInstanceID, destination, traitVersion);
traitBinaryData = packAvatarEntityTraitInstance(traitInstanceID);
} else if (traitType == AvatarTraits::Grab) {
bytesWritten += packGrabTraitInstance(traitType, traitInstanceID, destination, traitVersion);
traitBinaryData = packGrabTraitInstance(traitInstanceID);
}
return bytesWritten;
}
void AvatarData::prepareResetTraitInstances() {
if (_clientTraitsHandler) {
_avatarEntitiesLock.withReadLock([this]{
foreach (auto entityID, _packedAvatarEntityData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::AvatarEntity, entityID);
}
foreach (auto grabID, _avatarGrabData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::Grab, grabID);
}
});
}
return traitBinaryData;
}
void AvatarData::processTrait(AvatarTraits::TraitType traitType, QByteArray traitBinaryData) {
if (traitType == AvatarTraits::SkeletonModelURL) {
// get the URL from the binary data
auto skeletonModelURL = QUrl::fromEncoded(traitBinaryData);
setSkeletonModelURL(skeletonModelURL);
unpackSkeletonModelURL(traitBinaryData);
}
}
@ -2082,6 +2071,19 @@ void AvatarData::processDeletedTraitInstance(AvatarTraits::TraitType traitType,
}
}
void AvatarData::prepareResetTraitInstances() {
if (_clientTraitsHandler) {
_avatarEntitiesLock.withReadLock([this]{
foreach (auto entityID, _packedAvatarEntityData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::AvatarEntity, entityID);
}
foreach (auto grabID, _avatarGrabData.keys()) {
_clientTraitsHandler->markInstancedTraitUpdated(AvatarTraits::Grab, grabID);
}
});
}
}
QByteArray AvatarData::identityByteArray(bool setIsReplicated) const {
QByteArray identityData;
QDataStream identityStream(&identityData, QIODevice::Append);
@ -2730,13 +2732,16 @@ glm::vec3 AvatarData::getAbsoluteJointTranslationInObjectFrame(int index) const
}
/**jsdoc
* Information on an attachment worn by the avatar.
* @typedef {object} AttachmentData
* @property {string} modelUrl
* @property {string} jointName
* @property {Vec3} translation
* @property {Vec3} rotation
* @property {number} scale
* @property {boolean} soft
* @property {string} modelUrl - The URL of the model file. Models can be FBX or OBJ format.
* @property {string} jointName - The name of the joint that the attachment is attached to.
* @property {Vec3} translation - The offset from the joint that the attachment is positioned at.
* @property {Vec3} rotation - The rotation applied to the model relative to the joint orientation.
* @property {number} scale - The scale applied to the attachment model.
* @property {boolean} soft - If <code>true</code> and the model has a skeleton, the bones of the attached model's skeleton are
* rotated to fit the avatar's current pose. If <code>true</code>, the <code>translation</code>, <code>rotation</code>, and
* <code>scale</code> parameters are ignored.
*/
QVariant AttachmentData::toVariant() const {
QVariantMap result;
@ -2942,6 +2947,10 @@ float AvatarData::_avatarSortCoefficientSize { 8.0f };
float AvatarData::_avatarSortCoefficientCenter { 0.25f };
float AvatarData::_avatarSortCoefficientAge { 1.0f };
/**jsdoc
* An object with the UUIDs of avatar entities as keys and avatar entity properties objects as values.
* @typedef {Object.<Uuid, Entities.EntityProperties>} AvatarEntityMap
*/
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
QScriptValue obj = engine->newObject();
for (auto entityID : value.keys()) {

File diff suppressed because it is too large

View file

@ -0,0 +1,135 @@
//
// AvatarTraits.cpp
// libraries/avatars/src
//
// Created by Clement Brisset on 3/19/19.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AvatarTraits.h"
#include <ExtendedIODevice.h>
#include "AvatarData.h"
namespace AvatarTraits {
qint64 packTrait(TraitType traitType, ExtendedIODevice& destination, const AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTrait(traitType);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack simple trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size" << MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
return bytesWritten;
}
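packTrait() writes a simple trait as [TraitType][TraitWireSize][payload]. A hedged sketch of the matching read side, assuming readPrimitive()/read() counterparts with the same primitive widths as writePrimitive()/write(); the helper below is illustrative and not part of this diff:
    // Hypothetical reader for the layout produced by packTrait() above.
    bool readSimpleTrait(ExtendedIODevice& source, TraitType& traitType, QByteArray& traitBinaryData) {
        source.readPrimitive(&traitType);        // same width as the TraitType written above
        TraitWireSize wireSize = 0;
        source.readPrimitive(&wireSize);         // payload size written by packTrait()
        if (wireSize < 0 || wireSize > MAXIMUM_TRAIT_SIZE) {
            return false;                        // mirrors the size check on the pack side
        }
        traitBinaryData = source.read(wireSize);
        return traitBinaryData.size() == wireSize;
    }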
qint64 packVersionedTrait(TraitType traitType, ExtendedIODevice& destination,
TraitVersion traitVersion, const AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTrait(traitType);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack simple trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size" << MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.writePrimitive((TraitVersion)traitVersion);
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
return bytesWritten;
}
qint64 packTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTraitInstance(traitType, traitInstanceID);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!traitBinaryData.isNull()) {
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
}
qint64 packVersionedTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, TraitVersion traitVersion,
AvatarData& avatar) {
// Call packer function
auto traitBinaryData = avatar.packTraitInstance(traitType, traitInstanceID);
auto traitBinaryDataSize = traitBinaryData.size();
// Verify packed data
if (traitBinaryDataSize > AvatarTraits::MAXIMUM_TRAIT_SIZE) {
qWarning() << "Refusing to pack instanced trait" << traitType << "of size" << traitBinaryDataSize
<< "bytes since it exceeds the maximum size " << AvatarTraits::MAXIMUM_TRAIT_SIZE << "bytes";
return 0;
}
// Write packed data to stream
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive((TraitType)traitType);
bytesWritten += destination.writePrimitive((TraitVersion)traitVersion);
bytesWritten += destination.write(traitInstanceID.toRfc4122());
if (!traitBinaryData.isNull()) {
bytesWritten += destination.writePrimitive((TraitWireSize)traitBinaryDataSize);
bytesWritten += destination.write(traitBinaryData);
} else {
bytesWritten += destination.writePrimitive(AvatarTraits::DELETED_TRAIT_SIZE);
}
return bytesWritten;
}
qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
TraitVersion traitVersion) {
qint64 bytesWritten = 0;
bytesWritten += destination.writePrimitive(traitType);
if (traitVersion > DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
bytesWritten += destination.write(instanceID.toRfc4122());
bytesWritten += destination.writePrimitive(DELETED_TRAIT_SIZE);
return bytesWritten;
}
};
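For the unversioned instanced path, packTraitInstance() writes [TraitType][instance UUID, 16 bytes RFC 4122][TraitWireSize][payload], and a removed instance is signalled by DELETED_TRAIT_SIZE in place of a real size with no payload (packInstancedTraitDelete() and the null-payload branch above). A hedged sketch of how a consumer could tell an update from a delete; the reader below is illustrative, assuming readPrimitive()/read() counterparts to the write calls:
    // Hypothetical consumer of the unversioned instanced-trait layout written above.
    void readInstancedTrait(ExtendedIODevice& source) {
        AvatarTraits::TraitType traitType;
        source.readPrimitive(&traitType);
        QUuid instanceID = QUuid::fromRfc4122(source.read(16));   // RFC 4122 UUIDs are 16 bytes
        AvatarTraits::TraitWireSize wireSize = 0;
        source.readPrimitive(&wireSize);
        if (wireSize == AvatarTraits::DELETED_TRAIT_SIZE) {
            return;                                   // delete: no payload follows
        }
        QByteArray payload = source.read(wireSize);   // update: hand payload and instanceID to the unpack path
        // ... dispatch on traitType / instanceID ...
    }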

View file

@ -14,20 +14,35 @@
#include <algorithm>
#include <cstdint>
#include <array>
#include <vector>
#include <QtCore/QUuid>
class ExtendedIODevice;
class AvatarData;
namespace AvatarTraits {
enum TraitType : int8_t {
// Null trait
NullTrait = -1,
SkeletonModelURL,
// Simple traits
SkeletonModelURL = 0,
// Instanced traits
FirstInstancedTrait,
AvatarEntity = FirstInstancedTrait,
Grab,
// Traits count
TotalTraitTypes
};
const int NUM_SIMPLE_TRAITS = (int)FirstInstancedTrait;
const int NUM_INSTANCED_TRAITS = (int)TotalTraitTypes - (int)FirstInstancedTrait;
const int NUM_TRAITS = (int)TotalTraitTypes;
using TraitInstanceID = QUuid;
inline bool isSimpleTrait(TraitType traitType) {
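The count constants above follow from the enum layout: simple traits occupy [0, FirstInstancedTrait) and instanced traits occupy [FirstInstancedTrait, TotalTraitTypes). isSimpleTrait() is cut off by this hunk; the checks below are an illustrative sketch of what that layout implies, not the header's actual definitions:
    // Sketch of the range checks implied by the enum layout above (illustrative names).
    inline bool isSimpleTraitSketch(TraitType traitType) {
        return traitType > NullTrait && traitType < FirstInstancedTrait;
    }
    inline bool isInstancedTraitSketch(TraitType traitType) {
        return traitType >= FirstInstancedTrait && traitType < TotalTraitTypes;
    }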
@ -46,22 +61,19 @@ namespace AvatarTraits {
const TraitMessageSequence FIRST_TRAIT_SEQUENCE = 0;
const TraitMessageSequence MAX_TRAIT_SEQUENCE = INT64_MAX;
inline qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
TraitVersion traitVersion = NULL_TRAIT_VERSION) {
qint64 bytesWritten = 0;
qint64 packTrait(TraitType traitType, ExtendedIODevice& destination, const AvatarData& avatar);
qint64 packVersionedTrait(TraitType traitType, ExtendedIODevice& destination,
TraitVersion traitVersion, const AvatarData& avatar);
bytesWritten += destination.writePrimitive(traitType);
qint64 packTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, AvatarData& avatar);
qint64 packVersionedTraitInstance(TraitType traitType, TraitInstanceID traitInstanceID,
ExtendedIODevice& destination, TraitVersion traitVersion,
AvatarData& avatar);
if (traitVersion > DEFAULT_TRAIT_VERSION) {
bytesWritten += destination.writePrimitive(traitVersion);
}
qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
TraitVersion traitVersion = NULL_TRAIT_VERSION);
bytesWritten += destination.write(instanceID.toRfc4122());
bytesWritten += destination.writePrimitive(DELETED_TRAIT_SIZE);
return bytesWritten;
}
};
#endif // hifi_AvatarTraits_h

View file

@ -106,9 +106,10 @@ int ClientTraitsHandler::sendChangedTraitsToMixer() {
auto traitType = static_cast<AvatarTraits::TraitType>(std::distance(traitStatusesCopy.simpleCBegin(), simpleIt));
if (initialSend || *simpleIt == Updated) {
if (traitType == AvatarTraits::SkeletonModelURL) {
bytesWritten += _owningAvatar->packTrait(traitType, *traitsPacketList);
bytesWritten += AvatarTraits::packTrait(traitType, *traitsPacketList, *_owningAvatar);
if (traitType == AvatarTraits::SkeletonModelURL) {
// keep track of our skeleton version in case we get an override back
_currentSkeletonVersion = _currentTraitVersion;
}
@ -124,7 +125,9 @@ int ClientTraitsHandler::sendChangedTraitsToMixer() {
|| instanceIDValuePair.value == Updated) {
// this is a changed trait we need to send, or we haven't sent out trait information yet
// ask the owning avatar to pack it
bytesWritten += _owningAvatar->packTraitInstance(instancedIt->traitType, instanceIDValuePair.id, *traitsPacketList);
bytesWritten += AvatarTraits::packTraitInstance(instancedIt->traitType, instanceIDValuePair.id,
*traitsPacketList, *_owningAvatar);
} else if (!initialSend && instanceIDValuePair.value == Deleted) {
// pack delete for this trait instance
bytesWritten += AvatarTraits::packInstancedTraitDelete(instancedIt->traitType, instanceIDValuePair.id,
@ -162,11 +165,11 @@ void ClientTraitsHandler::processTraitOverride(QSharedPointer<ReceivedMessage> m
// override the skeleton URL but do not mark the trait as having changed
// so that we don't unnecessarily send a new trait packet to the mixer with the overridden URL
auto encodedSkeletonURL = QUrl::fromEncoded(message->readWithoutCopy(traitBinarySize));
auto hasChangesBefore = _hasChangedTraits;
_owningAvatar->setSkeletonModelURL(encodedSkeletonURL);
auto traitBinaryData = message->readWithoutCopy(traitBinarySize);
_owningAvatar->processTrait(traitType, traitBinaryData);
// setSkeletonModelURL will flag us for changes to the SkeletonModelURL so we reset some state here to
// avoid unnecessarily sending the overridden skeleton model URL back to the mixer

View file

@ -343,6 +343,14 @@ glm::mat4 ScriptAvatarData::getControllerRightHandMatrix() const {
// END
//
bool ScriptAvatarData::getHasPriority() const {
if (AvatarSharedPointer sharedAvatarData = _avatarData.lock()) {
return sharedAvatarData->getHasPriority();
} else {
return false;
}
}
glm::quat ScriptAvatarData::getAbsoluteJointRotationInObjectFrame(int index) const {
if (AvatarSharedPointer sharedAvatarData = _avatarData.lock()) {
return sharedAvatarData->getAbsoluteJointRotationInObjectFrame(index);

View file

@ -68,6 +68,8 @@ class ScriptAvatarData : public QObject {
Q_PROPERTY(glm::mat4 controllerLeftHandMatrix READ getControllerLeftHandMatrix)
Q_PROPERTY(glm::mat4 controllerRightHandMatrix READ getControllerRightHandMatrix)
Q_PROPERTY(bool hasPriority READ getHasPriority)
public:
ScriptAvatarData(AvatarSharedPointer avatarData);
@ -133,6 +135,8 @@ public:
glm::mat4 getControllerLeftHandMatrix() const;
glm::mat4 getControllerRightHandMatrix() const;
bool getHasPriority() const;
signals:
void displayNameChanged();
void sessionDisplayNameChanged();

View file

@ -109,7 +109,7 @@ bool Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
return Parent::internalActivate();
}
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra(const gpu::FramebufferPointer& compositeFramebuffer) {
#if defined(Q_OS_ANDROID)
auto& virtualPadManager = VirtualPad::Manager::instance();
if(virtualPadManager.getLeftVirtualPad()->isShown()) {
@ -121,7 +121,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setFramebuffer(compositeFramebuffer);
batch.resetViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_cursorPipeline);
@ -140,7 +140,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
});
}
#endif
Parent::compositeExtra();
Parent::compositeExtra(compositeFramebuffer);
}
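This change threads the composite framebuffer through as an argument instead of reading a _compositeFramebuffer member, so every compositing step names its render target explicitly. A hedged sketch of what the parent-side flow presumably looks like after the change; apart from compositeExtra(), the function names are assumptions, not taken from this diff:
    // Assumed parent-side flow (illustrative): the per-frame composite framebuffer is
    // created by the caller and handed down rather than stored on the plugin.
    void OpenGLDisplayPlugin::compositeLayers(const gpu::FramebufferPointer& compositeFramebuffer) {
        compositeScene(compositeFramebuffer);     // scene layer into the passed-in target
        compositeOverlay(compositeFramebuffer);   // UI overlay into the same target
        compositeExtra(compositeFramebuffer);     // virtual; Basic2DWindow adds the virtual pad cursor here
    }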
static const uint32_t MIN_THROTTLE_CHECK_FRAMES = 60;

View file

@ -33,7 +33,7 @@ public:
virtual bool isThrottled() const override;
virtual void compositeExtra() override;
virtual void compositeExtra(const gpu::FramebufferPointer&) override;
virtual void pluginUpdate() override {};

Some files were not shown because too many files have changed in this diff