Merge remote-tracking branch 'upstream/master' into avatar-hero-zone-improvements
Fix jsdoc conflict, also minor clean-up.
commit 6b21fc1af7
166 changed files with 6026 additions and 2428 deletions
@@ -0,0 +1,39 @@
#version 320 es

precision highp float;
precision highp sampler2D;

layout(location = 0) in vec4 vTexCoordLR;

layout(location = 0) out vec4 FragColorL;
layout(location = 1) out vec4 FragColorR;

uniform sampler2D sampler;

// https://software.intel.com/en-us/node/503873

// sRGB ====> Linear
vec3 color_sRGBToLinear(vec3 srgb) {
    return mix(pow((srgb + vec3(0.055)) / vec3(1.055), vec3(2.4)), srgb / vec3(12.92), vec3(lessThanEqual(srgb, vec3(0.04045))));
}

vec4 color_sRGBAToLinear(vec4 srgba) {
    return vec4(color_sRGBToLinear(srgba.xyz), srgba.w);
}

// Linear ====> sRGB
vec3 color_LinearTosRGB(vec3 lrgb) {
    return mix(vec3(1.055) * pow(vec3(lrgb), vec3(0.41666)) - vec3(0.055), vec3(lrgb) * vec3(12.92), vec3(lessThan(lrgb, vec3(0.0031308))));
}

vec4 color_LinearTosRGBA(vec4 lrgba) {
    return vec4(color_LinearTosRGB(lrgba.xyz), lrgba.w);
}

// FIXME switch to texelfetch for getting from the source texture?
void main() {
    //FragColorL = color_LinearTosRGBA(texture(sampler, vTexCoordLR.xy));
    //FragColorR = color_LinearTosRGBA(texture(sampler, vTexCoordLR.zw));
    FragColorL = texture(sampler, vTexCoordLR.xy);
    FragColorR = texture(sampler, vTexCoordLR.zw);
}
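For reference, the two mix()/lessThanEqual() expressions in the new fragment shader implement the standard piecewise sRGB transfer functions:

\[ \mathrm{linear}(c) = \begin{cases} c / 12.92, & c \le 0.04045 \\ \left(\dfrac{c + 0.055}{1.055}\right)^{2.4}, & c > 0.04045 \end{cases} \qquad \mathrm{srgb}(c) = \begin{cases} 12.92\,c, & c < 0.0031308 \\ 1.055\,c^{1/2.4} - 0.055, & \text{otherwise} \end{cases} \]

(The shader uses 0.41666 as an approximation of 1/2.4.)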
@@ -0,0 +1,21 @@
#version 320 es

layout(location = 0) out vec4 vTexCoordLR;

void main(void) {
    const float depth = 0.0;
    const vec4 UNIT_QUAD[4] = vec4[4](
        vec4(-1.0, -1.0, depth, 1.0),
        vec4(1.0, -1.0, depth, 1.0),
        vec4(-1.0, 1.0, depth, 1.0),
        vec4(1.0, 1.0, depth, 1.0)
    );
    vec4 pos = UNIT_QUAD[gl_VertexID];
    gl_Position = pos;
    vTexCoordLR.xy = pos.xy;
    vTexCoordLR.xy += 1.0;
    vTexCoordLR.y *= 0.5;
    vTexCoordLR.x *= 0.25;
    vTexCoordLR.zw = vTexCoordLR.xy;
    vTexCoordLR.z += 0.5;
}
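The coordinate arithmetic in this vertex shader maps the clip-space quad onto the two halves of a side-by-side stereo source: for pos.xy in [-1, 1],

\[ uv_{L} = \left( \tfrac{x + 1}{4},\; \tfrac{y + 1}{2} \right), \qquad uv_{R} = uv_{L} + \left( \tfrac{1}{2},\, 0 \right), \]

so the left output samples u in [0, 0.5] and the right output samples u in [0.5, 1] of the shared texture.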
@@ -7,6 +7,7 @@
//
package io.highfidelity.oculus;

import android.content.res.AssetManager;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
@@ -24,7 +25,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
private static final String TAG = OculusMobileActivity.class.getSimpleName();
static { System.loadLibrary("oculusMobile"); }

private native void nativeOnCreate();
private native void nativeOnCreate(AssetManager assetManager);
private native static void nativeOnResume();
private native static void nativeOnPause();
private native static void nativeOnSurfaceChanged(Surface s);
@@ -53,7 +54,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
mView = new SurfaceView(this);
mView.getHolder().addCallback(this);

nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnCreate();
}

@@ -81,7 +82,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
Log.w(TAG, "QQQ onResume");
super.onResume();
//Reconnect the global reference back to handler
nativeOnCreate();
nativeOnCreate(getAssets());

questNativeOnResume();
nativeOnResume();
@@ -52,6 +52,8 @@
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h

#include <hfm/ModelFormatRegistry.h>

#include "entities/AssignmentParentFinder.h"
#include "AssignmentDynamicFactory.h"
#include "RecordingScriptingInterface.h"
@@ -99,6 +101,9 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();

DependencyManager::set<ModelFormatRegistry>();
DependencyManager::set<ModelCache>();

// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();

@@ -819,6 +824,9 @@ void Agent::aboutToFinish() {

DependencyManager::get<ResourceManager>()->cleanup();

DependencyManager::destroy<ModelFormatRegistry>();
DependencyManager::destroy<ModelCache>();

DependencyManager::destroy<PluginManager>();

// cleanup the AudioInjectorManager (and any still running injectors)
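The set<T>(), get<T>() and destroy<T>() calls in the hunks above use the project's DependencyManager pattern: a process-wide, type-keyed registry of singletons whose construction and teardown order is made explicit (here the new ModelFormatRegistry and ModelCache entries). A minimal, self-contained model of the idea (not the engine's actual implementation, and with a made-up ModelCacheLike type) looks roughly like this:

#include <memory>
#include <typeindex>
#include <unordered_map>

// Simplified stand-in for the DependencyManager pattern used in the diff.
class Registry {
public:
    template <typename T, typename... Args>
    static std::shared_ptr<T> set(Args&&... args) {
        auto instance = std::make_shared<T>(std::forward<Args>(args)...);
        instances()[std::type_index(typeid(T))] = instance;
        return instance;
    }

    template <typename T>
    static std::shared_ptr<T> get() {
        auto it = instances().find(std::type_index(typeid(T)));
        return it != instances().end() ? std::static_pointer_cast<T>(it->second) : nullptr;
    }

    template <typename T>
    static void destroy() {
        instances().erase(std::type_index(typeid(T)));
    }

private:
    static std::unordered_map<std::type_index, std::shared_ptr<void>>& instances() {
        static std::unordered_map<std::type_index, std::shared_ptr<void>> map;
        return map;
    }
};

struct ModelCacheLike { /* hypothetical dependency */ };

int main() {
    Registry::set<ModelCacheLike>();               // register at startup
    auto cache = Registry::get<ModelCacheLike>();  // non-null after set<>()
    Registry::destroy<ModelCacheLike>();           // explicit teardown, as in aboutToFinish()
    return cache != nullptr ? 0 : 1;
}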
@@ -18,16 +18,25 @@
#include "Agent.h"

/**jsdoc
* The <code>Agent</code> API enables an assignment client to emulate an avatar. Setting <code>isAvatar = true</code> connects
* the assignment client to the avatar and audio mixers, and enables the {@link Avatar} API to be used.
*
* @namespace Agent
*
* @hifi-assignment-client
*
* @property {boolean} isAvatar
* @property {boolean} isPlayingAvatarSound <em>Read-only.</em>
* @property {boolean} isListeningToAudioStream
* @property {boolean} isNoiseGateEnabled
* @property {number} lastReceivedAudioLoudness <em>Read-only.</em>
* @property {Uuid} sessionUUID <em>Read-only.</em>
* @property {boolean} isAvatar - <code>true</code> if the assignment client script is emulating an avatar, otherwise
*     <code>false</code>.
* @property {boolean} isPlayingAvatarSound - <code>true</code> if the script has a sound to play, otherwise <code>false</code>.
*     Sounds are played when <code>isAvatar</code> is <code>true</code>, from the position and with the orientation of the
*     scripted avatar's head. <em>Read-only.</em>
* @property {boolean} isListeningToAudioStream - <code>true</code> if the agent is "listening" to the audio stream from the
*     domain, otherwise <code>false</code>.
* @property {boolean} isNoiseGateEnabled - <code>true</code> if the noise gate is enabled, otherwise <code>false</code>. When
*     enabled, the input audio stream is blocked (fully attenuated) if it falls below an adaptive threshold.
* @property {number} lastReceivedAudioLoudness - The current loudness of the audio input. Nominal range [<code>0.0</code> (no
*     sound) – <code>1.0</code> (the onset of clipping)]. <em>Read-only.</em>
* @property {Uuid} sessionUUID - The unique ID associated with the agent's current session in the domain. <em>Read-only.</em>
*/
class AgentScriptingInterface : public QObject {
Q_OBJECT
@@ -54,20 +63,43 @@ public:

public slots:
/**jsdoc
* Sets whether the script should emulate an avatar.
* @function Agent.setIsAvatar
* @param {boolean} isAvatar
* @param {boolean} isAvatar - <code>true</code> if the script emulates an avatar, otherwise <code>false</code>.
* @example <caption>Make an assignment client script emulate an avatar.</caption>
* (function () {
*     Agent.setIsAvatar(true);
*     Avatar.displayName = "AC avatar";
*     print("Position: " + JSON.stringify(Avatar.position)); // 0, 0, 0
* }());
*/
void setIsAvatar(bool isAvatar) const { _agent->setIsAvatar(isAvatar); }

/**jsdoc
* Checks whether the script is emulating an avatar.
* @function Agent.isAvatar
* @returns {boolean}
* @returns {boolean} <code>true</code> if the script is emulating an avatar, otherwise <code>false</code>.
* @example <caption>Check whether the agent is emulating an avatar.</caption>
* (function () {
*     print("Agent is avatar: " + Agent.isAvatar());
*     print("Agent is avatar: " + Agent.isAvatar); // Same result.
* }());
*/
bool isAvatar() const { return _agent->isAvatar(); }

/**jsdoc
* Plays a sound from the position and with the orientation of the emulated avatar's head. No sound is played unless
* <code>isAvatar == true</code>.
* @function Agent.playAvatarSound
* @param {object} avatarSound
* @param {SoundObject} avatarSound - The sound played.
* @example <caption>Play a sound from an emulated avatar.</caption>
* (function () {
*     Agent.isAvatar = true;
*     var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
*     Script.setTimeout(function () { // Give the sound time to load.
*         Agent.playAvatarSound(sound);
*     }, 1000);
* }());
*/
void playAvatarSound(SharedSoundPointer avatarSound) const { _agent->playAvatarSound(avatarSound); }
@@ -470,8 +470,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
SortableAvatar(avatarNodeData, sourceAvatarNode, lastEncodeTime));
}

// If Avatar A's PAL WAS open but is no longer open, AND
// Avatar A is ignoring Avatar B OR Avatar B is ignoring Avatar A...
// If Node A's PAL WAS open but is no longer open, AND
// Node A is ignoring Avatar B OR Node B is ignoring Avatar A...
//
// This is a bit heavy-handed still - there are cases where a kill packet
// will be sent when it doesn't need to be (but where it _should_ be OK to send).
@@ -540,7 +540,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
const MixerAvatar* sourceAvatar = sourceNodeData->getConstAvatarData();

// Typically all out-of-view avatars but such avatars' priorities will rise with time:
bool isLowerPriority = currentVariant != kHero && sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD; // XXX: hero handling?
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;

if (isLowerPriority) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
@@ -549,8 +549,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
destinationNodeData->incrementAvatarInView();

// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
// If the time that the mixer sent AVATAR DATA about Avatar B to Node A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Node A.
if (sourceAvatar->hasProcessedFirstIdentity()
&& destinationNodeData->getLastBroadcastTime(sourceNode->getLocalID()) <= sourceNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(*identityPacketList, sourceNodeData, *destinationNode);
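The isLowerPriority branch above is the mixer's per-avatar level-of-detail decision: avatars whose sort priority falls at or below OUT_OF_VIEW_THRESHOLD get only minimal data, everything else gets a mostly full update. A self-contained sketch of that decision, with made-up threshold and ratio values standing in for the engine's constants:

#include <cstdio>
#include <random>

// Hypothetical stand-ins for the mixer's detail levels and tuning constants.
enum class Detail { MinimumData, CullSmallData, SendAllData };

constexpr float OUT_OF_VIEW_THRESHOLD = 0.0f;           // assumption, not the engine's value
constexpr float AVATAR_SEND_FULL_UPDATE_RATIO = 0.75f;  // assumption

Detail chooseDetail(float priority, std::mt19937& generator) {
    if (priority <= OUT_OF_VIEW_THRESHOLD) {
        return Detail::MinimumData;  // out of view: send the bare minimum
    }
    std::uniform_real_distribution<float> distribution(0.0f, 1.0f);
    // Most frames send everything; occasionally cull small joint data to save bandwidth.
    return distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? Detail::SendAllData
                                                                   : Detail::CullSmallData;
}

int main() {
    std::mt19937 generator{42};
    std::printf("%d\n", static_cast<int>(chooseDetail(1.0f, generator)));
    return 0;
}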
@@ -20,25 +20,29 @@

/**jsdoc
* The <code>Avatar</code> API is used to manipulate scriptable avatars on the domain. This API is a subset of the
* {@link MyAvatar} API.
* {@link MyAvatar} API. To enable this API, set {@link Agent|Agent.isAvatar} to <code>true</code>.
*
* <p>For Interface, client entity, and avatar scripts, see {@link MyAvatar}.</p>
*
* <p><strong>Note:</strong> In the examples, use "<code>Avatar</code>" instead of "<code>MyAvatar</code>".</p>
*
* @namespace Avatar
*
* @hifi-assignment-client
*
* @property {Vec3} position
* @property {number} scale
* @property {number} density <em>Read-only.</em>
* @property {Vec3} handPosition
* @property {number} bodyYaw - The rotation left or right about an axis running from the head to the feet of the avatar.
* @comment IMPORTANT: This group of properties is copied from AvatarData.h; they should NOT be edited here.
* @property {Vec3} position - The position of the avatar.
* @property {number} scale=1.0 - The scale of the avatar. The value can be set to anything between <code>0.005</code> and
*     <code>1000.0</code>. When the scale value is fetched, it may temporarily be further limited by the domain's settings.
* @property {number} density - The density of the avatar in kg/m<sup>3</sup>. The density is used to work out its mass in
*     the application of physics. <em>Read-only.</em>
* @property {Vec3} handPosition - A user-defined hand position, in world coordinates. The position moves with the avatar
*     but is otherwise not used or changed by Interface.
* @property {number} bodyYaw - The left or right rotation about an axis running from the head to the feet of the avatar.
*     Yaw is sometimes called "heading".
* @property {number} bodyPitch - The rotation about an axis running from shoulder to shoulder of the avatar. Pitch is
*     sometimes called "elevation".
* @property {number} bodyRoll - The rotation about an axis running from the chest to the back of the avatar. Roll is
*     sometimes called "bank".
* @property {Quat} orientation
* @property {Quat} orientation - The orientation of the avatar.
* @property {Quat} headOrientation - The orientation of the avatar's head.
* @property {number} headPitch - The rotation about an axis running from ear to ear of the avatar's head. Pitch is
*     sometimes called "elevation".
@@ -46,79 +50,36 @@
* head. Yaw is sometimes called "heading".
* @property {number} headRoll - The rotation about an axis running from the nose to the back of the avatar's head. Roll is
*     sometimes called "bank".
* @property {Vec3} velocity
* @property {Vec3} angularVelocity
* @property {number} audioLoudness
* @property {number} audioAverageLoudness
* @property {string} displayName
* @property {string} sessionDisplayName - Sanitized, defaulted version displayName that is defined by the AvatarMixer
*     rather than by Interface clients. The result is unique among all avatars present at the time.
* @property {boolean} lookAtSnappingEnabled
* @property {string} skeletonModelURL
* @property {AttachmentData[]} attachmentData
* @property {Vec3} velocity - The current velocity of the avatar.
* @property {Vec3} angularVelocity - The current angular velocity of the avatar.
* @property {number} audioLoudness - The instantaneous loudness of the audio input that the avatar is injecting into the
*     domain.
* @property {number} audioAverageLoudness - The rolling average loudness of the audio input that the avatar is injecting
*     into the domain.
* @property {string} displayName - The avatar's display name.
* @property {string} sessionDisplayName - <code>displayName's</code> sanitized and default version defined by the avatar mixer
*     rather than Interface clients. The result is unique among all avatars present in the domain at the time.
* @property {boolean} lookAtSnappingEnabled=true - <code>true</code> if the avatar's eyes snap to look at another avatar's
*     eyes when the other avatar is in the line of sight and also has <code>lookAtSnappingEnabled == true</code>.
* @property {string} skeletonModelURL - The avatar's FST file.
* @property {AttachmentData[]} attachmentData - Information on the avatar's attachments.<br />
*     <strong>Deprecated:</strong> Use avatar entities instead.
* @property {string[]} jointNames - The list of joints in the current avatar model. <em>Read-only.</em>
* @property {Uuid} sessionUUID <em>Read-only.</em>
* @property {Mat4} sensorToWorldMatrix <em>Read-only.</em>
* @property {Mat4} controllerLeftHandMatrix <em>Read-only.</em>
* @property {Mat4} controllerRightHandMatrix <em>Read-only.</em>
* @property {number} sensorToWorldScale <em>Read-only.</em>
* @property {Uuid} sessionUUID - Unique ID of the avatar in the domain. <em>Read-only.</em>
* @property {Mat4} sensorToWorldMatrix - The scale, rotation, and translation transform from the user's real world to the
*     avatar's size, orientation, and position in the virtual world. <em>Read-only.</em>
* @property {Mat4} controllerLeftHandMatrix - The rotation and translation of the left hand controller relative to the
*     avatar. <em>Read-only.</em>
* @property {Mat4} controllerRightHandMatrix - The rotation and translation of the right hand controller relative to the
*     avatar. <em>Read-only.</em>
* @property {number} sensorToWorldScale - The scale that transforms dimensions in the user's real world to the avatar's
*     size in the virtual world. <em>Read-only.</em>
*
* @borrows MyAvatar.getDomainMinScale as getDomainMinScale
* @borrows MyAvatar.getDomainMaxScale as getDomainMaxScale
* @borrows MyAvatar.canMeasureEyeHeight as canMeasureEyeHeight
* @borrows MyAvatar.getEyeHeight as getEyeHeight
* @borrows MyAvatar.getHeight as getHeight
* @borrows MyAvatar.setHandState as setHandState
* @borrows MyAvatar.getHandState as getHandState
* @borrows MyAvatar.setRawJointData as setRawJointData
* @borrows MyAvatar.setJointData as setJointData
* @borrows MyAvatar.setJointRotation as setJointRotation
* @borrows MyAvatar.setJointTranslation as setJointTranslation
* @borrows MyAvatar.clearJointData as clearJointData
* @borrows MyAvatar.isJointDataValid as isJointDataValid
* @borrows MyAvatar.getJointRotation as getJointRotation
* @borrows MyAvatar.getJointTranslation as getJointTranslation
* @borrows MyAvatar.getJointRotations as getJointRotations
* @borrows MyAvatar.getJointTranslations as getJointTranslations
* @borrows MyAvatar.setJointRotations as setJointRotations
* @borrows MyAvatar.setJointTranslations as setJointTranslations
* @borrows MyAvatar.clearJointsData as clearJointsData
* @borrows MyAvatar.getJointIndex as getJointIndex
* @borrows MyAvatar.getJointNames as getJointNames
* @borrows MyAvatar.setBlendshape as setBlendshape
* @borrows MyAvatar.getAttachmentsVariant as getAttachmentsVariant
* @borrows MyAvatar.setAttachmentsVariant as setAttachmentsVariant
* @borrows MyAvatar.updateAvatarEntity as updateAvatarEntity
* @borrows MyAvatar.clearAvatarEntity as clearAvatarEntity
* @borrows MyAvatar.setForceFaceTrackerConnected as setForceFaceTrackerConnected
* @borrows MyAvatar.getAttachmentData as getAttachmentData
* @borrows MyAvatar.setAttachmentData as setAttachmentData
* @borrows MyAvatar.attach as attach
* @borrows MyAvatar.detachOne as detachOne
* @borrows MyAvatar.detachAll as detachAll
* @borrows MyAvatar.getAvatarEntityData as getAvatarEntityData
* @borrows MyAvatar.setAvatarEntityData as setAvatarEntityData
* @borrows MyAvatar.getSensorToWorldMatrix as getSensorToWorldMatrix
* @borrows MyAvatar.getSensorToWorldScale as getSensorToWorldScale
* @borrows MyAvatar.getControllerLeftHandMatrix as getControllerLeftHandMatrix
* @borrows MyAvatar.getControllerRightHandMatrix as getControllerRightHandMatrix
* @borrows MyAvatar.getDataRate as getDataRate
* @borrows MyAvatar.getUpdateRate as getUpdateRate
* @borrows MyAvatar.displayNameChanged as displayNameChanged
* @borrows MyAvatar.sessionDisplayNameChanged as sessionDisplayNameChanged
* @borrows MyAvatar.skeletonModelURLChanged as skeletonModelURLChanged
* @borrows MyAvatar.lookAtSnappingChanged as lookAtSnappingChanged
* @borrows MyAvatar.sessionUUIDChanged as sessionUUIDChanged
* @borrows MyAvatar.sendAvatarDataPacket as sendAvatarDataPacket
* @borrows MyAvatar.sendIdentityPacket as sendIdentityPacket
* @borrows MyAvatar.setJointMappingsFromNetworkReply as setJointMappingsFromNetworkReply
* @borrows MyAvatar.setSessionUUID as setSessionUUID
* @borrows MyAvatar.getAbsoluteJointRotationInObjectFrame as getAbsoluteJointRotationInObjectFrame
* @borrows MyAvatar.getAbsoluteJointTranslationInObjectFrame as getAbsoluteJointTranslationInObjectFrame
* @borrows MyAvatar.setAbsoluteJointRotationInObjectFrame as setAbsoluteJointRotationInObjectFrame
* @borrows MyAvatar.setAbsoluteJointTranslationInObjectFrame as setAbsoluteJointTranslationInObjectFrame
* @borrows MyAvatar.getTargetScale as getTargetScale
* @borrows MyAvatar.resetLastSent as resetLastSent
* @example <caption>Create a scriptable avatar.</caption>
* (function () {
*     Agent.setIsAvatar(true);
*     print("Position: " + JSON.stringify(Avatar.position)); // 0, 0, 0
* }());
*/

class ScriptableAvatar : public AvatarData, public Dependency {
@@ -132,15 +93,17 @@ public:
ScriptableAvatar();

/**jsdoc
* Starts playing an animation on the avatar.
* @function Avatar.startAnimation
* @param {string} url
* @param {number} [fps=30]
* @param {number} [priority=1]
* @param {boolean} [loop=false]
* @param {boolean} [hold=false]
* @param {number} [firstFrame=0]
* @param {number} [lastFrame=3.403e+38]
* @param {string[]} [maskedJoints=[]]
* @param {string} url - The animation file's URL. Animation files need to be in the FBX format but only need to contain
*     the avatar skeleton and animation data.
* @param {number} [fps=30] - The frames per second (FPS) rate for the animation playback. 30 FPS is normal speed.
* @param {number} [priority=1] - <em>Not used.</em>
* @param {boolean} [loop=false] - <code>true</code> if the animation should loop, <code>false</code> if it shouldn't.
* @param {boolean} [hold=false] - <em>Not used.</em>
* @param {number} [firstFrame=0] - The frame at which the animation starts.
* @param {number} [lastFrame=3.403e+38] - The frame at which the animation stops.
* @param {string[]} [maskedJoints=[]] - The names of joints that should not be animated.
*/
/// Allows scripts to run animations.
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
@@ -148,39 +111,37 @@ public:
const QStringList& maskedJoints = QStringList());

/**jsdoc
* Stops playing the current animation.
* @function Avatar.stopAnimation
*/
Q_INVOKABLE void stopAnimation();

/**jsdoc
* Gets the details of the current avatar animation that is being or was recently played.
* @function Avatar.getAnimationDetails
* @returns {Avatar.AnimationDetails}
* @returns {Avatar.AnimationDetails} The current or recent avatar animation.
* @example <caption>Report the current animation details.</caption>
* var animationDetails = Avatar.getAnimationDetails();
* print("Animation details: " + JSON.stringify(animationDetails));
*/
Q_INVOKABLE AnimationDetails getAnimationDetails();

/**jsdoc
* Get the names of all the joints in the current avatar.
* @function MyAvatar.getJointNames
* @returns {string[]} The joint names.
* @example <caption>Report the names of all the joints in your current avatar.</caption>
* print(JSON.stringify(MyAvatar.getJointNames()));
*/
* @comment Uses the base class's JSDoc.
*/
Q_INVOKABLE virtual QStringList getJointNames() const override;

/**jsdoc
* Get the joint index for a named joint. The joint index value is the position of the joint in the array returned by
* {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}.
* @function MyAvatar.getJointIndex
* @param {string} name - The name of the joint.
* @returns {number} The index of the joint.
* @example <caption>Report the index of your avatar's left arm joint.</caption>
* print(JSON.stringify(MyAvatar.getJointIndex("LeftArm"));
*/
* @comment Uses the base class's JSDoc.
*/
/// Returns the index of the joint with the specified name, or -1 if not found/unknown.
Q_INVOKABLE virtual int getJointIndex(const QString& name) const override;

virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;

/**jsdoc
* @comment Uses the base class's JSDoc.
*/
int sendAvatarDataPacket(bool sendAll = false) override;

virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
@@ -192,32 +153,42 @@ public:
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }

/**jsdoc
* Potentially Very Expensive. Do not use.
/**jsdoc
* Gets details of all avatar entities.
* <p><strong>Warning:</strong> Potentially an expensive call. Do not use if possible.</p>
* @function Avatar.getAvatarEntityData
* @returns {object}
* @returns {AvatarEntityMap} Details of the avatar entities.
* @example <caption>Report the current avatar entities.</caption>
* var avatarEntityData = Avatar.getAvatarEntityData();
* print("Avatar entities: " + JSON.stringify(avatarEntityData));
*/
Q_INVOKABLE AvatarEntityMap getAvatarEntityData() const override;

/**jsdoc
* @function MyAvatar.setAvatarEntityData
* @param {object} avatarEntityData
*/
* Sets all avatar entities from an object.
* <p><strong>Warning:</strong> Potentially an expensive call. Do not use if possible.</p>
* @function Avatar.setAvatarEntityData
* @param {AvatarEntityMap} avatarEntityData - Details of the avatar entities.
*/
Q_INVOKABLE void setAvatarEntityData(const AvatarEntityMap& avatarEntityData) override;

/**jsdoc
* @function MyAvatar.updateAvatarEntity
* @param {Uuid} entityID
* @param {string} entityData
* @comment Uses the base class's JSDoc.
*/
Q_INVOKABLE void updateAvatarEntity(const QUuid& entityID, const QByteArray& entityData) override;

public slots:
/**jsdoc
* @function Avatar.update
* @param {number} deltaTime - Delta time.
* @deprecated This function is deprecated and will be removed.
*/
void update(float deltatime);

/**jsdoc
* @function MyAvatar.setJointMappingsFromNetworkReply
*/
* @function Avatar.setJointMappingsFromNetworkReply
* @deprecated This function is deprecated and will be removed.
*/
void setJointMappingsFromNetworkReply();

private:
@@ -1,7 +1,7 @@
macro(TARGET_PYTHON)
if (NOT HIFI_PYTHON_EXEC)
# Find the python interpreter
if (CAME_VERSION VERSION_LESS 3.12)
if (CMAKE_VERSION VERSION_LESS 3.12)
# this logic is deprecated in CMake after 3.12
# FIXME eventually we should make 3.12 the min cmake verion and just use the Python3 find_package path
set(Python_ADDITIONAL_VERSIONS 3)
@@ -379,9 +379,9 @@ Item {
Component.onCompleted: {
// with the link.
if (completeProfileBody.withOculus) {
termsText.text = qsTr("By signing up, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By signing up, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
} else {
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}
}
@@ -395,7 +395,7 @@ Item {
text: signUpBody.termsContainerText
Component.onCompleted: {
// with the link.
termsText.text = qsTr("By signing up, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By signing up, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}
@@ -218,7 +218,7 @@ Item {
text: usernameCollisionBody.termsContainerText
Component.onCompleted: {
// with the link.
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}
@@ -232,6 +232,10 @@
text: "Audio Codec: " + root.audioCodec + " Noise Gate: " +
root.audioNoiseGate;
}
StatText {
visible: root.expanded;
text: "Injectors (Local/NonLocal): " + root.audioInjectors.x + "/" + root.audioInjectors.y;
}
StatText {
visible: root.expanded;
text: "Entity Servers In: " + root.entityPacketsInKbps + " kbps";
@@ -28,7 +28,7 @@ TabletModalWindow {
id: mouse;
anchors.fill: parent
}

function click(button) {
clickedButton = button;
selected(button);
@@ -40,6 +40,7 @@ Item {
property bool isConcurrency: action === 'concurrency';
property bool isAnnouncement: action === 'announcement';
property bool isStacked: !isConcurrency && drillDownToPlace;
property bool has3DHTML: PlatformInfo.has3DHTML();

property int textPadding: 10;
@@ -298,7 +299,7 @@ Item {

StateImage {
id: actionIcon;
visible: !isAnnouncement;
visible: !isAnnouncement && has3DHTML;
imageURL: "../../images/info-icon-2-state.svg";
size: 30;
buttonState: messageArea.containsMouse ? 1 : 0;
@@ -315,7 +316,7 @@ Item {
}
MouseArea {
id: messageArea;
visible: !isAnnouncement;
visible: !isAnnouncement && has3DHTML;
width: parent.width;
height: messageHeight;
anchors.top: lobby.bottom;
@@ -46,6 +46,8 @@ Item {
property string placeName: ""
property string profilePicBorderColor: (connectionStatus == "connection" ? hifi.colors.indigoAccent : (connectionStatus == "friend" ? hifi.colors.greenHighlight : "transparent"))
property alias avImage: avatarImage
property bool has3DHTML: PlatformInfo.has3DHTML();

Item {
id: avatarImage
visible: profileUrl !== "" && userName !== "";
@@ -94,10 +96,12 @@ Item {
enabled: (selected && activeTab == "nearbyTab") || isMyCard;
hoverEnabled: enabled
onClicked: {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
}
}
onEntered: infoHoverImage.visible = true;
onEntered: infoHoverImage.visible = has3DHTML;
onExited: infoHoverImage.visible = false;
}
}
@@ -352,7 +356,7 @@ Item {
}
StateImage {
id: nameCardConnectionInfoImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && has3DHTML
imageURL: "../../images/info-icon-2-state.svg" // PLACEHOLDER!!!
size: 32;
buttonState: 0;
@@ -364,8 +368,10 @@ Item {
enabled: selected
hoverEnabled: true
onClicked: {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
}
}
onEntered: {
nameCardConnectionInfoImage.buttonState = 1;
@@ -376,8 +382,7 @@ Item {
}
FiraSansRegular {
id: nameCardConnectionInfoText
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
width: parent.width
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && PlatformInfo.has3DHTML()
height: displayNameTextPixelSize
size: displayNameTextPixelSize - 4
anchors.left: nameCardConnectionInfoImage.right
@@ -391,9 +396,10 @@ Item {
id: nameCardRemoveConnectionImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
text: hifi.glyphs.close
size: 28;
size: 24;
x: 120
anchors.verticalCenter: nameCardConnectionInfoImage.verticalCenter
anchors.left: has3DHTML ? nameCardConnectionInfoText.right + 10 : avatarImage.right
}
MouseArea {
anchors.fill:nameCardRemoveConnectionImage
@@ -1261,6 +1261,14 @@ Rectangle {
case 'refreshConnections':
refreshConnections();
break;
case 'connectionRemoved':
for (var i=0; i<connectionsUserModel.count; ++i) {
if (connectionsUserModel.get(i).userName === message.params) {
connectionsUserModel.remove(i);
break;
}
}
break;
case 'avatarDisconnected':
var sessionID = message.params[0];
delete ignored[sessionID];
@@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

import QtQuick 2.7
import QtQuick 2.10
import QtQuick.Controls 2.2
import QtQuick.Layouts 1.3

@@ -31,6 +31,8 @@ Rectangle {
property string title: "Audio Settings"
property int switchHeight: 16
property int switchWidth: 40
readonly property real verticalScrollWidth: 10
readonly property real verticalScrollShaft: 8
signal sendToScript(var message);

color: hifi.colors.baseGray;
@@ -42,7 +44,7 @@ Rectangle {

property bool isVR: AudioScriptingInterface.context === "VR"
property real rightMostInputLevelPos: 450
property real rightMostInputLevelPos: 440
//placeholder for control sizes and paddings
//recalculates dynamically in case of UI size is changed
QtObject {
@@ -60,8 +62,8 @@ Rectangle {
id: bar
spacing: 0
width: parent.width
height: 42
currentIndex: isVR ? 1 : 0
height: 28;
currentIndex: isVR ? 1 : 0;

AudioControls.AudioTabButton {
height: parent.height
@@ -92,25 +94,74 @@ Rectangle {

Component.onCompleted: enablePeakValues();

Column {
id: column
spacing: 12;
anchors.top: bar.bottom
anchors.bottom: parent.bottom
anchors.bottomMargin: 5
Flickable {
id: flickView;
anchors.top: bar.bottom;
anchors.left: parent.left;
anchors.bottom: parent.bottom;
width: parent.width;
contentWidth: parent.width;
contentHeight: contentItem.childrenRect.height;
boundsBehavior: Flickable.DragOverBounds;
flickableDirection: Flickable.VerticalFlick;
property bool isScrolling: (contentHeight - height) > 10 ? true : false;
clip: true;

Separator { }
ScrollBar.vertical: ScrollBar {
policy: flickView.isScrolling ? ScrollBar.AlwaysOn : ScrollBar.AlwaysOff;
parent: flickView.parent;
anchors.top: flickView.top;
anchors.right: flickView.right;
anchors.bottom: flickView.bottom;
anchors.rightMargin: -verticalScrollWidth; //compensate flickView's right margin
background: Item {
implicitWidth: verticalScrollWidth;
Rectangle {
color: hifi.colors.darkGray30;
radius: 4;
anchors {
fill: parent;
topMargin: -1; // Finesse size
bottomMargin: -2;
}
}
}
contentItem: Item {
implicitWidth: verticalScrollShaft;
Rectangle {
radius: verticalScrollShaft/2;
color: hifi.colors.white30;
anchors {
fill: parent;
leftMargin: 2; // Finesse size and position.
topMargin: 1;
bottomMargin: 1;
}
}
}
}

RowLayout {
Separator {
id: firstSeparator;
anchors.top: parent.top;
}

Item {
id: switchesContainer;
x: 2 * margins.paddings;
width: parent.width;
// switch heights + 2 * top margins
height: (root.switchHeight) * 3 + 48;
anchors.top: firstSeparator.bottom;
anchors.topMargin: 10;

// mute is in its own row
ColumnLayout {
id: columnOne
spacing: 24;
x: margins.paddings
Item {
id: switchContainer;
x: margins.paddings;
width: parent.width / 2;
height: parent.height;
anchors.left: parent.left;
HifiControlsUit.Switch {
id: muteMic;
height: root.switchHeight;
@@ -129,8 +180,12 @@ Rectangle {
}

HifiControlsUit.Switch {
id: noiseReductionSwitch;
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: muteMic.bottom;
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: "Noise Reduction";
backgroundOnColor: "#E3E3E3";
checked: AudioScriptingInterface.noiseReduction;
@@ -144,6 +199,9 @@ Rectangle {
id: pttSwitch
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: noiseReductionSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: qsTr("Push To Talk (T)");
backgroundOnColor: "#E3E3E3";
checked: (bar.currentIndex === 0) ? AudioScriptingInterface.pushToTalkDesktop : AudioScriptingInterface.pushToTalkHMD;
@@ -164,12 +222,18 @@ Rectangle {
}
}

ColumnLayout {
spacing: 24;
Item {
id: additionalSwitchContainer
width: switchContainer.width - margins.paddings;
height: parent.height;
anchors.top: parent.top
anchors.left: switchContainer.right;
HifiControlsUit.Switch {
id: warnMutedSwitch
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: parent.top
anchors.left: parent.left
labelTextOn: qsTr("Warn when muted");
backgroundOnColor: "#E3E3E3";
checked: AudioScriptingInterface.warnWhenMuted;
@@ -184,6 +248,9 @@ Rectangle {
id: audioLevelSwitch
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: warnMutedSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: qsTr("Audio Level Meter");
backgroundOnColor: "#E3E3E3";
checked: AvatarInputs.showAudioTools;
@@ -197,6 +264,9 @@ Rectangle {
id: stereoInput;
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: audioLevelSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
labelTextOn: qsTr("Stereo input");
backgroundOnColor: "#E3E3E3";
checked: AudioScriptingInterface.isStereoInput;
@@ -210,17 +280,20 @@ Rectangle {
}

Item {
anchors.left: parent.left
id: pttTextContainer
anchors.top: switchesContainer.bottom;
anchors.topMargin: 10;
anchors.left: parent.left;
width: rightMostInputLevelPos;
height: pttText.height;
RalewayRegular {
id: pttText
id: pttText;
x: margins.paddings;
color: hifi.colors.white;
width: rightMostInputLevelPos;
height: paintedHeight;
wrapMode: Text.WordWrap;
font.italic: true
font.italic: true;
size: 16;

text: (bar.currentIndex === 0) ? qsTr("Press and hold the button \"T\" to talk.") :
@@ -228,28 +301,35 @@ Rectangle {
}
}

Separator { }
Separator {
id: secondSeparator;
anchors.top: pttTextContainer.bottom;
anchors.topMargin: 10;
}

Item {
id: inputDeviceHeader
x: margins.paddings;
width: parent.width - margins.paddings*2
height: 36
width: parent.width - margins.paddings*2;
height: 36;
anchors.top: secondSeparator.bottom;
anchors.topMargin: 10;

HiFiGlyphs {
width: margins.sizeCheckBox
width: margins.sizeCheckBox;
text: hifi.glyphs.mic;
color: hifi.colors.white;
anchors.left: parent.left
anchors.leftMargin: -size/4 //the glyph has empty space at left about 25%
anchors.left: parent.left;
anchors.leftMargin: -size/4; //the glyph has empty space at left about 25%
anchors.verticalCenter: parent.verticalCenter;
size: 30;
}
RalewayRegular {
anchors.verticalCenter: parent.verticalCenter;
width: margins.sizeText + margins.sizeLevel
anchors.left: parent.left
anchors.leftMargin: margins.sizeCheckBox
width: margins.sizeText + margins.sizeLevel;
anchors.left: parent.left;
anchors.leftMargin: margins.sizeCheckBox;
size: 16;
color: hifi.colors.white;
text: qsTr("Choose input device");
@@ -257,12 +337,13 @@ Rectangle {
}

ListView {
id: inputView
width: parent.width - margins.paddings*2
id: inputView;
width: parent.width - margins.paddings*2;
anchors.top: inputDeviceHeader.bottom;
anchors.topMargin: 10;
x: margins.paddings
height: Math.min(150, contentHeight);
height: contentHeight;
spacing: 4;
snapMode: ListView.SnapToItem;
clip: true;
model: AudioScriptingInterface.devices.input;
delegate: Item {
@@ -302,16 +383,26 @@ Rectangle {
}
}
AudioControls.LoopbackAudio {
id: loopbackAudio
x: margins.paddings
anchors.top: inputView.bottom;
anchors.topMargin: 10;

visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
anchors { left: parent.left; leftMargin: margins.paddings }
}

Separator {}
Separator {
id: thirdSeparator;
anchors.top: loopbackAudio.bottom;
anchors.topMargin: 10;
}

Item {
id: outputDeviceHeader;
anchors.topMargin: 10;
anchors.top: thirdSeparator.bottom;
x: margins.paddings;
width: parent.width - margins.paddings*2
height: 36
@@ -341,9 +432,10 @@ Rectangle {
id: outputView
width: parent.width - margins.paddings*2
x: margins.paddings
height: Math.min(360 - inputView.height, contentHeight);
height: contentHeight;
anchors.top: outputDeviceHeader.bottom;
anchors.topMargin: 10;
spacing: 4;
snapMode: ListView.SnapToItem;
clip: true;
model: AudioScriptingInterface.devices.output;
delegate: Item {
@@ -372,6 +464,8 @@ Rectangle {
Item {
id: gainContainer
x: margins.paddings;
anchors.top: outputView.bottom;
anchors.topMargin: 10;
width: parent.width - margins.paddings*2
height: gainSliderTextMetrics.height

@@ -430,7 +524,10 @@ Rectangle {
}

AudioControls.PlaySampleSound {
id: playSampleSound
x: margins.paddings
anchors.top: gainContainer.bottom;
anchors.topMargin: 10;

visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
@@ -16,7 +16,7 @@ import stylesUit 1.0

TabButton {
id: control
font.pixelSize: height / 2
font.pixelSize: 14

HifiConstants { id: hifi; }
@@ -133,7 +133,7 @@ Item {
states: [
State {
name: AvatarPackagerState.main
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; videoEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerMain; visible: true }
PropertyChanges { target: avatarPackagerFooter; content: avatarPackagerMain.footer }
},
@@ -229,7 +229,11 @@ Item {
}

function openDocs() {
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/create-avatars#how-to-package-your-avatar");
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/package-avatar.html");
}

function openVideo() {
Qt.openUrlExternally("https://youtu.be/zrkEowu_yps");
}

AvatarPackagerHeader {
@@ -243,6 +247,9 @@ Item {
onDocsButtonClicked: {
avatarPackager.openDocs();
}
onVideoButtonClicked: {
avatarPackager.openVideo();
}
}

Item {
@@ -13,6 +13,7 @@ ShadowRectangle {

property string title: qsTr("Avatar Packager")
property alias docsEnabled: docs.visible
property alias videoEnabled: video.visible
property bool backButtonVisible: true // If false, is not visible and does not take up space
property bool backButtonEnabled: true // If false, is not visible but does not affect space
property bool canRename: false
@@ -24,6 +25,7 @@ ShadowRectangle {

signal backButtonClicked
signal docsButtonClicked
signal videoButtonClicked

RalewayButton {
id: back
@@ -126,6 +128,20 @@ ShadowRectangle {
}
}

RalewayButton {
id: video
visible: false
size: 28
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: docs.left
anchors.rightMargin: 16

text: qsTr("Video")

onClicked: videoButtonClicked()
}

RalewayButton {
id: docs
visible: false
@@ -137,8 +153,6 @@ ShadowRectangle {

text: qsTr("Docs")

onClicked: {
docsButtonClicked();
}
onClicked: docsButtonClicked()
}
}
@@ -339,8 +339,8 @@ Item {
visible: AvatarPackagerCore.currentAvatarProject && AvatarPackagerCore.currentAvatarProject.hasErrors

anchors {
top: notForSaleMessage.bottom
topMargin: 16
top: notForSaleMessage.visible ? notForSaleMessage.bottom : infoMessage .bottom
bottom: showFilesText.top
horizontalCenter: parent.horizontalCenter
}
@@ -2248,6 +2248,7 @@ Item {
if (sendAssetStep.selectedRecipientUserName === "") {
console.log("SendAsset: Script didn't specify a recipient username!");
sendAssetHome.visible = false;
root.nextActiveView = 'paymentFailure';
return;
}
@@ -348,9 +348,11 @@ Rectangle {
}

onAccepted: {
root.searchString = searchField.text;
getMarketplaceItems();
searchField.forceActiveFocus();
if (root.searchString !== searchField.text) {
root.searchString = searchField.text;
getMarketplaceItems();
searchField.forceActiveFocus();
}
}

onActiveFocusChanged: {
@@ -32,6 +32,7 @@ Rectangle {
property string initialActiveViewAfterStatus5: "walletInventory";
property bool keyboardRaised: false;
property bool isPassword: false;
property bool has3DHTML: PlatformInfo.has3DHTML();

anchors.fill: (typeof parent === undefined) ? undefined : parent;
@@ -335,8 +336,10 @@ Rectangle {
Connections {
onSendSignalToWallet: {
if (msg.method === 'transactionHistory_usernameLinkClicked') {
userInfoViewer.url = msg.usernameLink;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = msg.usernameLink;
userInfoViewer.visible = true;
}
} else {
sendToScript(msg);
}
@@ -24,6 +24,8 @@ Item {
HifiConstants { id: hifi; }

id: root;

property bool has3DHTML: PlatformInfo.has3DHTML();

onVisibleChanged: {
if (visible) {
@@ -333,7 +335,9 @@ Item {

onLinkActivated: {
if (link.indexOf("users/") !== -1) {
sendSignalToWallet({method: 'transactionHistory_usernameLinkClicked', usernameLink: link});
if (has3DHTML) {
sendSignalToWallet({method: 'transactionHistory_usernameLinkClicked', usernameLink: link});
}
} else {
sendSignalToWallet({method: 'transactionHistory_linkClicked', itemId: model.marketplace_item});
}
@@ -1,27 +0,0 @@
//
// Audio.qml
//
// Created by Zach Pomerantz on 6/12/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

import "../../windows"
import "../audio"

ScrollingWindow {
id: root;

resizable: true;
destroyOnHidden: true;
width: 400;
height: 577;
minSize: Qt.vector2d(400, 500);

Audio { id: audio; width: root.width }

objectName: "AudioDialog";
title: audio.title;
}
@@ -35,6 +35,7 @@ StackView {
property int cardWidth: 212;
property int cardHeight: 152;
property var tablet: null;
property bool has3DHTML: PlatformInfo.has3DHTML();

RootHttpRequest { id: http; }
signal sendToScript(var message);
@@ -75,8 +76,10 @@ StackView {
}
function goCard(targetString, standaloneOptimized) {
if (0 !== targetString.indexOf('hifi://')) {
var card = tabletWebView.createObject();
card.url = addressBarDialog.metaverseServerUrl + targetString;
if(has3DHTML) {
var card = tabletWebView.createObject();
card.url = addressBarDialog.metaverseServerUrl + targetString;
}
card.parentStackItem = root;
root.push(card);
return;
@@ -117,7 +117,6 @@ Rectangle {
if (loader.item.hasOwnProperty("gotoPreviousApp")) {
loader.item.gotoPreviousApp = true;
}

screenChanged("Web", url)
});
}
@@ -1985,6 +1985,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return nullptr;
});

EntityTree::setEmitScriptEventOperator([this](const QUuid& id, const QVariant& message) {
auto entities = getEntities();
if (auto entity = entities->renderableForEntityId(id)) {
entity->emitScriptEvent(message);
}
});

EntityTree::setTextSizeOperator([this](const QUuid& id, const QString& text) {
auto entities = getEntities();
if (auto entity = entities->renderableForEntityId(id)) {
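The setEmitScriptEventOperator / setTextSizeOperator calls above inject application-side behaviour into the shared entity library through statically stored callbacks, so the library can trigger renderer work without linking against the application. How EntityTree actually stores these operators is not shown in this diff; a self-contained sketch of the general pattern, with hypothetical names and std::string standing in for the Qt types, is:

#include <functional>
#include <iostream>
#include <string>

// Hypothetical stand-in for a library class that must not depend on the application.
class Tree {
public:
    using EmitScriptEventOperator = std::function<void(const std::string& id, const std::string& message)>;

    static void setEmitScriptEventOperator(EmitScriptEventOperator op) { _emitScriptEventOperator = std::move(op); }

    // Library-side call site: forwards to the application only if an operator was registered.
    static void emitScriptEvent(const std::string& id, const std::string& message) {
        if (_emitScriptEventOperator) {
            _emitScriptEventOperator(id, message);
        }
    }

private:
    static EmitScriptEventOperator _emitScriptEventOperator;
};

Tree::EmitScriptEventOperator Tree::_emitScriptEventOperator;

int main() {
    // Application-side registration, analogous to the lambdas passed in the diff.
    Tree::setEmitScriptEventOperator([](const std::string& id, const std::string& message) {
        std::cout << "deliver " << message << " to entity " << id << "\n";
    });
    Tree::emitScriptEvent("entity-1", "{\"hello\":true}");
    return 0;
}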
@@ -2342,6 +2349,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return viewFrustum.getPosition();
});

DependencyManager::get<UsersScriptingInterface>()->setKickConfirmationOperator([this] (const QUuid& nodeID) { userKickConfirmation(nodeID); });

render::entities::WebEntityRenderer::setAcquireWebSurfaceOperator([this](const QString& url, bool htmlContent, QSharedPointer<OffscreenQmlSurface>& webSurface, bool& cachedWebSurface) {
bool isTablet = url == TabletScriptingInterface::QML;
if (htmlContent) {
@@ -2704,9 +2713,7 @@ void Application::cleanupBeforeQuit() {

DependencyManager::destroy<OffscreenQmlSurfaceCache>();

if (_snapshotSoundInjector != nullptr) {
_snapshotSoundInjector->stop();
}
_snapshotSoundInjector = nullptr;

// destroy Audio so it and its threads have a chance to go down safely
// this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
@@ -3043,6 +3050,9 @@ void Application::initializeUi() {
QUrl{ "hifi/commerce/wallet/Wallet.qml" },
QUrl{ "hifi/commerce/wallet/WalletHome.qml" },
QUrl{ "hifi/tablet/TabletAddressDialog.qml" },
QUrl{ "hifi/Card.qml" },
QUrl{ "hifi/Pal.qml" },
QUrl{ "hifi/NameCard.qml" },
}, platformInfoCallback);

QmlContextCallback ttsCallback = [](QQmlContext* context) {
@@ -3287,6 +3297,40 @@ void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
#endif
}

void Application::userKickConfirmation(const QUuid& nodeID) {
auto avatarHashMap = DependencyManager::get<AvatarHashMap>();
auto avatar = avatarHashMap->getAvatarBySessionID(nodeID);

QString userName;

if (avatar) {
userName = avatar->getSessionDisplayName();
} else {
userName = nodeID.toString();
}

QString kickMessage = "Do you wish to kick " + userName + " from your domain";
ModalDialogListener* dlg = OffscreenUi::asyncQuestion("Kick User", kickMessage,
QMessageBox::Yes | QMessageBox::No);

if (dlg->getDialogItem()) {

QObject::connect(dlg, &ModalDialogListener::response, this, [=] (QVariant answer) {
QObject::disconnect(dlg, &ModalDialogListener::response, this, nullptr);

bool yes = (static_cast<QMessageBox::StandardButton>(answer.toInt()) == QMessageBox::Yes);
// ask the NodeList to kick the user with the given session ID

if (yes) {
DependencyManager::get<NodeList>()->kickNodeBySessionID(nodeID);
}

DependencyManager::get<UsersScriptingInterface>()->setWaitForKickResponse(false);
});
DependencyManager::get<UsersScriptingInterface>()->setWaitForKickResponse(true);
}
}

void Application::setupQmlSurface(QQmlContext* surfaceContext, bool setAdditionalContextProperties) {
surfaceContext->setContextProperty("Users", DependencyManager::get<UsersScriptingInterface>().data());
surfaceContext->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
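userKickConfirmation() above follows a non-blocking confirm-then-act pattern: pose the question, return immediately, and perform the kick from the dialog's response callback. Stripped of the Qt and engine types, the shape of that control flow is roughly the following sketch (asyncQuestion and kickUser are stand-ins, not real APIs):

#include <functional>
#include <iostream>
#include <string>

// Minimal sketch: the dialog is modeled as a function that eventually invokes a callback.
// In the real code this is OffscreenUi::asyncQuestion() plus the ModalDialogListener response signal.
void asyncQuestion(const std::string& question, std::function<void(bool yes)> onResponse) {
    std::cout << question << "\n";
    onResponse(true);  // stand-in for the user eventually clicking "Yes"
}

void kickUser(const std::string& userName) {
    std::cout << "kicking " << userName << "\n";
}

void userKickConfirmation(const std::string& userName) {
    // No blocking wait: the action happens later, inside the response handler.
    asyncQuestion("Do you wish to kick " + userName + " from your domain?", [userName](bool yes) {
        if (yes) {
            kickUser(userName);
        }
    });
}

int main() {
    userKickConfirmation("SomeAvatar");
    return 0;
}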
@@ -4225,10 +4269,9 @@ void Application::keyPressEvent(QKeyEvent* event) {
Setting::Handle<bool> notificationSoundSnapshot{ MenuOption::NotificationSoundsSnapshot, true };
if (notificationSounds.get() && notificationSoundSnapshot.get()) {
if (_snapshotSoundInjector) {
_snapshotSoundInjector->setOptions(options);
_snapshotSoundInjector->restart();
DependencyManager::get<AudioInjectorManager>()->setOptionsAndRestart(_snapshotSoundInjector, options);
} else {
_snapshotSoundInjector = AudioInjector::playSound(_snapshotSound, options);
_snapshotSoundInjector = DependencyManager::get<AudioInjectorManager>()->playSound(_snapshotSound, options);
}
}
takeSnapshot(true);
@@ -593,6 +593,7 @@ private:
void toggleTabletUI(bool shouldOpen = false) const;

static void setupQmlSurface(QQmlContext* surfaceContext, bool setAdditionalContextProperties);
void userKickConfirmation(const QUuid& nodeID);

MainWindow* _window;
QElapsedTimer& _sessionRunTimer;
@ -149,6 +149,9 @@ void AvatarBookmarks::removeBookmark(const QString& bookmarkName) {
|
|||
emit bookmarkDeleted(bookmarkName);
|
||||
}
|
||||
|
||||
void AvatarBookmarks::deleteBookmark() {
|
||||
}
|
||||
|
||||
void AvatarBookmarks::updateAvatarEntities(const QVariantList &avatarEntities) {
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
auto currentAvatarEntities = myAvatar->getAvatarEntityData();
|
||||
|
|
|
@ -76,6 +76,9 @@ protected:
|
|||
void readFromFile() override;
|
||||
QVariantMap getAvatarDataToBookmark();
|
||||
|
||||
protected slots:
|
||||
void deleteBookmark() override;
|
||||
|
||||
private:
|
||||
const QString AVATARBOOKMARKS_FILENAME = "avatarbookmarks.json";
|
||||
const QString ENTRY_AVATAR_URL = "avatarUrl";
|
||||
|
|
|
@ -51,13 +51,10 @@ protected:
|
|||
bool _isMenuSorted;
|
||||
|
||||
protected slots:
|
||||
/**jsdoc
|
||||
* @function AvatarBookmarks.deleteBookmark
|
||||
*/
|
||||
/**jsdoc
|
||||
* @function LocationBookmarks.deleteBookmark
|
||||
*/
|
||||
void deleteBookmark();
|
||||
virtual void deleteBookmark();
|
||||
|
||||
private:
|
||||
static bool sortOrder(QAction* a, QAction* b);
|
||||
|
|
|
@ -270,10 +270,14 @@ Menu::Menu() {
|
|||
// Settings > Audio...
|
||||
action = addActionToQMenuAndActionHash(settingsMenu, "Audio...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
static const QUrl widgetUrl("hifi/dialogs/Audio.qml");
|
||||
static const QUrl tabletUrl("hifi/audio/Audio.qml");
|
||||
static const QString name("AudioDialog");
|
||||
qApp->showDialog(widgetUrl, tabletUrl, name);
|
||||
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
tablet->pushOntoStack(tabletUrl);
|
||||
|
||||
if (!hmd->getShouldShowTablet()) {
|
||||
hmd->toggleShouldShowTablet();
|
||||
}
|
||||
});
|
||||
|
||||
// Settings > Graphics...
|
||||
|
|
|
@ -55,7 +55,7 @@ static QStringList HAND_MAPPING_SUFFIXES = {
|
|||
"HandThumb1",
|
||||
};
|
||||
|
||||
const QUrl DEFAULT_DOCS_URL = QUrl("https://docs.highfidelity.com/create/avatars/create-avatars.html#create-your-own-avatar");
|
||||
const QUrl PACKAGE_AVATAR_DOCS_BASE_URL = QUrl("https://docs.highfidelity.com/create/avatars/package-avatar.html");
|
||||
|
||||
AvatarDoctor::AvatarDoctor(const QUrl& avatarFSTFileUrl) :
|
||||
_avatarFSTFileUrl(avatarFSTFileUrl) {
|
||||
|
@ -85,7 +85,7 @@ void AvatarDoctor::startDiagnosing() {
|
|||
const auto resourceLoaded = [this, resource](bool success) {
|
||||
// MODEL
|
||||
if (!success) {
|
||||
_errors.push_back({ "Model file cannot be opened.", DEFAULT_DOCS_URL });
|
||||
addError("Model file cannot be opened.", "missing-file");
|
||||
emit complete(getErrors());
|
||||
return;
|
||||
}
|
||||
|
@ -93,45 +93,45 @@ void AvatarDoctor::startDiagnosing() {
|
|||
const auto model = resource.data();
|
||||
const auto avatarModel = resource.data()->getHFMModel();
|
||||
if (!avatarModel.originalURL.endsWith(".fbx")) {
|
||||
_errors.push_back({ "Unsupported avatar model format.", DEFAULT_DOCS_URL });
|
||||
addError("Unsupported avatar model format.", "unsupported-format");
|
||||
emit complete(getErrors());
|
||||
return;
|
||||
}
|
||||
|
||||
// RIG
|
||||
if (avatarModel.joints.isEmpty()) {
|
||||
_errors.push_back({ "Avatar has no rig.", DEFAULT_DOCS_URL });
|
||||
addError("Avatar has no rig.", "no-rig");
|
||||
} else {
|
||||
auto jointNames = avatarModel.getJointNames();
|
||||
|
||||
if (avatarModel.joints.length() > NETWORKED_JOINTS_LIMIT) {
|
||||
_errors.push_back({tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), DEFAULT_DOCS_URL });
|
||||
addError(tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), "maximum-bone-limit");
|
||||
}
|
||||
// Avatar does not have Hips bone mapped
|
||||
if (!jointNames.contains("Hips")) {
|
||||
_errors.push_back({ "Hips are not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("Hips are not mapped.", "hips-not-mapped");
|
||||
}
|
||||
if (!jointNames.contains("Spine")) {
|
||||
_errors.push_back({ "Spine is not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("Spine is not mapped.", "spine-not-mapped");
|
||||
}
|
||||
if (!jointNames.contains("Spine1")) {
|
||||
_errors.push_back({ "Chest (Spine1) is not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("Chest (Spine1) is not mapped.", "chest-not-mapped");
|
||||
}
|
||||
if (!jointNames.contains("Neck")) {
|
||||
_errors.push_back({ "Neck is not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("Neck is not mapped.", "neck-not-mapped");
|
||||
}
|
||||
if (!jointNames.contains("Head")) {
|
||||
_errors.push_back({ "Head is not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("Head is not mapped.", "head-not-mapped");
|
||||
}
|
||||
|
||||
if (!jointNames.contains("LeftEye")) {
|
||||
if (jointNames.contains("RightEye")) {
|
||||
_errors.push_back({ "LeftEye is not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("LeftEye is not mapped.", "eye-not-mapped");
|
||||
} else {
|
||||
_errors.push_back({ "Eyes are not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("Eyes are not mapped.", "eye-not-mapped");
|
||||
}
|
||||
} else if (!jointNames.contains("RightEye")) {
|
||||
_errors.push_back({ "RightEye is not mapped.", DEFAULT_DOCS_URL });
|
||||
addError("RightEye is not mapped.", "eye-not-mapped");
|
||||
}
|
||||
|
||||
const auto checkJointAsymmetry = [jointNames] (const QStringList& jointMappingSuffixes) {
|
||||
|
@ -159,13 +159,13 @@ void AvatarDoctor::startDiagnosing() {
|
|||
};
|
||||
|
||||
if (checkJointAsymmetry(ARM_MAPPING_SUFFIXES)) {
|
||||
_errors.push_back({ "Asymmetrical arm bones.", DEFAULT_DOCS_URL });
|
||||
addError("Asymmetrical arm bones.", "asymmetrical-bones");
|
||||
}
|
||||
if (checkJointAsymmetry(HAND_MAPPING_SUFFIXES)) {
|
||||
_errors.push_back({ "Asymmetrical hand bones.", DEFAULT_DOCS_URL });
|
||||
addError("Asymmetrical hand bones.", "asymmetrical-bones");
|
||||
}
|
||||
if (checkJointAsymmetry(LEG_MAPPING_SUFFIXES)) {
|
||||
_errors.push_back({ "Asymmetrical leg bones.", DEFAULT_DOCS_URL });
|
||||
addError("Asymmetrical leg bones.", "asymmetrical-bones");
|
||||
}
|
||||
|
||||
// Multiple skeleton root joints checkup
|
||||
|
@ -177,7 +177,7 @@ void AvatarDoctor::startDiagnosing() {
|
|||
}
|
||||
|
||||
if (skeletonRootJoints > 1) {
|
||||
_errors.push_back({ "Multiple top-level joints found.", DEFAULT_DOCS_URL });
|
||||
addError("Multiple top-level joints found.", "multiple-top-level-joints");
|
||||
}
|
||||
|
||||
Rig rig;
|
||||
|
@ -191,9 +191,9 @@ void AvatarDoctor::startDiagnosing() {
|
|||
const float RECOMMENDED_MAX_HEIGHT = DEFAULT_AVATAR_HEIGHT * 1.5f;
|
||||
|
||||
if (avatarHeight < RECOMMENDED_MIN_HEIGHT) {
|
||||
_errors.push_back({ "Avatar is possibly too short.", DEFAULT_DOCS_URL });
|
||||
addError("Avatar is possibly too short.", "short-avatar");
|
||||
} else if (avatarHeight > RECOMMENDED_MAX_HEIGHT) {
|
||||
_errors.push_back({ "Avatar is possibly too tall.", DEFAULT_DOCS_URL });
|
||||
addError("Avatar is possibly too tall.", "tall-avatar");
|
||||
}
|
||||
|
||||
// HipsNotOnGround
|
||||
|
@ -204,7 +204,7 @@ void AvatarDoctor::startDiagnosing() {
|
|||
const auto hipJoint = avatarModel.joints.at(avatarModel.getJointIndex("Hips"));
|
||||
|
||||
if (hipsPosition.y < HIPS_GROUND_MIN_Y) {
|
||||
_errors.push_back({ "Hips are on ground.", DEFAULT_DOCS_URL });
|
||||
addError("Hips are on ground.", "hips-on-ground");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -223,7 +223,7 @@ void AvatarDoctor::startDiagnosing() {
|
|||
const auto hipsToSpine = glm::length(hipsPosition - spinePosition);
|
||||
const auto spineToChest = glm::length(spinePosition - chestPosition);
|
||||
if (hipsToSpine < HIPS_SPINE_CHEST_MIN_SEPARATION && spineToChest < HIPS_SPINE_CHEST_MIN_SEPARATION) {
|
||||
_errors.push_back({ "Hips/Spine/Chest overlap.", DEFAULT_DOCS_URL });
|
||||
addError("Hips/Spine/Chest overlap.", "overlap-error");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -240,21 +240,21 @@ void AvatarDoctor::startDiagnosing() {
|
|||
const auto& uniqueJointValues = jointValues.toSet();
|
||||
for (const auto& jointName: uniqueJointValues) {
|
||||
if (jointValues.count(jointName) > 1) {
|
||||
_errors.push_back({ tr("%1 is mapped multiple times.").arg(jointName), DEFAULT_DOCS_URL });
|
||||
addError(tr("%1 is mapped multiple times.").arg(jointName), "mapped-multiple-times");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!isDescendantOfJointWhenJointsExist("Spine", "Hips")) {
|
||||
_errors.push_back({ "Spine is not a child of Hips.", DEFAULT_DOCS_URL });
|
||||
addError("Spine is not a child of Hips.", "spine-not-child");
|
||||
}
|
||||
|
||||
if (!isDescendantOfJointWhenJointsExist("Spine1", "Spine")) {
|
||||
_errors.push_back({ "Spine1 is not a child of Spine.", DEFAULT_DOCS_URL });
|
||||
addError("Spine1 is not a child of Spine.", "spine1-not-child");
|
||||
}
|
||||
|
||||
if (!isDescendantOfJointWhenJointsExist("Head", "Spine1")) {
|
||||
_errors.push_back({ "Head is not a child of Spine1.", DEFAULT_DOCS_URL });
|
||||
addError("Head is not a child of Spine1.", "head-not-child");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -300,7 +300,7 @@ void AvatarDoctor::startDiagnosing() {
|
|||
connect(resource.data(), &GeometryResource::finished, this, resourceLoaded);
|
||||
}
|
||||
} else {
|
||||
_errors.push_back({ "Model file cannot be opened", DEFAULT_DOCS_URL });
|
||||
addError("Model file cannot be opened", "missing-file");
|
||||
emit complete(getErrors());
|
||||
}
|
||||
}
|
||||
|
@ -345,7 +345,7 @@ void AvatarDoctor::diagnoseTextures() {
|
|||
QUrl(avatarModel.originalURL)).resolved(QUrl("textures"));
|
||||
|
||||
if (texturesFound == 0) {
|
||||
_errors.push_back({ tr("No textures assigned."), DEFAULT_DOCS_URL });
|
||||
addError(tr("No textures assigned."), "no-textures-assigned");
|
||||
}
|
||||
|
||||
if (!externalTextures.empty()) {
|
||||
|
@ -356,11 +356,10 @@ void AvatarDoctor::diagnoseTextures() {
|
|||
auto checkTextureLoadingComplete = [this]() mutable {
|
||||
if (_checkedTextureCount == _externalTextureCount) {
|
||||
if (_missingTextureCount > 0) {
|
||||
_errors.push_back({ tr("Missing %n texture(s).","", _missingTextureCount), DEFAULT_DOCS_URL });
|
||||
addError(tr("Missing %n texture(s).","", _missingTextureCount), "missing-textures");
|
||||
}
|
||||
if (_unsupportedTextureCount > 0) {
|
||||
_errors.push_back({ tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount),
|
||||
DEFAULT_DOCS_URL });
|
||||
addError(tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount), "unsupported-textures");
|
||||
}
|
||||
|
||||
emit complete(getErrors());
|
||||
|
@ -411,6 +410,12 @@ void AvatarDoctor::diagnoseTextures() {
|
|||
}
|
||||
}
|
||||
|
||||
void AvatarDoctor::addError(const QString& errorMessage, const QString& docFragment) {
|
||||
QUrl documentationURL = PACKAGE_AVATAR_DOCS_BASE_URL;
|
||||
documentationURL.setFragment(docFragment);
|
||||
_errors.push_back({ errorMessage, documentationURL });
|
||||
}
|
||||
|
||||
QVariantList AvatarDoctor::getErrors() const {
|
||||
QVariantList result;
|
||||
for (const auto& error : _errors) {
|
||||
|
|
|
@ -40,6 +40,8 @@ signals:
|
|||
private:
|
||||
void diagnoseTextures();
|
||||
|
||||
void addError(const QString& errorMessage, const QString& docFragment);
|
||||
|
||||
QUrl _avatarFSTFileUrl;
|
||||
QVector<AvatarDiagnosticResult> _errors;
|
||||
|
||||
|
|
|
@ -629,8 +629,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
|
|||
// but most avatars are roughly the same size, so let's not be so fancy yet.
|
||||
const float AVATAR_STRETCH_FACTOR = 1.0f;
|
||||
|
||||
_collisionInjectors.remove_if(
|
||||
[](const AudioInjectorPointer& injector) { return !injector || injector->isFinished(); });
|
||||
_collisionInjectors.remove_if([](const AudioInjectorPointer& injector) { return !injector; });
|
||||
|
||||
static const int MAX_INJECTOR_COUNT = 3;
|
||||
if (_collisionInjectors.size() < MAX_INJECTOR_COUNT) {
|
||||
|
@ -640,7 +639,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
|
|||
options.volume = energyFactorOfFull;
|
||||
options.pitch = 1.0f / AVATAR_STRETCH_FACTOR;
|
||||
|
||||
auto injector = AudioInjector::playSoundAndDelete(collisionSound, options);
|
||||
auto injector = DependencyManager::get<AudioInjectorManager>()->playSound(collisionSound, options, true);
|
||||
_collisionInjectors.emplace_back(injector);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
#include <SimpleMovingAverage.h>
|
||||
#include <shared/RateCounter.h>
|
||||
#include <avatars-renderer/ScriptAvatar.h>
|
||||
#include <AudioInjector.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <workload/Space.h>
|
||||
#include <EntitySimulation.h> // for SetOfEntities
|
||||
|
||||
|
@ -239,7 +239,7 @@ private:
|
|||
std::shared_ptr<MyAvatar> _myAvatar;
|
||||
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
|
||||
|
||||
std::list<AudioInjectorPointer> _collisionInjectors;
|
||||
std::list<QWeakPointer<AudioInjector>> _collisionInjectors;
|
||||
|
||||
RateCounter<> _myAvatarSendRate;
|
||||
int _numAvatarsUpdated { 0 };
|
||||
|
|
|
@ -1570,7 +1570,7 @@ void MyAvatar::handleChangedAvatarEntityData() {
|
|||
entityTree->withWriteLock([&] {
|
||||
EntityItemPointer entity = entityTree->addEntity(id, properties);
|
||||
if (entity) {
|
||||
packetSender->queueEditEntityMessage(PacketType::EntityAdd, entityTree, id, properties);
|
||||
packetSender->queueEditAvatarEntityMessage(entityTree, id);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -2385,7 +2385,19 @@ void MyAvatar::clearWornAvatarEntities() {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* Information about an avatar entity.
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Property</th><th>Type</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
* <tr><td><code>id</code></td><td>Uuid</td><td>Entity ID.</td></tr>
|
||||
* <tr><td><code>properties</code></td><td>{@link Entities.EntityProperties}</td><td>Entity properties.</td></tr>
|
||||
* </tbody>
|
||||
* </table>
|
||||
* @typedef {object} MyAvatar.AvatarEntityData
|
||||
*/
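A usage sketch of the structure documented above (illustrative only; assumes a client script where the standard MyAvatar interface is available):

    var avatarEntities = MyAvatar.getAvatarEntitiesVariant();   // array of MyAvatar.AvatarEntityData
    avatarEntities.forEach(function (entry) {
        print("avatar entity " + entry.id + " is a " + entry.properties.type);
    });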
|
||||
QVariantList MyAvatar::getAvatarEntitiesVariant() {
|
||||
// NOTE: this method is NOT efficient
|
||||
QVariantList avatarEntitiesData;
|
||||
|
@ -3451,10 +3463,10 @@ float MyAvatar::getGravity() {
|
|||
}
|
||||
|
||||
void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
|
||||
QUuid oldID = getSessionUUID();
|
||||
QUuid oldSessionID = getSessionUUID();
|
||||
Avatar::setSessionUUID(sessionUUID);
|
||||
QUuid id = getSessionUUID();
|
||||
if (id != oldID) {
|
||||
QUuid newSessionID = getSessionUUID();
|
||||
if (newSessionID != oldSessionID) {
|
||||
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
|
||||
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
|
||||
if (entityTree) {
|
||||
|
@ -3462,15 +3474,23 @@ void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
|
|||
_avatarEntitiesLock.withReadLock([&] {
|
||||
avatarEntityIDs = _packedAvatarEntityData.keys();
|
||||
});
|
||||
bool sendPackets = !DependencyManager::get<NodeList>()->getSessionUUID().isNull();
|
||||
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
|
||||
entityTree->withWriteLock([&] {
|
||||
for (const auto& entityID : avatarEntityIDs) {
|
||||
auto entity = entityTree->findEntityByID(entityID);
|
||||
if (!entity) {
|
||||
continue;
|
||||
}
|
||||
entity->setOwningAvatarID(id);
|
||||
if (entity->getParentID() == oldID) {
|
||||
entity->setParentID(id);
|
||||
// update OwningAvatarID so entity can be identified as "ours" later

|
||||
entity->setOwningAvatarID(newSessionID);
|
||||
// NOTE: each attached AvatarEntity already has the correct updated parentID
|
||||
// via magic in SpatiallyNestable, hence we check against newSessionID
|
||||
if (sendPackets && entity->getParentID() == newSessionID) {
|
||||
// but when we have a real session and the AvatarEntity is parented to MyAvatar
|
||||
// we need to update the "packedAvatarEntityData" sent to the avatar-mixer
|
||||
// because it contains a stale parentID somewhere deep inside
|
||||
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -3555,6 +3575,12 @@ void MyAvatar::clearScaleRestriction() {
|
|||
_haveReceivedHeightLimitsFromDomain = false;
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* A teleport target.
|
||||
* @typedef {object} MyAvatar.GoToProperties
|
||||
* @property {Vec3} position - The avatar's new position.
|
||||
* @property {Quat} [orientation] - The avatar's new orientation.
|
||||
*/
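A sketch of a matching teleport target (illustrative values; assumes the object-form goToLocation() shown below is reachable from the calling context, as it is from QML):

    var target = {
        position: { x: 10, y: 1, z: -4 },                     // new position, in world coordinates
        orientation: Quat.fromPitchYawRollDegrees(0, 90, 0)   // optional new orientation
    };
    MyAvatar.goToLocation(target);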
|
||||
void MyAvatar::goToLocation(const QVariant& propertiesVar) {
|
||||
qCDebug(interfaceapp, "MyAvatar QML goToLocation");
|
||||
auto properties = propertiesVar.toMap();
|
||||
|
@ -3911,6 +3937,13 @@ void MyAvatar::setCollisionWithOtherAvatarsFlags() {
|
|||
_characterController.setPendingFlagsUpdateCollisionMask();
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* A collision capsule is a cylinder with hemispherical ends. It is often used to approximate the extents of an avatar.
|
||||
* @typedef {object} MyAvatar.CollisionCapsule
|
||||
* @property {Vec3} start - The bottom end of the cylinder, excluding the bottom hemisphere.
|
||||
* @property {Vec3} end - The top end of the cylinder, excluding the top hemisphere.
|
||||
* @property {number} radius - The radius of the cylinder and the hemispheres.
|
||||
*/
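For example, a script can inspect the capsule described above. A sketch, assuming the MyAvatar.getCollisionCapsule() getter that returns this structure:

    var capsule = MyAvatar.getCollisionCapsule();
    var cylinderHeight = Vec3.distance(capsule.start, capsule.end);
    print("avatar capsule: radius " + capsule.radius + " m, cylinder height " + cylinderHeight + " m");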
|
||||
void MyAvatar::updateCollisionCapsuleCache() {
|
||||
glm::vec3 start, end;
|
||||
float radius;
|
||||
|
@ -5360,6 +5393,24 @@ void MyAvatar::addAvatarHandsToFlow(const std::shared_ptr<Avatar>& otherAvatar)
|
|||
}
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* Physics options to use in the flow simulation of a joint.
|
||||
* @typedef {object} MyAvatar.FlowPhysicsOptions
|
||||
* @property {boolean} [active=true] - <code>true</code> to enable flow on the joint, otherwise <code>false</code>.
|
||||
* @property {number} [radius=0.01] - The thickness of segments and knots (needed for collisions).
|
||||
* @property {number} [gravity=-0.0096] - Y-value of the gravity vector.
|
||||
* @property {number} [inertia=0.8] - Rotational inertia multiplier.
|
||||
* @property {number} [damping=0.85] - The amount of damping on joint oscillation.
|
||||
* @property {number} [stiffness=0.0] - The stiffness of each thread.
|
||||
* @property {number} [delta=0.55] - Delta time for every integration step.
|
||||
*/
|
||||
/**jsdoc
|
||||
* Collision options to use in the flow simulation of a joint.
|
||||
* @typedef {object} MyAvatar.FlowCollisionsOptions
|
||||
* @property {string} [type="sphere"] - Currently, only <code>"sphere"</code> is supported.
|
||||
* @property {number} [radius=0.05] - Collision sphere radius.
|
||||
* @property {number} [offset=Vec3.ZERO] - Offset of the collision sphere from the joint.
|
||||
*/
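A minimal sketch of driving useFlow() with these two option maps (the group and joint names are illustrative and must match the loaded avatar; the values mirror the defaults listed above):

    MyAvatar.useFlow(true, true,
        { hair: { active: true, radius: 0.01, gravity: -0.0096, damping: 0.85, stiffness: 0.0 } },   // per joint group
        { Head: { type: "sphere", radius: 0.05, offset: { x: 0, y: 0.06, z: 0 } } }                  // per joint
    );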
|
||||
void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& physicsConfig, const QVariantMap& collisionsConfig) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "useFlow",
|
||||
|
@ -5427,6 +5478,39 @@ void MyAvatar::useFlow(bool isActive, bool isCollidable, const QVariantMap& phys
|
|||
}
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* Flow options currently used in flow simulation.
|
||||
* @typedef {object} MyAvatar.FlowData
|
||||
* @property {boolean} initialized - <code>true</code> if flow has been initialized for the current avatar, <code>false</code>
|
||||
* if it hasn't.
|
||||
* @property {boolean} active - <code>true</code> if flow is enabled, <code>false</code> if it isn't.
|
||||
* @property {boolean} colliding - <code>true</code> if collisions are enabled, <code>false</code> if they aren't.
|
||||
* @property {Object<GroupName, MyAvatar.FlowPhysicsData>} physicsData - The physics configuration for each group of joints
|
||||
* that has been configured.
|
||||
* @property {Object<JointName, MyAvatar.FlowCollisionsData>} collisions - The collisions configuration for each joint that
|
||||
* has collisions configured.
|
||||
* @property {Object<ThreadName, number[]>} threads - The threads that have been configured, with the first joint's name as the
|
||||
* <code>ThreadName</code> and value as an array of the indexes of all the joints in the thread.
|
||||
*/
|
||||
/**jsdoc
|
||||
* A set of physics options currently used in flow simulation.
|
||||
* @typedef {object} MyAvatar.FlowPhysicsData
|
||||
* @property {boolean} active - <code>true</code> to enable flow on the joint, otherwise <code>false</code>.
|
||||
* @property {number} radius - The thickness of segments and knots. (Needed for collisions.)
|
||||
* @property {number} gravity - Y-value of the gravity vector.
|
||||
* @property {number} inertia - Rotational inertia multiplier.
|
||||
* @property {number} damping - The amount of damping on joint oscillation.
|
||||
* @property {number} stiffness - The stiffness of each thread.
|
||||
* @property {number} delta - Delta time for every integration step.
|
||||
* @property {number[]} jointIndices - The indexes of the joints the options are applied to.
|
||||
*/
|
||||
/**jsdoc
|
||||
* A set of collision options currently used in flow simulation.
|
||||
* @typedef {object} MyAvatar.FlowCollisionsData
|
||||
* @property {number} radius - Collision sphere radius.
|
||||
* @property {number} offset - Offset of the collision sphere from the joint.
|
||||
* @property {number} jointIndex - The index of the joint the options are applied to.
|
||||
*/
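A sketch of reading this structure back from a client script:

    var flow = MyAvatar.getFlowData();
    if (flow.initialized && flow.active) {
        Object.keys(flow.physicsData).forEach(function (group) {
            print("flow group " + group + ": stiffness " + flow.physicsData[group].stiffness);
        });
    }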
|
||||
QVariantMap MyAvatar::getFlowData() {
|
||||
QVariantMap result;
|
||||
if (QThread::currentThread() != thread()) {
|
||||
|
@ -5523,14 +5607,14 @@ void MyAvatar::initFlowFromFST() {
|
|||
}
|
||||
}
|
||||
|
||||
void MyAvatar::sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const {
|
||||
void MyAvatar::sendPacket(const QUuid& entityID) const {
|
||||
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
|
||||
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
|
||||
if (entityTree) {
|
||||
entityTree->withWriteLock([&] {
|
||||
// force an update packet
|
||||
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
|
||||
packetSender->queueEditEntityMessage(PacketType::EntityEdit, entityTree, entityID, properties);
|
||||
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
@ -228,6 +228,9 @@ void Audio::loadData() {
|
|||
_hmdMuted = _hmdMutedSetting.get();
|
||||
_pttDesktop = _pttDesktopSetting.get();
|
||||
_pttHMD = _pttHMDSetting.get();
|
||||
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
QMetaObject::invokeMethod(client, "setMuted", Q_ARG(bool, isMuted()), Q_ARG(bool, false));
|
||||
}
|
||||
|
||||
bool Audio::getPTTHMD() const {
|
||||
|
|
|
@ -66,7 +66,7 @@ void TTSScriptingInterface::updateLastSoundAudioInjector() {
|
|||
if (_lastSoundAudioInjector) {
|
||||
AudioInjectorOptions options;
|
||||
options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
|
||||
_lastSoundAudioInjector->setOptions(options);
|
||||
DependencyManager::get<AudioInjectorManager>()->setOptions(_lastSoundAudioInjector, options);
|
||||
_lastSoundAudioInjectorUpdateTimer.start(INJECTOR_INTERVAL_MS);
|
||||
}
|
||||
}
|
||||
|
@ -143,7 +143,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
|
|||
options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
|
||||
|
||||
if (_lastSoundAudioInjector) {
|
||||
_lastSoundAudioInjector->stop();
|
||||
DependencyManager::get<AudioInjectorManager>()->stop(_lastSoundAudioInjector);
|
||||
_lastSoundAudioInjectorUpdateTimer.stop();
|
||||
}
|
||||
|
||||
|
@ -151,7 +151,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
|
|||
uint32_t numSamples = (uint32_t)_lastSoundByteArray.size() / sizeof(AudioData::AudioSample);
|
||||
auto samples = reinterpret_cast<AudioData::AudioSample*>(_lastSoundByteArray.data());
|
||||
auto newAudioData = AudioData::make(numSamples, numChannels, samples);
|
||||
_lastSoundAudioInjector = AudioInjector::playSoundAndDelete(newAudioData, options);
|
||||
_lastSoundAudioInjector = DependencyManager::get<AudioInjectorManager>()->playSound(newAudioData, options, true);
|
||||
|
||||
_lastSoundAudioInjectorUpdateTimer.start(INJECTOR_INTERVAL_MS);
|
||||
#else
|
||||
|
@ -161,7 +161,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
|
|||
|
||||
void TTSScriptingInterface::stopLastSpeech() {
|
||||
if (_lastSoundAudioInjector) {
|
||||
_lastSoundAudioInjector->stop();
|
||||
_lastSoundAudioInjector = NULL;
|
||||
DependencyManager::get<AudioInjectorManager>()->stop(_lastSoundAudioInjector);
|
||||
_lastSoundAudioInjector = nullptr;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -199,13 +199,3 @@ void TestScriptingInterface::setOtherAvatarsReplicaCount(int count) {
|
|||
int TestScriptingInterface::getOtherAvatarsReplicaCount() {
|
||||
return qApp->getOtherAvatarsReplicaCount();
|
||||
}
|
||||
|
||||
QString TestScriptingInterface::getOperatingSystemType() {
|
||||
#ifdef Q_OS_WIN
|
||||
return "WINDOWS";
|
||||
#elif defined Q_OS_MAC
|
||||
return "MACOS";
|
||||
#else
|
||||
return "UNKNOWN";
|
||||
#endif
|
||||
}
|
||||
|
|
|
@ -163,13 +163,6 @@ public slots:
|
|||
*/
|
||||
Q_INVOKABLE int getOtherAvatarsReplicaCount();
|
||||
|
||||
/**jsdoc
|
||||
* Returns the Operating System type
|
||||
* @function Test.getOperatingSystemType
|
||||
* @returns {string} "WINDOWS", "MACOS" or "UNKNOWN"
|
||||
*/
|
||||
QString getOperatingSystemType();
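Usage from a script is straightforward (a sketch; the Test interface is only exposed when the test scripting interface is enabled):

    var os = Test.getOperatingSystemType();   // "WINDOWS", "MACOS", or "UNKNOWN"
    print("running on " + os);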
|
||||
|
||||
private:
|
||||
bool waitForCondition(qint64 maxWaitMs, std::function<bool()> condition);
|
||||
QString _testResultsLocation;
|
||||
|
|
|
@ -29,7 +29,7 @@
|
|||
#include <PathUtils.h>
|
||||
#include <ResourceManager.h>
|
||||
#include <SoundCache.h>
|
||||
#include <AudioInjector.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
#include <ui/TabletScriptingInterface.h>
|
||||
|
||||
|
@ -537,7 +537,7 @@ void Keyboard::handleTriggerBegin(const QUuid& id, const PointerEvent& event) {
|
|||
audioOptions.position = keyWorldPosition;
|
||||
audioOptions.volume = 0.05f;
|
||||
|
||||
AudioInjector::playSoundAndDelete(_keySound, audioOptions);
|
||||
DependencyManager::get<AudioInjectorManager>()->playSound(_keySound, audioOptions, true);
|
||||
|
||||
int scanCode = key.getScanCode(_capsEnabled);
|
||||
QString keyString = key.getKeyString(_capsEnabled);
|
||||
|
|
|
@ -19,9 +19,9 @@
|
|||
#include <QtCore/QObject>
|
||||
#include <QTimer>
|
||||
#include <QHash>
|
||||
#include <QUuid>
|
||||
#include <DependencyManager.h>
|
||||
#include <Sound.h>
|
||||
#include <AudioInjector.h>
|
||||
#include <shared/ReadWriteLockable.h>
|
||||
#include <SettingHandle.h>
|
||||
|
||||
|
|
|
@ -266,6 +266,11 @@ void Stats::updateStats(bool force) {
|
|||
}
|
||||
STAT_UPDATE(audioCodec, audioClient->getSelectedAudioFormat());
|
||||
STAT_UPDATE(audioNoiseGate, audioClient->getNoiseGateOpen() ? "Open" : "Closed");
|
||||
{
|
||||
int localInjectors = audioClient->getNumLocalInjectors();
|
||||
size_t nonLocalInjectors = DependencyManager::get<AudioInjectorManager>()->getNumInjectors();
|
||||
STAT_UPDATE(audioInjectors, QVector2D(localInjectors, nonLocalInjectors));
|
||||
}
|
||||
|
||||
STAT_UPDATE(entityPacketsInKbps, octreeServerCount ? totalEntityKbps / octreeServerCount : -1);
|
||||
|
||||
|
|
|
@ -87,6 +87,7 @@ private: \
|
|||
* @property {number} audioPacketLoss - <em>Read-only.</em>
|
||||
* @property {string} audioCodec - <em>Read-only.</em>
|
||||
* @property {string} audioNoiseGate - <em>Read-only.</em>
|
||||
* @property {Vec2} audioInjectors - <em>Read-only.</em>
|
||||
* @property {number} entityPacketsInKbps - <em>Read-only.</em>
|
||||
*
|
||||
* @property {number} downloads - <em>Read-only.</em>
|
||||
|
@ -243,6 +244,7 @@ class Stats : public QQuickItem {
|
|||
STATS_PROPERTY(int, audioPacketLoss, 0)
|
||||
STATS_PROPERTY(QString, audioCodec, QString())
|
||||
STATS_PROPERTY(QString, audioNoiseGate, QString())
|
||||
STATS_PROPERTY(QVector2D, audioInjectors, QVector2D());
|
||||
STATS_PROPERTY(int, entityPacketsInKbps, 0)
|
||||
|
||||
STATS_PROPERTY(int, downloads, 0)
|
||||
|
@ -692,6 +694,13 @@ signals:
|
|||
*/
|
||||
void audioNoiseGateChanged();
|
||||
|
||||
/**jsdoc
|
||||
* Triggered when the value of the <code>audioInjectors</code> property changes.
|
||||
* @function Stats.audioInjectorsChanged
|
||||
* @returns {Signal}
|
||||
*/
|
||||
void audioInjectorsChanged();
|
||||
|
||||
/**jsdoc
|
||||
* Triggered when the value of the <code>entityPacketsInKbps</code> property changes.
|
||||
* @function Stats.entityPacketsInKbpsChanged
|
||||
|
|
|
@ -1711,9 +1711,9 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
|
|||
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
|
||||
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
|
||||
*
|
||||
* @property {boolean} isFacingAvatar - If <code>true< / code>, the overlay is rotated to face the user's camera about an axis
|
||||
* @property {boolean} isFacingAvatar - If <code>true</code>, the overlay is rotated to face the user's camera about an axis
|
||||
* parallel to the user's avatar's "up" direction.
|
||||
* @property {string} text="" - The text to display.Text does not automatically wrap; use <code>\n< / code> for a line break.
|
||||
* @property {string} text="" - The text to display.Text does not automatically wrap; use <code>\n</code> for a line break.
|
||||
* @property {number} textAlpha=1 - The text alpha value.
|
||||
* @property {Color} backgroundColor=0,0,0 - The background color.
|
||||
* @property {number} backgroundAlpha=0.7 - The background alpha value.
|
||||
|
@ -1876,7 +1876,7 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
|
|||
* @property {Vec3} localPosition - The local position of the overlay relative to its parent if the overlay has a
|
||||
* <code>parentID</code> set, otherwise the same value as <code>position</code>.
|
||||
* @property {Quat} localRotation - The orientation of the overlay relative to its parent if the overlay has a
|
||||
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
|
||||
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
|
||||
* @property {boolean} ignorePickIntersection=false - If <code>true</code>, picks ignore the overlay. <code>ignoreRayIntersection</code> is a synonym.
|
||||
* @property {boolean} drawInFront=false - If <code>true</code>, the overlay is rendered in front of objects in the world, but behind the HUD.
|
||||
* @property {boolean} drawHUDLayer=false - If <code>true</code>, the overlay is rendered in front of everything, including the HUD.
|
||||
|
@ -1916,7 +1916,7 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
|
|||
* @property {Vec3} localPosition - The local position of the overlay relative to its parent if the overlay has a
|
||||
* <code>parentID</code> set, otherwise the same value as <code>position</code>.
|
||||
* @property {Quat} localRotation - The orientation of the overlay relative to its parent if the overlay has a
|
||||
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
|
||||
* <code>parentID</code> set, otherwise the same value as <code>rotation</code>. Synonym: <code>localOrientation</code>.
|
||||
* @property {boolean} isSolid=false - Synonyms: <code>solid</code>, <code>isFilled</code>, and <code>filled</code>.
|
||||
* Antonyms: <code>isWire</code> and <code>wire</code>.
|
||||
* @property {boolean} ignorePickIntersection=false - If <code>true</code>, picks ignore the overlay. <code>ignoreRayIntersection</code> is a synonym.
|
||||
|
@ -1927,46 +1927,46 @@ QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
|
|||
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
|
||||
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
|
||||
*
|
||||
* @property {number} startAt = 0 - The counter - clockwise angle from the overlay's x-axis that drawing starts at, in degrees.
|
||||
* @property {number} endAt = 360 - The counter - clockwise angle from the overlay's x-axis that drawing ends at, in degrees.
|
||||
* @property {number} outerRadius = 1 - The outer radius of the overlay, in meters.Synonym: <code>radius< / code>.
|
||||
* @property {number} innerRadius = 0 - The inner radius of the overlay, in meters.
|
||||
* @property {Color} color = 255, 255, 255 - The color of the overlay.Setting this value also sets the values of
|
||||
* <code>innerStartColor< / code>, <code>innerEndColor< / code>, <code>outerStartColor< / code>, and <code>outerEndColor< / code>.
|
||||
* @property {Color} startColor - Sets the values of <code>innerStartColor< / code> and <code>outerStartColor< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {Color} endColor - Sets the values of <code>innerEndColor< / code> and <code>outerEndColor< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {Color} innerColor - Sets the values of <code>innerStartColor< / code> and <code>innerEndColor< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {Color} outerColor - Sets the values of <code>outerStartColor< / code> and <code>outerEndColor< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {number} startAt = 0 - The counter - clockwise angle from the overlay's x-axis that drawing starts at in degrees.
|
||||
* @property {number} endAt = 360 - The counter - clockwise angle from the overlay's x-axis that drawing ends at in degrees.
|
||||
* @property {number} outerRadius = 1 - The outer radius of the overlay in meters. Synonym: <code>radius</code>.
|
||||
* @property {number} innerRadius = 0 - The inner radius of the overlay in meters.
|
||||
* @property {Color} color = 255, 255, 255 - The color of the overlay. Setting this value also sets the values of
|
||||
* <code>innerStartColor</code>, <code>innerEndColor</code>, <code>outerStartColor</code>, and <code>outerEndColor</code>.
|
||||
* @property {Color} startColor - Sets the values of <code>innerStartColor</code> and <code>outerStartColor</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {Color} endColor - Sets the values of <code>innerEndColor</code> and <code>outerEndColor</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {Color} innerColor - Sets the values of <code>innerStartColor</code> and <code>innerEndColor</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {Color} outerColor - Sets the values of <code>outerStartColor</code> and <code>outerEndColor</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {Color} innerStartColor - The color at the inner start point of the overlay.
|
||||
* @property {Color} innerEndColor - The color at the inner end point of the overlay.
|
||||
* @property {Color} outerStartColor - The color at the outer start point of the overlay.
|
||||
* @property {Color} outerEndColor - The color at the outer end point of the overlay.
|
||||
* @property {number} alpha = 0.5 - The opacity of the overlay, <code>0.0< / code> -<code>1.0< / code>.Setting this value also sets
|
||||
* the values of <code>innerStartAlpha< / code>, <code>innerEndAlpha< / code>, <code>outerStartAlpha< / code>, and
|
||||
* <code>outerEndAlpha< / code>.Synonym: <code>Alpha< / code>; <em>write - only< / em>.
|
||||
* @property {number} startAlpha - Sets the values of <code>innerStartAlpha< / code> and <code>outerStartAlpha< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {number} endAlpha - Sets the values of <code>innerEndAlpha< / code> and <code>outerEndAlpha< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {number} innerAlpha - Sets the values of <code>innerStartAlpha< / code> and <code>innerEndAlpha< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {number} outerAlpha - Sets the values of <code>outerStartAlpha< / code> and <code>outerEndAlpha< / code>.
|
||||
* <em>Write - only.< / em>
|
||||
* @property {number} alpha = 0.5 - The opacity of the overlay, <code>0.0</code> -<code>1.0</code>. Setting this value also sets
|
||||
* the values of <code>innerStartAlpha</code>, <code>innerEndAlpha</code>, <code>outerStartAlpha</code>, and
|
||||
* <code>outerEndAlpha</code>. Synonym: <code>Alpha</code>; <em>write - only</em>.
|
||||
* @property {number} startAlpha - Sets the values of <code>innerStartAlpha</code> and <code>outerStartAlpha</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {number} endAlpha - Sets the values of <code>innerEndAlpha</code> and <code>outerEndAlpha</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {number} innerAlpha - Sets the values of <code>innerStartAlpha</code> and <code>innerEndAlpha</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {number} outerAlpha - Sets the values of <code>outerStartAlpha</code> and <code>outerEndAlpha</code>.
|
||||
* <em>Write - only.</em>
|
||||
* @property {number} innerStartAlpha = 0 - The alpha at the inner start point of the overlay.
|
||||
* @property {number} innerEndAlpha = 0 - The alpha at the inner end point of the overlay.
|
||||
* @property {number} outerStartAlpha = 0 - The alpha at the outer start point of the overlay.
|
||||
* @property {number} outerEndAlpha = 0 - The alpha at the outer end point of the overlay.
|
||||
*
|
||||
* @property {boolean} hasTickMarks = false - If <code>true< / code>, tick marks are drawn.
|
||||
* @property {boolean} hasTickMarks = false - If <code>true</code>, tick marks are drawn.
|
||||
* @property {number} majorTickMarksAngle = 0 - The angle between major tick marks, in degrees.
|
||||
* @property {number} minorTickMarksAngle = 0 - The angle between minor tick marks, in degrees.
|
||||
* @property {number} majorTickMarksLength = 0 - The length of the major tick marks, in meters.A positive value draws tick marks
|
||||
* @property {number} majorTickMarksLength = 0 - The length of the major tick marks, in meters. A positive value draws tick marks
|
||||
* outwards from the inner radius; a negative value draws tick marks inwards from the outer radius.
|
||||
* @property {number} minorTickMarksLength = 0 - The length of the minor tick marks, in meters.A positive value draws tick marks
|
||||
* @property {number} minorTickMarksLength = 0 - The length of the minor tick marks, in meters. A positive value draws tick marks
|
||||
* outwards from the inner radius; a negative value draws tick marks inwards from the outer radius.
|
||||
* @property {Color} majorTickMarksColor = 0, 0, 0 - The color of the major tick marks.
|
||||
* @property {Color} minorTickMarksColor = 0, 0, 0 - The color of the minor tick marks.
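These radial properties can be exercised with an ordinary circle3d overlay; a sketch with illustrative values:

    var ring = Overlays.addOverlay("circle3d", {
        position: Vec3.sum(MyAvatar.position, { x: 0, y: 0.5, z: -1 }),   // illustrative offset near the avatar
        startAt: 0,
        endAt: 270,
        innerRadius: 0.15,
        outerRadius: 0.2,
        color: { red: 255, green: 255, blue: 255 },
        alpha: 0.8,
        solid: true
    });
    // Overlays.deleteOverlay(ring); when no longer needed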
|
||||
|
|
|
@ -59,6 +59,46 @@ public:
|
|||
|
||||
float getMaxErrorOnLastSolve() { return _maxErrorOnLastSolve; }
|
||||
|
||||
/**jsdoc
|
||||
* <p>Specifies the initial conditions of the IK solver.</p>
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
* <tr><td><code>0</code></td><td>RelaxToUnderPoses</td><td>This is a blend: it is 15/16 <code>PreviousSolution</code>
|
||||
* and 1/16 <code>UnderPoses</code>. This provides some of the benefits of using <code>UnderPoses</code> so that the
|
||||
* underlying animation is still visible, while at the same time converging faster than using the
|
||||
* <code>UnderPoses</code> as the only initial solution.</td></tr>
|
||||
* <tr><td><code>1</code></td><td>RelaxToLimitCenterPoses</td><td>This is a blend: it is 15/16
|
||||
* <code>PreviousSolution</code> and 1/16 <code>LimitCenterPoses</code>. This should converge quickly because it is
|
||||
* close to the previous solution, but still provides the benefits of avoiding limb locking.</td></tr>
|
||||
* <tr><td><code>2</code></td><td>PreviousSolution</td><td>The IK system will begin to solve from the same position and
|
||||
* orientations for each joint that was the result from the previous frame.<br />
|
||||
* Pros: As the end effectors typically do not move much from frame to frame, this is likely to converge quickly
|
||||
* to a valid solution.<br />
|
||||
* Cons: If the previous solution resulted in an awkward or uncomfortable posture, the next frame will also be
|
||||
* awkward and uncomfortable. It can also result in locked elbows and knees.</td></tr>
|
||||
* <tr><td><code>3</code></td><td>UnderPoses</td><td>The IK occurs at one of the top-most layers. It has access to the
|
||||
* full posture that was computed via canned animations and blends. We call this animated set of poses the "under
|
||||
* pose". The under poses are what would be visible if IK was completely disabled. Using the under poses as the
|
||||
* initial conditions of the CCD solve will cause some of the animated motion to be blended into the result of the
|
||||
* IK. This can result in very natural results, especially if there are only a few IK targets enabled. On the other
|
||||
* hand, because the under poses might be quite far from the desired end effector, it can converge slowly in some
|
||||
* cases, causing it to never reach the IK target in the allotted number of iterations. Also, in situations where all
|
||||
* of the IK targets are being controlled by external sensors, sometimes starting from the under poses can cause
|
||||
* awkward motions from the underlying animations to leak into the IK result.</td></tr>
|
||||
* <tr><td><code>4</code></td><td>LimitCenterPoses</td><td>This pose is taken to be the center of all the joint
|
||||
* constraints. This can prevent the IK solution from getting locked or stuck at a particular constraint. For
|
||||
* example, if the arm is pointing straight outward from the body, as the end effector moves towards the body, at
|
||||
* some point the elbow should bend to accommodate. However, because the CCD solver is stuck at a local maximum, it
|
||||
* will not rotate the elbow, unless the initial conditions already have the elbow bent, which is the case for
|
||||
* <code>LimitCenterPoses</code>. When all the IK targets are enabled, this result will provide a consistent starting
|
||||
* point for each IK solve, hopefully resulting in a consistent, natural result.</td></tr>
|
||||
* </tbody>
|
||||
* </table>
|
||||
* @typedef {number} MyAvatar.AnimIKSolutionSource
|
||||
*/
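These values are consumed through the solutionSource animation variable; a sketch of overriding it from a script via the animation state handler mechanism (value 4 = LimitCenterPoses per the table above):

    var ikHandler = MyAvatar.addAnimationStateHandler(function (props) {
        return { solutionSource: 4 };   // start every IK solve from the limit-center poses
    }, ["solutionSource"]);
    // later: MyAvatar.removeAnimationStateHandler(ikHandler);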
|
||||
enum class SolutionSource {
|
||||
RelaxToUnderPoses = 0,
|
||||
RelaxToLimitCenterPoses,
|
||||
|
|
|
@ -24,6 +24,37 @@ class AnimOverlay : public AnimNode {
|
|||
public:
|
||||
friend class AnimTests;
|
||||
|
||||
/**jsdoc
|
||||
* <p>Specifies sets of joints.</p>
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
* <tr><td><code>0</code></td><td>FullBodyBoneSet</td><td>All joints.</td></tr>
|
||||
* <tr><td><code>1</code></td><td>UpperBodyBoneSet</td><td>Only the "Spine" joint and its children.</td></tr>
|
||||
* <tr><td><code>2</code></td><td>LowerBodyBoneSet</td><td>Only the leg joints and their children.</td></tr>
|
||||
* <tr><td><code>3</code></td><td>LeftArmBoneSet</td><td>Joints that are the children of the "LeftShoulder"
|
||||
* joint.</td></tr>
|
||||
* <tr><td><code>4</code></td><td>RightArmBoneSet</td><td>Joints that are the children of the "RightShoulder"
|
||||
* joint.</td></tr>
|
||||
* <tr><td><code>5</code></td><td>AboveTheHeadBoneSet</td><td>Joints that are the children of the "Head"
|
||||
* joint.</td></tr>
|
||||
* <tr><td><code>6</code></td><td>BelowTheHeadBoneSet</td><td>Joints that are NOT the children of the "head"
|
||||
* joint.</td></tr>
|
||||
* <tr><td><code>7</code></td><td>HeadOnlyBoneSet</td><td>The "Head" joint.</td></tr>
|
||||
* <tr><td><code>8</code></td><td>SpineOnlyBoneSet</td><td>The "Spine" joint.</td></tr>
|
||||
* <tr><td><code>9</code></td><td>EmptyBoneSet</td><td>No joints.</td></tr>
|
||||
* <tr><td><code>10</code></td><td>LeftHandBoneSet</td><td>joints that are the children of the "LeftHand"
|
||||
* joint.</td></tr>
|
||||
* <tr><td><code>11</code></td><td>RightHandBoneSet</td><td>Joints that are the children of the "RightHand"
|
||||
* joint.</td></tr>
|
||||
* <tr><td><code>12</code></td><td>HipsOnlyBoneSet</td><td>The "Hips" joint.</td></tr>
|
||||
* <tr><td><code>13</code></td><td>BothFeetBoneSet</td><td>The "LeftFoot" and "RightFoot" joints.</td></tr>
|
||||
* </tbody>
|
||||
* </table>
|
||||
* @typedef {number} MyAvatar.AnimOverlayBoneSet
|
||||
*/
|
||||
enum BoneSet {
|
||||
FullBodyBoneSet = 0,
|
||||
UpperBodyBoneSet,
|
||||
|
|
|
@ -16,6 +16,27 @@ const float HACK_HMD_TARGET_WEIGHT = 8.0f;
|
|||
|
||||
class IKTarget {
|
||||
public:
|
||||
/**jsdoc
|
||||
* <p>An IK target type.</p>
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
* <tr><td><code>0</code></td><td>RotationAndPosition</td><td>Attempt to reach the rotation and position end
|
||||
* effector.</td></tr>
|
||||
* <tr><td><code>1</code></td><td>RotationOnly</td><td>Attempt to reach the end effector rotation only.</td></tr>
|
||||
* <tr><td><code>2</code></td><td>HmdHead</td><td><strong>Deprecated:</strong> A special mode of IK that would attempt
|
||||
* to prevent unnecessary bending of the spine.</td></tr>
|
||||
* <tr><td><code>3</code></td><td>HipsRelativeRotationAndPosition</td><td>Attempt to reach a rotation and position end
|
||||
* effector that is not in absolute rig coordinates but is offset by the avatar hips translation.</td></tr>
|
||||
* <tr><td><code>4</code></td><td>Spline</td><td>Use a cubic Hermite spline to model the human spine. This prevents
|
||||
* kinks in the spine and allows for a small amount of stretch and squash.</td></tr>
|
||||
* <tr><td><code>5</code></td><td>Unknown</td><td>IK is disabled.</td></tr>
|
||||
* </tbody>
|
||||
* </table>
|
||||
* @typedef {number} MyAvatar.IKTargetType
|
||||
*/
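The per-chain animation variables (headType, leftHandType, and so on) carry these values; a sketch that simply observes them through an animation state handler:

    var typeHandler = MyAvatar.addAnimationStateHandler(function (props) {
        // props.headType and props.leftHandType are MyAvatar.IKTargetType values from the table above
        print("head IK type " + props.headType + ", left hand IK type " + props.leftHandType);
        return props;   // pass the values through unchanged
    }, ["headType", "leftHandType"]);
    // later: MyAvatar.removeAnimationStateHandler(typeHandler);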
|
||||
enum class Type {
|
||||
RotationAndPosition,
|
||||
RotationOnly,
|
||||
|
|
|
@ -88,6 +88,218 @@ static const QString MAIN_STATE_MACHINE_RIGHT_HAND_ROTATION("mainStateMachineRig
|
|||
static const QString MAIN_STATE_MACHINE_RIGHT_HAND_POSITION("mainStateMachineRightHandPosition");
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* <p>An <code>AnimStateDictionary</code> object may have the following properties. It may also have other properties, set by
|
||||
* scripts.</p>
|
||||
* <p><strong>Warning:</strong> These properties are subject to change.</p>
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Name</th><th>Type</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
* <tr><td><code>userAnimNone</code></td><td>boolean</td><td><code>true</code> when no user overrideAnimation is
|
||||
* playing.</td></tr>
|
||||
* <tr><td><code>userAnimA</code></td><td>boolean</td><td><code>true</code> when a user overrideAnimation is
|
||||
* playing.</td></tr>
|
||||
* <tr><td><code>userAnimB</code></td><td>boolean</td><td><code>true</code> when a user overrideAnimation is
|
||||
* playing.</td></tr>
|
||||
*
|
||||
* <tr><td><code>sine</code></td><td>number</td><td>Oscillating sine wave.</td></tr>
|
||||
* <tr><td><code>moveForwardSpeed</code></td><td>number</td><td>Controls the blend between the various forward walking
|
||||
* & running animations.</td></tr>
|
||||
* <tr><td><code>moveBackwardSpeed</code></td><td>number</td><td>Controls the blend between the various backward walking
|
||||
* & running animations.</td></tr>
|
||||
* <tr><td><code>moveLateralSpeed</code></td><td>number</td><td>Controls the blend between the various sidestep walking
|
||||
* & running animations.</td></tr>
|
||||
*
|
||||
* <tr><td><code>isMovingForward</code></td><td>boolean</td><td><code>true</code> if the avatar is moving
|
||||
* forward.</td></tr>
|
||||
* <tr><td><code>isMovingBackward</code></td><td>boolean</td><td><code>true</code> if the avatar is moving
|
||||
* backward.</td></tr>
|
||||
* <tr><td><code>isMovingRight</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the
|
||||
* right.</td></tr>
|
||||
* <tr><td><code>isMovingLeft</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the
|
||||
* left.</td></tr>
|
||||
* <tr><td><code>isMovingRightHmd</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the right
|
||||
* while the user is in HMD mode.</td></tr>
|
||||
* <tr><td><code>isMovingLeftHmd</code></td><td>boolean</td><td><code>true</code> if the avatar is moving to the left while
|
||||
* the user is in HMD mode.</td></tr>
|
||||
* <tr><td><code>isNotMoving</code></td><td>boolean</td><td><code>true</code> if the avatar is stationary.</td></tr>
|
||||
*
|
||||
* <tr><td><code>isTurningRight</code></td><td>boolean</td><td><code>true</code> if the avatar is turning
|
||||
* clockwise.</td></tr>
|
||||
* <tr><td><code>isTurningLeft</code></td><td>boolean</td><td><code>true</code> if the avatar is turning
|
||||
* counter-clockwise.</td></tr>
|
||||
* <tr><td><code>isNotTurning</code></td><td>boolean</td><td><code>true</code> if the avatar is not turning.</td></tr>
|
||||
* <tr><td><code>isFlying</code></td><td>boolean</td><td><code>true</code> if the avatar is flying.</td></tr>
|
||||
* <tr><td><code>isNotFlying</code></td><td>boolean</td><td><code>true</code> if the avatar is not flying.</td></tr>
|
||||
* <tr><td><code>isTakeoffStand</code></td><td>boolean</td><td><code>true</code> if the avatar is about to execute a
|
||||
* standing jump.</td></tr>
|
||||
* <tr><td><code>isTakeoffRun</code></td><td>boolean</td><td><code>true</code> if the avatar is about to execute a running
|
||||
* jump.</td></tr>
|
||||
* <tr><td><code>isNotTakeoff</code></td><td>boolean</td><td><code>true</code> if the avatar is not jumping.</td></tr>
|
||||
* <tr><td><code>isInAirStand</code></td><td>boolean</td><td><code>true</code> if the avatar is in the air after a standing
|
||||
* jump.</td></tr>
|
||||
* <tr><td><code>isInAirRun</code></td><td>boolean</td><td><code>true</code> if the avatar is in the air after a running
|
||||
* jump.</td></tr>
|
||||
* <tr><td><code>isNotInAir</code></td><td>boolean</td><td><code>true</code> if the avatar is on the ground.</td></tr>
|
||||
*
|
||||
* <tr><td><code>inAirAlpha</code></td><td>number</td><td>Used to interpolate between the up, apex, and down in-air
|
||||
* animations.</td></tr>
|
||||
* <tr><td><code>ikOverlayAlpha</code></td><td>number</td><td>The blend between upper body and spline IK versus the
|
||||
* underlying animation.</td></tr>
|
||||
*
|
||||
* <tr><td><code>headPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Head</code> joint in
|
||||
* rig coordinates.</td></tr>
|
||||
* <tr><td><code>headRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Head</code> joint in
|
||||
* rig coordinates.</td></tr>
|
||||
* <tr><td><code>headType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
|
||||
* head.</td></tr>
|
||||
* <tr><td><code>headWeight</code></td><td>number</td><td>How strongly the head chain blends with the other IK
|
||||
* chains.</td></tr>
|
||||
*
|
||||
* <tr><td><code>leftHandPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>LeftHand</code>
|
||||
* joint in rig coordinates.</td></tr>
|
||||
* <tr><td><code>leftHandRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>LeftHand</code>
|
||||
* joint in rig coordinates.</td></tr>
|
||||
* <tr><td><code>leftHandType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
|
||||
* left arm.</td></tr>
|
||||
* <tr><td><code>leftHandPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the elbow angle is
|
||||
* controlled by the <code>leftHandPoleVector</code> property value. Otherwise the elbow direction comes from the
|
||||
* underlying animation.</td></tr>
|
||||
* <tr><td><code>leftHandPoleReferenceVector</code></td><td>{@link Vec3}</td><td>The direction of the elbow in the local
|
||||
* coordinate system of the elbow.</td></tr>
|
||||
* <tr><td><code>leftHandPoleVector</code></td><td>{@link Vec3}</td><td>The direction the elbow should point in rig
|
||||
* coordinates.</td></tr>
|
||||
*
|
||||
* <tr><td><code>rightHandPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>RightHand</code>
|
||||
* joint in rig coordinates.</td></tr>
|
||||
* <tr><td><code>rightHandRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the
|
||||
* <code>RightHand</code> joint in rig coordinates.</td></tr>
|
||||
* <tr><td><code>rightHandType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for
|
||||
* the right arm.</td></tr>
|
||||
* <tr><td><code>rightHandPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the elbow angle is
|
||||
* controlled by the <code>rightHandPoleVector</code> property value. Otherwise the elbow direction comes from the
|
||||
* underlying animation.</td></tr>
|
||||
* <tr><td><code>rightHandPoleReferenceVector</code></td><td>{@link Vec3}</td><td>The direction of the elbow in the local
|
||||
* coordinate system of the elbow.</td></tr>
|
||||
* <tr><td><code>rightHandPoleVector</code></td><td>{@link Vec3}</td><td>The direction the elbow should point in rig
|
||||
* coordinates.</td></tr>
|
||||
*
|
||||
* <tr><td><code>leftFootIKEnabled</code></td><td>boolean</td><td><code>true</code> if IK is enabled for the left
|
||||
* foot.</td></tr>
|
||||
* <tr><td><code>rightFootIKEnabled</code></td><td>boolean</td><td><code>true</code> if IK is enabled for the right
|
||||
* foot.</td></tr>
|
||||
*
|
||||
* <tr><td><code>leftFootIKPositionVar</code></td><td>string</td><td>The name of the source for the desired position
|
||||
* of the <code>LeftFoot</code> joint. If not set, the foot position of the underlying animation will be used.</td></tr>
|
||||
* <tr><td><code>leftFootIKRotationVar</code></td><td>string</td><td>The name of the source for the desired rotation
|
||||
* of the <code>LeftFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
|
||||
* <tr><td><code>leftFootPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the knee angle is
|
||||
* controlled by the <code>leftFootPoleVector</code> property value. Otherwise the knee direction comes from the
|
||||
* underlying animation.</td></tr>
|
||||
* <tr><td><code>leftFootPoleVector</code></td><td>{@link Vec3}</td><td>The direction the knee should face in rig
|
||||
* coordinates.</td></tr>
|
||||
* <tr><td><code>rightFootIKPositionVar</code></td><td>string</td><td>The name of the source for the desired position
|
||||
* of the <code>RightFoot</code> joint. If not set, the foot position of the underlying animation will be used.</td></tr>
|
||||
* <tr><td><code>rightFootIKRotationVar</code></td><td>string</td><td>The name of the source for the desired rotation
|
||||
* of the <code>RightFoot</code> joint. If not set, the foot rotation of the underlying animation will be used.</td></tr>
|
||||
* <tr><td><code>rightFootPoleVectorEnabled</code></td><td>boolean</td><td>When <code>true</code>, the knee angle is
|
||||
* controlled by the <code>rightFootPoleVector</code> property value. Otherwise the knee direction comes from the
|
||||
* underlying animation.</td></tr>
|
||||
* <tr><td><code>rightFootPoleVector</code></td><td>{@link Vec3}</td><td>The direction the knee should face in rig
|
||||
* coordinates.</td></tr>
|
||||
*
|
||||
* <tr><td><code>isTalking</code></td><td>boolean</td><td><code>true</code> if the avatar is talking.</td></tr>
|
||||
* <tr><td><code>notIsTalking</code></td><td>boolean</td><td><code>true</code> if the avatar is not talking.</td></tr>
|
||||
*
|
||||
* <tr><td><code>solutionSource</code></td><td>{@link MyAvatar.AnimIKSolutionSource|AnimIKSolutionSource}</td>
|
||||
* <td>Determines the initial conditions of the IK solver.</td></tr>
|
||||
* <tr><td><code>defaultPoseOverlayAlpha</code></td><td>number</td><td>Controls the blend between the main animation state
|
||||
* machine and the default pose. Mostly used during full body tracking so that walking & jumping animations do not
|
||||
* affect the IK of the figure.</td></tr>
|
||||
* <tr><td><code>defaultPoseOverlayBoneSet</code></td><td>{@link MyAvatar.AnimOverlayBoneSet|AnimOverlayBoneSet}</td>
|
||||
 * <td>Specifies which bones will be replaced by the source overlay.</td></tr>
|
||||
* <tr><td><code>hipsType</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
|
||||
* hips.</td></tr>
|
||||
 * <tr><td><code>hipsPosition</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Hips</code> joint in rig
|
||||
* coordinates.</td></tr>
|
||||
 * <tr><td><code>hipsRotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Hips</code> joint in
|
||||
* rig coordinates.</td></tr>
|
||||
* <tr><td><code>spine2Type</code></td><td>{@link MyAvatar.IKTargetType|IKTargetType}</td><td>The type of IK used for the
|
||||
* <code>Spine2</code> joint.</td></tr>
|
||||
* <tr><td><code>spine2Position</code></td><td>{@link Vec3}</td><td>The desired position of the <code>Spine2</code> joint
|
||||
* in rig coordinates.</td></tr>
|
||||
* <tr><td><code>spine2Rotation</code></td><td>{@link Quat}</td><td>The desired orientation of the <code>Spine2</code>
|
||||
* joint in rig coordinates.</td></tr>
|
||||
*
|
||||
* <tr><td><code>leftFootIKAlpha</code></td><td>number</td><td>Blends between full IK for the leg and the underlying
|
||||
* animation.</td></tr>
|
||||
* <tr><td><code>rightFootIKAlpha</code></td><td>number</td><td>Blends between full IK for the leg and the underlying
|
||||
* animation.</td></tr>
|
||||
* <tr><td><code>hipsWeight</code></td><td>number</td><td>How strongly the hips target blends with the IK solution for
|
||||
* other IK chains.</td></tr>
|
||||
* <tr><td><code>leftHandWeight</code></td><td>number</td><td>How strongly the left hand blends with IK solution of other
|
||||
* IK chains.</td></tr>
|
||||
* <tr><td><code>rightHandWeight</code></td><td>number</td><td>How strongly the right hand blends with IK solution of other
|
||||
* IK chains.</td></tr>
|
||||
* <tr><td><code>spine2Weight</code></td><td>number</td><td>How strongly the spine2 chain blends with the rest of the IK
|
||||
* solution.</td></tr>
|
||||
*
|
||||
* <tr><td><code>leftHandOverlayAlpha</code></td><td>number</td><td>Used to blend in the animated hand gesture poses, such
|
||||
* as point and thumbs up.</td></tr>
|
||||
* <tr><td><code>leftHandGraspAlpha</code></td><td>number</td><td>Used to blend between an open hand and a closed hand.
|
||||
* Usually changed as you squeeze the trigger of the hand controller.</td></tr>
|
||||
* <tr><td><code>rightHandOverlayAlpha</code></td><td>number</td><td>Used to blend in the animated hand gesture poses,
|
||||
* such as point and thumbs up.</td></tr>
|
||||
* <tr><td><code>rightHandGraspAlpha</code></td><td>number</td><td>Used to blend between an open hand and a closed hand.
|
||||
* Usually changed as you squeeze the trigger of the hand controller.</td></tr>
|
||||
* <tr><td><code>isLeftIndexPoint</code></td><td>boolean</td><td><code>true</code> if the left hand should be
|
||||
* pointing.</td></tr>
|
||||
* <tr><td><code>isLeftThumbRaise</code></td><td>boolean</td><td><code>true</code> if the left hand should be
|
||||
* thumbs-up.</td></tr>
|
||||
* <tr><td><code>isLeftIndexPointAndThumbRaise</code></td><td>boolean</td><td><code>true</code> if the left hand should be
|
||||
* pointing and thumbs-up.</td></tr>
|
||||
* <tr><td><code>isLeftHandGrasp</code></td><td>boolean</td><td><code>true</code> if the left hand should be at rest,
|
||||
* grasping the controller.</td></tr>
|
||||
* <tr><td><code>isRightIndexPoint</code></td><td>boolean</td><td><code>true</code> if the right hand should be
|
||||
* pointing.</td></tr>
|
||||
* <tr><td><code>isRightThumbRaise</code></td><td>boolean</td><td><code>true</code> if the right hand should be
|
||||
* thumbs-up.</td></tr>
|
||||
* <tr><td><code>isRightIndexPointAndThumbRaise</code></td><td>boolean</td><td><code>true</code> if the right hand should
|
||||
* be pointing and thumbs-up.</td></tr>
|
||||
* <tr><td><code>isRightHandGrasp</code></td><td>boolean</td><td><code>true</code> if the right hand should be at rest,
|
||||
* grasping the controller.</td></tr>
|
||||
*
|
||||
* </tbody>
|
||||
* </table>
|
||||
* <p>Note: Rig coordinates are <code>+z</code> forward and <code>+y</code> up.</p>
|
||||
* @typedef {object} MyAvatar.AnimStateDictionary
|
||||
*/
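From script, these animVars are typically read and overridden through MyAvatar.addAnimationStateHandler(). A minimal sketch, assuming only the property names documented in the table above (the numeric IKTargetType value is an assumption):

// Watch the talking flag and, while talking, pin the right hand with an IK target in rig coordinates (+z forward, +y up).
var handlerId = MyAvatar.addAnimationStateHandler(function (props) {
    if (props.isTalking) {
        return {
            rightHandType: 0,                               // assumed value for the rotation-and-position IKTargetType
            rightHandPosition: { x: 0.3, y: 1.0, z: 0.3 },
            rightHandRotation: Quat.IDENTITY
        };
    }
    return props;                                           // otherwise leave the underlying animation untouched
}, ["isTalking", "rightHandType", "rightHandPosition", "rightHandRotation"]);

// Remove the handler when it is no longer needed:
// MyAvatar.removeAnimationStateHandler(handlerId);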
|
||||
// Note: The following animVars are intentionally not documented:
|
||||
// - leftFootPosition
|
||||
// - leftFootRotation
|
||||
// - rightFootPosition
|
||||
// - rightFootRotation
|
||||
// Note: The following items aren't set in the code below but are still intentionally documented:
|
||||
// - leftFootIKAlpha
|
||||
// - rightFootIKAlpha
|
||||
// - hipsWeight
|
||||
// - leftHandWeight
|
||||
// - rightHandWeight
|
||||
// - spine2Weight
|
||||
// - rightHandOverlayAlpha
|
||||
// - rightHandGraspAlpha
|
||||
// - leftHandOverlayAlpha
|
||||
// - leftHandGraspAlpha
|
||||
// - isRightIndexPoint
|
||||
// - isRightThumbRaise
|
||||
// - isRightIndexPointAndThumbRaise
|
||||
// - isRightHandGrasp
|
||||
// - isLeftIndexPoint
|
||||
// - isLeftThumbRaise
|
||||
// - isLeftIndexPointAndThumbRaise
|
||||
// - isLeftHandGrasp
|
||||
Rig::Rig() {
|
||||
// Ensure thread-safe access to the rigRegistry.
|
||||
std::lock_guard<std::mutex> guard(rigRegistryMutex);
|
||||
|
|
|
@ -1354,26 +1354,28 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
|
|||
|
||||
for (const AudioInjectorPointer& injector : _activeLocalAudioInjectors) {
|
||||
// the lock guarantees that injectorBuffer, if found, is invariant
|
||||
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
|
||||
auto injectorBuffer = injector->getLocalBuffer();
|
||||
if (injectorBuffer) {
|
||||
|
||||
auto options = injector->getOptions();
|
||||
|
||||
static const int HRTF_DATASET_INDEX = 1;
|
||||
|
||||
int numChannels = injector->isAmbisonic() ? AudioConstants::AMBISONIC : (injector->isStereo() ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
int numChannels = options.ambisonic ? AudioConstants::AMBISONIC : (options.stereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
size_t bytesToRead = numChannels * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
|
||||
// get one frame from the injector
|
||||
memset(_localScratchBuffer, 0, bytesToRead);
|
||||
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
|
||||
|
||||
float gain = injector->getVolume();
|
||||
float gain = options.volume;
|
||||
|
||||
if (injector->isAmbisonic()) {
|
||||
if (options.ambisonic) {
|
||||
|
||||
if (injector->isPositionSet()) {
|
||||
if (options.positionSet) {
|
||||
|
||||
// distance attenuation
|
||||
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
|
||||
glm::vec3 relativePosition = options.position - _positionGetter();
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
gain = gainForSource(distance, gain);
|
||||
}
|
||||
|
@ -1382,7 +1384,7 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
|
|||
// Calculate the soundfield orientation relative to the listener.
|
||||
// Injector orientation can be used to align a recording to our world coordinates.
|
||||
//
|
||||
glm::quat relativeOrientation = injector->getOrientation() * glm::inverse(_orientationGetter());
|
||||
glm::quat relativeOrientation = options.orientation * glm::inverse(_orientationGetter());
|
||||
|
||||
// convert from Y-up (OpenGL) to Z-up (Ambisonic) coordinate system
|
||||
float qw = relativeOrientation.w;
|
||||
|
@ -1394,12 +1396,12 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
|
|||
injector->getLocalFOA().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
|
||||
qw, qx, qy, qz, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
} else if (injector->isStereo()) {
|
||||
} else if (options.stereo) {
|
||||
|
||||
if (injector->isPositionSet()) {
|
||||
if (options.positionSet) {
|
||||
|
||||
// distance attenuation
|
||||
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
|
||||
glm::vec3 relativePosition = options.position - _positionGetter();
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
gain = gainForSource(distance, gain);
|
||||
}
|
||||
|
@ -1412,10 +1414,10 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
|
|||
|
||||
} else { // injector is mono
|
||||
|
||||
if (injector->isPositionSet()) {
|
||||
if (options.positionSet) {
|
||||
|
||||
// distance attenuation
|
||||
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
|
||||
glm::vec3 relativePosition = options.position - _positionGetter();
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
gain = gainForSource(distance, gain);
|
||||
|
||||
|
@ -1437,21 +1439,21 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
|
|||
|
||||
} else {
|
||||
|
||||
qCDebug(audioclient) << "injector has no more data, marking finished for removal";
|
||||
//qCDebug(audioclient) << "injector has no more data, marking finished for removal";
|
||||
injector->finishLocalInjection();
|
||||
injectorsToRemove.append(injector);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
|
||||
//qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
|
||||
injector->finishLocalInjection();
|
||||
injectorsToRemove.append(injector);
|
||||
}
|
||||
}
|
||||
|
||||
for (const AudioInjectorPointer& injector : injectorsToRemove) {
|
||||
qCDebug(audioclient) << "removing injector";
|
||||
//qCDebug(audioclient) << "removing injector";
|
||||
_activeLocalAudioInjectors.removeOne(injector);
|
||||
}
|
||||
|
||||
|
@ -1571,15 +1573,13 @@ bool AudioClient::setIsStereoInput(bool isStereoInput) {
|
|||
}
|
||||
|
||||
bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
|
||||
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
|
||||
auto injectorBuffer = injector->getLocalBuffer();
|
||||
if (injectorBuffer) {
|
||||
// local injectors are on the AudioInjectorsThread, so we must guard access
|
||||
Lock lock(_injectorsMutex);
|
||||
if (!_activeLocalAudioInjectors.contains(injector)) {
|
||||
qCDebug(audioclient) << "adding new injector";
|
||||
//qCDebug(audioclient) << "adding new injector";
|
||||
_activeLocalAudioInjectors.append(injector);
|
||||
// move local buffer to the LocalAudioThread to avoid data races with AudioInjector (like stop())
|
||||
injectorBuffer->setParent(nullptr);
|
||||
|
||||
// update the flag
|
||||
_localInjectorsAvailable.exchange(true, std::memory_order_release);
|
||||
|
@ -1595,6 +1595,11 @@ bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
|
|||
}
|
||||
}
|
||||
|
||||
int AudioClient::getNumLocalInjectors() {
|
||||
Lock lock(_injectorsMutex);
|
||||
return _activeLocalAudioInjectors.size();
|
||||
}
|
||||
|
||||
void AudioClient::outputFormatChanged() {
|
||||
_outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * OUTPUT_CHANNEL_COUNT * _outputFormat.sampleRate()) /
|
||||
_desiredOutputFormat.sampleRate();
|
||||
|
|
|
@ -181,6 +181,8 @@ public:
|
|||
bool isHeadsetPluggedIn() { return _isHeadsetPluggedIn; }
|
||||
#endif
|
||||
|
||||
int getNumLocalInjectors();
|
||||
|
||||
public slots:
|
||||
void start();
|
||||
void stop();
|
||||
|
|
|
@ -24,9 +24,10 @@
|
|||
#include "AudioRingBuffer.h"
|
||||
#include "AudioLogging.h"
|
||||
#include "SoundCache.h"
|
||||
#include "AudioSRC.h"
|
||||
#include "AudioHelpers.h"
|
||||
|
||||
int metaType = qRegisterMetaType<AudioInjectorPointer>("AudioInjectorPointer");
|
||||
|
||||
AbstractAudioInterface* AudioInjector::_localAudioInterface{ nullptr };
|
||||
|
||||
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
|
||||
|
@ -51,26 +52,30 @@ AudioInjector::AudioInjector(AudioDataPointer audioData, const AudioInjectorOpti
|
|||
{
|
||||
}
|
||||
|
||||
AudioInjector::~AudioInjector() {
|
||||
deleteLocalBuffer();
|
||||
}
|
||||
AudioInjector::~AudioInjector() {}
|
||||
|
||||
bool AudioInjector::stateHas(AudioInjectorState state) const {
|
||||
return (_state & state) == state;
|
||||
return resultWithReadLock<bool>([&] {
|
||||
return (_state & state) == state;
|
||||
});
|
||||
}
|
||||
|
||||
void AudioInjector::setOptions(const AudioInjectorOptions& options) {
|
||||
// since options.stereo is computed from the audio stream,
|
||||
// we need to copy it from existing options just in case.
|
||||
bool currentlyStereo = _options.stereo;
|
||||
bool currentlyAmbisonic = _options.ambisonic;
|
||||
_options = options;
|
||||
_options.stereo = currentlyStereo;
|
||||
_options.ambisonic = currentlyAmbisonic;
|
||||
withWriteLock([&] {
|
||||
bool currentlyStereo = _options.stereo;
|
||||
bool currentlyAmbisonic = _options.ambisonic;
|
||||
_options = options;
|
||||
_options.stereo = currentlyStereo;
|
||||
_options.ambisonic = currentlyAmbisonic;
|
||||
});
|
||||
}
|
||||
|
||||
void AudioInjector::finishNetworkInjection() {
|
||||
_state |= AudioInjectorState::NetworkInjectionFinished;
|
||||
withWriteLock([&] {
|
||||
_state |= AudioInjectorState::NetworkInjectionFinished;
|
||||
});
|
||||
|
||||
// if we are already finished with local
|
||||
// injection, then we are finished
|
||||
|
@ -80,35 +85,31 @@ void AudioInjector::finishNetworkInjection() {
|
|||
}
|
||||
|
||||
void AudioInjector::finishLocalInjection() {
|
||||
_state |= AudioInjectorState::LocalInjectionFinished;
|
||||
if(_options.localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "finishLocalInjection");
|
||||
return;
|
||||
}
|
||||
|
||||
bool localOnly = false;
|
||||
withWriteLock([&] {
|
||||
_state |= AudioInjectorState::LocalInjectionFinished;
|
||||
localOnly = _options.localOnly;
|
||||
});
|
||||
|
||||
if(localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
void AudioInjector::finish() {
|
||||
_state |= AudioInjectorState::Finished;
|
||||
|
||||
withWriteLock([&] {
|
||||
_state |= AudioInjectorState::Finished;
|
||||
});
|
||||
emit finished();
|
||||
|
||||
deleteLocalBuffer();
|
||||
_localBuffer = nullptr;
|
||||
}
|
||||
|
||||
void AudioInjector::restart() {
|
||||
// grab the AudioInjectorManager
|
||||
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
|
||||
|
||||
if (thread() != QThread::currentThread()) {
|
||||
QMetaObject::invokeMethod(this, "restart");
|
||||
|
||||
if (!_options.localOnly) {
|
||||
// notify the AudioInjectorManager to wake up in case it's waiting for new injectors
|
||||
injectorManager->notifyInjectorReadyCondition();
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// reset the current send offset to zero
|
||||
_currentSendOffset = 0;
|
||||
|
||||
|
@ -121,19 +122,23 @@ void AudioInjector::restart() {
|
|||
|
||||
// check our state to decide if we need extra handling for the restart request
|
||||
if (stateHas(AudioInjectorState::Finished)) {
|
||||
if (!inject(&AudioInjectorManager::restartFinishedInjector)) {
|
||||
if (!inject(&AudioInjectorManager::threadInjector)) {
|
||||
qWarning() << "AudioInjector::restart failed to thread injector";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&)) {
|
||||
_state = AudioInjectorState::NotFinished;
|
||||
AudioInjectorOptions options;
|
||||
withWriteLock([&] {
|
||||
_state = AudioInjectorState::NotFinished;
|
||||
options = _options;
|
||||
});
|
||||
|
||||
int byteOffset = 0;
|
||||
if (_options.secondOffset > 0.0f) {
|
||||
int numChannels = _options.ambisonic ? 4 : (_options.stereo ? 2 : 1);
|
||||
byteOffset = (int)(AudioConstants::SAMPLE_RATE * _options.secondOffset * numChannels);
|
||||
if (options.secondOffset > 0.0f) {
|
||||
int numChannels = options.ambisonic ? 4 : (options.stereo ? 2 : 1);
|
||||
byteOffset = (int)(AudioConstants::SAMPLE_RATE * options.secondOffset * numChannels);
|
||||
byteOffset *= AudioConstants::SAMPLE_SIZE;
|
||||
}
|
||||
_currentSendOffset = byteOffset;
|
||||
|
@ -143,7 +148,7 @@ bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInj
|
|||
}
|
||||
|
||||
bool success = true;
|
||||
if (!_options.localOnly) {
|
||||
if (!options.localOnly) {
|
||||
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
|
||||
if (!(*injectorManager.*injection)(sharedFromThis())) {
|
||||
success = false;
|
||||
|
@ -158,7 +163,8 @@ bool AudioInjector::injectLocally() {
|
|||
if (_localAudioInterface) {
|
||||
if (_audioData->getNumBytes() > 0) {
|
||||
|
||||
_localBuffer = new AudioInjectorLocalBuffer(_audioData);
|
||||
_localBuffer = QSharedPointer<AudioInjectorLocalBuffer>(new AudioInjectorLocalBuffer(_audioData), &AudioInjectorLocalBuffer::deleteLater);
|
||||
_localBuffer->moveToThread(thread());
|
||||
|
||||
_localBuffer->open(QIODevice::ReadOnly);
|
||||
_localBuffer->setShouldLoop(_options.loop);
|
||||
|
@ -181,14 +187,6 @@ bool AudioInjector::injectLocally() {
|
|||
return success;
|
||||
}
|
||||
|
||||
void AudioInjector::deleteLocalBuffer() {
|
||||
if (_localBuffer) {
|
||||
_localBuffer->stop();
|
||||
_localBuffer->deleteLater();
|
||||
_localBuffer = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const uchar MAX_INJECTOR_VOLUME = packFloatGainToByte(1.0f);
|
||||
static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
|
||||
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;
|
||||
|
@ -220,6 +218,10 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
static int volumeOptionOffset = -1;
|
||||
static int audioDataOffset = -1;
|
||||
|
||||
AudioInjectorOptions options = resultWithReadLock<AudioInjectorOptions>([&] {
|
||||
return _options;
|
||||
});
|
||||
|
||||
if (!_currentPacket) {
|
||||
if (_currentSendOffset < 0 ||
|
||||
_currentSendOffset >= (int)_audioData->getNumBytes()) {
|
||||
|
@ -253,7 +255,7 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
audioPacketStream << QUuid::createUuid();
|
||||
|
||||
// pack the stereo/mono type of the stream
|
||||
audioPacketStream << _options.stereo;
|
||||
audioPacketStream << options.stereo;
|
||||
|
||||
// pack the flag for loopback, if requested
|
||||
loopbackOptionOffset = _currentPacket->pos();
|
||||
|
@ -262,15 +264,16 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
|
||||
// pack the position for injected audio
|
||||
positionOptionOffset = _currentPacket->pos();
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
|
||||
sizeof(_options.position));
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.position),
|
||||
sizeof(options.position));
|
||||
|
||||
// pack our orientation for injected audio
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.orientation),
|
||||
sizeof(_options.orientation));
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.orientation),
|
||||
sizeof(options.orientation));
|
||||
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.position),
|
||||
sizeof(options.position));
|
||||
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
|
||||
sizeof(_options.position));
|
||||
glm::vec3 boxCorner = glm::vec3(0);
|
||||
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&boxCorner),
|
||||
sizeof(glm::vec3));
|
||||
|
@ -283,7 +286,7 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
volumeOptionOffset = _currentPacket->pos();
|
||||
quint8 volume = MAX_INJECTOR_VOLUME;
|
||||
audioPacketStream << volume;
|
||||
audioPacketStream << _options.ignorePenumbra;
|
||||
audioPacketStream << options.ignorePenumbra;
|
||||
|
||||
audioDataOffset = _currentPacket->pos();
|
||||
|
||||
|
@ -313,10 +316,10 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
_currentPacket->writePrimitive((uchar)(_localAudioInterface && _localAudioInterface->shouldLoopbackInjectors()));
|
||||
|
||||
_currentPacket->seek(positionOptionOffset);
|
||||
_currentPacket->writePrimitive(_options.position);
|
||||
_currentPacket->writePrimitive(_options.orientation);
|
||||
_currentPacket->writePrimitive(options.position);
|
||||
_currentPacket->writePrimitive(options.orientation);
|
||||
|
||||
quint8 volume = packFloatGainToByte(_options.volume);
|
||||
quint8 volume = packFloatGainToByte(options.volume);
|
||||
_currentPacket->seek(volumeOptionOffset);
|
||||
_currentPacket->writePrimitive(volume);
|
||||
|
||||
|
@ -326,8 +329,8 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
// Might be a reasonable place to do the encode step here.
|
||||
QByteArray decodedAudio;
|
||||
|
||||
int totalBytesLeftToCopy = (_options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
if (!_options.loop) {
|
||||
int totalBytesLeftToCopy = (options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
if (!options.loop) {
|
||||
// If we aren't looping, let's make sure we don't read past the end
|
||||
int bytesLeftToRead = _audioData->getNumBytes() - _currentSendOffset;
|
||||
totalBytesLeftToCopy = std::min(totalBytesLeftToCopy, bytesLeftToRead);
|
||||
|
@ -342,14 +345,16 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
auto samplesOut = reinterpret_cast<AudioSample*>(decodedAudio.data());
|
||||
|
||||
// Copy and Measure the loudness of this frame
|
||||
_loudness = 0.0f;
|
||||
for (int i = 0; i < samplesLeftToCopy; ++i) {
|
||||
auto index = (currentSample + i) % _audioData->getNumSamples();
|
||||
auto sample = samples[index];
|
||||
samplesOut[i] = sample;
|
||||
_loudness += abs(sample) / (AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
|
||||
}
|
||||
_loudness /= (float)samplesLeftToCopy;
|
||||
withWriteLock([&] {
|
||||
_loudness = 0.0f;
|
||||
for (int i = 0; i < samplesLeftToCopy; ++i) {
|
||||
auto index = (currentSample + i) % _audioData->getNumSamples();
|
||||
auto sample = samples[index];
|
||||
samplesOut[i] = sample;
|
||||
_loudness += abs(sample) / (AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
|
||||
}
|
||||
_loudness /= (float)samplesLeftToCopy;
|
||||
});
|
||||
_currentSendOffset = (_currentSendOffset + totalBytesLeftToCopy) %
|
||||
_audioData->getNumBytes();
|
||||
|
||||
|
@ -371,7 +376,7 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
_outgoingSequenceNumber++;
|
||||
}
|
||||
|
||||
if (_currentSendOffset == 0 && !_options.loop) {
|
||||
if (_currentSendOffset == 0 && !options.loop) {
|
||||
finishNetworkInjection();
|
||||
return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
|
||||
}
|
||||
|
@ -391,134 +396,10 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
// If we are falling behind by more frames than our threshold, let's skip the frames ahead
|
||||
qCDebug(audio) << this << "injectNextFrame() skipping ahead, fell behind by " << (currentFrameBasedOnElapsedTime - _nextFrame) << " frames";
|
||||
_nextFrame = currentFrameBasedOnElapsedTime;
|
||||
_currentSendOffset = _nextFrame * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL * (_options.stereo ? 2 : 1) % _audioData->getNumBytes();
|
||||
_currentSendOffset = _nextFrame * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL * (options.stereo ? 2 : 1) % _audioData->getNumBytes();
|
||||
}
|
||||
|
||||
int64_t playNextFrameAt = ++_nextFrame * AudioConstants::NETWORK_FRAME_USECS;
|
||||
|
||||
return std::max(INT64_C(0), playNextFrameAt - currentTime);
|
||||
}
|
||||
|
||||
void AudioInjector::stop() {
|
||||
// trigger a call on the injector's thread to change state to finished
|
||||
QMetaObject::invokeMethod(this, "finish");
|
||||
}
|
||||
|
||||
void AudioInjector::triggerDeleteAfterFinish() {
|
||||
// make sure this fires on the AudioInjector thread
|
||||
if (thread() != QThread::currentThread()) {
|
||||
QMetaObject::invokeMethod(this, "triggerDeleteAfterFinish", Qt::QueuedConnection);
|
||||
return;
|
||||
}
|
||||
|
||||
if (stateHas(AudioInjectorState::Finished)) {
|
||||
stop();
|
||||
} else {
|
||||
_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
}
|
||||
|
||||
AudioInjectorPointer AudioInjector::playSoundAndDelete(SharedSoundPointer sound, const AudioInjectorOptions& options) {
|
||||
AudioInjectorPointer injector = playSound(sound, options);
|
||||
|
||||
if (injector) {
|
||||
injector->_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
|
||||
return injector;
|
||||
}
|
||||
|
||||
|
||||
AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const AudioInjectorOptions& options) {
|
||||
if (!sound || !sound->isReady()) {
|
||||
return AudioInjectorPointer();
|
||||
}
|
||||
|
||||
if (options.pitch == 1.0f) {
|
||||
|
||||
AudioInjectorPointer injector = AudioInjectorPointer::create(sound, options);
|
||||
|
||||
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
|
||||
qWarning() << "AudioInjector::playSound failed to thread injector";
|
||||
}
|
||||
return injector;
|
||||
|
||||
} else {
|
||||
using AudioConstants::AudioSample;
|
||||
using AudioConstants::SAMPLE_RATE;
|
||||
const int standardRate = SAMPLE_RATE;
|
||||
// limit pitch to 4 octaves
|
||||
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
|
||||
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
|
||||
|
||||
auto audioData = sound->getAudioData();
|
||||
auto numChannels = audioData->getNumChannels();
|
||||
auto numFrames = audioData->getNumFrames();
|
||||
|
||||
AudioSRC resampler(standardRate, resampledRate, numChannels);
|
||||
|
||||
// create a resampled buffer that is guaranteed to be large enough
|
||||
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
|
||||
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
|
||||
QByteArray resampledBuffer(maxOutputSize, '\0');
|
||||
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
|
||||
|
||||
resampler.render(audioData->data(), bufferPtr, numFrames);
|
||||
|
||||
int numSamples = maxOutputFrames * numChannels;
|
||||
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
|
||||
|
||||
AudioInjectorPointer injector = AudioInjectorPointer::create(newAudioData, options);
|
||||
|
||||
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
|
||||
qWarning() << "AudioInjector::playSound failed to thread pitch-shifted injector";
|
||||
}
|
||||
return injector;
|
||||
}
|
||||
}
|
||||
|
||||
AudioInjectorPointer AudioInjector::playSoundAndDelete(AudioDataPointer audioData, const AudioInjectorOptions& options) {
|
||||
AudioInjectorPointer injector = playSound(audioData, options);
|
||||
|
||||
if (injector) {
|
||||
injector->_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
|
||||
return injector;
|
||||
}
|
||||
|
||||
AudioInjectorPointer AudioInjector::playSound(AudioDataPointer audioData, const AudioInjectorOptions& options) {
|
||||
if (options.pitch == 1.0f) {
|
||||
AudioInjectorPointer injector = AudioInjectorPointer::create(audioData, options);
|
||||
|
||||
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
|
||||
qWarning() << "AudioInjector::playSound failed to thread pitch-shifted injector";
|
||||
}
|
||||
return injector;
|
||||
} else {
|
||||
using AudioConstants::AudioSample;
|
||||
using AudioConstants::SAMPLE_RATE;
|
||||
const int standardRate = SAMPLE_RATE;
|
||||
// limit pitch to 4 octaves
|
||||
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
|
||||
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
|
||||
|
||||
auto numChannels = audioData->getNumChannels();
|
||||
auto numFrames = audioData->getNumFrames();
|
||||
|
||||
AudioSRC resampler(standardRate, resampledRate, numChannels);
|
||||
|
||||
// create a resampled buffer that is guaranteed to be large enough
|
||||
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
|
||||
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
|
||||
QByteArray resampledBuffer(maxOutputSize, '\0');
|
||||
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
|
||||
|
||||
resampler.render(audioData->data(), bufferPtr, numFrames);
|
||||
|
||||
int numSamples = maxOutputFrames * numChannels;
|
||||
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
|
||||
|
||||
return AudioInjector::playSound(newAudioData, options);
|
||||
}
|
||||
}
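The pitch branch above shifts pitch by resampling: the audio data is resampled from SAMPLE_RATE to SAMPLE_RATE / pitch and then played back at SAMPLE_RATE, so pitch = 2.0 produces half as many samples and the sound plays one octave higher (pitch is clamped to four octaves either way). A hedged script-side sketch, assuming the injector options exposed to scripts mirror AudioInjectorOptions (including a pitch field) and a hypothetical sound URL:

var hornSound = SoundCache.getSound(Script.resolvePath("assets/horn.wav"));   // hypothetical asset path

function playHornOctaveUp() {
    if (!hornSound.downloaded) {
        return;   // the data must be loaded before it can be resampled and injected
    }
    var injector = Audio.playSound(hornSound, {
        position: MyAvatar.position,
        volume: 0.5,
        pitch: 2.0    // 48000 / 2.0 => data resampled to 24000 Hz, played back at 48000 Hz: one octave up
    });
    // The script holds the injector reference and can stop it early.
    Script.setTimeout(function () { injector.stop(); }, 2000);
}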
|
|
@ -19,6 +19,8 @@
|
|||
#include <QtCore/QSharedPointer>
|
||||
#include <QtCore/QThread>
|
||||
|
||||
#include <shared/ReadWriteLockable.h>
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtx/quaternion.hpp>
|
||||
|
||||
|
@ -49,7 +51,7 @@ AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs)
|
|||
|
||||
// In order to make scripting cleaner for the AudioInjector, the script now holds on to the AudioInjector object
|
||||
// until it dies.
|
||||
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector> {
|
||||
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector>, public ReadWriteLockable {
|
||||
Q_OBJECT
|
||||
public:
|
||||
AudioInjector(SharedSoundPointer sound, const AudioInjectorOptions& injectorOptions);
|
||||
|
@ -61,40 +63,34 @@ public:
|
|||
int getCurrentSendOffset() const { return _currentSendOffset; }
|
||||
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
|
||||
|
||||
AudioInjectorLocalBuffer* getLocalBuffer() const { return _localBuffer; }
|
||||
QSharedPointer<AudioInjectorLocalBuffer> getLocalBuffer() const { return _localBuffer; }
|
||||
AudioHRTF& getLocalHRTF() { return _localHRTF; }
|
||||
AudioFOA& getLocalFOA() { return _localFOA; }
|
||||
|
||||
bool isLocalOnly() const { return _options.localOnly; }
|
||||
float getVolume() const { return _options.volume; }
|
||||
bool isPositionSet() const { return _options.positionSet; }
|
||||
glm::vec3 getPosition() const { return _options.position; }
|
||||
glm::quat getOrientation() const { return _options.orientation; }
|
||||
bool isStereo() const { return _options.stereo; }
|
||||
bool isAmbisonic() const { return _options.ambisonic; }
|
||||
float getLoudness() const { return resultWithReadLock<float>([&] { return _loudness; }); }
|
||||
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
|
||||
|
||||
bool isLocalOnly() const { return resultWithReadLock<bool>([&] { return _options.localOnly; }); }
|
||||
float getVolume() const { return resultWithReadLock<float>([&] { return _options.volume; }); }
|
||||
bool isPositionSet() const { return resultWithReadLock<bool>([&] { return _options.positionSet; }); }
|
||||
glm::vec3 getPosition() const { return resultWithReadLock<glm::vec3>([&] { return _options.position; }); }
|
||||
glm::quat getOrientation() const { return resultWithReadLock<glm::quat>([&] { return _options.orientation; }); }
|
||||
bool isStereo() const { return resultWithReadLock<bool>([&] { return _options.stereo; }); }
|
||||
bool isAmbisonic() const { return resultWithReadLock<bool>([&] { return _options.ambisonic; }); }
|
||||
|
||||
AudioInjectorOptions getOptions() const { return resultWithReadLock<AudioInjectorOptions>([&] { return _options; }); }
|
||||
void setOptions(const AudioInjectorOptions& options);
|
||||
|
||||
bool stateHas(AudioInjectorState state) const ;
|
||||
static void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }
|
||||
|
||||
static AudioInjectorPointer playSoundAndDelete(SharedSoundPointer sound, const AudioInjectorOptions& options);
|
||||
static AudioInjectorPointer playSound(SharedSoundPointer sound, const AudioInjectorOptions& options);
|
||||
static AudioInjectorPointer playSoundAndDelete(AudioDataPointer audioData, const AudioInjectorOptions& options);
|
||||
static AudioInjectorPointer playSound(AudioDataPointer audioData, const AudioInjectorOptions& options);
|
||||
void restart();
|
||||
void finish();
|
||||
|
||||
void finishNetworkInjection();
|
||||
|
||||
public slots:
|
||||
void restart();
|
||||
|
||||
void stop();
|
||||
void triggerDeleteAfterFinish();
|
||||
|
||||
const AudioInjectorOptions& getOptions() const { return _options; }
|
||||
void setOptions(const AudioInjectorOptions& options);
|
||||
|
||||
float getLoudness() const { return _loudness; }
|
||||
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
|
||||
void finish();
|
||||
void finishLocalInjection();
|
||||
void finishNetworkInjection();
|
||||
|
||||
signals:
|
||||
void finished();
|
||||
|
@ -104,7 +100,6 @@ private:
|
|||
int64_t injectNextFrame();
|
||||
bool inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&));
|
||||
bool injectLocally();
|
||||
void deleteLocalBuffer();
|
||||
|
||||
static AbstractAudioInterface* _localAudioInterface;
|
||||
|
||||
|
@ -116,7 +111,7 @@ private:
|
|||
float _loudness { 0.0f };
|
||||
int _currentSendOffset { 0 };
|
||||
std::unique_ptr<NLPacket> _currentPacket { nullptr };
|
||||
AudioInjectorLocalBuffer* _localBuffer { nullptr };
|
||||
QSharedPointer<AudioInjectorLocalBuffer> _localBuffer { nullptr };
|
||||
|
||||
int64_t _nextFrame { 0 };
|
||||
std::unique_ptr<QElapsedTimer> _frameTimer { nullptr };
|
||||
|
@ -128,4 +123,6 @@ private:
|
|||
friend class AudioInjectorManager;
|
||||
};
|
||||
|
||||
Q_DECLARE_METATYPE(AudioInjectorPointer)
|
||||
|
||||
#endif // hifi_AudioInjector_h
|
||||
|
|
|
@ -16,6 +16,10 @@ AudioInjectorLocalBuffer::AudioInjectorLocalBuffer(AudioDataPointer audioData) :
|
|||
{
|
||||
}
|
||||
|
||||
AudioInjectorLocalBuffer::~AudioInjectorLocalBuffer() {
|
||||
stop();
|
||||
}
|
||||
|
||||
void AudioInjectorLocalBuffer::stop() {
|
||||
_isStopped = true;
|
||||
|
||||
|
@ -30,9 +34,8 @@ bool AudioInjectorLocalBuffer::seek(qint64 pos) {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
qint64 AudioInjectorLocalBuffer::readData(char* data, qint64 maxSize) {
|
||||
if (!_isStopped) {
|
||||
if (!_isStopped && _audioData) {
|
||||
|
||||
// first copy to the end of the raw audio
|
||||
int bytesToEnd = (int)_audioData->getNumBytes() - _currentOffset;
|
||||
|
|
|
@ -22,6 +22,7 @@ class AudioInjectorLocalBuffer : public QIODevice {
|
|||
Q_OBJECT
|
||||
public:
|
||||
AudioInjectorLocalBuffer(AudioDataPointer audioData);
|
||||
~AudioInjectorLocalBuffer();
|
||||
|
||||
void stop();
|
||||
|
||||
|
|
|
@ -14,11 +14,14 @@
|
|||
#include <QtCore/QCoreApplication>
|
||||
|
||||
#include <SharedUtil.h>
|
||||
#include <shared/QtHelpers.h>
|
||||
|
||||
#include "AudioConstants.h"
|
||||
#include "AudioInjector.h"
|
||||
#include "AudioLogging.h"
|
||||
|
||||
#include "AudioSRC.h"
|
||||
|
||||
AudioInjectorManager::~AudioInjectorManager() {
|
||||
_shouldStop = true;
|
||||
|
||||
|
@ -30,7 +33,7 @@ AudioInjectorManager::~AudioInjectorManager() {
|
|||
auto& timePointerPair = _injectors.top();
|
||||
|
||||
// ask it to stop and be deleted
|
||||
timePointerPair.second->stop();
|
||||
timePointerPair.second->finish();
|
||||
|
||||
_injectors.pop();
|
||||
}
|
||||
|
@ -46,6 +49,8 @@ AudioInjectorManager::~AudioInjectorManager() {
|
|||
_thread->quit();
|
||||
_thread->wait();
|
||||
}
|
||||
|
||||
moveToThread(qApp->thread());
|
||||
}
|
||||
|
||||
void AudioInjectorManager::createThread() {
|
||||
|
@ -55,6 +60,8 @@ void AudioInjectorManager::createThread() {
|
|||
// when the thread is started, have it call our run to handle injection of audio
|
||||
connect(_thread, &QThread::started, this, &AudioInjectorManager::run, Qt::DirectConnection);
|
||||
|
||||
moveToThread(_thread);
|
||||
|
||||
// start the thread
|
||||
_thread->start();
|
||||
}
|
||||
|
@ -141,36 +148,7 @@ bool AudioInjectorManager::wouldExceedLimits() { // Should be called inside of a
|
|||
|
||||
bool AudioInjectorManager::threadInjector(const AudioInjectorPointer& injector) {
|
||||
if (_shouldStop) {
|
||||
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
|
||||
return false;
|
||||
}
|
||||
|
||||
// guard the injectors vector with a mutex
|
||||
Lock lock(_injectorsMutex);
|
||||
|
||||
if (wouldExceedLimits()) {
|
||||
return false;
|
||||
} else {
|
||||
if (!_thread) {
|
||||
createThread();
|
||||
}
|
||||
|
||||
// move the injector to the QThread
|
||||
injector->moveToThread(_thread);
|
||||
|
||||
// add the injector to the queue with a send timestamp of now
|
||||
_injectors.emplace(usecTimestampNow(), injector);
|
||||
|
||||
// notify our wait condition so we can inject two frames for this injector immediately
|
||||
_injectorReady.notify_one();
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& injector) {
|
||||
if (_shouldStop) {
|
||||
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
|
||||
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -188,3 +166,192 @@ bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& i
|
|||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
AudioInjectorPointer AudioInjectorManager::playSound(const SharedSoundPointer& sound, const AudioInjectorOptions& options, bool setPendingDelete) {
|
||||
if (_shouldStop) {
|
||||
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
AudioInjectorPointer injector = nullptr;
|
||||
if (sound && sound->isReady()) {
|
||||
if (options.pitch == 1.0f) {
|
||||
injector = QSharedPointer<AudioInjector>(new AudioInjector(sound, options), &AudioInjector::deleteLater);
|
||||
} else {
|
||||
using AudioConstants::AudioSample;
|
||||
using AudioConstants::SAMPLE_RATE;
|
||||
const int standardRate = SAMPLE_RATE;
|
||||
// limit pitch to 4 octaves
|
||||
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
|
||||
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
|
||||
|
||||
auto audioData = sound->getAudioData();
|
||||
auto numChannels = audioData->getNumChannels();
|
||||
auto numFrames = audioData->getNumFrames();
|
||||
|
||||
AudioSRC resampler(standardRate, resampledRate, numChannels);
|
||||
|
||||
// create a resampled buffer that is guaranteed to be large enough
|
||||
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
|
||||
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
|
||||
QByteArray resampledBuffer(maxOutputSize, '\0');
|
||||
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
|
||||
|
||||
resampler.render(audioData->data(), bufferPtr, numFrames);
|
||||
|
||||
int numSamples = maxOutputFrames * numChannels;
|
||||
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
|
||||
|
||||
injector = QSharedPointer<AudioInjector>(new AudioInjector(newAudioData, options), &AudioInjector::deleteLater);
|
||||
}
|
||||
}
|
||||
|
||||
if (!injector) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (setPendingDelete) {
|
||||
injector->_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
|
||||
injector->moveToThread(_thread);
|
||||
injector->inject(&AudioInjectorManager::threadInjector);
|
||||
|
||||
return injector;
|
||||
}
|
||||
|
||||
AudioInjectorPointer AudioInjectorManager::playSound(const AudioDataPointer& audioData, const AudioInjectorOptions& options, bool setPendingDelete) {
|
||||
if (_shouldStop) {
|
||||
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
AudioInjectorPointer injector = nullptr;
|
||||
if (options.pitch == 1.0f) {
|
||||
injector = QSharedPointer<AudioInjector>(new AudioInjector(audioData, options), &AudioInjector::deleteLater);
|
||||
} else {
|
||||
using AudioConstants::AudioSample;
|
||||
using AudioConstants::SAMPLE_RATE;
|
||||
const int standardRate = SAMPLE_RATE;
|
||||
// limit pitch to 4 octaves
|
||||
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
|
||||
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
|
||||
|
||||
auto numChannels = audioData->getNumChannels();
|
||||
auto numFrames = audioData->getNumFrames();
|
||||
|
||||
AudioSRC resampler(standardRate, resampledRate, numChannels);
|
||||
|
||||
// create a resampled buffer that is guaranteed to be large enough
|
||||
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
|
||||
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
|
||||
QByteArray resampledBuffer(maxOutputSize, '\0');
|
||||
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
|
||||
|
||||
resampler.render(audioData->data(), bufferPtr, numFrames);
|
||||
|
||||
int numSamples = maxOutputFrames * numChannels;
|
||||
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
|
||||
|
||||
injector = QSharedPointer<AudioInjector>(new AudioInjector(newAudioData, options), &AudioInjector::deleteLater);
|
||||
}
|
||||
|
||||
if (!injector) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (setPendingDelete) {
|
||||
injector->_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
|
||||
injector->moveToThread(_thread);
|
||||
injector->inject(&AudioInjectorManager::threadInjector);
|
||||
|
||||
return injector;
|
||||
}
|
||||
|
||||
void AudioInjectorManager::setOptionsAndRestart(const AudioInjectorPointer& injector, const AudioInjectorOptions& options) {
|
||||
if (!injector) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (QThread::currentThread() != _thread) {
|
||||
QMetaObject::invokeMethod(this, "setOptionsAndRestart", Q_ARG(const AudioInjectorPointer&, injector), Q_ARG(const AudioInjectorOptions&, options));
|
||||
_injectorReady.notify_one();
|
||||
return;
|
||||
}
|
||||
|
||||
injector->setOptions(options);
|
||||
injector->restart();
|
||||
}
|
||||
|
||||
void AudioInjectorManager::restart(const AudioInjectorPointer& injector) {
|
||||
if (!injector) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (QThread::currentThread() != _thread) {
|
||||
QMetaObject::invokeMethod(this, "restart", Q_ARG(const AudioInjectorPointer&, injector));
|
||||
_injectorReady.notify_one();
|
||||
return;
|
||||
}
|
||||
|
||||
injector->restart();
|
||||
}
|
||||
|
||||
void AudioInjectorManager::setOptions(const AudioInjectorPointer& injector, const AudioInjectorOptions& options) {
|
||||
if (!injector) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (QThread::currentThread() != _thread) {
|
||||
QMetaObject::invokeMethod(this, "setOptions", Q_ARG(const AudioInjectorPointer&, injector), Q_ARG(const AudioInjectorOptions&, options));
|
||||
_injectorReady.notify_one();
|
||||
return;
|
||||
}
|
||||
|
||||
injector->setOptions(options);
|
||||
}
|
||||
|
||||
AudioInjectorOptions AudioInjectorManager::getOptions(const AudioInjectorPointer& injector) {
|
||||
if (!injector) {
|
||||
return AudioInjectorOptions();
|
||||
}
|
||||
|
||||
return injector->getOptions();
|
||||
}
|
||||
|
||||
float AudioInjectorManager::getLoudness(const AudioInjectorPointer& injector) {
|
||||
if (!injector) {
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
return injector->getLoudness();
|
||||
}
|
||||
|
||||
bool AudioInjectorManager::isPlaying(const AudioInjectorPointer& injector) {
|
||||
if (!injector) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return injector->isPlaying();
|
||||
}
|
||||
|
||||
void AudioInjectorManager::stop(const AudioInjectorPointer& injector) {
|
||||
if (!injector) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (QThread::currentThread() != _thread) {
|
||||
QMetaObject::invokeMethod(this, "stop", Q_ARG(const AudioInjectorPointer&, injector));
|
||||
_injectorReady.notify_one();
|
||||
return;
|
||||
}
|
||||
|
||||
injector->finish();
|
||||
}
|
||||
|
||||
size_t AudioInjectorManager::getNumInjectors() {
|
||||
Lock lock(_injectorsMutex);
|
||||
return _injectors.size();
|
||||
}
|
|
@ -30,8 +30,27 @@ class AudioInjectorManager : public QObject, public Dependency {
|
|||
SINGLETON_DEPENDENCY
|
||||
public:
|
||||
~AudioInjectorManager();
|
||||
|
||||
AudioInjectorPointer playSound(const SharedSoundPointer& sound, const AudioInjectorOptions& options, bool setPendingDelete = false);
|
||||
AudioInjectorPointer playSound(const AudioDataPointer& audioData, const AudioInjectorOptions& options, bool setPendingDelete = false);
|
||||
|
||||
size_t getNumInjectors();
|
||||
|
||||
public slots:
|
||||
void setOptionsAndRestart(const AudioInjectorPointer& injector, const AudioInjectorOptions& options);
|
||||
void restart(const AudioInjectorPointer& injector);
|
||||
|
||||
void setOptions(const AudioInjectorPointer& injector, const AudioInjectorOptions& options);
|
||||
AudioInjectorOptions getOptions(const AudioInjectorPointer& injector);
|
||||
|
||||
float getLoudness(const AudioInjectorPointer& injector);
|
||||
bool isPlaying(const AudioInjectorPointer& injector);
|
||||
|
||||
void stop(const AudioInjectorPointer& injector);
|
||||
|
||||
private slots:
|
||||
void run();
|
||||
|
||||
private:
|
||||
|
||||
using TimeInjectorPointerPair = std::pair<uint64_t, AudioInjectorPointer>;
|
||||
|
@ -49,11 +68,10 @@ private:
|
|||
using Lock = std::unique_lock<Mutex>;
|
||||
|
||||
bool threadInjector(const AudioInjectorPointer& injector);
|
||||
bool restartFinishedInjector(const AudioInjectorPointer& injector);
|
||||
void notifyInjectorReadyCondition() { _injectorReady.notify_one(); }
|
||||
bool wouldExceedLimits();
|
||||
|
||||
AudioInjectorManager() {};
|
||||
AudioInjectorManager() { createThread(); }
|
||||
AudioInjectorManager(const AudioInjectorManager&) = delete;
|
||||
AudioInjectorManager& operator=(const AudioInjectorManager&) = delete;
|
||||
|
||||
|
|
|
@ -376,7 +376,7 @@ bool Avatar::applyGrabChanges() {
|
|||
const EntityItemPointer& entity = std::dynamic_pointer_cast<EntityItem>(target);
|
||||
if (entity && entity->getEntityHostType() == entity::HostType::AVATAR && entity->getSimulationOwner().getID() == getID()) {
|
||||
EntityItemProperties properties = entity->getProperties();
|
||||
sendPacket(entity->getID(), properties);
|
||||
sendPacket(entity->getID());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -127,7 +127,12 @@ private:
|
|||
class Avatar : public AvatarData, public scriptable::ModelProvider, public MetaModelPayload {
|
||||
Q_OBJECT
|
||||
|
||||
// This property has JSDoc in MyAvatar.h.
|
||||
/*jsdoc
|
||||
* @comment IMPORTANT: The JSDoc for the following properties should be copied to MyAvatar.h.
|
||||
*
|
||||
* @property {Vec3} skeletonOffset - Can be used to apply a translation offset between the avatar's position and the
|
||||
* registration point of the 3D model.
|
||||
*/
|
||||
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
|
||||
|
||||
public:
|
||||
|
@ -196,36 +201,52 @@ public:
|
|||
virtual QStringList getJointNames() const override;
|
||||
|
||||
/**jsdoc
|
||||
* Gets the default rotation of a joint (in the current avatar) relative to its parent.
|
||||
* <p>For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
|
||||
* @function MyAvatar.getDefaultJointRotation
|
||||
* @param {number} index
|
||||
* @returns {Quat}
|
||||
* @param {number} index - The joint index.
|
||||
* @returns {Quat} The default rotation of the joint if the joint index is valid, otherwise {@link Quat(0)|Quat.IDENTITY}.
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::quat getDefaultJointRotation(int index) const;
|
||||
|
||||
/**jsdoc
|
||||
* Gets the default translation of a joint (in the current avatar) relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
|
||||
* @function MyAvatar.getDefaultJointTranslation
|
||||
* @param {number} index
|
||||
* @returns {Vec3}
|
||||
* @param {number} index - The joint index.
|
||||
* @returns {Vec3} The default translation of the joint (in model coordinates) if the joint index is valid, otherwise
|
||||
* {@link Vec3(0)|Vec3.ZERO}.
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::vec3 getDefaultJointTranslation(int index) const;
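A short usage sketch for the two getters above, in the style of the examples used elsewhere in this header:

// Report the default (model-file, typically T-pose) transform of the left hand relative to its parent joint.
var leftHandIndex = MyAvatar.getJointIndex("LeftHand");
var defaultRotation = MyAvatar.getDefaultJointRotation(leftHandIndex);
var defaultTranslation = MyAvatar.getDefaultJointTranslation(leftHandIndex);   // model coordinates, not necessarily meters
print("LeftHand default rotation: " + JSON.stringify(Quat.safeEulerAngles(defaultRotation)));   // degrees
print("LeftHand default translation: " + JSON.stringify(defaultTranslation));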
|
||||
|
||||
/**jsdoc
|
||||
* Provides read only access to the default joint rotations in avatar coordinates.
|
||||
* Gets the default joint rotations in avatar coordinates.
|
||||
* The default pose of the avatar is defined by the position and orientation of all bones
|
||||
* in the avatar's model file. Typically this is a T-pose.
|
||||
* @function MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame
|
||||
* @param index {number} index number
|
||||
* @returns {Quat} The rotation of this joint in avatar coordinates.
|
||||
* @param index {number} - The joint index.
|
||||
* @returns {Quat} The default rotation of the joint in avatar coordinates.
|
||||
* @example <caption>Report the default rotation of your avatar's head joint relative to your avatar.</caption>
|
||||
* var headIndex = MyAvatar.getJointIndex("Head");
|
||||
* var defaultHeadRotation = MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(headIndex);
|
||||
* print("Default head rotation: " + JSON.stringify(Quat.safeEulerAngles(defaultHeadRotation))); // Degrees
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::quat getAbsoluteDefaultJointRotationInObjectFrame(int index) const;
|
||||
|
||||
/**jsdoc
|
||||
* Provides read only access to the default joint translations in avatar coordinates.
|
||||
* Gets the default joint translations in avatar coordinates.
|
||||
* The default pose of the avatar is defined by the position and orientation of all bones
|
||||
* in the avatar's model file. Typically this is a T-pose.
|
||||
* @function MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame
|
||||
* @param index {number} index number
|
||||
* @returns {Vec3} The position of this joint in avatar coordinates.
|
||||
* @param index {number} - The joint index.
|
||||
* @returns {Vec3} The default position of the joint in avatar coordinates.
|
||||
* @example <caption>Report the default translation of your avatar's head joint relative to your avatar.</caption>
|
||||
* var headIndex = MyAvatar.getJointIndex("Head");
|
||||
* var defaultHeadTranslation = MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(headIndex);
|
||||
* print("Default head translation: " + JSON.stringify(defaultHeadTranslation));
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::vec3 getAbsoluteDefaultJointTranslationInObjectFrame(int index) const;
|
||||
|
||||
|
@ -233,59 +254,88 @@ public:
|
|||
virtual glm::vec3 getAbsoluteJointScaleInObjectFrame(int index) const override;
|
||||
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
|
||||
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
|
||||
|
||||
/**jsdoc
|
||||
* Sets the rotation of a joint relative to the avatar.
|
||||
* <p><strong>Warning:</strong> Not able to be used in the <code>MyAvatar</code> API.</p>
|
||||
* @function MyAvatar.setAbsoluteJointRotationInObjectFrame
|
||||
* @param {number} index - The index of the joint. <em>Not used.</em>
|
||||
* @param {Quat} rotation - The rotation of the joint relative to the avatar. <em>Not used.</em>
|
||||
* @returns {boolean} <code>false</code>.
|
||||
*/
|
||||
virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override { return false; }
|
||||
|
||||
/**jsdoc
|
||||
* Sets the translation of a joint relative to the avatar.
|
||||
* <p><strong>Warning:</strong> Not able to be used in the <code>MyAvatar</code> API.</p>
|
||||
* @function MyAvatar.setAbsoluteJointTranslationInObjectFrame
|
||||
* @param {number} index - The index of the joint. <em>Not used.</em>
|
||||
* @param {Vec3} translation - The translation of the joint relative to the avatar. <em>Not used.</em>
|
||||
* @returns {boolean} <code>false</code>.
|
||||
*/
|
||||
virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override { return false; }
|
||||
virtual glm::vec3 getSpine2SplineOffset() const { return _spine2SplineOffset; }
|
||||
virtual float getSpine2SplineRatio() const { return _spine2SplineRatio; }
|
||||
|
||||
// world-space to avatar-space rig conversion functions
|
||||
/**jsdoc
|
||||
* @function MyAvatar.worldToJointPoint
|
||||
* @param {Vec3} position
|
||||
* @param {number} [jointIndex=-1]
|
||||
* @returns {Vec3}
|
||||
*/
|
||||
* Transforms a position in world coordinates to a position in a joint's coordinates, or avatar coordinates if no joint is
|
||||
* specified.
|
||||
* @function MyAvatar.worldToJointPoint
|
||||
* @param {Vec3} position - The position in world coordinates.
|
||||
* @param {number} [jointIndex=-1] - The index of the joint.
|
||||
* @returns {Vec3} The position in the joint's coordinate system, or avatar coordinate system if no joint is specified.
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 worldToJointPoint(const glm::vec3& position, const int jointIndex = -1) const;
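For illustration, a sketch that converts a world-space point into the head joint's frame using the function above (Vec3.sum and Quat.getForward are standard scripting helpers, assumed available):

// Express the point one meter in front of the avatar in the Head joint's coordinate system.
var headIndex = MyAvatar.getJointIndex("Head");
var worldPoint = Vec3.sum(MyAvatar.position, Quat.getForward(MyAvatar.orientation));
var pointInHeadFrame = MyAvatar.worldToJointPoint(worldPoint, headIndex);
print("In head frame: " + JSON.stringify(pointInHeadFrame));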
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.worldToJointDirection
|
||||
* @param {Vec3} direction
|
||||
* @param {number} [jointIndex=-1]
|
||||
* @returns {Vec3}
|
||||
*/
|
||||
* Transforms a direction in world coordinates to a direction in a joint's coordinates, or avatar coordinates if no joint
|
||||
* is specified.
|
||||
* @function MyAvatar.worldToJointDirection
|
||||
* @param {Vec3} direction - The direction in world coordinates.
|
||||
* @param {number} [jointIndex=-1] - The index of the joint.
|
||||
* @returns {Vec3} The direction in the joint's coordinate system, or avatar coordinate system if no joint is specified.
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 worldToJointDirection(const glm::vec3& direction, const int jointIndex = -1) const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.worldToJointRotation
|
||||
* @param {Quat} rotation
|
||||
* @param {number} [jointIndex=-1]
|
||||
* @returns {Quat}
|
||||
* Transforms a rotation in world coordinates to a rotation in a joint's coordinates, or avatar coordinates if no joint is
|
||||
* specified.
|
||||
* @function MyAvatar.worldToJointRotation
|
||||
* @param {Quat} rotation - The rotation in world coordinates.
|
||||
* @param {number} [jointIndex=-1] - The index of the joint.
|
||||
* @returns {Quat} The rotation in the joint's coordinate system, or avatar coordinate system if no joint is specified.
|
||||
*/
|
||||
Q_INVOKABLE glm::quat worldToJointRotation(const glm::quat& rotation, const int jointIndex = -1) const;
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.jointToWorldPoint
|
||||
* @param {vec3} position
|
||||
* @param {number} [jointIndex=-1]
|
||||
* @returns {Vec3}
|
||||
*/
|
||||
* Transforms a position in a joint's coordinates, or avatar coordinates if no joint is specified, to a position in world
|
||||
* coordinates.
|
||||
* @function MyAvatar.jointToWorldPoint
|
||||
* @param {Vec3} position - The position in joint coordinates, or avatar coordinates if no joint is specified.
|
||||
* @param {number} [jointIndex=-1] - The index of the joint.
|
||||
* @returns {Vec3} The position in world coordinates.
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 jointToWorldPoint(const glm::vec3& position, const int jointIndex = -1) const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.jointToWorldDirection
|
||||
* @param {Vec3} direction
|
||||
* @param {number} [jointIndex=-1]
|
||||
* @returns {Vec3}
|
||||
*/
|
||||
* Transforms a direction in a joint's coordinates, or avatar coordinates if no joint is specified, to a direction in world
|
||||
* coordinates.
|
||||
* @function MyAvatar.jointToWorldDirection
|
||||
* @param {Vec3} direction - The direction in joint coordinates, or avatar coordinates if no joint is specified.
|
||||
* @param {number} [jointIndex=-1] - The index of the joint.
|
||||
* @returns {Vec3} The direction in world coordinates.
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 jointToWorldDirection(const glm::vec3& direction, const int jointIndex = -1) const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.jointToWorldRotation
|
||||
* @param {Quat} rotation
|
||||
* @param {number} [jointIndex=-1]
|
||||
* @returns {Quat}
|
||||
*/
|
||||
* Transforms a rotation in a joint's coordinates, or avatar coordinates if no joint is specified, to a rotation in world
|
||||
* coordinates.
|
||||
* @function MyAvatar.jointToWorldRotation
|
||||
* @param {Quat} rotation - The rotation in joint coordinates, or avatar coordinates if no joint is specified.
|
||||
* @param {number} [jointIndex=-1] - The index of the joint.
|
||||
* @returns {Quat} The rotation in world coordinates.
|
||||
*/
|
||||
Q_INVOKABLE glm::quat jointToWorldRotation(const glm::quat& rotation, const int jointIndex = -1) const;
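For context, a hedged sketch showing that the jointToWorld* functions above invert the worldToJoint* functions; the "RightHand" joint is an assumed example name.
    var handIndex = MyAvatar.getJointIndex("RightHand");
    var worldRot = MyAvatar.orientation;                                     // any world-space rotation will do
    var jointRot = MyAvatar.worldToJointRotation(worldRot, handIndex);
    var backToWorld = MyAvatar.jointToWorldRotation(jointRot, handIndex);
    print(JSON.stringify(worldRot) + " ~= " + JSON.stringify(backToWorld));  // round trip should match within float error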
|
||||
|
||||
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
|
||||
|
@ -297,7 +347,7 @@ public:
|
|||
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
|
||||
|
||||
/**jsdoc
|
||||
* Set the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
|
||||
* Sets the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
|
||||
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
|
||||
* @function MyAvatar.setSkeletonOffset
|
||||
* @param {Vec3} offset - The skeleton offset to set.
|
||||
|
@ -313,7 +363,7 @@ public:
|
|||
Q_INVOKABLE void setSkeletonOffset(const glm::vec3& offset);
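A small sketch of the skeleton offset described above; the 0.1 m rise and the 5 s restore delay are arbitrary values chosen for illustration.
    MyAvatar.setSkeletonOffset({ x: 0, y: 0.1, z: 0 });        // render the avatar slightly raised
    print(JSON.stringify(MyAvatar.getSkeletonOffset()));
    Script.setTimeout(function () {
        MyAvatar.setSkeletonOffset({ x: 0, y: 0, z: 0 });      // restore the default offset
    }, 5000);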
|
||||
|
||||
/**jsdoc
|
||||
* Get the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
|
||||
* Gets the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
|
||||
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
|
||||
* @function MyAvatar.getSkeletonOffset
|
||||
* @returns {Vec3} The current skeleton offset.
|
||||
|
@ -325,7 +375,7 @@ public:
|
|||
virtual glm::vec3 getSkeletonPosition() const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the position of a joint in the current avatar.
|
||||
* Gets the position of a joint in the current avatar.
|
||||
* @function MyAvatar.getJointPosition
|
||||
* @param {number} index - The index of the joint.
|
||||
* @returns {Vec3} The position of the joint in world coordinates.
|
||||
|
@ -333,7 +383,7 @@ public:
|
|||
Q_INVOKABLE glm::vec3 getJointPosition(int index) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the position of a joint in the current avatar.
|
||||
* Gets the position of a joint in the current avatar.
|
||||
* @function MyAvatar.getJointPosition
|
||||
* @param {string} name - The name of the joint.
|
||||
* @returns {Vec3} The position of the joint in world coordinates.
|
||||
|
@ -343,7 +393,7 @@ public:
|
|||
Q_INVOKABLE glm::vec3 getJointPosition(const QString& name) const;
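A short sketch contrasting the two getJointPosition overloads documented above; "Head" is an illustrative joint name.
    // Both calls return the same world-space position for a valid joint.
    print(JSON.stringify(MyAvatar.getJointPosition("Head")));
    print(JSON.stringify(MyAvatar.getJointPosition(MyAvatar.getJointIndex("Head"))));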
|
||||
|
||||
/**jsdoc
|
||||
* Get the position of the current avatar's neck in world coordinates.
|
||||
* Gets the position of the current avatar's neck in world coordinates.
|
||||
* @function MyAvatar.getNeckPosition
|
||||
* @returns {Vec3} The position of the neck in world coordinates.
|
||||
* @example <caption>Report the position of your avatar's neck.</caption>
|
||||
|
@ -352,8 +402,9 @@ public:
|
|||
Q_INVOKABLE glm::vec3 getNeckPosition() const;
|
||||
|
||||
/**jsdoc
|
||||
* Gets the current acceleration of the avatar.
|
||||
* @function MyAvatar.getAcceleration
|
||||
* @returns {Vec3}
|
||||
* @returns {Vec3} The current acceleration of the avatar.
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
|
||||
|
||||
|
@ -377,47 +428,55 @@ public:
|
|||
void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
|
||||
float computeMass();
|
||||
/**jsdoc
|
||||
* Get the position of the current avatar's feet (or rather, bottom of its collision capsule) in world coordinates.
|
||||
* Gets the position of the current avatar's feet (or rather, bottom of its collision capsule) in world coordinates.
|
||||
* @function MyAvatar.getWorldFeetPosition
|
||||
* @returns {Vec3} The position of the avatar's feet in world coordinates.
|
||||
*/
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 getWorldFeetPosition();
|
||||
|
||||
void setPositionViaScript(const glm::vec3& position) override;
|
||||
void setOrientationViaScript(const glm::quat& orientation) override;
|
||||
|
||||
/**jsdoc
|
||||
* Gets the ID of the entity or avatar that the avatar is parented to.
|
||||
* @function MyAvatar.getParentID
|
||||
* @returns {Uuid}
|
||||
* @returns {Uuid} The ID of the entity or avatar that the avatar is parented to. {@link Uuid|Uuid.NULL} if not parented.
|
||||
*/
|
||||
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
|
||||
Q_INVOKABLE virtual const QUuid getParentID() const override { return SpatiallyNestable::getParentID(); }
|
||||
|
||||
/**jsdoc
|
||||
* Sets the ID of the entity or avatar that the avatar is parented to.
|
||||
* @function MyAvatar.setParentID
|
||||
* @param {Uuid} parentID
|
||||
* @param {Uuid} parentID - The ID of the entity or avatar that the avatar should be parented to. Set to
|
||||
* {@link Uuid|Uuid.NULL} to unparent.
|
||||
*/
|
||||
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
|
||||
Q_INVOKABLE virtual void setParentID(const QUuid& parentID) override;
|
||||
|
||||
/**jsdoc
|
||||
* Gets the joint of the entity or avatar that the avatar is parented to.
|
||||
* @function MyAvatar.getParentJointIndex
|
||||
* @returns {number}
|
||||
* @returns {number} The joint of the entity or avatar that the avatar is parented to. <code>65535</code> or
|
||||
* <code>-1</code> if parented to the entity or avatar's position and orientation rather than a joint.
|
||||
*/
|
||||
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
|
||||
Q_INVOKABLE virtual quint16 getParentJointIndex() const override { return SpatiallyNestable::getParentJointIndex(); }
|
||||
|
||||
/**jsdoc
|
||||
* Sets the joint of the entity or avatar that the avatar is parented to.
|
||||
* @function MyAvatar.setParentJointIndex
|
||||
* @param {number} parentJointIndex
|
||||
* @param {number} parentJointIndex - The joint of the entity or avatar that the avatar should be parented to. Use
|
||||
* <code>65535</code> or <code>-1</code> to parent to the entity or avatar's position and orientation rather than a
|
||||
* joint.
|
||||
*/
|
||||
// This calls through to the SpatiallyNestable versions, but is here to expose these to JavaScript.
|
||||
Q_INVOKABLE virtual void setParentJointIndex(quint16 parentJointIndex) override;
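A hedged sketch of reading the parenting accessors above from a script; unparenting via Uuid.NULL is taken from the JSDoc and shown only as a comment.
    var parentID = MyAvatar.getParentID();                     // Uuid.NULL if not parented
    var parentJoint = MyAvatar.getParentJointIndex();          // 65535 if not parented to a specific joint
    print("Parent: " + parentID + ", joint: " + parentJoint);
    // To unparent, per the JSDoc above: MyAvatar.setParentID(Uuid.NULL);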
|
||||
|
||||
/**jsdoc
|
||||
* Returns an array of joints, where each joint is an object containing name, index, and parentIndex fields.
|
||||
* Gets information on all the joints in the avatar's skeleton.
|
||||
* @function MyAvatar.getSkeleton
|
||||
* @returns {MyAvatar.SkeletonJoint[]} A list of information about each joint in this avatar's skeleton.
|
||||
* @returns {MyAvatar.SkeletonJoint[]} Information about each joint in the avatar's skeleton.
|
||||
*/
|
||||
/**jsdoc
|
||||
* Information about a single joint in an Avatar's skeleton hierarchy.
|
||||
|
@ -443,8 +502,9 @@ public:
|
|||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getSimulationRate
|
||||
* @param {string} [rateName=""]
|
||||
* @returns {number}
|
||||
* @param {string} [rateName=""] - Rate name.
|
||||
* @returns {number} Simulation rate.
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
Q_INVOKABLE float getSimulationRate(const QString& rateName = QString("")) const;
|
||||
|
||||
|
@ -500,6 +560,13 @@ public:
|
|||
uint32_t appendSubMetaItems(render::ItemIDs& subItems);
|
||||
|
||||
signals:
|
||||
/**jsdoc
|
||||
* Triggered when the avatar's target scale is changed. The target scale is the desired scale of the avatar without any
|
||||
* restrictions on permissible scale values imposed by the domain.
|
||||
* @function MyAvatar.targetScaleChanged
|
||||
* @param {number} targetScale - The avatar's target scale.
|
||||
* @returns {Signal}
|
||||
*/
|
||||
void targetScaleChanged(float targetScale);
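A sketch of connecting to the signal declared above from a script, assuming the usual Signal.connect binding used elsewhere in the scripting API; the handler body is illustrative.
    MyAvatar.targetScaleChanged.connect(function (targetScale) {
        print("New target scale: " + targetScale);             // fires whenever the target scale changes
    });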
|
||||
|
||||
public slots:
|
||||
|
@ -508,7 +575,7 @@ public slots:
|
|||
// thread safe, will return last valid palm from cache
|
||||
|
||||
/**jsdoc
|
||||
* Get the position of the left palm in world coordinates.
|
||||
* Gets the position of the left palm in world coordinates.
|
||||
* @function MyAvatar.getLeftPalmPosition
|
||||
* @returns {Vec3} The position of the left palm in world coordinates.
|
||||
* @example <caption>Report the position of your avatar's left palm.</caption>
|
||||
|
@ -517,15 +584,16 @@ public slots:
|
|||
glm::vec3 getLeftPalmPosition() const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the rotation of the left palm in world coordinates.
|
||||
* Gets the rotation of the left palm in world coordinates.
|
||||
* @function MyAvatar.getLeftPalmRotation
|
||||
* @returns {Quat} The rotation of the left palm in world coordinates.
|
||||
* @example <caption>Report the rotation of your avatar's left palm.</caption>
|
||||
* print(JSON.stringify(MyAvatar.getLeftPalmRotation()));
|
||||
*/
|
||||
glm::quat getLeftPalmRotation() const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the position of the right palm in world coordinates.
|
||||
* Gets the position of the right palm in world coordinates.
|
||||
* @function MyAvatar.getRightPalmPosition
|
||||
* @returns {Vec3} The position of the right palm in world coordinates.
|
||||
* @example <caption>Report the position of your avatar's right palm.</caption>
|
||||
|
@ -542,21 +610,26 @@ public slots:
|
|||
*/
|
||||
glm::quat getRightPalmRotation() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setModelURLFinished
|
||||
* @param {boolean} success
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
// hooked up to Model::setURLFinished signal
|
||||
void setModelURLFinished(bool success);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.rigReady
|
||||
* @returns {Signal}
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
// Hooked up to Model::rigReady signal
|
||||
void rigReady();
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.rigReset
|
||||
* @returns {Signal}
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
// Jooked up to Model::rigReset signal
|
||||
// Hooked up to Model::rigReset signal
|
||||
void rigReset();
|
||||
|
||||
protected:
|
||||
|
@ -605,7 +678,7 @@ protected:
|
|||
|
||||
// protected methods...
|
||||
bool isLookingAtMe(AvatarSharedPointer avatar) const;
|
||||
virtual void sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const { }
|
||||
virtual void sendPacket(const QUuid& entityID) const { }
|
||||
bool applyGrabChanges();
|
||||
void relayJointDataToChildren();
|
||||
|
||||
|
|
|
@ -1435,6 +1435,47 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
return numBytesRead;
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* The avatar mixer data comprises different types of data, with the data rates of each being tracked in kbps.
|
||||
*
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Rate Name</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
* <tr><td><code>"globalPosition"</code></td><td>Incoming global position.</td></tr>
|
||||
* <tr><td><code>"localPosition"</code></td><td>Incoming local position.</td></tr>
|
||||
* <tr><td><code>"avatarBoundingBox"</code></td><td>Incoming avatar bounding box.</td></tr>
|
||||
* <tr><td><code>"avatarOrientation"</code></td><td>Incoming avatar orientation.</td></tr>
|
||||
* <tr><td><code>"avatarScale"</code></td><td>Incoming avatar scale.</td></tr>
|
||||
* <tr><td><code>"lookAtPosition"</code></td><td>Incoming look-at position.</td></tr>
|
||||
* <tr><td><code>"audioLoudness"</code></td><td>Incoming audio loudness.</td></tr>
|
||||
* <tr><td><code>"sensorToWorkMatrix"</code></td><td>Incoming sensor-to-world matrix.</td></tr>
|
||||
* <tr><td><code>"additionalFlags"</code></td><td>Incoming additional avatar flags.</td></tr>
|
||||
* <tr><td><code>"parentInfo"</code></td><td>Incoming parent information.</td></tr>
|
||||
* <tr><td><code>"faceTracker"</code></td><td>Incoming face tracker data.</td></tr>
|
||||
* <tr><td><code>"jointData"</code></td><td>Incoming joint data.</td></tr>
|
||||
* <tr><td><code>"jointDefaultPoseFlagsRate"</code></td><td>Incoming joint default pose flags.</td></tr>
|
||||
* <tr><td><code>"farGrabJointRate"</code></td><td>Incoming far grab joint.</td></tr>
|
||||
* <tr><td><code>"globalPositionOutbound"</code></td><td>Outgoing global position.</td></tr>
|
||||
* <tr><td><code>"localPositionOutbound"</code></td><td>Outgoing local position.</td></tr>
|
||||
* <tr><td><code>"avatarBoundingBoxOutbound"</code></td><td>Outgoing avatar bounding box.</td></tr>
|
||||
* <tr><td><code>"avatarOrientationOutbound"</code></td><td>Outgoing avatar orientation.</td></tr>
|
||||
* <tr><td><code>"avatarScaleOutbound"</code></td><td>Outgoing avatar scale.</td></tr>
|
||||
* <tr><td><code>"lookAtPositionOutbound"</code></td><td>Outgoing look-at position.</td></tr>
|
||||
* <tr><td><code>"audioLoudnessOutbound"</code></td><td>Outgoing audio loudness.</td></tr>
|
||||
* <tr><td><code>"sensorToWorkMatrixOutbound"</code></td><td>Outgoing sensor-to-world matrix.</td></tr>
|
||||
* <tr><td><code>"additionalFlagsOutbound"</code></td><td>Outgoing additional avatar flags.</td></tr>
|
||||
* <tr><td><code>"parentInfoOutbound"</code></td><td>Outgoing parent information.</td></tr>
|
||||
* <tr><td><code>"faceTrackerOutbound"</code></td><td>Outgoing face tracker data.</td></tr>
|
||||
* <tr><td><code>"jointDataOutbound"</code></td><td>Outgoing joint data.</td></tr>
|
||||
* <tr><td><code>"jointDefaultPoseFlagsOutbound"</code></td><td>Outgoing joint default pose flags.</td></tr>
|
||||
* <tr><td><code>""</code></td><td>When no rate name is specified, the total incoming data rate is provided.</td></tr>
|
||||
* </tbody>
|
||||
* </table>
|
||||
*
|
||||
* @typedef {string} AvatarDataRate
|
||||
*/
|
||||
float AvatarData::getDataRate(const QString& rateName) const {
|
||||
if (rateName == "") {
|
||||
return _parseBufferRate.rate() / BYTES_PER_KILOBIT;
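A brief sketch of querying the rates tabulated above, assuming getDataRate is script-accessible on MyAvatar as its Q_INVOKABLE declaration suggests; rate names must match the table exactly.
    print("Total incoming: " + MyAvatar.getDataRate("") + " kbps");      // "" gives the total incoming rate
    print("Joint data: " + MyAvatar.getDataRate("jointData") + " kbps");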
|
||||
|
@ -1496,6 +1537,35 @@ float AvatarData::getDataRate(const QString& rateName) const {
|
|||
return 0.0f;
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* The avatar mixer data comprises different types of data updated at different rates, in Hz.
|
||||
*
|
||||
* <table>
|
||||
* <thead>
|
||||
* <tr><th>Rate Name</th><th>Description</th></tr>
|
||||
* </thead>
|
||||
* <tbody>
|
||||
|
||||
* <tr><td><code>"globalPosition"</code></td><td>Global position.</td></tr>
|
||||
* <tr><td><code>"localPosition"</code></td><td>Local position.</td></tr>
|
||||
* <tr><td><code>"avatarBoundingBox"</code></td><td>Avatar bounding box.</td></tr>
|
||||
* <tr><td><code>"avatarOrientation"</code></td><td>Avatar orientation.</td></tr>
|
||||
* <tr><td><code>"avatarScale"</code></td><td>Avatar scale.</td></tr>
|
||||
* <tr><td><code>"lookAtPosition"</code></td><td>Look-at position.</td></tr>
|
||||
* <tr><td><code>"audioLoudness"</code></td><td>Audio loudness.</td></tr>
|
||||
* <tr><td><code>"sensorToWorkMatrix"</code></td><td>Sensor-to-world matrix.</td></tr>
|
||||
* <tr><td><code>"additionalFlags"</code></td><td>Additional avatar flags.</td></tr>
|
||||
* <tr><td><code>"parentInfo"</code></td><td>Parent information.</td></tr>
|
||||
* <tr><td><code>"faceTracker"</code></td><td>Face tracker data.</td></tr>
|
||||
* <tr><td><code>"jointData"</code></td><td>Joint data.</td></tr>
|
||||
* <tr><td><code>"farGrabJointData"</code></td><td>Far grab joint data.</td></tr>
|
||||
|
||||
* <tr><td><code>""</code></td><td>When no rate name is specified, the overall update rate is provided.</td></tr>
|
||||
* </tbody>
|
||||
* </table>
|
||||
*
|
||||
* @typedef {string} AvatarUpdateRate
|
||||
*/
|
||||
float AvatarData::getUpdateRate(const QString& rateName) const {
|
||||
if (rateName == "") {
|
||||
return _parseBufferUpdateRate.rate();
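Similarly, a short sketch for the update rates above, under the same assumption that getUpdateRate is exposed to scripts on MyAvatar.
    print("Overall: " + MyAvatar.getUpdateRate("") + " Hz");             // "" gives the overall update rate
    print("Joint data: " + MyAvatar.getUpdateRate("jointData") + " Hz");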
|
||||
|
@ -2731,13 +2801,16 @@ glm::vec3 AvatarData::getAbsoluteJointTranslationInObjectFrame(int index) const
|
|||
}
|
||||
|
||||
/**jsdoc
|
||||
* Information on an attachment worn by the avatar.
|
||||
* @typedef {object} AttachmentData
|
||||
* @property {string} modelUrl
|
||||
* @property {string} jointName
|
||||
* @property {Vec3} translation
|
||||
* @property {Vec3} rotation
|
||||
* @property {number} scale
|
||||
* @property {boolean} soft
|
||||
* @property {string} modelUrl - The URL of the model file. Models can be FBX or OBJ format.
|
||||
* @property {string} jointName - The name of the joint that the attachment is attached to.
|
||||
* @property {Vec3} translation - The offset from the joint that the attachment is positioned at.
|
||||
* @property {Vec3} rotation - The rotation applied to the model relative to the joint orientation.
|
||||
* @property {number} scale - The scale applied to the attachment model.
|
||||
* @property {boolean} soft - If <code>true</code> and the model has a skeleton, the bones of the attached model's skeleton are
|
||||
* rotated to fit the avatar's current pose. If <code>true</code>, the <code>translation</code>, <code>rotation</code>, and
|
||||
* <code>scale</code> parameters are ignored.
|
||||
*/
|
||||
QVariant AttachmentData::toVariant() const {
|
||||
QVariantMap result;
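An illustrative AttachmentData object matching the typedef above; the model URL is a placeholder, key names follow the typedef, and attachments are deprecated in favor of avatar entities.
    var attachment = {
        modelUrl: "https://example.com/models/cowboy-hat.fbx",   // placeholder URL; FBX or OBJ
        jointName: "Head",
        translation: { x: 0, y: 0.15, z: 0 },
        rotation: { x: 0, y: 0, z: 0 },
        scale: 1.0,
        soft: false
    };
    // e.g. MyAvatar.setAttachmentData(MyAvatar.getAttachmentData().concat([attachment]));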
|
||||
|
@ -2943,6 +3016,10 @@ float AvatarData::_avatarSortCoefficientSize { 8.0f };
|
|||
float AvatarData::_avatarSortCoefficientCenter { 0.25f };
|
||||
float AvatarData::_avatarSortCoefficientAge { 1.0f };
|
||||
|
||||
/**jsdoc
|
||||
* An object with the UUIDs of avatar entities as keys and avatar entity properties objects as values.
|
||||
* @typedef {Object.<Uuid, Entities.EntityProperties>} AvatarEntityMap
|
||||
*/
|
||||
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
|
||||
QScriptValue obj = engine->newObject();
|
||||
for (auto entityID : value.keys()) {
|
||||
|
|
|
@ -116,6 +116,24 @@ const int PROCEDURAL_BLINK_FACE_MOVEMENT = 10; // 11th bit
|
|||
const int COLLIDE_WITH_OTHER_AVATARS = 11; // 12th bit
|
||||
const int HAS_HERO_PRIORITY = 12; // 13th bit (be scared)
|
||||
|
||||
/**jsdoc
* <p>The pointing state of the hands is specified by the following values:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>No hand is pointing.</td></tr>
* <tr><td><code>1</code></td><td>The left hand is pointing.</td></tr>
* <tr><td><code>2</code></td><td>The right hand is pointing.</td></tr>
* <tr><td><code>4</code></td><td>It is the index finger that is pointing.</td></tr>
* </tbody>
* </table>
* <p>The values for the hand states are added together to give the <code>HandState</code> value. For example, if the left
* hand's finger is pointing, the value is <code>1 + 4 == 5</code>.</p>
* @typedef {number} HandState
*/
const char HAND_STATE_NULL = 0;
const char LEFT_HAND_POINTING_FLAG = 1;
const char RIGHT_HAND_POINTING_FLAG = 2;
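A small sketch of composing the bit flags documented above, assuming the combined value is applied via the scripted setHandState setter declared later in this diff.
    var LEFT_HAND = 1;
    var INDEX_FINGER = 4;
    MyAvatar.setHandState(LEFT_HAND + INDEX_FINGER);           // left hand pointing with its index finger
    print(MyAvatar.getHandState());                            // should reflect the combined value just set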
|
||||
|
@ -414,7 +432,55 @@ class ClientTraitsHandler;
|
|||
class AvatarData : public QObject, public SpatiallyNestable {
|
||||
Q_OBJECT
|
||||
|
||||
// The following properties have JSDoc in MyAvatar.h and ScriptableAvatar.h
|
||||
// IMPORTANT: The JSDoc for the following properties should be copied to MyAvatar.h and ScriptableAvatar.h.
|
||||
/*
|
||||
* @property {Vec3} position - The position of the avatar.
|
||||
* @property {number} scale=1.0 - The scale of the avatar. The value can be set to anything between <code>0.005</code> and
|
||||
* <code>1000.0</code>. When the scale value is fetched, it may temporarily be further limited by the domain's settings.
|
||||
* @property {number} density - The density of the avatar in kg/m<sup>3</sup>. The density is used to work out its mass in
|
||||
* the application of physics. <em>Read-only.</em>
|
||||
* @property {Vec3} handPosition - A user-defined hand position, in world coordinates. The position moves with the avatar
|
||||
* but is otherwise not used or changed by Interface.
|
||||
* @property {number} bodyYaw - The left or right rotation about an axis running from the head to the feet of the avatar.
|
||||
* Yaw is sometimes called "heading".
|
||||
* @property {number} bodyPitch - The rotation about an axis running from shoulder to shoulder of the avatar. Pitch is
|
||||
* sometimes called "elevation".
|
||||
* @property {number} bodyRoll - The rotation about an axis running from the chest to the back of the avatar. Roll is
|
||||
* sometimes called "bank".
|
||||
* @property {Quat} orientation - The orientation of the avatar.
|
||||
* @property {Quat} headOrientation - The orientation of the avatar's head.
|
||||
* @property {number} headPitch - The rotation about an axis running from ear to ear of the avatar's head. Pitch is
|
||||
* sometimes called "elevation".
|
||||
* @property {number} headYaw - The rotation left or right about an axis running from the base to the crown of the avatar's
|
||||
* head. Yaw is sometimes called "heading".
|
||||
* @property {number} headRoll - The rotation about an axis running from the nose to the back of the avatar's head. Roll is
|
||||
* sometimes called "bank".
|
||||
* @property {Vec3} velocity - The current velocity of the avatar.
|
||||
* @property {Vec3} angularVelocity - The current angular velocity of the avatar.
|
||||
* @property {number} audioLoudness - The instantaneous loudness of the audio input that the avatar is injecting into the
|
||||
* domain.
|
||||
* @property {number} audioAverageLoudness - The rolling average loudness of the audio input that the avatar is injecting
|
||||
* into the domain.
|
||||
* @property {string} displayName - The avatar's display name.
|
||||
* @property {string} sessionDisplayName - <code>displayName's</code> sanitized and default version defined by the avatar
|
||||
* mixer rather than Interface clients. The result is unique among all avatars present in the domain at the time.
|
||||
* @property {boolean} lookAtSnappingEnabled=true - <code>true</code> if the avatar's eyes snap to look at another avatar's
|
||||
* eyes when the other avatar is in the line of sight and also has <code>lookAtSnappingEnabled == true</code>.
|
||||
* @property {string} skeletonModelURL - The avatar's FST file.
|
||||
* @property {AttachmentData[]} attachmentData - Information on the avatar's attachments.<br />
|
||||
* <strong>Deprecated:</strong> Use avatar entities instead.
|
||||
* @property {string[]} jointNames - The list of joints in the current avatar model. <em>Read-only.</em>
|
||||
* @property {Uuid} sessionUUID - Unique ID of the avatar in the domain. <em>Read-only.</em>
|
||||
* @property {Mat4} sensorToWorldMatrix - The scale, rotation, and translation transform from the user's real world to the
|
||||
* avatar's size, orientation, and position in the virtual world. <em>Read-only.</em>
|
||||
* @property {Mat4} controllerLeftHandMatrix - The rotation and translation of the left hand controller relative to the
|
||||
* avatar. <em>Read-only.</em>
|
||||
* @property {Mat4} controllerRightHandMatrix - The rotation and translation of the right hand controller relative to the
|
||||
* avatar. <em>Read-only.</em>
|
||||
* @property {number} sensorToWorldScale - The scale that transforms dimensions in the user's real world to the avatar's
|
||||
* size in the virtual world. <em>Read-only.</em>
|
||||
* @property {boolean} hasPriority - Whether or not the avatar is in a "Hero" zone. <em>Read-only.</em>
|
||||
*/
|
||||
Q_PROPERTY(glm::vec3 position READ getWorldPosition WRITE setPositionViaScript)
|
||||
Q_PROPERTY(float scale READ getDomainLimitedScale WRITE setTargetScale)
|
||||
Q_PROPERTY(float density READ getDensity)
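A few of the properties listed above, read from a script; all of them appear in the property block and the output format is illustrative.
    print("Name: " + MyAvatar.displayName + " (" + MyAvatar.sessionDisplayName + ")");
    print("Position: " + JSON.stringify(MyAvatar.position));
    print("Scale: " + MyAvatar.scale + ", density: " + MyAvatar.density);
    print("In a Hero zone: " + MyAvatar.hasPriority);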
|
||||
|
@ -570,18 +636,18 @@ public:
|
|||
virtual bool getHasAudioEnabledFaceMovement() const { return false; }
|
||||
|
||||
/**jsdoc
|
||||
* Returns the minimum scale allowed for this avatar in the current domain.
|
||||
* Gets the minimum scale allowed for this avatar in the current domain.
|
||||
* This value can change as the user changes avatars or when changing domains.
|
||||
* @function MyAvatar.getDomainMinScale
|
||||
* @returns {number} minimum scale allowed for this avatar in the current domain.
|
||||
* @function Avatar.getDomainMinScale
|
||||
* @returns {number} The minimum scale allowed for this avatar in the current domain.
|
||||
*/
|
||||
Q_INVOKABLE float getDomainMinScale() const;
|
||||
|
||||
/**jsdoc
|
||||
* Returns the maximum scale allowed for this avatar in the current domain.
|
||||
* Gets the maximum scale allowed for this avatar in the current domain.
|
||||
* This value can change as the user changes avatars or when changing domains.
|
||||
* @function MyAvatar.getDomainMaxScale
|
||||
* @returns {number} maximum scale allowed for this avatar in the current domain.
|
||||
* @function Avatar.getDomainMaxScale
|
||||
* @returns {number} The maximum scale allowed for this avatar in the current domain.
|
||||
*/
|
||||
Q_INVOKABLE float getDomainMaxScale() const;
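A sketch that clamps a requested scale to the domain limits returned by the two functions above; the requested value of 2.5 is arbitrary.
    var requested = 2.5;
    var clamped = Math.max(MyAvatar.getDomainMinScale(), Math.min(requested, MyAvatar.getDomainMaxScale()));
    print("Requested " + requested + ", domain allows " + clamped);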
|
||||
|
||||
|
@ -594,18 +660,18 @@ public:
|
|||
virtual bool canMeasureEyeHeight() const { return false; }
|
||||
|
||||
/**jsdoc
|
||||
* Provides read only access to the current eye height of the avatar.
|
||||
* Gets the current eye height of the avatar.
|
||||
* This height is only an estimate and might be incorrect for avatars that are missing standard joints.
|
||||
* @function MyAvatar.getEyeHeight
|
||||
* @returns {number} Eye height of avatar in meters.
|
||||
* @function Avatar.getEyeHeight
|
||||
* @returns {number} The eye height of the avatar.
|
||||
*/
|
||||
Q_INVOKABLE virtual float getEyeHeight() const { return _targetScale * getUnscaledEyeHeight(); }
|
||||
|
||||
/**jsdoc
|
||||
* Provides read only access to the current height of the avatar.
|
||||
* Gets the current height of the avatar.
|
||||
* This height is only an estimate and might be incorrect for avatars that are missing standard joints.
|
||||
* @function MyAvatar.getHeight
|
||||
* @returns {number} Height of avatar in meters.
|
||||
* @function Avatar.getHeight
|
||||
* @returns {number} The height of the avatar.
|
||||
*/
|
||||
Q_INVOKABLE virtual float getHeight() const;
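A one-line sketch reading the two estimates documented above; as the JSDoc notes, both are estimates and may be off for avatars missing standard joints.
    print("Height: " + MyAvatar.getHeight() + " m, eye height: " + MyAvatar.getEyeHeight() + " m");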
|
||||
|
||||
|
@ -615,36 +681,43 @@ public:
|
|||
void setDomainMaximumHeight(float domainMaximumHeight);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setHandState
|
||||
* @param {string} state
|
||||
* Sets the pointing state of the hands to control where the laser emanates from. If the right index finger is pointing, the
|
||||
* laser emanates from the tip of that finger, otherwise it emanates from the palm.
|
||||
* @function Avatar.setHandState
|
||||
* @param {HandState} state - The pointing state of the hand.
|
||||
*/
|
||||
Q_INVOKABLE void setHandState(char s) { _handState = s; }
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getHandState
|
||||
* @returns {string}
|
||||
* Gets the pointing state of the hands to control where the laser emanates from. If the right index finger is pointing, the
|
||||
* laser emanates from the tip of that finger, otherwise it emanates from the palm.
|
||||
* @function Avatar.getHandState
|
||||
* @returns {HandState} The pointing state of the hand.
|
||||
*/
|
||||
Q_INVOKABLE char getHandState() const { return _handState; }
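A complementary sketch that decodes a HandState value back into its component flags, using the bit values from the HandState table earlier in this diff; the sample value 5 is illustrative (the live value would come from getHandState, whose char return type may or may not surface as a number in the script engine).
    var state = 5;                                             // e.g. a value previously passed to setHandState
    var leftPointing = (state & 1) !== 0;
    var rightPointing = (state & 2) !== 0;
    var indexFinger = (state & 4) !== 0;
    print("left: " + leftPointing + ", right: " + rightPointing + ", index finger: " + indexFinger);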
|
||||
|
||||
const QVector<JointData>& getRawJointData() const { return _jointData; }
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setRawJointData
|
||||
* @param {JointData[]} data
|
||||
* Sets joint translations and rotations from raw joint data.
|
||||
* @function Avatar.setRawJointData
|
||||
* @param {JointData[]} data - The raw joint data.
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
Q_INVOKABLE void setRawJointData(QVector<JointData> data);
|
||||
|
||||
/**jsdoc
|
||||
* Set a specific joint's rotation and position relative to its parent.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* Sets a specific joint's rotation and position relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointData
|
||||
* @function Avatar.setJointData
|
||||
* @param {number} index - The index of the joint.
|
||||
* @param {Quat} rotation - The rotation of the joint relative to its parent.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent, in model coordinates.
|
||||
* @example <caption>Set your avatar to its default T-pose for a while.<br />
|
||||
* <img alt="Avatar in T-pose" src="https://docs.highfidelity.com/images/t-pose.png" /></caption>
|
||||
* // Set all joint translations and rotations to defaults.
|
||||
|
@ -659,91 +732,98 @@ public:
|
|||
* Script.setTimeout(function () {
|
||||
* MyAvatar.clearJointsData();
|
||||
* }, 5000);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointData(int index, const glm::quat& rotation, const glm::vec3& translation);
|
||||
|
||||
/**jsdoc
|
||||
* Set a specific joint's rotation relative to its parent.
|
||||
* Sets a specific joint's rotation relative to its parent.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointRotation
|
||||
* @function Avatar.setJointRotation
|
||||
* @param {number} index - The index of the joint.
|
||||
* @param {Quat} rotation - The rotation of the joint relative to its parent.
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointRotation(int index, const glm::quat& rotation);
|
||||
|
||||
/**jsdoc
|
||||
* Set a specific joint's translation relative to its parent.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* Sets a specific joint's translation relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointTranslation
|
||||
* @function Avatar.setJointTranslation
|
||||
* @param {number} index - The index of the joint.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent, in model coordinates.
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointTranslation(int index, const glm::vec3& translation);
|
||||
|
||||
/**jsdoc
|
||||
* Clear joint translations and rotations set by script for a specific joint. This restores all motion from the default
|
||||
* Clears joint translations and rotations set by script for a specific joint. This restores all motion from the default
|
||||
* animation system including inverse kinematics for that joint.
|
||||
* <p>Note: This is slightly faster than the function variation that specifies the joint name.</p>
|
||||
* @function MyAvatar.clearJointData
|
||||
* @function Avatar.clearJointData
|
||||
* @param {number} index - The index of the joint.
|
||||
*/
|
||||
Q_INVOKABLE virtual void clearJointData(int index);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.isJointDataValid
|
||||
* @param {number} index
|
||||
* @returns {boolean}
|
||||
* Checks that the data for a joint are valid.
|
||||
* @function Avatar.isJointDataValid
|
||||
* @param {number} index - The index of the joint.
|
||||
* @returns {boolean} <code>true</code> if the joint data are valid, <code>false</code> if not.
|
||||
*/
|
||||
Q_INVOKABLE bool isJointDataValid(int index) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the rotation of a joint relative to its parent. For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create-and-explore/avatars/avatar-standards">Avatar Standards</a>.
|
||||
* @function MyAvatar.getJointRotation
|
||||
* Gets the rotation of a joint relative to its parent. For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.
|
||||
* @function Avatar.getJointRotation
|
||||
* @param {number} index - The index of the joint.
|
||||
* @returns {Quat} The rotation of the joint relative to its parent.
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::quat getJointRotation(int index) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the translation of a joint relative to its parent. For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create-and-explore/avatars/avatar-standards">Avatar Standards</a>.
|
||||
* @function MyAvatar.getJointTranslation
|
||||
* Gets the translation of a joint relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
|
||||
* @function Avatar.getJointTranslation
|
||||
* @param {number} index - The index of the joint.
|
||||
* @returns {Vec3} The translation of the joint relative to its parent.
|
||||
* @returns {Vec3} The translation of the joint relative to its parent, in model coordinates.
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::vec3 getJointTranslation(int index) const;
|
||||
|
||||
/**jsdoc
|
||||
* Set a specific joint's rotation and position relative to its parent.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* Sets a specific joint's rotation and position relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointData
|
||||
* @function Avatar.setJointData
|
||||
* @param {string} name - The name of the joint.
|
||||
* @param {Quat} rotation - The rotation of the joint relative to its parent.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent, in model coordinates.
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointData(const QString& name, const glm::quat& rotation, const glm::vec3& translation);
|
||||
|
||||
/**jsdoc
|
||||
* Set a specific joint's rotation relative to its parent.
|
||||
* Sets a specific joint's rotation relative to its parent.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointRotation
|
||||
* @function Avatar.setJointRotation
|
||||
* @param {string} name - The name of the joint.
|
||||
* @param {Quat} rotation - The rotation of the joint relative to its parent.
|
||||
* @example <caption>Set your avatar to its default T-pose then rotate its right arm.<br />
|
||||
|
@ -764,104 +844,125 @@ public:
|
|||
* Script.setTimeout(function () {
|
||||
* MyAvatar.clearJointsData();
|
||||
* }, 5000);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointRotation(const QString& name, const glm::quat& rotation);
|
||||
|
||||
/**jsdoc
|
||||
* Set a specific joint's translation relative to its parent.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* Sets a specific joint's translation relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointTranslation
|
||||
* @function Avatar.setJointTranslation
|
||||
* @param {string} name - The name of the joint.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent.
|
||||
* @param {Vec3} translation - The translation of the joint relative to its parent, in model coordinates.
|
||||
* @example <caption>Stretch your avatar's neck. Depending on the avatar you are using, you will either see a gap between
|
||||
* the head and body or you will see the neck stretched.<br />
|
||||
* <img alt="Avatar with neck stretched" src="https://docs.highfidelity.com/images/stretched-neck.png" /></caption>
|
||||
* // Stretch your avatar's neck.
|
||||
* MyAvatar.setJointTranslation("Neck", { x: 0, y: 25, z: 0 });
|
||||
* MyAvatar.setJointTranslation("Neck", Vec3.multiply(2, MyAvatar.getJointTranslation("Neck")));
|
||||
*
|
||||
* // Restore your avatar's neck after 5s.
|
||||
* Script.setTimeout(function () {
|
||||
* MyAvatar.clearJointData("Neck");
|
||||
* }, 5000);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointTranslation(const QString& name, const glm::vec3& translation);
|
||||
|
||||
/**jsdoc
|
||||
* Clear joint translations and rotations set by script for a specific joint. This restores all motion from the default
|
||||
* Clears joint translations and rotations set by script for a specific joint. This restores all motion from the default
|
||||
* animation system including inverse kinematics for that joint.
|
||||
* <p>Note: This is slightly slower than the function variation that specifies the joint index.</p>
|
||||
* @function MyAvatar.clearJointData
|
||||
* @function Avatar.clearJointData
|
||||
* @param {string} name - The name of the joint.
|
||||
* @example <caption>Offset and restore the position of your avatar's head.</caption>
|
||||
* // Move your avatar's head up by 25cm from where it should be.
|
||||
* MyAvatar.setJointTranslation("Neck", { x: 0, y: 0.25, z: 0 });
|
||||
* // Stretch your avatar's neck.
|
||||
* MyAvatar.setJointTranslation("Neck", Vec3.multiply(2, MyAvatar.getJointTranslation("Neck")));
|
||||
*
|
||||
* // Restore your avatar's head to its default position after 5s.
|
||||
* // Restore your avatar's neck after 5s.
|
||||
* Script.setTimeout(function () {
|
||||
* MyAvatar.clearJointData("Neck");
|
||||
* }, 5000);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void clearJointData(const QString& name);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.isJointDataValid
|
||||
* @param {string} name
|
||||
* @returns {boolean}
|
||||
* Checks if the data for a joint are valid.
|
||||
* @function Avatar.isJointDataValid
|
||||
* @param {string} name - The name of the joint.
|
||||
* @returns {boolean} <code>true</code> if the joint data are valid, <code>false</code> if not.
|
||||
*/
|
||||
Q_INVOKABLE virtual bool isJointDataValid(const QString& name) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the rotation of a joint relative to its parent. For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create-and-explore/avatars/avatar-standards">Avatar Standards</a>.
|
||||
* @function MyAvatar.getJointRotation
|
||||
* Gets the rotation of a joint relative to its parent. For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.
|
||||
* @function Avatar.getJointRotation
|
||||
* @param {string} name - The name of the joint.
|
||||
* @returns {Quat} The rotation of the joint relative to its parent.
|
||||
* @example <caption>Report the rotation of your avatar's hips joint.</caption>
|
||||
* print(JSON.stringify(MyAvatar.getJointRotation("Hips")));
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::quat getJointRotation(const QString& name) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the translation of a joint relative to its parent. For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create-and-explore/avatars/avatar-standards">Avatar Standards</a>.
|
||||
* @function MyAvatar.getJointTranslation
|
||||
* Gets the translation of a joint relative to its parent, in model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>For information on the joint hierarchy used, see
|
||||
* <a href="https://docs.highfidelity.com/create/avatars/avatar-standards">Avatar Standards</a>.</p>
|
||||
* @function Avatar.getJointTranslation
|
||||
* @param {string} name - The name of the joint.
|
||||
* @returns {Vec3} The translation of the joint relative to its parent.
|
||||
* @returns {Vec3} The translation of the joint relative to its parent, in model coordinates.
|
||||
* @example <caption>Report the translation of your avatar's hips joint.</caption>
|
||||
* print(JSON.stringify(MyAvatar.getJointTranslation("Hips")));
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::vec3 getJointTranslation(const QString& name) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the rotations of all joints in the current avatar. Each joint's rotation is relative to its parent joint.
|
||||
* @function MyAvatar.getJointRotations
|
||||
* Gets the rotations of all joints in the current avatar. Each joint's rotation is relative to its parent joint.
|
||||
* @function Avatar.getJointRotations
|
||||
* @returns {Quat[]} The rotations of all joints relative to each's parent. The values are in the same order as the array
|
||||
* returned by {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}.
|
||||
* returned by {@link MyAvatar.getJointNames}, or {@link Avatar.getJointNames} if using the <code>Avatar</code> API.
|
||||
* @example <caption>Report the rotations of all your avatar's joints.</caption>
|
||||
* print(JSON.stringify(MyAvatar.getJointRotations()));
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual QVector<glm::quat> getJointRotations() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getJointTranslations
|
||||
* @returns {Vec3[]}
|
||||
* Gets the translations of all joints in the current avatar. Each joint's translation is relative to its parent joint, in
|
||||
* model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* @function Avatar.getJointTranslations
|
||||
* @returns {Vec3[]} The translations of all joints relative to each's parent, in model coordinates. The values are in the
|
||||
* same order as the array returned by {@link MyAvatar.getJointNames}, or {@link Avatar.getJointNames} if using the
|
||||
* <code>Avatar</code> API.
|
||||
*/
|
||||
Q_INVOKABLE virtual QVector<glm::vec3> getJointTranslations() const;
|
||||
|
||||
/**jsdoc
|
||||
* Set the rotations of all joints in the current avatar. Each joint's rotation is relative to its parent joint.
|
||||
* Sets the rotations of all joints in the current avatar. Each joint's rotation is relative to its parent joint.
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function MyAvatar.setJointRotations
|
||||
* @function Avatar.setJointRotations
|
||||
* @param {Quat[]} jointRotations - The rotations for all joints in the avatar. The values are in the same order as the
|
||||
* array returned by {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}.
|
||||
* array returned by {@link MyAvatar.getJointNames}, or {@link Avatar.getJointNames} if using the <code>Avatar</code> API.
|
||||
* @example <caption>Set your avatar to its default T-pose then rotate its right arm.<br />
|
||||
* <img alt="Avatar in T-pose" src="https://docs.highfidelity.com/images/armpose.png" /></caption>
|
||||
* // Set all joint translations and rotations to defaults.
|
||||
|
@ -885,19 +986,31 @@ public:
|
|||
* Script.setTimeout(function () {
|
||||
* MyAvatar.clearJointsData();
|
||||
* }, 5000);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointRotations(const QVector<glm::quat>& jointRotations);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setJointTranslations
|
||||
* @param {Vec3[]} translations
|
||||
* Sets the translations of all joints in the current avatar. Each joint's translation is relative to its parent joint, in
|
||||
* model coordinates.
|
||||
* <p><strong>Warning:</strong> These coordinates are not necessarily in meters.</p>
|
||||
* <p>Setting joint data completely overrides/replaces all motion from the default animation system including inverse
|
||||
* kinematics, but just for the specified joint. So for example, if you were to procedurally manipulate the finger joints,
|
||||
* the avatar's hand and head would still do inverse kinematics properly. However, as soon as you start to manipulate
|
||||
* joints in the inverse kinematics chain, the inverse kinematics might not function as you expect. For example, if you set
|
||||
* the rotation of the elbow, the hand inverse kinematics position won't end up in the right place.</p>
|
||||
* @function Avatar.setJointTranslations
|
||||
* @param {Vec3[]} translations - The translations for all joints in the avatar, in model coordinates. The values are in
|
||||
* the same order as the array returned by {@link MyAvatar.getJointNames}, or {@link Avatar.getJointNames} if using the
|
||||
* <code>Avatar</code> API.
|
||||
*/
|
||||
Q_INVOKABLE virtual void setJointTranslations(const QVector<glm::vec3>& jointTranslations);
|
||||
|
||||
/**jsdoc
|
||||
* Clear all joint translations and rotations that have been set by script. This restores all motion from the default
|
||||
* Clears all joint translations and rotations that have been set by script. This restores all motion from the default
|
||||
* animation system including inverse kinematics for all joints.
|
||||
* @function MyAvatar.clearJointsData
|
||||
* @function Avatar.clearJointsData
|
||||
* @example <caption>Set your avatar to its default T-pose for a while.</caption>
|
||||
* // Set all joint translations and rotations to defaults.
|
||||
* var i, length, rotation, translation;
|
||||
|
@ -911,49 +1024,69 @@ public:
|
|||
* Script.setTimeout(function () {
|
||||
* MyAvatar.clearJointsData();
|
||||
* }, 5000);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void clearJointsData();
|
||||
|
||||
/**jsdoc
|
||||
* Get the joint index for a named joint. The joint index value is the position of the joint in the array returned by
|
||||
* {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}.
|
||||
* @function MyAvatar.getJointIndex
|
||||
* Gets the joint index for a named joint. The joint index value is the position of the joint in the array returned by
|
||||
* {@link MyAvatar.getJointNames}, or {@link Avatar.getJointNames} if using the <code>Avatar</code> API.
|
||||
* @function Avatar.getJointIndex
|
||||
* @param {string} name - The name of the joint.
|
||||
* @returns {number} The index of the joint.
|
||||
* @returns {number} The index of the joint if valid, otherwise <code>-1</code>.
|
||||
* @example <caption>Report the index of your avatar's left arm joint.</caption>
|
||||
* print(JSON.stringify(MyAvatar.getJointIndex("LeftArm"));
|
||||
* print(JSON.stringify(MyAvatar.getJointIndex("LeftArm")));
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
/// Returns the index of the joint with the specified name, or -1 if not found/unknown.
|
||||
Q_INVOKABLE virtual int getJointIndex(const QString& name) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the names of all the joints in the current avatar.
|
||||
* @function MyAvatar.getJointNames
|
||||
* Gets the names of all the joints in the current avatar.
|
||||
* @function Avatar.getJointNames
|
||||
* @returns {string[]} The joint names.
|
||||
* @example <caption>Report the names of all the joints in your current avatar.</caption>
|
||||
* print(JSON.stringify(MyAvatar.getJointNames()));
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual QStringList getJointNames() const;
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setBlendshape
|
||||
* @param {string} name
|
||||
* @param {number} value
|
||||
* Sets the value of a blendshape to animate your avatar's face. To enable other users to see the resulting animation of
|
||||
* your avatar's face, use {@link Avatar.setForceFaceTrackerConnected} or {@link MyAvatar.setForceFaceTrackerConnected}.
|
||||
* @function Avatar.setBlendshape
|
||||
* @param {string} name - The name of the blendshape, per the
|
||||
* {@link https://docs.highfidelity.com/create/avatars/avatar-standards.html#blendshapes Avatar Standards}.
|
||||
* @param {number} value - A value between <code>0.0</code> and <code>1.0</code>.
|
||||
* @example <caption>Open your avatar's mouth wide.</caption>
|
||||
* MyAvatar.setForceFaceTrackerConnected(true);
|
||||
* MyAvatar.setBlendshape("JawOpen", 1.0);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE void setBlendshape(QString name, float val) { _headData->setBlendshape(name, val); }
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getAttachmentsVariant
|
||||
* @returns {object}
|
||||
* Gets information about the models currently attached to your avatar.
|
||||
* @function Avatar.getAttachmentsVariant
|
||||
* @returns {AttachmentData[]} Information about all models attached to your avatar.
|
||||
* @deprecated Use avatar entities instead.
|
||||
*/
|
||||
// FIXME: Can this name be improved? Can it be deprecated?
|
||||
Q_INVOKABLE virtual QVariantList getAttachmentsVariant() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setAttachmentsVariant
|
||||
* @param {object} variant
|
||||
* Sets all models currently attached to your avatar. For example, if you retrieve attachment data using
|
||||
* {@link MyAvatar.getAttachmentsVariant} or {@link Avatar.getAttachmentsVariant}, make changes to it, and then want to
|
||||
* update your avatar's attachments per the changed data, use this function to apply the changes.
|
||||
* @function Avatar.setAttachmentsVariant
|
||||
* @param {AttachmentData[]} variant - The attachment data defining the models to have attached to your avatar.
|
||||
* @deprecated Use avatar entities instead.
|
||||
*/
|
||||
// FIXME: Can this name be improved? Can it be deprecated?
|
||||
Q_INVOKABLE virtual void setAttachmentsVariant(const QVariantList& variant);
|
||||
|
@ -961,22 +1094,28 @@ public:
|
|||
virtual void storeAvatarEntityDataPayload(const QUuid& entityID, const QByteArray& payload);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.updateAvatarEntity
|
||||
* @param {Uuid} entityID
|
||||
* @param {string} entityData
|
||||
* @function Avatar.updateAvatarEntity
|
||||
* @param {Uuid} entityID - The entity ID.
|
||||
* @param {Array.<byte>} entityData - Entity data.
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
Q_INVOKABLE virtual void updateAvatarEntity(const QUuid& entityID, const QByteArray& entityData);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.clearAvatarEntity
|
||||
* @param {Uuid} entityID
|
||||
* @function Avatar.clearAvatarEntity
|
||||
* @param {Uuid} entityID - The entity ID.
|
||||
* @param {boolean} [requiresRemovalFromTree=true] - Requires removal from tree.
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
Q_INVOKABLE virtual void clearAvatarEntity(const QUuid& entityID, bool requiresRemovalFromTree = true);
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setForceFaceTrackerConnected
|
||||
* @param {boolean} connected
|
||||
* Enables blendshapes set using {@link Avatar.setBlendshape} or {@link MyAvatar.setBlendshape} to be transmitted to other
|
||||
* users so that they can see the animation of your avatar's face.
|
||||
* @function Avatar.setForceFaceTrackerConnected
|
||||
* @param {boolean} connected - <code>true</code> to enable blendshape changes to be transmitted to other users,
|
||||
* <code>false</code> to disable.
|
||||
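* @example <caption>Enable blendshape transmission, then animate your avatar's jaw.</caption>
* // A minimal usage sketch added for illustration, mirroring the MyAvatar.setBlendshape example above.
* MyAvatar.setForceFaceTrackerConnected(true);
* MyAvatar.setBlendshape("JawOpen", 0.5);
*
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".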
*/
|
||||
Q_INVOKABLE void setForceFaceTrackerConnected(bool connected) { _forceFaceTrackerConnected = connected; }
|
||||
|
||||
|
@ -1027,24 +1166,28 @@ public:
|
|||
}
|
||||
|
||||
/**jsdoc
|
||||
* Get information about all models currently attached to your avatar.
|
||||
* @function MyAvatar.getAttachmentData
|
||||
* Gets information about the models currently attached to your avatar.
|
||||
* @function Avatar.getAttachmentData
|
||||
* @returns {AttachmentData[]} Information about all models attached to your avatar.
|
||||
* @deprecated Use avatar entities instead.
|
||||
* @example <caption>Report the URLs of all current attachments.</caption>
|
||||
* var attachments = MyAvatar.getAttachmentData();
|
||||
* for (var i = 0; i < attachments.length; i++) {
|
||||
* print (attachments[i].modelURL);
|
||||
* print(attachments[i].modelURL);
|
||||
* }
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual QVector<AttachmentData> getAttachmentData() const;
|
||||
|
||||
/**jsdoc
|
||||
* Set all models currently attached to your avatar. For example, if you retrieve attachment data using
|
||||
* Sets all models currently attached to your avatar. For example, use this function if you retrieve attachment data using
|
||||
* {@link MyAvatar.getAttachmentData} or {@link Avatar.getAttachmentData}, make changes to it, and then want to update your avatar's attachments per the
|
||||
* changed data. You can also remove all attachments by setting <code>attachmentData</code> to <code>null</code>.
|
||||
* @function MyAvatar.setAttachmentData
|
||||
* @param {AttachmentData[]} attachmentData - The attachment data defining the models to have attached to your avatar. Use
|
||||
* @function Avatar.setAttachmentData
|
||||
* @param {AttachmentData[]} attachmentData - The attachment data defining the models to have attached to your avatar. Use
|
||||
* <code>null</code> to remove all attachments.
|
||||
* @deprecated Use avatar entities instead.
|
||||
* @example <caption>Remove a hat attachment if your avatar is wearing it.</caption>
|
||||
* var hatURL = "https://s3.amazonaws.com/hifi-public/tony/cowboy-hat.fbx";
|
||||
* var attachments = MyAvatar.getAttachmentData();
|
||||
|
@ -1056,15 +1199,17 @@ public:
|
|||
* break;
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
|
||||
|
||||
/**jsdoc
|
||||
* Attach a model to your avatar. For example, you can give your avatar a hat to wear, a guitar to hold, or a surfboard to
|
||||
* Attaches a model to your avatar. For example, you can give your avatar a hat to wear, a guitar to hold, or a surfboard to
|
||||
* stand on.
|
||||
* <p>Note: Attached models are models only; they are not entities and cannot be manipulated using the {@link Entities} API.
|
||||
* Nor can you use this function to attach an entity (such as a sphere or a box) to your avatar.</p>
|
||||
* @function MyAvatar.attach
|
||||
* @function Avatar.attach
|
||||
* @param {string} modelURL - The URL of the model to attach. Models can be .FBX or .OBJ format.
|
||||
* @param {string} [jointName=""] - The name of the avatar joint (see {@link MyAvatar.getJointNames} or {@link Avatar.getJointNames}) to attach the model
|
||||
* to.
|
||||
|
@ -1072,12 +1217,14 @@ public:
|
|||
* @param {Quat} [rotation=Quat.IDENTITY] - The rotation to apply to the model relative to the joint orientation.
|
||||
* @param {number} [scale=1.0] - The scale to apply to the model.
|
||||
* @param {boolean} [isSoft=false] - If the model has a skeleton, set this to <code>true</code> so that the bones of the
|
||||
* attached model's skeleton are be rotated to fit the avatar's current pose. <code>isSoft</code> is used, for example,
|
||||
* attached model's skeleton are rotated to fit the avatar's current pose. <code>isSoft</code> is used, for example,
|
||||
* to have clothing that moves with the avatar.<br />
|
||||
* If <code>true</code>, the <code>translation</code>, <code>rotation</code>, and <code>scale</code> parameters are
|
||||
* ignored.
|
||||
* @param {boolean} [allowDuplicates=false]
|
||||
* @param {boolean} [useSaved=true]
|
||||
* @param {boolean} [allowDuplicates=false] - If <code>true</code> then more than one copy of any particular model may be
|
||||
* attached to the same joint; if <code>false</code> then the same model cannot be attached to the same joint.
|
||||
* @param {boolean} [useSaved=true] - <em>Not used.</em>
|
||||
* @deprecated Use avatar entities instead.
|
||||
* @example <caption>Attach a cowboy hat to your avatar's head.</caption>
|
||||
* var attachment = {
|
||||
* modelURL: "https://s3.amazonaws.com/hifi-public/tony/cowboy-hat.fbx",
|
||||
|
@ -1094,6 +1241,8 @@ public:
|
|||
* attachment.rotation,
|
||||
* attachment.scale,
|
||||
* attachment.isSoft);
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
Q_INVOKABLE virtual void attach(const QString& modelURL, const QString& jointName = QString(),
|
||||
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(),
|
||||
|
@ -1101,20 +1250,22 @@ public:
|
|||
bool allowDuplicates = false, bool useSaved = true);
|
||||
|
||||
/**jsdoc
|
||||
* Detach the most recently attached instance of a particular model from either a specific joint or any joint.
|
||||
* @function MyAvatar.detachOne
|
||||
* Detaches the most recently attached instance of a particular model from either a specific joint or any joint.
|
||||
* @function Avatar.detachOne
|
||||
* @param {string} modelURL - The URL of the model to detach.
|
||||
* @param {string} [jointName=""] - The name of the joint to detach the model from. If <code>""</code>, then the most
|
||||
* recently attached model is removed from whichever joint it was attached to.
|
||||
* @deprecated Use avatar entities instead.
|
||||
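* @example <caption>Detach the most recently attached cowboy hat.</caption>
* // A minimal usage sketch added for illustration, assuming the hat from the MyAvatar.attach example is currently attached.
* var hatURL = "https://s3.amazonaws.com/hifi-public/tony/cowboy-hat.fbx";
* MyAvatar.detachOne(hatURL);
*
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".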
*/
|
||||
Q_INVOKABLE virtual void detachOne(const QString& modelURL, const QString& jointName = QString());
|
||||
|
||||
/**jsdoc
|
||||
* Detach all instances of a particular model from either a specific joint or all joints.
|
||||
* @function MyAvatar.detachAll
|
||||
* Detaches all instances of a particular model from either a specific joint or all joints.
|
||||
* @function Avatar.detachAll
|
||||
* @param {string} modelURL - The URL of the model to detach.
|
||||
* @param {string} [jointName=""] - The name of the joint to detach the model from. If <code>""</code>, then the model is
|
||||
* detached from all joints.
|
||||
* @deprecated Use avatar entities instead.
|
||||
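* @example <caption>Detach every instance of the cowboy hat.</caption>
* // A minimal usage sketch added for illustration, assuming the hat model may have been attached more than once.
* var hatURL = "https://s3.amazonaws.com/hifi-public/tony/cowboy-hat.fbx";
* MyAvatar.detachAll(hatURL);
*
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".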
*/
|
||||
Q_INVOKABLE virtual void detachAll(const QString& modelURL, const QString& jointName = QString());
|
||||
|
||||
|
@ -1153,14 +1304,12 @@ public:
|
|||
AABox getDefaultBubbleBox() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getAvatarEntityData
|
||||
* @returns {object}
|
||||
* @comment Documented in derived classes' JSDoc because implementations are different.
|
||||
*/
|
||||
Q_INVOKABLE virtual AvatarEntityMap getAvatarEntityData() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setAvatarEntityData
|
||||
* @param {object} avatarEntityData
|
||||
* @comment Documented in derived classes' JSDoc because implementations are different.
|
||||
*/
|
||||
Q_INVOKABLE virtual void setAvatarEntityData(const AvatarEntityMap& avatarEntityData);
|
||||
|
||||
|
@ -1168,45 +1317,69 @@ public:
|
|||
AvatarEntityIDs getAndClearRecentlyRemovedIDs();
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getSensorToWorldMatrix
|
||||
* @returns {Mat4}
|
||||
* Gets the transform from the user's real world to the avatar's size, orientation, and position in the virtual world.
|
||||
* @function Avatar.getSensorToWorldMatrix
|
||||
* @returns {Mat4} The scale, rotation, and translation transform from the user's real world to the avatar's size,
|
||||
* orientation, and position in the virtual world.
|
||||
* @example <caption>Report the sensor to world matrix.</caption>
|
||||
* var sensorToWorldMatrix = MyAvatar.getSensorToWorldMatrix();
|
||||
* print("Sensor to woprld matrix: " + JSON.stringify(sensorToWorldMatrix));
|
||||
* print("Rotation: " + JSON.stringify(Mat4.extractRotation(sensorToWorldMatrix)));
|
||||
* print("Translation: " + JSON.stringify(Mat4.extractTranslation(sensorToWorldMatrix)));
|
||||
* print("Scale: " + JSON.stringify(Mat4.extractScale(sensorToWorldMatrix)));
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
// thread safe
|
||||
Q_INVOKABLE glm::mat4 getSensorToWorldMatrix() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getSensorToWorldScale
|
||||
* @returns {number}
|
||||
* Gets the scale that transforms dimensions in the user's real world to the avatar's size in the virtual world.
|
||||
* @function Avatar.getSensorToWorldScale
|
||||
* @returns {number} The scale that transforms dimensions in the user's real world to the avatar's size in the virtual
|
||||
* world.
|
||||
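* @example <caption>Report the sensor-to-world scale.</caption>
* // A minimal usage sketch added for illustration.
* print("Sensor to world scale: " + MyAvatar.getSensorToWorldScale());
*
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".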
*/
|
||||
// thread safe
|
||||
Q_INVOKABLE float getSensorToWorldScale() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getControllerLeftHandMatrix
|
||||
* @returns {Mat4}
|
||||
* Gets the rotation and translation of the left hand controller relative to the avatar.
|
||||
* @function Avatar.getControllerLeftHandMatrix
|
||||
* @returns {Mat4} The rotation and translation of the left hand controller relative to the avatar.
|
||||
* @example <caption>Report the left hand controller matrix.</caption>
|
||||
* var leftHandMatrix = MyAvatar.getControllerLeftHandMatrix();
|
||||
* print("Controller left hand matrix: " + JSON.stringify(leftHandMatrix));
|
||||
* print("Rotation: " + JSON.stringify(Mat4.extractRotation(leftHandMatrix)));
|
||||
* print("Translation: " + JSON.stringify(Mat4.extractTranslation(leftHandMatrix)));
|
||||
* print("Scale: " + JSON.stringify(Mat4.extractScale(leftHandMatrix))); // Always 1,1,1.
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
// thread safe
|
||||
Q_INVOKABLE glm::mat4 getControllerLeftHandMatrix() const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getControllerRightHandMatrix
|
||||
* @returns {Mat4}
|
||||
* Gets the rotation and translation of the right hand controller relative to the avatar.
|
||||
* @function Avatar.getControllerRightHandMatrix
|
||||
* @returns {Mat4} The rotation and translation of the right hand controller relative to the avatar.
|
||||
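* @example <caption>Report the right hand controller matrix.</caption>
* // A minimal usage sketch added for illustration, mirroring the left hand controller example above.
* var rightHandMatrix = MyAvatar.getControllerRightHandMatrix();
* print("Controller right hand matrix: " + JSON.stringify(rightHandMatrix));
* print("Rotation: " + JSON.stringify(Mat4.extractRotation(rightHandMatrix)));
* print("Translation: " + JSON.stringify(Mat4.extractTranslation(rightHandMatrix)));
*
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".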
*/
|
||||
// thread safe
|
||||
Q_INVOKABLE glm::mat4 getControllerRightHandMatrix() const;
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getDataRate
|
||||
* @param {string} [rateName=""]
|
||||
* @returns {number}
|
||||
* Gets the amount of avatar mixer data being generated by the avatar.
|
||||
* @function Avatar.getDataRate
|
||||
* @param {AvatarDataRate} [rateName=""] - The type of avatar mixer data to get the data rate of.
|
||||
* @returns {number} The data rate in kbps.
|
||||
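* @example <caption>Report an avatar mixer data rate.</caption>
* // A minimal usage sketch added for illustration; it assumes that omitting the rate name returns a default overall rate.
* print("Avatar data rate: " + MyAvatar.getDataRate() + " kbps");
*
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".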
*/
|
||||
Q_INVOKABLE float getDataRate(const QString& rateName = QString("")) const;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getUpdateRate
|
||||
* @param {string} [rateName=""]
|
||||
* @returns {number}
|
||||
* Gets the update rate of avatar mixer data being generated by the avatar.
|
||||
* @function Avatar.getUpdateRate
|
||||
* @param {AvatarUpdateRate} [rateName=""] - The type of avatar mixer data to get the update rate of.
|
||||
* @returns {number} The update rate in Hz.
|
||||
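* @example <caption>Report an avatar mixer update rate.</caption>
* // A minimal usage sketch added for illustration; it assumes that omitting the rate name returns a default overall rate.
* print("Avatar update rate: " + MyAvatar.getUpdateRate() + " Hz");
*
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".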
*/
|
||||
Q_INVOKABLE float getUpdateRate(const QString& rateName = QString("")) const;
|
||||
|
||||
|
@ -1252,52 +1425,92 @@ public:
|
|||
signals:
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.displayNameChanged
|
||||
* Triggered when the avatar's <code>displayName</code> property value changes.
|
||||
* @function Avatar.displayNameChanged
|
||||
* @returns {Signal}
|
||||
* @example <caption>Report when your avatar display name changes.</caption>
|
||||
* MyAvatar.displayNameChanged.connect(function () {
|
||||
* print("Avatar display name changed to: " + MyAvatar.displayName);
|
||||
* });
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
void displayNameChanged();
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.sessionDisplayNameChanged
|
||||
* Triggered when the avatar's <code>sessionDisplayName</code> property value changes.
|
||||
* @function Avatar.sessionDisplayNameChanged
|
||||
* @returns {Signal}
|
||||
* @example <caption>Report when your avatar's session display name changes.</caption>
|
||||
* MyAvatar.sessionDisplayNameChanged.connect(function () {
|
||||
* print("Avatar session display name changed to: " + MyAvatar.sessionDisplayName);
|
||||
* });
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
void sessionDisplayNameChanged();
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.skeletonModelURLChanged
|
||||
* Triggered when the avatar's model (i.e., <code>skeletonModelURL</code> property value) is changed.
|
||||
* @function Avatar.skeletonModelURLChanged
|
||||
* @returns {Signal}
|
||||
* @example <caption>Report when your avatar's skeleton model changes.</caption>
|
||||
* MyAvatar.skeletonModelURLChanged.connect(function () {
|
||||
* print("Skeleton model changed to: " + MyAvatar.skeletonModelURL);
|
||||
* });
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
void skeletonModelURLChanged();
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.lookAtSnappingChanged
|
||||
* @param {boolean} enabled
|
||||
* Triggered when the avatar's <code>lookAtSnappingEnabled</code> property value changes.
|
||||
* @function Avatar.lookAtSnappingChanged
|
||||
* @param {boolean} enabled - <code>true</code> if look-at snapping is enabled, <code>false</code> if not.
|
||||
* @returns {Signal}
|
||||
* @example <caption>Report when your look-at snapping setting changes.</caption>
|
||||
* MyAvatar.lookAtSnappingChanged.connect(function () {
|
||||
* print("Avatar look-at snapping changed to: " + MyAvatar.lookAtSnappingEnabled);
|
||||
* });
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
void lookAtSnappingChanged(bool enabled);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.sessionUUIDChanged
|
||||
* Triggered when the avatar's <code>sessionUUID</code> property value changes.
|
||||
* @function Avatar.sessionUUIDChanged
|
||||
* @returns {Signal}
|
||||
* @example <caption>Report when your avatar's session UUID changes.</caption>
|
||||
* MyAvatar.sessionUUIDChanged.connect(function () {
|
||||
* print("Avatar session UUID changed to: " + MyAvatar.sessionUUID);
|
||||
* });
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
|
||||
*/
|
||||
void sessionUUIDChanged();
|
||||
|
||||
public slots:
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.sendAvatarDataPacket
|
||||
* @param {boolean} [sendAll=false]
|
||||
* @function Avatar.sendAvatarDataPacket
|
||||
* @param {boolean} [sendAll=false] - Send all.
|
||||
* @returns {number}
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
virtual int sendAvatarDataPacket(bool sendAll = false);
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.sendIdentityPacket
|
||||
* @function Avatar.sendIdentityPacket
|
||||
* @returns {number}
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
int sendIdentityPacket();
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setSessionUUID
|
||||
* @param {Uuid} sessionUUID
|
||||
* @function Avatar.setSessionUUID
|
||||
* @param {Uuid} sessionUUID - Session UUID.
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
virtual void setSessionUUID(const QUuid& sessionUUID) {
|
||||
if (sessionUUID != getID()) {
|
||||
|
@ -1310,44 +1523,61 @@ public slots:
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getAbsoluteJointRotationInObjectFrame
|
||||
* @param {number} index
|
||||
* @returns {Quat}
|
||||
* Gets the rotation of a joint relative to the avatar.
|
||||
* <p><strong>Warning:</strong> Cannot be used in the <code>Avatar</code> API.</p>
|
||||
* @function Avatar.getAbsoluteJointRotationInObjectFrame
|
||||
* @param {number} index - The index of the joint. <em>Not used.</em>
|
||||
* @returns {Quat} <code>Quat.IDENTITY</code>.
|
||||
*/
|
||||
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getAbsoluteJointTranslationInObjectFrame
|
||||
* @param {number} index
|
||||
* @returns {Vec3}
|
||||
* Gets the translation of a joint relative to the avatar.
|
||||
* <p><strong>Warning:</strong> Cannot be used in the <code>Avatar</code> API.</p>
|
||||
* @function Avatar.getAbsoluteJointTranslationInObjectFrame
|
||||
* @param {number} index - The index of the joint. <em>Not used.</em>
|
||||
* @returns {Vec3} <code>Vec3.ZERO</code>.
|
||||
*/
|
||||
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setAbsoluteJointRotationInObjectFrame
|
||||
* @param {number} index
|
||||
* @param {Quat} rotation
|
||||
* @returns {boolean}
|
||||
* Sets the rotation of a joint relative to the avatar.
|
||||
* <p><strong>Warning:</strong> Cannot be used in the <code>Avatar</code> API.</p>
|
||||
* @function Avatar.setAbsoluteJointRotationInObjectFrame
|
||||
* @param {number} index - The index of the joint. <em>Not used.</em>
|
||||
* @param {Quat} rotation - The rotation of the joint relative to the avatar. <em>Not used.</em>
|
||||
* @returns {boolean} <code>false</code>.
|
||||
*/
|
||||
virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override { return false; }
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.setAbsoluteJointTranslationInObjectFrame
|
||||
* @param {number} index
|
||||
* @param {Vec3} translation
|
||||
* @returns {boolean}
|
||||
* Sets the translation of a joint relative to the avatar.
|
||||
* <p><strong>Warning:</strong> Cannot be used in the <code>Avatar</code> API.</p>
|
||||
* @function Avatar.setAbsoluteJointTranslationInObjectFrame
|
||||
* @param {number} index - The index of the joint. <em>Not used.</em>
|
||||
* @param {Vec3} translation - The translation of the joint relative to the avatar. <em>Not used.</em>
|
||||
* @returns {boolean} <code>false</code>.
|
||||
*/
|
||||
virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override { return false; }
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getTargetScale
|
||||
* @returns {number}
|
||||
* Gets the target scale of the avatar without any restrictions on permissible values imposed by the domain. In contrast, the
|
||||
* <code>scale</code> property's value may be limited by the domain's settings.
|
||||
* @function Avatar.getTargetScale
|
||||
* @returns {number} The target scale of the avatar.
|
||||
* @example <caption>Compare the target and current avatar scales.</caption>
|
||||
* print("Current avatar scale: " + MyAvatar.scale);
|
||||
* print("Target avatar scale: " + MyAvatar.getTargetScale());
|
||||
*
|
||||
* // Note: If using from the Avatar API, replace all occurrences of "MyAvatar" with "Avatar".
|
||||
*/
|
||||
float getTargetScale() const { return _targetScale; } // why is this a slot?
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.resetLastSent
|
||||
* @function Avatar.resetLastSent
|
||||
* @deprecated This function is deprecated and will be removed.
|
||||
*/
|
||||
void resetLastSent() { _lastToByteArray = 0; }
|
||||
|
||||
|
|
|
@ -109,7 +109,7 @@ bool Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
|
|||
return Parent::internalActivate();
|
||||
}
|
||||
|
||||
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
|
||||
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
#if defined(Q_OS_ANDROID)
|
||||
auto& virtualPadManager = VirtualPad::Manager::instance();
|
||||
if(virtualPadManager.getLeftVirtualPad()->isShown()) {
|
||||
|
@ -121,7 +121,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
|
|||
|
||||
render([&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setFramebuffer(_compositeFramebuffer);
|
||||
batch.setFramebuffer(compositeFramebuffer);
|
||||
batch.resetViewTransform();
|
||||
batch.setProjectionTransform(mat4());
|
||||
batch.setPipeline(_cursorPipeline);
|
||||
|
@ -140,7 +140,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
|
|||
});
|
||||
}
|
||||
#endif
|
||||
Parent::compositeExtra();
|
||||
Parent::compositeExtra(compositeFramebuffer);
|
||||
}
|
||||
|
||||
static const uint32_t MIN_THROTTLE_CHECK_FRAMES = 60;
|
||||
|
|
|
@ -33,7 +33,7 @@ public:
|
|||
|
||||
virtual bool isThrottled() const override;
|
||||
|
||||
virtual void compositeExtra() override;
|
||||
virtual void compositeExtra(const gpu::FramebufferPointer&) override;
|
||||
|
||||
virtual void pluginUpdate() override {};
|
||||
|
||||
|
|
|
@ -379,14 +379,6 @@ void OpenGLDisplayPlugin::customizeContext() {
|
|||
scissorState->setDepthTest(gpu::State::DepthTest(false));
|
||||
scissorState->setScissorEnable(true);
|
||||
|
||||
{
|
||||
#ifdef Q_OS_ANDROID
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureGammaLinearToSRGB);
|
||||
#else
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
|
||||
#endif
|
||||
_simplePipeline = gpu::Pipeline::create(program, scissorState);
|
||||
}
|
||||
{
|
||||
#ifdef Q_OS_ANDROID
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureGammaLinearToSRGB);
|
||||
|
@ -396,29 +388,59 @@ void OpenGLDisplayPlugin::customizeContext() {
|
|||
_presentPipeline = gpu::Pipeline::create(program, scissorState);
|
||||
}
|
||||
|
||||
|
||||
// HUD operator
|
||||
{
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
|
||||
_hudPipeline = gpu::Pipeline::create(program, blendState);
|
||||
}
|
||||
{
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
|
||||
_mirrorHUDPipeline = gpu::Pipeline::create(program, blendState);
|
||||
gpu::PipelinePointer hudPipeline;
|
||||
{
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
|
||||
hudPipeline = gpu::Pipeline::create(program, blendState);
|
||||
}
|
||||
|
||||
gpu::PipelinePointer hudMirrorPipeline;
|
||||
{
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
|
||||
hudMirrorPipeline = gpu::Pipeline::create(program, blendState);
|
||||
}
|
||||
|
||||
|
||||
_hudOperator = [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, const gpu::FramebufferPointer& compositeFramebuffer, bool mirror) {
|
||||
auto hudStereo = isStereo();
|
||||
auto hudCompositeFramebufferSize = compositeFramebuffer->getSize();
|
||||
std::array<glm::ivec4, 2> hudEyeViewports;
|
||||
for_each_eye([&](Eye eye) {
|
||||
hudEyeViewports[eye] = eyeViewport(eye);
|
||||
});
|
||||
if (hudPipeline && hudTexture) {
|
||||
batch.enableStereo(false);
|
||||
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
|
||||
batch.setResourceTexture(0, hudTexture);
|
||||
if (hudStereo) {
|
||||
for_each_eye([&](Eye eye) {
|
||||
batch.setViewportTransform(hudEyeViewports[eye]);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
});
|
||||
} else {
|
||||
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
{
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTransformedTexture);
|
||||
_cursorPipeline = gpu::Pipeline::create(program, blendState);
|
||||
}
|
||||
}
|
||||
updateCompositeFramebuffer();
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::uncustomizeContext() {
|
||||
_presentPipeline.reset();
|
||||
_cursorPipeline.reset();
|
||||
_hudPipeline.reset();
|
||||
_mirrorHUDPipeline.reset();
|
||||
_compositeFramebuffer.reset();
|
||||
_hudOperator = DEFAULT_HUD_OPERATOR;
|
||||
withPresentThreadLock([&] {
|
||||
_currentFrame.reset();
|
||||
_lastFrame = nullptr;
|
||||
|
@ -510,24 +532,16 @@ void OpenGLDisplayPlugin::captureFrame(const std::string& filename) const {
|
|||
});
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor) {
|
||||
renderFromTexture(batch, texture, viewport, scissor, nullptr);
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& copyFbo /*=gpu::FramebufferPointer()*/) {
|
||||
auto fbo = gpu::FramebufferPointer();
|
||||
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& destFbo, const gpu::FramebufferPointer& copyFbo /*=gpu::FramebufferPointer()*/) {
|
||||
batch.enableStereo(false);
|
||||
batch.resetViewTransform();
|
||||
batch.setFramebuffer(fbo);
|
||||
batch.setFramebuffer(destFbo);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
|
||||
batch.setStateScissorRect(scissor);
|
||||
batch.setViewportTransform(viewport);
|
||||
batch.setResourceTexture(0, texture);
|
||||
#ifndef USE_GLES
|
||||
batch.setPipeline(_presentPipeline);
|
||||
#else
|
||||
batch.setPipeline(_simplePipeline);
|
||||
#endif
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
if (copyFbo) {
|
||||
gpu::Vec4i copyFboRect(0, 0, copyFbo->getWidth(), copyFbo->getHeight());
|
||||
|
@ -553,7 +567,7 @@ void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::Textur
|
|||
batch.setViewportTransform(copyFboRect);
|
||||
batch.setStateScissorRect(copyFboRect);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, {0.0f, 0.0f, 0.0f, 1.0f});
|
||||
batch.blit(fbo, sourceRect, copyFbo, copyRect);
|
||||
batch.blit(destFbo, sourceRect, copyFbo, copyRect);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -581,41 +595,14 @@ void OpenGLDisplayPlugin::updateFrameData() {
|
|||
});
|
||||
}
|
||||
|
||||
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> OpenGLDisplayPlugin::getHUDOperator() {
|
||||
auto hudPipeline = _hudPipeline;
|
||||
auto hudMirrorPipeline = _mirrorHUDPipeline;
|
||||
auto hudStereo = isStereo();
|
||||
auto hudCompositeFramebufferSize = _compositeFramebuffer->getSize();
|
||||
std::array<glm::ivec4, 2> hudEyeViewports;
|
||||
for_each_eye([&](Eye eye) {
|
||||
hudEyeViewports[eye] = eyeViewport(eye);
|
||||
});
|
||||
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
|
||||
if (hudPipeline && hudTexture) {
|
||||
batch.enableStereo(false);
|
||||
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
|
||||
batch.setResourceTexture(0, hudTexture);
|
||||
if (hudStereo) {
|
||||
for_each_eye([&](Eye eye) {
|
||||
batch.setViewportTransform(hudEyeViewports[eye]);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
});
|
||||
} else {
|
||||
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::compositePointer() {
|
||||
void OpenGLDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
auto& cursorManager = Cursor::Manager::instance();
|
||||
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
|
||||
auto cursorTransform = DependencyManager::get<CompositorHelper>()->getReticleTransform(glm::mat4());
|
||||
render([&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setProjectionTransform(mat4());
|
||||
batch.setFramebuffer(_compositeFramebuffer);
|
||||
batch.setFramebuffer(compositeFramebuffer);
|
||||
batch.setPipeline(_cursorPipeline);
|
||||
batch.setResourceTexture(0, cursorData.texture);
|
||||
batch.resetViewTransform();
|
||||
|
@ -626,34 +613,13 @@ void OpenGLDisplayPlugin::compositePointer() {
|
|||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
});
|
||||
} else {
|
||||
batch.setViewportTransform(ivec4(uvec2(0), _compositeFramebuffer->getSize()));
|
||||
batch.setViewportTransform(ivec4(uvec2(0), compositeFramebuffer->getSize()));
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::compositeScene() {
|
||||
render([&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setFramebuffer(_compositeFramebuffer);
|
||||
batch.setViewportTransform(ivec4(uvec2(), _compositeFramebuffer->getSize()));
|
||||
batch.setStateScissorRect(ivec4(uvec2(), _compositeFramebuffer->getSize()));
|
||||
batch.resetViewTransform();
|
||||
batch.setProjectionTransform(mat4());
|
||||
batch.setPipeline(_simplePipeline);
|
||||
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
});
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::compositeLayers() {
|
||||
updateCompositeFramebuffer();
|
||||
|
||||
{
|
||||
PROFILE_RANGE_EX(render_detail, "compositeScene", 0xff0077ff, (uint64_t)presentCount())
|
||||
compositeScene();
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::compositeLayers(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
#ifdef HIFI_ENABLE_NSIGHT_DEBUG
|
||||
if (false) // do not draw the HUD if running nsight debug
|
||||
#endif
|
||||
|
@ -667,23 +633,35 @@ void OpenGLDisplayPlugin::compositeLayers() {
|
|||
|
||||
{
|
||||
PROFILE_RANGE_EX(render_detail, "compositeExtra", 0xff0077ff, (uint64_t)presentCount())
|
||||
compositeExtra();
|
||||
compositeExtra(compositeFramebuffer);
|
||||
}
|
||||
|
||||
// Draw the pointer last so it's on top of everything
|
||||
auto compositorHelper = DependencyManager::get<CompositorHelper>();
|
||||
if (compositorHelper->getReticleVisible()) {
|
||||
PROFILE_RANGE_EX(render_detail, "compositePointer", 0xff0077ff, (uint64_t)presentCount())
|
||||
compositePointer();
|
||||
compositePointer(compositeFramebuffer);
|
||||
}
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::internalPresent() {
|
||||
void OpenGLDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
render([&](gpu::Batch& batch) {
|
||||
// Note: _displayTexture must currently be the same size as the display.
|
||||
uvec2 dims = _displayTexture ? uvec2(_displayTexture->getDimensions()) : getSurfacePixels();
|
||||
auto viewport = ivec4(uvec2(0), dims);
|
||||
renderFromTexture(batch, _displayTexture ? _displayTexture : _compositeFramebuffer->getRenderBuffer(0), viewport, viewport);
|
||||
|
||||
gpu::TexturePointer finalTexture;
|
||||
if (_displayTexture) {
|
||||
finalTexture = _displayTexture;
|
||||
} else if (compositeFramebuffer) {
|
||||
finalTexture = compositeFramebuffer->getRenderBuffer(0);
|
||||
} else {
|
||||
qCWarning(displayPlugins) << "No valid texture for output";
|
||||
}
|
||||
|
||||
if (finalTexture) {
|
||||
renderFromTexture(batch, finalTexture, viewport, viewport);
|
||||
}
|
||||
});
|
||||
swapBuffers();
|
||||
_presentRate.increment();
|
||||
|
@ -700,7 +678,7 @@ void OpenGLDisplayPlugin::present() {
|
|||
}
|
||||
incrementPresentCount();
|
||||
|
||||
if (_currentFrame) {
|
||||
if (_currentFrame && _currentFrame->framebuffer) {
|
||||
auto correction = getViewCorrection();
|
||||
getGLBackend()->setCameraCorrection(correction, _prevRenderView);
|
||||
_prevRenderView = correction * _currentFrame->view;
|
||||
|
@ -720,18 +698,18 @@ void OpenGLDisplayPlugin::present() {
|
|||
// Write all layers to a local framebuffer
|
||||
{
|
||||
PROFILE_RANGE_EX(render, "composite", 0xff00ffff, frameId)
|
||||
compositeLayers();
|
||||
compositeLayers(_currentFrame->framebuffer);
|
||||
}
|
||||
|
||||
// Take the composite framebuffer and send it to the output device
|
||||
{
|
||||
PROFILE_RANGE_EX(render, "internalPresent", 0xff00ffff, frameId)
|
||||
internalPresent();
|
||||
internalPresent(_currentFrame->framebuffer);
|
||||
}
|
||||
|
||||
gpu::Backend::freeGPUMemSize.set(gpu::gl::getFreeDedicatedMemory());
|
||||
} else if (alwaysPresent()) {
|
||||
internalPresent();
|
||||
internalPresent(nullptr);
|
||||
}
|
||||
_movingAveragePresent.addSample((float)(usecTimestampNow() - startPresent));
|
||||
}
|
||||
|
@ -788,7 +766,12 @@ bool OpenGLDisplayPlugin::setDisplayTexture(const QString& name) {
|
|||
}
|
||||
|
||||
QImage OpenGLDisplayPlugin::getScreenshot(float aspectRatio) const {
|
||||
auto size = _compositeFramebuffer->getSize();
|
||||
if (!_currentFrame || !_currentFrame->framebuffer) {
|
||||
return QImage();
|
||||
}
|
||||
|
||||
auto compositeFramebuffer = _currentFrame->framebuffer;
|
||||
auto size = compositeFramebuffer->getSize();
|
||||
if (isHmd()) {
|
||||
size.x /= 2;
|
||||
}
|
||||
|
@ -806,7 +789,7 @@ QImage OpenGLDisplayPlugin::getScreenshot(float aspectRatio) const {
|
|||
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
|
||||
QImage screenshot(bestSize.x, bestSize.y, QImage::Format_ARGB32);
|
||||
withOtherThreadContext([&] {
|
||||
glBackend->downloadFramebuffer(_compositeFramebuffer, ivec4(corner, bestSize), screenshot);
|
||||
glBackend->downloadFramebuffer(compositeFramebuffer, ivec4(corner, bestSize), screenshot);
|
||||
});
|
||||
return screenshot.mirrored(false, true);
|
||||
}
|
||||
|
@ -858,7 +841,7 @@ bool OpenGLDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
|
|||
}
|
||||
|
||||
ivec4 OpenGLDisplayPlugin::eyeViewport(Eye eye) const {
|
||||
uvec2 vpSize = _compositeFramebuffer->getSize();
|
||||
auto vpSize = glm::uvec2(getRecommendedRenderSize());
|
||||
vpSize.x /= 2;
|
||||
uvec2 vpPos;
|
||||
if (eye == Eye::Right) {
|
||||
|
@ -891,14 +874,6 @@ void OpenGLDisplayPlugin::render(std::function<void(gpu::Batch& batch)> f) {
|
|||
OpenGLDisplayPlugin::~OpenGLDisplayPlugin() {
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::updateCompositeFramebuffer() {
|
||||
auto renderSize = glm::uvec2(getRecommendedRenderSize());
|
||||
if (!_compositeFramebuffer || _compositeFramebuffer->getSize() != renderSize) {
|
||||
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_RGBA_32, renderSize.x, renderSize.y));
|
||||
// _compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_SRGBA_32, renderSize.x, renderSize.y));
|
||||
}
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::copyTextureToQuickFramebuffer(NetworkTexturePointer networkTexture, QOpenGLFramebufferObject* target, GLsync* fenceSync) {
|
||||
#if !defined(USE_GLES)
|
||||
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
|
||||
|
|
|
@ -94,14 +94,10 @@ protected:
|
|||
// is not populated
|
||||
virtual bool alwaysPresent() const { return false; }
|
||||
|
||||
void updateCompositeFramebuffer();
|
||||
|
||||
virtual QThread::Priority getPresentPriority() { return QThread::HighPriority; }
|
||||
virtual void compositeLayers();
|
||||
virtual void compositeScene();
|
||||
virtual std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator();
|
||||
virtual void compositePointer();
|
||||
virtual void compositeExtra() {};
|
||||
virtual void compositeLayers(const gpu::FramebufferPointer&);
|
||||
virtual void compositePointer(const gpu::FramebufferPointer&);
|
||||
virtual void compositeExtra(const gpu::FramebufferPointer&) {};
|
||||
|
||||
// These functions must only be called on the presentation thread
|
||||
virtual void customizeContext();
|
||||
|
@ -116,10 +112,10 @@ protected:
|
|||
virtual void deactivateSession() {}
|
||||
|
||||
// Plugin specific functionality to send the composed scene to the output window or device
|
||||
virtual void internalPresent();
|
||||
virtual void internalPresent(const gpu::FramebufferPointer&);
|
||||
|
||||
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& fbo);
|
||||
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor);
|
||||
|
||||
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& destFbo = nullptr, const gpu::FramebufferPointer& copyFbo = nullptr);
|
||||
virtual void updateFrameData();
|
||||
virtual glm::mat4 getViewCorrection() { return glm::mat4(); }
|
||||
|
||||
|
@ -142,14 +138,8 @@ protected:
|
|||
gpu::FramePointer _currentFrame;
|
||||
gpu::Frame* _lastFrame { nullptr };
|
||||
mat4 _prevRenderView;
|
||||
gpu::FramebufferPointer _compositeFramebuffer;
|
||||
gpu::PipelinePointer _hudPipeline;
|
||||
gpu::PipelinePointer _mirrorHUDPipeline;
|
||||
gpu::ShaderPointer _mirrorHUDPS;
|
||||
gpu::PipelinePointer _simplePipeline;
|
||||
gpu::PipelinePointer _presentPipeline;
|
||||
gpu::PipelinePointer _cursorPipeline;
|
||||
gpu::TexturePointer _displayTexture{};
|
||||
gpu::TexturePointer _displayTexture;
|
||||
float _compositeHUDAlpha { 1.0f };
|
||||
|
||||
struct CursorData {
|
||||
|
@ -185,5 +175,9 @@ protected:
|
|||
// be serialized through this mutex
|
||||
mutable Mutex _presentMutex;
|
||||
float _hudAlpha{ 1.0f };
|
||||
|
||||
private:
|
||||
gpu::PipelinePointer _presentPipeline;
|
||||
|
||||
};
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ public:
|
|||
|
||||
protected:
|
||||
void updatePresentPose() override;
|
||||
void hmdPresent() override {}
|
||||
void hmdPresent(const gpu::FramebufferPointer&) override {}
|
||||
bool isHmdMounted() const override { return true; }
|
||||
bool internalActivate() override;
|
||||
private:
|
||||
|
|
|
@ -114,20 +114,23 @@ void HmdDisplayPlugin::internalDeactivate() {
|
|||
|
||||
void HmdDisplayPlugin::customizeContext() {
|
||||
Parent::customizeContext();
|
||||
_hudRenderer.build();
|
||||
_hudOperator = _hudRenderer.build();
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::uncustomizeContext() {
|
||||
// This stops the weirdness where if the preview was disabled, on switching back to 2D,
|
||||
// the vsync was stuck in the disabled state. No idea why that happens though.
|
||||
_disablePreview = false;
|
||||
render([&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.resetViewTransform();
|
||||
batch.setFramebuffer(_compositeFramebuffer);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
|
||||
});
|
||||
_hudRenderer = HUDRenderer();
|
||||
if (_currentFrame && _currentFrame->framebuffer) {
|
||||
render([&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.resetViewTransform();
|
||||
batch.setFramebuffer(_currentFrame->framebuffer);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
|
||||
});
|
||||
|
||||
}
|
||||
_hudRenderer = {};
|
||||
_previewTexture.reset();
|
||||
Parent::uncustomizeContext();
|
||||
}
|
||||
|
@ -174,11 +177,11 @@ float HmdDisplayPlugin::getLeftCenterPixel() const {
|
|||
return leftCenterPixel;
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::internalPresent() {
|
||||
void HmdDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
|
||||
|
||||
// Composite together the scene, hud and mouse cursor
|
||||
hmdPresent();
|
||||
hmdPresent(compositeFramebuffer);
|
||||
|
||||
if (_displayTexture) {
|
||||
// Note: _displayTexture must currently be the same size as the display.
|
||||
|
@ -260,7 +263,7 @@ void HmdDisplayPlugin::internalPresent() {
|
|||
|
||||
viewport.z *= 2;
|
||||
}
|
||||
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor, fbo);
|
||||
renderFromTexture(batch, compositeFramebuffer->getRenderBuffer(0), viewport, scissor, nullptr, fbo);
|
||||
});
|
||||
swapBuffers();
|
||||
|
||||
|
@ -345,7 +348,7 @@ glm::mat4 HmdDisplayPlugin::getViewCorrection() {
|
|||
}
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::HUDRenderer::build() {
|
||||
DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
|
||||
vertices = std::make_shared<gpu::Buffer>();
|
||||
indices = std::make_shared<gpu::Buffer>();
|
||||
|
||||
|
@ -380,7 +383,7 @@ void HmdDisplayPlugin::HUDRenderer::build() {
|
|||
indexCount = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRANGLE;
|
||||
|
||||
// Compute indices order
|
||||
std::vector<GLushort> indices;
|
||||
std::vector<GLushort> indexData;
|
||||
for (int i = 0; i < stacks - 1; i++) {
|
||||
for (int j = 0; j < slices - 1; j++) {
|
||||
GLushort bottomLeftIndex = i * slices + j;
|
||||
|
@ -388,24 +391,21 @@ void HmdDisplayPlugin::HUDRenderer::build() {
|
|||
GLushort topLeftIndex = bottomLeftIndex + slices;
|
||||
GLushort topRightIndex = topLeftIndex + 1;
|
||||
// FIXME make a z-order curve for better vertex cache locality
|
||||
indices.push_back(topLeftIndex);
|
||||
indices.push_back(bottomLeftIndex);
|
||||
indices.push_back(topRightIndex);
|
||||
indexData.push_back(topLeftIndex);
|
||||
indexData.push_back(bottomLeftIndex);
|
||||
indexData.push_back(topRightIndex);
|
||||
|
||||
indices.push_back(topRightIndex);
|
||||
indices.push_back(bottomLeftIndex);
|
||||
indices.push_back(bottomRightIndex);
|
||||
indexData.push_back(topRightIndex);
|
||||
indexData.push_back(bottomLeftIndex);
|
||||
indexData.push_back(bottomRightIndex);
|
||||
}
|
||||
}
|
||||
this->indices->append(indices);
|
||||
indices->append(indexData);
|
||||
format = std::make_shared<gpu::Stream::Format>(); // 1 for everyone
|
||||
format->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
|
||||
format->setAttribute(gpu::Stream::TEXCOORD, gpu::Stream::TEXCOORD, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
|
||||
uniformsBuffer = std::make_shared<gpu::Buffer>(sizeof(Uniforms), nullptr);
|
||||
updatePipeline();
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::HUDRenderer::updatePipeline() {
|
||||
if (!pipeline) {
|
||||
auto program = gpu::Shader::createProgram(shader::render_utils::program::hmd_ui);
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
@ -416,10 +416,6 @@ void HmdDisplayPlugin::HUDRenderer::updatePipeline() {
|
|||
|
||||
pipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
}
|
||||
|
||||
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::HUDRenderer::render(HmdDisplayPlugin& plugin) {
|
||||
updatePipeline();
|
||||
|
||||
auto hudPipeline = pipeline;
|
||||
auto hudFormat = format;
|
||||
|
@ -428,9 +424,9 @@ std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDis
|
|||
auto hudUniformBuffer = uniformsBuffer;
|
||||
auto hudUniforms = uniforms;
|
||||
auto hudIndexCount = indexCount;
|
||||
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
|
||||
if (hudPipeline && hudTexture) {
|
||||
batch.setPipeline(hudPipeline);
|
||||
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, const gpu::FramebufferPointer&, const bool mirror) {
|
||||
if (pipeline && hudTexture) {
|
||||
batch.setPipeline(pipeline);
|
||||
|
||||
batch.setInputFormat(hudFormat);
|
||||
gpu::BufferView posView(hudVertices, VERTEX_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::POSITION)._element);
|
||||
|
@ -454,7 +450,7 @@ std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDis
|
|||
};
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::compositePointer() {
|
||||
void HmdDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
auto& cursorManager = Cursor::Manager::instance();
|
||||
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
|
||||
auto compositorHelper = DependencyManager::get<CompositorHelper>();
|
||||
|
@ -463,7 +459,7 @@ void HmdDisplayPlugin::compositePointer() {
|
|||
render([&](gpu::Batch& batch) {
|
||||
// FIXME use standard gpu stereo rendering for this.
|
||||
batch.enableStereo(false);
|
||||
batch.setFramebuffer(_compositeFramebuffer);
|
||||
batch.setFramebuffer(compositeFramebuffer);
|
||||
batch.setPipeline(_cursorPipeline);
|
||||
batch.setResourceTexture(0, cursorData.texture);
|
||||
batch.resetViewTransform();
|
||||
|
@ -478,10 +474,6 @@ void HmdDisplayPlugin::compositePointer() {
|
|||
});
|
||||
}
|
||||
|
||||
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::getHUDOperator() {
|
||||
return _hudRenderer.render(*this);
|
||||
}
|
||||
|
||||
HmdDisplayPlugin::~HmdDisplayPlugin() {
|
||||
}
|
||||
|
||||
|
|
|
@ -53,16 +53,15 @@ signals:
|
|||
void hmdVisibleChanged(bool visible);
|
||||
|
||||
protected:
|
||||
virtual void hmdPresent() = 0;
|
||||
virtual void hmdPresent(const gpu::FramebufferPointer&) = 0;
|
||||
virtual bool isHmdMounted() const = 0;
|
||||
virtual void postPreview() {};
|
||||
virtual void updatePresentPose();
|
||||
|
||||
bool internalActivate() override;
|
||||
void internalDeactivate() override;
|
||||
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator() override;
|
||||
void compositePointer() override;
|
||||
void internalPresent() override;
|
||||
void compositePointer(const gpu::FramebufferPointer&) override;
|
||||
void internalPresent(const gpu::FramebufferPointer&) override;
|
||||
void customizeContext() override;
|
||||
void uncustomizeContext() override;
|
||||
void updateFrameData() override;
|
||||
|
@ -120,8 +119,6 @@ private:
|
|||
static const size_t TEXTURE_OFFSET { offsetof(Vertex, uv) };
|
||||
static const int VERTEX_STRIDE { sizeof(Vertex) };
|
||||
|
||||
void build();
|
||||
void updatePipeline();
|
||||
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> render(HmdDisplayPlugin& plugin);
|
||||
HUDOperator build();
|
||||
} _hudRenderer;
|
||||
};
|
||||
|
|
|
@ -37,13 +37,13 @@ glm::uvec2 InterleavedStereoDisplayPlugin::getRecommendedRenderSize() const {
|
|||
return result;
|
||||
}
|
||||
|
||||
void InterleavedStereoDisplayPlugin::internalPresent() {
|
||||
void InterleavedStereoDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
|
||||
render([&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.resetViewTransform();
|
||||
batch.setFramebuffer(gpu::FramebufferPointer());
|
||||
batch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
|
||||
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
|
||||
batch.setResourceTexture(0, compositeFramebuffer->getRenderBuffer(0));
|
||||
batch.setPipeline(_interleavedPresentPipeline);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
});
|
||||
|
|
|
@ -21,7 +21,7 @@ protected:
|
|||
// initialize OpenGL context settings needed by the plugin
|
||||
void customizeContext() override;
|
||||
void uncustomizeContext() override;
|
||||
void internalPresent() override;
|
||||
void internalPresent(const gpu::FramebufferPointer&) override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
|
|
|
@ -1105,7 +1105,7 @@ void EntityTreeRenderer::playEntityCollisionSound(const EntityItemPointer& entit
|
|||
options.volume = volume;
|
||||
options.pitch = 1.0f / stretchFactor;
|
||||
|
||||
AudioInjector::playSoundAndDelete(collisionSound, options);
|
||||
DependencyManager::get<AudioInjectorManager>()->playSound(collisionSound, options, true);
|
||||
}
|
||||
|
||||
void EntityTreeRenderer::entityCollisionWithEntity(const EntityItemID& idA, const EntityItemID& idB,
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
#include <QtCore/QStack>
|
||||
#include <QtGui/QMouseEvent>
|
||||
|
||||
#include <AbstractAudioInterface.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <EntityScriptingInterface.h> // for RayToEntityIntersectionResult
|
||||
#include <EntityTree.h>
|
||||
#include <PointerEvent.h>
|
||||
|
|
|
@ -40,6 +40,7 @@ public:
|
|||
virtual bool wantsKeyboardFocus() const { return false; }
|
||||
virtual void setProxyWindow(QWindow* proxyWindow) {}
|
||||
virtual QObject* getEventHandler() { return nullptr; }
|
||||
virtual void emitScriptEvent(const QVariant& message) {}
|
||||
const EntityItemPointer& getEntity() const { return _entity; }
|
||||
const ItemID& getRenderItemID() const { return _renderItemID; }
|
||||
|
||||
|
|
|
@ -169,7 +169,7 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
|
|||
|
||||
if (urlChanged && !usingMaterialData) {
|
||||
_networkMaterial = MaterialCache::instance().getMaterial(_materialURL);
|
||||
auto onMaterialRequestFinished = [&, oldParentID, oldParentMaterialName, newCurrentMaterialName](bool success) {
|
||||
auto onMaterialRequestFinished = [this, oldParentID, oldParentMaterialName, newCurrentMaterialName](bool success) {
|
||||
if (success) {
|
||||
deleteMaterial(oldParentID, oldParentMaterialName);
|
||||
_texturesLoaded = false;
|
||||
|
@ -186,7 +186,11 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
|
|||
if (_networkMaterial->isLoaded()) {
|
||||
onMaterialRequestFinished(!_networkMaterial->isFailed());
|
||||
} else {
|
||||
connect(_networkMaterial.data(), &Resource::finished, this, onMaterialRequestFinished);
|
||||
connect(_networkMaterial.data(), &Resource::finished, this, [this, onMaterialRequestFinished](bool success) {
|
||||
withWriteLock([&] {
|
||||
onMaterialRequestFinished(success);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (materialDataChanged && usingMaterialData) {
|
||||
|
|
|
@ -1034,7 +1034,7 @@ void RenderableModelEntityItem::copyAnimationJointDataToModel() {
|
|||
});
|
||||
|
||||
if (changed) {
|
||||
locationChanged(false, true);
|
||||
locationChanged(true, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -95,19 +95,18 @@ bool PolyLineEntityRenderer::needsRenderUpdate() const {
|
|||
}
|
||||
|
||||
bool PolyLineEntityRenderer::needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const {
|
||||
return (
|
||||
entity->pointsChanged() ||
|
||||
entity->widthsChanged() ||
|
||||
entity->normalsChanged() ||
|
||||
entity->texturesChanged() ||
|
||||
entity->colorsChanged() ||
|
||||
_isUVModeStretch != entity->getIsUVModeStretch() ||
|
||||
_glow != entity->getGlow() ||
|
||||
_faceCamera != entity->getFaceCamera()
|
||||
);
|
||||
if (entity->pointsChanged() || entity->widthsChanged() || entity->normalsChanged() || entity->texturesChanged() || entity->colorsChanged()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (_isUVModeStretch != entity->getIsUVModeStretch() || _glow != entity->getGlow() || _faceCamera != entity->getFaceCamera()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return Parent::needsRenderUpdateFromTypedEntity(entity);
|
||||
}
|
||||
|
||||
void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
|
||||
void PolyLineEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
|
||||
auto pointsChanged = entity->pointsChanged();
|
||||
auto widthsChanged = entity->widthsChanged();
|
||||
auto normalsChanged = entity->normalsChanged();
|
||||
|
@ -119,10 +118,6 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
|
|||
|
||||
entity->resetPolyLineChanged();
|
||||
|
||||
// Transform
|
||||
updateModelTransformAndBound();
|
||||
_renderTransform = getModelTransform();
|
||||
|
||||
// Textures
|
||||
if (entity->texturesChanged()) {
|
||||
entity->resetTexturesChanged();
|
||||
|
@@ -131,7 +126,9 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (!textures.isEmpty()) {
entityTextures = QUrl(textures);
}
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
withWriteLock([&] {
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
});
_textureAspectRatio = 1.0f;
_textureLoaded = false;
}

@@ -145,11 +142,13 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo

// Data
bool faceCameraChanged = faceCamera != _faceCamera;
if (faceCameraChanged || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
withWriteLock([&] {
if (faceCameraChanged || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
});

// Geometry
if (pointsChanged) {

@@ -165,10 +164,23 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
_colors = entity->getStrokeColors();
_color = toGlm(entity->getColor());
}
if (_isUVModeStretch != isUVModeStretch || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged || faceCameraChanged) {
_isUVModeStretch = isUVModeStretch;
updateGeometry();
}

bool uvModeStretchChanged = _isUVModeStretch != isUVModeStretch;
_isUVModeStretch = isUVModeStretch;

bool geometryChanged = uvModeStretchChanged || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged || faceCameraChanged;

void* key = (void*)this;
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [this, geometryChanged] () {
withWriteLock([&] {
updateModelTransformAndBound();
_renderTransform = getModelTransform();

if (geometryChanged) {
updateGeometry();
}
});
});
}

void PolyLineEntityRenderer::updateGeometry() {

@@ -267,22 +279,32 @@ void PolyLineEntityRenderer::updateData() {
}

void PolyLineEntityRenderer::doRender(RenderArgs* args) {
if (_numVertices < 2) {
return;
}

PerformanceTimer perfTimer("RenderablePolyLineEntityItem::render");
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;

if (!_pipeline || !_glowPipeline) {
size_t numVertices;
Transform transform;
gpu::TexturePointer texture;
withReadLock([&] {
numVertices = _numVertices;
transform = _renderTransform;
texture = _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture();

batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
});

if (numVertices < 2) {
return;
}

if (!_pipeline) {
buildPipeline();
}

batch.setPipeline(_glow ? _glowPipeline : _pipeline);
batch.setModelTransform(_renderTransform);
batch.setResourceTexture(0, _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * _numVertices), 0);
batch.setModelTransform(transform);
batch.setResourceTexture(0, texture);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * numVertices), 0);
}

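The hunks above all follow the same pattern: doRender() no longer reads entity members directly while recording batch commands, but copies what it needs under a read lock into locals and renders from that snapshot, while updates happen under a write lock (with the geometry rebuild deferred to a post-update lambda). A minimal, generic sketch of that snapshot idiom, using a plain std::mutex and invented names rather than the repository's withReadLock/withWriteLock helpers:

    #include <mutex>
    #include <cstdio>

    // Illustrative stand-in for a renderer that is updated and rendered on different threads.
    class SnapshotRenderer {
    public:
        void update(float scale, int vertexCount) {
            std::lock_guard<std::mutex> guard(_mutex);   // writer side
            _scale = scale;
            _numVertices = vertexCount;
        }

        void render() {
            float scale;
            int numVertices;
            {
                std::lock_guard<std::mutex> guard(_mutex);  // reader side: copy, then release
                scale = _scale;
                numVertices = _numVertices;
            }
            if (numVertices < 2) {
                return;  // decide from the snapshot, not from the live members
            }
            std::printf("drawing %d vertices at scale %f\n", numVertices, scale);
        }

    private:
        std::mutex _mutex;
        float _scale { 1.0f };
        int _numVertices { 0 };
    };

    int main() {
        SnapshotRenderer renderer;
        renderer.update(2.0f, 8);
        renderer.render();
    }
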
@@ -31,7 +31,7 @@ public:
protected:
virtual bool needsRenderUpdate() const override;
virtual bool needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const override;
virtual void doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) override;
virtual void doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) override;

virtual ItemKey getKey() override;
virtual ShapeKey getShapeKey() override;

@@ -249,10 +249,14 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
graphics::MultiMaterial materials;
auto geometryCache = DependencyManager::get<GeometryCache>();
GeometryCache::Shape geometryShape;
PrimitiveMode primitiveMode;
RenderLayer renderLayer;
bool proceduralRender = false;
glm::vec4 outColor;
withReadLock([&] {
geometryShape = geometryCache->getShapeForEntityShape(_shape);
primitiveMode = _primitiveMode;
renderLayer = _renderLayer;
batch.setModelTransform(_renderTransform); // use a transform with scale, rotation, registration point and translation
materials = _materials["0"];
auto& schema = materials.getSchemaBuffer().get<graphics::MultiMaterial::Schema>();

@@ -267,7 +271,7 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
});

if (proceduralRender) {
if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShape(batch, geometryShape, outColor);
} else {
geometryCache->renderShape(batch, geometryShape, outColor);

@@ -275,10 +279,16 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
} else if (!useMaterialPipeline(materials)) {
// FIXME, support instanced multi-shape rendering using multidraw indirect
outColor.a *= _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || _primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
render::ShapePipelinePointer pipeline;
if (renderLayer == RenderLayer::WORLD) {
pipeline = GeometryCache::getShapePipeline(false, outColor.a < 1.0f, true, false);
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
pipeline = GeometryCache::getShapePipeline(false, outColor.a < 1.0f, true, false, false, true);
}
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, pipeline);
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, pipeline);
}
} else {
if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {

@@ -162,10 +162,12 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec4 backgroundColor;
Transform modelTransform;
glm::vec3 dimensions;
BillboardMode billboardMode;
bool layered;
withReadLock([&] {
modelTransform = _renderTransform;
dimensions = _dimensions;
billboardMode = _billboardMode;

float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
textColor = glm::vec4(_textColor, fadeRatio * _textAlpha);

@@ -190,7 +192,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
}

auto transformToTopLeft = modelTransform;
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), _billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.postTranslate(dimensions * glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed

@@ -210,10 +212,6 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec2 bounds = glm::vec2(dimensions.x - (_leftMargin + _rightMargin), dimensions.y - (_topMargin + _bottomMargin));
_textRenderer->draw(batch, _leftMargin / scale, -_topMargin / scale, _text, textColor, bounds / scale, layered);
}

if (layered) {
DependencyManager::get<DeferredLightingEffect>()->unsetKeyLightBatch(batch);
}
}

QSizeF TextEntityRenderer::textSize(const QString& text) const {

@@ -106,7 +106,7 @@ private:
static std::function<void(QSharedPointer<OffscreenQmlSurface>&, bool&, std::vector<QMetaObject::Connection>&)> _releaseWebSurfaceOperator;

public slots:
void emitScriptEvent(const QVariant& scriptMessage);
void emitScriptEvent(const QVariant& scriptMessage) override;

signals:
void scriptEventReceived(const QVariant& message);

@@ -39,9 +39,7 @@ void EntityEditPacketSender::adjustEditPacketForClockSkew(PacketType type, QByte
}
}

void EntityEditPacketSender::queueEditAvatarEntityMessage(EntityTreePointer entityTree,
EntityItemID entityItemID,
const EntityItemProperties& properties) {
void EntityEditPacketSender::queueEditAvatarEntityMessage(EntityTreePointer entityTree, EntityItemID entityItemID) {
assert(_myAvatar);
if (!entityTree) {
qCDebug(entities) << "EntityEditPacketSender::queueEditAvatarEntityMessage null entityTree.";

@@ -54,11 +52,6 @@ void EntityEditPacketSender::queueEditAvatarEntityMessage(EntityTreePointer enti
}
entity->setLastBroadcast(usecTimestampNow());

// serialize ALL properties in an "AvatarEntity" packet
// rather than just the ones being edited.
EntityItemProperties entityProperties = entity->getProperties();
entityProperties.merge(properties);

OctreePacketData packetData(false, AvatarTraits::MAXIMUM_TRAIT_SIZE);
EncodeBitstreamParams params;
EntityTreeElementExtraEncodeDataPointer extra { nullptr };

@@ -82,7 +75,7 @@ void EntityEditPacketSender::queueEditEntityMessage(PacketType type,
qCWarning(entities) << "Suppressing entity edit message: cannot send avatar entity edit with no myAvatar";
} else if (properties.getOwningAvatarID() == _myAvatar->getID()) {
// this is an avatar-based entity --> update our avatar-data rather than sending to the entity-server
queueEditAvatarEntityMessage(entityTree, entityItemID, properties);
queueEditAvatarEntityMessage(entityTree, entityItemID);
} else {
qCWarning(entities) << "Suppressing entity edit message: cannot send avatar entity edit for another avatar";
}

@@ -50,8 +50,8 @@ public slots:
void processEntityEditNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);

private:
void queueEditAvatarEntityMessage(EntityTreePointer entityTree,
EntityItemID entityItemID, const EntityItemProperties& properties);
friend class MyAvatar;
void queueEditAvatarEntityMessage(EntityTreePointer entityTree, EntityItemID entityItemID);

private:
std::mutex _mutex;

@@ -511,8 +511,6 @@ public:
virtual void setProxyWindow(QWindow* proxyWindow) {}
virtual QObject* getEventHandler() { return nullptr; }

virtual void emitScriptEvent(const QVariant& message) {}

QUuid getLastEditedBy() const { return _lastEditedBy; }
void setLastEditedBy(QUuid value) { _lastEditedBy = value; }

@@ -2196,14 +2196,7 @@ bool EntityScriptingInterface::wantsHandControllerPointerEvents(const QUuid& id)
}

void EntityScriptingInterface::emitScriptEvent(const EntityItemID& entityID, const QVariant& message) {
if (_entityTree) {
_entityTree->withReadLock([&] {
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(EntityItemID(entityID));
if (entity) {
entity->emitScriptEvent(message);
}
});
}
EntityTree::emitScriptEvent(entityID, message);
}

// TODO move this someplace that makes more sense...

@@ -1529,7 +1529,6 @@ public slots:
* @function Entities.emitScriptEvent
* @param {Uuid} entityID - The ID of the {@link Entities.EntityType|Web} entity.
* @param {string} message - The message to send.
* @todo <em>This function is currently not implemented.</em>
*/
Q_INVOKABLE void emitScriptEvent(const EntityItemID& entityID, const QVariant& message);

@@ -2978,6 +2978,7 @@ QStringList EntityTree::getJointNames(const QUuid& entityID) const {
std::function<QObject*(const QUuid&)> EntityTree::_getEntityObjectOperator = nullptr;
std::function<QSizeF(const QUuid&, const QString&)> EntityTree::_textSizeOperator = nullptr;
std::function<bool()> EntityTree::_areEntityClicksCapturedOperator = nullptr;
std::function<void(const QUuid&, const QVariant&)> EntityTree::_emitScriptEventOperator = nullptr;

QObject* EntityTree::getEntityObject(const QUuid& id) {
if (_getEntityObjectOperator) {

@@ -3000,6 +3001,12 @@ bool EntityTree::areEntityClicksCaptured() {
return false;
}

void EntityTree::emitScriptEvent(const QUuid& id, const QVariant& message) {
if (_emitScriptEventOperator) {
_emitScriptEventOperator(id, message);
}
}

void EntityTree::updateEntityQueryAACubeWorker(SpatiallyNestablePointer object, EntityEditPacketSender* packetSender,
MovingEntitiesOperator& moveOperator, bool force, bool tellServer) {
// if the queryBox has changed, tell the entity-server

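EntityTree keeps growing a set of static std::function "operator" hooks (_getEntityObjectOperator, _textSizeOperator, _areEntityClicksCapturedOperator, and now _emitScriptEventOperator) that higher layers install so the tree can call back without a hard dependency on them. A small self-contained sketch of that hook pattern, with invented names and std::string standing in for the Qt types:

    #include <functional>
    #include <iostream>
    #include <string>

    // Illustrative stand-in for the static operator hooks on EntityTree.
    class Tree {
    public:
        using EmitScriptEventOperator = std::function<void(const std::string& id, const std::string& message)>;

        static void setEmitScriptEventOperator(EmitScriptEventOperator op) { _emitScriptEventOperator = op; }

        // Safe to call whether or not a hook has been installed.
        static void emitScriptEvent(const std::string& id, const std::string& message) {
            if (_emitScriptEventOperator) {
                _emitScriptEventOperator(id, message);
            }
        }

    private:
        static EmitScriptEventOperator _emitScriptEventOperator;
    };

    Tree::EmitScriptEventOperator Tree::_emitScriptEventOperator = nullptr;

    int main() {
        Tree::emitScriptEvent("entity-1", "dropped: no hook installed yet");
        Tree::setEmitScriptEventOperator([](const std::string& id, const std::string& message) {
            std::cout << "emitScriptEvent(" << id << "): " << message << "\n";
        });
        Tree::emitScriptEvent("entity-1", "hello");
    }
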
@@ -272,6 +272,9 @@ public:
static void setEntityClicksCapturedOperator(std::function<bool()> areEntityClicksCapturedOperator) { _areEntityClicksCapturedOperator = areEntityClicksCapturedOperator; }
static bool areEntityClicksCaptured();

static void setEmitScriptEventOperator(std::function<void(const QUuid&, const QVariant&)> emitScriptEventOperator) { _emitScriptEventOperator = emitScriptEventOperator; }
static void emitScriptEvent(const QUuid& id, const QVariant& message);

std::map<QString, QString> getNamedPaths() const { return _namedPaths; }

void updateEntityQueryAACube(SpatiallyNestablePointer object, EntityEditPacketSender* packetSender,

@@ -383,6 +386,7 @@ private:
static std::function<QObject*(const QUuid&)> _getEntityObjectOperator;
static std::function<QSizeF(const QUuid&, const QString&)> _textSizeOperator;
static std::function<bool()> _areEntityClicksCapturedOperator;
static std::function<void(const QUuid&, const QVariant&)> _emitScriptEventOperator;

std::vector<int32_t> _staleProxies;

@@ -834,6 +834,7 @@ bool AddressManager::setDomainInfo(const QUrl& domainURL, LookupTrigger trigger)
}

_domainURL = domainURL;
_shareablePlaceName.clear();

// clear any current place information
_rootPlaceID = QUuid();

@@ -7,59 +7,44 @@
//
#include "Framebuffer.h"

#include <array>

#include <EGL/egl.h>
#include <glad/glad.h>
#include <android/log.h>

#include <VrApi.h>
#include <VrApi_Helpers.h>

#include "Helpers.h"

using namespace ovr;

void Framebuffer::updateLayer(int eye, ovrLayerProjection2& layer, const ovrMatrix4f* projectionMatrix ) const {
auto& layerTexture = layer.Textures[eye];
layerTexture.ColorSwapChain = _swapChain;
layerTexture.SwapChainIndex = _index;
layerTexture.ColorSwapChain = _swapChainInfos[eye].swapChain;
layerTexture.SwapChainIndex = _swapChainInfos[eye].index;
if (projectionMatrix) {
layerTexture.TexCoordsFromTanAngles = ovrMatrix4f_TanAngleMatrixFromProjection( projectionMatrix );
}
layerTexture.TextureRect = { 0, 0, 1, 1 };
}

void Framebuffer::SwapChainInfo::destroy() {
if (swapChain != nullptr) {
vrapi_DestroyTextureSwapChain(swapChain);
swapChain = nullptr;
}
index = -1;
length = -1;
}

void Framebuffer::create(const glm::uvec2& size) {
_size = size;
_index = 0;
_validTexture = false;

// Depth renderbuffer
/* glGenRenderbuffers(1, &_depth);
glBindRenderbuffer(GL_RENDERBUFFER, _depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, _size.x, _size.y);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
*/
// Framebuffer
glGenFramebuffers(1, &_fbo);
// glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
// glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depth);
// glBindFramebuffer(GL_FRAMEBUFFER, 0);

_swapChain = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_RGBA8, _size.x, _size.y, 1, 3);

_length = vrapi_GetTextureSwapChainLength(_swapChain);
if (!_length) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Unable to count swap chain textures");
return;
}

for (int i = 0; i < _length; ++i) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(_swapChain, i);
glBindTexture(GL_TEXTURE_2D, chainTexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
ovr::for_each_eye([&](ovrEye eye) {
_swapChainInfos[eye].create(size);
});
glBindTexture(GL_TEXTURE_2D, 0);
glGenFramebuffers(1, &_fbo);
}

void Framebuffer::destroy() {

@@ -67,28 +52,82 @@ void Framebuffer::destroy() {
glDeleteFramebuffers(1, &_fbo);
_fbo = 0;
}
if (0 != _depth) {
glDeleteRenderbuffers(1, &_depth);
_depth = 0;
}
if (_swapChain != nullptr) {
vrapi_DestroyTextureSwapChain(_swapChain);
_swapChain = nullptr;
}
_index = -1;
_length = -1;

ovr::for_each_eye([&](ovrEye eye) {
_swapChainInfos[eye].destroy();
});
}

void Framebuffer::advance() {
_index = (_index + 1) % _length;
_validTexture = false;
ovr::for_each_eye([&](ovrEye eye) {
_swapChainInfos[eye].advance();
});
}

void Framebuffer::bind() {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
if (!_validTexture) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(_swapChain, _index);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, chainTexId, 0);
_validTexture = true;
void Framebuffer::bind(GLenum target) {
glBindFramebuffer(target, _fbo);
_swapChainInfos[0].bind(target, GL_COLOR_ATTACHMENT0);
_swapChainInfos[1].bind(target, GL_COLOR_ATTACHMENT1);
}

void Framebuffer::invalidate(GLenum target) {
static const std::array<GLenum, 2> INVALIDATE_ATTACHMENTS {{ GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1 }};
glInvalidateFramebuffer(target, static_cast<GLsizei>(INVALIDATE_ATTACHMENTS.size()), INVALIDATE_ATTACHMENTS.data());
}

void Framebuffer::drawBuffers(ovrEye eye) const {
static const std::array<std::array<GLenum, 2>, 3> EYE_DRAW_BUFFERS { {
{GL_COLOR_ATTACHMENT0, GL_NONE},
{GL_NONE, GL_COLOR_ATTACHMENT1},
{GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1}
} };

switch(eye) {
case VRAPI_EYE_LEFT:
case VRAPI_EYE_RIGHT:
case VRAPI_EYE_COUNT: {
const auto& eyeDrawBuffers = EYE_DRAW_BUFFERS[eye];
glDrawBuffers(static_cast<GLsizei>(eyeDrawBuffers.size()), eyeDrawBuffers.data());
}
break;

default:
throw std::runtime_error("Invalid eye for drawBuffers");
}
}

void Framebuffer::SwapChainInfo::create(const glm::uvec2 &size) {
index = 0;
validTexture = false;
// GL_SRGB8_ALPHA8 and GL_RGBA8 appear to behave the same here. The only thing that changes the
// output gamma behavior is VRAPI_MODE_FLAG_FRONT_BUFFER_SRGB passed to vrapi_EnterVrMode
swapChain = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_SRGB8_ALPHA8, size.x, size.y, 1, 3);
length = vrapi_GetTextureSwapChainLength(swapChain);
if (!length) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Unable to count swap chain textures");
throw std::runtime_error("Unable to create Oculus texture swap chain");
}

for (int i = 0; i < length; ++i) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(swapChain, i);
glBindTexture(GL_TEXTURE_2D, chainTexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
}

void Framebuffer::SwapChainInfo::advance() {
index = (index + 1) % length;
validTexture = false;
}

void Framebuffer::SwapChainInfo::bind(uint32_t target, uint32_t attachment) {
if (!validTexture) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(swapChain, index);
glFramebufferTexture(target, attachment, chainTexId, 0);
validTexture = true;
}
}

@@ -9,6 +9,7 @@

#include <cstdint>
#include <glm/glm.hpp>
#include <glad/glad.h>

#include <VrApi_Types.h>

@@ -20,15 +21,28 @@ public:
void create(const glm::uvec2& size);
void advance();
void destroy();
void bind();
void bind(GLenum target = GL_DRAW_FRAMEBUFFER);
void invalidate(GLenum target = GL_DRAW_FRAMEBUFFER);
void drawBuffers(ovrEye eye) const;

uint32_t _depth { 0 };
const glm::uvec2& size() const { return _size; }

private:
uint32_t _fbo{ 0 };
int _length{ -1 };
int _index{ -1 };
bool _validTexture{ false };
glm::uvec2 _size;
ovrTextureSwapChain* _swapChain{ nullptr };
struct SwapChainInfo {
int length{ -1 };
int index{ -1 };
bool validTexture{ false };
ovrTextureSwapChain* swapChain{ nullptr };

void create(const glm::uvec2& size);
void destroy();
void advance();
void bind(GLenum target, GLenum attachment);
};

SwapChainInfo _swapChainInfos[VRAPI_FRAME_LAYER_EYE_MAX];
};

} // namespace ovr

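With this refactor each eye owns its own texture swap chain, and a chain texture is only (re)attached the first time the framebuffer is bound after advance() steps the ring index. A simplified, GL-free sketch of that advance/lazy-attach bookkeeping, with invented names and printf standing in for the actual glFramebufferTexture call:

    #include <cstdio>

    // Illustrative ring of swap-chain textures with lazy re-attachment, modeled with plain ints.
    struct SwapChainRing {
        int length { 3 };            // number of textures in the chain
        int index { 0 };             // texture used for the current frame
        bool validTexture { false }; // has the current texture been attached yet?

        void advance() {
            index = (index + 1) % length;
            validTexture = false;    // force a re-attach on the next bind
        }

        void bind() {
            if (!validTexture) {
                std::printf("attach texture %d\n", index);  // stands in for glFramebufferTexture
                validTexture = true;
            }
        }
    };

    int main() {
        SwapChainRing ring;
        for (int frame = 0; frame < 4; ++frame) {
            ring.bind();     // attaches at most once per frame
            ring.bind();     // a second bind in the same frame is a no-op
            ring.advance();  // hand the texture to the compositor, move to the next one
        }
    }
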
@@ -9,37 +9,186 @@

#include <android/native_window_jni.h>
#include <android/log.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>

#include <unistd.h>
#include <algorithm>
#include <array>

#include <VrApi.h>
#include <VrApi_Helpers.h>
#include <VrApi_Types.h>
//#include <OVR_Platform.h>

#include "GLContext.h"
#include "Helpers.h"
#include "Framebuffer.h"

static AAssetManager* ASSET_MANAGER = nullptr;

#define USE_BLIT_PRESENT 0

#if !USE_BLIT_PRESENT

static std::string getTextAsset(const char* assetPath) {
if (!ASSET_MANAGER || !assetPath) {
// no asset manager or no path: return an empty string
return {};
}
AAsset* asset = AAssetManager_open(ASSET_MANAGER, assetPath, AASSET_MODE_BUFFER);
if (!asset) {
return {};
}

auto length = AAsset_getLength(asset);
if (0 == length) {
AAsset_close(asset);
return {};
}

auto buffer = AAsset_getBuffer(asset);
if (!buffer) {
AAsset_close(asset);
return {};
}

std::string result { static_cast<const char*>(buffer), static_cast<size_t>(length) };
AAsset_close(asset);
return result;
}

static std::string getShaderInfoLog(GLuint glshader) {
std::string result;
GLint infoLength = 0;
glGetShaderiv(glshader, GL_INFO_LOG_LENGTH, &infoLength);
if (infoLength > 0) {
char* temp = new char[infoLength];
glGetShaderInfoLog(glshader, infoLength, NULL, temp);
result = std::string(temp);
delete[] temp;
}
return result;
}

static GLuint buildShader(GLenum shaderDomain, const char* shader) {
GLuint glshader = glCreateShader(shaderDomain);
if (!glshader) {
throw std::runtime_error("Bad shader");
}

glShaderSource(glshader, 1, &shader, NULL);
glCompileShader(glshader);

GLint compiled = 0;
glGetShaderiv(glshader, GL_COMPILE_STATUS, &compiled);

// if compilation fails
if (!compiled) {
std::string compileError = getShaderInfoLog(glshader);
glDeleteShader(glshader);
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "Shader compile error: %s", compileError.c_str());
return 0;
}

return glshader;
}

static std::string getProgramInfoLog(GLuint glprogram) {
std::string result;
GLint infoLength = 0;
glGetProgramiv(glprogram, GL_INFO_LOG_LENGTH, &infoLength);
if (infoLength > 0) {
char* temp = new char[infoLength];
glGetProgramInfoLog(glprogram, infoLength, NULL, temp);
result = std::string(temp);
delete[] temp;
}
return result;
}

static GLuint buildProgram(const char* vertex, const char* fragment) {
// A brand new program:
GLuint glprogram { 0 }, glvertex { 0 }, glfragment { 0 };

try {
glprogram = glCreateProgram();
if (0 == glprogram) {
throw std::runtime_error("Failed to create program, is GL context current?");
}

glvertex = buildShader(GL_VERTEX_SHADER, vertex);
if (0 == glvertex) {
throw std::runtime_error("Failed to create or compile vertex shader");
}
glAttachShader(glprogram, glvertex);

glfragment = buildShader(GL_FRAGMENT_SHADER, fragment);
if (0 == glfragment) {
throw std::runtime_error("Failed to create or compile fragment shader");
}
glAttachShader(glprogram, glfragment);

GLint linked { 0 };
glLinkProgram(glprogram);
glGetProgramiv(glprogram, GL_LINK_STATUS, &linked);

if (!linked) {
std::string linkErrorLog = getProgramInfoLog(glprogram);
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "Program link error: %s", linkErrorLog.c_str());
throw std::runtime_error("Failed to link program, is the interface between the fragment and vertex shaders correct?");
}

} catch(const std::runtime_error& error) {
if (0 != glprogram) {
glDeleteProgram(glprogram);
glprogram = 0;
}
}

if (0 != glvertex) {
glDeleteShader(glvertex);
}

if (0 != glfragment) {
glDeleteShader(glfragment);
}

if (0 == glprogram) {
throw std::runtime_error("Failed to build program");
}

return glprogram;
}

#endif

using namespace ovr;

static thread_local bool isRenderThread { false };

struct VrSurface : public TaskQueue {
using HandlerTask = VrHandler::HandlerTask;
using HandlerTask = ovr::VrHandler::HandlerTask;

JavaVM* vm{nullptr};
jobject oculusActivity{ nullptr };
ANativeWindow* nativeWindow{ nullptr };

VrHandler* handler{nullptr};
ovr::VrHandler* handler{nullptr};
ovrMobile* session{nullptr};
bool resumed { false };
GLContext vrglContext;
Framebuffer eyeFbos[2];
uint32_t readFbo{0};
ovr::GLContext vrglContext;
ovr::Framebuffer eyesFbo;

#if USE_BLIT_PRESENT
GLuint readFbo { 0 };
#else
GLuint renderProgram { 0 };
GLuint renderVao { 0 };
#endif
std::atomic<uint32_t> presentIndex{1};
double displayTime{0};
// Not currently set by anything

@@ -76,6 +225,16 @@ struct VrSurface : public TaskQueue {
vrglContext.create(currentDisplay, currentContext, noErrorContext);
vrglContext.makeCurrent();

#if USE_BLIT_PRESENT
glGenFramebuffers(1, &readFbo);
#else
glGenVertexArrays(1, &renderVao);
const char* vertex = nullptr;
auto vertexShader = getTextAsset("shaders/present.vert");
auto fragmentShader = getTextAsset("shaders/present.frag");
renderProgram = buildProgram(vertexShader.c_str(), fragmentShader.c_str());
#endif

glm::uvec2 eyeTargetSize;
withEnv([&](JNIEnv* env){
ovrJava java{ vm, env, oculusActivity };

@@ -85,10 +244,7 @@ struct VrSurface : public TaskQueue {
};
});
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "QQQ Eye Size %d, %d", eyeTargetSize.x, eyeTargetSize.y);
ovr::for_each_eye([&](ovrEye eye) {
eyeFbos[eye].create(eyeTargetSize);
});
glGenFramebuffers(1, &readFbo);
eyesFbo.create(eyeTargetSize);
vrglContext.doneCurrent();
}

@@ -157,6 +313,7 @@ struct VrSurface : public TaskQueue {
ovrJava java{ vm, env, oculusActivity };
ovrModeParms modeParms = vrapi_DefaultModeParms(&java);
modeParms.Flags |= VRAPI_MODE_FLAG_NATIVE_WINDOW;
modeParms.Flags |= VRAPI_MODE_FLAG_FRONT_BUFFER_SRGB;
if (noErrorContext) {
modeParms.Flags |= VRAPI_MODE_FLAG_CREATE_CONTEXT_NO_ERROR;
}

@@ -178,38 +335,51 @@ struct VrSurface : public TaskQueue {
void presentFrame(uint32_t sourceTexture, const glm::uvec2 &sourceSize, const ovrTracking2& tracking) {
ovrLayerProjection2 layer = vrapi_DefaultLayerProjection2();
layer.HeadPose = tracking.HeadPose;

eyesFbo.bind();
if (sourceTexture) {
eyesFbo.invalidate();
#if USE_BLIT_PRESENT
glBindFramebuffer(GL_READ_FRAMEBUFFER, readFbo);
glFramebufferTexture(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, sourceTexture, 0);
GLenum framebufferStatus = glCheckFramebufferStatus(GL_READ_FRAMEBUFFER);
if (GL_FRAMEBUFFER_COMPLETE != framebufferStatus) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "incomplete framebuffer");
}
}
GLenum invalidateAttachment = GL_COLOR_ATTACHMENT0;

ovr::for_each_eye([&](ovrEye eye) {
const auto &eyeTracking = tracking.Eye[eye];
auto &eyeFbo = eyeFbos[eye];
const auto &destSize = eyeFbo._size;
eyeFbo.bind();
glInvalidateFramebuffer(GL_DRAW_FRAMEBUFFER, 1, &invalidateAttachment);
if (sourceTexture) {
const auto &destSize = eyesFbo.size();
ovr::for_each_eye([&](ovrEye eye) {
auto sourceWidth = sourceSize.x / 2;
auto sourceX = (eye == VRAPI_EYE_LEFT) ? 0 : sourceWidth;
// Each eye blit uses a different draw buffer
eyesFbo.drawBuffers(eye);
glBlitFramebuffer(
sourceX, 0, sourceX + sourceWidth, sourceSize.y,
0, 0, destSize.x, destSize.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
}
eyeFbo.updateLayer(eye, layer, &eyeTracking.ProjectionMatrix);
eyeFbo.advance();
});
if (sourceTexture) {
glInvalidateFramebuffer(GL_READ_FRAMEBUFFER, 1, &invalidateAttachment);
});
static const std::array<GLenum, 1> READ_INVALIDATE_ATTACHMENTS {{ GL_COLOR_ATTACHMENT0 }};
glInvalidateFramebuffer(GL_READ_FRAMEBUFFER, (GLuint)READ_INVALIDATE_ATTACHMENTS.size(), READ_INVALIDATE_ATTACHMENTS.data());
glFramebufferTexture(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, 0, 0);
#else
eyesFbo.drawBuffers(VRAPI_EYE_COUNT);
const auto &destSize = eyesFbo.size();
glViewport(0, 0, destSize.x, destSize.y);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, sourceTexture);
glBindVertexArray(renderVao);
glUseProgram(renderProgram);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glUseProgram(0);
glBindVertexArray(0);
#endif
} else {
eyesFbo.drawBuffers(VRAPI_EYE_COUNT);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
glFlush();

ovr::for_each_eye([&](ovrEye eye) {
const auto &eyeTracking = tracking.Eye[eye];
eyesFbo.updateLayer(eye, layer, &eyeTracking.ProjectionMatrix);
});

eyesFbo.advance();

ovrLayerHeader2 *layerHeader = &layer.Header;
ovrSubmitFrameDescription2 frameDesc = {};

@@ -321,8 +491,9 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *, void *) {
return JNI_VERSION_1_6;
}

JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject obj) {
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject obj, jobject assetManager) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
ASSET_MANAGER = AAssetManager_fromJava(env, assetManager);
SURFACE.onCreate(env, obj);
}

@@ -245,7 +245,7 @@ void OculusMobileDisplayPlugin::updatePresentPose() {
});
}

void OculusMobileDisplayPlugin::internalPresent() {
void OculusMobileDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compsiteFramebuffer) {
VrHandler::pollTask();

if (!vrActive()) {

@@ -253,8 +253,12 @@ void OculusMobileDisplayPlugin::internalPresent() {
return;
}

auto sourceTexture = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0));
glm::uvec2 sourceSize{ _compositeFramebuffer->getWidth(), _compositeFramebuffer->getHeight() };
GLuint sourceTexture = 0;
glm::uvec2 sourceSize;
if (compsiteFramebuffer) {
sourceTexture = getGLBackend()->getTextureID(compsiteFramebuffer->getRenderBuffer(0));
sourceSize = { compsiteFramebuffer->getWidth(), compsiteFramebuffer->getHeight() };
}
VrHandler::presentFrame(sourceTexture, sourceSize, presentTracking);
_presentRate.increment();
}

@@ -54,8 +54,8 @@ protected:
void uncustomizeContext() override;

void updatePresentPose() override;
void internalPresent() override;
void hmdPresent() override { throw std::runtime_error("Unused"); }
void internalPresent(const gpu::FramebufferPointer&) override;
void hmdPresent(const gpu::FramebufferPointer&) override { throw std::runtime_error("Unused"); }
bool isHmdMounted() const override;
bool alwaysPresent() const override { return true; }

@@ -2,6 +2,12 @@

#include <NumericalConstants.h>

const DisplayPlugin::HUDOperator DisplayPlugin::DEFAULT_HUD_OPERATOR{ std::function<void(gpu::Batch&, const gpu::TexturePointer&, const gpu::FramebufferPointer&, bool mirror)>() };

DisplayPlugin::DisplayPlugin() : _hudOperator{ DEFAULT_HUD_OPERATOR } {
}

int64_t DisplayPlugin::getPaintDelayUsecs() const {
std::lock_guard<std::mutex> lock(_paintDelayMutex);
return _paintDelayTimer.isValid() ? _paintDelayTimer.nsecsElapsed() / NSECS_PER_USEC : 0;

@@ -35,8 +41,8 @@ void DisplayPlugin::waitForPresent() {
}
}

std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> DisplayPlugin::getHUDOperator() {
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> hudOperator;
std::function<void(gpu::Batch&, const gpu::TexturePointer&, const gpu::FramebufferPointer& compositeFramebuffer, bool mirror)> DisplayPlugin::getHUDOperator() {
HUDOperator hudOperator;
{
QMutexLocker locker(&_presentMutex);
hudOperator = _hudOperator;

@@ -48,3 +54,5 @@ glm::mat4 HmdDisplay::getEyeToHeadTransform(Eye eye) const {
static const glm::mat4 xform;
return xform;
}

@@ -121,6 +121,8 @@ class DisplayPlugin : public Plugin, public HmdDisplay {
Q_OBJECT
using Parent = Plugin;
public:
DisplayPlugin();

virtual int getRequiredThreadCount() const { return 0; }
virtual bool isHmd() const { return false; }
virtual int getHmdScreen() const { return -1; }

@@ -214,7 +216,8 @@ public:
void waitForPresent();
float getAveragePresentTime() { return _movingAveragePresent.average / (float)USECS_PER_MSEC; } // in msec

std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator();
using HUDOperator = std::function<void(gpu::Batch&, const gpu::TexturePointer&, const gpu::FramebufferPointer&, bool mirror)>;
virtual HUDOperator getHUDOperator() final;

static const QString& MENU_PATH();

@@ -231,7 +234,8 @@ protected:

gpu::ContextPointer _gpuContext;

std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> _hudOperator { std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)>() };
static const HUDOperator DEFAULT_HUD_OPERATOR;
HUDOperator _hudOperator;

MovingAverage<float, 10> _movingAveragePresent;

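The HUD hook is now a named HUDOperator alias with an explicitly empty DEFAULT_HUD_OPERATOR, so callers have to test the std::function before invoking it. A short, self-contained sketch of why that check matters, with invented stand-ins for the gpu batch arguments:

    #include <functional>
    #include <iostream>

    // Illustrative stand-in for the batch argument.
    struct Batch {};

    using HUDOperator = std::function<void(Batch&, bool mirror)>;

    // An empty std::function converts to false; calling it anyway would throw std::bad_function_call.
    static const HUDOperator DEFAULT_HUD_OPERATOR {};

    void compositeHUD(Batch& batch, const HUDOperator& hudOperator) {
        if (hudOperator) {            // only invoke once a real operator has been installed
            hudOperator(batch, false);
        } else {
            std::cout << "no HUD operator installed; skipping HUD composite\n";
        }
    }

    int main() {
        Batch batch;
        compositeHUD(batch, DEFAULT_HUD_OPERATOR);                                 // skipped
        compositeHUD(batch, [](Batch&, bool) { std::cout << "drawing HUD\n"; });   // drawn
    }
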
@@ -368,6 +368,12 @@ public:
const ShapeData * getShapeData(Shape shape) const;

graphics::MeshPointer meshFromShape(Shape geometryShape, glm::vec3 color);

static render::ShapePipelinePointer getShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false, bool forward = false);
static render::ShapePipelinePointer getFadingShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false);

private:

GeometryCache();

@@ -471,11 +477,6 @@ private:
gpu::PipelinePointer _simpleOpaqueWebBrowserPipeline;
gpu::ShaderPointer _simpleTransparentWebBrowserShader;
gpu::PipelinePointer _simpleTransparentWebBrowserPipeline;

static render::ShapePipelinePointer getShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false, bool forward = false);
static render::ShapePipelinePointer getFadingShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false);
};

#endif // hifi_GeometryCache_h

Some files were not shown because too many files have changed in this diff.