Merge branch 'master' of https://github.com/highfidelity/hifi into undoEditOrderFix

This commit is contained in:
unknown 2019-04-16 15:26:47 -07:00
commit 2839879657
75 changed files with 1169 additions and 582 deletions

View file

@ -64,6 +64,10 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
"UDP port for this assignment client (or monitor)", "port");
parser.addOption(portOption);
const QCommandLineOption minChildListenPort(ASSIGNMENT_MONITOR_MIN_CHILDREN_LISTEN_PORT_OPTION,
"Minimum UDP listen port", "port");
parser.addOption(minChildListenPort);
const QCommandLineOption walletDestinationOption(ASSIGNMENT_WALLET_DESTINATION_ID_OPTION,
"set wallet destination", "wallet-uuid");
parser.addOption(walletDestinationOption);
@ -195,6 +199,11 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
assignmentServerPort = parser.value(assignmentServerPortOption).toInt();
}
quint16 childMinListenPort = 0;
if (argumentVariantMap.contains(ASSIGNMENT_MONITOR_MIN_CHILDREN_LISTEN_PORT_OPTION)) {
childMinListenPort = argumentVariantMap.value(ASSIGNMENT_MONITOR_MIN_CHILDREN_LISTEN_PORT_OPTION).toUInt();
}
// check for an overridden listen port
quint16 listenPort = 0;
if (argumentVariantMap.contains(ASSIGNMENT_CLIENT_LISTEN_PORT_OPTION)) {
@ -234,8 +243,8 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
if (numForks || minForks || maxForks) {
AssignmentClientMonitor* monitor = new AssignmentClientMonitor(numForks, minForks, maxForks,
requestAssignmentType, assignmentPool,
listenPort, walletUUID, assignmentServerHostname,
requestAssignmentType, assignmentPool, listenPort,
childMinListenPort, walletUUID, assignmentServerHostname,
assignmentServerPort, httpStatusPort, logDirectory);
monitor->setParent(this);
connect(this, &QCoreApplication::aboutToQuit, monitor, &AssignmentClientMonitor::aboutToQuit);

View file

@ -20,6 +20,7 @@ const QString ASSIGNMENT_POOL_OPTION = "pool";
const QString ASSIGNMENT_CLIENT_LISTEN_PORT_OPTION = "p";
const QString ASSIGNMENT_WALLET_DESTINATION_ID_OPTION = "wallet";
const QString CUSTOM_ASSIGNMENT_SERVER_HOSTNAME_OPTION = "a";
const QString ASSIGNMENT_MONITOR_MIN_CHILDREN_LISTEN_PORT_OPTION = "min-listen-port";
const QString CUSTOM_ASSIGNMENT_SERVER_PORT_OPTION = "server-port";
const QString ASSIGNMENT_NUM_FORKS_OPTION = "n";
const QString ASSIGNMENT_MIN_FORKS_OPTION = "min";

View file

@ -40,7 +40,7 @@ AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmen
const unsigned int minAssignmentClientForks,
const unsigned int maxAssignmentClientForks,
Assignment::Type requestAssignmentType, QString assignmentPool,
quint16 listenPort, QUuid walletUUID, QString assignmentServerHostname,
quint16 listenPort, quint16 childMinListenPort, QUuid walletUUID, QString assignmentServerHostname,
quint16 assignmentServerPort, quint16 httpStatusServerPort, QString logDirectory) :
_httpManager(QHostAddress::LocalHost, httpStatusServerPort, "", this),
_numAssignmentClientForks(numAssignmentClientForks),
@ -50,8 +50,8 @@ AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmen
_assignmentPool(assignmentPool),
_walletUUID(walletUUID),
_assignmentServerHostname(assignmentServerHostname),
_assignmentServerPort(assignmentServerPort)
_assignmentServerPort(assignmentServerPort),
_childMinListenPort(childMinListenPort)
{
qDebug() << "_requestAssignmentType =" << _requestAssignmentType;
@ -100,8 +100,13 @@ void AssignmentClientMonitor::simultaneousWaitOnChildren(int waitMsecs) {
}
}
void AssignmentClientMonitor::childProcessFinished(qint64 pid, int exitCode, QProcess::ExitStatus exitStatus) {
auto message = "Child process " + QString::number(pid) + " has %1 with exit code " + QString::number(exitCode) + ".";
void AssignmentClientMonitor::childProcessFinished(qint64 pid, quint16 listenPort, int exitCode, QProcess::ExitStatus exitStatus) {
auto message = "Child process " + QString::number(pid) + " on port " + QString::number(listenPort) +
"has %1 with exit code " + QString::number(exitCode) + ".";
if (listenPort) {
_childListenPorts.remove(listenPort);
}
if (_childProcesses.remove(pid)) {
message.append(" Removed from internal map.");
@ -153,6 +158,23 @@ void AssignmentClientMonitor::aboutToQuit() {
void AssignmentClientMonitor::spawnChildClient() {
QProcess* assignmentClient = new QProcess(this);
quint16 listenPort = 0;
// allocate a port
if (_childMinListenPort) {
for (listenPort = _childMinListenPort; _childListenPorts.contains(listenPort); listenPort++) {
if (_maxAssignmentClientForks &&
(listenPort >= _maxAssignmentClientForks + _childMinListenPort)) {
listenPort = 0;
qDebug() << "Insufficient listen ports";
break;
}
}
}
if (listenPort) {
_childListenPorts.insert(listenPort);
}
// unparse the parts of the command-line that the child cares about
QStringList _childArguments;
if (_assignmentPool != "") {
@ -176,6 +198,11 @@ void AssignmentClientMonitor::spawnChildClient() {
_childArguments.append(QString::number(_requestAssignmentType));
}
if (listenPort) {
_childArguments.append("-" + ASSIGNMENT_CLIENT_LISTEN_PORT_OPTION);
_childArguments.append(QString::number(listenPort));
}
// tell children which assignment monitor port to use
// for now they simply talk to us on localhost
_childArguments.append("--" + ASSIGNMENT_CLIENT_MONITOR_PORT_OPTION);
@ -247,8 +274,8 @@ void AssignmentClientMonitor::spawnChildClient() {
auto pid = assignmentClient->processId();
// make sure we hear that this process has finished when it does
connect(assignmentClient, static_cast<void(QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished),
this, [this, pid](int exitCode, QProcess::ExitStatus exitStatus) {
childProcessFinished(pid, exitCode, exitStatus);
this, [this, listenPort, pid](int exitCode, QProcess::ExitStatus exitStatus) {
childProcessFinished(pid, listenPort, exitCode, exitStatus);
});
qDebug() << "Spawned a child client with PID" << assignmentClient->processId();

View file

@ -37,14 +37,15 @@ class AssignmentClientMonitor : public QObject, public HTTPRequestHandler {
public:
AssignmentClientMonitor(const unsigned int numAssignmentClientForks, const unsigned int minAssignmentClientForks,
const unsigned int maxAssignmentClientForks, Assignment::Type requestAssignmentType,
QString assignmentPool, quint16 listenPort, QUuid walletUUID, QString assignmentServerHostname,
quint16 assignmentServerPort, quint16 httpStatusServerPort, QString logDirectory);
QString assignmentPool, quint16 listenPort, quint16 childMinListenPort, QUuid walletUUID,
QString assignmentServerHostname, quint16 assignmentServerPort, quint16 httpStatusServerPort,
QString logDirectory);
~AssignmentClientMonitor();
void stopChildProcesses();
private slots:
void checkSpares();
void childProcessFinished(qint64 pid, int exitCode, QProcess::ExitStatus exitStatus);
void childProcessFinished(qint64 pid, quint16 port, int exitCode, QProcess::ExitStatus exitStatus);
void handleChildStatusPacket(QSharedPointer<ReceivedMessage> message);
bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url, bool skipSubHandler = false) override;
@ -75,6 +76,9 @@ private:
QMap<qint64, ACProcess> _childProcesses;
quint16 _childMinListenPort;
QSet<quint16> _childListenPorts;
bool _wantsChildFileLogging { false };
};

View file

@ -588,8 +588,8 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
// check the payload to see if we have asked for dynamicJitterBuffer support
const QString DYNAMIC_JITTER_BUFFER_JSON_KEY = "dynamic_jitter_buffer";
bool enableDynamicJitterBuffer = audioBufferGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
if (enableDynamicJitterBuffer) {
qCDebug(audio) << "Enabling dynamic jitter buffers.";
if (!enableDynamicJitterBuffer) {
qCDebug(audio) << "Disabling dynamic jitter buffers.";
bool ok;
const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "static_desired_jitter_buffer_frames";
@ -599,7 +599,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
}
qCDebug(audio) << "Static desired jitter buffer frames:" << _numStaticJitterFrames;
} else {
qCDebug(audio) << "Disabling dynamic jitter buffers.";
qCDebug(audio) << "Enabling dynamic jitter buffers.";
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
}

View file

@ -549,38 +549,28 @@ void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStre
// grab the stream from the ring buffer
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
// stereo sources are not passed through HRTF
if (streamToAdd->isStereo()) {
// apply the avatar gain adjustment
gain *= mixableStream.hrtf->getGainAdjustment();
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
const float scale = 1 / 32768.0f; // int16_t to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
_mixSamples[2*i+0] += (float)streamPopOutput[2*i+0] * gain * scale;
_mixSamples[2*i+1] += (float)streamPopOutput[2*i+1] * gain * scale;
}
// stereo sources are not passed through HRTF
mixableStream.hrtf->mixStereo(_bufferSamples, _mixSamples, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.manualStereoMixes;
} else if (isEcho) {
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// echo sources are not passed through HRTF
const float scale = 1/32768.0f; // int16_t to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
float sample = (float)streamPopOutput[i] * gain * scale;
_mixSamples[2*i+0] += sample;
_mixSamples[2*i+1] += sample;
}
mixableStream.hrtf->mixMono(_bufferSamples, _mixSamples, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.manualEchoMixes;
} else {
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.hrtfRenders;
}
}

View file

@ -1451,6 +1451,34 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_overlays.init(); // do this before scripts load
DependencyManager::set<ContextOverlayInterface>();
auto offscreenUi = getOffscreenUI();
connect(offscreenUi.data(), &OffscreenUi::desktopReady, []() {
// Now that we've loaded the menu and thus switched to the previous display plugin
// we can unlock the desktop repositioning code, since all the positions will be
// relative to the desktop size for this plugin
auto offscreenUi = getOffscreenUI();
auto desktop = offscreenUi->getDesktop();
if (desktop) {
desktop->setProperty("repositionLocked", false);
}
});
connect(offscreenUi.data(), &OffscreenUi::keyboardFocusActive, [this]() {
#if !defined(Q_OS_ANDROID) && !defined(DISABLE_QML)
// Do not show login dialog if requested not to on the command line
QString hifiNoLoginCommandLineKey = QString("--").append(HIFI_NO_LOGIN_COMMAND_LINE_KEY);
int index = arguments().indexOf(hifiNoLoginCommandLineKey);
if (index != -1) {
resumeAfterLoginDialogActionTaken();
return;
}
showLoginScreen();
#else
resumeAfterLoginDialogActionTaken();
#endif
});
// Initialize the user interface and menu system
// Needs to happen AFTER the render engine initialization to access its configuration
initializeUi();
@ -1805,34 +1833,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
updateVerboseLogging();
// Now that we've loaded the menu and thus switched to the previous display plugin
// we can unlock the desktop repositioning code, since all the positions will be
// relative to the desktop size for this plugin
auto offscreenUi = getOffscreenUI();
connect(offscreenUi.data(), &OffscreenUi::desktopReady, []() {
auto offscreenUi = getOffscreenUI();
auto desktop = offscreenUi->getDesktop();
if (desktop) {
desktop->setProperty("repositionLocked", false);
}
});
connect(offscreenUi.data(), &OffscreenUi::keyboardFocusActive, [this]() {
#if !defined(Q_OS_ANDROID) && !defined(DISABLE_QML)
// Do not show login dialog if requested not to on the command line
QString hifiNoLoginCommandLineKey = QString("--").append(HIFI_NO_LOGIN_COMMAND_LINE_KEY);
int index = arguments().indexOf(hifiNoLoginCommandLineKey);
if (index != -1) {
resumeAfterLoginDialogActionTaken();
return;
}
showLoginScreen();
#else
resumeAfterLoginDialogActionTaken();
#endif
});
// Make sure we don't time out during slow operations at startup
updateHeartbeat();
QTimer* settingsTimer = new QTimer();
@ -3981,6 +3981,15 @@ static void dumpEventQueue(QThread* thread) {
}
#endif // DEBUG_EVENT_QUEUE
bool Application::notify(QObject * object, QEvent * event) {
if (thread() == QThread::currentThread()) {
PROFILE_RANGE_IF_LONGER(app, "notify", 2)
return QApplication::notify(object, event);
}
return QApplication::notify(object, event);
}
bool Application::event(QEvent* event) {
if (_aboutToQuit) {
@ -5472,6 +5481,13 @@ void Application::pauseUntilLoginDetermined() {
// disconnect domain handler.
nodeList->getDomainHandler().disconnect();
// From now on, it's permissible to call resumeAfterLoginDialogActionTaken()
_resumeAfterLoginDialogActionTaken_SafeToRun = true;
if (_resumeAfterLoginDialogActionTaken_WasPostponed) {
// resumeAfterLoginDialogActionTaken() was already called, but it aborted. Now it's safe to call it again.
resumeAfterLoginDialogActionTaken();
}
}
void Application::resumeAfterLoginDialogActionTaken() {
@ -5480,6 +5496,11 @@ void Application::resumeAfterLoginDialogActionTaken() {
return;
}
if (!_resumeAfterLoginDialogActionTaken_SafeToRun) {
_resumeAfterLoginDialogActionTaken_WasPostponed = true;
return;
}
if (!isHMDMode() && getDesktopTabletBecomesToolbarSetting()) {
auto toolbar = DependencyManager::get<ToolbarScriptingInterface>()->getToolbar("com.highfidelity.interface.toolbar.system");
toolbar->writeProperty("visible", true);

View file

@ -1,4 +1,4 @@
//
//
// Application.h
// interface/src
//
@ -156,6 +156,7 @@ public:
void updateCamera(RenderArgs& renderArgs, float deltaTime);
void resizeGL();
bool notify(QObject *, QEvent *) override;
bool event(QEvent* event) override;
bool eventFilter(QObject* object, QEvent* event) override;
@ -807,5 +808,8 @@ private:
bool _showTrackedObjects { false };
bool _prevShowTrackedObjects { false };
bool _resumeAfterLoginDialogActionTaken_WasPostponed { false };
bool _resumeAfterLoginDialogActionTaken_SafeToRun { false };
};
#endif // hifi_Application_h

View file

@ -19,14 +19,22 @@ class FancyCamera : public Camera {
Q_OBJECT
/**jsdoc
* @namespace
* @augments Camera
*/
// FIXME: JSDoc 3.5.5 doesn't augment @property definitions. The following definition is repeated in Camera.h.
/**jsdoc
* @property {Uuid} cameraEntity The ID of the entity that the camera position and orientation follow when the camera is in
* entity mode.
* The <code>Camera</code> API provides access to the "camera" that defines your view in desktop and HMD display modes.
*
* @namespace Camera
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
*
* @property {Vec3} position - The position of the camera. You can set this value only when the camera is in independent
* mode.
* @property {Quat} orientation - The orientation of the camera. You can set this value only when the camera is in
* independent mode.
* @property {Camera.Mode} mode - The camera mode.
* @property {ViewFrustum} frustum - The camera frustum.
* @property {Uuid} cameraEntity - The ID of the entity that is used for the camera position and orientation when the
* camera is in entity mode.
*/
Q_PROPERTY(QUuid cameraEntity READ getCameraEntity WRITE setCameraEntity)
@ -38,25 +46,25 @@ public:
public slots:
/**jsdoc
* Get the ID of the entity that the camera is set to use the position and orientation from when it's in entity mode. You can
* also get the entity ID using the <code>Camera.cameraEntity</code> property.
* @function Camera.getCameraEntity
* @returns {Uuid} The ID of the entity that the camera is set to follow when in entity mode; <code>null</code> if no camera
* entity has been set.
*/
/**jsdoc
* Gets the ID of the entity that the camera is set to follow (i.e., use the position and orientation from) when it's in
* entity mode. You can also get the entity ID using the {@link Camera|Camera.cameraEntity} property.
* @function Camera.getCameraEntity
* @returns {Uuid} The ID of the entity that the camera is set to follow when in entity mode; <code>null</code> if no
* camera entity has been set.
*/
QUuid getCameraEntity() const;
/**jsdoc
* Set the entity that the camera should use the position and orientation from when it's in entity mode. You can also set the
* entity using the <code>Camera.cameraEntity</code> property.
* @function Camera.setCameraEntity
* @param {Uuid} entityID The entity that the camera should follow when it's in entity mode.
* @example <caption>Move your camera to the position and orientation of the closest entity.</caption>
* Camera.setModeString("entity");
* var entity = Entities.findClosestEntity(MyAvatar.position, 100.0);
* Camera.setCameraEntity(entity);
*/
* Sets the entity that the camera should follow (i.e., use the position and orientation from) when it's in entity mode.
* You can also set the entity using the {@link Camera|Camera.cameraEntity} property.
* @function Camera.setCameraEntity
* @param {Uuid} entityID - The entity that the camera should follow when it's in entity mode.
* @example <caption>Move your camera to the position and orientation of the closest entity.</caption>
* Camera.setModeString("entity");
* var entity = Entities.findClosestEntity(MyAvatar.position, 100.0);
* Camera.setCameraEntity(entity);
*/
void setCameraEntity(QUuid entityID);
private:

View file

@ -498,8 +498,10 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
// on the creation of entities for that avatar instance and the deletion of entities for this instance
avatar->removeAvatarEntitiesFromTree();
if (removalReason != KillAvatarReason::AvatarDisconnected) {
emit AvatarInputs::getInstance()->avatarEnteredIgnoreRadius(avatar->getSessionUUID());
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble) {
emit AvatarInputs::getInstance()->avatarEnteredIgnoreRadius(avatar->getSessionUUID());
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
}
workload::Transaction workloadTransaction;
workloadTransaction.remove(avatar->getSpaceIndex());
@ -932,6 +934,18 @@ void AvatarManager::setAvatarSortCoefficient(const QString& name, const QScriptV
}
}
/**jsdoc
* PAL (People Access List) data for an avatar.
* @typedef {object} AvatarManager.PalData
* @property {Uuid} sessionUUID - The avatar's session ID. <code>""</code> if the avatar is your own.
* @property {string} sessionDisplayName - The avatar's display name, sanitized and versioned, as defined by the avatar mixer.
* It is unique among all avatars present in the domain at the time.
* @property {number} audioLoudness - The instantaneous loudness of the audio input that the avatar is injecting into the
* domain.
* @property {boolean} isReplicated - <strong>Deprecated.</strong>
* @property {Vec3} position - The position of the avatar.
* @property {number} palOrbOffset - The vertical offset from the avatar's position that an overlay orb should be displayed at.
*/
QVariantMap AvatarManager::getPalData(const QStringList& specificAvatarIdentifiers) {
QJsonArray palData;

View file

@ -37,10 +37,11 @@
using SortedAvatar = std::pair<float, std::shared_ptr<Avatar>>;
/**jsdoc
* The AvatarManager API has properties and methods which manage Avatars within the same domain.
* The <code>AvatarManager</code> API provides information about avatars within the current domain. The avatars available are
* those that Interface has displayed and therefore knows about.
*
* <p><strong>Note:</strong> This API is also provided to Interface and client entity scripts as the synonym,
* <code>AvatarList</code>. For assignment client scripts, see the separate {@link AvatarList} API.
* <p><strong>Warning:</strong> This API is also provided to Interface, client entity, and avatar scripts as the synonym,
* "<code>AvatarList</code>". For assignment client scripts, see the separate {@link AvatarList} API.</p>
*
* @namespace AvatarManager
*
@ -48,8 +49,9 @@ using SortedAvatar = std::pair<float, std::shared_ptr<Avatar>>;
* @hifi-client-entity
* @hifi-avatar
*
* @borrows AvatarList.getAvatarIdentifiers as getAvatarIdentifiers
* @borrows AvatarList.getAvatarsInRange as getAvatarsInRange
* @borrows AvatarList.getAvatar as getAvatar
* @comment AvatarList.getAvatarIdentifiers as getAvatarIdentifiers - Don't borrow because behavior is slightly different.
* @comment AvatarList.getAvatarsInRange as getAvatarsInRange - Don't borrow because behavior is slightly different.
* @borrows AvatarList.avatarAddedEvent as avatarAddedEvent
* @borrows AvatarList.avatarRemovedEvent as avatarRemovedEvent
* @borrows AvatarList.avatarSessionChangedEvent as avatarSessionChangedEvent
@ -67,6 +69,31 @@ class AvatarManager : public AvatarHashMap {
public:
/**jsdoc
* Gets the IDs of all avatars known about in the domain.
* Your own avatar is included in the list as a <code>null</code> value.
* @function AvatarManager.getAvatarIdentifiers
* @returns {Uuid[]} The IDs of all known avatars in the domain.
* @example <caption>Report the IDs of all avatars within the domain.</caption>
* var avatars = AvatarManager.getAvatarIdentifiers();
* print("Avatars in the domain: " + JSON.stringify(avatars));
* // A null item is included for your avatar.
*/
/**jsdoc
* Gets the IDs of all avatars known about within a specified distance from a point.
* Your own avatar's ID is included in the list if it is in range.
* @function AvatarManager.getAvatarsInRange
* @param {Vec3} position - The point about which the search is performed.
* @param {number} range - The search radius.
* @returns {Uuid[]} The IDs of all known avatars within the search distance from the position.
* @example <caption>Report the IDs of all avatars within 10m of your avatar.</caption>
* var RANGE = 10;
* var avatars = AvatarManager.getAvatarsInRange(MyAvatar.position, RANGE);
* print("Nearby avatars: " + JSON.stringify(avatars));
* print("Own avatar: " + MyAvatar.sessionUUID);
*/
/// Registers the script types associated with the avatar manager.
static void registerMetaTypes(QScriptEngine* engine);
@ -79,9 +106,7 @@ public:
glm::vec3 getMyAvatarPosition() const { return _myAvatar->getWorldPosition(); }
/**jsdoc
* @function AvatarManager.getAvatar
* @param {Uuid} avatarID
* @returns {AvatarData}
* @comment Uses the base class's JSDoc.
*/
// Null/Default-constructed QUuids will return MyAvatar
Q_INVOKABLE virtual ScriptAvatarData* getAvatar(QUuid avatarID) override { return new ScriptAvatar(getAvatarBySessionID(avatarID)); }
@ -112,36 +137,53 @@ public:
void handleCollisionEvents(const CollisionEvents& collisionEvents);
/**jsdoc
* Gets the amount of avatar mixer data being generated by an avatar other than your own.
* @function AvatarManager.getAvatarDataRate
* @param {Uuid} sessionID
* @param {string} [rateName=""]
* @returns {number}
* @param {Uuid} sessionID - The ID of the avatar whose data rate you're retrieving.
* @param {AvatarDataRate} [rateName=""] - The type of avatar mixer data to get the data rate of.
* @returns {number} The data rate in kbps; <code>0</code> if the avatar is your own.
*/
Q_INVOKABLE float getAvatarDataRate(const QUuid& sessionID, const QString& rateName = QString("")) const;
/**jsdoc
* Gets the update rate of avatar mixer data being generated by an avatar other than your own.
* @function AvatarManager.getAvatarUpdateRate
* @param {Uuid} sessionID
* @param {string} [rateName=""]
* @returns {number}
* @param {Uuid} sessionID - The ID of the avatar whose update rate you're retrieving.
* @param {AvatarUpdateRate} [rateName=""] - The type of avatar mixer data to get the update rate of.
* @returns {number} The update rate in Hz; <code>0</code> if the avatar is your own.
*/
Q_INVOKABLE float getAvatarUpdateRate(const QUuid& sessionID, const QString& rateName = QString("")) const;
/**jsdoc
* Gets the simulation rate of an avatar other than your own.
* @function AvatarManager.getAvatarSimulationRate
* @param {Uuid} sessionID
* @param {string} [rateName=""]
* @returns {number}
* @param {Uuid} sessionID - The ID of the avatar whose simulation rate you're retrieving.
* @param {AvatarSimulationRate} [rateName=""] - The type of avatar data to get the simulation rate of.
* @returns {number} The simulation rate in Hz; <code>0</code> if the avatar is your own.
*/
Q_INVOKABLE float getAvatarSimulationRate(const QUuid& sessionID, const QString& rateName = QString("")) const;
/**jsdoc
* Find the first avatar intersected by a {@link PickRay}.
* @function AvatarManager.findRayIntersection
* @param {PickRay} ray
* @param {Uuid[]} [avatarsToInclude=[]]
* @param {Uuid[]} [avatarsToDiscard=[]]
* @param {boolean} pickAgainstMesh
* @returns {RayToAvatarIntersectionResult}
* @param {PickRay} ray - The ray to use for finding avatars.
* @param {Uuid[]} [avatarsToInclude=[]] - If not empty then search is restricted to these avatars.
* @param {Uuid[]} [avatarsToDiscard=[]] - Avatars to ignore in the search.
* @param {boolean} [pickAgainstMesh=true] - If <code>true</code> then the exact intersection with the avatar mesh is
* calculated, if <code>false</code> then the intersection is approximate.
* @returns {RayToAvatarIntersectionResult} The result of the search for the first intersected avatar.
* @example <caption>Find the first avatar directly in front of you.</caption>
* var pickRay = {
* origin: MyAvatar.position,
* direction: Quat.getFront(MyAvatar.orientation)
* };
*
* var intersection = AvatarManager.findRayIntersection(pickRay);
* if (intersection.intersects) {
* print("Avatar found: " + JSON.stringify(intersection));
* } else {
* print("No avatar found.");
* }
*/
Q_INVOKABLE RayToAvatarIntersectionResult findRayIntersection(const PickRay& ray,
const QScriptValue& avatarIdsToInclude = QScriptValue(),
@ -149,11 +191,12 @@ public:
bool pickAgainstMesh = true);
/**jsdoc
* @function AvatarManager.findRayIntersectionVector
* @param {PickRay} ray
* @param {Uuid[]} avatarsToInclude
* @param {Uuid[]} avatarsToDiscard
* @param {boolean} pickAgainstMesh
* @returns {RayToAvatarIntersectionResult}
* @param {PickRay} ray - Ray.
* @param {Uuid[]} avatarsToInclude - Avatars to include.
* @param {Uuid[]} avatarsToDiscard - Avatars to discard.
* @param {boolean} pickAgainstMesh - Pick against mesh.
* @returns {RayToAvatarIntersectionResult} Intersection result.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE RayToAvatarIntersectionResult findRayIntersectionVector(const PickRay& ray,
const QVector<EntityItemID>& avatarsToInclude,
@ -162,10 +205,11 @@ public:
/**jsdoc
* @function AvatarManager.findParabolaIntersectionVector
* @param {PickParabola} pick
* @param {Uuid[]} avatarsToInclude
* @param {Uuid[]} avatarsToDiscard
* @returns {ParabolaToAvatarIntersectionResult}
* @param {PickParabola} pick - Pick.
* @param {Uuid[]} avatarsToInclude - Avatars to include.
* @param {Uuid[]} avatarsToDiscard - Avatars to discard.
* @returns {ParabolaToAvatarIntersectionResult} Intersection result.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE ParabolaToAvatarIntersectionResult findParabolaIntersectionVector(const PickParabola& pick,
const QVector<EntityItemID>& avatarsToInclude,
@ -173,27 +217,31 @@ public:
/**jsdoc
* @function AvatarManager.getAvatarSortCoefficient
* @param {string} name
* @returns {number}
* @param {string} name - Name.
* @returns {number} Value.
* @deprecated This function is deprecated and will be removed.
*/
// TODO: remove this HACK once we settle on optimal default sort coefficients
Q_INVOKABLE float getAvatarSortCoefficient(const QString& name);
/**jsdoc
* @function AvatarManager.setAvatarSortCoefficient
* @param {string} name
* @param {number} value
* @param {string} name - Name.
* @param {number} value - Value.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE void setAvatarSortCoefficient(const QString& name, const QScriptValue& value);
/**jsdoc
* Used in the PAL for getting PAL-related data about avatars nearby. Using this method is faster
* than iterating over each avatar and obtaining data about them in JavaScript, as that method
* locks and unlocks each avatar's data structure potentially hundreds of times per update tick.
* Gets PAL (People Access List) data for one or more avatars. Using this method is faster than iterating over each avatar
* and obtaining data about each individually.
* @function AvatarManager.getPalData
* @param {string[]} [specificAvatarIdentifiers=[]] - The list of IDs of the avatars you want the PAL data for.
* If an empty list, the PAL data for all nearby avatars is returned.
* @returns {object[]} An array of objects, each object being the PAL data for an avatar.
* @param {string[]} [avatarIDs=[]] - The IDs of the avatars to get the PAL data for. If empty, then PAL data is obtained
* for all avatars.
* @returns {object<"data", AvatarManager.PalData[]>} An array of objects, each object being the PAL data for an avatar.
* @example <caption>Report the PAL data for an avatar nearby.</caption>
* var palData = AvatarManager.getPalData();
* print("PAL data for one avatar: " + JSON.stringify(palData.data[0]));
*/
Q_INVOKABLE QVariantMap getPalData(const QStringList& specificAvatarIdentifiers = QStringList());
@ -209,7 +257,8 @@ public:
public slots:
/**jsdoc
* @function AvatarManager.updateAvatarRenderStatus
* @param {boolean} shouldRenderAvatars
* @param {boolean} shouldRenderAvatars - Should render avatars.
* @deprecated This function is deprecated and will be removed.
*/
void updateAvatarRenderStatus(bool shouldRenderAvatars);

View file

@ -818,20 +818,8 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
if (_cauterizationNeedsUpdate) {
_cauterizationNeedsUpdate = false;
// Redisplay cauterized entities that are no longer children of the avatar.
auto cauterizedChild = _cauterizedChildrenOfHead.begin();
if (cauterizedChild != _cauterizedChildrenOfHead.end()) {
auto children = getChildren();
while (cauterizedChild != _cauterizedChildrenOfHead.end()) {
if (!children.contains(*cauterizedChild)) {
updateChildCauterization(*cauterizedChild, false);
cauterizedChild = _cauterizedChildrenOfHead.erase(cauterizedChild);
} else {
++cauterizedChild;
}
}
}
auto objectsToUncauterize = _cauterizedChildrenOfHead;
_cauterizedChildrenOfHead.clear();
// Update cauterization of entities that are children of the avatar.
auto headBoneSet = _skeletonModel->getCauterizeBoneSet();
forEachChild([&](SpatiallyNestablePointer object) {
@ -843,15 +831,19 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
updateChildCauterization(descendant, !_prevShouldDrawHead);
});
_cauterizedChildrenOfHead.insert(object);
} else if (_cauterizedChildrenOfHead.find(object) != _cauterizedChildrenOfHead.end()) {
// Redisplay cauterized children that are no longer children of the head.
updateChildCauterization(object, false);
objectsToUncauterize.erase(object);
} else if (objectsToUncauterize.find(object) == objectsToUncauterize.end()) {
objectsToUncauterize.insert(object);
object->forEachDescendant([&](SpatiallyNestablePointer descendant) {
updateChildCauterization(descendant, false);
objectsToUncauterize.insert(descendant);
});
_cauterizedChildrenOfHead.erase(object);
}
});
// Redisplay cauterized entities that are no longer children of the avatar.
for (auto cauterizedChild = objectsToUncauterize.begin(); cauterizedChild != objectsToUncauterize.end(); cauterizedChild++) {
updateChildCauterization(*cauterizedChild, false);
}
}
{
@ -3180,17 +3172,40 @@ int MyAvatar::sendAvatarDataPacket(bool sendAll) {
return bytesSent;
}
const float RENDER_HEAD_CUTOFF_DISTANCE = 0.47f;
bool MyAvatar::cameraInsideHead(const glm::vec3& cameraPosition) const {
if (!_skeletonModel) {
return false;
}
// transform cameraPosition into rig coordinates
AnimPose rigToWorld = AnimPose(getWorldOrientation() * Quaternions::Y_180, getWorldPosition());
AnimPose worldToRig = rigToWorld.inverse();
glm::vec3 rigCameraPosition = worldToRig * cameraPosition;
// use head k-dop shape to determine if camera is inside head.
const Rig& rig = _skeletonModel->getRig();
int headJointIndex = rig.indexOfJoint("Head");
if (headJointIndex >= 0) {
const HFMModel& hfmModel = _skeletonModel->getHFMModel();
AnimPose headPose;
if (rig.getAbsoluteJointPoseInRigFrame(headJointIndex, headPose)) {
glm::vec3 displacement;
const HFMJointShapeInfo& headShapeInfo = hfmModel.joints[headJointIndex].shapeInfo;
return findPointKDopDisplacement(rigCameraPosition, headPose, headShapeInfo, displacement);
}
}
// fall back to simple distance check.
const float RENDER_HEAD_CUTOFF_DISTANCE = 0.47f;
return glm::length(cameraPosition - getHeadPosition()) < (RENDER_HEAD_CUTOFF_DISTANCE * getModelScale());
}
bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
bool defaultMode = renderArgs->_renderMode == RenderArgs::DEFAULT_RENDER_MODE;
bool firstPerson = qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON;
bool overrideAnim = _skeletonModel ? _skeletonModel->getRig().isPlayingOverrideAnimation() : false;
bool insideHead = cameraInsideHead(renderArgs->getViewFrustum().getPosition());
return !defaultMode || !firstPerson || !insideHead;
return !defaultMode || (!firstPerson && !insideHead) || (overrideAnim && !insideHead);
}
void MyAvatar::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
@ -3889,7 +3904,8 @@ bool MyAvatar::requiresSafeLanding(const glm::vec3& positionIn, glm::vec3& bette
// See https://highfidelity.fogbugz.com/f/cases/5003/findRayIntersection-has-option-to-use-collidableOnly-but-doesn-t-actually-use-colliders
QVariantMap extraInfo;
EntityItemID entityID = entityTree->evalRayIntersection(startPointIn, directionIn, include, ignore,
PickFilter(PickFilter::getBitMask(PickFilter::FlagBit::COLLIDABLE) | PickFilter::getBitMask(PickFilter::FlagBit::PRECISE)),
PickFilter(PickFilter::getBitMask(PickFilter::FlagBit::COLLIDABLE) | PickFilter::getBitMask(PickFilter::FlagBit::PRECISE)
| PickFilter::getBitMask(PickFilter::FlagBit::DOMAIN_ENTITIES) | PickFilter::getBitMask(PickFilter::FlagBit::AVATAR_ENTITIES)), // exclude Local entities
element, distance, face, normalOut, extraInfo, lockType, accurateResult);
if (entityID.isNull()) {
return false;
@ -4805,7 +4821,12 @@ bool MyAvatar::isReadyForPhysics() const {
}
void MyAvatar::setSprintMode(bool sprint) {
_walkSpeedScalar = sprint ? AVATAR_SPRINT_SPEED_SCALAR : AVATAR_WALK_SPEED_SCALAR;
if (qApp->isHMDMode()) {
_walkSpeedScalar = sprint ? AVATAR_DESKTOP_SPRINT_SPEED_SCALAR : AVATAR_WALK_SPEED_SCALAR;
}
else {
_walkSpeedScalar = sprint ? AVATAR_HMD_SPRINT_SPEED_SCALAR : AVATAR_WALK_SPEED_SCALAR;
}
}
void MyAvatar::setIsInWalkingState(bool isWalking) {

View file

@ -334,7 +334,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
eyeParams.leftEyeJointIndex = _rig.indexOfJoint("LeftEye");
eyeParams.rightEyeJointIndex = _rig.indexOfJoint("RightEye");
_rig.updateFromEyeParameters(eyeParams);
if (_owningAvatar->getHasProceduralEyeFaceMovement()) {
_rig.updateFromEyeParameters(eyeParams);
}
updateFingers();
}

View file

@ -96,28 +96,32 @@ int passwordCallback(char* password, int maxPasswordSize, int rwFlag, void* u) {
}
}
EC_KEY* readKeys(const char* filename) {
FILE* fp;
EC_KEY *key = NULL;
if ((fp = fopen(filename, "rt"))) {
EC_KEY* readKeys(QString filename) {
QFile file(filename);
EC_KEY* key = NULL;
if (file.open(QFile::ReadOnly)) {
// file opened successfully
qCDebug(commerce) << "opened key file" << filename;
if ((key = PEM_read_EC_PUBKEY(fp, NULL, NULL, NULL))) {
QByteArray pemKeyBytes = file.readAll();
BIO* bufio = BIO_new_mem_buf((void*)pemKeyBytes.constData(), pemKeyBytes.length());
if ((key = PEM_read_bio_EC_PUBKEY(bufio, NULL, NULL, NULL))) {
// now read private key
qCDebug(commerce) << "read public key";
if ((key = PEM_read_ECPrivateKey(fp, &key, passwordCallback, NULL))) {
if ((key = PEM_read_bio_ECPrivateKey(bufio, &key, passwordCallback, NULL))) {
qCDebug(commerce) << "read private key";
fclose(fp);
return key;
BIO_free(bufio);
file.close();
} else {
qCDebug(commerce) << "failed to read private key";
}
qCDebug(commerce) << "failed to read private key";
} else {
qCDebug(commerce) << "failed to read public key";
}
fclose(fp);
BIO_free(bufio);
file.close();
} else {
qCDebug(commerce) << "failed to open key file" << filename;
}
@ -131,8 +135,7 @@ bool Wallet::writeBackupInstructions() {
QFile outputFile(outputFilename);
bool retval = false;
if (getKeyFilePath().isEmpty())
{
if (getKeyFilePath().isEmpty()) {
return false;
}
@ -152,7 +155,7 @@ bool Wallet::writeBackupInstructions() {
outputFile.write(text.toUtf8());
// Close the output file
outputFile.close();
outputFile.close();
retval = true;
qCDebug(commerce) << "wrote html file successfully";
@ -165,28 +168,35 @@ bool Wallet::writeBackupInstructions() {
return retval;
}
bool writeKeys(const char* filename, EC_KEY* keys) {
FILE* fp;
bool writeKeys(QString filename, EC_KEY* keys) {
BIO* bio = BIO_new(BIO_s_mem());
bool retval = false;
if ((fp = fopen(filename, "wt"))) {
if (!PEM_write_EC_PUBKEY(fp, keys)) {
fclose(fp);
qCCritical(commerce) << "failed to write public key";
return retval;
}
if (!PEM_write_bio_EC_PUBKEY(bio, keys)) {
BIO_free(bio);
qCCritical(commerce) << "failed to write public key";
return retval;
}
if (!PEM_write_ECPrivateKey(fp, keys, EVP_des_ede3_cbc(), NULL, 0, passwordCallback, NULL)) {
fclose(fp);
qCCritical(commerce) << "failed to write private key";
return retval;
}
if (!PEM_write_bio_ECPrivateKey(bio, keys, EVP_des_ede3_cbc(), NULL, 0, passwordCallback, NULL)) {
BIO_free(bio);
qCCritical(commerce) << "failed to write private key";
return retval;
}
QFile file(filename);
if (!file.open(QIODevice::WriteOnly)) {
const char* bio_data;
long bio_size = BIO_get_mem_data(bio, &bio_data);
QByteArray keyBytes(bio_data, bio_size);
file.write(keyBytes);
retval = true;
qCDebug(commerce) << "wrote keys successfully";
fclose(fp);
file.close();
} else {
qCDebug(commerce) << "failed to open key file" << filename;
}
BIO_free(bio);
return retval;
}
@ -215,7 +225,6 @@ QByteArray Wallet::getWallet() {
}
QPair<QByteArray*, QByteArray*> generateECKeypair() {
EC_KEY* keyPair = EC_KEY_new_by_curve_name(NID_secp256k1);
QPair<QByteArray*, QByteArray*> retval{};
@ -235,7 +244,6 @@ QPair<QByteArray*, QByteArray*> generateECKeypair() {
if (publicKeyLength <= 0 || privateKeyLength <= 0) {
qCDebug(commerce) << "Error getting DER public or private key from EC struct -" << ERR_get_error();
// cleanup the EC struct
EC_KEY_free(keyPair);
@ -251,8 +259,7 @@ QPair<QByteArray*, QByteArray*> generateECKeypair() {
return retval;
}
if (!writeKeys(keyFilePath().toStdString().c_str(), keyPair)) {
if (!writeKeys(keyFilePath(), keyPair)) {
qCDebug(commerce) << "couldn't save keys!";
return retval;
}
@ -273,13 +280,18 @@ QPair<QByteArray*, QByteArray*> generateECKeypair() {
// END copied code (which will soon change)
// the public key can just go into a byte array
QByteArray readPublicKey(const char* filename) {
FILE* fp;
EC_KEY* key = NULL;
if ((fp = fopen(filename, "r"))) {
QByteArray readPublicKey(QString filename) {
QByteArray retval;
QFile file(filename);
if (file.open(QIODevice::ReadOnly)) {
// file opened successfully
qCDebug(commerce) << "opened key file" << filename;
if ((key = PEM_read_EC_PUBKEY(fp, NULL, NULL, NULL))) {
QByteArray pemKeyBytes = file.readAll();
BIO* bufio = BIO_new_mem_buf((void*)pemKeyBytes.constData(), pemKeyBytes.length());
EC_KEY* key = PEM_read_bio_EC_PUBKEY(bufio, NULL, NULL, NULL);
if (key) {
// file read successfully
unsigned char* publicKeyDER = NULL;
int publicKeyLength = i2d_EC_PUBKEY(key, &publicKeyDER);
@ -287,17 +299,19 @@ QByteArray readPublicKey(const char* filename) {
// cleanup
EC_KEY_free(key);
fclose(fp);
qCDebug(commerce) << "parsed public key file successfully";
QByteArray retval((char*)publicKeyDER, publicKeyLength);
OPENSSL_free(publicKeyDER);
BIO_free(bufio);
file.close();
return retval;
} else {
qCDebug(commerce) << "couldn't parse" << filename;
}
fclose(fp);
BIO_free(bufio);
file.close();
} else {
qCDebug(commerce) << "couldn't open" << filename;
}
@ -306,13 +320,17 @@ QByteArray readPublicKey(const char* filename) {
// the private key should be read/copied into heap memory. For now, we need the EC_KEY struct
// so I'll return that.
EC_KEY* readPrivateKey(const char* filename) {
FILE* fp;
EC_KEY* readPrivateKey(QString filename) {
QFile file(filename);
EC_KEY* key = NULL;
if ((fp = fopen(filename, "r"))) {
if (file.open(QIODevice::ReadOnly)) {
// file opened successfully
qCDebug(commerce) << "opened key file" << filename;
if ((key = PEM_read_ECPrivateKey(fp, &key, passwordCallback, NULL))) {
QByteArray pemKeyBytes = file.readAll();
BIO* bufio = BIO_new_mem_buf((void*)pemKeyBytes.constData(), pemKeyBytes.length());
if ((key = PEM_read_bio_ECPrivateKey(bufio, &key, passwordCallback, NULL))) {
qCDebug(commerce) << "parsed private key file successfully";
} else {
@ -320,7 +338,8 @@ EC_KEY* readPrivateKey(const char* filename) {
// if the passphrase is wrong, then let's not cache it
DependencyManager::get<Wallet>()->setPassphrase("");
}
fclose(fp);
BIO_free(bufio);
file.close();
} else {
qCDebug(commerce) << "couldn't open" << filename;
}
@ -361,7 +380,7 @@ Wallet::Wallet() {
if (wallet->getKeyFilePath().isEmpty() || !wallet->getSecurityImage()) {
if (keyStatus == "preexisting") {
status = (uint) WalletStatus::WALLET_STATUS_PREEXISTING;
} else{
} else {
status = (uint) WalletStatus::WALLET_STATUS_NOT_SET_UP;
}
} else if (!wallet->walletIsAuthenticatedWithPassphrase()) {
@ -371,7 +390,6 @@ Wallet::Wallet() {
} else {
status = (uint) WalletStatus::WALLET_STATUS_READY;
}
walletScriptingInterface->setWalletStatus(status);
});
@ -569,10 +587,10 @@ bool Wallet::walletIsAuthenticatedWithPassphrase() {
}
// otherwise, we have a passphrase but no keys, so we have to check
auto publicKey = readPublicKey(keyFilePath().toStdString().c_str());
auto publicKey = readPublicKey(keyFilePath());
if (publicKey.size() > 0) {
if (auto key = readPrivateKey(keyFilePath().toStdString().c_str())) {
if (auto key = readPrivateKey(keyFilePath())) {
EC_KEY_free(key);
// be sure to add the public key so we don't do this over and over
@ -631,8 +649,7 @@ QStringList Wallet::listPublicKeys() {
QString Wallet::signWithKey(const QByteArray& text, const QString& key) {
EC_KEY* ecPrivateKey = NULL;
auto keyFilePathString = keyFilePath().toStdString();
if ((ecPrivateKey = readPrivateKey(keyFilePath().toStdString().c_str()))) {
if ((ecPrivateKey = readPrivateKey(keyFilePath()))) {
unsigned char* sig = new unsigned char[ECDSA_size(ecPrivateKey)];
unsigned int signatureBytes = 0;
@ -641,12 +658,8 @@ QString Wallet::signWithKey(const QByteArray& text, const QString& key) {
QByteArray hashedPlaintext = QCryptographicHash::hash(text, QCryptographicHash::Sha256);
int retrn = ECDSA_sign(0,
reinterpret_cast<const unsigned char*>(hashedPlaintext.constData()),
hashedPlaintext.size(),
sig,
&signatureBytes, ecPrivateKey);
int retrn = ECDSA_sign(0, reinterpret_cast<const unsigned char*>(hashedPlaintext.constData()), hashedPlaintext.size(),
sig, &signatureBytes, ecPrivateKey);
EC_KEY_free(ecPrivateKey);
QByteArray signature(reinterpret_cast<const char*>(sig), signatureBytes);
@ -682,7 +695,6 @@ void Wallet::updateImageProvider() {
}
void Wallet::chooseSecurityImage(const QString& filename) {
if (_securityImage) {
delete _securityImage;
}
@ -754,7 +766,7 @@ QString Wallet::getKeyFilePath() {
}
bool Wallet::writeWallet(const QString& newPassphrase) {
EC_KEY* keys = readKeys(keyFilePath().toStdString().c_str());
EC_KEY* keys = readKeys(keyFilePath());
auto ledger = DependencyManager::get<Ledger>();
// Remove any existing locker, because it will be out of date.
if (!_publicKeys.isEmpty() && !ledger->receiveAt(_publicKeys.first(), _publicKeys.first(), QByteArray())) {
@ -768,7 +780,7 @@ bool Wallet::writeWallet(const QString& newPassphrase) {
setPassphrase(newPassphrase);
}
if (writeKeys(tempFileName.toStdString().c_str(), keys)) {
if (writeKeys(tempFileName, keys)) {
if (writeSecurityImage(_securityImage, tempFileName)) {
// ok, now move the temp file to the correct spot
QFile(QString(keyFilePath())).remove();
@ -834,10 +846,10 @@ void Wallet::handleChallengeOwnershipPacket(QSharedPointer<ReceivedMessage> pack
challengingNodeUUID = packet->read(challengingNodeUUIDByteArraySize);
}
EC_KEY* ec = readKeys(keyFilePath().toStdString().c_str());
EC_KEY* ec = readKeys(keyFilePath());
QString sig;
if (ec) {
if (ec) {
ERR_clear_error();
sig = signWithKey(text, ""); // base64 signature, QByteArray cast (on return) to QString FIXME should pass ec as string so we can tell which key to sign with
status = 1;

View file

@ -302,8 +302,11 @@ int main(int argc, const char* argv[]) {
PROFILE_SYNC_BEGIN(startup, "app full ctor", "");
Application app(argcExtended, const_cast<char**>(argvExtended.data()), startupTime, runningMarkerExisted);
PROFILE_SYNC_END(startup, "app full ctor", "");
#if defined(Q_OS_LINUX)
app.setWindowIcon(QIcon(PathUtils::resourcesPath() + "images/hifi-logo.svg"));
#endif
QTimer exitTimer;
if (traceDuration > 0.0f) {
exitTimer.setSingleShot(true);

View file

@ -156,10 +156,10 @@ void DialogsManager::hmdTools(bool showTools) {
}
_hmdToolsDialog->show();
_hmdToolsDialog->raise();
qApp->getWindow()->activateWindow();
} else {
hmdToolsClosed();
}
qApp->getWindow()->activateWindow();
}
void DialogsManager::hmdToolsClosed() {
@ -207,4 +207,4 @@ void DialogsManager::showDomainConnectionDialog() {
_domainConnectionDialog->show();
_domainConnectionDialog->raise();
}
}

View file

@ -142,3 +142,72 @@ glm::quat computeBodyFacingFromHead(const glm::quat& headRot, const glm::vec3& u
return glmExtractRotation(bodyMat);
}
const float INV_SQRT_3 = 1.0f / sqrtf(3.0f);
const int DOP14_COUNT = 14;
const glm::vec3 DOP14_NORMALS[DOP14_COUNT] = {
Vectors::UNIT_X,
-Vectors::UNIT_X,
Vectors::UNIT_Y,
-Vectors::UNIT_Y,
Vectors::UNIT_Z,
-Vectors::UNIT_Z,
glm::vec3(INV_SQRT_3, INV_SQRT_3, INV_SQRT_3),
-glm::vec3(INV_SQRT_3, INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, INV_SQRT_3),
-glm::vec3(INV_SQRT_3, -INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
-glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3),
-glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3)
};
// returns true if the given point lies inside of the k-dop, specified by shapeInfo & shapePose.
// if the given point does lie within the k-dop, it also returns the amount of displacement necessary to push that point outward
// such that it lies on the surface of the kdop.
bool findPointKDopDisplacement(const glm::vec3& point, const AnimPose& shapePose, const HFMJointShapeInfo& shapeInfo, glm::vec3& displacementOut) {
// transform point into local space of jointShape.
glm::vec3 localPoint = shapePose.inverse().xformPoint(point);
// Only works for 14-dop shape infos.
if (shapeInfo.dots.size() != DOP14_COUNT) {
return false;
}
glm::vec3 minDisplacement(FLT_MAX);
float minDisplacementLen = FLT_MAX;
glm::vec3 p = localPoint - shapeInfo.avgPoint;
float pLen = glm::length(p);
if (pLen > 0.0f) {
int slabCount = 0;
for (int i = 0; i < DOP14_COUNT; i++) {
float dot = glm::dot(p, DOP14_NORMALS[i]);
if (dot > 0.0f && dot < shapeInfo.dots[i]) {
slabCount++;
float distToPlane = pLen * (shapeInfo.dots[i] / dot);
float displacementLen = distToPlane - pLen;
// keep track of the smallest displacement
if (displacementLen < minDisplacementLen) {
minDisplacementLen = displacementLen;
minDisplacement = (p / pLen) * displacementLen;
}
}
}
if (slabCount == (DOP14_COUNT / 2) && minDisplacementLen != FLT_MAX) {
// we are within the k-dop so push the point along the minimum displacement found
displacementOut = shapePose.xformVectorFast(minDisplacement);
return true;
} else {
// point is outside of kdop
return false;
}
} else {
// point is directly on top of shapeInfo.avgPoint.
// push the point out along the x axis.
displacementOut = shapePose.xformVectorFast(shapeInfo.points[0]);
return true;
}
}

View file

@ -128,4 +128,10 @@ protected:
bool _snapshotValid { false };
};
// returns true if the given point lies inside of the k-dop, specified by shapeInfo & shapePose.
// if the given point does lie within the k-dop, it also returns the amount of displacement necessary to push that point outward
// such that it lies on the surface of the kdop.
bool findPointKDopDisplacement(const glm::vec3& point, const AnimPose& shapePose, const HFMJointShapeInfo& shapeInfo, glm::vec3& displacementOut);
#endif

View file

@ -1521,74 +1521,6 @@ void Rig::updateHead(bool headEnabled, bool hipsEnabled, const AnimPose& headPos
}
}
const float INV_SQRT_3 = 1.0f / sqrtf(3.0f);
const int DOP14_COUNT = 14;
const glm::vec3 DOP14_NORMALS[DOP14_COUNT] = {
Vectors::UNIT_X,
-Vectors::UNIT_X,
Vectors::UNIT_Y,
-Vectors::UNIT_Y,
Vectors::UNIT_Z,
-Vectors::UNIT_Z,
glm::vec3(INV_SQRT_3, INV_SQRT_3, INV_SQRT_3),
-glm::vec3(INV_SQRT_3, INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, INV_SQRT_3),
-glm::vec3(INV_SQRT_3, -INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
-glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3),
-glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3)
};
// returns true if the given point lies inside of the k-dop, specified by shapeInfo & shapePose.
// if the given point does lie within the k-dop, it also returns the amount of displacement necessary to push that point outward
// such that it lies on the surface of the kdop.
static bool findPointKDopDisplacement(const glm::vec3& point, const AnimPose& shapePose, const HFMJointShapeInfo& shapeInfo, glm::vec3& displacementOut) {
// transform point into local space of jointShape.
glm::vec3 localPoint = shapePose.inverse().xformPoint(point);
// Only works for 14-dop shape infos.
if (shapeInfo.dots.size() != DOP14_COUNT) {
return false;
}
glm::vec3 minDisplacement(FLT_MAX);
float minDisplacementLen = FLT_MAX;
glm::vec3 p = localPoint - shapeInfo.avgPoint;
float pLen = glm::length(p);
if (pLen > 0.0f) {
int slabCount = 0;
for (int i = 0; i < DOP14_COUNT; i++) {
float dot = glm::dot(p, DOP14_NORMALS[i]);
if (dot > 0.0f && dot < shapeInfo.dots[i]) {
slabCount++;
float distToPlane = pLen * (shapeInfo.dots[i] / dot);
float displacementLen = distToPlane - pLen;
// keep track of the smallest displacement
if (displacementLen < minDisplacementLen) {
minDisplacementLen = displacementLen;
minDisplacement = (p / pLen) * displacementLen;
}
}
}
if (slabCount == (DOP14_COUNT / 2) && minDisplacementLen != FLT_MAX) {
// we are within the k-dop so push the point along the minimum displacement found
displacementOut = shapePose.xformVectorFast(minDisplacement);
return true;
} else {
// point is outside of kdop
return false;
}
} else {
// point is directly on top of shapeInfo.avgPoint.
// push the point out along the x axis.
displacementOut = shapePose.xformVectorFast(shapeInfo.points[0]);
return true;
}
}
glm::vec3 Rig::deflectHandFromTorso(const glm::vec3& handPosition, const HFMJointShapeInfo& hipsShapeInfo, const HFMJointShapeInfo& spineShapeInfo,
const HFMJointShapeInfo& spine1ShapeInfo, const HFMJointShapeInfo& spine2ShapeInfo) const {
glm::vec3 position = handPosition;

View file

@ -116,8 +116,9 @@ public:
void destroyAnimGraph();
void overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame);
bool isPlayingOverrideAnimation() const { return _userAnimState.clipNodeEnum != UserAnimState::None; };
void restoreAnimation();
void overrideNetworkAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame);
void triggerNetworkRole(const QString& role);
void restoreNetworkAnimation();
@ -333,7 +334,7 @@ protected:
RigRole _state { RigRole::Idle };
RigRole _desiredState { RigRole::Idle };
float _desiredStateAge { 0.0f };
struct NetworkAnimState {
enum ClipNodeEnum {
None = 0,

View file

@ -1397,7 +1397,6 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// spatialize into mixBuffer
injector->getLocalFOA().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
qw, qx, qy, qz, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else if (options.stereo) {
if (options.positionSet) {
@ -1409,11 +1408,8 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
}
// direct mix into mixBuffer
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
mixBuffer[2*i+0] += convertToFloat(_localScratchBuffer[2*i+0]) * gain;
mixBuffer[2*i+1] += convertToFloat(_localScratchBuffer[2*i+1]) * gain;
}
injector->getLocalHRTF().mixStereo(_localScratchBuffer, mixBuffer, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else { // injector is mono
if (options.positionSet) {
@ -1431,11 +1427,8 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
} else {
// direct mix into mixBuffer
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
float sample = convertToFloat(_localScratchBuffer[i]) * gain;
mixBuffer[2*i+0] += sample;
mixBuffer[2*i+1] += sample;
}
injector->getLocalHRTF().mixMono(_localScratchBuffer, mixBuffer, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
}

View file

@ -882,14 +882,16 @@ static void convertInput_ref(int16_t* src, float *dst[4], float gain, int numFra
#endif
// in-place rotation of the soundfield
// crossfade between old and new rotation, to prevent artifacts
static void rotate_3x3_ref(float* buf[4], const float m0[3][3], const float m1[3][3], const float* win, int numFrames) {
// in-place rotation and scaling of the soundfield
// crossfade between old and new matrix, to prevent artifacts
static void rotate_4x4_ref(float* buf[4], const float m0[4][4], const float m1[4][4], const float* win, int numFrames) {
const float md[3][3] = {
{ m0[0][0] - m1[0][0], m0[0][1] - m1[0][1], m0[0][2] - m1[0][2] },
{ m0[1][0] - m1[1][0], m0[1][1] - m1[1][1], m0[1][2] - m1[1][2] },
{ m0[2][0] - m1[2][0], m0[2][1] - m1[2][1], m0[2][2] - m1[2][2] },
// matrix difference
const float md[4][4] = {
{ m0[0][0] - m1[0][0], m0[0][1] - m1[0][1], m0[0][2] - m1[0][2], m0[0][3] - m1[0][3] },
{ m0[1][0] - m1[1][0], m0[1][1] - m1[1][1], m0[1][2] - m1[1][2], m0[1][3] - m1[1][3] },
{ m0[2][0] - m1[2][0], m0[2][1] - m1[2][1], m0[2][2] - m1[2][2], m0[2][3] - m1[2][3] },
{ m0[3][0] - m1[3][0], m0[3][1] - m1[3][1], m0[3][2] - m1[3][2], m0[3][3] - m1[3][3] },
};
for (int i = 0; i < numFrames; i++) {
@ -898,22 +900,27 @@ static void rotate_3x3_ref(float* buf[4], const float m0[3][3], const float m1[3
// interpolate the matrix
float m00 = m1[0][0] + frac * md[0][0];
float m10 = m1[1][0] + frac * md[1][0];
float m20 = m1[2][0] + frac * md[2][0];
float m01 = m1[0][1] + frac * md[0][1];
float m11 = m1[1][1] + frac * md[1][1];
float m21 = m1[2][1] + frac * md[2][1];
float m31 = m1[3][1] + frac * md[3][1];
float m02 = m1[0][2] + frac * md[0][2];
float m12 = m1[1][2] + frac * md[1][2];
float m22 = m1[2][2] + frac * md[2][2];
float m32 = m1[3][2] + frac * md[3][2];
float m13 = m1[1][3] + frac * md[1][3];
float m23 = m1[2][3] + frac * md[2][3];
float m33 = m1[3][3] + frac * md[3][3];
// matrix multiply
float x = m00 * buf[1][i] + m01 * buf[2][i] + m02 * buf[3][i];
float y = m10 * buf[1][i] + m11 * buf[2][i] + m12 * buf[3][i];
float z = m20 * buf[1][i] + m21 * buf[2][i] + m22 * buf[3][i];
float w = m00 * buf[0][i];
float x = m11 * buf[1][i] + m12 * buf[2][i] + m13 * buf[3][i];
float y = m21 * buf[1][i] + m22 * buf[2][i] + m23 * buf[3][i];
float z = m31 * buf[1][i] + m32 * buf[2][i] + m33 * buf[3][i];
buf[0][i] = w;
buf[1][i] = x;
buf[2][i] = y;
buf[3][i] = z;
@ -932,7 +939,7 @@ void rfft512_AVX2(float buf[512]);
void rifft512_AVX2(float buf[512]);
void rfft512_cmadd_1X2_AVX2(const float src[512], const float coef0[512], const float coef1[512], float dst0[512], float dst1[512]);
void convertInput_AVX2(int16_t* src, float *dst[4], float gain, int numFrames);
void rotate_3x3_AVX2(float* buf[4], const float m0[3][3], const float m1[3][3], const float* win, int numFrames);
void rotate_4x4_AVX2(float* buf[4], const float m0[4][4], const float m1[4][4], const float* win, int numFrames);
static void rfft512(float buf[512]) {
static auto f = cpuSupportsAVX2() ? rfft512_AVX2 : rfft512_ref;
@ -954,8 +961,8 @@ static void convertInput(int16_t* src, float *dst[4], float gain, int numFrames)
(*f)(src, dst, gain, numFrames); // dispatch
}
static void rotate_3x3(float* buf[4], const float m0[3][3], const float m1[3][3], const float* win, int numFrames) {
static auto f = cpuSupportsAVX2() ? rotate_3x3_AVX2 : rotate_3x3_ref;
static void rotate_4x4(float* buf[4], const float m0[4][4], const float m1[4][4], const float* win, int numFrames) {
static auto f = cpuSupportsAVX2() ? rotate_4x4_AVX2 : rotate_4x4_ref;
(*f)(buf, m0, m1, win, numFrames); // dispatch
}
@ -965,7 +972,7 @@ static auto& rfft512 = rfft512_ref;
static auto& rifft512 = rifft512_ref;
static auto& rfft512_cmadd_1X2 = rfft512_cmadd_1X2_ref;
static auto& convertInput = convertInput_ref;
static auto& rotate_3x3 = rotate_3x3_ref;
static auto& rotate_4x4 = rotate_4x4_ref;
#endif
@ -1007,8 +1014,8 @@ ALIGN32 static const float crossfadeTable[FOA_BLOCK] = {
0.0020975362f, 0.0015413331f, 0.0010705384f, 0.0006852326f, 0.0003854819f, 0.0001713375f, 0.0000428362f, 0.0000000000f,
};
// convert quaternion to a column-major 3x3 rotation matrix
static void quatToMatrix_3x3(float w, float x, float y, float z, float m[3][3]) {
// convert quaternion to a column-major 4x4 rotation matrix
static void quatToMatrix_4x4(float w, float x, float y, float z, float m[4][4]) {
float xx = x * (x + x);
float xy = x * (y + y);
@ -1022,17 +1029,33 @@ static void quatToMatrix_3x3(float w, float x, float y, float z, float m[3][3])
float wy = w * (y + y);
float wz = w * (z + z);
m[0][0] = 1.0f - (yy + zz);
m[0][1] = xy - wz;
m[0][2] = xz + wy;
m[0][0] = 1.0f;
m[0][1] = 0.0f;
m[0][2] = 0.0f;
m[0][3] = 0.0f;
m[1][0] = xy + wz;
m[1][1] = 1.0f - (xx + zz);
m[1][2] = yz - wx;
m[1][0] = 0.0f;
m[1][1] = 1.0f - (yy + zz);
m[1][2] = xy - wz;
m[1][3] = xz + wy;
m[2][0] = xz - wy;
m[2][1] = yz + wx;
m[2][2] = 1.0f - (xx + yy);
m[2][0] = 0.0f;
m[2][1] = xy + wz;
m[2][2] = 1.0f - (xx + zz);
m[2][3] = yz - wx;
m[3][0] = 0.0f;
m[3][1] = xz - wy;
m[3][2] = yz + wx;
m[3][3] = 1.0f - (xx + yy);
}
static void scaleMatrix_4x4(float scale, float m[4][4]) {
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
m[i][j] *= scale;
}
}
}
// Ambisonic to binaural render
@ -1047,18 +1070,26 @@ void AudioFOA::render(int16_t* input, float* output, int index, float qw, float
ALIGN32 float inBuffer[4][FOA_BLOCK]; // deinterleaved input buffers
float* in[4] = { inBuffer[0], inBuffer[1], inBuffer[2], inBuffer[3] };
float rotation[3][3];
float rotation[4][4];
// convert input to deinterleaved float
convertInput(input, in, FOA_GAIN * gain, FOA_BLOCK);
convertInput(input, in, FOA_GAIN, FOA_BLOCK);
// convert quaternion to 3x3 rotation
quatToMatrix_3x3(qw, qx, qy, qz, rotation);
// convert quaternion to 4x4 rotation
quatToMatrix_4x4(qw, qx, qy, qz, rotation);
// rotate the soundfield
rotate_3x3(in, _rotationState, rotation, crossfadeTable, FOA_BLOCK);
// apply gain as uniform scale
scaleMatrix_4x4(gain, rotation);
// rotation history update
// disable interpolation from reset state
if (_resetState) {
memcpy(_rotationState, rotation, sizeof(_rotationState));
}
// rotate and scale the soundfield
rotate_4x4(in, _rotationState, rotation, crossfadeTable, FOA_BLOCK);
// new parameters become old
memcpy(_rotationState, rotation, sizeof(_rotationState));
//
@ -1093,4 +1124,6 @@ void AudioFOA::render(int16_t* input, float* output, int index, float qw, float
output[2*i+0] += accBuffer[0][i + FOA_OVERLAP];
output[2*i+1] += accBuffer[1][i + FOA_OVERLAP];
}
_resetState = false;
}

View file

@ -28,12 +28,7 @@ static_assert((FOA_BLOCK + FOA_OVERLAP) == FOA_NFFT, "FFT convolution requires L
class AudioFOA {
public:
AudioFOA() {
// identity matrix
_rotationState[0][0] = 1.0f;
_rotationState[1][1] = 1.0f;
_rotationState[2][2] = 1.0f;
};
AudioFOA() {};
//
// input: interleaved First-Order Ambisonic source
@ -55,8 +50,10 @@ private:
// input history, for overlap-save
float _fftState[4][FOA_OVERLAP] = {};
// orientation history
float _rotationState[3][3] = {};
// orientation and gain history
float _rotationState[4][4] = {};
bool _resetState = true;
};
#endif // AudioFOA_h

View file

@ -750,6 +750,43 @@ static void interpolate(const float* src0, const float* src1, float* dst, float
#endif
// apply gain crossfade with accumulation (interleaved)
static void gainfade_1x2(int16_t* src, float* dst, const float* win, float gain0, float gain1, int numFrames) {
gain0 *= (1/32768.0f); // int16_t to float
gain1 *= (1/32768.0f);
for (int i = 0; i < numFrames; i++) {
float frac = win[i];
float gain = gain1 + frac * (gain0 - gain1);
float x0 = (float)src[i] * gain;
dst[2*i+0] += x0;
dst[2*i+1] += x0;
}
}
// apply gain crossfade with accumulation (interleaved)
static void gainfade_2x2(int16_t* src, float* dst, const float* win, float gain0, float gain1, int numFrames) {
gain0 *= (1/32768.0f); // int16_t to float
gain1 *= (1/32768.0f);
for (int i = 0; i < numFrames; i++) {
float frac = win[i];
float gain = gain1 + frac * (gain0 - gain1);
float x0 = (float)src[2*i+0] * gain;
float x1 = (float)src[2*i+1] * gain;
dst[2*i+0] += x0;
dst[2*i+1] += x1;
}
}
// design a 2nd order Thiran allpass
static void ThiranBiquad(float f, float& b0, float& b1, float& b2, float& a1, float& a2) {
@ -1104,6 +1141,13 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
// apply global and local gain adjustment
gain *= _gainAdjust;
// disable interpolation from reset state
if (_resetState) {
_azimuthState = azimuth;
_distanceState = distance;
_gainState = gain;
}
// to avoid polluting the cache, old filters are recomputed instead of stored
setFilters(firCoef, bqCoef, delay, index, _azimuthState, _distanceState, _gainState, L0);
@ -1175,3 +1219,45 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
_resetState = false;
}
void AudioHRTF::mixMono(int16_t* input, float* output, float gain, int numFrames) {
assert(numFrames == HRTF_BLOCK);
// apply global and local gain adjustment
gain *= _gainAdjust;
// disable interpolation from reset state
if (_resetState) {
_gainState = gain;
}
// crossfade gain and accumulate
gainfade_1x2(input, output, crossfadeTable, _gainState, gain, HRTF_BLOCK);
// new parameters become old
_gainState = gain;
_resetState = false;
}
void AudioHRTF::mixStereo(int16_t* input, float* output, float gain, int numFrames) {
assert(numFrames == HRTF_BLOCK);
// apply global and local gain adjustment
gain *= _gainAdjust;
// disable interpolation from reset state
if (_resetState) {
_gainState = gain;
}
// crossfade gain and accumulate
gainfade_2x2(input, output, crossfadeTable, _gainState, gain, HRTF_BLOCK);
// new parameters become old
_gainState = gain;
_resetState = false;
}

View file

@ -50,6 +50,12 @@ public:
//
void render(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
//
// Non-spatialized direct mix (accumulates into existing output)
//
void mixMono(int16_t* input, float* output, float gain, int numFrames);
void mixStereo(int16_t* input, float* output, float gain, int numFrames);
//
// Fast path when input is known to be silent and state as been flushed
//

View file

@ -1289,14 +1289,16 @@ void convertInput_AVX2(int16_t* src, float *dst[4], float gain, int numFrames) {
#endif
// in-place rotation of the soundfield
// crossfade between old and new rotation, to prevent artifacts
void rotate_3x3_AVX2(float* buf[4], const float m0[3][3], const float m1[3][3], const float* win, int numFrames) {
// in-place rotation and scaling of the soundfield
// crossfade between old and new matrix, to prevent artifacts
void rotate_4x4_AVX2(float* buf[4], const float m0[4][4], const float m1[4][4], const float* win, int numFrames) {
const float md[3][3] = {
{ m0[0][0] - m1[0][0], m0[0][1] - m1[0][1], m0[0][2] - m1[0][2] },
{ m0[1][0] - m1[1][0], m0[1][1] - m1[1][1], m0[1][2] - m1[1][2] },
{ m0[2][0] - m1[2][0], m0[2][1] - m1[2][1], m0[2][2] - m1[2][2] },
// matrix difference
const float md[4][4] = {
{ m0[0][0] - m1[0][0], m0[0][1] - m1[0][1], m0[0][2] - m1[0][2], m0[0][3] - m1[0][3] },
{ m0[1][0] - m1[1][0], m0[1][1] - m1[1][1], m0[1][2] - m1[1][2], m0[1][3] - m1[1][3] },
{ m0[2][0] - m1[2][0], m0[2][1] - m1[2][1], m0[2][2] - m1[2][2], m0[2][3] - m1[2][3] },
{ m0[3][0] - m1[3][0], m0[3][1] - m1[3][1], m0[3][2] - m1[3][2], m0[3][3] - m1[3][3] },
};
assert(numFrames % 8 == 0);
@ -1307,30 +1309,35 @@ void rotate_3x3_AVX2(float* buf[4], const float m0[3][3], const float m1[3][3],
// interpolate the matrix
__m256 m00 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[0][0]), _mm256_broadcast_ss(&m1[0][0]));
__m256 m10 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[1][0]), _mm256_broadcast_ss(&m1[1][0]));
__m256 m20 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[2][0]), _mm256_broadcast_ss(&m1[2][0]));
__m256 m01 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[0][1]), _mm256_broadcast_ss(&m1[0][1]));
__m256 m11 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[1][1]), _mm256_broadcast_ss(&m1[1][1]));
__m256 m21 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[2][1]), _mm256_broadcast_ss(&m1[2][1]));
__m256 m31 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[3][1]), _mm256_broadcast_ss(&m1[3][1]));
__m256 m02 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[0][2]), _mm256_broadcast_ss(&m1[0][2]));
__m256 m12 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[1][2]), _mm256_broadcast_ss(&m1[1][2]));
__m256 m22 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[2][2]), _mm256_broadcast_ss(&m1[2][2]));
__m256 m32 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[3][2]), _mm256_broadcast_ss(&m1[3][2]));
__m256 m13 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[1][3]), _mm256_broadcast_ss(&m1[1][3]));
__m256 m23 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[2][3]), _mm256_broadcast_ss(&m1[2][3]));
__m256 m33 = _mm256_fmadd_ps(frac, _mm256_broadcast_ss(&md[3][3]), _mm256_broadcast_ss(&m1[3][3]));
// matrix multiply
__m256 x = _mm256_mul_ps(m00, _mm256_loadu_ps(&buf[1][i]));
__m256 y = _mm256_mul_ps(m10, _mm256_loadu_ps(&buf[1][i]));
__m256 z = _mm256_mul_ps(m20, _mm256_loadu_ps(&buf[1][i]));
__m256 w = _mm256_mul_ps(m00, _mm256_loadu_ps(&buf[0][i]));
x = _mm256_fmadd_ps(m01, _mm256_loadu_ps(&buf[2][i]), x);
y = _mm256_fmadd_ps(m11, _mm256_loadu_ps(&buf[2][i]), y);
z = _mm256_fmadd_ps(m21, _mm256_loadu_ps(&buf[2][i]), z);
__m256 x = _mm256_mul_ps(m11, _mm256_loadu_ps(&buf[1][i]));
__m256 y = _mm256_mul_ps(m21, _mm256_loadu_ps(&buf[1][i]));
__m256 z = _mm256_mul_ps(m31, _mm256_loadu_ps(&buf[1][i]));
x = _mm256_fmadd_ps(m02, _mm256_loadu_ps(&buf[3][i]), x);
y = _mm256_fmadd_ps(m12, _mm256_loadu_ps(&buf[3][i]), y);
z = _mm256_fmadd_ps(m22, _mm256_loadu_ps(&buf[3][i]), z);
x = _mm256_fmadd_ps(m12, _mm256_loadu_ps(&buf[2][i]), x);
y = _mm256_fmadd_ps(m22, _mm256_loadu_ps(&buf[2][i]), y);
z = _mm256_fmadd_ps(m32, _mm256_loadu_ps(&buf[2][i]), z);
x = _mm256_fmadd_ps(m13, _mm256_loadu_ps(&buf[3][i]), x);
y = _mm256_fmadd_ps(m23, _mm256_loadu_ps(&buf[3][i]), y);
z = _mm256_fmadd_ps(m33, _mm256_loadu_ps(&buf[3][i]), z);
_mm256_storeu_ps(&buf[0][i], w);
_mm256_storeu_ps(&buf[1][i], x);
_mm256_storeu_ps(&buf[2][i], y);
_mm256_storeu_ps(&buf[3][i], z);

View file

@ -509,6 +509,26 @@ void Avatar::relayJointDataToChildren() {
_reconstructSoftEntitiesJointMap = false;
}
/**jsdoc
* An avatar has different types of data simulated at different rates, in Hz.
*
* <table>
* <thead>
* <tr><th>Rate Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"avatar" or ""</code></td><td>The rate at which the avatar is updated even if not in view.</td></tr>
* <tr><td><code>"avatarInView"</code></td><td>The rate at which the avatar is updated if in view.</td></tr>
* <tr><td><code>"skeletonModel"</code></td><td>The rate at which the skeleton model is being updated, even if there are no
* joint data available.</td></tr>
* <tr><td><code>"jointData"</code></td><td>The rate at which joint data are being updated.</td></tr>
* <tr><td><code>""</code></td><td>When no rate name is specified, the <code>"avatar"</code> update rate is
* provided.</td></tr>
* </tbody>
* </table>
*
* @typedef {string} AvatarSimulationRate
*/
float Avatar::getSimulationRate(const QString& rateName) const {
if (rateName == "") {
return _simulationRate.rate();

View file

@ -501,8 +501,8 @@ public:
/**jsdoc
* @function MyAvatar.getSimulationRate
* @param {string} [rateName=""] - Rate name.
* @returns {number} Simulation rate.
* @param {AvatarSimulationRate} [rateName=""] - Rate name.
* @returns {number} Simulation rate in Hz.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE float getSimulationRate(const QString& rateName = QString("")) const;

View file

@ -270,28 +270,19 @@ bool SkeletonModel::getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3&
getJointPosition(_rig.indexOfJoint("RightEye"), secondEyePosition)) {
return true;
}
// no eye joints; try to estimate based on head/neck joints
glm::vec3 neckPosition, headPosition;
if (getJointPosition(_rig.indexOfJoint("Neck"), neckPosition) &&
getJointPosition(_rig.indexOfJoint("Head"), headPosition)) {
const float EYE_PROPORTION = 0.6f;
glm::vec3 baseEyePosition = glm::mix(neckPosition, headPosition, EYE_PROPORTION);
int headJointIndex = _rig.indexOfJoint("Head");
glm::vec3 headPosition;
if (getJointPosition(headJointIndex, headPosition)) {
// get head joint rotation.
glm::quat headRotation;
getJointRotation(_rig.indexOfJoint("Head"), headRotation);
const float EYES_FORWARD = 0.25f;
const float EYE_SEPARATION = 0.1f;
float headHeight = glm::distance(neckPosition, headPosition);
firstEyePosition = baseEyePosition + headRotation * glm::vec3(EYE_SEPARATION, 0.0f, EYES_FORWARD) * headHeight;
secondEyePosition = baseEyePosition + headRotation * glm::vec3(-EYE_SEPARATION, 0.0f, EYES_FORWARD) * headHeight;
return true;
} else if (getJointPosition(_rig.indexOfJoint("Head"), headPosition)) {
glm::vec3 baseEyePosition = headPosition;
glm::quat headRotation;
getJointRotation(_rig.indexOfJoint("Head"), headRotation);
const float EYES_FORWARD_HEAD_ONLY = 0.30f;
const float EYE_SEPARATION = 0.1f;
firstEyePosition = baseEyePosition + headRotation * glm::vec3(EYE_SEPARATION, 0.0f, EYES_FORWARD_HEAD_ONLY);
secondEyePosition = baseEyePosition + headRotation * glm::vec3(-EYE_SEPARATION, 0.0f, EYES_FORWARD_HEAD_ONLY);
getJointRotation(headJointIndex, headRotation);
float heightRatio = _rig.getUnscaledEyeHeight() / DEFAULT_AVATAR_EYE_HEIGHT;
glm::vec3 ipdOffset = glm::vec3(DEFAULT_AVATAR_IPD / 2.0f, 0.0f, 0.0f);
firstEyePosition = headPosition + headRotation * heightRatio * (DEFAULT_AVATAR_HEAD_TO_MIDDLE_EYE_OFFSET + ipdOffset);
secondEyePosition = headPosition + headRotation * heightRatio * (DEFAULT_AVATAR_HEAD_TO_MIDDLE_EYE_OFFSET - ipdOffset);
return true;
}
return false;

View file

@ -1545,7 +1545,6 @@ float AvatarData::getDataRate(const QString& rateName) const {
* <tr><th>Rate Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"globalPosition"</code></td><td>Global position.</td></tr>
* <tr><td><code>"localPosition"</code></td><td>Local position.</td></tr>
* <tr><td><code>"avatarBoundingBox"</code></td><td>Avatar bounding box.</td></tr>
@ -1559,7 +1558,6 @@ float AvatarData::getDataRate(const QString& rateName) const {
* <tr><td><code>"faceTracker"</code></td><td>Face tracker data.</td></tr>
* <tr><td><code>"jointData"</code></td><td>Joint data.</td></tr>
* <tr><td><code>"farGrabJointData"</code></td><td>Far grab joint data.</td></tr>
* <tr><td><code>""</code></td><td>When no rate name is specified, the overall update rate is provided.</td></tr>
* </tbody>
* </table>
@ -1721,7 +1719,6 @@ glm::vec3 AvatarData::getJointTranslation(const QString& name) const {
// on another thread in between the call to getJointIndex and getJointTranslation
// return getJointTranslation(getJointIndex(name));
return readLockWithNamedJointIndex<glm::vec3>(name, [this](int index) {
return _jointData.at(index).translation;
return getJointTranslation(index);
});
}
@ -1809,8 +1806,8 @@ glm::quat AvatarData::getJointRotation(const QString& name) const {
// Can't do this, not thread safe
// return getJointRotation(getJointIndex(name));
return readLockWithNamedJointIndex<glm::quat>(name, [&](int index) {
return _jointData.at(index).rotation;
return readLockWithNamedJointIndex<glm::quat>(name, [this](int index) {
return getJointRotation(index);
});
}
@ -2905,6 +2902,20 @@ glm::mat4 AvatarData::getControllerRightHandMatrix() const {
return _controllerRightHandMatrixCache.get();
}
/**jsdoc
* Information about a ray-to-avatar intersection.
* @typedef {object} RayToAvatarIntersectionResult
* @property {boolean} intersects - <code>true</code> if an avatar is intersected, <code>false</code> if it isn't.
* @property {string} avatarID - The ID of the avatar that is intersected.
* @property {number} distance - The distance from the ray origin to the intersection.
 * @property {string} face - The name of the box face that is intersected; <code>"UNKNOWN_FACE"</code> if a mesh was picked
 *     against.
* @property {Vec3} intersection - The ray intersection point in world coordinates.
* @property {Vec3} surfaceNormal - The surface normal at the intersection point.
* @property {number} jointIndex - The index of the joint intersected.
* @property {SubmeshIntersection} extraInfo - Extra information on the mesh intersected if mesh was picked against,
* <code>{}</code> if it wasn't.
*/
QScriptValue RayToAvatarIntersectionResultToScriptValue(QScriptEngine* engine, const RayToAvatarIntersectionResult& value) {
QScriptValue obj = engine->newObject();
obj.setProperty("intersects", value.intersects);

View file

@ -479,7 +479,8 @@ class AvatarData : public QObject, public SpatiallyNestable {
* avatar. <em>Read-only.</em>
* @property {number} sensorToWorldScale - The scale that transforms dimensions in the user's real world to the avatar's
* size in the virtual world. <em>Read-only.</em>
* @property {boolean} hasPriority - is the avatar in a Hero zone? <em>Read-only.</em>
* @property {boolean} hasPriority - <code>true</code> if the avatar is in a "hero" zone, <code>false</code> if it isn't.
* <em>Read-only.</em>
*/
Q_PROPERTY(glm::vec3 position READ getWorldPosition WRITE setPositionViaScript)
Q_PROPERTY(float scale READ getDomainLimitedScale WRITE setTargetScale)
@ -1751,14 +1752,11 @@ protected:
template <typename T, typename F>
T readLockWithNamedJointIndex(const QString& name, const T& defaultValue, F f) const {
int index = getFauxJointIndex(name);
QReadLocker readLock(&_jointDataLock);
// The first conditional is superfluous, but illustrative
if (index == -1 || index < _jointData.size()) {
int index = getJointIndex(name);
if (index == -1) {
return defaultValue;
}
return f(index);
}
@ -1769,8 +1767,8 @@ protected:
template <typename F>
void writeLockWithNamedJointIndex(const QString& name, F f) {
int index = getFauxJointIndex(name);
QWriteLocker writeLock(&_jointDataLock);
int index = getJointIndex(name);
if (index == -1) {
return;
}

View file

@ -36,8 +36,10 @@ const int CLIENT_TO_AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 50;
const quint64 MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS = USECS_PER_SECOND / CLIENT_TO_AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;
/**jsdoc
* <strong>Note:</strong> An <code>AvatarList</code> API is also provided for Interface and client entity scripts: it is a
* synonym for the {@link AvatarManager} API.
* The <code>AvatarList</code> API provides information about avatars within the current domain.
*
* <p><strong>Warning:</strong> An API named "<code>AvatarList</code>" is also provided for Interface, client entity, and avatar
* scripts, however, it is a synonym for the {@link AvatarManager} API.</p>
*
* @namespace AvatarList
*
@ -78,23 +80,37 @@ public:
// Currently, your own avatar will be included as the null avatar id.
/**jsdoc
* Gets the IDs of all avatars in the domain.
* <p><strong>Warning:</strong> If the AC script is acting as an avatar (i.e., <code>Agent.isAvatar == true</code>) the
* avatar's ID is NOT included in results.</p>
* @function AvatarList.getAvatarIdentifiers
* @returns {Uuid[]}
* @returns {Uuid[]} The IDs of all avatars in the domain (excluding AC script's avatar).
 * @example <caption>Report the IDs of all avatars within the domain.</caption>
* var avatars = AvatarList.getAvatarIdentifiers();
* print("Avatars in the domain: " + JSON.stringify(avatars));
*/
Q_INVOKABLE QVector<QUuid> getAvatarIdentifiers();
/**jsdoc
* Gets the IDs of all avatars within a specified distance from a point.
* <p><strong>Warning:</strong> If the AC script is acting as an avatar (i.e., <code>Agent.isAvatar == true</code>) the
* avatar's ID is NOT included in results.</p>
* @function AvatarList.getAvatarsInRange
* @param {Vec3} position
* @param {number} range
* @returns {Uuid[]}
* @param {Vec3} position - The point about which the search is performed.
* @param {number} range - The search radius.
* @returns {Uuid[]} The IDs of all avatars within the search distance from the position (excluding AC script's avatar).
* @example <caption>Report the IDs of all avatars within 10m of the origin.</caption>
* var RANGE = 10;
* var avatars = AvatarList.getAvatarsInRange(Vec3.ZERO, RANGE);
* print("Avatars near the origin: " + JSON.stringify(avatars));
*/
Q_INVOKABLE QVector<QUuid> getAvatarsInRange(const glm::vec3& position, float rangeMeters) const;
/**jsdoc
* Gets information about an avatar.
* @function AvatarList.getAvatar
* @param {Uuid} avatarID
* @returns {AvatarData}
* @param {Uuid} avatarID - The ID of the avatar.
* @returns {AvatarData} Information about the avatar.
*/
// Null/Default-constructed QUuids will return MyAvatar
Q_INVOKABLE virtual ScriptAvatarData* getAvatar(QUuid avatarID) { return new ScriptAvatarData(getAvatarBySessionID(avatarID)); }
@ -110,34 +126,57 @@ public:
signals:
/**jsdoc
* Triggered when an avatar arrives in the domain.
* @function AvatarList.avatarAddedEvent
* @param {Uuid} sessionUUID
* @param {Uuid} sessionUUID - The ID of the avatar that arrived in the domain.
* @returns {Signal}
* @example <caption>Report when an avatar arrives in the domain.</caption>
* AvatarManager.avatarAddedEvent.connect(function (sessionID) {
* print("Avatar arrived: " + sessionID);
* });
*
* // Note: If using from the AvatarList API, replace "AvatarManager" with "AvatarList".
*/
void avatarAddedEvent(const QUuid& sessionUUID);
/**jsdoc
* Triggered when an avatar leaves the domain.
* @function AvatarList.avatarRemovedEvent
* @param {Uuid} sessionUUID
* @param {Uuid} sessionUUID - The ID of the avatar that left the domain.
* @returns {Signal}
* @example <caption>Report when an avatar leaves the domain.</caption>
* AvatarManager.avatarRemovedEvent.connect(function (sessionID) {
* print("Avatar left: " + sessionID);
* });
*
* // Note: If using from the AvatarList API, replace "AvatarManager" with "AvatarList".
*/
void avatarRemovedEvent(const QUuid& sessionUUID);
/**jsdoc
* Triggered when an avatar's session ID changes.
* @function AvatarList.avatarSessionChangedEvent
* @param {Uuid} sessionUUID
* @param {Uuid} oldSessionUUID
* @param {Uuid} newSessionUUID - The new session ID.
* @param {Uuid} oldSessionUUID - The old session ID.
* @returns {Signal}
* @example <caption>Report when an avatar's session ID changes.</caption>
* AvatarManager.avatarSessionChangedEvent.connect(function (newSessionID, oldSessionID) {
* print("Avatar session ID changed from " + oldSessionID + " to " + newSessionID);
* });
*
* // Note: If using from the AvatarList API, replace "AvatarManager" with "AvatarList".
*/
void avatarSessionChangedEvent(const QUuid& sessionUUID,const QUuid& oldUUID);
public slots:
/**jsdoc
* Checks whether there is an avatar within a specified distance from a point.
* @function AvatarList.isAvatarInRange
* @param {string} position
* @param {string} range
* @returns {boolean}
* @param {string} position - The test position.
* @param {string} range - The test distance.
* @returns {boolean} <code>true</code> if there's an avatar within the specified distance of the point, <code>false</code>
* if not.
*/
bool isAvatarInRange(const glm::vec3 & position, const float range);
@ -145,36 +184,41 @@ protected slots:
/**jsdoc
* @function AvatarList.sessionUUIDChanged
* @param {Uuid} sessionUUID
* @param {Uuid} oldSessionUUID
* @param {Uuid} sessionUUID - New session ID.
* @param {Uuid} oldSessionUUID - Old session ID.
* @deprecated This function is deprecated and will be removed.
*/
void sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID);
/**jsdoc
* @function AvatarList.processAvatarDataPacket
* @param {} message
* @param {} sendingNode
* @param {object} message - Message.
* @param {object} sendingNode - Sending node.
* @deprecated This function is deprecated and will be removed.
*/
void processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
/**jsdoc
* @function AvatarList.processAvatarIdentityPacket
* @param {} message
* @param {} sendingNode
* @param {object} message - Message.
* @param {object} sendingNode - Sending node.
* @deprecated This function is deprecated and will be removed.
*/
void processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
/**jsdoc
* @function AvatarList.processBulkAvatarTraits
* @param {} message
* @param {} sendingNode
* @param {object} message - Message.
* @param {object} sendingNode - Sending node.
* @deprecated This function is deprecated and will be removed.
*/
void processBulkAvatarTraits(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
/**jsdoc
* @function AvatarList.processKillAvatar
* @param {} message
* @param {} sendingNode
* @param {object} message - Message.
* @param {object} sendingNode - Sending node.
* @deprecated This function is deprecated and will be removed.
*/
void processKillAvatar(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);

View file

@ -16,6 +16,52 @@
#include "AvatarData.h"
/**jsdoc
* Information about an avatar.
* @typedef {object} AvatarData
* @property {Vec3} position - The avatar's position.
* @property {number} scale - The target scale of the avatar without any restrictions on permissible values imposed by the
* domain.
* @property {Vec3} handPosition - A user-defined hand position, in world coordinates. The position moves with the avatar but
* is otherwise not used or changed by Interface.
* @property {number} bodyPitch - The pitch of the avatar's body, in degrees.
* @property {number} bodyYaw - The yaw of the avatar's body, in degrees.
* @property {number} bodyRoll - The roll of the avatar's body, in degrees.
* @property {Quat} orientation - The orientation of the avatar's body.
* @property {Quat} headOrientation - The orientation of the avatar's head.
* @property {number} headPitch - The pitch of the avatar's head relative to the body, in degrees.
* @property {number} headYaw - The yaw of the avatar's head relative to the body, in degrees.
* @property {number} headRoll - The roll of the avatar's head relative to the body, in degrees.
*
* @property {Vec3} velocity - The linear velocity of the avatar.
* @property {Vec3} angularVelocity - The angular velocity of the avatar.
*
* @property {Uuid} sessionUUID - The avatar's session ID.
* @property {string} displayName - The avatar's display name.
* @property {string} sessionDisplayName - The avatar's display name, sanitized and versioned, as defined by the avatar mixer.
* It is unique among all avatars present in the domain at the time.
* @property {boolean} isReplicated - <strong>Deprecated.</strong>
* @property {boolean} lookAtSnappingEnabled - <code>true</code> if the avatar's eyes snap to look at another avatar's eyes
* when the other avatar is in the line of sight and also has <code>lookAtSnappingEnabled == true</code>.
*
* @property {string} skeletonModelURL - The avatar's FST file.
* @property {AttachmentData[]} attachmentData - Information on the avatar's attachments.<br />
* <strong>Deprecated:</strong> Use avatar entities instead.
* @property {string[]} jointNames - The list of joints in the current avatar model.
*
* @property {number} audioLoudness - The instantaneous loudness of the audio input that the avatar is injecting into the
* domain.
* @property {number} audioAverageLoudness - The rolling average loudness of the audio input that the avatar is injecting into
* the domain.
*
* @property {Mat4} sensorToWorldMatrix - The scale, rotation, and translation transform from the user's real world to the
* avatar's size, orientation, and position in the virtual world.
* @property {Mat4} controllerLeftHandMatrix - The rotation and translation of the left hand controller relative to the avatar.
* @property {Mat4} controllerRightHandMatrix - The rotation and translation of the right hand controller relative to the
* avatar.
*
* @property {boolean} hasPriority - <code>true</code> if the avatar is in a "hero" zone, <code>false</code> if it isn't.
*/
class ScriptAvatarData : public QObject {
Q_OBJECT

View file

@ -144,7 +144,12 @@ void MaterialBaker::processMaterial() {
connect(textureBaker.data(), &TextureBaker::finished, this, &MaterialBaker::handleFinishedTextureBaker);
_textureBakers.insert(textureKey, textureBaker);
textureBaker->moveToThread(_getNextOvenWorkerThreadOperator ? _getNextOvenWorkerThreadOperator() : thread());
QMetaObject::invokeMethod(textureBaker.data(), "bake");
// By default, Qt will invoke this bake immediately if the TextureBaker is on the same worker thread as this MaterialBaker.
// We don't want that, because threads may be waiting for work while this thread is stuck processing a TextureBaker.
// On top of that, _textureBakers isn't fully populated.
// So, use Qt::QueuedConnection.
// TODO: Better thread utilization at the top level, not just the MaterialBaker level
QMetaObject::invokeMethod(textureBaker.data(), "bake", Qt::QueuedConnection);
}
_materialsNeedingRewrite.insert(textureKey, networkMaterial.second);
} else {

View file

@ -285,7 +285,7 @@ void ModelBaker::handleFinishedMaterialBaker() {
QJsonArray materialMapping;
for (auto material : _hfmModel->materials) {
QJsonObject json;
json["mat::" + material.name] = relativeBakedMaterialURL + "?" + material.name;
json["mat::" + material.name] = relativeBakedMaterialURL + "#" + material.name;
materialMapping.push_back(json);
}

View file

@ -109,7 +109,6 @@ public:
Q_ASSERT(_context);
_context->makeCurrent();
CHECK_GL_ERROR();
_context->doneCurrent();
while (!_shutdown) {
if (_pendingOtherThreadOperation) {
PROFILE_RANGE(render, "MainThreadOp")
@ -129,6 +128,7 @@ public:
Lock lock(_mutex);
_condition.wait(lock, [&] { return _finishedOtherThreadOperation; });
}
_context->makeCurrent();
}
// Check for a new display plugin
@ -140,18 +140,16 @@ public:
if (newPlugin != currentPlugin) {
// Deactivate the old plugin
if (currentPlugin != nullptr) {
_context->makeCurrent();
currentPlugin->uncustomizeContext();
CHECK_GL_ERROR();
_context->doneCurrent();
// Force completion of all pending GL commands
glFinish();
}
if (newPlugin) {
bool hasVsync = true;
QThread::setPriority(newPlugin->getPresentPriority());
bool wantVsync = newPlugin->wantVsync();
_context->makeCurrent();
CHECK_GL_ERROR();
#if defined(Q_OS_MAC)
newPlugin->swapBuffers();
#endif
@ -163,7 +161,8 @@ public:
newPlugin->setVsyncEnabled(hasVsync);
newPlugin->customizeContext();
CHECK_GL_ERROR();
_context->doneCurrent();
// Force completion of all pending GL commands
glFinish();
}
currentPlugin = newPlugin;
_newPluginQueue.pop();
@ -180,7 +179,6 @@ public:
}
// Execute the frame and present it to the display device.
_context->makeCurrent();
{
PROFILE_RANGE(render, "PluginPresent")
gl::globalLock();
@ -188,9 +186,9 @@ public:
gl::globalRelease(false);
CHECK_GL_ERROR();
}
_context->doneCurrent();
}
_context->doneCurrent();
Lock lock(_mutex);
_context->moveToThread(qApp->thread());
_shutdown = false;

View file

@ -166,7 +166,10 @@ ShapeKey EntityRenderer::getShapeKey() {
}
render::hifi::Tag EntityRenderer::getTagMask() const {
return _isVisibleInSecondaryCamera ? render::hifi::TAG_ALL_VIEWS : render::hifi::TAG_MAIN_VIEW;
render::hifi::Tag mask = render::hifi::TAG_NONE;
mask = (render::hifi::Tag)(mask | (!_cauterized * render::hifi::TAG_MAIN_VIEW));
mask = (render::hifi::Tag)(mask | (_isVisibleInSecondaryCamera * render::hifi::TAG_SECONDARY_VIEW));
return mask;
}
render::hifi::Layer EntityRenderer::getHifiRenderLayer() const {
@ -215,12 +218,7 @@ void EntityRenderer::render(RenderArgs* args) {
emit requestRenderUpdate();
}
auto& renderMode = args->_renderMode;
bool cauterized = (renderMode != RenderArgs::RenderMode::SHADOW_RENDER_MODE &&
renderMode != RenderArgs::RenderMode::SECONDARY_CAMERA_RENDER_MODE &&
_cauterized);
if (_visible && !cauterized) {
if (_visible && (args->_renderMode != RenderArgs::RenderMode::DEFAULT_RENDER_MODE || !_cauterized)) {
doRender(args);
}
}

View file

@ -121,7 +121,11 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
QString materialURL = entity->getMaterialURL();
if (materialURL != _materialURL) {
_materialURL = materialURL;
if (_materialURL.contains("?")) {
if (_materialURL.contains("#")) {
auto split = _materialURL.split("#");
newCurrentMaterialName = split.last().toStdString();
} else if (_materialURL.contains("?")) {
qDebug() << "DEPRECATED: Use # instead of ? for material URLS:" << _materialURL;
auto split = _materialURL.split("?");
newCurrentMaterialName = split.last().toStdString();
}

View file

@ -1066,13 +1066,6 @@ ItemKey ModelEntityRenderer::getKey() {
return _itemKey;
}
render::hifi::Tag ModelEntityRenderer::getTagMask() const {
// Default behavior for model is to not be visible in main view if cauterized (aka parented to the avatar's neck joint)
return _cauterized ?
(_isVisibleInSecondaryCamera ? render::hifi::TAG_SECONDARY_VIEW : render::hifi::TAG_NONE) :
Parent::getTagMask(); // calculate which views to be shown in
}
uint32_t ModelEntityRenderer::metaFetchMetaSubItems(ItemIDs& subItems) {
if (_model) {
auto metaSubItems = _model->fetchRenderItemIDs();
@ -1409,6 +1402,10 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
model->setVisibleInScene(_visible, scene);
}
if (model->isCauterized() != _cauterized) {
model->setCauterized(_cauterized, scene);
}
render::hifi::Tag tagMask = getTagMask();
if (model->getTagMask() != tagMask) {
model->setTagMask(tagMask, scene);

View file

@ -161,8 +161,6 @@ protected:
virtual void doRender(RenderArgs* args) override;
virtual void doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) override;
render::hifi::Tag getTagMask() const override;
void setIsVisibleInSecondaryCamera(bool value) override;
void setRenderLayer(RenderLayer value) override;
void setPrimitiveMode(PrimitiveMode value) override;

View file

@ -3007,6 +3007,26 @@ void EntityItem::setPrimitiveMode(PrimitiveMode value) {
}
}
bool EntityItem::getCauterized() const {
    // read the cauterized flag under the entity's read lock
    return resultWithReadLock<bool>([this] {
        return _cauterized;
    });
}
void EntityItem::setCauterized(bool value) {
    // update the flag under the write lock, recording whether it actually changed
    bool didChange = false;
    withWriteLock([&] {
        if (value != _cauterized) {
            _cauterized = value;
            didChange = true;
        }
    });

    // request a render update outside the lock, and only on a real change
    if (didChange) {
        emit requestRenderUpdate();
    }
}
bool EntityItem::getIgnorePickIntersection() const {
return resultWithReadLock<bool>([&] {
return _ignorePickIntersection;

View file

@ -303,6 +303,9 @@ public:
bool getCanCastShadow() const;
void setCanCastShadow(bool value);
void setCauterized(bool value);
bool getCauterized() const;
inline bool isVisible() const { return getVisible(); }
inline bool isInvisible() const { return !getVisible(); }
@ -530,9 +533,6 @@ public:
static QString _marketplacePublicKey;
static void retrieveMarketplacePublicKey();
void setCauterized(bool value) { _cauterized = value; }
bool getCauterized() const { return _cauterized; }
float getBoundingRadius() const { return _boundingRadius; }
void setSpaceIndex(int32_t index);
int32_t getSpaceIndex() const { return _spaceIndex; }

View file

@ -976,7 +976,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* by setting the <code>entityHostType</code> parameter in {@link Entities.addEntity} to <code>"avatar"</code>.
* Material entities render as non-scalable spheres if they don't have their parent set.
* @typedef {object} Entities.EntityProperties-Material
* @property {string} materialURL="" - URL to a {@link MaterialResource}. If you append <code>?name</code> to the URL, the
* @property {string} materialURL="" - URL to a {@link MaterialResource}. If you append <code>#name</code> to the URL, the
* material with that name in the {@link MaterialResource} will be applied to the entity. <br />
* Alternatively, set the property value to <code>"materialData"</code> to use the <code>materialData</code> property
* for the {@link MaterialResource} values.
@ -2630,11 +2630,11 @@ bool EntityItemProperties::getPropertyInfo(const QString& propertyName, EntityPr
ENTITY_ITEM_MIN_FRICTION, ENTITY_ITEM_MAX_FRICTION);
ADD_PROPERTY_TO_MAP(PROP_LIFETIME, Lifetime, lifetime, float);
ADD_PROPERTY_TO_MAP(PROP_COLLISIONLESS, Collisionless, collisionless, bool);
ADD_PROPERTY_TO_MAP(PROP_COLLISIONLESS, unused, ignoreForCollisions, unused); // legacy support
ADD_PROPERTY_TO_MAP(PROP_COLLISION_MASK, unused, collisionMask, unused);
ADD_PROPERTY_TO_MAP(PROP_COLLISION_MASK, unused, collidesWith, unused);
ADD_PROPERTY_TO_MAP(PROP_DYNAMIC, unused, collisionsWillMove, unused); // legacy support
ADD_PROPERTY_TO_MAP(PROP_DYNAMIC, unused, dynamic, unused);
ADD_PROPERTY_TO_MAP(PROP_COLLISIONLESS, unused, ignoreForCollisions, bool); // legacy support
ADD_PROPERTY_TO_MAP(PROP_COLLISION_MASK, unused, collisionMask, uint16_t);
ADD_PROPERTY_TO_MAP(PROP_COLLISION_MASK, unused, collidesWith, uint16_t);
ADD_PROPERTY_TO_MAP(PROP_DYNAMIC, unused, collisionsWillMove, bool); // legacy support
ADD_PROPERTY_TO_MAP(PROP_DYNAMIC, unused, dynamic, bool);
ADD_PROPERTY_TO_MAP(PROP_COLLISION_SOUND_URL, CollisionSoundURL, collisionSoundURL, QString);
ADD_PROPERTY_TO_MAP(PROP_ACTION_DATA, ActionData, actionData, QByteArray);

View file

@ -14,6 +14,9 @@
#include <stdint.h>
#include <limits>
#include <type_traits>
#include <glm/glm.hpp>
#include <glm/gtx/component_wise.hpp>
@ -85,6 +88,16 @@ struct EntityPropertyInfo {
QVariant maximum;
};
template <typename T>
EntityPropertyInfo makePropertyInfo(EntityPropertyList p, typename std::enable_if<!std::is_integral<T>::value>::type* = 0) {
return EntityPropertyInfo(p);
}
template <typename T>
EntityPropertyInfo makePropertyInfo(EntityPropertyList p, typename std::enable_if<std::is_integral<T>::value>::type* = 0) {
return EntityPropertyInfo(p, std::numeric_limits<T>::min(), std::numeric_limits<T>::max());
}
/// A collection of properties of an entity item used in the scripting API. Translates between the actual properties of an
/// entity and a JavaScript style hash/QScriptValue storing a set of properties. Used in scripting to set/get the complete
/// set of entity item properties via JavaScript hashes/QScriptValues

View file

@ -416,9 +416,10 @@ inline QRect QRect_convertFromScriptValue(const QScriptValue& v, bool& isValid)
T _##n; \
static T _static##N;
#define ADD_PROPERTY_TO_MAP(P, N, n, T) \
{ \
EntityPropertyInfo propertyInfo = EntityPropertyInfo(P); \
EntityPropertyInfo propertyInfo { makePropertyInfo<T>(P) }; \
_propertyInfos[#n] = propertyInfo; \
_enumsToPropertyStrings[P] = #n; \
}

View file

@ -148,9 +148,13 @@ bool EntityTreeElement::checkFilterSettings(const EntityItemPointer& entity, Pic
(!searchFilter.doesPickLocalEntities() && hostType == entity::HostType::LOCAL)) {
return false;
}
// We only check the collidable filters for non-local entities, because local entities are always collisionless
bool collidable = !entity->getCollisionless() && (entity->getShapeType() != SHAPE_TYPE_NONE);
// We only check the collidable filters for non-local entities, because local entities are always collisionless,
// but picks always include COLLIDABLE (see PickScriptingInterface::getPickFilter()), so if we were to respect
// the getCollisionless() property of Local entities then we would *never* intersect them in a pick.
// An unfortunate side effect of the following code is that Local entities are intersected even if the
// pick explicitly requested only COLLIDABLE entities (but, again, Local entities are always collisionless).
if (hostType != entity::HostType::LOCAL) {
bool collidable = !entity->getCollisionless() && (entity->getShapeType() != SHAPE_TYPE_NONE);
if ((collidable && !searchFilter.doesPickCollidable()) || (!collidable && !searchFilter.doesPickNonCollidable())) {
return false;
}

View file

@ -588,6 +588,8 @@ void LimitedNodeList::eraseAllNodes() {
foreach(const SharedNodePointer& killedNode, killedNodes) {
handleNodeKill(killedNode);
}
_delayedNodeAdds.clear();
}
void LimitedNodeList::reset() {
@ -755,7 +757,7 @@ void LimitedNodeList::delayNodeAdd(NewNodeInfo info) {
}
void LimitedNodeList::removeDelayedAdd(QUuid nodeUUID) {
auto it = std::find_if(_delayedNodeAdds.begin(), _delayedNodeAdds.end(), [&](auto info) {
auto it = std::find_if(_delayedNodeAdds.begin(), _delayedNodeAdds.end(), [&](const auto& info) {
return info.uuid == nodeUUID;
});
if (it != _delayedNodeAdds.end()) {
@ -764,7 +766,7 @@ void LimitedNodeList::removeDelayedAdd(QUuid nodeUUID) {
}
bool LimitedNodeList::isDelayedNode(QUuid nodeUUID) {
auto it = std::find_if(_delayedNodeAdds.begin(), _delayedNodeAdds.end(), [&](auto info) {
auto it = std::find_if(_delayedNodeAdds.begin(), _delayedNodeAdds.end(), [&](const auto& info) {
return info.uuid == nodeUUID;
});
return it != _delayedNodeAdds.end();

View file

@ -26,7 +26,7 @@ const quint16 ICE_SERVER_DEFAULT_PORT = 7337;
const int ICE_HEARBEAT_INTERVAL_MSECS = 2 * 1000;
const int MAX_ICE_CONNECTION_ATTEMPTS = 5;
const int UDP_PUNCH_PING_INTERVAL_MS = 25;
const int UDP_PUNCH_PING_INTERVAL_MS = 250;
class NetworkPeer : public QObject {
Q_OBJECT

View file

@ -752,11 +752,11 @@ void NodeList::pingPunchForInactiveNode(const SharedNodePointer& node) {
flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::SendAudioPing);
}
// every second we're trying to ping this node and we're not getting anywhere - debug that out
const int NUM_DEBUG_CONNECTION_ATTEMPTS = 1000 / (UDP_PUNCH_PING_INTERVAL_MS);
// every two seconds we're trying to ping this node and we're not getting anywhere - debug that out
const int NUM_DEBUG_CONNECTION_ATTEMPTS = 2000 / (UDP_PUNCH_PING_INTERVAL_MS);
if (node->getConnectionAttempts() > 0 && node->getConnectionAttempts() % NUM_DEBUG_CONNECTION_ATTEMPTS == 0) {
qCDebug(networking) << "No response to UDP hole punch pings for node" << node->getUUID() << "in last second.";
qCDebug(networking) << "No response to UDP hole punch pings for node" << node->getUUID() << "in last 2 s.";
}
auto nodeID = node->getUUID();

View file

@ -416,7 +416,7 @@ void ModelMeshPartPayload::bindTransform(gpu::Batch& batch, RenderArgs::RenderMo
void ModelMeshPartPayload::render(RenderArgs* args) {
PerformanceTimer perfTimer("ModelMeshPartPayload::render");
if (!args) {
if (!args || (args->_renderMode == RenderArgs::RenderMode::DEFAULT_RENDER_MODE && _cauterized)) {
return;
}

View file

@ -107,6 +107,7 @@ public:
void render(RenderArgs* args) override;
void setShapeKey(bool invalidateShapeKey, PrimitiveMode primitiveMode, bool useDualQuaternionSkinning);
void setCauterized(bool cauterized) { _cauterized = cauterized; }
// ModelMeshPartPayload functions to perform render
void bindMesh(gpu::Batch& batch) override;
@ -138,6 +139,8 @@ private:
gpu::BufferPointer _meshBlendshapeBuffer;
int _meshNumVertices;
render::ShapeKey _shapeKey { render::ShapeKey::Builder::invalid() };
bool _cauterized { false };
};
namespace render {

View file

@ -224,6 +224,7 @@ void Model::updateRenderItems() {
PrimitiveMode primitiveMode = self->getPrimitiveMode();
auto renderItemKeyGlobalFlags = self->getRenderItemKeyGlobalFlags();
bool cauterized = self->isCauterized();
render::Transaction transaction;
for (int i = 0; i < (int) self->_modelMeshRenderItemIDs.size(); i++) {
@ -237,7 +238,7 @@ void Model::updateRenderItems() {
bool useDualQuaternionSkinning = self->getUseDualQuaternionSkinning();
transaction.updateItem<ModelMeshPartPayload>(itemID, [modelTransform, meshState, useDualQuaternionSkinning,
invalidatePayloadShapeKey, primitiveMode, renderItemKeyGlobalFlags](ModelMeshPartPayload& data) {
invalidatePayloadShapeKey, primitiveMode, renderItemKeyGlobalFlags, cauterized](ModelMeshPartPayload& data) {
if (useDualQuaternionSkinning) {
data.updateClusterBuffer(meshState.clusterDualQuaternions);
} else {
@ -261,6 +262,7 @@ void Model::updateRenderItems() {
}
data.updateTransformForSkinnedMesh(renderTransform, modelTransform);
data.setCauterized(cauterized);
data.updateKey(renderItemKeyGlobalFlags);
data.setShapeKey(invalidatePayloadShapeKey, primitiveMode, useDualQuaternionSkinning);
});
@ -442,6 +444,19 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
}
}
/**jsdoc
* Information about a submesh intersection point.
* @typedef {object} SubmeshIntersection
* @property {Vec3} worldIntersectionPoint - The intersection point in world coordinates.
* @property {Vec3} meshIntersectionPoint - The intersection point in model coordinates.
* @property {number} partIndex - The index of the intersected mesh part within the submesh.
* @property {number} shapeID - The index of the mesh part within the model.
* @property {number} subMeshIndex - The index of the intersected submesh within the model.
* @property {string} subMeshName - The name of the intersected submesh.
* @property {Triangle} subMeshTriangleWorld - The vertices of the intersected mesh part triangle in world coordinates.
* @property {Vec3} subMeshNormal - The normal of the intersected mesh part triangle in model coordinates.
* @property {Triangle} subMeshTriangle - The vertices of the intersected mesh part triangle in model coordinates.
*/
if (intersectedSomething) {
distance = bestDistance;
face = bestFace;
@ -922,6 +937,23 @@ bool Model::isGroupCulled() const {
return _renderItemKeyGlobalFlags.isSubMetaCulled();
}
void Model::setCauterized(bool cauterized, const render::ScenePointer& scene) {
if (Model::isCauterized() != cauterized) {
_cauterized = cauterized;
if (!scene) {
_needsFixupInScene = true;
return;
}
render::Transaction transaction;
foreach (auto item, _modelMeshRenderItemsMap.keys()) {
transaction.updateItem<ModelMeshPartPayload>(item, [cauterized](ModelMeshPartPayload& data) {
data.setCauterized(cauterized);
});
}
scene->enqueueTransaction(transaction);
}
}
const render::ItemKey Model::getRenderItemKeyGlobalFlags() const {
return _renderItemKeyGlobalFlags;
}
@ -1561,8 +1593,8 @@ void Model::applyMaterialMapping() {
{
QString url = networkMaterialResource->getURL().toString();
bool foundMaterialName = false;
if (url.contains("?")) {
auto split = url.split("?");
if (url.contains("#")) {
auto split = url.split("#");
std::string materialName = split.last().toStdString();
auto networkMaterialIter = networkMaterialResource->parsedMaterials.networkMaterials.find(materialName);
if (networkMaterialIter != networkMaterialResource->parsedMaterials.networkMaterials.end()) {

View file

@ -126,6 +126,9 @@ public:
void setHifiRenderLayer(render::hifi::Layer layer, const render::ScenePointer& scene = nullptr);
bool isCauterized() const { return _cauterized; }
void setCauterized(bool value, const render::ScenePointer& scene);
// Access the current RenderItemKey Global Flags used by the model and applied to the render items representing the parts of the model.
const render::ItemKey getRenderItemKeyGlobalFlags() const;
@ -502,6 +505,7 @@ protected:
// For this to work, a Meta RI must exists and knows about the RIs of this Model.
//
render::ItemKey _renderItemKeyGlobalFlags;
bool _cauterized { false };
bool shouldInvalidatePayloadShapeKey(int meshIndex);

View file

@ -23,9 +23,9 @@ namespace render {
// Tag is the alias names of render::ItemKey::Tag combinations used in the Hifi Render Engine
enum Tag : uint8_t {
TAG_NONE = render::ItemKey::TAG_BITS_NONE, // No Tags at all
TAG_MAIN_VIEW = render::ItemKey::TAG_BITS_0, // Main view
TAG_SECONDARY_VIEW = render::ItemKey::TAG_BITS_1, // Secondary View
TAG_NONE = render::ItemKey::TAG_BITS_NONE, // No Tags at all
TAG_MAIN_VIEW = render::ItemKey::TAG_BITS_0, // Main view
TAG_SECONDARY_VIEW = render::ItemKey::TAG_BITS_1, // Secondary View
TAG_ALL_VIEWS = TAG_MAIN_VIEW | TAG_SECONDARY_VIEW, // All views
};

View file

@ -58,10 +58,6 @@ void RenderShadowTask::build(JobModel& task, const render::Varying& input, rende
initZPassPipelines(*shapePlumber, state, fadeEffect->getBatchSetter(), fadeEffect->getItemUniformSetter());
}
// FIXME: calling this here before the zones/lights are drawn during the deferred/forward passes means we're actually using the frames from the previous draw
// Fetch the current frame stacks from all the stages
// Starting with the Light Frame genreated in previous tasks
const auto setupOutput = task.addJob<RenderShadowSetup>("ShadowSetup", input);
const auto queryResolution = setupOutput.getN<RenderShadowSetup::Output>(1);
const auto shadowFrame = setupOutput.getN<RenderShadowSetup::Output>(3);
@ -99,7 +95,7 @@ void RenderShadowTask::build(JobModel& task, const render::Varying& input, rende
for (auto i = 0; i < SHADOW_CASCADE_MAX_COUNT; i++) {
char jobName[64];
sprintf(jobName, "ShadowCascadeSetup%d", i);
const auto cascadeSetupOutput = task.addJob<RenderShadowCascadeSetup>(jobName, shadowFrame, i, tagBits, tagMask);
const auto cascadeSetupOutput = task.addJob<RenderShadowCascadeSetup>(jobName, shadowFrame, i, shadowCasterReceiverFilter);
const auto shadowFilter = cascadeSetupOutput.getN<RenderShadowCascadeSetup::Outputs>(0);
auto antiFrustum = render::Varying(ViewFrustumPointer());
cascadeFrustums[i] = cascadeSetupOutput.getN<RenderShadowCascadeSetup::Outputs>(1);
@ -452,8 +448,7 @@ void RenderShadowCascadeSetup::run(const render::RenderContextPointer& renderCon
const auto globalShadow = shadowFrame->_objects[0];
if (globalShadow && _cascadeIndex < globalShadow->getCascadeCount()) {
// Second item filter is to filter items to keep in shadow frustum computation (here we need to keep shadow receivers)
output.edit0() = ItemFilter::Builder::visibleWorldItems().withTypeShape().withOpaque().withoutLayered().withTagBits(_tagBits, _tagMask);
output.edit0() = _filter;
// Set the keylight render args
auto& cascade = globalShadow->getCascade(_cascadeIndex);
@ -551,7 +546,6 @@ void CullShadowBounds::run(const render::RenderContextPointer& renderContext, co
assert(lightStage);
const auto globalLightDir = currentKeyLight->getDirection();
auto castersFilter = render::ItemFilter::Builder(filter).withShadowCaster().build();
const auto& receiversFilter = filter;
for (auto& inItems : inShapes) {
auto key = inItems.first;
@ -570,7 +564,7 @@ void CullShadowBounds::run(const render::RenderContextPointer& renderContext, co
if (castersFilter.test(shapeKey)) {
outItems->second.emplace_back(item);
outBounds += item.bound;
} else if (receiversFilter.test(shapeKey)) {
} else {
// Receivers are not rendered but they still increase the bounds of the shadow scene
// although only in the direction of the light direction so as to have a correct far
// distance without decreasing the near distance.
@ -585,7 +579,7 @@ void CullShadowBounds::run(const render::RenderContextPointer& renderContext, co
if (castersFilter.test(shapeKey)) {
outItems->second.emplace_back(item);
outBounds += item.bound;
} else if (receiversFilter.test(shapeKey)) {
} else {
// Receivers are not rendered but they still increase the bounds of the shadow scene
// although only in the direction of the light direction so as to have a correct far
// distance without decreasing the near distance.

View file

@ -134,15 +134,13 @@ public:
using Outputs = render::VaryingSet3<render::ItemFilter, ViewFrustumPointer, RenderShadowTask::CullFunctor>;
using JobModel = render::Job::ModelIO<RenderShadowCascadeSetup, Inputs, Outputs>;
RenderShadowCascadeSetup(unsigned int cascadeIndex, uint8_t tagBits = 0x00, uint8_t tagMask = 0x00) :
_cascadeIndex(cascadeIndex), _tagBits(tagBits), _tagMask(tagMask) {}
RenderShadowCascadeSetup(unsigned int cascadeIndex, render::ItemFilter filter) : _cascadeIndex(cascadeIndex), _filter(filter) {}
void run(const render::RenderContextPointer& renderContext, const Inputs& input, Outputs& output);
private:
unsigned int _cascadeIndex;
uint8_t _tagBits { 0x00 };
uint8_t _tagMask { 0x00 };
render::ItemFilter _filter;
};
class RenderShadowCascadeTeardown {

View file

@ -61,7 +61,7 @@ namespace render {
class Args {
public:
enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE, MIRROR_RENDER_MODE, SECONDARY_CAMERA_RENDER_MODE };
enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, MIRROR_RENDER_MODE, SECONDARY_CAMERA_RENDER_MODE };
enum DisplayMode { MONO, STEREO_MONITOR, STEREO_HMD };
enum DebugFlags {
RENDER_DEBUG_NONE = 0,

View file

@ -39,7 +39,7 @@ public:
RenderFetchCullSortTask() {}
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, uint8_t tagBits = 0x00, uint8_t tagMask = 0x00);
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, uint8_t tagBits, uint8_t tagMask);
};
#endif // hifi_RenderFetchCullSortTask_h

View file

@ -42,6 +42,7 @@ const float DEFAULT_AVATAR_HIPS_MASS = 40.0f;
const float DEFAULT_AVATAR_HEAD_MASS = 20.0f;
const float DEFAULT_AVATAR_LEFTHAND_MASS = 2.0f;
const float DEFAULT_AVATAR_RIGHTHAND_MASS = 2.0f;
const float DEFAULT_AVATAR_IPD = 0.064f;
// Used when avatar is missing joints... (avatar space)
const glm::quat DEFAULT_AVATAR_MIDDLE_EYE_ROT { Quaternions::Y_180 };
@ -102,6 +103,7 @@ static const float MAX_AVATAR_HEIGHT = 1000.0f * DEFAULT_AVATAR_HEIGHT; // meter
static const float MIN_AVATAR_HEIGHT = 0.005f * DEFAULT_AVATAR_HEIGHT; // meters
static const float MIN_AVATAR_RADIUS = 0.5f * MIN_AVATAR_HEIGHT;
static const float AVATAR_WALK_SPEED_SCALAR = 1.0f;
static const float AVATAR_SPRINT_SPEED_SCALAR = 2.0f;
static const float AVATAR_DESKTOP_SPRINT_SPEED_SCALAR = 3.0f;
static const float AVATAR_HMD_SPRINT_SPEED_SCALAR = 2.0f;
#endif // hifi_AvatarConstants_h

View file

@ -119,6 +119,13 @@ void swingTwistDecomposition(const glm::quat& rotation,
glm::quat& swing,
glm::quat& twist);
/**jsdoc
* A triangle in a mesh.
* @typedef {object} Triangle
* @property {Vec3} v0 - The position of vertex 0 in the triangle.
* @property {Vec3} v1 - The position of vertex 1 in the triangle.
* @property {Vec3} v2 - The position of vertex 2 in the triangle.
*/
class Triangle {
public:
glm::vec3 v0;

View file

@ -7,6 +7,7 @@
//
#include "Profile.h"
#include <chrono>
Q_LOGGING_CATEGORY(trace_app, "trace.app")
Q_LOGGING_CATEGORY(trace_app_detail, "trace.app.detail")
@ -41,14 +42,22 @@ static bool tracingEnabled() {
return DependencyManager::isSet<tracing::Tracer>() && DependencyManager::get<tracing::Tracer>()->isEnabled();
}
Duration::Duration(const QLoggingCategory& category, const QString& name, uint32_t argbColor, uint64_t payload, const QVariantMap& baseArgs) : _name(name), _category(category) {
DurationBase::DurationBase(const QLoggingCategory& category, const QString& name) : _name(name), _category(category) {
}
Duration::Duration(const QLoggingCategory& category,
const QString& name,
uint32_t argbColor,
uint64_t payload,
const QVariantMap& baseArgs) :
DurationBase(category, name) {
if (tracingEnabled() && category.isDebugEnabled()) {
QVariantMap args = baseArgs;
args["nv_payload"] = QVariant::fromValue(payload);
tracing::traceEvent(_category, _name, tracing::DurationBegin, "", args);
#if defined(NSIGHT_TRACING)
nvtxEventAttributes_t eventAttrib { 0 };
nvtxEventAttributes_t eventAttrib{ 0 };
eventAttrib.version = NVTX_VERSION;
eventAttrib.size = NVTX_EVENT_ATTRIB_STRUCT_SIZE;
eventAttrib.colorType = NVTX_COLOR_ARGB;
@ -98,3 +107,17 @@ void Duration::endRange(const QLoggingCategory& category, uint64_t rangeId) {
#endif
}
ConditionalDuration::ConditionalDuration(const QLoggingCategory& category, const QString& name, uint32_t minTime) :
DurationBase(category, name), _startTime(tracing::Tracer::now()), _minTime(minTime * USECS_PER_MSEC) {
}
ConditionalDuration::~ConditionalDuration() {
if (tracingEnabled() && _category.isDebugEnabled()) {
auto endTime = tracing::Tracer::now();
auto duration = endTime - _startTime;
if (duration >= _minTime) {
tracing::traceEvent(_category, _startTime, _name, tracing::DurationBegin);
tracing::traceEvent(_category, endTime, _name, tracing::DurationEnd);
}
}
}

View file

@ -37,17 +37,31 @@ Q_DECLARE_LOGGING_CATEGORY(trace_startup)
Q_DECLARE_LOGGING_CATEGORY(trace_workload)
Q_DECLARE_LOGGING_CATEGORY(trace_baker)
class Duration {
class DurationBase {
protected:
DurationBase(const QLoggingCategory& category, const QString& name);
const QString _name;
const QLoggingCategory& _category;
};
class Duration : public DurationBase {
public:
Duration(const QLoggingCategory& category, const QString& name, uint32_t argbColor = 0xff0000ff, uint64_t payload = 0, const QVariantMap& args = QVariantMap());
~Duration();
static uint64_t beginRange(const QLoggingCategory& category, const char* name, uint32_t argbColor);
static void endRange(const QLoggingCategory& category, uint64_t rangeId);
};
class ConditionalDuration : public DurationBase {
public:
ConditionalDuration(const QLoggingCategory& category, const QString& name, uint32_t minTime);
~ConditionalDuration();
private:
QString _name;
const QLoggingCategory& _category;
const int64_t _startTime;
const int64_t _minTime;
};
@ -95,6 +109,7 @@ inline void metadata(const QString& metadataType, const QVariantMap& args) {
}
#define PROFILE_RANGE(category, name) Duration profileRangeThis(trace_##category(), name);
#define PROFILE_RANGE_IF_LONGER(category, name, ms) ConditionalDuration profileRangeThis(trace_##category(), name, ms);
#define PROFILE_RANGE_EX(category, name, argbColor, payload, ...) Duration profileRangeThis(trace_##category(), name, argbColor, (uint64_t)payload, ##__VA_ARGS__);
#define PROFILE_RANGE_BEGIN(category, rangeId, name, argbColor) rangeId = Duration::beginRange(trace_##category(), name, argbColor)
#define PROFILE_RANGE_END(category, rangeId) Duration::endRange(trace_##category(), rangeId)

View file

@ -68,19 +68,28 @@ const QUuid SpatiallyNestable::getParentID() const {
void SpatiallyNestable::setParentID(const QUuid& parentID) {
bumpAncestorChainRenderableVersion();
bool success = false;
auto parent = getParentPointer(success);
bool parentChanged = false;
_idLock.withWriteLock([&] {
if (_parentID != parentID) {
parentChanged = true;
_parentID = parentID;
_parentKnowsMe = false;
}
});
if (parentChanged && success && parent) {
parent->recalculateChildCauterization();
}
if (!_parentKnowsMe) {
bool success = false;
auto parent = getParentPointer(success);
success = false;
parent = getParentPointer(success);
if (success && parent) {
bumpAncestorChainRenderableVersion();
parent->updateQueryAACube();
parent->recalculateChildCauterization();
}
}
}
@ -175,8 +184,9 @@ void SpatiallyNestable::forgetChild(SpatiallyNestablePointer newChild) const {
void SpatiallyNestable::setParentJointIndex(quint16 parentJointIndex) {
_parentJointIndex = parentJointIndex;
auto parent = _parent.lock();
if (parent) {
bool success = false;
auto parent = getParentPointer(success);
if (success && parent) {
parent->recalculateChildCauterization();
}
}

View file

@ -176,6 +176,10 @@ void Tracer::serialize(const QString& filename) {
#endif
}
int64_t Tracer::now() {
return std::chrono::duration_cast<std::chrono::microseconds>(p_high_resolution_clock::now().time_since_epoch()).count();
}
void Tracer::traceEvent(const QLoggingCategory& category,
const QString& name, EventType type,
qint64 timestamp, qint64 processID, qint64 threadID,
@ -226,9 +230,17 @@ void Tracer::traceEvent(const QLoggingCategory& category,
return;
}
auto timestamp = std::chrono::duration_cast<std::chrono::microseconds>(p_high_resolution_clock::now().time_since_epoch()).count();
traceEvent(category, name, type, now(), id, args, extra);
}
void Tracer::traceEvent(const QLoggingCategory& category,
const QString& name, EventType type, int64_t timestamp, const QString& id,
const QVariantMap& args, const QVariantMap& extra) {
if (!_enabled && type != Metadata) {
return;
}
auto processID = QCoreApplication::applicationPid();
auto threadID = int64_t(QThread::currentThreadId());
traceEvent(category, name, type, timestamp, processID, threadID, id, args, extra);
}

View file

@ -78,11 +78,18 @@ struct TraceEvent {
class Tracer : public Dependency {
public:
static int64_t now();
void traceEvent(const QLoggingCategory& category,
const QString& name, EventType type,
const QString& id = "",
const QVariantMap& args = QVariantMap(), const QVariantMap& extra = QVariantMap());
void traceEvent(const QLoggingCategory& category,
const QString& name, EventType type,
int64_t timestamp,
const QString& id = "",
const QVariantMap& args = QVariantMap(), const QVariantMap& extra = QVariantMap());
void startTracing();
void stopTracing();
void serialize(const QString& file);
@ -101,6 +108,16 @@ private:
std::mutex _eventsMutex;
};
inline void traceEvent(const QLoggingCategory& category, int64_t timestamp, const QString& name, EventType type, const QString& id = "", const QVariantMap& args = {}, const QVariantMap& extra = {}) {
if (!DependencyManager::isSet<Tracer>()) {
return;
}
const auto& tracer = DependencyManager::get<Tracer>();
if (tracer) {
tracer->traceEvent(category, name, type, timestamp, id, args, extra);
}
}
inline void traceEvent(const QLoggingCategory& category, const QString& name, EventType type, const QString& id = "", const QVariantMap& args = {}, const QVariantMap& extra = {}) {
if (!DependencyManager::isSet<Tracer>()) {
return;

View file

@ -36,25 +36,6 @@ static int cameraModeId = qRegisterMetaType<CameraMode>();
class Camera : public QObject {
Q_OBJECT
/**jsdoc
* The Camera API provides access to the "camera" that defines your view in desktop and HMD display modes.
*
* @namespace Camera
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
*
* @property {Vec3} position - The position of the camera. You can set this value only when the camera is in independent
* mode.
* @property {Quat} orientation - The orientation of the camera. You can set this value only when the camera is in
* independent mode.
* @property {Camera.Mode} mode - The camera mode.
* @property {ViewFrustum} frustum - The camera frustum.
* @property {Uuid} cameraEntity - The ID of the entity that is used for the camera position and orientation when the
* camera is in entity mode.
*/
// FIXME: The cameraEntity property definition is copied from FancyCamera.h.
Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition)
Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation)
Q_PROPERTY(QString mode READ getModeString WRITE setModeString)
@ -82,53 +63,54 @@ public:
public slots:
/**jsdoc
* Get the current camera mode. You can also get the mode using the <code>Camera.mode</code> property.
* Gets the current camera mode. You can also get the mode using the {@link Camera|Camera.mode} property.
* @function Camera.getModeString
* @returns {Camera.Mode} The current camera mode.
*/
QString getModeString() const;
/**jsdoc
* Set the camera mode. You can also set the mode using the <code>Camera.mode</code> property.
* @function Camera.setModeString
* @param {Camera.Mode} mode - The mode to set the camera to.
*/
* Sets the camera mode. You can also set the mode using the {@link Camera|Camera.mode} property.
* @function Camera.setModeString
* @param {Camera.Mode} mode - The mode to set the camera to.
*/
void setModeString(const QString& mode);
/**jsdoc
* Get the current camera position. You can also get the position using the <code>Camera.position</code> property.
* @function Camera.getPosition
* @returns {Vec3} The current camera position.
*/
* Gets the current camera position. You can also get the position using the {@link Camera|Camera.position} property.
* @function Camera.getPosition
* @returns {Vec3} The current camera position.
*/
glm::vec3 getPosition() const { return _position; }
/**jsdoc
* Set the camera position. You can also set the position using the <code>Camera.position</code> property. Only works if the
* camera is in independent mode.
* @function Camera.setPosition
* @param {Vec3} position - The position to set the camera at.
*/
* Sets the camera position. You can also set the position using the {@link Camera|Camera.position} property. Only works if
* the camera is in independent mode.
* @function Camera.setPosition
* @param {Vec3} position - The position to set the camera at.
*/
void setPosition(const glm::vec3& position);
/**jsdoc
* Get the current camera orientation. You can also get the orientation using the <code>Camera.orientation</code> property.
* @function Camera.getOrientation
* @returns {Quat} The current camera orientation.
*/
* Gets the current camera orientation. You can also get the orientation using the {@link Camera|Camera.orientation}
* property.
* @function Camera.getOrientation
* @returns {Quat} The current camera orientation.
*/
glm::quat getOrientation() const { return _orientation; }
/**jsdoc
* Set the camera orientation. You can also set the orientation using the <code>Camera.orientation</code> property. Only
* works if the camera is in independent mode.
* @function Camera.setOrientation
* @param {Quat} orientation - The orientation to set the camera to.
*/
* Sets the camera orientation. You can also set the orientation using the {@link Camera|Camera.orientation} property. Only
* works if the camera is in independent mode.
* @function Camera.setOrientation
* @param {Quat} orientation - The orientation to set the camera to.
*/
void setOrientation(const glm::quat& orientation);
/**jsdoc
* Compute a {@link PickRay} based on the current camera configuration and the specified <code>x, y</code> position on the
* screen. The {@link PickRay} can be used in functions such as {@link Entities.findRayIntersection} and
* {@link Overlays.findRayIntersection}.
* Computes a {@link PickRay} based on the current camera configuration and the specified <code>x, y</code> position on the
* screen. The {@link PickRay} can be used in functions such as {@link Entities.findRayIntersection} and
* {@link Overlays.findRayIntersection}.
* @function Camera.computePickRay
* @param {number} x - X-coordinate on screen.
* @param {number} y - Y-coordinate on screen.
@ -147,9 +129,9 @@ public slots:
virtual PickRay computePickRay(float x, float y) const = 0;
/**jsdoc
* Rotate the camera to look at the specified <code>position</code>. Only works if the camera is in independent mode.
* Rotates the camera to look at the specified <code>position</code>. Only works if the camera is in independent mode.
* @function Camera.lookAt
* @param {Vec3} position - Position to look at.
* @param {Vec3} position - The position to look at.
* @example <caption>Rotate your camera to look at entities as you click on them with your mouse.</caption>
* function onMousePressEvent(event) {
* var pickRay = Camera.computePickRay(event.x, event.y);
@ -168,15 +150,15 @@ public slots:
void lookAt(const glm::vec3& position);
/**jsdoc
* Set the camera to continue looking at the specified <code>position</code> even while the camera moves. Only works if the
* camera is in independent mode.
* Sets the camera to continue looking at the specified <code>position</code> even while the camera moves. Only works if
* the camera is in independent mode.
* @function Camera.keepLookingAt
* @param {Vec3} position - Position to keep looking at.
* @param {Vec3} position - The position to keep looking at.
*/
void keepLookingAt(const glm::vec3& position);
/**jsdoc
* Stops the camera from continually looking at the position that was set with <code>Camera.keepLookingAt</code>.
* Stops the camera from continually looking at the position that was set with {@link Camera.keepLookingAt}.
* @function Camera.stopLookingAt
*/
void stopLooking() { _isKeepLookingAt = false; }

View file

@ -2525,15 +2525,21 @@ var PropertiesTool = function (opts) {
propertyRanges: propertyRanges,
});
} else if (data.type === "materialTargetRequest") {
var properties = Entities.getEntityProperties(data.entityID, ["type", "parentID"]);
var parentModel = properties.parentID !== Uuid.NULL &&
Entities.getEntityProperties(properties.parentID, ["type"]).type === "Model";
var parentModelData;
if (properties.type === "Material" && parentModel) {
parentModelData = Graphics.getModel(properties.parentID);
}
var properties = Entities.getEntityProperties(data.entityID, ["type", "parentID"]);
if (properties.type === "Material" && properties.parentID !== Uuid.NULL) {
var parentType = Entities.getEntityProperties(properties.parentID, ["type"]).type;
if (parentType === "Model" || Entities.getNestableType(properties.parentID) === "avatar") {
parentModelData = Graphics.getModel(properties.parentID);
} else if (parentType === "Shape" || parentType === "Box" || parentType === "Sphere") {
parentModelData = {};
parentModelData.numMeshes = 1;
parentModelData.materialNames = [];
}
}
emitScriptEvent({
type: 'materialTargetReply',
entityID: data.entityID,
materialTargetData: parentModelData,
});
}

View file

@ -55,6 +55,7 @@ const GROUPS = [
label: "Parent",
type: "string",
propertyID: "parentID",
onChange: parentIDChanged,
},
{
label: "Parent Joint Index",
@ -616,6 +617,8 @@ const GROUPS = [
decimals: 3,
propertyID: "webAlpha",
propertyName: "alpha",
min: 0,
max: 1,
},
{
label: "Max FPS",
@ -2007,6 +2010,9 @@ function createStringProperty(property, elProperty) {
elInput.addEventListener('change', createEmitTextPropertyUpdateFunction(property));
if (propertyData.onChange !== undefined) {
elInput.addEventListener('change', propertyData.onChange);
}
elProperty.appendChild(elInput);
@ -2476,7 +2482,7 @@ function resetDynamicMultiselectProperty(elDivOptions) {
let elDivOption = elInputs[0].parentNode;
elDivOption.parentNode.removeChild(elDivOption);
}
elDivOptions.firstChild.style.display = "block"; // show "No Options" text
elDivOptions.firstChild.style.display = null; // show "No Options" text
elDivOptions.parentNode.lastChild.style.display = "none"; // hide Select/Clear all buttons
}
@ -2622,6 +2628,17 @@ function createProperty(propertyData, propertyElementID, propertyName, propertyI
}
/**
* PROPERTY-SPECIFIC CALLBACKS
*/
function parentIDChanged() {
if (selectedEntityProperties.type === "Material") {
requestMaterialTarget();
}
}
/**
* BUTTON CALLBACKS
*/
@ -3156,6 +3173,10 @@ function setTextareaScrolling(element) {
* MATERIAL TARGET FUNCTIONS
*/
function requestMaterialTarget() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'materialTargetRequest', entityID: selectedEntityProperties.id }));
}
function setMaterialTargetData(materialTargetData) {
let elDivOptions = getPropertyInputElement("parentMaterialName");
resetDynamicMultiselectProperty(elDivOptions);
@ -3165,7 +3186,7 @@ function setMaterialTargetData(materialTargetData) {
}
elDivOptions.firstChild.style.display = "none"; // hide "No Options" text
elDivOptions.parentNode.lastChild.style.display = "block"; // show Select/Clear all buttons
elDivOptions.parentNode.lastChild.style.display = null; // show Select/Clear all buttons
let numMeshes = materialTargetData.numMeshes;
for (let i = 0; i < numMeshes; ++i) {
@ -3241,35 +3262,35 @@ function sendMaterialTargetProperty() {
let elDivOptions = getPropertyInputElement("parentMaterialName");
let elInputs = elDivOptions.getElementsByClassName("materialTargetInput");
let materialTargetList = "";
let materialTargetList = [];
for (let i = 0; i < elInputs.length; ++i) {
let elInput = elInputs[i];
if (elInput.checked) {
let targetID = elInput.getAttribute("targetID");
if (elInput.getAttribute("isMaterialName") === "true") {
materialTargetList += "mat::" + targetID + ",";
materialTargetList.push("mat::" + targetID);
} else {
materialTargetList += targetID + ",";
materialTargetList.push(targetID);
}
}
}
if (materialTargetList !== "") {
materialTargetList = materialTargetList.substring(0, materialTargetList.length - 1);
materialTargetList = "[" + materialTargetList + "]";
let propertyValue = materialTargetList.join(",");
if (propertyValue.length > 1) {
propertyValue = "[" + propertyValue + "]";
}
updateProperty("parentMaterialName", materialTargetList, false);
updateProperty("parentMaterialName", propertyValue, false);
}
function materialTargetPropertyUpdate(propertyValue) {
let elDivOptions = getPropertyInputElement("parentMaterialName");
let elInputs = elDivOptions.getElementsByClassName("materialTargetInput");
if (propertyValue.charAt(0) === '[') {
if (propertyValue.startsWith('[')) {
propertyValue = propertyValue.substring(1, propertyValue.length);
}
if (propertyValue.charAt(propertyValue.length - 1) === ']') {
if (propertyValue.endsWith(']')) {
propertyValue = propertyValue.substring(0, propertyValue.length - 1);
}
@ -3482,6 +3503,7 @@ function loaded() {
deleteJSONMaterialEditor();
}
}
lastEntityID = null;
resetProperties();
showGroupsForType("None");
@ -3780,7 +3802,7 @@ function loaded() {
}
if (hasSelectedEntityChanged && selectedEntityProperties.type === "Material") {
EventBridge.emitWebEvent(JSON.stringify({ type: 'materialTargetRequest', entityID: selectedEntityProperties.id }));
requestMaterialTarget();
}
let activeElement = document.activeElement;
@ -3833,7 +3855,9 @@ function loaded() {
}
}
} else if (data.type === 'materialTargetReply') {
setMaterialTargetData(data.materialTargetData);
if (data.entityID === selectedEntityProperties.id) {
setMaterialTargetData(data.materialTargetData);
}
}
});

View file

@ -1347,12 +1347,16 @@ SelectionDisplay = (function() {
};
that.updateLastMouseEvent = function(event) {
if (activeTool && lastMouseEvent !== null) {
if (activeTool && lastMouseEvent !== null) {
var change = lastMouseEvent.isShifted !== event.isShifted || lastMouseEvent.isMeta !== event.isMeta ||
lastMouseEvent.isControl !== event.isControl || lastMouseEvent.isAlt !== event.isAlt;
lastMouseEvent.isShifted = event.isShifted;
lastMouseEvent.isMeta = event.isMeta;
lastMouseEvent.isControl = event.isControl;
lastMouseEvent.isAlt = event.isAlt;
activeTool.onMove(lastMouseEvent);
lastMouseEvent.isAlt = event.isAlt;
if (change) {
activeTool.onMove(lastMouseEvent);
}
}
};

View file

@ -56,7 +56,7 @@
grab: { grabbable: true },
cloneable: true,
cloneLifetime: LIFETIME,
cloneLimit: 9999
cloneLimit: 9999,
position: Vec3.sum(MyAvatar.position, Vec3.sum(forwardOffset, forwardVector)),
color: newColor(),
script: SCRIPT_URL

View file

@ -58,6 +58,7 @@ exports.handlers = {
'../../libraries/physics/src',
'../../libraries/plugins/src/plugins',
'../../libraries/pointers/src',
'../../libraries/render-utils/src',
'../../libraries/script-engine/src',
'../../libraries/shared/src',
'../../libraries/shared/src/shared',

View file

@ -55,8 +55,9 @@ void BakerCLI::bakeFile(QUrl inputUrl, const QString& outputPath, const QString&
}
}
} else if (type == SCRIPT_EXTENSION) {
_baker = std::unique_ptr<Baker> { new JSBaker(inputUrl, outputPath) };
_baker->moveToThread(Oven::instance().getNextWorkerThread());
// FIXME: disabled for now because it breaks some scripts
//_baker = std::unique_ptr<Baker> { new JSBaker(inputUrl, outputPath) };
//_baker->moveToThread(Oven::instance().getNextWorkerThread());
} else if (type == MATERIAL_EXTENSION) {
_baker = std::unique_ptr<Baker> { new MaterialBaker(inputUrl.toDisplayString(), true, outputPath) };
_baker->moveToThread(Oven::instance().getNextWorkerThread());

View file

@ -397,6 +397,8 @@ void DomainBaker::enumerateEntities() {
}
}
// FIXME: disabled for now because it breaks some scripts
/*
// Scripts
if (entity.contains(SCRIPT_KEY)) {
addScriptBaker(SCRIPT_KEY, entity[SCRIPT_KEY].toString(), *it);
@ -404,14 +406,19 @@ void DomainBaker::enumerateEntities() {
if (entity.contains(SERVER_SCRIPTS_KEY)) {
// TODO: serverScripts can be multiple scripts, need to handle that
}
*/
// Materials
if (entity.contains(MATERIAL_URL_KEY)) {
addMaterialBaker(MATERIAL_URL_KEY, entity[MATERIAL_URL_KEY].toString(), true, *it);
}
// FIXME: Disabled for now because relative texture URLs are not supported for embedded materials in material entities
// We need to make texture URLs absolute in this particular case only, keeping in mind that FSTBaker also uses embedded materials
/*
if (entity.contains(MATERIAL_DATA_KEY)) {
addMaterialBaker(MATERIAL_DATA_KEY, entity[MATERIAL_DATA_KEY].toString(), false, *it);
}
*/
}
}

View file

@ -33,7 +33,7 @@ OvenCLIApplication::OvenCLIApplication(int argc, char* argv[]) :
parser.addOptions({
{ CLI_INPUT_PARAMETER, "Path to file that you would like to bake.", "input" },
{ CLI_OUTPUT_PARAMETER, "Path to folder that will be used as output.", "output" },
{ CLI_TYPE_PARAMETER, "Type of asset. [model|material|js]", "type" },
{ CLI_TYPE_PARAMETER, "Type of asset. [model|material]"/*|js]"*/, "type" },
{ CLI_DISABLE_TEXTURE_COMPRESSION_PARAMETER, "Disable texture compression." }
});

View file

@ -38,6 +38,6 @@ var hoop = Entities.addEntity({
grabbableKey: {
grabbable: false
}
})
}),
compoundShapeURL: hoopCollisionHullURL
});