diff --git a/assignment-client/src/avatars/AvatarMixer.cpp b/assignment-client/src/avatars/AvatarMixer.cpp index 05dbfee912..998799f5e6 100644 --- a/assignment-client/src/avatars/AvatarMixer.cpp +++ b/assignment-client/src/avatars/AvatarMixer.cpp @@ -402,7 +402,7 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes AvatarData::parseAvatarIdentityPacket(message->getMessage(), identity); bool identityChanged = false; bool displayNameChanged = false; - avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged); + avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged, senderNode->getClockSkewUsec()); if (identityChanged) { QMutexLocker nodeDataLocker(&nodeData->getMutex()); nodeData->flagIdentityChange(); diff --git a/interface/resources/icons/tablet-icons/raise-hand-a.svg b/interface/resources/icons/tablet-icons/raise-hand-a.svg new file mode 100644 index 0000000000..fd35073332 --- /dev/null +++ b/interface/resources/icons/tablet-icons/raise-hand-a.svg @@ -0,0 +1,70 @@ [new 70-line image/svg+xml file: active-state "raise hand" tablet icon; SVG markup omitted] diff --git a/interface/resources/icons/tablet-icons/raise-hand-i.svg b/interface/resources/icons/tablet-icons/raise-hand-i.svg new file mode 100644 index 0000000000..50a6aa2606 --- /dev/null +++ b/interface/resources/icons/tablet-icons/raise-hand-i.svg @@ -0,0 +1,60 @@ [new 60-line image/svg+xml file: inactive-state "raise hand" tablet icon; SVG markup omitted] diff --git a/interface/resources/qml/hifi/Audio.qml b/interface/resources/qml/hifi/Audio.qml index d0c3122100..66760ff290 100644 --- a/interface/resources/qml/hifi/Audio.qml +++ b/interface/resources/qml/hifi/Audio.qml @@ -35,11 +35,6 @@ Rectangle { property string title: "Audio Options" signal sendToScript(var message); - //set models after Components is shown - Component.onCompleted: { - refreshTimer.start() - refreshTimerOutput.start() - } Component { id: separator @@ -84,7 +79,7 @@ Rectangle { } Connections { - target: AvatarInputs + target: AvatarInputs !== undefined ? AvatarInputs : null onShowAudioToolsChanged: { audioTools.checkbox.checked = showAudioTools } @@ -105,10 +100,12 @@ Rectangle { id: audioTools width: parent.width anchors { left: parent.left; right: parent.right; leftMargin: 30 } - checkbox.checked: AvatarInputs.showAudioTools + checkbox.checked: AvatarInputs !== undefined ? AvatarInputs.showAudioTools : false text.text: qsTr("Show audio level meter") onCheckBoxClicked: { - AvatarInputs.showAudioTools = checked + if (AvatarInputs !== undefined) { + AvatarInputs.showAudioTools = checked + } } } @@ -138,30 +135,34 @@ Rectangle { } ListView { - Timer { - id: refreshTimer - interval: 1 - repeat: false - onTriggered: { - //refresh model - inputAudioListView.model = undefined - inputAudioListView.model = AudioDevice.inputAudioDevices - } - } id: inputAudioListView anchors { left: parent.left; right: parent.right; leftMargin: 70 } height: 125 - spacing: 16 + spacing: 0 clip: true snapMode: ListView.SnapToItem - delegate: AudioCheckbox { + model: AudioDevice + delegate: Item { width: parent.width - checkbox.checked: (modelData === AudioDevice.getInputDevice()) - text.text: modelData - onCheckBoxClicked: { - if (checked) { - AudioDevice.setInputDevice(modelData) - refreshTimer.start() + visible: devicemode === 0 + height: visible ?
36 : 0 + + AudioCheckbox { + id: cbin + anchors.verticalCenter: parent.verticalCenter + Binding { + target: cbin.checkbox + property: 'checked' + value: devicechecked + } + + width: parent.width + cbchecked: devicechecked + text.text: devicename + onCheckBoxClicked: { + if (checked) { + AudioDevice.setInputDeviceAsync(devicename) + } } } } @@ -191,31 +192,33 @@ Rectangle { text: qsTr("CHOOSE OUTPUT DEVICE") } } + ListView { id: outputAudioListView - Timer { - id: refreshTimerOutput - interval: 1 - repeat: false - onTriggered: { - //refresh model - outputAudioListView.model = undefined - outputAudioListView.model = AudioDevice.outputAudioDevices - } - } anchors { left: parent.left; right: parent.right; leftMargin: 70 } height: 250 - spacing: 16 + spacing: 0 clip: true snapMode: ListView.SnapToItem - delegate: AudioCheckbox { + model: AudioDevice + delegate: Item { width: parent.width - checkbox.checked: (modelData === AudioDevice.getOutputDevice()) - text.text: modelData - onCheckBoxClicked: { - if (checked) { - AudioDevice.setOutputDevice(modelData) - refreshTimerOutput.start() + visible: devicemode === 1 + height: visible ? 36 : 0 + AudioCheckbox { + id: cbout + width: parent.width + anchors.verticalCenter: parent.verticalCenter + Binding { + target: cbout.checkbox + property: 'checked' + value: devicechecked + } + text.text: devicename + onCheckBoxClicked: { + if (checked) { + AudioDevice.setOutputDeviceAsync(devicename) + } } } } diff --git a/interface/resources/qml/hifi/Feed.qml b/interface/resources/qml/hifi/Feed.qml index fc108f47e3..c1bd35f49d 100644 --- a/interface/resources/qml/hifi/Feed.qml +++ b/interface/resources/qml/hifi/Feed.qml @@ -238,8 +238,25 @@ Column { stackShadowNarrowing: root.stackShadowNarrowing; shadowHeight: root.stackedCardShadowHeight; - hoverThunk: function () { scroll.currentIndex = index; } - unhoverThunk: function () { scroll.currentIndex = -1; } + hoverThunk: function () { scrollToIndex(index); } + unhoverThunk: function () { scrollToIndex(-1); } } } + NumberAnimation { + id: anim; + target: scroll; + property: "contentX"; + duration: 250; + } + function scrollToIndex(index) { + anim.running = false; + var pos = scroll.contentX; + var destPos; + scroll.positionViewAtIndex(index, ListView.Contain); + destPos = scroll.contentX; + anim.from = pos; + anim.to = destPos; + scroll.currentIndex = index; + anim.running = true; + } } diff --git a/interface/resources/qml/hifi/components/AudioCheckbox.qml b/interface/resources/qml/hifi/components/AudioCheckbox.qml index a8e0441e0a..b037fe4c7d 100644 --- a/interface/resources/qml/hifi/components/AudioCheckbox.qml +++ b/interface/resources/qml/hifi/components/AudioCheckbox.qml @@ -8,6 +8,7 @@ Row { id: row spacing: 16 property alias checkbox: cb + property alias cbchecked: cb.checked property alias text: txt signal checkBoxClicked(bool checked) diff --git a/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml b/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml index 85377aaeda..17d3f1b959 100644 --- a/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml +++ b/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml @@ -33,6 +33,6 @@ StackView { TabletPreferencesDialog { id: root objectName: "TabletGeneralPreferences" - showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect"] + showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect", 
"Vive Pucks Configuration"] } } diff --git a/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml b/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml index 2c8f6d9ea0..3e497b053e 100644 --- a/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml +++ b/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml @@ -136,8 +136,8 @@ Item { for (var i = 0; i < sections.length; i++) { totalHeight += sections[i].height + sections[i].getPreferencesHeight(); } - console.log(totalHeight); - return totalHeight; + var bottomPadding = 100; + return (totalHeight + bottomPadding); } } } diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index 3319a6a0e1..90aa9cd823 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -2053,6 +2053,8 @@ void Application::initializeUi() { rootContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor()); + rootContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance()); + if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) { rootContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get())); } diff --git a/interface/src/avatar/MyHead.cpp b/interface/src/avatar/MyHead.cpp index c41fff3bb5..34a75c5461 100644 --- a/interface/src/avatar/MyHead.cpp +++ b/interface/src/avatar/MyHead.cpp @@ -48,7 +48,7 @@ void MyHead::simulate(float deltaTime) { FaceTracker* faceTracker = qApp->getActiveFaceTracker(); _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted(); if (_isFaceTrackerConnected) { - _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); + _transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); if (typeid(*faceTracker) == typeid(DdeFaceTracker)) { @@ -60,11 +60,11 @@ void MyHead::simulate(float deltaTime) { const int FUNNEL_BLENDSHAPE = 40; const int SMILE_LEFT_BLENDSHAPE = 28; const int SMILE_RIGHT_BLENDSHAPE = 29; - _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen; - _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4; - _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4; - _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2; - _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3; + _transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen; + _transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4; + _transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4; + _transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2; + _transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3; } applyEyelidOffset(getFinalOrientationInWorldFrame()); } diff --git a/interface/src/scripting/AudioDeviceScriptingInterface.cpp b/interface/src/scripting/AudioDeviceScriptingInterface.cpp index cbb08c0af0..05168b0d4c 100644 --- a/interface/src/scripting/AudioDeviceScriptingInterface.cpp +++ b/interface/src/scripting/AudioDeviceScriptingInterface.cpp @@ -11,21 +11,19 @@ #include "AudioClient.h" #include "AudioDeviceScriptingInterface.h" - +#include "SettingsScriptingInterface.h" AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() { static AudioDeviceScriptingInterface sharedInstance; return &sharedInstance; } -QStringList AudioDeviceScriptingInterface::inputAudioDevices() const -{ - return DependencyManager::get()->getDeviceNames(QAudio::AudioInput).toList();; +QStringList AudioDeviceScriptingInterface::inputAudioDevices() 
const { + return _inputAudioDevices; } -QStringList AudioDeviceScriptingInterface::outputAudioDevices() const -{ - return DependencyManager::get()->getDeviceNames(QAudio::AudioOutput).toList();; +QStringList AudioDeviceScriptingInterface::outputAudioDevices() const { + return _outputAudioDevices; } bool AudioDeviceScriptingInterface::muted() @@ -33,11 +31,27 @@ bool AudioDeviceScriptingInterface::muted() return getMuted(); } -AudioDeviceScriptingInterface::AudioDeviceScriptingInterface() { +AudioDeviceScriptingInterface::AudioDeviceScriptingInterface(): QAbstractListModel(nullptr) { connect(DependencyManager::get().data(), &AudioClient::muteToggled, this, &AudioDeviceScriptingInterface::muteToggled); connect(DependencyManager::get().data(), &AudioClient::deviceChanged, - this, &AudioDeviceScriptingInterface::deviceChanged); + this, &AudioDeviceScriptingInterface::onDeviceChanged, Qt::QueuedConnection); + connect(DependencyManager::get().data(), &AudioClient::currentInputDeviceChanged, + this, &AudioDeviceScriptingInterface::onCurrentInputDeviceChanged, Qt::QueuedConnection); + connect(DependencyManager::get().data(), &AudioClient::currentOutputDeviceChanged, + this, &AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged, Qt::QueuedConnection); + //fill up model + onDeviceChanged(); + //set up previously saved device + SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance(); + const QString inDevice = settings->getValue("audio_input_device").toString(); + if (inDevice != _currentInputDevice) { + setInputDeviceAsync(inDevice); + } + const QString outDevice = settings->getValue("audio_output_device").toString(); + if (outDevice != _currentOutputDevice) { + setOutputDeviceAsync(outDevice); + } } bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) { @@ -58,6 +72,43 @@ bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) { return result; } +bool AudioDeviceScriptingInterface::setDeviceFromMenu(const QString& deviceMenuName) { + QAudio::Mode mode; + + if (deviceMenuName.indexOf("for Output") != -1) { + mode = QAudio::AudioOutput; + } else if (deviceMenuName.indexOf("for Input") != -1) { + mode = QAudio::AudioInput; + } else { + return false; + } + + for (ScriptingAudioDeviceInfo di: _devices) { + if (mode == di.mode && deviceMenuName.contains(di.name)) { + if (mode == QAudio::AudioOutput) { + setOutputDeviceAsync(di.name); + } else { + setInputDeviceAsync(di.name); + } + return true; + } + } + + return false; +} + +void AudioDeviceScriptingInterface::setInputDeviceAsync(const QString& deviceName) { + QMetaObject::invokeMethod(DependencyManager::get().data(), "switchInputToAudioDevice", + Qt::QueuedConnection, + Q_ARG(const QString&, deviceName)); +} + +void AudioDeviceScriptingInterface::setOutputDeviceAsync(const QString& deviceName) { + QMetaObject::invokeMethod(DependencyManager::get().data(), "switchOutputToAudioDevice", + Qt::QueuedConnection, + Q_ARG(const QString&, deviceName)); +} + QString AudioDeviceScriptingInterface::getInputDevice() { return DependencyManager::get()->getDeviceName(QAudio::AudioInput); } @@ -116,3 +167,105 @@ void AudioDeviceScriptingInterface::setMuted(bool muted) bool AudioDeviceScriptingInterface::getMuted() { return DependencyManager::get()->isMuted(); } + +QVariant AudioDeviceScriptingInterface::data(const QModelIndex& index, int role) const { + //sanity + if (!index.isValid() || index.row() >= _devices.size()) + return QVariant(); + + + if (role == Qt::DisplayRole || role == 
DisplayNameRole) { + return _devices.at(index.row()).name; + } else if (role == SelectedRole) { + return _devices.at(index.row()).selected; + } else if (role == AudioModeRole) { + return (int)_devices.at(index.row()).mode; + } + return QVariant(); +} + +int AudioDeviceScriptingInterface::rowCount(const QModelIndex& parent) const { + Q_UNUSED(parent) + return _devices.size(); +} + +QHash AudioDeviceScriptingInterface::roleNames() const { + QHash roles; + roles.insert(DisplayNameRole, "devicename"); + roles.insert(SelectedRole, "devicechecked"); + roles.insert(AudioModeRole, "devicemode"); + return roles; +} + +void AudioDeviceScriptingInterface::onDeviceChanged() +{ + beginResetModel(); + _outputAudioDevices.clear(); + _devices.clear(); + _currentOutputDevice = getOutputDevice(); + for (QString name: getOutputDevices()) { + ScriptingAudioDeviceInfo di; + di.name = name; + di.selected = (name == _currentOutputDevice); + di.mode = QAudio::AudioOutput; + _devices.append(di); + _outputAudioDevices.append(name); + } + emit outputAudioDevicesChanged(_outputAudioDevices); + + _inputAudioDevices.clear(); + _currentInputDevice = getInputDevice(); + for (QString name: getInputDevices()) { + ScriptingAudioDeviceInfo di; + di.name = name; + di.selected = (name == _currentInputDevice); + di.mode = QAudio::AudioInput; + _devices.append(di); + _inputAudioDevices.append(name); + } + emit inputAudioDevicesChanged(_inputAudioDevices); + + endResetModel(); + emit deviceChanged(); +} + +void AudioDeviceScriptingInterface::onCurrentInputDeviceChanged(const QString& name) +{ + currentDeviceUpdate(name, QAudio::AudioInput); + //we got a signal that device changed. Save it now + SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance(); + settings->setValue("audio_input_device", name); + emit currentInputDeviceChanged(name); +} + +void AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged(const QString& name) +{ + currentDeviceUpdate(name, QAudio::AudioOutput); + //we got a signal that device changed. 
Save it now + SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance(); + settings->setValue("audio_output_device", name); + emit currentOutputDeviceChanged(name); +} + +void AudioDeviceScriptingInterface::currentDeviceUpdate(const QString& name, QAudio::Mode mode) +{ + QVector role; + role.append(SelectedRole); + + for (int i = 0; i < _devices.size(); i++) { + ScriptingAudioDeviceInfo di = _devices.at(i); + if (di.mode != mode) { + continue; + } + if (di.selected && di.name != name ) { + di.selected = false; + _devices[i] = di; + emit dataChanged(index(i, 0), index(i, 0), role); + } + if (di.name == name) { + di.selected = true; + _devices[i] = di; + emit dataChanged(index(i, 0), index(i, 0), role); + } + } +} diff --git a/interface/src/scripting/AudioDeviceScriptingInterface.h b/interface/src/scripting/AudioDeviceScriptingInterface.h index 4d1d47dcba..f912c35288 100644 --- a/interface/src/scripting/AudioDeviceScriptingInterface.h +++ b/interface/src/scripting/AudioDeviceScriptingInterface.h @@ -15,10 +15,18 @@ #include #include #include +#include +#include class AudioEffectOptions; -class AudioDeviceScriptingInterface : public QObject { +struct ScriptingAudioDeviceInfo { + QString name; + bool selected; + QAudio::Mode mode; +}; + +class AudioDeviceScriptingInterface : public QAbstractListModel { Q_OBJECT Q_PROPERTY(QStringList inputAudioDevices READ inputAudioDevices NOTIFY inputAudioDevicesChanged) @@ -32,9 +40,26 @@ public: QStringList outputAudioDevices() const; bool muted(); + QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const override; + int rowCount(const QModelIndex& parent = QModelIndex()) const override; + QHash roleNames() const override; + + enum Roles { + DisplayNameRole = Qt::UserRole, + SelectedRole, + AudioModeRole + }; + +private slots: + void onDeviceChanged(); + void onCurrentInputDeviceChanged(const QString& name); + void onCurrentOutputDeviceChanged(const QString& name); + void currentDeviceUpdate(const QString& name, QAudio::Mode mode); + public slots: bool setInputDevice(const QString& deviceName); bool setOutputDevice(const QString& deviceName); + bool setDeviceFromMenu(const QString& deviceMenuName); QString getInputDevice(); QString getOutputDevice(); @@ -55,15 +80,28 @@ public slots: void setMuted(bool muted); + void setInputDeviceAsync(const QString& deviceName); + void setOutputDeviceAsync(const QString& deviceName); private: AudioDeviceScriptingInterface(); signals: void muteToggled(); void deviceChanged(); + void currentInputDeviceChanged(const QString& name); + void currentOutputDeviceChanged(const QString& name); void mutedChanged(bool muted); void inputAudioDevicesChanged(QStringList inputAudioDevices); void outputAudioDevicesChanged(QStringList outputAudioDevices); + +private: + QVector _devices; + + QStringList _inputAudioDevices; + QStringList _outputAudioDevices; + + QString _currentInputDevice; + QString _currentOutputDevice; }; #endif // hifi_AudioDeviceScriptingInterface_h diff --git a/interface/src/ui/overlays/Overlays.cpp b/interface/src/ui/overlays/Overlays.cpp index 61a283b88c..4970112405 100644 --- a/interface/src/ui/overlays/Overlays.cpp +++ b/interface/src/ui/overlays/Overlays.cpp @@ -408,6 +408,7 @@ RayToOverlayIntersectionResult Overlays::findRayIntersectionInternal(const PickR const QVector& overlaysToInclude, const QVector& overlaysToDiscard, bool visibleOnly, bool collidableOnly) { + QReadLocker lock(&_lock); float bestDistance = std::numeric_limits::max(); bool bestIsFront = false; diff 
--git a/libraries/audio-client/src/AudioClient.cpp b/libraries/audio-client/src/AudioClient.cpp index dae37ffc4b..1282dbb2dc 100644 --- a/libraries/audio-client/src/AudioClient.cpp +++ b/libraries/audio-client/src/AudioClient.cpp @@ -799,7 +799,8 @@ QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) { QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) { QVector<QString> deviceNames; - foreach(QAudioDeviceInfo audioDevice, getAvailableDevices(mode)) { + const QList<QAudioDeviceInfo> &availableDevice = getAvailableDevices(mode); + foreach(const QAudioDeviceInfo &audioDevice, availableDevice) { deviceNames << audioDevice.deviceName().trimmed(); } return deviceNames; @@ -1402,7 +1403,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn _audioInput->stop(); _inputDevice = NULL; - delete _audioInput; + _audioInput->deleteLater(); _audioInput = NULL; _numInputCallbackBytes = 0; @@ -1418,6 +1419,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn if (!inputDeviceInfo.isNull()) { qCDebug(audioclient) << "The audio input device " << inputDeviceInfo.deviceName() << "is available."; _inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed(); + emit currentInputDeviceChanged(_inputAudioDeviceName); if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) { qCDebug(audioclient) << "The format to be used for audio input is" << _inputFormat; @@ -1506,11 +1508,13 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice if (_audioOutput) { _audioOutput->stop(); - delete _audioOutput; + // must be deleted in the next event loop cycle when it is called from notify() + _audioOutput->deleteLater(); _audioOutput = NULL; _loopbackOutputDevice = NULL; - delete _loopbackAudioOutput; + // must be deleted in the next event loop cycle when it is called from notify() + _loopbackAudioOutput->deleteLater(); _loopbackAudioOutput = NULL; delete[] _outputMixBuffer; @@ -1535,6 +1539,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice if (!outputDeviceInfo.isNull()) { qCDebug(audioclient) << "The audio output device " << outputDeviceInfo.deviceName() << "is available."; _outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed(); + emit currentOutputDeviceChanged(_outputAudioDeviceName); if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) { qCDebug(audioclient) << "The format to be used for audio output is" << _outputFormat; diff --git a/libraries/audio-client/src/AudioClient.h b/libraries/audio-client/src/AudioClient.h index 0e5363e0ff..47808767b3 100644 --- a/libraries/audio-client/src/AudioClient.h +++ b/libraries/audio-client/src/AudioClient.h @@ -222,6 +222,9 @@ signals: void muteEnvironmentRequested(glm::vec3 position, float radius); + void currentOutputDeviceChanged(const QString& name); + void currentInputDeviceChanged(const QString& name); + protected: AudioClient(); ~AudioClient(); diff --git a/libraries/avatars-renderer/src/avatars-renderer/Head.cpp b/libraries/avatars-renderer/src/avatars-renderer/Head.cpp index 3935a4513f..93fe246266 100644 --- a/libraries/avatars-renderer/src/avatars-renderer/Head.cpp +++ b/libraries/avatars-renderer/src/avatars-renderer/Head.cpp @@ -151,7 +151,7 @@ void Head::simulate(float deltaTime) { _mouth2, _mouth3, _mouth4, - _blendshapeCoefficients); + _transientBlendshapeCoefficients); applyEyelidOffset(getOrientation()); @@ -234,15 +234,15 @@ void Head::applyEyelidOffset(glm::quat headOrientation) { for (int i = 0;
i < 2; i++) { const int LEFT_EYE = 8; - float eyeCoefficient = _blendshapeCoefficients[i] - _blendshapeCoefficients[LEFT_EYE + i]; // Raw value + float eyeCoefficient = _transientBlendshapeCoefficients[i] - _transientBlendshapeCoefficients[LEFT_EYE + i]; eyeCoefficient = glm::clamp(eyelidOffset + eyeCoefficient * (1.0f - eyelidOffset), -1.0f, 1.0f); if (eyeCoefficient > 0.0f) { - _blendshapeCoefficients[i] = eyeCoefficient; - _blendshapeCoefficients[LEFT_EYE + i] = 0.0f; + _transientBlendshapeCoefficients[i] = eyeCoefficient; + _transientBlendshapeCoefficients[LEFT_EYE + i] = 0.0f; } else { - _blendshapeCoefficients[i] = 0.0f; - _blendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient; + _transientBlendshapeCoefficients[i] = 0.0f; + _transientBlendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient; } } } diff --git a/libraries/avatars/src/AvatarData.cpp b/libraries/avatars/src/AvatarData.cpp index cb819c6b20..6992e66f0e 100644 --- a/libraries/avatars/src/AvatarData.cpp +++ b/libraries/avatars/src/AvatarData.cpp @@ -445,17 +445,17 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent if (hasFaceTrackerInfo) { auto startSection = destinationBuffer; auto faceTrackerInfo = reinterpret_cast(destinationBuffer); + auto blendshapeCoefficients = _headData->getSummedBlendshapeCoefficients(); faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink; faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink; faceTrackerInfo->averageLoudness = _headData->_averageLoudness; faceTrackerInfo->browAudioLift = _headData->_browAudioLift; - faceTrackerInfo->numBlendshapeCoefficients = _headData->_blendshapeCoefficients.size(); + faceTrackerInfo->numBlendshapeCoefficients = blendshapeCoefficients.size(); destinationBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo); - // followed by a variable number of float coefficients - memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(), _headData->_blendshapeCoefficients.size() * sizeof(float)); - destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float); + memcpy(destinationBuffer, blendshapeCoefficients.data(), blendshapeCoefficients.size() * sizeof(float)); + destinationBuffer += blendshapeCoefficients.size() * sizeof(float); int numBytes = destinationBuffer - startSection; if (outboundDataRateOut) { @@ -965,7 +965,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) { const int coefficientsSize = sizeof(float) * numCoefficients; PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize); _headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy! - _headData->_baseBlendshapeCoefficients.resize(numCoefficients); + _headData->_transientBlendshapeCoefficients.resize(numCoefficients); memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize); sourceBuffer += coefficientsSize; int numBytesRead = sourceBuffer - startSection; @@ -1495,11 +1495,14 @@ QUrl AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) const { return _skeletonModelURL.scheme() == "file" ? emptyURL : _skeletonModelURL; } -void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged) { +void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, const qint64 clockSkew) { - if (identity.updatedAt < _identityUpdatedAt) { + // Consider the case where this packet is being processed on Client A, and Client A is connected to Sandbox B. 
+ // If Client A's system clock is *ahead of* Sandbox B's system clock, "clockSkew" will be *negative*. + // If Client A's system clock is *behind* Sandbox B's system clock, "clockSkew" will be *positive*. + if ((_identityUpdatedAt > identity.updatedAt - clockSkew) && (_identityUpdatedAt != 0)) { qCDebug(avatars) << "Ignoring late identity packet for avatar " << getSessionUUID() - << "identity.updatedAt:" << identity.updatedAt << "_identityUpdatedAt:" << _identityUpdatedAt; + << "_identityUpdatedAt (" << _identityUpdatedAt << ") is greater than identity.updatedAt - clockSkew (" << identity.updatedAt << "-" << clockSkew << ")"; return; } @@ -1535,7 +1538,7 @@ void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityC // use the timestamp from this identity, since we want to honor the updated times in "server clock" // this will overwrite any changes we made locally to this AvatarData's _identityUpdatedAt - _identityUpdatedAt = identity.updatedAt; + _identityUpdatedAt = identity.updatedAt - clockSkew; } QByteArray AvatarData::identityByteArray() const { diff --git a/libraries/avatars/src/AvatarData.h b/libraries/avatars/src/AvatarData.h index 6d801793b7..e6e0571878 100644 --- a/libraries/avatars/src/AvatarData.h +++ b/libraries/avatars/src/AvatarData.h @@ -538,7 +538,7 @@ public: // identityChanged returns true if identity has changed, false otherwise. // displayNameChanged returns true if displayName has changed, false otherwise. - void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged); + void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, const qint64 clockSkew); QByteArray identityByteArray() const; diff --git a/libraries/avatars/src/AvatarHashMap.cpp b/libraries/avatars/src/AvatarHashMap.cpp index 0d341c684e..155ef9a0a2 100644 --- a/libraries/avatars/src/AvatarHashMap.cpp +++ b/libraries/avatars/src/AvatarHashMap.cpp @@ -148,7 +148,8 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer auto avatar = newOrExistingAvatar(identity.uuid, sendingNode); bool identityChanged = false; bool displayNameChanged = false; - avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged); + // In this case, the "sendingNode" is the Avatar Mixer. 
+ avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged, sendingNode->getClockSkewUsec()); } } diff --git a/libraries/avatars/src/HeadData.cpp b/libraries/avatars/src/HeadData.cpp index b55be7c156..2e4eec73a8 100644 --- a/libraries/avatars/src/HeadData.cpp +++ b/libraries/avatars/src/HeadData.cpp @@ -34,8 +34,9 @@ HeadData::HeadData(AvatarData* owningAvatar) : _rightEyeBlink(0.0f), _averageLoudness(0.0f), _browAudioLift(0.0f), - _baseBlendshapeCoefficients(QVector(0, 0.0f)), - _currBlendShapeCoefficients(QVector(0, 0.0f)), + _blendshapeCoefficients(QVector(0, 0.0f)), + _transientBlendshapeCoefficients(QVector(0, 0.0f)), + _summedBlendshapeCoefficients(QVector(0, 0.0f)), _owningAvatar(owningAvatar) { @@ -85,22 +86,22 @@ static const QMap& getBlendshapesLookupMap() { } const QVector& HeadData::getSummedBlendshapeCoefficients() { - int maxSize = std::max(_baseBlendshapeCoefficients.size(), _blendshapeCoefficients.size()); - if (_currBlendShapeCoefficients.size() != maxSize) { - _currBlendShapeCoefficients.resize(maxSize); + int maxSize = std::max(_blendshapeCoefficients.size(), _transientBlendshapeCoefficients.size()); + if (_summedBlendshapeCoefficients.size() != maxSize) { + _summedBlendshapeCoefficients.resize(maxSize); } for (int i = 0; i < maxSize; i++) { - if (i >= _baseBlendshapeCoefficients.size()) { - _currBlendShapeCoefficients[i] = _blendshapeCoefficients[i]; - } else if (i >= _blendshapeCoefficients.size()) { - _currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i]; + if (i >= _blendshapeCoefficients.size()) { + _summedBlendshapeCoefficients[i] = _transientBlendshapeCoefficients[i]; + } else if (i >= _transientBlendshapeCoefficients.size()) { + _summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i]; } else { - _currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i] + _blendshapeCoefficients[i]; + _summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i] + _transientBlendshapeCoefficients[i]; } } - return _currBlendShapeCoefficients; + return _summedBlendshapeCoefficients; } void HeadData::setBlendshape(QString name, float val) { @@ -112,10 +113,10 @@ void HeadData::setBlendshape(QString name, float val) { if (_blendshapeCoefficients.size() <= it.value()) { _blendshapeCoefficients.resize(it.value() + 1); } - if (_baseBlendshapeCoefficients.size() <= it.value()) { - _baseBlendshapeCoefficients.resize(it.value() + 1); + if (_transientBlendshapeCoefficients.size() <= it.value()) { + _transientBlendshapeCoefficients.resize(it.value() + 1); } - _baseBlendshapeCoefficients[it.value()] = val; + _blendshapeCoefficients[it.value()] = val; } } @@ -131,14 +132,16 @@ QJsonObject HeadData::toJson() const { QJsonObject blendshapesJson; for (auto name : blendshapeLookupMap.keys()) { auto index = blendshapeLookupMap[name]; - if (index >= _blendshapeCoefficients.size()) { - continue; + float value = 0.0f; + if (index < _blendshapeCoefficients.size()) { + value += _blendshapeCoefficients[index]; } - auto value = _blendshapeCoefficients[index]; - if (value == 0.0f) { - continue; + if (index < _transientBlendshapeCoefficients.size()) { + value += _transientBlendshapeCoefficients[index]; + } + if (value != 0.0f) { + blendshapesJson[name] = value; } - blendshapesJson[name] = value; } if (!blendshapesJson.isEmpty()) { headJson[JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS] = blendshapesJson; @@ -163,8 +166,8 @@ void HeadData::fromJson(const QJsonObject& json) { QJsonArray blendshapeCoefficientsJson = jsonValue.toArray(); for (const auto& blendshapeCoefficient 
: blendshapeCoefficientsJson) { blendshapeCoefficients.push_back((float)blendshapeCoefficient.toDouble()); - setBlendshapeCoefficients(blendshapeCoefficients); } + setBlendshapeCoefficients(blendshapeCoefficients); } else if (jsonValue.isObject()) { QJsonObject blendshapeCoefficientsJson = jsonValue.toObject(); for (const QString& name : blendshapeCoefficientsJson.keys()) { diff --git a/libraries/avatars/src/HeadData.h b/libraries/avatars/src/HeadData.h index dbed0a6a65..9b28616b3f 100644 --- a/libraries/avatars/src/HeadData.h +++ b/libraries/avatars/src/HeadData.h @@ -93,8 +93,8 @@ protected: float _browAudioLift; QVector _blendshapeCoefficients; - QVector _baseBlendshapeCoefficients; - QVector _currBlendShapeCoefficients; + QVector _transientBlendshapeCoefficients; + QVector _summedBlendshapeCoefficients; AvatarData* _owningAvatar; private: diff --git a/libraries/script-engine/src/AudioScriptingInterface.h b/libraries/script-engine/src/AudioScriptingInterface.h index e97bc329c6..5ec8ce4b12 100644 --- a/libraries/script-engine/src/AudioScriptingInterface.h +++ b/libraries/script-engine/src/AudioScriptingInterface.h @@ -24,6 +24,7 @@ class AudioScriptingInterface : public QObject, public Dependency { SINGLETON_DEPENDENCY public: + virtual ~AudioScriptingInterface() {} void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; } protected: diff --git a/plugins/openvr/src/ViveControllerManager.cpp b/plugins/openvr/src/ViveControllerManager.cpp index 6e5697730b..8c357103c9 100644 --- a/plugins/openvr/src/ViveControllerManager.cpp +++ b/plugins/openvr/src/ViveControllerManager.cpp @@ -22,6 +22,8 @@ #include #include #include +#include +#include #include #include #include @@ -64,7 +66,7 @@ static glm::mat4 computeOffset(glm::mat4 defaultToReferenceMat, glm::mat4 defaul } static bool sortPucksYPosition(std::pair firstPuck, std::pair secondPuck) { - return (firstPuck.second.translation.y < firstPuck.second.translation.y); + return (firstPuck.second.translation.y < secondPuck.second.translation.y); } bool ViveControllerManager::isSupported() const { @@ -243,6 +245,7 @@ void ViveControllerManager::InputDevice::calibrateOrUncalibrate(const controller } void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibrationData& inputCalibration) { + qDebug() << "Puck Calibration: Starting..."; // convert the hmd head from sensor space to avatar space glm::mat4 hmdSensorFlippedMat = inputCalibration.hmdSensorMat * Matrices::Y_180; glm::mat4 sensorToAvatarMat = glm::inverse(inputCalibration.avatarMat) * inputCalibration.sensorToWorldMat; @@ -262,20 +265,31 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat); int puckCount = (int)_validTrackedObjects.size(); - if (puckCount == MIN_PUCK_COUNT) { - _config = Config::Feet; - } else if (puckCount == MIN_FEET_AND_HIPS) { - _config = Config::FeetAndHips; - } else if (puckCount >= MIN_FEET_HIPS_CHEST) { - _config = Config::FeetHipsAndChest; - } else { + qDebug() << "Puck Calibration: " << puckCount << " pucks found for calibration"; + _config = _preferedConfig; + if (_config != Config::Auto && puckCount < MIN_PUCK_COUNT) { + qDebug() << "Puck Calibration: Failed: Could not meet the minimal # of pucks"; + uncalibrate(); return; + } else if (_config == Config::Auto){ + if (puckCount == MIN_PUCK_COUNT) { + _config = Config::Feet; + qDebug() << "Puck Calibration: Auto Config: 
" << configToString(_config) << " configuration"; + } else if (puckCount == MIN_FEET_AND_HIPS) { + _config = Config::FeetAndHips; + qDebug() << "Puck Calibration: Auto Config: " << configToString(_config) << " configuration"; + } else if (puckCount >= MIN_FEET_HIPS_CHEST) { + _config = Config::FeetHipsAndChest; + qDebug() << "Puck Calibration: Auto Config: " << configToString(_config) << " configuration"; + } else { + qDebug() << "Puck Calibration: Auto Config Failed: Could not meet the minimal # of pucks"; + uncalibrate(); + return; + } } std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition); - - auto& firstFoot = _validTrackedObjects[FIRST_FOOT]; auto& secondFoot = _validTrackedObjects[SECOND_FOOT]; controller::Pose& firstFootPose = firstFoot.second; @@ -296,19 +310,25 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr if (_config == Config::Feet) { // done - } else if (_config == Config::FeetAndHips) { + } else if (_config == Config::FeetAndHips && puckCount >= MIN_FEET_AND_HIPS) { _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first; _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second); - } else if (_config == Config::FeetHipsAndChest) { + } else if (_config == Config::FeetHipsAndChest && puckCount >= MIN_FEET_HIPS_CHEST) { _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first; _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second); _jointToPuckMap[controller::SPINE2] = _validTrackedObjects[CHEST].first; _pucksOffset[_validTrackedObjects[CHEST].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultSpine2, _validTrackedObjects[CHEST].second); + } else { + qDebug() << "Puck Calibration: " << configToString(_config) << " Config Failed: Could not meet the minimal # of pucks"; + uncalibrate(); + return; } _calibrated = true; + qDebug() << "PuckCalibration: " << configToString(_config) << " Configuration Successful"; } void ViveControllerManager::InputDevice::uncalibrate() { + _config = Config::Auto; _pucksOffset.clear(); _jointToPuckMap.clear(); _calibrated = false; @@ -544,6 +564,74 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef } } +void ViveControllerManager::InputDevice::loadSettings() { + Settings settings; + settings.beginGroup("PUCK_CONFIG"); + { + _preferedConfig = (Config)settings.value("configuration", QVariant((int)Config::Auto)).toInt(); + } + settings.endGroup(); +} + +void ViveControllerManager::InputDevice::saveSettings() const { + Settings settings; + settings.beginGroup("PUCK_CONFIG"); + { + settings.setValue(QString("configuration"), (int)_preferedConfig); + } + settings.endGroup(); +} + +QString ViveControllerManager::InputDevice::configToString(Config config) { + QString currentConfig; + switch (config) { + case Config::Auto: + currentConfig = "Auto"; + break; + + case Config::Feet: + currentConfig = "Feet"; + break; + + case Config::FeetAndHips: + currentConfig = "FeetAndHips"; + break; + + case Config::FeetHipsAndChest: + currentConfig = "FeetHipsAndChest"; + break; + } + return currentConfig; +} + +void ViveControllerManager::InputDevice::setConfigFromString(const QString& value) { + if (value == "Auto") { + _preferedConfig = Config::Auto; + } else if (value == "Feet") { + _preferedConfig = Config::Feet; + } else if (value == 
"FeetAndHips") { + _preferedConfig = Config::FeetAndHips; + } else if (value == "FeetHipsAndChest") { + _preferedConfig = Config::FeetHipsAndChest; + } +} + +void ViveControllerManager::InputDevice::createPreferences() { + loadSettings(); + auto preferences = DependencyManager::get(); + static const QString VIVE_PUCKS_CONFIG = "Vive Pucks Configuration"; + + { + auto getter = [this]()->QString { return configToString(_preferedConfig); }; + auto setter = [this](const QString& value) { setConfigFromString(value); saveSettings(); }; + auto preference = new ComboBoxPreference(VIVE_PUCKS_CONFIG, "Configuration", getter, setter); + QStringList list = (QStringList() << "Auto" << "Feet" << "FeetAndHips" << "FeetHipsAndChest"); + preference->setItems(list); + preferences->addPreference(preference); + + } +} + controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const { using namespace controller; QVector availableInputs{ diff --git a/plugins/openvr/src/ViveControllerManager.h b/plugins/openvr/src/ViveControllerManager.h index 4e8b2b3a04..c2ebdc6144 100644 --- a/plugins/openvr/src/ViveControllerManager.h +++ b/plugins/openvr/src/ViveControllerManager.h @@ -50,14 +50,14 @@ public: private: class InputDevice : public controller::InputDevice { public: - InputDevice(vr::IVRSystem*& system) : controller::InputDevice("Vive"), _system(system) {} + InputDevice(vr::IVRSystem*& system) : controller::InputDevice("Vive"), _system(system) { createPreferences(); } private: // Device functions controller::Input::NamedVector getAvailableInputs() const override; QString getDefaultMappingConfig() const override; void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override; void focusOutEvent() override; - + void createPreferences(); bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override; void hapticsHelper(float deltaTime, bool leftHand); void calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration); @@ -100,8 +100,9 @@ private: float _timer { 0.0f }; glm::vec2 _stick { 0.0f, 0.0f }; }; - enum class Config { Feet, FeetAndHips, FeetHipsAndChest, NoConfig }; - Config _config { Config::NoConfig }; + enum class Config { Feet, FeetAndHips, FeetHipsAndChest, Auto }; + Config _config { Config::Auto }; + Config _preferedConfig { Config::Auto }; FilteredStick _filteredLeftStick; FilteredStick _filteredRightStick; @@ -125,6 +126,10 @@ private: bool _timeTilCalibrationSet { false }; mutable std::recursive_mutex _lock; + QString configToString(Config config); + void setConfigFromString(const QString& value); + void loadSettings(); + void saveSettings() const; friend class ViveControllerManager; }; diff --git a/scripts/system/makeUserConnection.js b/scripts/system/makeUserConnection.js index a8afad2e1c..37a334bd70 100644 --- a/scripts/system/makeUserConnection.js +++ b/scripts/system/makeUserConnection.js @@ -198,7 +198,7 @@ } var animationData = {}; - function updateAnimationData() { + function updateAnimationData(verticalOffset) { // all we are doing here is moving the right hand to a spot // that is in front of and a bit above the hips. 
Basing how // far in front as scaling with the avatar's height (say hips @@ -209,6 +209,9 @@ offset = 0.8 * MyAvatar.getAbsoluteJointTranslationInObjectFrame(headIndex).y; } animationData.rightHandPosition = Vec3.multiply(offset, {x: -0.25, y: 0.8, z: 1.3}); + if (verticalOffset) { + animationData.rightHandPosition.y += verticalOffset; + } animationData.rightHandRotation = Quat.fromPitchYawRollDegrees(90, 0, 90); } function shakeHandsAnimation() { @@ -347,7 +350,32 @@ } return false; } - + function findNearestAvatar() { + // We only look some max distance away (much larger than the handshake distance, but still...) + var minDistance = MAX_AVATAR_DISTANCE * 20; + var closestAvatar; + AvatarList.getAvatarIdentifiers().forEach(function (id) { + var avatar = AvatarList.getAvatar(id); + if (avatar && avatar.sessionUUID != MyAvatar.sessionUUID) { + var currentDistance = Vec3.distance(avatar.position, MyAvatar.position); + if (minDistance > currentDistance) { + minDistance = currentDistance; + closestAvatar = avatar; + } + } + }); + return closestAvatar; + } + function adjustAnimationHeight() { + var avatar = findNearestAvatar(); + if (avatar) { + var myHeadIndex = MyAvatar.getJointIndex("Head"); + var otherHeadIndex = avatar.getJointIndex("Head"); + var diff = (avatar.getJointPosition(otherHeadIndex).y - MyAvatar.getJointPosition(myHeadIndex).y) / 2; + print("head height difference: " + diff); + updateAnimationData(diff); + } + } function findNearestWaitingAvatar() { var handPosition = getHandPosition(MyAvatar, currentHandJointIndex); var minDistance = MAX_AVATAR_DISTANCE; @@ -436,6 +464,10 @@ handStringMessageSend({ key: "waiting", }); + // potentially adjust height of handshake + if (fromKeyboard) { + adjustAnimationHeight(); + } lookForWaitingAvatar(); } } diff --git a/scripts/system/selectAudioDevice.js b/scripts/system/selectAudioDevice.js index 2dd426932f..2d40795692 100644 --- a/scripts/system/selectAudioDevice.js +++ b/scripts/system/selectAudioDevice.js @@ -17,32 +17,22 @@ const INPUT = "Input"; const OUTPUT = "Output"; -function parseMenuItem(item) { - const USE = "Use "; - const FOR_INPUT = " for " + INPUT; - const FOR_OUTPUT = " for " + OUTPUT; - if (item.slice(0, USE.length) == USE) { - if (item.slice(-FOR_INPUT.length) == FOR_INPUT) { - return { device: item.slice(USE.length, -FOR_INPUT.length), mode: INPUT }; - } else if (item.slice(-FOR_OUTPUT.length) == FOR_OUTPUT) { - return { device: item.slice(USE.length, -FOR_OUTPUT.length), mode: OUTPUT }; - } - } -} - +const SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT = 300; // // VAR DEFINITIONS // var debugPrintStatements = true; const INPUT_DEVICE_SETTING = "audio_input_device"; const OUTPUT_DEVICE_SETTING = "audio_output_device"; -var audioDevicesList = []; +var audioDevicesList = []; // placeholder for menu items var wasHmdActive = false; // assume it's not active to start var switchedAudioInputToHMD = false; var switchedAudioOutputToHMD = false; var previousSelectedInputAudioDevice = ""; var previousSelectedOutputAudioDevice = ""; -var skipMenuEvents = true; + +var interfaceInputDevice = ""; +var interfaceOutputDevice = ""; // // BEGIN FUNCTION DEFINITIONS @@ -56,56 +46,37 @@ function debug() { function setupAudioMenus() { // menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches - skipMenuEvents = true; - Script.setTimeout(function() { skipMenuEvents = false; }, 200); - removeAudioMenus(); // Setup audio input devices Menu.addSeparator("Audio", "Input Audio Device"); - var inputDevices = 
AudioDevice.getInputDevices(); - for (var i = 0; i < inputDevices.length; i++) { - var audioDeviceMenuString = "Use " + inputDevices[i] + " for Input"; + var currentInputDevice = AudioDevice.getInputDevice() + for (var i = 0; i < AudioDevice.inputAudioDevices.length; i++) { + var audioDeviceMenuString = "Use " + AudioDevice.inputAudioDevices[i] + " for Input"; Menu.addMenuItem({ menuName: "Audio", menuItemName: audioDeviceMenuString, isCheckable: true, - isChecked: inputDevices[i] == AudioDevice.getInputDevice() + isChecked: AudioDevice.inputAudioDevices[i] == currentInputDevice }); audioDevicesList.push(audioDeviceMenuString); } // Setup audio output devices Menu.addSeparator("Audio", "Output Audio Device"); - var outputDevices = AudioDevice.getOutputDevices(); - for (var i = 0; i < outputDevices.length; i++) { - var audioDeviceMenuString = "Use " + outputDevices[i] + " for Output"; + var currentOutputDevice = AudioDevice.getOutputDevice() + for (var i = 0; i < AudioDevice.outputAudioDevices.length; i++) { + var audioDeviceMenuString = "Use " + AudioDevice.outputAudioDevices[i] + " for Output"; Menu.addMenuItem({ menuName: "Audio", menuItemName: audioDeviceMenuString, isCheckable: true, - isChecked: outputDevices[i] == AudioDevice.getOutputDevice() + isChecked: AudioDevice.outputAudioDevices[i] == currentOutputDevice }); audioDevicesList.push(audioDeviceMenuString); } } -function checkDeviceMismatch() { - var inputDeviceSetting = Settings.getValue(INPUT_DEVICE_SETTING); - var interfaceInputDevice = AudioDevice.getInputDevice(); - if (interfaceInputDevice != inputDeviceSetting) { - debug("Input Setting & Device mismatch! Input SETTING: " + inputDeviceSetting + "Input DEVICE IN USE: " + interfaceInputDevice); - switchAudioDevice("Use " + inputDeviceSetting + " for Input"); - } - - var outputDeviceSetting = Settings.getValue(OUTPUT_DEVICE_SETTING); - var interfaceOutputDevice = AudioDevice.getOutputDevice(); - if (interfaceOutputDevice != outputDeviceSetting) { - debug("Output Setting & Device mismatch! Output SETTING: " + outputDeviceSetting + "Output DEVICE IN USE: " + interfaceOutputDevice); - switchAudioDevice("Use " + outputDeviceSetting + " for Output"); - } -} - function removeAudioMenus() { Menu.removeSeparator("Audio", "Input Audio Device"); Menu.removeSeparator("Audio", "Output Audio Device"); @@ -124,67 +95,28 @@ function removeAudioMenus() { function onDevicechanged() { debug("System audio devices changed. Removing and replacing Audio Menus..."); setupAudioMenus(); - checkDeviceMismatch(); } function onMenuEvent(audioDeviceMenuString) { - if (!skipMenuEvents) { - switchAudioDevice(audioDeviceMenuString); + if (Menu.isOptionChecked(audioDeviceMenuString) && + (audioDeviceMenuString !== interfaceInputDevice && + audioDeviceMenuString !== interfaceOutputDevice)) { + AudioDevice.setDeviceFromMenu(audioDeviceMenuString) } } -function switchAudioDevice(audioDeviceMenuString) { - // if the device is not plugged in, short-circuit - if (!~audioDevicesList.indexOf(audioDeviceMenuString)) { - return; - } - - var selection = parseMenuItem(audioDeviceMenuString); - if (!selection) { - debug("Invalid Audio audioDeviceMenuString! 
Doesn't end with 'for Input' or 'for Output'"); - return; - } - - // menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches - skipMenuEvents = true; - Script.setTimeout(function() { skipMenuEvents = false; }, 200); - - var selectedDevice = selection.device; - if (selection.mode == INPUT) { - var currentInputDevice = AudioDevice.getInputDevice(); - if (selectedDevice != currentInputDevice) { - debug("Switching audio INPUT device from " + currentInputDevice + " to " + selectedDevice); - Menu.setIsOptionChecked("Use " + currentInputDevice + " for Input", false); - if (AudioDevice.setInputDevice(selectedDevice)) { - Settings.setValue(INPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - } else { - debug("Error setting audio input device!") - Menu.setIsOptionChecked(audioDeviceMenuString, false); - } +function onCurrentDeviceChanged() { + debug("System audio device switched. "); + interfaceInputDevice = "Use " + AudioDevice.getInputDevice() + " for Input"; + interfaceOutputDevice = "Use " + AudioDevice.getOutputDevice() + " for Output"; + for (var index = 0; index < audioDevicesList.length; index++) { + if (audioDevicesList[index] === interfaceInputDevice || + audioDevicesList[index] === interfaceOutputDevice) { + if (Menu.isOptionChecked(audioDevicesList[index]) === false) + Menu.setIsOptionChecked(audioDevicesList[index], true); } else { - debug("Selected input device is the same as the current input device!") - Settings.setValue(INPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - AudioDevice.setInputDevice(selectedDevice); // Still try to force-set the device (in case the user's trying to forcefully debug an issue) - } - } else if (selection.mode == OUTPUT) { - var currentOutputDevice = AudioDevice.getOutputDevice(); - if (selectedDevice != currentOutputDevice) { - debug("Switching audio OUTPUT device from " + currentOutputDevice + " to " + selectedDevice); - Menu.setIsOptionChecked("Use " + currentOutputDevice + " for Output", false); - if (AudioDevice.setOutputDevice(selectedDevice)) { - Settings.setValue(OUTPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - } else { - debug("Error setting audio output device!") - Menu.setIsOptionChecked(audioDeviceMenuString, false); - } - } else { - debug("Selected output device is the same as the current output device!") - Settings.setValue(OUTPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - AudioDevice.setOutputDevice(selectedDevice); // Still try to force-set the device (in case the user's trying to forcefully debug an issue) + if (Menu.isOptionChecked(audioDevicesList[index]) === true) + Menu.setIsOptionChecked(audioDevicesList[index], false); } } } @@ -192,12 +124,12 @@ function switchAudioDevice(audioDeviceMenuString) { function restoreAudio() { if (switchedAudioInputToHMD) { debug("Switching back from HMD preferred audio input to: " + previousSelectedInputAudioDevice); - switchAudioDevice("Use " + previousSelectedInputAudioDevice + " for Input"); + AudioDevice.setInputDeviceAsync(previousSelectedInputAudioDevice) switchedAudioInputToHMD = false; } if (switchedAudioOutputToHMD) { debug("Switching back from HMD preferred audio output to: " + previousSelectedOutputAudioDevice); - switchAudioDevice("Use " + previousSelectedOutputAudioDevice + " for Output"); + AudioDevice.setOutputDeviceAsync(previousSelectedOutputAudioDevice) 
switchedAudioOutputToHMD = false; } } @@ -224,7 +156,7 @@ function checkHMDAudio() { debug("previousSelectedInputAudioDevice: " + previousSelectedInputAudioDevice); if (hmdPreferredAudioInput != previousSelectedInputAudioDevice) { switchedAudioInputToHMD = true; - switchAudioDevice("Use " + hmdPreferredAudioInput + " for Input"); + AudioDevice.setInputDeviceAsync(hmdPreferredAudioInput) } } if (hmdPreferredAudioOutput !== "") { @@ -233,7 +165,7 @@ function checkHMDAudio() { debug("previousSelectedOutputAudioDevice: " + previousSelectedOutputAudioDevice); if (hmdPreferredAudioOutput != previousSelectedOutputAudioDevice) { switchedAudioOutputToHMD = true; - switchAudioDevice("Use " + hmdPreferredAudioOutput + " for Output"); + AudioDevice.setOutputDeviceAsync(hmdPreferredAudioOutput) } } } else { @@ -255,14 +187,15 @@ function checkHMDAudio() { Script.setTimeout(function () { debug("Connecting deviceChanged(), displayModeChanged(), and switchAudioDevice()..."); AudioDevice.deviceChanged.connect(onDevicechanged); + AudioDevice.currentInputDeviceChanged.connect(onCurrentDeviceChanged); + AudioDevice.currentOutputDeviceChanged.connect(onCurrentDeviceChanged); HMD.displayModeChanged.connect(checkHMDAudio); Menu.menuItemEvent.connect(onMenuEvent); debug("Setting up Audio I/O menu for the first time..."); setupAudioMenus(); - checkDeviceMismatch(); debug("Checking HMD audio status...") checkHMDAudio(); -}, 3000); +}, SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT); debug("Connecting scriptEnding()"); Script.scriptEnding.connect(function () { @@ -270,6 +203,8 @@ Script.scriptEnding.connect(function () { removeAudioMenus(); Menu.menuItemEvent.disconnect(onMenuEvent); HMD.displayModeChanged.disconnect(checkHMDAudio); + AudioDevice.currentInputDeviceChanged.disconnect(onCurrentDeviceChanged); + AudioDevice.currentOutputDeviceChanged.disconnect(onCurrentDeviceChanged); AudioDevice.deviceChanged.disconnect(onDevicechanged); }); diff --git a/unpublishedScripts/marketplace/tablet-raiseHand/tablet-raiseHand.js b/unpublishedScripts/marketplace/tablet-raiseHand/tablet-raiseHand.js new file mode 100644 index 0000000000..f7702053a4 --- /dev/null +++ b/unpublishedScripts/marketplace/tablet-raiseHand/tablet-raiseHand.js @@ -0,0 +1,102 @@ +"use strict"; +// +// tablet-raiseHand.js +// +// client script that creates a tablet button to raise hand +// +// Created by Triplelexx on 17/04/22 +// Copyright 2017 High Fidelity, Inc. +// +// Hand icons adapted from https://linearicons.com, created by Perxis https://perxis.com CC BY-SA 4.0 license. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +(function() { // BEGIN LOCAL_SCOPE + var BUTTON_NAME = "RAISE\nHAND"; + var USERCONNECTION_MESSAGE_CHANNEL = "io.highfidelity.makeUserConnection"; + var DEBUG_PREFIX = "TABLET RAISE HAND: "; + var isRaiseHandButtonActive = false; + var animHandlerId; + + var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system"); + var button = tablet.addButton({ + text: BUTTON_NAME, + icon: "icons/tablet-icons/raise-hand-i.svg", + activeIcon: "icons/tablet-icons/raise-hand-a.svg" + }); + + function onClicked() { + isRaiseHandButtonActive = !isRaiseHandButtonActive; + button.editProperties({ isActive: isRaiseHandButtonActive }); + if (isRaiseHandButtonActive) { + removeAnimation(); + animHandlerId = MyAvatar.addAnimationStateHandler(raiseHandAnimation, []); + Messages.subscribe(USERCONNECTION_MESSAGE_CHANNEL); + Messages.messageReceived.connect(messageHandler); + } else { + removeAnimation(); + Messages.unsubscribe(USERCONNECTION_MESSAGE_CHANNEL); + Messages.messageReceived.disconnect(messageHandler); + } + } + + function removeAnimation() { + if (animHandlerId) { + animHandlerId = MyAvatar.removeAnimationStateHandler(animHandlerId); + } + } + + function raiseHandAnimation(animationProperties) { + // all we are doing here is moving the right hand to a spot that is above the hips. + var headIndex = MyAvatar.getJointIndex("Head"); + var offset = 0.0; + var result = {}; + if (headIndex) { + offset = 0.85 * MyAvatar.getAbsoluteJointTranslationInObjectFrame(headIndex).y; + } + var handPos = Vec3.multiply(offset, { x: -0.7, y: 1.25, z: 0.25 }); + result.rightHandPosition = handPos; + result.rightHandRotation = Quat.fromPitchYawRollDegrees(0, 0, 0); + return result; + } + + function messageHandler(channel, messageString, senderID) { + if (channel !== USERCONNECTION_MESSAGE_CHANNEL && senderID !== MyAvatar.sessionUUID) { + return; + } + var message = {}; + try { + message = JSON.parse(messageString); + } catch (e) { + print(DEBUG_PREFIX + "messageHandler error: " + e); + } + switch (message.key) { + case "waiting": + case "connecting": + case "connectionAck": + case "connectionRequest": + case "done": + removeAnimation(); + if (isRaiseHandButtonActive) { + isRaiseHandButtonActive = false; + button.editProperties({ isActive: isRaiseHandButtonActive }); + } + break; + default: + print(DEBUG_PREFIX + "messageHandler unknown message: " + message); + break; + } + } + + button.clicked.connect(onClicked); + + Script.scriptEnding.connect(function() { + Messages.unsubscribe(USERCONNECTION_MESSAGE_CHANNEL); + Messages.messageReceived.disconnect(messageHandler); + button.clicked.disconnect(onClicked); + tablet.removeButton(button); + removeAnimation(); + }); +}()); // END LOCAL_SCOPE
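
The device-switching path in this patch is asynchronous end to end: a script calls AudioDevice.setInputDeviceAsync() or setOutputDeviceAsync(), the request is queued onto the AudioClient thread, and the outcome comes back through the currentInputDeviceChanged / currentOutputDeviceChanged signals instead of a boolean return value. The following is a minimal client-script sketch of that flow; it uses only the names this patch exposes on the AudioDevice scripting interface, and the script name, device choice, and log text are illustrative.

// deviceSwitchExample.js -- sketch of the asynchronous AudioDevice API introduced by this patch.
// Assumes the Interface scripting globals AudioDevice, Script, and print are available.
(function () {
    function onInputChanged(name) {
        // Emitted by AudioClient::switchInputToAudioDevice() once the switch has actually happened.
        print("Audio input is now: " + name);
    }
    function onOutputChanged(name) {
        print("Audio output is now: " + name);
    }

    AudioDevice.currentInputDeviceChanged.connect(onInputChanged);
    AudioDevice.currentOutputDeviceChanged.connect(onOutputChanged);

    // inputAudioDevices / outputAudioDevices are QStringList properties backed by the new list model.
    var devices = AudioDevice.inputAudioDevices;
    if (devices.length > 0 && devices[0] !== AudioDevice.getInputDevice()) {
        // Returns immediately; wait for currentInputDeviceChanged rather than polling getInputDevice().
        AudioDevice.setInputDeviceAsync(devices[0]);
    }

    Script.scriptEnding.connect(function () {
        AudioDevice.currentInputDeviceChanged.disconnect(onInputChanged);
        AudioDevice.currentOutputDeviceChanged.disconnect(onOutputChanged);
    });
}());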