diff --git a/assignment-client/src/avatars/AvatarMixer.cpp b/assignment-client/src/avatars/AvatarMixer.cpp index 05dbfee912..998799f5e6 100644 --- a/assignment-client/src/avatars/AvatarMixer.cpp +++ b/assignment-client/src/avatars/AvatarMixer.cpp @@ -402,7 +402,7 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes AvatarData::parseAvatarIdentityPacket(message->getMessage(), identity); bool identityChanged = false; bool displayNameChanged = false; - avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged); + avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged, senderNode->getClockSkewUsec()); if (identityChanged) { QMutexLocker nodeDataLocker(&nodeData->getMutex()); nodeData->flagIdentityChange(); diff --git a/interface/resources/qml/hifi/Audio.qml b/interface/resources/qml/hifi/Audio.qml index d0c3122100..66760ff290 100644 --- a/interface/resources/qml/hifi/Audio.qml +++ b/interface/resources/qml/hifi/Audio.qml @@ -35,11 +35,6 @@ Rectangle { property string title: "Audio Options" signal sendToScript(var message); - //set models after Components is shown - Component.onCompleted: { - refreshTimer.start() - refreshTimerOutput.start() - } Component { id: separator @@ -84,7 +79,7 @@ Rectangle { } Connections { - target: AvatarInputs + target: AvatarInputs !== undefined ? AvatarInputs : null onShowAudioToolsChanged: { audioTools.checkbox.checked = showAudioTools } @@ -105,10 +100,12 @@ Rectangle { id: audioTools width: parent.width anchors { left: parent.left; right: parent.right; leftMargin: 30 } - checkbox.checked: AvatarInputs.showAudioTools + checkbox.checked: AvatarInputs !== undefined ? AvatarInputs.showAudioTools : false text.text: qsTr("Show audio level meter") onCheckBoxClicked: { - AvatarInputs.showAudioTools = checked + if (AvatarInputs !== undefined) { + AvatarInputs.showAudioTools = checked + } } } @@ -138,30 +135,34 @@ Rectangle { } ListView { - Timer { - id: refreshTimer - interval: 1 - repeat: false - onTriggered: { - //refresh model - inputAudioListView.model = undefined - inputAudioListView.model = AudioDevice.inputAudioDevices - } - } id: inputAudioListView anchors { left: parent.left; right: parent.right; leftMargin: 70 } height: 125 - spacing: 16 + spacing: 0 clip: true snapMode: ListView.SnapToItem - delegate: AudioCheckbox { + model: AudioDevice + delegate: Item { width: parent.width - checkbox.checked: (modelData === AudioDevice.getInputDevice()) - text.text: modelData - onCheckBoxClicked: { - if (checked) { - AudioDevice.setInputDevice(modelData) - refreshTimer.start() + visible: devicemode === 0 + height: visible ? 
36 : 0 + + AudioCheckbox { + id: cbin + anchors.verticalCenter: parent.verticalCenter + Binding { + target: cbin.checkbox + property: 'checked' + value: devicechecked + } + + width: parent.width + cbchecked: devicechecked + text.text: devicename + onCheckBoxClicked: { + if (checked) { + AudioDevice.setInputDeviceAsync(devicename) + } } } } @@ -191,31 +192,33 @@ Rectangle { text: qsTr("CHOOSE OUTPUT DEVICE") } } + ListView { id: outputAudioListView - Timer { - id: refreshTimerOutput - interval: 1 - repeat: false - onTriggered: { - //refresh model - outputAudioListView.model = undefined - outputAudioListView.model = AudioDevice.outputAudioDevices - } - } anchors { left: parent.left; right: parent.right; leftMargin: 70 } height: 250 - spacing: 16 + spacing: 0 clip: true snapMode: ListView.SnapToItem - delegate: AudioCheckbox { + model: AudioDevice + delegate: Item { width: parent.width - checkbox.checked: (modelData === AudioDevice.getOutputDevice()) - text.text: modelData - onCheckBoxClicked: { - if (checked) { - AudioDevice.setOutputDevice(modelData) - refreshTimerOutput.start() + visible: devicemode === 1 + height: visible ? 36 : 0 + AudioCheckbox { + id: cbout + width: parent.width + anchors.verticalCenter: parent.verticalCenter + Binding { + target: cbout.checkbox + property: 'checked' + value: devicechecked + } + text.text: devicename + onCheckBoxClicked: { + if (checked) { + AudioDevice.setOutputDeviceAsync(devicename) + } } } } diff --git a/interface/resources/qml/hifi/components/AudioCheckbox.qml b/interface/resources/qml/hifi/components/AudioCheckbox.qml index a8e0441e0a..b037fe4c7d 100644 --- a/interface/resources/qml/hifi/components/AudioCheckbox.qml +++ b/interface/resources/qml/hifi/components/AudioCheckbox.qml @@ -8,6 +8,7 @@ Row { id: row spacing: 16 property alias checkbox: cb + property alias cbchecked: cb.checked property alias text: txt signal checkBoxClicked(bool checked) diff --git a/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml b/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml index 85377aaeda..17d3f1b959 100644 --- a/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml +++ b/interface/resources/qml/hifi/tablet/TabletGeneralPreferences.qml @@ -33,6 +33,6 @@ StackView { TabletPreferencesDialog { id: root objectName: "TabletGeneralPreferences" - showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect"] + showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect", "Vive Pucks Configuration"] } } diff --git a/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml b/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml index 2c8f6d9ea0..3e497b053e 100644 --- a/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml +++ b/interface/resources/qml/hifi/tablet/tabletWindows/TabletPreferencesDialog.qml @@ -136,8 +136,8 @@ Item { for (var i = 0; i < sections.length; i++) { totalHeight += sections[i].height + sections[i].getPreferencesHeight(); } - console.log(totalHeight); - return totalHeight; + var bottomPadding = 100; + return (totalHeight + bottomPadding); } } } diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index d3a690b020..32f6d9554e 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -2053,6 +2053,8 @@ void Application::initializeUi() { 
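Reviewer note on the hunk below: the AvatarInputs !== undefined guards added to Audio.qml above only make sense because AvatarInputs is exposed to QML as a context property during UI initialization, and QML bindings that reference it evaluate to undefined until that registration runs. A minimal sketch of the registration pattern, not the project's actual wiring, assuming a QQmlEngine pointer named engine and the AvatarInputs::getInstance() accessor that this patch uses (the header path is assumed):

    #include <QQmlContext>
    #include <QQmlEngine>
    #include "ui/AvatarInputs.h"   // assumed header location

    // Until this runs, QML expressions such as AvatarInputs.showAudioTools
    // evaluate against an undefined name, hence the guards in Audio.qml.
    void registerAvatarInputs(QQmlEngine* engine) {
        engine->rootContext()->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
    }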
rootContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor()); + rootContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance()); + if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) { rootContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get())); } diff --git a/interface/src/avatar/MyHead.cpp b/interface/src/avatar/MyHead.cpp index c41fff3bb5..34a75c5461 100644 --- a/interface/src/avatar/MyHead.cpp +++ b/interface/src/avatar/MyHead.cpp @@ -48,7 +48,7 @@ void MyHead::simulate(float deltaTime) { FaceTracker* faceTracker = qApp->getActiveFaceTracker(); _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted(); if (_isFaceTrackerConnected) { - _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); + _transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); if (typeid(*faceTracker) == typeid(DdeFaceTracker)) { @@ -60,11 +60,11 @@ void MyHead::simulate(float deltaTime) { const int FUNNEL_BLENDSHAPE = 40; const int SMILE_LEFT_BLENDSHAPE = 28; const int SMILE_RIGHT_BLENDSHAPE = 29; - _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen; - _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4; - _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4; - _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2; - _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3; + _transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen; + _transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4; + _transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4; + _transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2; + _transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3; } applyEyelidOffset(getFinalOrientationInWorldFrame()); } diff --git a/interface/src/scripting/AudioDeviceScriptingInterface.cpp b/interface/src/scripting/AudioDeviceScriptingInterface.cpp index cbb08c0af0..05168b0d4c 100644 --- a/interface/src/scripting/AudioDeviceScriptingInterface.cpp +++ b/interface/src/scripting/AudioDeviceScriptingInterface.cpp @@ -11,21 +11,19 @@ #include "AudioClient.h" #include "AudioDeviceScriptingInterface.h" - +#include "SettingsScriptingInterface.h" AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() { static AudioDeviceScriptingInterface sharedInstance; return &sharedInstance; } -QStringList AudioDeviceScriptingInterface::inputAudioDevices() const -{ - return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioInput).toList();; +QStringList AudioDeviceScriptingInterface::inputAudioDevices() const { + return _inputAudioDevices; } -QStringList AudioDeviceScriptingInterface::outputAudioDevices() const -{ - return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioOutput).toList();; +QStringList AudioDeviceScriptingInterface::outputAudioDevices() const { + return _outputAudioDevices; } bool AudioDeviceScriptingInterface::muted() @@ -33,11 +31,27 @@ bool AudioDeviceScriptingInterface::muted() return getMuted(); } -AudioDeviceScriptingInterface::AudioDeviceScriptingInterface() { +AudioDeviceScriptingInterface::AudioDeviceScriptingInterface(): QAbstractListModel(nullptr) { connect(DependencyManager::get<AudioClient>().data(), &AudioClient::muteToggled, this, &AudioDeviceScriptingInterface::muteToggled); connect(DependencyManager::get<AudioClient>().data(), &AudioClient::deviceChanged, - this, &AudioDeviceScriptingInterface::deviceChanged); + this, 
&AudioDeviceScriptingInterface::onDeviceChanged, Qt::QueuedConnection); + connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentInputDeviceChanged, + this, &AudioDeviceScriptingInterface::onCurrentInputDeviceChanged, Qt::QueuedConnection); + connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentOutputDeviceChanged, + this, &AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged, Qt::QueuedConnection); + //fill up model + onDeviceChanged(); + //set up previously saved device + SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance(); + const QString inDevice = settings->getValue("audio_input_device").toString(); + if (inDevice != _currentInputDevice) { + setInputDeviceAsync(inDevice); + } + const QString outDevice = settings->getValue("audio_output_device").toString(); + if (outDevice != _currentOutputDevice) { + setOutputDeviceAsync(outDevice); + } } bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) { @@ -58,6 +72,43 @@ bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) { return result; } +bool AudioDeviceScriptingInterface::setDeviceFromMenu(const QString& deviceMenuName) { + QAudio::Mode mode; + + if (deviceMenuName.indexOf("for Output") != -1) { + mode = QAudio::AudioOutput; + } else if (deviceMenuName.indexOf("for Input") != -1) { + mode = QAudio::AudioInput; + } else { + return false; + } + + for (ScriptingAudioDeviceInfo di: _devices) { + if (mode == di.mode && deviceMenuName.contains(di.name)) { + if (mode == QAudio::AudioOutput) { + setOutputDeviceAsync(di.name); + } else { + setInputDeviceAsync(di.name); + } + return true; + } + } + + return false; +} + +void AudioDeviceScriptingInterface::setInputDeviceAsync(const QString& deviceName) { + QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchInputToAudioDevice", + Qt::QueuedConnection, + Q_ARG(const QString&, deviceName)); +} + +void AudioDeviceScriptingInterface::setOutputDeviceAsync(const QString& deviceName) { + QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchOutputToAudioDevice", + Qt::QueuedConnection, + Q_ARG(const QString&, deviceName)); +} + QString AudioDeviceScriptingInterface::getInputDevice() { return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioInput); } @@ -116,3 +167,105 @@ void AudioDeviceScriptingInterface::setMuted(bool muted) bool AudioDeviceScriptingInterface::getMuted() { return DependencyManager::get<AudioClient>()->isMuted(); } + +QVariant AudioDeviceScriptingInterface::data(const QModelIndex& index, int role) const { + //sanity + if (!index.isValid() || index.row() >= _devices.size()) + return QVariant(); + + + if (role == Qt::DisplayRole || role == DisplayNameRole) { + return _devices.at(index.row()).name; + } else if (role == SelectedRole) { + return _devices.at(index.row()).selected; + } else if (role == AudioModeRole) { + return (int)_devices.at(index.row()).mode; + } + return QVariant(); +} + +int AudioDeviceScriptingInterface::rowCount(const QModelIndex& parent) const { + Q_UNUSED(parent) + return _devices.size(); +} + +QHash<int, QByteArray> AudioDeviceScriptingInterface::roleNames() const { + QHash<int, QByteArray> roles; + roles.insert(DisplayNameRole, "devicename"); + roles.insert(SelectedRole, "devicechecked"); + roles.insert(AudioModeRole, "devicemode"); + return roles; +} + +void AudioDeviceScriptingInterface::onDeviceChanged() +{ + beginResetModel(); + _outputAudioDevices.clear(); + 
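Aside on the model plumbing in this file: the devicename, devicechecked, and devicemode names bound by the QML delegates above come from roleNames(), and onDeviceChanged() rebuilds the rows inside a beginResetModel()/endResetModel() pair so attached views refresh. A stripped-down sketch of that QAbstractListModel role pattern, not the project's class, with only the name role filled in:

    #include <QAbstractListModel>
    #include <QStringList>

    class DeviceModelSketch : public QAbstractListModel {
        Q_OBJECT
    public:
        enum Roles { NameRole = Qt::UserRole, CheckedRole, ModeRole };

        int rowCount(const QModelIndex& = QModelIndex()) const override { return _names.size(); }

        QVariant data(const QModelIndex& index, int role) const override {
            if (!index.isValid() || index.row() >= _names.size()) {
                return QVariant();
            }
            // Only the display name is modelled here; the real class also
            // answers CheckedRole and ModeRole.
            return role == NameRole ? QVariant(_names.at(index.row())) : QVariant();
        }

        // These byte arrays become the names a QML delegate can bind to.
        QHash<int, QByteArray> roleNames() const override {
            return { { NameRole, "devicename" },
                     { CheckedRole, "devicechecked" },
                     { ModeRole, "devicemode" } };
        }

    private:
        QStringList _names;
    };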
_devices.clear(); + _currentOutputDevice = getOutputDevice(); + for (QString name: getOutputDevices()) { + ScriptingAudioDeviceInfo di; + di.name = name; + di.selected = (name == _currentOutputDevice); + di.mode = QAudio::AudioOutput; + _devices.append(di); + _outputAudioDevices.append(name); + } + emit outputAudioDevicesChanged(_outputAudioDevices); + + _inputAudioDevices.clear(); + _currentInputDevice = getInputDevice(); + for (QString name: getInputDevices()) { + ScriptingAudioDeviceInfo di; + di.name = name; + di.selected = (name == _currentInputDevice); + di.mode = QAudio::AudioInput; + _devices.append(di); + _inputAudioDevices.append(name); + } + emit inputAudioDevicesChanged(_inputAudioDevices); + + endResetModel(); + emit deviceChanged(); +} + +void AudioDeviceScriptingInterface::onCurrentInputDeviceChanged(const QString& name) +{ + currentDeviceUpdate(name, QAudio::AudioInput); + //we got a signal that device changed. Save it now + SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance(); + settings->setValue("audio_input_device", name); + emit currentInputDeviceChanged(name); +} + +void AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged(const QString& name) +{ + currentDeviceUpdate(name, QAudio::AudioOutput); + //we got a signal that device changed. Save it now + SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance(); + settings->setValue("audio_output_device", name); + emit currentOutputDeviceChanged(name); +} + +void AudioDeviceScriptingInterface::currentDeviceUpdate(const QString& name, QAudio::Mode mode) +{ + QVector<int> role; + role.append(SelectedRole); + + for (int i = 0; i < _devices.size(); i++) { + ScriptingAudioDeviceInfo di = _devices.at(i); + if (di.mode != mode) { + continue; + } + if (di.selected && di.name != name ) { + di.selected = false; + _devices[i] = di; + emit dataChanged(index(i, 0), index(i, 0), role); + } + if (di.name == name) { + di.selected = true; + _devices[i] = di; + emit dataChanged(index(i, 0), index(i, 0), role); + } + } +} diff --git a/interface/src/scripting/AudioDeviceScriptingInterface.h b/interface/src/scripting/AudioDeviceScriptingInterface.h index 4d1d47dcba..f912c35288 100644 --- a/interface/src/scripting/AudioDeviceScriptingInterface.h +++ b/interface/src/scripting/AudioDeviceScriptingInterface.h @@ -15,10 +15,18 @@ #include <QObject> #include <QString> #include <QVector> +#include <QAbstractListModel> +#include <QAudio> class AudioEffectOptions; -class AudioDeviceScriptingInterface : public QObject { +struct ScriptingAudioDeviceInfo { + QString name; + bool selected; + QAudio::Mode mode; +}; + +class AudioDeviceScriptingInterface : public QAbstractListModel { Q_OBJECT Q_PROPERTY(QStringList inputAudioDevices READ inputAudioDevices NOTIFY inputAudioDevicesChanged) @@ -32,9 +40,26 @@ public: QStringList outputAudioDevices() const; bool muted(); + QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const override; + int rowCount(const QModelIndex& parent = QModelIndex()) const override; + QHash<int, QByteArray> roleNames() const override; + + enum Roles { + DisplayNameRole = Qt::UserRole, + SelectedRole, + AudioModeRole + }; + +private slots: + void onDeviceChanged(); + void onCurrentInputDeviceChanged(const QString& name); + void onCurrentOutputDeviceChanged(const QString& name); + void currentDeviceUpdate(const QString& name, QAudio::Mode mode); + public slots: bool setInputDevice(const QString& deviceName); bool setOutputDevice(const QString& deviceName); + 
bool setDeviceFromMenu(const QString& deviceMenuName); QString getInputDevice(); QString getOutputDevice(); @@ -55,15 +80,28 @@ public slots: void setMuted(bool muted); + void setInputDeviceAsync(const QString& deviceName); + void setOutputDeviceAsync(const QString& deviceName); private: AudioDeviceScriptingInterface(); signals: void muteToggled(); void deviceChanged(); + void currentInputDeviceChanged(const QString& name); + void currentOutputDeviceChanged(const QString& name); void mutedChanged(bool muted); void inputAudioDevicesChanged(QStringList inputAudioDevices); void outputAudioDevicesChanged(QStringList outputAudioDevices); + +private: + QVector<ScriptingAudioDeviceInfo> _devices; + + QStringList _inputAudioDevices; + QStringList _outputAudioDevices; + + QString _currentInputDevice; + QString _currentOutputDevice; }; #endif // hifi_AudioDeviceScriptingInterface_h diff --git a/interface/src/ui/overlays/Web3DOverlay.cpp b/interface/src/ui/overlays/Web3DOverlay.cpp index ecc63801fc..d9eab9a78d 100644 --- a/interface/src/ui/overlays/Web3DOverlay.cpp +++ b/interface/src/ui/overlays/Web3DOverlay.cpp @@ -225,10 +225,6 @@ void Web3DOverlay::setMaxFPS(uint8_t maxFPS) { } void Web3DOverlay::render(RenderArgs* args) { - if (!_visible || !getParentVisible()) { - return; - } - QOpenGLContext * currentContext = QOpenGLContext::currentContext(); QSurface * currentSurface = currentContext->surface(); if (!_webSurface) { @@ -282,6 +278,10 @@ void Web3DOverlay::render(RenderArgs* args) { _webSurface->resize(QSize(_resolution.x, _resolution.y)); } + if (!_visible || !getParentVisible()) { + return; + } + vec2 halfSize = getSize() / 2.0f; vec4 color(toGlm(getColor()), getAlpha()); diff --git a/libraries/audio-client/src/AudioClient.cpp b/libraries/audio-client/src/AudioClient.cpp index dae37ffc4b..1282dbb2dc 100644 --- a/libraries/audio-client/src/AudioClient.cpp +++ b/libraries/audio-client/src/AudioClient.cpp @@ -799,7 +799,8 @@ QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) { QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) { QVector<QString> deviceNames; - foreach(QAudioDeviceInfo audioDevice, getAvailableDevices(mode)) { + const QList<QAudioDeviceInfo> &availableDevice = getAvailableDevices(mode); + foreach(const QAudioDeviceInfo &audioDevice, availableDevice) { deviceNames << audioDevice.deviceName().trimmed(); } return deviceNames; @@ -1402,7 +1403,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn _audioInput->stop(); _inputDevice = NULL; - delete _audioInput; + _audioInput->deleteLater(); _audioInput = NULL; _numInputCallbackBytes = 0; @@ -1418,6 +1419,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn if (!inputDeviceInfo.isNull()) { qCDebug(audioclient) << "The audio input device " << inputDeviceInfo.deviceName() << "is available."; _inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed(); + emit currentInputDeviceChanged(_inputAudioDeviceName); if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) { qCDebug(audioclient) << "The format to be used for audio input is" << _inputFormat; @@ -1506,11 +1508,13 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice if (_audioOutput) { _audioOutput->stop(); - delete _audioOutput; + //must be deleted in next eventloop cycle when its called from notify() + _audioOutput->deleteLater(); _audioOutput = NULL; _loopbackOutputDevice = NULL; - delete _loopbackAudioOutput; + //must be deleted in 
next eventloop cycle when its called from notify() + _loopbackAudioOutput->deleteLater(); _loopbackAudioOutput = NULL; delete[] _outputMixBuffer; @@ -1535,6 +1539,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice if (!outputDeviceInfo.isNull()) { qCDebug(audioclient) << "The audio output device " << outputDeviceInfo.deviceName() << "is available."; _outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed(); + emit currentOutputDeviceChanged(_outputAudioDeviceName); if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) { qCDebug(audioclient) << "The format to be used for audio output is" << _outputFormat; diff --git a/libraries/audio-client/src/AudioClient.h b/libraries/audio-client/src/AudioClient.h index 0e5363e0ff..47808767b3 100644 --- a/libraries/audio-client/src/AudioClient.h +++ b/libraries/audio-client/src/AudioClient.h @@ -222,6 +222,9 @@ signals: void muteEnvironmentRequested(glm::vec3 position, float radius); + void currentOutputDeviceChanged(const QString& name); + void currentInputDeviceChanged(const QString& name); + protected: AudioClient(); ~AudioClient(); diff --git a/libraries/avatars-renderer/src/avatars-renderer/Head.cpp b/libraries/avatars-renderer/src/avatars-renderer/Head.cpp index 1c54ea269a..93fe246266 100644 --- a/libraries/avatars-renderer/src/avatars-renderer/Head.cpp +++ b/libraries/avatars-renderer/src/avatars-renderer/Head.cpp @@ -151,7 +151,7 @@ void Head::simulate(float deltaTime) { _mouth2, _mouth3, _mouth4, - _blendshapeCoefficients); + _transientBlendshapeCoefficients); applyEyelidOffset(getOrientation()); @@ -202,6 +202,13 @@ void Head::calculateMouthShapes(float deltaTime) { float trailingAudioJawOpenRatio = (100.0f - deltaTime * NORMAL_HZ) / 100.0f; // --> 0.99 at 60 Hz _trailingAudioJawOpen = glm::mix(_trailingAudioJawOpen, _audioJawOpen, trailingAudioJawOpenRatio); + // truncate _mouthTime when mouth goes quiet to prevent floating point error on increment + const float SILENT_TRAILING_JAW_OPEN = 0.0002f; + const float MAX_SILENT_MOUTH_TIME = 10.0f; + if (_trailingAudioJawOpen < SILENT_TRAILING_JAW_OPEN && _mouthTime > MAX_SILENT_MOUTH_TIME) { + _mouthTime = 0.0f; + } + // Advance time at a rate proportional to loudness, and move the mouth shapes through // a cycle at differing speeds to create a continuous random blend of shapes. 
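Reviewer note on the _mouthTime truncation above: _mouthTime only ever increases, and once a float grows large enough the spacing between representable values exceeds the small per-frame increment, so the addition silently stops having any effect. A tiny self-contained illustration of that failure mode (illustrative magnitude, not a value the head code reaches in a normal session):

    #include <cstdio>

    int main() {
        // Near 1e7 the spacing of float is about 1.0, so a millisecond-scale
        // increment is rounded away entirely.
        float accumulated = 10000000.0f;
        float next = accumulated + 0.001f;
        std::printf("%s\n", next == accumulated ? "increment lost" : "increment kept");
        return 0;
    }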
_mouthTime += sqrtf(_averageLoudness) * TIMESTEP_CONSTANT * deltaTimeRatio; @@ -227,15 +234,15 @@ void Head::applyEyelidOffset(glm::quat headOrientation) { for (int i = 0; i < 2; i++) { const int LEFT_EYE = 8; - float eyeCoefficient = _blendshapeCoefficients[i] - _blendshapeCoefficients[LEFT_EYE + i]; // Raw value + float eyeCoefficient = _transientBlendshapeCoefficients[i] - _transientBlendshapeCoefficients[LEFT_EYE + i]; eyeCoefficient = glm::clamp(eyelidOffset + eyeCoefficient * (1.0f - eyelidOffset), -1.0f, 1.0f); if (eyeCoefficient > 0.0f) { - _blendshapeCoefficients[i] = eyeCoefficient; - _blendshapeCoefficients[LEFT_EYE + i] = 0.0f; + _transientBlendshapeCoefficients[i] = eyeCoefficient; + _transientBlendshapeCoefficients[LEFT_EYE + i] = 0.0f; } else { - _blendshapeCoefficients[i] = 0.0f; - _blendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient; + _transientBlendshapeCoefficients[i] = 0.0f; + _transientBlendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient; } } } diff --git a/libraries/avatars/src/AvatarData.cpp b/libraries/avatars/src/AvatarData.cpp index cb819c6b20..6992e66f0e 100644 --- a/libraries/avatars/src/AvatarData.cpp +++ b/libraries/avatars/src/AvatarData.cpp @@ -445,17 +445,17 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent if (hasFaceTrackerInfo) { auto startSection = destinationBuffer; auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer); + auto blendshapeCoefficients = _headData->getSummedBlendshapeCoefficients(); faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink; faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink; faceTrackerInfo->averageLoudness = _headData->_averageLoudness; faceTrackerInfo->browAudioLift = _headData->_browAudioLift; - faceTrackerInfo->numBlendshapeCoefficients = _headData->_blendshapeCoefficients.size(); + faceTrackerInfo->numBlendshapeCoefficients = blendshapeCoefficients.size(); destinationBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo); - // followed by a variable number of float coefficients - memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(), _headData->_blendshapeCoefficients.size() * sizeof(float)); - destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float); + memcpy(destinationBuffer, blendshapeCoefficients.data(), blendshapeCoefficients.size() * sizeof(float)); + destinationBuffer += blendshapeCoefficients.size() * sizeof(float); int numBytes = destinationBuffer - startSection; if (outboundDataRateOut) { @@ -965,7 +965,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) { const int coefficientsSize = sizeof(float) * numCoefficients; PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize); _headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy! - _headData->_baseBlendshapeCoefficients.resize(numCoefficients); + _headData->_transientBlendshapeCoefficients.resize(numCoefficients); memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize); sourceBuffer += coefficientsSize; int numBytesRead = sourceBuffer - startSection; @@ -1495,11 +1495,14 @@ QUrl AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) const { return _skeletonModelURL.scheme() == "file" ? 
emptyURL : _skeletonModelURL; } -void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged) { +void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, const qint64 clockSkew) { - if (identity.updatedAt < _identityUpdatedAt) { + // Consider the case where this packet is being processed on Client A, and Client A is connected to Sandbox B. + // If Client A's system clock is *ahead of* Sandbox B's system clock, "clockSkew" will be *negative*. + // If Client A's system clock is *behind* Sandbox B's system clock, "clockSkew" will be *positive*. + if ((_identityUpdatedAt > identity.updatedAt - clockSkew) && (_identityUpdatedAt != 0)) { qCDebug(avatars) << "Ignoring late identity packet for avatar " << getSessionUUID() - << "identity.updatedAt:" << identity.updatedAt << "_identityUpdatedAt:" << _identityUpdatedAt; + << "_identityUpdatedAt (" << _identityUpdatedAt << ") is greater than identity.updatedAt - clockSkew (" << identity.updatedAt << "-" << clockSkew << ")"; return; } @@ -1535,7 +1538,7 @@ void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityC // use the timestamp from this identity, since we want to honor the updated times in "server clock" // this will overwrite any changes we made locally to this AvatarData's _identityUpdatedAt - _identityUpdatedAt = identity.updatedAt; + _identityUpdatedAt = identity.updatedAt - clockSkew; } QByteArray AvatarData::identityByteArray() const { diff --git a/libraries/avatars/src/AvatarData.h b/libraries/avatars/src/AvatarData.h index 6d801793b7..e6e0571878 100644 --- a/libraries/avatars/src/AvatarData.h +++ b/libraries/avatars/src/AvatarData.h @@ -538,7 +538,7 @@ public: // identityChanged returns true if identity has changed, false otherwise. // displayNameChanged returns true if displayName has changed, false otherwise. - void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged); + void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, const qint64 clockSkew); QByteArray identityByteArray() const; diff --git a/libraries/avatars/src/AvatarHashMap.cpp b/libraries/avatars/src/AvatarHashMap.cpp index 0d341c684e..155ef9a0a2 100644 --- a/libraries/avatars/src/AvatarHashMap.cpp +++ b/libraries/avatars/src/AvatarHashMap.cpp @@ -148,7 +148,8 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> auto avatar = newOrExistingAvatar(identity.uuid, sendingNode); bool identityChanged = false; bool displayNameChanged = false; - avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged); + // In this case, the "sendingNode" is the Avatar Mixer. 
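Worked example of the skew correction above, with illustrative numbers in microseconds: suppose the local clock runs 2 s ahead of the mixer's clock, so getClockSkewUsec() reports -2,000,000. An identity stamped 1,000,000 on the mixer's clock then corresponds to 3,000,000 on the local clock, and that is what gets compared against _identityUpdatedAt. A minimal sketch of that comparison, not the project's code:

    #include <QtGlobal>

    static bool shouldIgnoreIdentityExample() {
        qint64 identityUpdatedAtLocal = 0;          // _identityUpdatedAt, local clock
        qint64 updatedAt = 1000000;                 // identity.updatedAt, mixer clock
        qint64 clockSkew = -2000000;                // senderNode->getClockSkewUsec()
        qint64 updatedAtLocal = updatedAt - clockSkew;   // 3000000: same instant, local clock
        // Mirrors the guard in processAvatarIdentity(): drop the packet only if the
        // last accepted stamp is newer, both expressed on the local clock.
        return (identityUpdatedAtLocal > updatedAtLocal) && (identityUpdatedAtLocal != 0);
    }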
+ avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged, sendingNode->getClockSkewUsec()); } } diff --git a/libraries/avatars/src/HeadData.cpp b/libraries/avatars/src/HeadData.cpp index b55be7c156..2e4eec73a8 100644 --- a/libraries/avatars/src/HeadData.cpp +++ b/libraries/avatars/src/HeadData.cpp @@ -34,8 +34,9 @@ HeadData::HeadData(AvatarData* owningAvatar) : _rightEyeBlink(0.0f), _averageLoudness(0.0f), _browAudioLift(0.0f), - _baseBlendshapeCoefficients(QVector<float>(0, 0.0f)), - _currBlendShapeCoefficients(QVector<float>(0, 0.0f)), + _blendshapeCoefficients(QVector<float>(0, 0.0f)), + _transientBlendshapeCoefficients(QVector<float>(0, 0.0f)), + _summedBlendshapeCoefficients(QVector<float>(0, 0.0f)), _owningAvatar(owningAvatar) { @@ -85,22 +86,22 @@ static const QMap<QString, int>& getBlendshapesLookupMap() { } const QVector<float>& HeadData::getSummedBlendshapeCoefficients() { - int maxSize = std::max(_baseBlendshapeCoefficients.size(), _blendshapeCoefficients.size()); - if (_currBlendShapeCoefficients.size() != maxSize) { - _currBlendShapeCoefficients.resize(maxSize); + int maxSize = std::max(_blendshapeCoefficients.size(), _transientBlendshapeCoefficients.size()); + if (_summedBlendshapeCoefficients.size() != maxSize) { + _summedBlendshapeCoefficients.resize(maxSize); } for (int i = 0; i < maxSize; i++) { - if (i >= _baseBlendshapeCoefficients.size()) { - _currBlendShapeCoefficients[i] = _blendshapeCoefficients[i]; - } else if (i >= _blendshapeCoefficients.size()) { - _currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i]; + if (i >= _blendshapeCoefficients.size()) { + _summedBlendshapeCoefficients[i] = _transientBlendshapeCoefficients[i]; + } else if (i >= _transientBlendshapeCoefficients.size()) { + _summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i]; } else { - _currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i] + _blendshapeCoefficients[i]; + _summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i] + _transientBlendshapeCoefficients[i]; } } - return _currBlendShapeCoefficients; + return _summedBlendshapeCoefficients; } void HeadData::setBlendshape(QString name, float val) { @@ -112,10 +113,10 @@ void HeadData::setBlendshape(QString name, float val) { if (_blendshapeCoefficients.size() <= it.value()) { _blendshapeCoefficients.resize(it.value() + 1); } - if (_baseBlendshapeCoefficients.size() <= it.value()) { - _baseBlendshapeCoefficients.resize(it.value() + 1); + if (_transientBlendshapeCoefficients.size() <= it.value()) { + _transientBlendshapeCoefficients.resize(it.value() + 1); } - _baseBlendshapeCoefficients[it.value()] = val; + _blendshapeCoefficients[it.value()] = val; } } @@ -131,14 +132,16 @@ QJsonObject HeadData::toJson() const { QJsonObject blendshapesJson; for (auto name : blendshapeLookupMap.keys()) { auto index = blendshapeLookupMap[name]; - if (index >= _blendshapeCoefficients.size()) { - continue; + float value = 0.0f; + if (index < _blendshapeCoefficients.size()) { + value += _blendshapeCoefficients[index]; } - auto value = _blendshapeCoefficients[index]; - if (value == 0.0f) { - continue; + if (index < _transientBlendshapeCoefficients.size()) { + value += _transientBlendshapeCoefficients[index]; + } + if (value != 0.0f) { + blendshapesJson[name] = value; } - blendshapesJson[name] = value; } if (!blendshapesJson.isEmpty()) { headJson[JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS] = blendshapesJson; @@ -163,8 +166,8 @@ void HeadData::fromJson(const QJsonObject& json) { QJsonArray blendshapeCoefficientsJson = 
jsonValue.toArray(); for (const auto& blendshapeCoefficient : blendshapeCoefficientsJson) { blendshapeCoefficients.push_back((float)blendshapeCoefficient.toDouble()); - setBlendshapeCoefficients(blendshapeCoefficients); } + setBlendshapeCoefficients(blendshapeCoefficients); } else if (jsonValue.isObject()) { QJsonObject blendshapeCoefficientsJson = jsonValue.toObject(); for (const QString& name : blendshapeCoefficientsJson.keys()) { diff --git a/libraries/avatars/src/HeadData.h b/libraries/avatars/src/HeadData.h index dbed0a6a65..9b28616b3f 100644 --- a/libraries/avatars/src/HeadData.h +++ b/libraries/avatars/src/HeadData.h @@ -93,8 +93,8 @@ protected: float _browAudioLift; QVector<float> _blendshapeCoefficients; - QVector<float> _baseBlendshapeCoefficients; - QVector<float> _currBlendShapeCoefficients; + QVector<float> _transientBlendshapeCoefficients; + QVector<float> _summedBlendshapeCoefficients; AvatarData* _owningAvatar; private: diff --git a/libraries/controllers/src/controllers/InputRecorder.cpp b/libraries/controllers/src/controllers/InputRecorder.cpp index 2d2cd40739..7433f181a1 100644 --- a/libraries/controllers/src/controllers/InputRecorder.cpp +++ b/libraries/controllers/src/controllers/InputRecorder.cpp @@ -22,20 +22,20 @@ #include <BuildInfo.h> #include <GLMHelpers.h> - + QString SAVE_DIRECTORY = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation) + "/" + BuildInfo::MODIFIED_ORGANIZATION + "/" + BuildInfo::INTERFACE_NAME + "/hifi-input-recordings/"; QString FILE_PREFIX_NAME = "input-recording-"; QString COMPRESS_EXTENSION = ".tar.gz"; namespace controller { - + QJsonObject poseToJsonObject(const Pose pose) { QJsonObject newPose; - + QJsonArray translation; translation.append(pose.translation.x); translation.append(pose.translation.y); translation.append(pose.translation.z); - + QJsonArray rotation; rotation.append(pose.rotation.x); rotation.append(pose.rotation.y); @@ -69,7 +69,7 @@ namespace controller { QJsonArray angularVelocity = object["angularVelocity"].toArray(); pose.valid = object["valid"].toBool(); - + pose.translation.x = translation[0].toDouble(); pose.translation.y = translation[1].toDouble(); pose.translation.z = translation[2].toDouble(); @@ -89,13 +89,13 @@ namespace controller { return pose; } - + void exportToFile(QJsonObject& object) { if (!QDir(SAVE_DIRECTORY).exists()) { QDir().mkdir(SAVE_DIRECTORY); } - + QString timeStamp = QDateTime::currentDateTime().toString(Qt::ISODate); timeStamp.replace(":", "-"); QString fileName = SAVE_DIRECTORY + FILE_PREFIX_NAME + timeStamp + COMPRESS_EXTENSION; @@ -124,7 +124,7 @@ namespace controller { status = true; return object; } - + InputRecorder::InputRecorder() {} InputRecorder::~InputRecorder() {} @@ -195,16 +195,16 @@ namespace controller { _framesRecorded = data["frameCount"].toInt(); QJsonArray actionArrayList = data["actionList"].toArray(); QJsonArray poseArrayList = data["poseList"].toArray(); - + for (int actionIndex = 0; actionIndex < actionArrayList.size(); actionIndex++) { QJsonArray actionState = actionArrayList[actionIndex].toArray(); for (int index = 0; index < actionState.size(); index++) { - _currentFrameActions[index] = actionState[index].toInt(); + _currentFrameActions[index] = actionState[index].toDouble(); } _actionStateList.push_back(_currentFrameActions); _currentFrameActions = ActionStates(toInt(Action::NUM_ACTIONS)); } - + for (int poseIndex = 0; poseIndex < poseArrayList.size(); poseIndex++) { QJsonArray poseState = poseArrayList[poseIndex].toArray(); for (int index = 0; 
index < poseState.size(); index++) { @@ -250,13 +250,13 @@ namespace controller { for(auto& channel : _currentFramePoses) { channel = Pose(); } - + for(auto& channel : _currentFrameActions) { channel = 0.0f; } } } - + float InputRecorder::getActionState(controller::Action action) { if (_actionStateList.size() > 0 ) { return _actionStateList[_playCount][toInt(action)]; diff --git a/libraries/script-engine/src/AudioScriptingInterface.h b/libraries/script-engine/src/AudioScriptingInterface.h index e97bc329c6..5ec8ce4b12 100644 --- a/libraries/script-engine/src/AudioScriptingInterface.h +++ b/libraries/script-engine/src/AudioScriptingInterface.h @@ -24,6 +24,7 @@ class AudioScriptingInterface : public QObject, public Dependency { SINGLETON_DEPENDENCY public: + virtual ~AudioScriptingInterface() {} void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; } protected: diff --git a/plugins/openvr/src/ViveControllerManager.cpp b/plugins/openvr/src/ViveControllerManager.cpp index 411cac3d2b..606cc38da2 100644 --- a/plugins/openvr/src/ViveControllerManager.cpp +++ b/plugins/openvr/src/ViveControllerManager.cpp @@ -22,6 +22,8 @@ #include <ui-plugins/PluginContainer.h> #include <UserActivityLogger.h> #include <NumericalConstants.h> +#include <Preferences.h> +#include <SettingHandle.h> #include <OffscreenUi.h> #include <GLMHelpers.h> #include <glm/ext.hpp> @@ -280,14 +282,21 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat); int puckCount = (int)_validTrackedObjects.size(); - if (puckCount == MIN_PUCK_COUNT) { - _config = Config::Feet; - } else if (puckCount == MIN_FEET_AND_HIPS) { - _config = Config::FeetAndHips; - } else if (puckCount >= MIN_FEET_HIPS_CHEST) { - _config = Config::FeetHipsAndChest; - } else { + _config = _preferedConfig; + if (_config != Config::Auto && puckCount < MIN_PUCK_COUNT) { + uncalibrate(); return; + } else if (_config == Config::Auto){ + if (puckCount == MIN_PUCK_COUNT) { + _config = Config::Feet; + } else if (puckCount == MIN_FEET_AND_HIPS) { + _config = Config::FeetAndHips; + } else if (puckCount >= MIN_FEET_HIPS_CHEST) { + _config = Config::FeetHipsAndChest; + } else { + uncalibrate(); + return; + } } std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition); @@ -314,19 +323,23 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr if (_config == Config::Feet) { // done - } else if (_config == Config::FeetAndHips) { + } else if (_config == Config::FeetAndHips && puckCount >= MIN_FEET_AND_HIPS) { _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first; _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second); - } else if (_config == Config::FeetHipsAndChest) { + } else if (_config == Config::FeetHipsAndChest && puckCount >= MIN_FEET_HIPS_CHEST) { _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first; _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second); _jointToPuckMap[controller::SPINE2] = _validTrackedObjects[CHEST].first; _pucksOffset[_validTrackedObjects[CHEST].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultSpine2, _validTrackedObjects[CHEST].second); + } else { + uncalibrate(); + return; } _calibrated 
= true; } void ViveControllerManager::InputDevice::uncalibrate() { + _config = Config::Auto; _pucksOffset.clear(); _jointToPuckMap.clear(); _calibrated = false; @@ -562,6 +575,74 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef } } +void ViveControllerManager::InputDevice::loadSettings() { + Settings settings; + settings.beginGroup("PUCK_CONFIG"); + { + _preferedConfig = (Config)settings.value("configuration", QVariant((int)Config::Auto)).toInt(); + } + settings.endGroup(); +} + +void ViveControllerManager::InputDevice::saveSettings() const { + Settings settings; + settings.beginGroup("PUCK_CONFIG"); + { + settings.setValue(QString("configuration"), (int)_preferedConfig); + } + settings.endGroup(); +} + +QString ViveControllerManager::InputDevice::configToString() { + QString currentConfig; + switch (_preferedConfig) { + case Config::Auto: + currentConfig = "Auto"; + break; + + case Config::Feet: + currentConfig = "Feet"; + break; + + case Config::FeetAndHips: + currentConfig = "FeetAndHips"; + break; + + case Config::FeetHipsAndChest: + currentConfig = "FeetHipsAndChest"; + break; + } + return currentConfig; +} + +void ViveControllerManager::InputDevice::setConfigFromString(const QString& value) { + if (value == "Auto") { + _preferedConfig = Config::Auto; + } else if (value == "Feet") { + _preferedConfig = Config::Feet; + } else if (value == "FeetAndHips") { + _preferedConfig = Config::FeetAndHips; + } else if (value == "FeetHipsAndChest") { + _preferedConfig = Config::FeetHipsAndChest; + } +} + +void ViveControllerManager::InputDevice::createPreferences() { + loadSettings(); + auto preferences = DependencyManager::get<Preferences>(); + static const QString VIVE_PUCKS_CONFIG = "Vive Pucks Configuration"; + + { + auto getter = [this]()->QString { return configToString(); }; + auto setter = [this](const QString& value) { setConfigFromString(value); saveSettings(); }; + auto preference = new ComboBoxPreference(VIVE_PUCKS_CONFIG, "Configuration", getter, setter); + QStringList list = (QStringList() << "Auto" << "Feet" << "FeetAndHips" << "FeetHipsAndChest"); + preference->setItems(list); + preferences->addPreference(preference); + + } +} + controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const { using namespace controller; QVector<Input::NamedPair> availableInputs{ diff --git a/plugins/openvr/src/ViveControllerManager.h b/plugins/openvr/src/ViveControllerManager.h index ca78fd0b37..30680ec264 100644 --- a/plugins/openvr/src/ViveControllerManager.h +++ b/plugins/openvr/src/ViveControllerManager.h @@ -51,14 +51,14 @@ public: private: class InputDevice : public controller::InputDevice { public: - InputDevice(vr::IVRSystem*& system) : controller::InputDevice("Vive"), _system(system) {} + InputDevice(vr::IVRSystem*& system) : controller::InputDevice("Vive"), _system(system) { createPreferences(); } private: // Device functions controller::Input::NamedVector getAvailableInputs() const override; QString getDefaultMappingConfig() const override; void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override; void focusOutEvent() override; - + void createPreferences(); bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override; void hapticsHelper(float deltaTime, bool leftHand); void calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration); @@ -101,8 +101,9 @@ private: float _timer { 0.0f }; glm::vec2 _stick { 0.0f, 0.0f }; }; - enum class 
Config { Feet, FeetAndHips, FeetHipsAndChest, NoConfig }; - Config _config { Config::NoConfig }; + enum class Config { Feet, FeetAndHips, FeetHipsAndChest, Auto }; + Config _config { Config::Auto }; + Config _preferedConfig { Config::Auto }; FilteredStick _filteredLeftStick; FilteredStick _filteredRightStick; @@ -127,6 +128,10 @@ private: bool _timeTilCalibrationSet { false }; mutable std::recursive_mutex _lock; + QString configToString(); + void setConfigFromString(const QString& value); + void loadSettings(); + void saveSettings() const; friend class ViveControllerManager; }; diff --git a/scripts/system/libraries/WebTablet.js b/scripts/system/libraries/WebTablet.js index 1493ce7953..757743accc 100644 --- a/scripts/system/libraries/WebTablet.js +++ b/scripts/system/libraries/WebTablet.js @@ -96,7 +96,7 @@ function calcSpawnInfo(hand, height) { * @param hand [number] -1 indicates no hand, Controller.Standard.RightHand or Controller.Standard.LeftHand * @param clientOnly [bool] true indicates tablet model is only visible to client. */ -WebTablet = function (url, width, dpi, hand, clientOnly, location) { +WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) { var _this = this; @@ -107,6 +107,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) { this.depth = TABLET_NATURAL_DIMENSIONS.z * tabletScaleFactor; this.landscape = false; + visible = visible === true; + if (dpi) { this.dpi = dpi; } else { @@ -125,7 +127,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) { "grabbableKey": {"grabbable": true} }), dimensions: this.getDimensions(), - parentID: AVATAR_SELF_ID + parentID: AVATAR_SELF_ID, + visible: visible }; // compute position, rotation & parentJointIndex of the tablet @@ -158,7 +161,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) { parentID: this.tabletEntityID, parentJointIndex: -1, showKeyboardFocusHighlight: false, - isAA: HMD.active + isAA: HMD.active, + visible: visible }); var HOME_BUTTON_Y_OFFSET = (this.height / 2) - (this.height / 20); @@ -168,7 +172,7 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) { localRotation: {x: 0, y: 1, z: 0, w: 0}, dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor}, alpha: 0.0, - visible: true, + visible: visible, drawInFront: false, parentID: this.tabletEntityID, parentJointIndex: -1 diff --git a/scripts/system/libraries/entitySelectionTool.js b/scripts/system/libraries/entitySelectionTool.js index 79d45d5cd2..8bebbff19a 100644 --- a/scripts/system/libraries/entitySelectionTool.js +++ b/scripts/system/libraries/entitySelectionTool.js @@ -3,6 +3,7 @@ // examples // // Created by Brad hefta-Gaub on 10/1/14. +// Modified by Daniela Fontes @DanielaFifo and Tiago Andrade @TagoWill on 4/7/2017 // Copyright 2014 High Fidelity, Inc. // // This script implements a class useful for building tools for editing entities. 
@@ -2592,6 +2593,16 @@ SelectionDisplay = (function() { // pivot - point to use as a pivot // offset - the position of the overlay tool relative to the selections center position var makeStretchTool = function(stretchMode, direction, pivot, offset, customOnMove) { + // directionFor3DStretch - direction and pivot for 3D stretch + // distanceFor3DStretch - distance from the intersection point and the handController + // used to increase the scale taking into account the distance to the object + // DISTANCE_INFLUENCE_THRESHOLD - constant that holds the minimum distance where the + // distance to the object will influence the stretch/resize/scale + var directionFor3DStretch = getDirectionsFor3DStretch(stretchMode); + var distanceFor3DStretch = 0; + var DISTANCE_INFLUENCE_THRESHOLD = 1.2; + + var signs = { x: direction.x < 0 ? -1 : (direction.x > 0 ? 1 : 0), y: direction.y < 0 ? -1 : (direction.y > 0 ? 1 : 0), @@ -2603,18 +2614,23 @@ SelectionDisplay = (function() { y: Math.abs(direction.y) > 0 ? 1 : 0, z: Math.abs(direction.z) > 0 ? 1 : 0, }; + + var numDimensions = mask.x + mask.y + mask.z; var planeNormal = null; var lastPick = null; + var lastPick3D = null; var initialPosition = null; var initialDimensions = null; var initialIntersection = null; var initialProperties = null; var registrationPoint = null; var deltaPivot = null; + var deltaPivot3D = null; var pickRayPosition = null; + var pickRayPosition3D = null; var rotation = null; var onBegin = function(event) { @@ -2652,8 +2668,20 @@ SelectionDisplay = (function() { // Scaled offset in world coordinates var scaledOffsetWorld = vec3Mult(initialDimensions, offsetRP); + pickRayPosition = Vec3.sum(initialPosition, Vec3.multiplyQbyV(rotation, scaledOffsetWorld)); - + + if (directionFor3DStretch) { + // pivot, offset and pickPlanePosition for 3D manipulation + var scaledPivot3D = Vec3.multiply(0.5, Vec3.multiply(1.0, directionFor3DStretch)); + deltaPivot3D = Vec3.subtract(centeredRP, scaledPivot3D); + + var scaledOffsetWorld3D = vec3Mult(initialDimensions, + Vec3.subtract(Vec3.multiply(0.5, Vec3.multiply(-1.0, directionFor3DStretch)), + centeredRP)); + + pickRayPosition3D = Vec3.sum(initialPosition, Vec3.multiplyQbyV(rotation, scaledOffsetWorld)); + } var start = null; var end = null; if (numDimensions == 1 && mask.x) { @@ -2754,12 +2782,25 @@ SelectionDisplay = (function() { }; } } + planeNormal = Vec3.multiplyQbyV(rotation, planeNormal); var pickRay = generalComputePickRay(event.x, event.y); lastPick = rayPlaneIntersection(pickRay, pickRayPosition, planeNormal); - + + var planeNormal3D = { + x: 0, + y: 0, + z: 0 + }; + if (directionFor3DStretch) { + lastPick3D = rayPlaneIntersection(pickRay, + pickRayPosition3D, + planeNormal3D); + distanceFor3DStretch = Vec3.length(Vec3.subtract(pickRayPosition3D, pickRay.origin)); + } + SelectionManager.saveProperties(); }; @@ -2790,24 +2831,50 @@ SelectionDisplay = (function() { dimensions = SelectionManager.worldDimensions; rotation = SelectionManager.worldRotation; } + + var localDeltaPivot = deltaPivot; + var localSigns = signs; var pickRay = generalComputePickRay(event.x, event.y); - newPick = rayPlaneIntersection(pickRay, + + // Are we using handControllers or Mouse - only relevant for 3D tools + var controllerPose = getControllerWorldLocation(activeHand, true); + if (HMD.isHMDAvailable() + && HMD.isHandControllerAvailable() && controllerPose.valid && that.triggered && directionFor3DStretch) { + localDeltaPivot = deltaPivot3D; + + newPick = pickRay.origin; + + var vector = Vec3.subtract(newPick, 
lastPick3D); + + vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector); + + if (distanceFor3DStretch > DISTANCE_INFLUENCE_THRESHOLD) { + // Range of Motion + vector = Vec3.multiply(distanceFor3DStretch , vector); + } + + localSigns = directionFor3DStretch; + + } else { + newPick = rayPlaneIntersection(pickRay, pickRayPosition, planeNormal); - var vector = Vec3.subtract(newPick, lastPick); + var vector = Vec3.subtract(newPick, lastPick); - vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector); - - vector = vec3Mult(mask, vector); + vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector); + vector = vec3Mult(mask, vector); + + } + if (customOnMove) { - var change = Vec3.multiply(-1, vec3Mult(signs, vector)); + var change = Vec3.multiply(-1, vec3Mult(localSigns, vector)); customOnMove(vector, change); } else { vector = grid.snapToSpacing(vector); - var changeInDimensions = Vec3.multiply(-1, vec3Mult(signs, vector)); + var changeInDimensions = Vec3.multiply(-1, vec3Mult(localSigns, vector)); var newDimensions; if (proportional) { var absX = Math.abs(changeInDimensions.x); @@ -2829,37 +2896,39 @@ SelectionDisplay = (function() { } else { newDimensions = Vec3.sum(initialDimensions, changeInDimensions); } - - newDimensions.x = Math.max(newDimensions.x, MINIMUM_DIMENSION); - newDimensions.y = Math.max(newDimensions.y, MINIMUM_DIMENSION); - newDimensions.z = Math.max(newDimensions.z, MINIMUM_DIMENSION); - - var changeInPosition = Vec3.multiplyQbyV(rotation, vec3Mult(deltaPivot, changeInDimensions)); - var newPosition = Vec3.sum(initialPosition, changeInPosition); - - for (var i = 0; i < SelectionManager.selections.length; i++) { - Entities.editEntity(SelectionManager.selections[i], { - position: newPosition, - dimensions: newDimensions, - }); - } - - var wantDebug = false; - if (wantDebug) { - print(stretchMode); - //Vec3.print(" newIntersection:", newIntersection); - Vec3.print(" vector:", vector); - //Vec3.print(" oldPOS:", oldPOS); - //Vec3.print(" newPOS:", newPOS); - Vec3.print(" changeInDimensions:", changeInDimensions); - Vec3.print(" newDimensions:", newDimensions); - - Vec3.print(" changeInPosition:", changeInPosition); - Vec3.print(" newPosition:", newPosition); - } - - SelectionManager._update(); } + + + newDimensions.x = Math.max(newDimensions.x, MINIMUM_DIMENSION); + newDimensions.y = Math.max(newDimensions.y, MINIMUM_DIMENSION); + newDimensions.z = Math.max(newDimensions.z, MINIMUM_DIMENSION); + + var changeInPosition = Vec3.multiplyQbyV(rotation, vec3Mult(localDeltaPivot, changeInDimensions)); + var newPosition = Vec3.sum(initialPosition, changeInPosition); + + for (var i = 0; i < SelectionManager.selections.length; i++) { + Entities.editEntity(SelectionManager.selections[i], { + position: newPosition, + dimensions: newDimensions, + }); + } + + + var wantDebug = false; + if (wantDebug) { + print(stretchMode); + //Vec3.print(" newIntersection:", newIntersection); + Vec3.print(" vector:", vector); + //Vec3.print(" oldPOS:", oldPOS); + //Vec3.print(" newPOS:", newPOS); + Vec3.print(" changeInDimensions:", changeInDimensions); + Vec3.print(" newDimensions:", newDimensions); + + Vec3.print(" changeInPosition:", changeInPosition); + Vec3.print(" newPosition:", newPosition); + } + + SelectionManager._update(); }; @@ -2870,6 +2939,75 @@ SelectionDisplay = (function() { onEnd: onEnd }; }; + + // Direction for the stretch tool when using hand controller + var directionsFor3DGrab = { + LBN: { + x: 1, + y: 1, + z: 1 + }, + RBN: { + x: -1, + y: 1, + z: 1 + }, + LBF: { + x: 1, + y: 1, 
+ z: -1 + }, + RBF: { + x: -1, + y: 1, + z: -1 + }, + LTN: { + x: 1, + y: -1, + z: 1 + }, + RTN: { + x: -1, + y: -1, + z: 1 + }, + LTF: { + x: 1, + y: -1, + z: -1 + }, + RTF: { + x: -1, + y: -1, + z: -1 + } + }; + + // Returns a vector with directions for the stretch tool in 3D using hand controllers + function getDirectionsFor3DStretch(mode) { + if (mode === "STRETCH_LBN") { + return directionsFor3DGrab.LBN; + } else if (mode === "STRETCH_RBN") { + return directionsFor3DGrab.RBN; + } else if (mode === "STRETCH_LBF") { + return directionsFor3DGrab.LBF; + } else if (mode === "STRETCH_RBF") { + return directionsFor3DGrab.RBF; + } else if (mode === "STRETCH_LTN") { + return directionsFor3DGrab.LTN; + } else if (mode === "STRETCH_RTN") { + return directionsFor3DGrab.RTN; + } else if (mode === "STRETCH_LTF") { + return directionsFor3DGrab.LTF; + } else if (mode === "STRETCH_RTF") { + return directionsFor3DGrab.RTF; + } else { + return null; + } + } + + function addStretchTool(overlay, mode, pivot, direction, offset, handleMove) { if (!pivot) { diff --git a/scripts/system/selectAudioDevice.js b/scripts/system/selectAudioDevice.js index 2dd426932f..2d40795692 100644 --- a/scripts/system/selectAudioDevice.js +++ b/scripts/system/selectAudioDevice.js @@ -17,32 +17,22 @@ const INPUT = "Input"; const OUTPUT = "Output"; -function parseMenuItem(item) { - const USE = "Use "; - const FOR_INPUT = " for " + INPUT; - const FOR_OUTPUT = " for " + OUTPUT; - if (item.slice(0, USE.length) == USE) { - if (item.slice(-FOR_INPUT.length) == FOR_INPUT) { - return { device: item.slice(USE.length, -FOR_INPUT.length), mode: INPUT }; - } else if (item.slice(-FOR_OUTPUT.length) == FOR_OUTPUT) { - return { device: item.slice(USE.length, -FOR_OUTPUT.length), mode: OUTPUT }; - } - } -} - +const SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT = 300; // // VAR DEFINITIONS // var debugPrintStatements = true; const INPUT_DEVICE_SETTING = "audio_input_device"; const OUTPUT_DEVICE_SETTING = "audio_output_device"; -var audioDevicesList = []; +var audioDevicesList = []; // placeholder for menu items var wasHmdActive = false; // assume it's not active to start var switchedAudioInputToHMD = false; var switchedAudioOutputToHMD = false; var previousSelectedInputAudioDevice = ""; var previousSelectedOutputAudioDevice = ""; -var skipMenuEvents = true; + +var interfaceInputDevice = ""; +var interfaceOutputDevice = ""; // // BEGIN FUNCTION DEFINITIONS @@ -56,56 +46,37 @@ function debug() { function setupAudioMenus() { // menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches - skipMenuEvents = true; - Script.setTimeout(function() { skipMenuEvents = false; }, 200); - removeAudioMenus(); // Setup audio input devices Menu.addSeparator("Audio", "Input Audio Device"); - var inputDevices = AudioDevice.getInputDevices(); - for (var i = 0; i < inputDevices.length; i++) { - var audioDeviceMenuString = "Use " + inputDevices[i] + " for Input"; + var currentInputDevice = AudioDevice.getInputDevice() + for (var i = 0; i < AudioDevice.inputAudioDevices.length; i++) { + var audioDeviceMenuString = "Use " + AudioDevice.inputAudioDevices[i] + " for Input"; Menu.addMenuItem({ menuName: "Audio", menuItemName: audioDeviceMenuString, isCheckable: true, - isChecked: inputDevices[i] == AudioDevice.getInputDevice() + isChecked: AudioDevice.inputAudioDevices[i] == currentInputDevice }); audioDevicesList.push(audioDeviceMenuString); } // Setup audio output devices Menu.addSeparator("Audio", "Output Audio Device"); - var 
outputDevices = AudioDevice.getOutputDevices(); - for (var i = 0; i < outputDevices.length; i++) { - var audioDeviceMenuString = "Use " + outputDevices[i] + " for Output"; + var currentOutputDevice = AudioDevice.getOutputDevice() + for (var i = 0; i < AudioDevice.outputAudioDevices.length; i++) { + var audioDeviceMenuString = "Use " + AudioDevice.outputAudioDevices[i] + " for Output"; Menu.addMenuItem({ menuName: "Audio", menuItemName: audioDeviceMenuString, isCheckable: true, - isChecked: outputDevices[i] == AudioDevice.getOutputDevice() + isChecked: AudioDevice.outputAudioDevices[i] == currentOutputDevice }); audioDevicesList.push(audioDeviceMenuString); } } -function checkDeviceMismatch() { - var inputDeviceSetting = Settings.getValue(INPUT_DEVICE_SETTING); - var interfaceInputDevice = AudioDevice.getInputDevice(); - if (interfaceInputDevice != inputDeviceSetting) { - debug("Input Setting & Device mismatch! Input SETTING: " + inputDeviceSetting + "Input DEVICE IN USE: " + interfaceInputDevice); - switchAudioDevice("Use " + inputDeviceSetting + " for Input"); - } - - var outputDeviceSetting = Settings.getValue(OUTPUT_DEVICE_SETTING); - var interfaceOutputDevice = AudioDevice.getOutputDevice(); - if (interfaceOutputDevice != outputDeviceSetting) { - debug("Output Setting & Device mismatch! Output SETTING: " + outputDeviceSetting + "Output DEVICE IN USE: " + interfaceOutputDevice); - switchAudioDevice("Use " + outputDeviceSetting + " for Output"); - } -} - function removeAudioMenus() { Menu.removeSeparator("Audio", "Input Audio Device"); Menu.removeSeparator("Audio", "Output Audio Device"); @@ -124,67 +95,28 @@ function removeAudioMenus() { function onDevicechanged() { debug("System audio devices changed. Removing and replacing Audio Menus..."); setupAudioMenus(); - checkDeviceMismatch(); } function onMenuEvent(audioDeviceMenuString) { - if (!skipMenuEvents) { - switchAudioDevice(audioDeviceMenuString); + if (Menu.isOptionChecked(audioDeviceMenuString) && + (audioDeviceMenuString !== interfaceInputDevice && + audioDeviceMenuString !== interfaceOutputDevice)) { + AudioDevice.setDeviceFromMenu(audioDeviceMenuString) } } -function switchAudioDevice(audioDeviceMenuString) { - // if the device is not plugged in, short-circuit - if (!~audioDevicesList.indexOf(audioDeviceMenuString)) { - return; - } - - var selection = parseMenuItem(audioDeviceMenuString); - if (!selection) { - debug("Invalid Audio audioDeviceMenuString! Doesn't end with 'for Input' or 'for Output'"); - return; - } - - // menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches - skipMenuEvents = true; - Script.setTimeout(function() { skipMenuEvents = false; }, 200); - - var selectedDevice = selection.device; - if (selection.mode == INPUT) { - var currentInputDevice = AudioDevice.getInputDevice(); - if (selectedDevice != currentInputDevice) { - debug("Switching audio INPUT device from " + currentInputDevice + " to " + selectedDevice); - Menu.setIsOptionChecked("Use " + currentInputDevice + " for Input", false); - if (AudioDevice.setInputDevice(selectedDevice)) { - Settings.setValue(INPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - } else { - debug("Error setting audio input device!") - Menu.setIsOptionChecked(audioDeviceMenuString, false); - } +function onCurrentDeviceChanged() { + debug("System audio device switched. 
"); + interfaceInputDevice = "Use " + AudioDevice.getInputDevice() + " for Input"; + interfaceOutputDevice = "Use " + AudioDevice.getOutputDevice() + " for Output"; + for (var index = 0; index < audioDevicesList.length; index++) { + if (audioDevicesList[index] === interfaceInputDevice || + audioDevicesList[index] === interfaceOutputDevice) { + if (Menu.isOptionChecked(audioDevicesList[index]) === false) + Menu.setIsOptionChecked(audioDevicesList[index], true); } else { - debug("Selected input device is the same as the current input device!") - Settings.setValue(INPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - AudioDevice.setInputDevice(selectedDevice); // Still try to force-set the device (in case the user's trying to forcefully debug an issue) - } - } else if (selection.mode == OUTPUT) { - var currentOutputDevice = AudioDevice.getOutputDevice(); - if (selectedDevice != currentOutputDevice) { - debug("Switching audio OUTPUT device from " + currentOutputDevice + " to " + selectedDevice); - Menu.setIsOptionChecked("Use " + currentOutputDevice + " for Output", false); - if (AudioDevice.setOutputDevice(selectedDevice)) { - Settings.setValue(OUTPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - } else { - debug("Error setting audio output device!") - Menu.setIsOptionChecked(audioDeviceMenuString, false); - } - } else { - debug("Selected output device is the same as the current output device!") - Settings.setValue(OUTPUT_DEVICE_SETTING, selectedDevice); - Menu.setIsOptionChecked(audioDeviceMenuString, true); - AudioDevice.setOutputDevice(selectedDevice); // Still try to force-set the device (in case the user's trying to forcefully debug an issue) + if (Menu.isOptionChecked(audioDevicesList[index]) === true) + Menu.setIsOptionChecked(audioDevicesList[index], false); } } } @@ -192,12 +124,12 @@ function switchAudioDevice(audioDeviceMenuString) { function restoreAudio() { if (switchedAudioInputToHMD) { debug("Switching back from HMD preferred audio input to: " + previousSelectedInputAudioDevice); - switchAudioDevice("Use " + previousSelectedInputAudioDevice + " for Input"); + AudioDevice.setInputDeviceAsync(previousSelectedInputAudioDevice) switchedAudioInputToHMD = false; } if (switchedAudioOutputToHMD) { debug("Switching back from HMD preferred audio output to: " + previousSelectedOutputAudioDevice); - switchAudioDevice("Use " + previousSelectedOutputAudioDevice + " for Output"); + AudioDevice.setOutputDeviceAsync(previousSelectedOutputAudioDevice) switchedAudioOutputToHMD = false; } } @@ -224,7 +156,7 @@ function checkHMDAudio() { debug("previousSelectedInputAudioDevice: " + previousSelectedInputAudioDevice); if (hmdPreferredAudioInput != previousSelectedInputAudioDevice) { switchedAudioInputToHMD = true; - switchAudioDevice("Use " + hmdPreferredAudioInput + " for Input"); + AudioDevice.setInputDeviceAsync(hmdPreferredAudioInput) } } if (hmdPreferredAudioOutput !== "") { @@ -233,7 +165,7 @@ function checkHMDAudio() { debug("previousSelectedOutputAudioDevice: " + previousSelectedOutputAudioDevice); if (hmdPreferredAudioOutput != previousSelectedOutputAudioDevice) { switchedAudioOutputToHMD = true; - switchAudioDevice("Use " + hmdPreferredAudioOutput + " for Output"); + AudioDevice.setOutputDeviceAsync(hmdPreferredAudioOutput) } } } else { @@ -255,14 +187,15 @@ function checkHMDAudio() { Script.setTimeout(function () { debug("Connecting deviceChanged(), displayModeChanged(), and switchAudioDevice()..."); 
AudioDevice.deviceChanged.connect(onDevicechanged); + AudioDevice.currentInputDeviceChanged.connect(onCurrentDeviceChanged); + AudioDevice.currentOutputDeviceChanged.connect(onCurrentDeviceChanged); HMD.displayModeChanged.connect(checkHMDAudio); Menu.menuItemEvent.connect(onMenuEvent); debug("Setting up Audio I/O menu for the first time..."); setupAudioMenus(); - checkDeviceMismatch(); debug("Checking HMD audio status...") checkHMDAudio(); -}, 3000); +}, SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT); debug("Connecting scriptEnding()"); Script.scriptEnding.connect(function () { @@ -270,6 +203,8 @@ Script.scriptEnding.connect(function () { removeAudioMenus(); Menu.menuItemEvent.disconnect(onMenuEvent); HMD.displayModeChanged.disconnect(checkHMDAudio); + AudioDevice.currentInputDeviceChanged.disconnect(onCurrentDeviceChanged); + AudioDevice.currentOutputDeviceChanged.disconnect(onCurrentDeviceChanged); AudioDevice.deviceChanged.disconnect(onDevicechanged); }); diff --git a/scripts/system/snapshot.js b/scripts/system/snapshot.js index 77278caadd..2a2e816d45 100644 --- a/scripts/system/snapshot.js +++ b/scripts/system/snapshot.js @@ -111,7 +111,7 @@ function onMessage(message) { case 'openSettings': if ((HMD.active && Settings.getValue("hmdTabletBecomesToolbar", false)) || (!HMD.active && Settings.getValue("desktopTabletBecomesToolbar", true))) { - Desktop.show("hifi/dialogs/GeneralPreferencesDialog.qml", "General Preferences"); + Desktop.show("hifi/dialogs/GeneralPreferencesDialog.qml", "GeneralPreferencesDialog"); } else { tablet.loadQMLOnTop("TabletGeneralPreferences.qml"); } diff --git a/scripts/system/tablet-ui/tabletUI.js b/scripts/system/tablet-ui/tabletUI.js index bd5be142a0..f83e8d9550 100644 --- a/scripts/system/tablet-ui/tabletUI.js +++ b/scripts/system/tablet-ui/tabletUI.js @@ -92,7 +92,7 @@ tabletScalePercentage = getTabletScalePercentageFromSettings(); UIWebTablet = new WebTablet("qml/hifi/tablet/TabletRoot.qml", DEFAULT_WIDTH * (tabletScalePercentage / 100), - null, activeHand, true); + null, activeHand, true, null, false); UIWebTablet.register(); HMD.tabletID = UIWebTablet.tabletEntityID; HMD.homeButtonID = UIWebTablet.homeButtonID; diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/applauseOmeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/applauseOmeter.js new file mode 100644 index 0000000000..f6225d1a13 --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/applauseOmeter.js @@ -0,0 +1,70 @@ +// +// Created by Alan-Michael Moody on 5/2/2017 +// + +(function () { + var thisEntityID; + + this.preload = function (entityID) { + thisEntityID = entityID; + }; + + var SCAN_RATE = 100; //ms + var REFERENCE_FRAME_COUNT = 30; + var MAX_AUDIO_THRESHOLD = 16000; + + var framePool = []; + + function scanEngine() { + var avatarLoudnessPool = []; + + function average(a) { + var sum = 0; + var total = a.length; + for (var i = 0; i < total; i++) { + sum += a[i]; + } + return Math.round(sum / total); + } + + function audioClamp(input) { + if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD; + return input; + } + + + var avatars = AvatarList.getAvatarIdentifiers(); + avatars.forEach(function (id) { + var avatar = AvatarList.getAvatar(id); + avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness))); + + }); + + + framePool.push(average(avatarLoudnessPool)); + if (framePool.length >= REFERENCE_FRAME_COUNT) { + framePool.shift(); + } + + function normalizedAverage(a) { + a = a.map(function (v) { + return Math.round(( 100 / 
MAX_AUDIO_THRESHOLD ) * v); + }); + return average(a); + } + + var norm = normalizedAverage(framePool); + + // we have a range of 55 to -53 degrees for the needle + + var scaledDegrees = (norm / -.94) + 54.5; // shifting scale from 100 to 55 to -53 ish its more like -51 ; + + Entities.setAbsoluteJointRotationInObjectFrame(thisEntityID, 0, Quat.fromPitchYawRollDegrees(0, 0, scaledDegrees)); + + } + + Script.setInterval(function () { + scanEngine(); + }, SCAN_RATE); + +}); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/bakedTextMeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/bakedTextMeter.js new file mode 100644 index 0000000000..021429618e --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/bakedTextMeter.js @@ -0,0 +1,79 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +(function () { + var barID; + + this.preload = function (entityID) { + var children = Entities.getChildrenIDs(entityID); + var childZero = Entities.getEntityProperties(children[0]); + barID = childZero.id; + }; + + var SCAN_RATE = 100; //ms + var REFERENCE_FRAME_COUNT = 30; + var MAX_AUDIO_THRESHOLD = 16000; + + var framePool = []; + + function scanEngine() { + var avatarLoudnessPool = []; + + function average(a) { + var sum = 0; + var total = a.length; + for (var i = 0; i < total; i++) { + sum += a[i]; + } + return Math.round(sum / total); + } + + function audioClamp(input) { + if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD; + return input; + } + + + var avatars = AvatarList.getAvatarIdentifiers(); + avatars.forEach(function (id) { + var avatar = AvatarList.getAvatar(id); + avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness))); + }); + + + framePool.push(average(avatarLoudnessPool)); + if (framePool.length >= REFERENCE_FRAME_COUNT) { + framePool.shift(); + } + + function normalizedAverage(a) { + a = a.map(function (v) { + return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v); + }); + return average(a); + } + + var norm = normalizedAverage(framePool); + + + var barProperties = Entities.getEntityProperties(barID); + + var colorShift = 2.55 * norm; //shifting the scale to 0 - 255 + var xShift = norm / 52; // changing scale from 0-100 to 0-1.9 ish + var normShift = xShift - 0.88; //shifting local displacement (-0.88) + var halfShift = xShift / 2; + Entities.editEntity(barID, { + dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z}, + localPosition: {x: normShift - (halfShift), y: -0.0625, z: -0.015}, + color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue} + }); + + + } + + Script.setInterval(function () { + scanEngine(); + }, SCAN_RATE); + +}); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/meter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/meter.js new file mode 100644 index 0000000000..e753633c0b --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/meter.js @@ -0,0 +1,92 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +(function () { + var barID, textID; + + this.preload = function (entityID) { + + var children = Entities.getChildrenIDs(entityID); + var childZero = Entities.getEntityProperties(children[0]); + var childOne = Entities.getEntityProperties(children[1]); + var childZeroUserData = JSON.parse(Entities.getEntityProperties(children[0]).userData); + + if (childZeroUserData.name === "bar") { + barID = childZero.id; + textID = childOne.id; + } else { + barID = childOne.id; + textID = 
childZero.id; + } + }; + + var SCAN_RATE = 100; //ms + var REFERENCE_FRAME_COUNT = 30; + var MAX_AUDIO_THRESHOLD = 16000; + + var framePool = []; + + function scanEngine() { + var avatarLoudnessPool = []; + + function average(a) { + var sum = 0; + var total = a.length; + for (var i = 0; i < total; i++) { + sum += a[i]; + } + return Math.round(sum / total); + } + + function audioClamp(input) { + if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD; + return input; + } + + + var avatars = AvatarList.getAvatarIdentifiers(); + avatars.forEach(function (id) { + var avatar = AvatarList.getAvatar(id); + avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness))); + + }); + + + framePool.push(average(avatarLoudnessPool)); + if (framePool.length >= REFERENCE_FRAME_COUNT) { + framePool.shift(); + } + + function normalizedAverage(a) { + a = a.map(function (v) { + return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v); + }); + return average(a); + } + + var norm = normalizedAverage(framePool); + + Entities.editEntity(textID, {text: "Loudness: % " + norm}); + + var barProperties = Entities.getEntityProperties(barID); + + + var colorShift = 2.55 * norm; //shifting the scale to 0 - 255 + var xShift = norm / 100; // changing scale from 0-100 to 0-1 + var normShift = xShift - .5; //shifting scale form 0-1 to -.5 to .5 + var halfShift = xShift / 2 ; + Entities.editEntity(barID, { + dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z}, + localPosition: {x: normShift - (halfShift), y: 0, z: 0.1}, + color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue} + }); + + + } + + Script.setInterval(function () { + scanEngine(); + }, SCAN_RATE); + +}); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/models/applauseOmeter.fbx b/unpublishedScripts/marketplace/audienceApplauseMeter/models/applauseOmeter.fbx new file mode 100644 index 0000000000..4f9ae22b32 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/models/applauseOmeter.fbx differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-plastic.fbx b/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-plastic.fbx new file mode 100644 index 0000000000..940ae0d867 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-plastic.fbx differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-text-entity.fbx b/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-text-entity.fbx new file mode 100644 index 0000000000..fd930d3072 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-text-entity.fbx differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-wood.fbx b/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-wood.fbx new file mode 100644 index 0000000000..86b87832c4 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/models/meter-wood.fbx differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/rezApplauseOmeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/rezApplauseOmeter.js new file mode 100644 index 0000000000..1d89861512 --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/rezApplauseOmeter.js @@ -0,0 +1,24 @@ +// +// Created by Alan-Michael Moody on 5/2/2017 +// + +'use strict'; + +(function () { + var pos = Vec3.sum(MyAvatar.position, 
Quat.getFront(MyAvatar.orientation)); + + var meter = { + stand: { + type: 'Model', + modelURL: 'https://binaryrelay.com/files/public-docs/hifi/meter/applauseOmeter.fbx', + lifetime: '3600', + script: 'https://binaryrelay.com/files/public-docs/hifi/meter/applauseOmeter.js', + position: Vec3.sum(pos, {x: 0, y: 2.0, z: 0}) + } + + }; + + + Entities.addEntity(meter.stand); + +})(); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/rezMeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/rezMeter.js new file mode 100644 index 0000000000..d3fba9ea56 --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/rezMeter.js @@ -0,0 +1,67 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +"use strict"; + +(function () { // BEGIN LOCAL_SCOPE + var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation)); + + var graph = { + background: { + type: "Box", + dimensions: {x: 1, y: 1, z: .1}, + color: { + red: 128, + green: 128, + blue: 128 + }, + lifetime: "3600", + script: "https://binaryrelay.com/files/public-docs/hifi/meter/basic/meter.js", + position: pos + }, + bar: { + type: "Box", + parentID: "", + userData: '{"name":"bar"}', + dimensions: {x: .05, y: .25, z: .1}, + color: { + red: 0, + green: 0, + blue: 0 + }, + lifetime: "3600", + position: Vec3.sum(pos, {x: -0.495, y: 0, z: 0.1}) + }, + displayText: { + type: "Text", + parentID: "", + userData: '{"name":"displayText"}', + text: "Loudness: % ", + textColor: { + red: 0, + green: 0, + blue: 0 + }, + backgroundColor: { + red: 128, + green: 128, + blue: 128 + }, + visible: 0.5, + dimensions: {x: 0.70, y: 0.15, z: 0.1}, + lifetime: "3600", + position: Vec3.sum(pos, {x: 0, y: 0.4, z: 0.06}) + } + }; + + var background = Entities.addEntity(graph.background); + + graph.bar.parentID = background; + graph.displayText.parentID = background; + + var bar = Entities.addEntity(graph.bar); + var displayText = Entities.addEntity(graph.displayText); + + +})(); // END LOCAL_SCOPE diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/rezPlasticMeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/rezPlasticMeter.js new file mode 100644 index 0000000000..781585ebf6 --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/rezPlasticMeter.js @@ -0,0 +1,43 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +"use strict"; + +(function () { + var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation)); + + var graph = { + background: { + type: "Model", + modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/plastic/meter-plastic.fbx", + color: { + red: 128, + green: 128, + blue: 128 + }, + lifetime: "3600", + script: "https://binaryrelay.com/files/public-docs/hifi/meter/plastic/meter.js", + position: pos + }, + bar: { + type: "Box", + parentID: "", + userData: '{"name":"bar"}', + dimensions: {x: .05, y: .245, z: .07}, + color: { + red: 0, + green: 0, + blue: 0 + }, + lifetime: "3600", + position: Vec3.sum(pos, {x: -0.90, y: 0, z: -0.15}) + } + }; + + + graph.bar.parentID = Entities.addEntity(graph.background); + Entities.addEntity(graph.bar); + + +})(); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/rezTextEntityMeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/rezTextEntityMeter.js new file mode 100644 index 0000000000..c74d595683 --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/rezTextEntityMeter.js @@ -0,0 +1,67 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +"use 
strict"; + +(function () { + var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation)); + + var graph = { + background: { + type: "Model", + modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/text-entity/meter-text-entity.fbx", + color: { + red: 128, + green: 128, + blue: 128 + }, + lifetime: "3600", + script: "https://binaryrelay.com/files/public-docs/hifi/meter/text-entity/meter.js", + position: pos + }, + bar: { + type: "Box", + parentID: "", + userData: '{"name":"bar"}', + dimensions: {x: .05, y: .245, z: .07}, + color: { + red: 0, + green: 0, + blue: 0 + }, + lifetime: "3600", + position: Vec3.sum(pos, {x: -0.88, y: 0, z: -0.15}) + }, + displayText: { + type: "Text", + parentID: "", + userData: '{"name":"displayText"}', + text: "Make Some Noise:", + textColor: { + red: 0, + green: 0, + blue: 0 + }, + backgroundColor: { + red: 255, + green: 255, + blue: 255 + }, + dimensions: {x: .82, y: 0.115, z: 0.15}, + lifetime: "3600", + lineHeight: .08, + position: Vec3.sum(pos, {x: -0.2, y: 0.175, z: -0.035}) + } + }; + + var background = Entities.addEntity(graph.background); + + graph.bar.parentID = background; + graph.displayText.parentID = background; + + var bar = Entities.addEntity(graph.bar); + var displayText = Entities.addEntity(graph.displayText); + + +})(); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/rezWoodMeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/rezWoodMeter.js new file mode 100644 index 0000000000..b40c60275b --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/rezWoodMeter.js @@ -0,0 +1,42 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +"use strict"; + +(function () { + var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation)); + + var graph = { + background: { + type: "Model", + modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/wood/meter-wood.fbx", + color: { + red: 128, + green: 128, + blue: 128 + }, + lifetime: "3600", + script: "https://binaryrelay.com/files/public-docs/hifi/meter/wood/meter.js", + position: pos + }, + bar: { + type: "Box", + parentID: "", + userData: '{"name":"bar"}', + dimensions: {x: .05, y: .245, z: .07}, + color: { + red: 0, + green: 0, + blue: 0 + }, + lifetime: "3600", + position: Vec3.sum(pos, {x: -0.88, y: 0, z: -0.15}) + } + }; + + graph.bar.parentID = Entities.addEntity(graph.background); + Entities.addEntity(graph.bar); + + +})(); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textEntityMeter.js b/unpublishedScripts/marketplace/audienceApplauseMeter/textEntityMeter.js new file mode 100644 index 0000000000..f290e9604d --- /dev/null +++ b/unpublishedScripts/marketplace/audienceApplauseMeter/textEntityMeter.js @@ -0,0 +1,89 @@ +// +// Created by Alan-Michael Moody on 4/17/2017 +// + +(function () { + var barID, textID, originalText; + + this.preload = function (entityID) { + + var children = Entities.getChildrenIDs(entityID); + var childZero = Entities.getEntityProperties(children[0]); + var childOne = Entities.getEntityProperties(children[1]); + var childZeroUserData = JSON.parse(Entities.getEntityProperties(children[0]).userData); + + if (childZeroUserData.name === "bar") { + barID = childZero.id; + textID = childOne.id; + originalText = childOne.text + } else { + barID = childOne.id; + textID = childZero.id; + originalText = childZero.text; + } + }; + + var SCAN_RATE = 100; //ms + var REFERENCE_FRAME_COUNT = 30; + var MAX_AUDIO_THRESHOLD = 16000; + + var framePool = []; + + function 
scanEngine() { + var avatarLoudnessPool = []; + + function average(a) { + var sum = 0; + var total = a.length; + for (var i = 0; i < total; i++) { + sum += a[i]; + } + return Math.round(sum / total); + } + + function audioClamp(input) { + if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD; + return input; + } + + + var avatars = AvatarList.getAvatarIdentifiers(); + avatars.forEach(function (id) { + var avatar = AvatarList.getAvatar(id); + avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness))); + }); + + + framePool.push(average(avatarLoudnessPool)); + if (framePool.length >= REFERENCE_FRAME_COUNT) { + framePool.shift(); + } + + function normalizedAverage(a) { + a = a.map(function (v) { + return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v); + }); + return average(a); + } + + var norm = normalizedAverage(framePool); + Entities.editEntity(textID, {text: originalText + " % " + norm}); + + var barProperties = Entities.getEntityProperties(barID); + + var colorShift = 2.55 * norm; //shifting the scale to 0 - 255 + var xShift = norm / 52; // changing scale from 0-100 to 0-1.9 ish + var normShift = xShift - 0.88; //shifting local displacement (-0.88) + var halfShift = xShift / 2; + Entities.editEntity(barID, { + dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z}, + localPosition: {x: normShift - ( halfShift ), y: -0.0625, z: -0.015}, + color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue} + }); + } + + Script.setInterval(function () { + scanEngine(); + }, SCAN_RATE); + +}); diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/meter.diffuse.psd b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/meter.diffuse.psd new file mode 100644 index 0000000000..07fed10d31 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/meter.diffuse.psd differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_diffuse.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_diffuse.png new file mode 100644 index 0000000000..4e7a3110ab Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_diffuse.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_diffuse_text.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_diffuse_text.png new file mode 100644 index 0000000000..402f2ecf3f Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_diffuse_text.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_emissive.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_emissive.png new file mode 100644 index 0000000000..cb5ee722f2 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_emissive.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_normal_map.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_normal_map.png new file mode 100644 index 0000000000..c96e377a59 Binary files /dev/null and 
b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_normal_map.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_reflection.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_reflection.png new file mode 100644 index 0000000000..a3023ced35 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_reflection.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_roughness.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_roughness.png new file mode 100644 index 0000000000..f9fcc2040f Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/plastic/meter.done.plastic_roughness.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_diffuse.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_diffuse.png new file mode 100644 index 0000000000..8243b4e250 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_diffuse.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_diffuse_text.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_diffuse_text.png new file mode 100644 index 0000000000..b9d32a5bb8 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_diffuse_text.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_emissive.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_emissive.png new file mode 100644 index 0000000000..cb5ee722f2 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_emissive.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_normal_map.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_normal_map.png new file mode 100644 index 0000000000..3029bf60c4 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_normal_map.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_reflection.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_reflection.png new file mode 100644 index 0000000000..a3023ced35 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_reflection.png differ diff --git a/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_roughness.png b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_roughness.png new file mode 100644 index 0000000000..89d439c404 Binary files /dev/null and b/unpublishedScripts/marketplace/audienceApplauseMeter/textures/wood/meter.done_roughness.png differ