Merge branch 'master' of github.com:highfidelity/hifi into puck-lost-tracking

Dante Ruiz committed on 2017-05-11 23:19:03 +01:00
commit 66020cf446
53 changed files with 1241 additions and 285 deletions

View file

@@ -402,7 +402,7 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
    AvatarData::parseAvatarIdentityPacket(message->getMessage(), identity);
    bool identityChanged = false;
    bool displayNameChanged = false;
-   avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged);
+   avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged, senderNode->getClockSkewUsec());
    if (identityChanged) {
        QMutexLocker nodeDataLocker(&nodeData->getMutex());
        nodeData->flagIdentityChange();

View file

@@ -35,11 +35,6 @@ Rectangle {
    property string title: "Audio Options"
    signal sendToScript(var message);

-   //set models after Components is shown
-   Component.onCompleted: {
-       refreshTimer.start()
-       refreshTimerOutput.start()
-   }

    Component {
        id: separator
@@ -84,7 +79,7 @@ Rectangle {
        }

        Connections {
-           target: AvatarInputs
+           target: AvatarInputs !== undefined ? AvatarInputs : null
            onShowAudioToolsChanged: {
                audioTools.checkbox.checked = showAudioTools
            }
@@ -105,10 +100,12 @@ Rectangle {
            id: audioTools
            width: parent.width
            anchors { left: parent.left; right: parent.right; leftMargin: 30 }
-           checkbox.checked: AvatarInputs.showAudioTools
+           checkbox.checked: AvatarInputs !== undefined ? AvatarInputs.showAudioTools : false
            text.text: qsTr("Show audio level meter")
            onCheckBoxClicked: {
-               AvatarInputs.showAudioTools = checked
+               if (AvatarInputs !== undefined) {
+                   AvatarInputs.showAudioTools = checked
+               }
            }
        }
@@ -138,30 +135,34 @@ Rectangle {
        }

        ListView {
-           Timer {
-               id: refreshTimer
-               interval: 1
-               repeat: false
-               onTriggered: {
-                   //refresh model
-                   inputAudioListView.model = undefined
-                   inputAudioListView.model = AudioDevice.inputAudioDevices
-               }
-           }
            id: inputAudioListView
            anchors { left: parent.left; right: parent.right; leftMargin: 70 }
            height: 125
-           spacing: 16
+           spacing: 0
            clip: true
            snapMode: ListView.SnapToItem
-           delegate: AudioCheckbox {
+           model: AudioDevice
+           delegate: Item {
                width: parent.width
-               checkbox.checked: (modelData === AudioDevice.getInputDevice())
-               text.text: modelData
-               onCheckBoxClicked: {
-                   if (checked) {
-                       AudioDevice.setInputDevice(modelData)
-                       refreshTimer.start()
+               visible: devicemode === 0
+               height: visible ? 36 : 0
+
+               AudioCheckbox {
+                   id: cbin
+                   anchors.verticalCenter: parent.verticalCenter
+                   Binding {
+                       target: cbin.checkbox
+                       property: 'checked'
+                       value: devicechecked
+                   }
+                   width: parent.width
+                   cbchecked: devicechecked
+                   text.text: devicename
+                   onCheckBoxClicked: {
+                       if (checked) {
+                           AudioDevice.setInputDeviceAsync(devicename)
+                       }
                    }
                }
            }
@@ -191,31 +192,33 @@ Rectangle {
                text: qsTr("CHOOSE OUTPUT DEVICE")
            }
        }

        ListView {
            id: outputAudioListView
-           Timer {
-               id: refreshTimerOutput
-               interval: 1
-               repeat: false
-               onTriggered: {
-                   //refresh model
-                   outputAudioListView.model = undefined
-                   outputAudioListView.model = AudioDevice.outputAudioDevices
-               }
-           }
            anchors { left: parent.left; right: parent.right; leftMargin: 70 }
            height: 250
-           spacing: 16
+           spacing: 0
            clip: true
            snapMode: ListView.SnapToItem
-           delegate: AudioCheckbox {
+           model: AudioDevice
+           delegate: Item {
                width: parent.width
-               checkbox.checked: (modelData === AudioDevice.getOutputDevice())
-               text.text: modelData
-               onCheckBoxClicked: {
-                   if (checked) {
-                       AudioDevice.setOutputDevice(modelData)
-                       refreshTimerOutput.start()
+               visible: devicemode === 1
+               height: visible ? 36 : 0
+               AudioCheckbox {
+                   id: cbout
+                   width: parent.width
+                   anchors.verticalCenter: parent.verticalCenter
+                   Binding {
+                       target: cbout.checkbox
+                       property: 'checked'
+                       value: devicechecked
+                   }
+                   text.text: devicename
+                   onCheckBoxClicked: {
+                       if (checked) {
+                           AudioDevice.setOutputDeviceAsync(devicename)
+                       }
                    }
                }
            }

View file

@@ -8,6 +8,7 @@ Row {
    id: row
    spacing: 16
    property alias checkbox: cb
+   property alias cbchecked: cb.checked
    property alias text: txt
    signal checkBoxClicked(bool checked)

View file

@@ -33,6 +33,6 @@ StackView {
    TabletPreferencesDialog {
        id: root
        objectName: "TabletGeneralPreferences"
-       showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect"]
+       showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect", "Vive Pucks Configuration"]
    }
}

View file

@@ -136,8 +136,8 @@ Item {
            for (var i = 0; i < sections.length; i++) {
                totalHeight += sections[i].height + sections[i].getPreferencesHeight();
            }
-           console.log(totalHeight);
-           return totalHeight;
+           var bottomPadding = 100;
+           return (totalHeight + bottomPadding);
        }
    }
}

View file

@@ -2053,6 +2053,8 @@ void Application::initializeUi() {
    rootContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor());

+   rootContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
+
    if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
        rootContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get()));
    }
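Note: the reason Audio.qml above guards with "AvatarInputs !== undefined" is that a context property only exists in QML once it has been registered on the engine's root context, which is what this hunk adds for AvatarInputs. A minimal, self-contained sketch of that registration pattern (ExampleInputs and main.qml are illustrative names, not part of the hifi code):

    #include <QGuiApplication>
    #include <QObject>
    #include <QQmlApplicationEngine>
    #include <QQmlContext>
    #include <QUrl>

    int main(int argc, char** argv) {
        QGuiApplication app(argc, argv);
        QQmlApplicationEngine engine;

        // Anything registered on the root context becomes a global name in QML.
        // Until this line runs (or if it never runs), QML sees the name as
        // "undefined", hence the defensive checks in the QML above.
        QObject* exampleInputs = new QObject(&app);   // stand-in for a real singleton
        engine.rootContext()->setContextProperty("ExampleInputs", exampleInputs);

        engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
        return app.exec();
    }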

View file

@@ -48,7 +48,7 @@ void MyHead::simulate(float deltaTime) {
    FaceTracker* faceTracker = qApp->getActiveFaceTracker();
    _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
    if (_isFaceTrackerConnected) {
-       _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
+       _transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

        if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
@@ -60,11 +60,11 @@ void MyHead::simulate(float deltaTime) {
            const int FUNNEL_BLENDSHAPE = 40;
            const int SMILE_LEFT_BLENDSHAPE = 28;
            const int SMILE_RIGHT_BLENDSHAPE = 29;
-           _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
-           _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
-           _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
-           _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
-           _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
+           _transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
+           _transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
+           _transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
+           _transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
+           _transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
        }
        applyEyelidOffset(getFinalOrientationInWorldFrame());
    }

View file

@@ -11,21 +11,19 @@
#include "AudioClient.h"
#include "AudioDeviceScriptingInterface.h"
+#include "SettingsScriptingInterface.h"

AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() {
    static AudioDeviceScriptingInterface sharedInstance;
    return &sharedInstance;
}

-QStringList AudioDeviceScriptingInterface::inputAudioDevices() const
-{
-    return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioInput).toList();;
+QStringList AudioDeviceScriptingInterface::inputAudioDevices() const {
+    return _inputAudioDevices;
}

-QStringList AudioDeviceScriptingInterface::outputAudioDevices() const
-{
-    return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioOutput).toList();;
+QStringList AudioDeviceScriptingInterface::outputAudioDevices() const {
+    return _outputAudioDevices;
}

bool AudioDeviceScriptingInterface::muted()
@@ -33,11 +31,27 @@ bool AudioDeviceScriptingInterface::muted()
    return getMuted();
}

-AudioDeviceScriptingInterface::AudioDeviceScriptingInterface() {
+AudioDeviceScriptingInterface::AudioDeviceScriptingInterface(): QAbstractListModel(nullptr) {
    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::muteToggled,
            this, &AudioDeviceScriptingInterface::muteToggled);
    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::deviceChanged,
-           this, &AudioDeviceScriptingInterface::deviceChanged);
+           this, &AudioDeviceScriptingInterface::onDeviceChanged, Qt::QueuedConnection);
+   connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentInputDeviceChanged,
+           this, &AudioDeviceScriptingInterface::onCurrentInputDeviceChanged, Qt::QueuedConnection);
+   connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentOutputDeviceChanged,
+           this, &AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged, Qt::QueuedConnection);
+
+   //fill up model
+   onDeviceChanged();
+   //set up previously saved device
+   SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
+   const QString inDevice = settings->getValue("audio_input_device").toString();
+   if (inDevice != _currentInputDevice) {
+       setInputDeviceAsync(inDevice);
+   }
+   const QString outDevice = settings->getValue("audio_output_device").toString();
+   if (outDevice != _currentOutputDevice) {
+       setOutputDeviceAsync(outDevice);
+   }
}

bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
@@ -58,6 +72,43 @@ bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {
    return result;
}
bool AudioDeviceScriptingInterface::setDeviceFromMenu(const QString& deviceMenuName) {
QAudio::Mode mode;
if (deviceMenuName.indexOf("for Output") != -1) {
mode = QAudio::AudioOutput;
} else if (deviceMenuName.indexOf("for Input") != -1) {
mode = QAudio::AudioInput;
} else {
return false;
}
for (ScriptingAudioDeviceInfo di: _devices) {
if (mode == di.mode && deviceMenuName.contains(di.name)) {
if (mode == QAudio::AudioOutput) {
setOutputDeviceAsync(di.name);
} else {
setInputDeviceAsync(di.name);
}
return true;
}
}
return false;
}
void AudioDeviceScriptingInterface::setInputDeviceAsync(const QString& deviceName) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchInputToAudioDevice",
Qt::QueuedConnection,
Q_ARG(const QString&, deviceName));
}
void AudioDeviceScriptingInterface::setOutputDeviceAsync(const QString& deviceName) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchOutputToAudioDevice",
Qt::QueuedConnection,
Q_ARG(const QString&, deviceName));
}
QString AudioDeviceScriptingInterface::getInputDevice() {
    return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioInput);
}
@@ -116,3 +167,105 @@ void AudioDeviceScriptingInterface::setMuted(bool muted)
bool AudioDeviceScriptingInterface::getMuted() {
    return DependencyManager::get<AudioClient>()->isMuted();
}
QVariant AudioDeviceScriptingInterface::data(const QModelIndex& index, int role) const {
//sanity
if (!index.isValid() || index.row() >= _devices.size())
return QVariant();
if (role == Qt::DisplayRole || role == DisplayNameRole) {
return _devices.at(index.row()).name;
} else if (role == SelectedRole) {
return _devices.at(index.row()).selected;
} else if (role == AudioModeRole) {
return (int)_devices.at(index.row()).mode;
}
return QVariant();
}
int AudioDeviceScriptingInterface::rowCount(const QModelIndex& parent) const {
Q_UNUSED(parent)
return _devices.size();
}
QHash<int, QByteArray> AudioDeviceScriptingInterface::roleNames() const {
QHash<int, QByteArray> roles;
roles.insert(DisplayNameRole, "devicename");
roles.insert(SelectedRole, "devicechecked");
roles.insert(AudioModeRole, "devicemode");
return roles;
}
void AudioDeviceScriptingInterface::onDeviceChanged()
{
beginResetModel();
_outputAudioDevices.clear();
_devices.clear();
_currentOutputDevice = getOutputDevice();
for (QString name: getOutputDevices()) {
ScriptingAudioDeviceInfo di;
di.name = name;
di.selected = (name == _currentOutputDevice);
di.mode = QAudio::AudioOutput;
_devices.append(di);
_outputAudioDevices.append(name);
}
emit outputAudioDevicesChanged(_outputAudioDevices);
_inputAudioDevices.clear();
_currentInputDevice = getInputDevice();
for (QString name: getInputDevices()) {
ScriptingAudioDeviceInfo di;
di.name = name;
di.selected = (name == _currentInputDevice);
di.mode = QAudio::AudioInput;
_devices.append(di);
_inputAudioDevices.append(name);
}
emit inputAudioDevicesChanged(_inputAudioDevices);
endResetModel();
emit deviceChanged();
}
void AudioDeviceScriptingInterface::onCurrentInputDeviceChanged(const QString& name)
{
currentDeviceUpdate(name, QAudio::AudioInput);
//we got a signal that device changed. Save it now
SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
settings->setValue("audio_input_device", name);
emit currentInputDeviceChanged(name);
}
void AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged(const QString& name)
{
currentDeviceUpdate(name, QAudio::AudioOutput);
//we got a signal that device changed. Save it now
SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
settings->setValue("audio_output_device", name);
emit currentOutputDeviceChanged(name);
}
void AudioDeviceScriptingInterface::currentDeviceUpdate(const QString& name, QAudio::Mode mode)
{
QVector<int> role;
role.append(SelectedRole);
for (int i = 0; i < _devices.size(); i++) {
ScriptingAudioDeviceInfo di = _devices.at(i);
if (di.mode != mode) {
continue;
}
if (di.selected && di.name != name ) {
di.selected = false;
_devices[i] = di;
emit dataChanged(index(i, 0), index(i, 0), role);
}
if (di.name == name) {
di.selected = true;
_devices[i] = di;
emit dataChanged(index(i, 0), index(i, 0), role);
}
}
}
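The new setInputDeviceAsync()/setOutputDeviceAsync() slots above use QMetaObject::invokeMethod with Qt::QueuedConnection so the actual device switch runs on the AudioClient's own thread instead of blocking the calling script/UI thread. A stand-alone sketch of that cross-thread invocation pattern, with a hypothetical Worker class standing in for AudioClient (assumes the file is built as sketch.cpp so sketch.moc exists):

    #include <QCoreApplication>
    #include <QDebug>
    #include <QMetaObject>
    #include <QObject>
    #include <QThread>

    class Worker : public QObject {
        Q_OBJECT
    public slots:
        void switchDevice(const QString& deviceName) {
            // Executes on the worker's thread because the call was queued.
            qDebug() << "switching to" << deviceName << "on" << QThread::currentThread();
        }
    };

    #include "sketch.moc" // Q_OBJECT in a .cpp file needs the generated moc included

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        QThread workerThread;
        Worker worker;
        worker.moveToThread(&workerThread);
        workerThread.start();

        // Qt::QueuedConnection posts an event to the worker's thread and returns
        // immediately; the slot runs later in that thread's event loop.
        QMetaObject::invokeMethod(&worker, "switchDevice", Qt::QueuedConnection,
                                  Q_ARG(QString, QString("Headset Microphone")));

        return app.exec();
    }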

View file

@@ -15,10 +15,18 @@
#include <QObject>
#include <QString>
#include <QVector>
+#include <QAbstractListModel>
+#include <QAudio>

class AudioEffectOptions;

-class AudioDeviceScriptingInterface : public QObject {
+struct ScriptingAudioDeviceInfo {
+    QString name;
+    bool selected;
+    QAudio::Mode mode;
+};
+
+class AudioDeviceScriptingInterface : public QAbstractListModel {
    Q_OBJECT

    Q_PROPERTY(QStringList inputAudioDevices READ inputAudioDevices NOTIFY inputAudioDevicesChanged)
@@ -32,9 +40,26 @@ public:
    QStringList outputAudioDevices() const;
    bool muted();

+   QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const override;
+   int rowCount(const QModelIndex& parent = QModelIndex()) const override;
+   QHash<int, QByteArray> roleNames() const override;
+
+   enum Roles {
+       DisplayNameRole = Qt::UserRole,
+       SelectedRole,
+       AudioModeRole
+   };
+
+private slots:
+   void onDeviceChanged();
+   void onCurrentInputDeviceChanged(const QString& name);
+   void onCurrentOutputDeviceChanged(const QString& name);
+   void currentDeviceUpdate(const QString& name, QAudio::Mode mode);
+
public slots:
    bool setInputDevice(const QString& deviceName);
    bool setOutputDevice(const QString& deviceName);
+   bool setDeviceFromMenu(const QString& deviceMenuName);

    QString getInputDevice();
    QString getOutputDevice();
@@ -55,15 +80,28 @@ public slots:
    void setMuted(bool muted);

+   void setInputDeviceAsync(const QString& deviceName);
+   void setOutputDeviceAsync(const QString& deviceName);
+
private:
    AudioDeviceScriptingInterface();

signals:
    void muteToggled();
    void deviceChanged();
+   void currentInputDeviceChanged(const QString& name);
+   void currentOutputDeviceChanged(const QString& name);
    void mutedChanged(bool muted);
    void inputAudioDevicesChanged(QStringList inputAudioDevices);
    void outputAudioDevicesChanged(QStringList outputAudioDevices);

+private:
+   QVector<ScriptingAudioDeviceInfo> _devices;
+   QStringList _inputAudioDevices;
+   QStringList _outputAudioDevices;
+   QString _currentInputDevice;
+   QString _currentOutputDevice;
};

#endif // hifi_AudioDeviceScriptingInterface_h

View file

@@ -225,10 +225,6 @@ void Web3DOverlay::setMaxFPS(uint8_t maxFPS) {
}

void Web3DOverlay::render(RenderArgs* args) {
-   if (!_visible || !getParentVisible()) {
-       return;
-   }
-
    QOpenGLContext * currentContext = QOpenGLContext::currentContext();
    QSurface * currentSurface = currentContext->surface();
    if (!_webSurface) {
@@ -282,6 +278,10 @@ void Web3DOverlay::render(RenderArgs* args) {
        _webSurface->resize(QSize(_resolution.x, _resolution.y));
    }

+   if (!_visible || !getParentVisible()) {
+       return;
+   }
+
    vec2 halfSize = getSize() / 2.0f;
    vec4 color(toGlm(getColor()), getAlpha());

View file

@@ -799,7 +799,8 @@ QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) {
QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) {
    QVector<QString> deviceNames;
-   foreach(QAudioDeviceInfo audioDevice, getAvailableDevices(mode)) {
+   const QList<QAudioDeviceInfo> &availableDevice = getAvailableDevices(mode);
+   foreach(const QAudioDeviceInfo &audioDevice, availableDevice) {
        deviceNames << audioDevice.deviceName().trimmed();
    }
    return deviceNames;
@@ -1402,7 +1403,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
        _audioInput->stop();
        _inputDevice = NULL;

-       delete _audioInput;
+       _audioInput->deleteLater();
        _audioInput = NULL;
        _numInputCallbackBytes = 0;
@@ -1418,6 +1419,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
    if (!inputDeviceInfo.isNull()) {
        qCDebug(audioclient) << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
        _inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();
+       emit currentInputDeviceChanged(_inputAudioDeviceName);

        if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
            qCDebug(audioclient) << "The format to be used for audio input is" << _inputFormat;
@@ -1506,11 +1508,13 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
    if (_audioOutput) {
        _audioOutput->stop();

-       delete _audioOutput;
+       //must be deleted in next eventloop cycle when its called from notify()
+       _audioOutput->deleteLater();
        _audioOutput = NULL;

        _loopbackOutputDevice = NULL;
-       delete _loopbackAudioOutput;
+       //must be deleted in next eventloop cycle when its called from notify()
+       _loopbackAudioOutput->deleteLater();
        _loopbackAudioOutput = NULL;

        delete[] _outputMixBuffer;
@@ -1535,6 +1539,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
    if (!outputDeviceInfo.isNull()) {
        qCDebug(audioclient) << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
        _outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed();
+       emit currentOutputDeviceChanged(_outputAudioDeviceName);

        if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
            qCDebug(audioclient) << "The format to be used for audio output is" << _outputFormat;
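The replacement of plain delete with deleteLater() above matters because these switch functions can be reached from a slot connected to signals of the very audio object being torn down (the comment mentions notify()). Destroying a QObject while one of its signals is still being delivered is unsafe; deleteLater() instead queues the destruction for the next event-loop cycle, after the emission has unwound. A generic sketch of the pattern (ExampleSource is an illustrative class, not the hifi code; assumes the file is built as sketch.cpp so sketch.moc exists):

    #include <QCoreApplication>
    #include <QObject>
    #include <QTimer>

    class ExampleSource : public QObject {
        Q_OBJECT
    signals:
        void tick();
    public:
        void fire() { emit tick(); }
    };

    #include "sketch.moc"

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        ExampleSource* source = new ExampleSource;
        QObject::connect(source, &ExampleSource::tick, [&source]() {
            // We are inside a handler for one of source's own signals, so a plain
            // "delete source;" here would destroy an object that is still emitting.
            // deleteLater() defers destruction until control returns to the event loop.
            source->deleteLater();
            source = nullptr;
        });

        source->fire();
        QTimer::singleShot(0, &app, &QCoreApplication::quit);
        return app.exec();
    }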

View file

@@ -222,6 +222,9 @@ signals:
    void muteEnvironmentRequested(glm::vec3 position, float radius);

+   void currentOutputDeviceChanged(const QString& name);
+   void currentInputDeviceChanged(const QString& name);
+
protected:
    AudioClient();
    ~AudioClient();

View file

@@ -151,7 +151,7 @@ void Head::simulate(float deltaTime) {
                              _mouth2,
                              _mouth3,
                              _mouth4,
-                             _blendshapeCoefficients);
+                             _transientBlendshapeCoefficients);

        applyEyelidOffset(getOrientation());
@@ -202,6 +202,13 @@ void Head::calculateMouthShapes(float deltaTime) {
    float trailingAudioJawOpenRatio = (100.0f - deltaTime * NORMAL_HZ) / 100.0f; // --> 0.99 at 60 Hz
    _trailingAudioJawOpen = glm::mix(_trailingAudioJawOpen, _audioJawOpen, trailingAudioJawOpenRatio);

+   // truncate _mouthTime when mouth goes quiet to prevent floating point error on increment
+   const float SILENT_TRAILING_JAW_OPEN = 0.0002f;
+   const float MAX_SILENT_MOUTH_TIME = 10.0f;
+   if (_trailingAudioJawOpen < SILENT_TRAILING_JAW_OPEN && _mouthTime > MAX_SILENT_MOUTH_TIME) {
+       _mouthTime = 0.0f;
+   }
+
    // Advance time at a rate proportional to loudness, and move the mouth shapes through
    // a cycle at differing speeds to create a continuous random blend of shapes.
    _mouthTime += sqrtf(_averageLoudness) * TIMESTEP_CONSTANT * deltaTimeRatio;
@@ -227,15 +234,15 @@ void Head::applyEyelidOffset(glm::quat headOrientation) {
    for (int i = 0; i < 2; i++) {
        const int LEFT_EYE = 8;
-       float eyeCoefficient = _blendshapeCoefficients[i] - _blendshapeCoefficients[LEFT_EYE + i]; // Raw value
+       float eyeCoefficient = _transientBlendshapeCoefficients[i] - _transientBlendshapeCoefficients[LEFT_EYE + i];
        eyeCoefficient = glm::clamp(eyelidOffset + eyeCoefficient * (1.0f - eyelidOffset), -1.0f, 1.0f);
        if (eyeCoefficient > 0.0f) {
-           _blendshapeCoefficients[i] = eyeCoefficient;
-           _blendshapeCoefficients[LEFT_EYE + i] = 0.0f;
+           _transientBlendshapeCoefficients[i] = eyeCoefficient;
+           _transientBlendshapeCoefficients[LEFT_EYE + i] = 0.0f;
        } else {
-           _blendshapeCoefficients[i] = 0.0f;
-           _blendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient;
+           _transientBlendshapeCoefficients[i] = 0.0f;
+           _transientBlendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient;
        }
    }
}
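The _mouthTime reset added above works around a 32-bit float precision limit: _mouthTime only ever grows, and once a float is large enough, adding a small per-frame increment no longer changes its value at all, which would eventually freeze the procedural mouth animation. A tiny stand-alone illustration (the numbers are illustrative, not taken from the engine):

    #include <cstdio>

    int main() {
        // Above 2^24 (~1.67e7) the spacing between adjacent float values exceeds 1.0,
        // so small increments are rounded away entirely.
        float small = 10.0f;
        float large = 20000000.0f;   // past 2^24

        small += 0.25f;   // representable: becomes 10.25
        large += 0.25f;   // lost: still 20000000.0

        std::printf("small = %.2f\n", small);   // 10.25
        std::printf("large = %.2f\n", large);   // 20000000.00
        return 0;
    }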

View file

@@ -445,17 +445,17 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
    if (hasFaceTrackerInfo) {
        auto startSection = destinationBuffer;
        auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
+       auto blendshapeCoefficients = _headData->getSummedBlendshapeCoefficients();

        faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
        faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink;
        faceTrackerInfo->averageLoudness = _headData->_averageLoudness;
        faceTrackerInfo->browAudioLift = _headData->_browAudioLift;
-       faceTrackerInfo->numBlendshapeCoefficients = _headData->_blendshapeCoefficients.size();
+       faceTrackerInfo->numBlendshapeCoefficients = blendshapeCoefficients.size();
        destinationBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);

-       // followed by a variable number of float coefficients
-       memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(), _headData->_blendshapeCoefficients.size() * sizeof(float));
-       destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float);
+       memcpy(destinationBuffer, blendshapeCoefficients.data(), blendshapeCoefficients.size() * sizeof(float));
+       destinationBuffer += blendshapeCoefficients.size() * sizeof(float);

        int numBytes = destinationBuffer - startSection;
        if (outboundDataRateOut) {
@@ -965,7 +965,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
        const int coefficientsSize = sizeof(float) * numCoefficients;
        PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize);
        _headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy!
-       _headData->_baseBlendshapeCoefficients.resize(numCoefficients);
+       _headData->_transientBlendshapeCoefficients.resize(numCoefficients);
        memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize);
        sourceBuffer += coefficientsSize;
        int numBytesRead = sourceBuffer - startSection;
@@ -1495,11 +1495,14 @@ QUrl AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) const {
    return _skeletonModelURL.scheme() == "file" ? emptyURL : _skeletonModelURL;
}

-void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged) {
-   if (identity.updatedAt < _identityUpdatedAt) {
+void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, const qint64 clockSkew) {
+   // Consider the case where this packet is being processed on Client A, and Client A is connected to Sandbox B.
+   // If Client A's system clock is *ahead of* Sandbox B's system clock, "clockSkew" will be *negative*.
+   // If Client A's system clock is *behind* Sandbox B's system clock, "clockSkew" will be *positive*.
+   if ((_identityUpdatedAt > identity.updatedAt - clockSkew) && (_identityUpdatedAt != 0)) {
        qCDebug(avatars) << "Ignoring late identity packet for avatar " << getSessionUUID()
-           << "identity.updatedAt:" << identity.updatedAt << "_identityUpdatedAt:" << _identityUpdatedAt;
+           << "_identityUpdatedAt (" << _identityUpdatedAt << ") is greater than identity.updatedAt - clockSkew (" << identity.updatedAt << "-" << clockSkew << ")";
        return;
    }
@@ -1535,7 +1538,7 @@ void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityC
    // use the timestamp from this identity, since we want to honor the updated times in "server clock"
    // this will overwrite any changes we made locally to this AvatarData's _identityUpdatedAt
-   _identityUpdatedAt = identity.updatedAt;
+   _identityUpdatedAt = identity.updatedAt - clockSkew;
}

QByteArray AvatarData::identityByteArray() const {
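The reworked comparison above is easier to follow with concrete numbers. Judging from the comments in the hunk, getClockSkewUsec() reports the sender's clock minus the local clock (in microseconds), so "identity.updatedAt - clockSkew" translates a timestamp taken on the mixer's clock into the local clock domain before it is compared with the locally stored _identityUpdatedAt. A small worked sketch of that rule (function and variable names here are illustrative):

    #include <cstdint>
    #include <cstdio>

    // clockSkewUsec = (remote clock - local clock); subtracting it converts a
    // remote timestamp into the local clock domain, mirroring the diff above.
    static int64_t remoteToLocalUsec(int64_t remoteTimestampUsec, int64_t clockSkewUsec) {
        return remoteTimestampUsec - clockSkewUsec;
    }

    int main() {
        const int64_t identityUpdatedAt = 1000000;   // stored locally, local clock
        const int64_t packetUpdatedAt   = 1000200;   // from the packet, server clock
        const int64_t clockSkew         = 500;       // server runs 500 us ahead of us

        // 1000200 - 500 = 999700 < 1000000, so the packet is treated as stale even
        // though its raw timestamp looks newer than what is stored locally.
        bool ignore = (identityUpdatedAt > remoteToLocalUsec(packetUpdatedAt, clockSkew))
                      && (identityUpdatedAt != 0);
        std::printf("ignore packet: %s\n", ignore ? "yes" : "no");
        return 0;
    }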

View file

@@ -538,7 +538,7 @@ public:
    // identityChanged returns true if identity has changed, false otherwise.
    // displayNameChanged returns true if displayName has changed, false otherwise.
-   void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged);
+   void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, const qint64 clockSkew);

    QByteArray identityByteArray() const;

View file

@@ -148,7 +148,8 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
        auto avatar = newOrExistingAvatar(identity.uuid, sendingNode);
        bool identityChanged = false;
        bool displayNameChanged = false;
-       avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged);
+       // In this case, the "sendingNode" is the Avatar Mixer.
+       avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged, sendingNode->getClockSkewUsec());
    }
}

View file

@@ -34,8 +34,9 @@ HeadData::HeadData(AvatarData* owningAvatar) :
    _rightEyeBlink(0.0f),
    _averageLoudness(0.0f),
    _browAudioLift(0.0f),
-   _baseBlendshapeCoefficients(QVector<float>(0, 0.0f)),
-   _currBlendShapeCoefficients(QVector<float>(0, 0.0f)),
+   _blendshapeCoefficients(QVector<float>(0, 0.0f)),
+   _transientBlendshapeCoefficients(QVector<float>(0, 0.0f)),
+   _summedBlendshapeCoefficients(QVector<float>(0, 0.0f)),
    _owningAvatar(owningAvatar)
{
@@ -85,22 +86,22 @@ static const QMap<QString, int>& getBlendshapesLookupMap() {
}

const QVector<float>& HeadData::getSummedBlendshapeCoefficients() {
-   int maxSize = std::max(_baseBlendshapeCoefficients.size(), _blendshapeCoefficients.size());
-   if (_currBlendShapeCoefficients.size() != maxSize) {
-       _currBlendShapeCoefficients.resize(maxSize);
+   int maxSize = std::max(_blendshapeCoefficients.size(), _transientBlendshapeCoefficients.size());
+   if (_summedBlendshapeCoefficients.size() != maxSize) {
+       _summedBlendshapeCoefficients.resize(maxSize);
    }

    for (int i = 0; i < maxSize; i++) {
-       if (i >= _baseBlendshapeCoefficients.size()) {
-           _currBlendShapeCoefficients[i] = _blendshapeCoefficients[i];
-       } else if (i >= _blendshapeCoefficients.size()) {
-           _currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i];
+       if (i >= _blendshapeCoefficients.size()) {
+           _summedBlendshapeCoefficients[i] = _transientBlendshapeCoefficients[i];
+       } else if (i >= _transientBlendshapeCoefficients.size()) {
+           _summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i];
        } else {
-           _currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i] + _blendshapeCoefficients[i];
+           _summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i] + _transientBlendshapeCoefficients[i];
        }
    }
-   return _currBlendShapeCoefficients;
+   return _summedBlendshapeCoefficients;
}

void HeadData::setBlendshape(QString name, float val) {
@@ -112,10 +113,10 @@ void HeadData::setBlendshape(QString name, float val) {
        if (_blendshapeCoefficients.size() <= it.value()) {
            _blendshapeCoefficients.resize(it.value() + 1);
        }
-       if (_baseBlendshapeCoefficients.size() <= it.value()) {
-           _baseBlendshapeCoefficients.resize(it.value() + 1);
+       if (_transientBlendshapeCoefficients.size() <= it.value()) {
+           _transientBlendshapeCoefficients.resize(it.value() + 1);
        }
-       _baseBlendshapeCoefficients[it.value()] = val;
+       _blendshapeCoefficients[it.value()] = val;
    }
}
@@ -131,14 +132,16 @@ QJsonObject HeadData::toJson() const {
    QJsonObject blendshapesJson;
    for (auto name : blendshapeLookupMap.keys()) {
        auto index = blendshapeLookupMap[name];
-       if (index >= _blendshapeCoefficients.size()) {
-           continue;
+       float value = 0.0f;
+       if (index < _blendshapeCoefficients.size()) {
+           value += _blendshapeCoefficients[index];
        }
-       auto value = _blendshapeCoefficients[index];
-       if (value == 0.0f) {
-           continue;
+       if (index < _transientBlendshapeCoefficients.size()) {
+           value += _transientBlendshapeCoefficients[index];
+       }
+       if (value != 0.0f) {
+           blendshapesJson[name] = value;
        }
-       blendshapesJson[name] = value;
    }
    if (!blendshapesJson.isEmpty()) {
        headJson[JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS] = blendshapesJson;
@@ -163,8 +166,8 @@ void HeadData::fromJson(const QJsonObject& json) {
        QJsonArray blendshapeCoefficientsJson = jsonValue.toArray();
        for (const auto& blendshapeCoefficient : blendshapeCoefficientsJson) {
            blendshapeCoefficients.push_back((float)blendshapeCoefficient.toDouble());
-           setBlendshapeCoefficients(blendshapeCoefficients);
        }
+       setBlendshapeCoefficients(blendshapeCoefficients);
    } else if (jsonValue.isObject()) {
        QJsonObject blendshapeCoefficientsJson = jsonValue.toObject();
        for (const QString& name : blendshapeCoefficientsJson.keys()) {
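After the renames, the three members have clearer roles: _blendshapeCoefficients is the base pose set through setBlendshape(), _transientBlendshapeCoefficients holds per-frame contributions such as face-tracker and audio-driven values, and getSummedBlendshapeCoefficients() combines them for transmission. The merge rule for two vectors that may differ in length can be distilled into a few lines (a sketch using std::vector, not the HeadData class itself):

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    // Element-wise sum of two coefficient vectors of possibly different lengths;
    // missing elements count as 0. Mirrors the resize/max-size logic above.
    static std::vector<float> sumCoefficients(const std::vector<float>& base,
                                              const std::vector<float>& transient) {
        std::vector<float> summed(std::max(base.size(), transient.size()), 0.0f);
        for (size_t i = 0; i < summed.size(); ++i) {
            if (i < base.size())      { summed[i] += base[i]; }
            if (i < transient.size()) { summed[i] += transient[i]; }
        }
        return summed;
    }

    int main() {
        std::vector<float> base      { 0.1f, 0.0f, 0.3f };
        std::vector<float> transient { 0.0f, 0.5f };        // deliberately shorter
        for (float c : sumCoefficients(base, transient)) {
            std::printf("%.2f ", c);                        // prints: 0.10 0.50 0.30
        }
        std::printf("\n");
        return 0;
    }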

View file

@@ -93,8 +93,8 @@ protected:
    float _browAudioLift;

    QVector<float> _blendshapeCoefficients;
-   QVector<float> _baseBlendshapeCoefficients;
-   QVector<float> _currBlendShapeCoefficients;
+   QVector<float> _transientBlendshapeCoefficients;
+   QVector<float> _summedBlendshapeCoefficients;
    AvatarData* _owningAvatar;

private:

View file

@@ -22,20 +22,20 @@
#include <BuildInfo.h>
#include <GLMHelpers.h>

QString SAVE_DIRECTORY = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation) + "/" + BuildInfo::MODIFIED_ORGANIZATION + "/" + BuildInfo::INTERFACE_NAME + "/hifi-input-recordings/";
QString FILE_PREFIX_NAME = "input-recording-";
QString COMPRESS_EXTENSION = ".tar.gz";

namespace controller {

    QJsonObject poseToJsonObject(const Pose pose) {
        QJsonObject newPose;

        QJsonArray translation;
        translation.append(pose.translation.x);
        translation.append(pose.translation.y);
        translation.append(pose.translation.z);

        QJsonArray rotation;
        rotation.append(pose.rotation.x);
        rotation.append(pose.rotation.y);
@@ -69,7 +69,7 @@ namespace controller {
        QJsonArray angularVelocity = object["angularVelocity"].toArray();

        pose.valid = object["valid"].toBool();

        pose.translation.x = translation[0].toDouble();
        pose.translation.y = translation[1].toDouble();
        pose.translation.z = translation[2].toDouble();
@@ -89,13 +89,13 @@ namespace controller {
        return pose;
    }

    void exportToFile(QJsonObject& object) {
        if (!QDir(SAVE_DIRECTORY).exists()) {
            QDir().mkdir(SAVE_DIRECTORY);
        }

        QString timeStamp = QDateTime::currentDateTime().toString(Qt::ISODate);
        timeStamp.replace(":", "-");
        QString fileName = SAVE_DIRECTORY + FILE_PREFIX_NAME + timeStamp + COMPRESS_EXTENSION;
@@ -124,7 +124,7 @@ namespace controller {
        status = true;
        return object;
    }

    InputRecorder::InputRecorder() {}

    InputRecorder::~InputRecorder() {}
@@ -195,16 +195,16 @@ namespace controller {
        _framesRecorded = data["frameCount"].toInt();
        QJsonArray actionArrayList = data["actionList"].toArray();
        QJsonArray poseArrayList = data["poseList"].toArray();

        for (int actionIndex = 0; actionIndex < actionArrayList.size(); actionIndex++) {
            QJsonArray actionState = actionArrayList[actionIndex].toArray();
            for (int index = 0; index < actionState.size(); index++) {
-               _currentFrameActions[index] = actionState[index].toInt();
+               _currentFrameActions[index] = actionState[index].toDouble();
            }
            _actionStateList.push_back(_currentFrameActions);
            _currentFrameActions = ActionStates(toInt(Action::NUM_ACTIONS));
        }

        for (int poseIndex = 0; poseIndex < poseArrayList.size(); poseIndex++) {
            QJsonArray poseState = poseArrayList[poseIndex].toArray();
            for (int index = 0; index < poseState.size(); index++) {
@@ -250,13 +250,13 @@ namespace controller {
        for(auto& channel : _currentFramePoses) {
            channel = Pose();
        }

        for(auto& channel : _currentFrameActions) {
            channel = 0.0f;
        }
    }
}

    float InputRecorder::getActionState(controller::Action action) {
        if (_actionStateList.size() > 0 ) {
            return _actionStateList[_playCount][toInt(action)];
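The key change in this file is actionState[index].toInt() becoming toDouble(). Action states are analog floats (triggers, stick axes), and QJsonValue::toInt() falls back to its default value (0) whenever the stored number is not a whole number, so fractional values in a recording were flattened to zero on playback. A small illustration, relying on Qt's documented QJsonValue semantics:

    #include <QDebug>
    #include <QJsonValue>

    int main() {
        QJsonValue analog(0.75);   // e.g. a half-pulled trigger stored in a recording

        // toInt() only converts whole numbers; for 0.75 it returns the default (0),
        // which is how analog action data was being lost before this change.
        qDebug() << analog.toInt();      // 0
        qDebug() << analog.toDouble();   // 0.75
        return 0;
    }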

View file

@@ -24,6 +24,7 @@ class AudioScriptingInterface : public QObject, public Dependency {
    SINGLETON_DEPENDENCY

public:
+   virtual ~AudioScriptingInterface() {}
    void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }

protected:

View file

@@ -22,6 +22,8 @@
#include <ui-plugins/PluginContainer.h>
#include <UserActivityLogger.h>
#include <NumericalConstants.h>
+#include <Preferences.h>
+#include <SettingHandle.h>
#include <OffscreenUi.h>
#include <GLMHelpers.h>
#include <glm/ext.hpp>
@@ -280,14 +282,21 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr
    glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat);

    int puckCount = (int)_validTrackedObjects.size();
-   if (puckCount == MIN_PUCK_COUNT) {
-       _config = Config::Feet;
-   } else if (puckCount == MIN_FEET_AND_HIPS) {
-       _config = Config::FeetAndHips;
-   } else if (puckCount >= MIN_FEET_HIPS_CHEST) {
-       _config = Config::FeetHipsAndChest;
-   } else {
+   _config = _preferedConfig;
+   if (_config != Config::Auto && puckCount < MIN_PUCK_COUNT) {
+       uncalibrate();
        return;
+   } else if (_config == Config::Auto){
+       if (puckCount == MIN_PUCK_COUNT) {
+           _config = Config::Feet;
+       } else if (puckCount == MIN_FEET_AND_HIPS) {
+           _config = Config::FeetAndHips;
+       } else if (puckCount >= MIN_FEET_HIPS_CHEST) {
+           _config = Config::FeetHipsAndChest;
+       } else {
+           uncalibrate();
+           return;
+       }
    }

    std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition);
@@ -314,19 +323,23 @@
    if (_config == Config::Feet) {
        // done
-   } else if (_config == Config::FeetAndHips) {
+   } else if (_config == Config::FeetAndHips && puckCount >= MIN_FEET_AND_HIPS) {
        _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first;
        _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second);
-   } else if (_config == Config::FeetHipsAndChest) {
+   } else if (_config == Config::FeetHipsAndChest && puckCount >= MIN_FEET_HIPS_CHEST) {
        _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first;
        _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second);
        _jointToPuckMap[controller::SPINE2] = _validTrackedObjects[CHEST].first;
        _pucksOffset[_validTrackedObjects[CHEST].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultSpine2, _validTrackedObjects[CHEST].second);
+   } else {
+       uncalibrate();
+       return;
    }
    _calibrated = true;
}

void ViveControllerManager::InputDevice::uncalibrate() {
+   _config = Config::Auto;
    _pucksOffset.clear();
    _jointToPuckMap.clear();
    _calibrated = false;
@@ -562,6 +575,74 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef
    }
}
void ViveControllerManager::InputDevice::loadSettings() {
Settings settings;
settings.beginGroup("PUCK_CONFIG");
{
_preferedConfig = (Config)settings.value("configuration", QVariant((int)Config::Auto)).toInt();
}
settings.endGroup();
}
void ViveControllerManager::InputDevice::saveSettings() const {
Settings settings;
settings.beginGroup("PUCK_CONFIG");
{
settings.setValue(QString("configuration"), (int)_preferedConfig);
}
settings.endGroup();
}
QString ViveControllerManager::InputDevice::configToString() {
QString currentConfig;
switch (_preferedConfig) {
case Config::Auto:
currentConfig = "Auto";
break;
case Config::Feet:
currentConfig = "Feet";
break;
case Config::FeetAndHips:
currentConfig = "FeetAndHips";
break;
case Config::FeetHipsAndChest:
currentConfig = "FeetHipsAndChest";
break;
}
return currentConfig;
}
void ViveControllerManager::InputDevice::setConfigFromString(const QString& value) {
if (value == "Auto") {
_preferedConfig = Config::Auto;
} else if (value == "Feet") {
_preferedConfig = Config::Feet;
} else if (value == "FeetAndHips") {
_preferedConfig = Config::FeetAndHips;
} else if (value == "FeetHipsAndChest") {
_preferedConfig = Config::FeetHipsAndChest;
}
}
void ViveControllerManager::InputDevice::createPreferences() {
loadSettings();
auto preferences = DependencyManager::get<Preferences>();
static const QString VIVE_PUCKS_CONFIG = "Vive Pucks Configuration";
{
auto getter = [this]()->QString { return configToString(); };
auto setter = [this](const QString& value) { setConfigFromString(value); saveSettings(); };
auto preference = new ComboBoxPreference(VIVE_PUCKS_CONFIG, "Configuration", getter, setter);
QStringList list = (QStringList() << "Auto" << "Feet" << "FeetAndHips" << "FeetHipsAndChest");
preference->setItems(list);
preferences->addPreference(preference);
}
}
controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const {
    using namespace controller;
    QVector<Input::NamedPair> availableInputs{
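The calibrate() changes interleave the new user preference with the old puck-count heuristic, which is a little hard to follow in diff form. The net selection rule is: an explicit preference is honoured only when enough pucks are tracked for it (otherwise calibration is abandoned), while Auto guesses the configuration from the puck count as before. A compact, hedged restatement of that rule (standalone sketch, not the plugin code; the constant names mirror the MIN_* identifiers above but their numeric values here are assumed for illustration):

    #include <cstdio>

    enum class Config { Feet, FeetAndHips, FeetHipsAndChest, Auto };

    // Assumed thresholds: 2 feet pucks, +1 for hips, +1 for chest.
    constexpr int MIN_PUCK_COUNT = 2;
    constexpr int MIN_FEET_AND_HIPS = 3;
    constexpr int MIN_FEET_HIPS_CHEST = 4;

    static int pucksNeeded(Config c) {
        switch (c) {
            case Config::FeetHipsAndChest: return MIN_FEET_HIPS_CHEST;
            case Config::FeetAndHips:      return MIN_FEET_AND_HIPS;
            default:                       return MIN_PUCK_COUNT;
        }
    }

    // Returns the configuration to calibrate with; Config::Auto here means
    // "give up and stay uncalibrated" (a sketch of the net effect only).
    static Config chooseConfig(Config preferred, int puckCount) {
        if (preferred != Config::Auto) {
            return puckCount >= pucksNeeded(preferred) ? preferred : Config::Auto;
        }
        if (puckCount >= MIN_FEET_HIPS_CHEST) { return Config::FeetHipsAndChest; }
        if (puckCount == MIN_FEET_AND_HIPS)   { return Config::FeetAndHips; }
        if (puckCount == MIN_PUCK_COUNT)      { return Config::Feet; }
        return Config::Auto;   // not enough pucks
    }

    int main() {
        std::printf("%d\n", static_cast<int>(chooseConfig(Config::Auto, 3)));        // FeetAndHips
        std::printf("%d\n", static_cast<int>(chooseConfig(Config::FeetAndHips, 2))); // Auto: too few pucks
        return 0;
    }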

View file

@@ -51,14 +51,14 @@ public:
private:
    class InputDevice : public controller::InputDevice {
    public:
-       InputDevice(vr::IVRSystem*& system) : controller::InputDevice("Vive"), _system(system) {}
+       InputDevice(vr::IVRSystem*& system) : controller::InputDevice("Vive"), _system(system) { createPreferences(); }
    private:
        // Device functions
        controller::Input::NamedVector getAvailableInputs() const override;
        QString getDefaultMappingConfig() const override;
        void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;
        void focusOutEvent() override;
+       void createPreferences();
        bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override;
        void hapticsHelper(float deltaTime, bool leftHand);
        void calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration);
@@ -101,8 +101,9 @@ private:
            float _timer { 0.0f };
            glm::vec2 _stick { 0.0f, 0.0f };
        };
-       enum class Config { Feet, FeetAndHips, FeetHipsAndChest, NoConfig };
-       Config _config { Config::NoConfig };
+       enum class Config { Feet, FeetAndHips, FeetHipsAndChest, Auto };
+       Config _config { Config::Auto };
+       Config _preferedConfig { Config::Auto };
        FilteredStick _filteredLeftStick;
        FilteredStick _filteredRightStick;
@@ -127,6 +128,10 @@ private:
        bool _timeTilCalibrationSet { false };
        mutable std::recursive_mutex _lock;

+       QString configToString();
+       void setConfigFromString(const QString& value);
+       void loadSettings();
+       void saveSettings() const;
        friend class ViveControllerManager;
    };

View file

@@ -96,7 +96,7 @@ function calcSpawnInfo(hand, height) {
 * @param hand [number] -1 indicates no hand, Controller.Standard.RightHand or Controller.Standard.LeftHand
 * @param clientOnly [bool] true indicates tablet model is only visible to client.
 */
-WebTablet = function (url, width, dpi, hand, clientOnly, location) {
+WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) {

    var _this = this;
@@ -107,6 +107,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
    this.depth = TABLET_NATURAL_DIMENSIONS.z * tabletScaleFactor;
    this.landscape = false;

+   visible = visible === true;
+
    if (dpi) {
        this.dpi = dpi;
    } else {
@@ -125,7 +127,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
            "grabbableKey": {"grabbable": true}
        }),
        dimensions: this.getDimensions(),
-       parentID: AVATAR_SELF_ID
+       parentID: AVATAR_SELF_ID,
+       visible: visible
    };

    // compute position, rotation & parentJointIndex of the tablet
@@ -158,7 +161,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
        parentID: this.tabletEntityID,
        parentJointIndex: -1,
        showKeyboardFocusHighlight: false,
-       isAA: HMD.active
+       isAA: HMD.active,
+       visible: visible
    });

    var HOME_BUTTON_Y_OFFSET = (this.height / 2) - (this.height / 20);
@@ -168,7 +172,7 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
        localRotation: {x: 0, y: 1, z: 0, w: 0},
        dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor},
        alpha: 0.0,
-       visible: true,
+       visible: visible,
        drawInFront: false,
        parentID: this.tabletEntityID,
        parentJointIndex: -1

View file

@@ -3,6 +3,7 @@
//  examples
//
//  Created by Brad hefta-Gaub on 10/1/14.
+//  Modified by Daniela Fontes @DanielaFifo and Tiago Andrade @TagoWill on 4/7/2017
//  Copyright 2014 High Fidelity, Inc.
//
//  This script implements a class useful for building tools for editing entities.
@@ -2592,6 +2593,16 @@ SelectionDisplay = (function() {
    // pivot - point to use as a pivot
    // offset - the position of the overlay tool relative to the selections center position
    var makeStretchTool = function(stretchMode, direction, pivot, offset, customOnMove) {
// directionFor3DStretch - direction and pivot for 3D stretch
// distanceFor3DStretch - distance from the intersection point and the handController
// used to increase the scale taking into account the distance to the object
// DISTANCE_INFLUENCE_THRESHOLD - constant that holds the minimum distance where the
// distance to the object will influence the stretch/resize/scale
var directionFor3DStretch = getDirectionsFor3DStretch(stretchMode);
var distanceFor3DStretch = 0;
var DISTANCE_INFLUENCE_THRESHOLD = 1.2;
        var signs = {
            x: direction.x < 0 ? -1 : (direction.x > 0 ? 1 : 0),
            y: direction.y < 0 ? -1 : (direction.y > 0 ? 1 : 0),
@@ -2603,18 +2614,23 @@ SelectionDisplay = (function() {
            y: Math.abs(direction.y) > 0 ? 1 : 0,
            z: Math.abs(direction.z) > 0 ? 1 : 0,
        };

        var numDimensions = mask.x + mask.y + mask.z;

        var planeNormal = null;
        var lastPick = null;
+       var lastPick3D = null;
        var initialPosition = null;
        var initialDimensions = null;
        var initialIntersection = null;
        var initialProperties = null;
        var registrationPoint = null;
        var deltaPivot = null;
+       var deltaPivot3D = null;
        var pickRayPosition = null;
+       var pickRayPosition3D = null;
        var rotation = null;

        var onBegin = function(event) {
@@ -2652,8 +2668,20 @@ SelectionDisplay = (function() {
            // Scaled offset in world coordinates
            var scaledOffsetWorld = vec3Mult(initialDimensions, offsetRP);
            pickRayPosition = Vec3.sum(initialPosition, Vec3.multiplyQbyV(rotation, scaledOffsetWorld));
if (directionFor3DStretch) {
// pivot, offset and pickPlanePosition for 3D manipulation
var scaledPivot3D = Vec3.multiply(0.5, Vec3.multiply(1.0, directionFor3DStretch));
deltaPivot3D = Vec3.subtract(centeredRP, scaledPivot3D);
var scaledOffsetWorld3D = vec3Mult(initialDimensions,
Vec3.subtract(Vec3.multiply(0.5, Vec3.multiply(-1.0, directionFor3DStretch)),
centeredRP));
pickRayPosition3D = Vec3.sum(initialPosition, Vec3.multiplyQbyV(rotation, scaledOffsetWorld));
}
            var start = null;
            var end = null;
            if (numDimensions == 1 && mask.x) {
@@ -2754,12 +2782,25 @@ SelectionDisplay = (function() {
                };
            }
        }
        planeNormal = Vec3.multiplyQbyV(rotation, planeNormal);
        var pickRay = generalComputePickRay(event.x, event.y);
        lastPick = rayPlaneIntersection(pickRay,
                                        pickRayPosition,
                                        planeNormal);
var planeNormal3D = {
x: 0,
y: 0,
z: 0
};
if (directionFor3DStretch) {
lastPick3D = rayPlaneIntersection(pickRay,
pickRayPosition3D,
planeNormal3D);
distanceFor3DStretch = Vec3.length(Vec3.subtract(pickRayPosition3D, pickRay.origin));
}
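// distanceFor3DStretch is the distance from the pick ray origin to the 3D handle position, captured here
// in onBegin; onMove later multiplies the hand motion by it once it exceeds DISTANCE_INFLUENCE_THRESHOLD.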
SelectionManager.saveProperties(); SelectionManager.saveProperties();
}; };
@ -2790,24 +2831,50 @@ SelectionDisplay = (function() {
dimensions = SelectionManager.worldDimensions; dimensions = SelectionManager.worldDimensions;
rotation = SelectionManager.worldRotation; rotation = SelectionManager.worldRotation;
} }
var localDeltaPivot = deltaPivot;
var localSigns = signs;
var pickRay = generalComputePickRay(event.x, event.y); var pickRay = generalComputePickRay(event.x, event.y);
newPick = rayPlaneIntersection(pickRay,
// Are we using hand controllers or the mouse? Only relevant for the 3D tools
var controllerPose = getControllerWorldLocation(activeHand, true);
if (HMD.isHMDAvailable()
&& HMD.isHandControllerAvailable() && controllerPose.valid && that.triggered && directionFor3DStretch) {
localDeltaPivot = deltaPivot3D;
newPick = pickRay.origin;
var vector = Vec3.subtract(newPick, lastPick3D);
vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector);
if (distanceFor3DStretch > DISTANCE_INFLUENCE_THRESHOLD) {
// scale the hand motion by the controller-to-handle distance to extend the range of motion
vector = Vec3.multiply(distanceFor3DStretch, vector);
}
localSigns = directionFor3DStretch;
} else {
newPick = rayPlaneIntersection(pickRay,
pickRayPosition, pickRayPosition,
planeNormal); planeNormal);
var vector = Vec3.subtract(newPick, lastPick); var vector = Vec3.subtract(newPick, lastPick);
vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector); vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector);
vector = vec3Mult(mask, vector);
vector = vec3Mult(mask, vector);
}
if (customOnMove) { if (customOnMove) {
var change = Vec3.multiply(-1, vec3Mult(signs, vector)); var change = Vec3.multiply(-1, vec3Mult(localSigns, vector));
customOnMove(vector, change); customOnMove(vector, change);
} else { } else {
vector = grid.snapToSpacing(vector); vector = grid.snapToSpacing(vector);
var changeInDimensions = Vec3.multiply(-1, vec3Mult(signs, vector)); var changeInDimensions = Vec3.multiply(-1, vec3Mult(localSigns, vector));
var newDimensions; var newDimensions;
if (proportional) { if (proportional) {
var absX = Math.abs(changeInDimensions.x); var absX = Math.abs(changeInDimensions.x);
@ -2829,37 +2896,39 @@ SelectionDisplay = (function() {
} else { } else {
newDimensions = Vec3.sum(initialDimensions, changeInDimensions); newDimensions = Vec3.sum(initialDimensions, changeInDimensions);
} }
newDimensions.x = Math.max(newDimensions.x, MINIMUM_DIMENSION);
newDimensions.y = Math.max(newDimensions.y, MINIMUM_DIMENSION);
newDimensions.z = Math.max(newDimensions.z, MINIMUM_DIMENSION);
var changeInPosition = Vec3.multiplyQbyV(rotation, vec3Mult(deltaPivot, changeInDimensions));
var newPosition = Vec3.sum(initialPosition, changeInPosition);
for (var i = 0; i < SelectionManager.selections.length; i++) {
Entities.editEntity(SelectionManager.selections[i], {
position: newPosition,
dimensions: newDimensions,
});
}
var wantDebug = false;
if (wantDebug) {
print(stretchMode);
//Vec3.print(" newIntersection:", newIntersection);
Vec3.print(" vector:", vector);
//Vec3.print(" oldPOS:", oldPOS);
//Vec3.print(" newPOS:", newPOS);
Vec3.print(" changeInDimensions:", changeInDimensions);
Vec3.print(" newDimensions:", newDimensions);
Vec3.print(" changeInPosition:", changeInPosition);
Vec3.print(" newPosition:", newPosition);
}
SelectionManager._update();
} }
newDimensions.x = Math.max(newDimensions.x, MINIMUM_DIMENSION);
newDimensions.y = Math.max(newDimensions.y, MINIMUM_DIMENSION);
newDimensions.z = Math.max(newDimensions.z, MINIMUM_DIMENSION);
var changeInPosition = Vec3.multiplyQbyV(rotation, vec3Mult(localDeltaPivot, changeInDimensions));
var newPosition = Vec3.sum(initialPosition, changeInPosition);
for (var i = 0; i < SelectionManager.selections.length; i++) {
Entities.editEntity(SelectionManager.selections[i], {
position: newPosition,
dimensions: newDimensions,
});
}
var wantDebug = false;
if (wantDebug) {
print(stretchMode);
//Vec3.print(" newIntersection:", newIntersection);
Vec3.print(" vector:", vector);
//Vec3.print(" oldPOS:", oldPOS);
//Vec3.print(" newPOS:", newPOS);
Vec3.print(" changeInDimensions:", changeInDimensions);
Vec3.print(" newDimensions:", newDimensions);
Vec3.print(" changeInPosition:", changeInPosition);
Vec3.print(" newPosition:", newPosition);
}
SelectionManager._update();
}; };
@ -2870,6 +2939,75 @@ SelectionDisplay = (function() {
onEnd: onEnd onEnd: onEnd
}; };
}; };
// Directions for the stretch tool when using a hand controller
var directionsFor3DGrab = {
LBN: {
x: 1,
y: 1,
z: 1
},
RBN: {
x: -1,
y: 1,
z: 1
},
LBF: {
x: 1,
y: 1,
z: -1
},
RBF: {
x: -1,
y: 1,
z: -1
},
LTN: {
x: 1,
y: -1,
z: 1
},
RTN: {
x: -1,
y: -1,
z: 1
},
LTF: {
x: 1,
y: -1,
z: -1
},
RTF: {
x: -1,
y: -1,
z: -1
}
};
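// Naming convention (from the values above): L/R picks the sign of x, B/T the sign of y, N/F the sign of z,
// e.g. RTF (right-top-far) maps to { x: -1, y: -1, z: -1 }.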
// Returns a vector with directions for the stretch tool in 3D using hand controllers
function getDirectionsFor3DStretch(mode) {
if (mode === "STRETCH_LBN") {
return directionsFor3DGrab.LBN;
} else if (mode === "STRETCH_RBN") {
return directionsFor3DGrab.RBN;
} else if (mode === "STRETCH_LBF") {
return directionsFor3DGrab.LBF;
} else if (mode === "STRETCH_RBF") {
return directionsFor3DGrab.RBF;
} else if (mode === "STRETCH_LTN") {
return directionsFor3DGrab.LTN;
} else if (mode === "STRETCH_RTN") {
return directionsFor3DGrab.RTN;
} else if (mode === "STRETCH_LTF") {
return directionsFor3DGrab.LTF;
} else if (mode === "STRETCH_RTF") {
return directionsFor3DGrab.RTF;
} else {
return null;
}
}
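// Illustrative sketch (not part of the change): the helper maps a corner-handle mode string to its
// direction vector and returns null for every other mode, which keeps makeStretchTool on the 2D/mouse path.
//     getDirectionsFor3DStretch("STRETCH_RBF");     // -> { x: -1, y: 1, z: -1 }
//     getDirectionsFor3DStretch("SOME_OTHER_MODE"); // -> null ("SOME_OTHER_MODE" is a hypothetical name)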
function addStretchTool(overlay, mode, pivot, direction, offset, handleMove) { function addStretchTool(overlay, mode, pivot, direction, offset, handleMove) {
if (!pivot) { if (!pivot) {

View file

@ -17,32 +17,22 @@
const INPUT = "Input"; const INPUT = "Input";
const OUTPUT = "Output"; const OUTPUT = "Output";
function parseMenuItem(item) { const SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT = 300;
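// milliseconds to wait before wiring up the audio menus at startup (used in Script.setTimeout below)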
const USE = "Use ";
const FOR_INPUT = " for " + INPUT;
const FOR_OUTPUT = " for " + OUTPUT;
if (item.slice(0, USE.length) == USE) {
if (item.slice(-FOR_INPUT.length) == FOR_INPUT) {
return { device: item.slice(USE.length, -FOR_INPUT.length), mode: INPUT };
} else if (item.slice(-FOR_OUTPUT.length) == FOR_OUTPUT) {
return { device: item.slice(USE.length, -FOR_OUTPUT.length), mode: OUTPUT };
}
}
}
// //
// VAR DEFINITIONS // VAR DEFINITIONS
// //
var debugPrintStatements = true; var debugPrintStatements = true;
const INPUT_DEVICE_SETTING = "audio_input_device"; const INPUT_DEVICE_SETTING = "audio_input_device";
const OUTPUT_DEVICE_SETTING = "audio_output_device"; const OUTPUT_DEVICE_SETTING = "audio_output_device";
var audioDevicesList = []; var audioDevicesList = []; // placeholder for menu items
var wasHmdActive = false; // assume it's not active to start var wasHmdActive = false; // assume it's not active to start
var switchedAudioInputToHMD = false; var switchedAudioInputToHMD = false;
var switchedAudioOutputToHMD = false; var switchedAudioOutputToHMD = false;
var previousSelectedInputAudioDevice = ""; var previousSelectedInputAudioDevice = "";
var previousSelectedOutputAudioDevice = ""; var previousSelectedOutputAudioDevice = "";
var skipMenuEvents = true;
var interfaceInputDevice = "";
var interfaceOutputDevice = "";
// //
// BEGIN FUNCTION DEFINITIONS // BEGIN FUNCTION DEFINITIONS
@ -56,56 +46,37 @@ function debug() {
function setupAudioMenus() { function setupAudioMenus() {
// menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches // menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches
skipMenuEvents = true;
Script.setTimeout(function() { skipMenuEvents = false; }, 200);
removeAudioMenus(); removeAudioMenus();
// Setup audio input devices // Setup audio input devices
Menu.addSeparator("Audio", "Input Audio Device"); Menu.addSeparator("Audio", "Input Audio Device");
var inputDevices = AudioDevice.getInputDevices(); var currentInputDevice = AudioDevice.getInputDevice()
for (var i = 0; i < inputDevices.length; i++) { for (var i = 0; i < AudioDevice.inputAudioDevices.length; i++) {
var audioDeviceMenuString = "Use " + inputDevices[i] + " for Input"; var audioDeviceMenuString = "Use " + AudioDevice.inputAudioDevices[i] + " for Input";
Menu.addMenuItem({ Menu.addMenuItem({
menuName: "Audio", menuName: "Audio",
menuItemName: audioDeviceMenuString, menuItemName: audioDeviceMenuString,
isCheckable: true, isCheckable: true,
isChecked: inputDevices[i] == AudioDevice.getInputDevice() isChecked: AudioDevice.inputAudioDevices[i] == currentInputDevice
}); });
audioDevicesList.push(audioDeviceMenuString); audioDevicesList.push(audioDeviceMenuString);
} }
// Setup audio output devices // Setup audio output devices
Menu.addSeparator("Audio", "Output Audio Device"); Menu.addSeparator("Audio", "Output Audio Device");
var outputDevices = AudioDevice.getOutputDevices(); var currentOutputDevice = AudioDevice.getOutputDevice()
for (var i = 0; i < outputDevices.length; i++) { for (var i = 0; i < AudioDevice.outputAudioDevices.length; i++) {
var audioDeviceMenuString = "Use " + outputDevices[i] + " for Output"; var audioDeviceMenuString = "Use " + AudioDevice.outputAudioDevices[i] + " for Output";
Menu.addMenuItem({ Menu.addMenuItem({
menuName: "Audio", menuName: "Audio",
menuItemName: audioDeviceMenuString, menuItemName: audioDeviceMenuString,
isCheckable: true, isCheckable: true,
isChecked: outputDevices[i] == AudioDevice.getOutputDevice() isChecked: AudioDevice.outputAudioDevices[i] == currentOutputDevice
}); });
audioDevicesList.push(audioDeviceMenuString); audioDevicesList.push(audioDeviceMenuString);
} }
} }
function checkDeviceMismatch() {
var inputDeviceSetting = Settings.getValue(INPUT_DEVICE_SETTING);
var interfaceInputDevice = AudioDevice.getInputDevice();
if (interfaceInputDevice != inputDeviceSetting) {
debug("Input Setting & Device mismatch! Input SETTING: " + inputDeviceSetting + "Input DEVICE IN USE: " + interfaceInputDevice);
switchAudioDevice("Use " + inputDeviceSetting + " for Input");
}
var outputDeviceSetting = Settings.getValue(OUTPUT_DEVICE_SETTING);
var interfaceOutputDevice = AudioDevice.getOutputDevice();
if (interfaceOutputDevice != outputDeviceSetting) {
debug("Output Setting & Device mismatch! Output SETTING: " + outputDeviceSetting + "Output DEVICE IN USE: " + interfaceOutputDevice);
switchAudioDevice("Use " + outputDeviceSetting + " for Output");
}
}
function removeAudioMenus() { function removeAudioMenus() {
Menu.removeSeparator("Audio", "Input Audio Device"); Menu.removeSeparator("Audio", "Input Audio Device");
Menu.removeSeparator("Audio", "Output Audio Device"); Menu.removeSeparator("Audio", "Output Audio Device");
@ -124,67 +95,28 @@ function removeAudioMenus() {
function onDevicechanged() { function onDevicechanged() {
debug("System audio devices changed. Removing and replacing Audio Menus..."); debug("System audio devices changed. Removing and replacing Audio Menus...");
setupAudioMenus(); setupAudioMenus();
checkDeviceMismatch();
} }
function onMenuEvent(audioDeviceMenuString) { function onMenuEvent(audioDeviceMenuString) {
if (!skipMenuEvents) { if (Menu.isOptionChecked(audioDeviceMenuString) &&
switchAudioDevice(audioDeviceMenuString); (audioDeviceMenuString !== interfaceInputDevice &&
audioDeviceMenuString !== interfaceOutputDevice)) {
AudioDevice.setDeviceFromMenu(audioDeviceMenuString)
} }
} }
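// Note: the isOptionChecked / interface*Device guard above skips items that already name the active device,
// so the programmatic re-checks done in onCurrentDeviceChanged() below don't trigger another device switch.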
function switchAudioDevice(audioDeviceMenuString) { function onCurrentDeviceChanged() {
// if the device is not plugged in, short-circuit debug("System audio device switched. ");
if (!~audioDevicesList.indexOf(audioDeviceMenuString)) { interfaceInputDevice = "Use " + AudioDevice.getInputDevice() + " for Input";
return; interfaceOutputDevice = "Use " + AudioDevice.getOutputDevice() + " for Output";
} for (var index = 0; index < audioDevicesList.length; index++) {
if (audioDevicesList[index] === interfaceInputDevice ||
var selection = parseMenuItem(audioDeviceMenuString); audioDevicesList[index] === interfaceOutputDevice) {
if (!selection) { if (Menu.isOptionChecked(audioDevicesList[index]) === false)
debug("Invalid Audio audioDeviceMenuString! Doesn't end with 'for Input' or 'for Output'"); Menu.setIsOptionChecked(audioDevicesList[index], true);
return;
}
// menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches
skipMenuEvents = true;
Script.setTimeout(function() { skipMenuEvents = false; }, 200);
var selectedDevice = selection.device;
if (selection.mode == INPUT) {
var currentInputDevice = AudioDevice.getInputDevice();
if (selectedDevice != currentInputDevice) {
debug("Switching audio INPUT device from " + currentInputDevice + " to " + selectedDevice);
Menu.setIsOptionChecked("Use " + currentInputDevice + " for Input", false);
if (AudioDevice.setInputDevice(selectedDevice)) {
Settings.setValue(INPUT_DEVICE_SETTING, selectedDevice);
Menu.setIsOptionChecked(audioDeviceMenuString, true);
} else {
debug("Error setting audio input device!")
Menu.setIsOptionChecked(audioDeviceMenuString, false);
}
} else { } else {
debug("Selected input device is the same as the current input device!") if (Menu.isOptionChecked(audioDevicesList[index]) === true)
Settings.setValue(INPUT_DEVICE_SETTING, selectedDevice); Menu.setIsOptionChecked(audioDevicesList[index], false);
Menu.setIsOptionChecked(audioDeviceMenuString, true);
AudioDevice.setInputDevice(selectedDevice); // Still try to force-set the device (in case the user's trying to forcefully debug an issue)
}
} else if (selection.mode == OUTPUT) {
var currentOutputDevice = AudioDevice.getOutputDevice();
if (selectedDevice != currentOutputDevice) {
debug("Switching audio OUTPUT device from " + currentOutputDevice + " to " + selectedDevice);
Menu.setIsOptionChecked("Use " + currentOutputDevice + " for Output", false);
if (AudioDevice.setOutputDevice(selectedDevice)) {
Settings.setValue(OUTPUT_DEVICE_SETTING, selectedDevice);
Menu.setIsOptionChecked(audioDeviceMenuString, true);
} else {
debug("Error setting audio output device!")
Menu.setIsOptionChecked(audioDeviceMenuString, false);
}
} else {
debug("Selected output device is the same as the current output device!")
Settings.setValue(OUTPUT_DEVICE_SETTING, selectedDevice);
Menu.setIsOptionChecked(audioDeviceMenuString, true);
AudioDevice.setOutputDevice(selectedDevice); // Still try to force-set the device (in case the user's trying to forcefully debug an issue)
} }
} }
} }
@ -192,12 +124,12 @@ function switchAudioDevice(audioDeviceMenuString) {
function restoreAudio() { function restoreAudio() {
if (switchedAudioInputToHMD) { if (switchedAudioInputToHMD) {
debug("Switching back from HMD preferred audio input to: " + previousSelectedInputAudioDevice); debug("Switching back from HMD preferred audio input to: " + previousSelectedInputAudioDevice);
switchAudioDevice("Use " + previousSelectedInputAudioDevice + " for Input"); AudioDevice.setInputDeviceAsync(previousSelectedInputAudioDevice)
switchedAudioInputToHMD = false; switchedAudioInputToHMD = false;
} }
if (switchedAudioOutputToHMD) { if (switchedAudioOutputToHMD) {
debug("Switching back from HMD preferred audio output to: " + previousSelectedOutputAudioDevice); debug("Switching back from HMD preferred audio output to: " + previousSelectedOutputAudioDevice);
switchAudioDevice("Use " + previousSelectedOutputAudioDevice + " for Output"); AudioDevice.setOutputDeviceAsync(previousSelectedOutputAudioDevice)
switchedAudioOutputToHMD = false; switchedAudioOutputToHMD = false;
} }
} }
@ -224,7 +156,7 @@ function checkHMDAudio() {
debug("previousSelectedInputAudioDevice: " + previousSelectedInputAudioDevice); debug("previousSelectedInputAudioDevice: " + previousSelectedInputAudioDevice);
if (hmdPreferredAudioInput != previousSelectedInputAudioDevice) { if (hmdPreferredAudioInput != previousSelectedInputAudioDevice) {
switchedAudioInputToHMD = true; switchedAudioInputToHMD = true;
switchAudioDevice("Use " + hmdPreferredAudioInput + " for Input"); AudioDevice.setInputDeviceAsync(hmdPreferredAudioInput)
} }
} }
if (hmdPreferredAudioOutput !== "") { if (hmdPreferredAudioOutput !== "") {
@ -233,7 +165,7 @@ function checkHMDAudio() {
debug("previousSelectedOutputAudioDevice: " + previousSelectedOutputAudioDevice); debug("previousSelectedOutputAudioDevice: " + previousSelectedOutputAudioDevice);
if (hmdPreferredAudioOutput != previousSelectedOutputAudioDevice) { if (hmdPreferredAudioOutput != previousSelectedOutputAudioDevice) {
switchedAudioOutputToHMD = true; switchedAudioOutputToHMD = true;
switchAudioDevice("Use " + hmdPreferredAudioOutput + " for Output"); AudioDevice.setOutputDeviceAsync(hmdPreferredAudioOutput)
} }
} }
} else { } else {
@ -255,14 +187,15 @@ function checkHMDAudio() {
Script.setTimeout(function () { Script.setTimeout(function () {
debug("Connecting deviceChanged(), displayModeChanged(), and switchAudioDevice()..."); debug("Connecting deviceChanged(), displayModeChanged(), and switchAudioDevice()...");
AudioDevice.deviceChanged.connect(onDevicechanged); AudioDevice.deviceChanged.connect(onDevicechanged);
AudioDevice.currentInputDeviceChanged.connect(onCurrentDeviceChanged);
AudioDevice.currentOutputDeviceChanged.connect(onCurrentDeviceChanged);
HMD.displayModeChanged.connect(checkHMDAudio); HMD.displayModeChanged.connect(checkHMDAudio);
Menu.menuItemEvent.connect(onMenuEvent); Menu.menuItemEvent.connect(onMenuEvent);
debug("Setting up Audio I/O menu for the first time..."); debug("Setting up Audio I/O menu for the first time...");
setupAudioMenus(); setupAudioMenus();
checkDeviceMismatch();
debug("Checking HMD audio status...") debug("Checking HMD audio status...")
checkHMDAudio(); checkHMDAudio();
}, 3000); }, SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT);
debug("Connecting scriptEnding()"); debug("Connecting scriptEnding()");
Script.scriptEnding.connect(function () { Script.scriptEnding.connect(function () {
@ -270,6 +203,8 @@ Script.scriptEnding.connect(function () {
removeAudioMenus(); removeAudioMenus();
Menu.menuItemEvent.disconnect(onMenuEvent); Menu.menuItemEvent.disconnect(onMenuEvent);
HMD.displayModeChanged.disconnect(checkHMDAudio); HMD.displayModeChanged.disconnect(checkHMDAudio);
AudioDevice.currentInputDeviceChanged.disconnect(onCurrentDeviceChanged);
AudioDevice.currentOutputDeviceChanged.disconnect(onCurrentDeviceChanged);
AudioDevice.deviceChanged.disconnect(onDevicechanged); AudioDevice.deviceChanged.disconnect(onDevicechanged);
}); });

View file

@ -111,7 +111,7 @@ function onMessage(message) {
case 'openSettings': case 'openSettings':
if ((HMD.active && Settings.getValue("hmdTabletBecomesToolbar", false)) if ((HMD.active && Settings.getValue("hmdTabletBecomesToolbar", false))
|| (!HMD.active && Settings.getValue("desktopTabletBecomesToolbar", true))) { || (!HMD.active && Settings.getValue("desktopTabletBecomesToolbar", true))) {
Desktop.show("hifi/dialogs/GeneralPreferencesDialog.qml", "General Preferences"); Desktop.show("hifi/dialogs/GeneralPreferencesDialog.qml", "GeneralPreferencesDialog");
} else { } else {
tablet.loadQMLOnTop("TabletGeneralPreferences.qml"); tablet.loadQMLOnTop("TabletGeneralPreferences.qml");
} }

View file

@ -92,7 +92,7 @@
tabletScalePercentage = getTabletScalePercentageFromSettings(); tabletScalePercentage = getTabletScalePercentageFromSettings();
UIWebTablet = new WebTablet("qml/hifi/tablet/TabletRoot.qml", UIWebTablet = new WebTablet("qml/hifi/tablet/TabletRoot.qml",
DEFAULT_WIDTH * (tabletScalePercentage / 100), DEFAULT_WIDTH * (tabletScalePercentage / 100),
null, activeHand, true); null, activeHand, true, null, false);
UIWebTablet.register(); UIWebTablet.register();
HMD.tabletID = UIWebTablet.tabletEntityID; HMD.tabletID = UIWebTablet.tabletEntityID;
HMD.homeButtonID = UIWebTablet.homeButtonID; HMD.homeButtonID = UIWebTablet.homeButtonID;

View file

@ -0,0 +1,70 @@
//
// Created by Alan-Michael Moody on 5/2/2017
//
(function () {
var thisEntityID;
this.preload = function (entityID) {
thisEntityID = entityID;
};
var SCAN_RATE = 100; //ms
var REFERENCE_FRAME_COUNT = 30;
var MAX_AUDIO_THRESHOLD = 16000;
var framePool = [];
function scanEngine() {
var avatarLoudnessPool = [];
function average(a) {
var sum = 0;
var total = a.length;
for (var i = 0; i < total; i++) {
sum += a[i];
}
return Math.round(sum / total);
}
function audioClamp(input) {
if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD;
return input;
}
var avatars = AvatarList.getAvatarIdentifiers();
avatars.forEach(function (id) {
var avatar = AvatarList.getAvatar(id);
avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
});
framePool.push(average(avatarLoudnessPool));
if (framePool.length >= REFERENCE_FRAME_COUNT) {
framePool.shift();
}
function normalizedAverage(a) {
a = a.map(function (v) {
return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v);
});
return average(a);
}
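// e.g. with MAX_AUDIO_THRESHOLD = 16000, an average loudness of 8000 normalizes to 50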
var norm = normalizedAverage(framePool);
// the needle sweeps through roughly 55 to -53 degrees
var scaledDegrees = (norm / -0.94) + 54.5; // map the 0-100 loudness onto that sweep
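// sanity check (illustrative): norm = 0 gives 54.5 degrees (rest); norm = 100 gives 100 / -0.94 + 54.5 ~= -51.9 degrees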
Entities.setAbsoluteJointRotationInObjectFrame(thisEntityID, 0, Quat.fromPitchYawRollDegrees(0, 0, scaledDegrees));
}
Script.setInterval(function () {
scanEngine();
}, SCAN_RATE);
});

View file

@ -0,0 +1,79 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
(function () {
var barID;
this.preload = function (entityID) {
var children = Entities.getChildrenIDs(entityID);
var childZero = Entities.getEntityProperties(children[0]);
barID = childZero.id;
};
var SCAN_RATE = 100; //ms
var REFERENCE_FRAME_COUNT = 30;
var MAX_AUDIO_THRESHOLD = 16000;
var framePool = [];
function scanEngine() {
var avatarLoudnessPool = [];
function average(a) {
var sum = 0;
var total = a.length;
for (var i = 0; i < total; i++) {
sum += a[i];
}
return Math.round(sum / total);
}
function audioClamp(input) {
if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD;
return input;
}
var avatars = AvatarList.getAvatarIdentifiers();
avatars.forEach(function (id) {
var avatar = AvatarList.getAvatar(id);
avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
});
framePool.push(average(avatarLoudnessPool));
if (framePool.length >= REFERENCE_FRAME_COUNT) {
framePool.shift();
}
function normalizedAverage(a) {
a = a.map(function (v) {
return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v);
});
return average(a);
}
var norm = normalizedAverage(framePool);
var barProperties = Entities.getEntityProperties(barID);
var colorShift = 2.55 * norm; // map 0-100 onto 0-255 for the red channel
var xShift = norm / 52; // map 0-100 onto roughly 0-1.92 (the bar's width)
var normShift = xShift - 0.88; // local x shift relative to the bar's -0.88 anchor
var halfShift = xShift / 2;
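// Worked example (illustrative): at norm = 50, xShift ~= 0.96 and localPosition.x ~= -0.40, which keeps
// the bar's left edge pinned at x = -0.88 while the bar grows to the right with loudness.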
Entities.editEntity(barID, {
dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z},
localPosition: {x: normShift - (halfShift), y: -0.0625, z: -0.015},
color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue}
});
}
Script.setInterval(function () {
scanEngine();
}, SCAN_RATE);
});

View file

@ -0,0 +1,92 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
(function () {
var barID, textID;
this.preload = function (entityID) {
var children = Entities.getChildrenIDs(entityID);
var childZero = Entities.getEntityProperties(children[0]);
var childOne = Entities.getEntityProperties(children[1]);
var childZeroUserData = JSON.parse(Entities.getEntityProperties(children[0]).userData);
if (childZeroUserData.name === "bar") {
barID = childZero.id;
textID = childOne.id;
} else {
barID = childOne.id;
textID = childZero.id;
}
};
var SCAN_RATE = 100; //ms
var REFERENCE_FRAME_COUNT = 30;
var MAX_AUDIO_THRESHOLD = 16000;
var framePool = [];
function scanEngine() {
var avatarLoudnessPool = [];
function average(a) {
var sum = 0;
var total = a.length;
for (var i = 0; i < total; i++) {
sum += a[i];
}
return Math.round(sum / total);
}
function audioClamp(input) {
if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD;
return input;
}
var avatars = AvatarList.getAvatarIdentifiers();
avatars.forEach(function (id) {
var avatar = AvatarList.getAvatar(id);
avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
});
framePool.push(average(avatarLoudnessPool));
if (framePool.length >= REFERENCE_FRAME_COUNT) {
framePool.shift();
}
function normalizedAverage(a) {
a = a.map(function (v) {
return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v);
});
return average(a);
}
var norm = normalizedAverage(framePool);
Entities.editEntity(textID, {text: "Loudness: % " + norm});
var barProperties = Entities.getEntityProperties(barID);
var colorShift = 2.55 * norm; //shifting the scale to 0 - 255
var xShift = norm / 100; // changing scale from 0-100 to 0-1
var normShift = xShift - 0.5; // shifting scale from 0-1 to -0.5 to 0.5
var halfShift = xShift / 2;
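// localPosition.x works out to xShift / 2 - 0.5, so the bar's left edge stays anchored at x = -0.5 as it widens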
Entities.editEntity(barID, {
dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z},
localPosition: {x: normShift - (halfShift), y: 0, z: 0.1},
color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue}
});
}
Script.setInterval(function () {
scanEngine();
}, SCAN_RATE);
});

View file

@ -0,0 +1,24 @@
//
// Created by Alan-Michael Moody on 5/2/2017
//
'use strict';
(function () {
var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
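// pos is about one meter in front of the avatar (Quat.getFront returns a unit forward vector);
// the stand itself is then raised 2 m via the offset below.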
var meter = {
stand: {
type: 'Model',
modelURL: 'https://binaryrelay.com/files/public-docs/hifi/meter/applauseOmeter.fbx',
lifetime: '3600',
script: 'https://binaryrelay.com/files/public-docs/hifi/meter/applauseOmeter.js',
position: Vec3.sum(pos, {x: 0, y: 2.0, z: 0})
}
};
Entities.addEntity(meter.stand);
})();

View file

@ -0,0 +1,67 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () { // BEGIN LOCAL_SCOPE
var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
var graph = {
background: {
type: "Box",
dimensions: {x: 1, y: 1, z: .1},
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: "3600",
script: "https://binaryrelay.com/files/public-docs/hifi/meter/basic/meter.js",
position: pos
},
bar: {
type: "Box",
parentID: "",
userData: '{"name":"bar"}',
dimensions: {x: .05, y: .25, z: .1},
color: {
red: 0,
green: 0,
blue: 0
},
lifetime: "3600",
position: Vec3.sum(pos, {x: -0.495, y: 0, z: 0.1})
},
displayText: {
type: "Text",
parentID: "",
userData: '{"name":"displayText"}',
text: "Loudness: % ",
textColor: {
red: 0,
green: 0,
blue: 0
},
backgroundColor: {
red: 128,
green: 128,
blue: 128
},
visible: 0.5,
dimensions: {x: 0.70, y: 0.15, z: 0.1},
lifetime: "3600",
position: Vec3.sum(pos, {x: 0, y: 0.4, z: 0.06})
}
};
var background = Entities.addEntity(graph.background);
graph.bar.parentID = background;
graph.displayText.parentID = background;
var bar = Entities.addEntity(graph.bar);
var displayText = Entities.addEntity(graph.displayText);
})(); // END LOCAL_SCOPE

View file

@ -0,0 +1,43 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () {
var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
var graph = {
background: {
type: "Model",
modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/plastic/meter-plastic.fbx",
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: "3600",
script: "https://binaryrelay.com/files/public-docs/hifi/meter/plastic/meter.js",
position: pos
},
bar: {
type: "Box",
parentID: "",
userData: '{"name":"bar"}',
dimensions: {x: .05, y: .245, z: .07},
color: {
red: 0,
green: 0,
blue: 0
},
lifetime: "3600",
position: Vec3.sum(pos, {x: -0.90, y: 0, z: -0.15})
}
};
graph.bar.parentID = Entities.addEntity(graph.background);
Entities.addEntity(graph.bar);
})();

View file

@ -0,0 +1,67 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () {
var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
var graph = {
background: {
type: "Model",
modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/text-entity/meter-text-entity.fbx",
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: "3600",
script: "https://binaryrelay.com/files/public-docs/hifi/meter/text-entity/meter.js",
position: pos
},
bar: {
type: "Box",
parentID: "",
userData: '{"name":"bar"}',
dimensions: {x: .05, y: .245, z: .07},
color: {
red: 0,
green: 0,
blue: 0
},
lifetime: "3600",
position: Vec3.sum(pos, {x: -0.88, y: 0, z: -0.15})
},
displayText: {
type: "Text",
parentID: "",
userData: '{"name":"displayText"}',
text: "Make Some Noise:",
textColor: {
red: 0,
green: 0,
blue: 0
},
backgroundColor: {
red: 255,
green: 255,
blue: 255
},
dimensions: {x: .82, y: 0.115, z: 0.15},
lifetime: "3600",
lineHeight: .08,
position: Vec3.sum(pos, {x: -0.2, y: 0.175, z: -0.035})
}
};
var background = Entities.addEntity(graph.background);
graph.bar.parentID = background;
graph.displayText.parentID = background;
var bar = Entities.addEntity(graph.bar);
var displayText = Entities.addEntity(graph.displayText);
})();

View file

@ -0,0 +1,42 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () {
var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
var graph = {
background: {
type: "Model",
modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/wood/meter-wood.fbx",
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: "3600",
script: "https://binaryrelay.com/files/public-docs/hifi/meter/wood/meter.js",
position: pos
},
bar: {
type: "Box",
parentID: "",
userData: '{"name":"bar"}',
dimensions: {x: .05, y: .245, z: .07},
color: {
red: 0,
green: 0,
blue: 0
},
lifetime: "3600",
position: Vec3.sum(pos, {x: -0.88, y: 0, z: -0.15})
}
};
graph.bar.parentID = Entities.addEntity(graph.background);
Entities.addEntity(graph.bar);
})();

View file

@ -0,0 +1,89 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
(function () {
var barID, textID, originalText;
this.preload = function (entityID) {
var children = Entities.getChildrenIDs(entityID);
var childZero = Entities.getEntityProperties(children[0]);
var childOne = Entities.getEntityProperties(children[1]);
var childZeroUserData = JSON.parse(Entities.getEntityProperties(children[0]).userData);
if (childZeroUserData.name === "bar") {
barID = childZero.id;
textID = childOne.id;
originalText = childOne.text;
} else {
barID = childOne.id;
textID = childZero.id;
originalText = childZero.text;
}
};
var SCAN_RATE = 100; //ms
var REFERENCE_FRAME_COUNT = 30;
var MAX_AUDIO_THRESHOLD = 16000;
var framePool = [];
function scanEngine() {
var avatarLoudnessPool = [];
function average(a) {
var sum = 0;
var total = a.length;
for (var i = 0; i < total; i++) {
sum += a[i];
}
return Math.round(sum / total);
}
function audioClamp(input) {
if (input > MAX_AUDIO_THRESHOLD) return MAX_AUDIO_THRESHOLD;
return input;
}
var avatars = AvatarList.getAvatarIdentifiers();
avatars.forEach(function (id) {
var avatar = AvatarList.getAvatar(id);
avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
});
framePool.push(average(avatarLoudnessPool));
if (framePool.length >= REFERENCE_FRAME_COUNT) {
framePool.shift();
}
function normalizedAverage(a) {
a = a.map(function (v) {
return Math.round(( 100 / MAX_AUDIO_THRESHOLD ) * v);
});
return average(a);
}
var norm = normalizedAverage(framePool);
Entities.editEntity(textID, {text: originalText + " % " + norm});
var barProperties = Entities.getEntityProperties(barID);
var colorShift = 2.55 * norm; // map 0-100 onto 0-255 for the red channel
var xShift = norm / 52; // map 0-100 onto roughly 0-1.92 (the bar's width)
var normShift = xShift - 0.88; // local x shift relative to the bar's -0.88 anchor
var halfShift = xShift / 2;
Entities.editEntity(barID, {
dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z},
localPosition: {x: normShift - ( halfShift ), y: -0.0625, z: -0.015},
color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue}
});
}
Script.setInterval(function () {
scanEngine();
}, SCAN_RATE);
});

Binary files not shown. (12 binary files; After sizes: 399 KiB, 410 KiB, 12 KiB, 2.1 MiB, 19 KiB, 1.2 MiB, 1.4 MiB, 1.4 MiB, 12 KiB, 651 KiB, 19 KiB, 528 KiB)