Mirror of https://github.com/overte-org/overte.git
Synced 2025-04-22 11:33:31 +02:00

Commit da29157a3c: "fixed merge conflict"
70 changed files with 1169 additions and 4504 deletions

Changed paths include:
assignment-client/src
domain-server/resources
interface
libraries (animation/src, audio-client/src, avatars/src, script-engine/src)
plugins
scripts (defaultScripts.js, system, tutorial)
@@ -186,6 +186,10 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
         listenPort = argumentVariantMap.value(ASSIGNMENT_CLIENT_LISTEN_PORT_OPTION).toUInt();
     }

+    if (parser.isSet(portOption)) {
+        listenPort = parser.value(portOption).toUInt();
+    }
+
     if (parser.isSet(numChildsOption)) {
         if (minForks && minForks > numForks) {
             qCritical() << "--min can't be more than -n";
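The hunk above layers a QCommandLineParser option on top of the older argumentVariantMap lookup, so a command-line port flag can override the listen port and the fork counts can be sanity-checked. The following is a minimal, self-contained sketch of that pattern; the names portOption, numChildsOption, minForks, and numForks mirror the ones visible in the diff, but the option definitions and the rest of the setup are assumptions for illustration, not the real AssignmentClientApp code.

    // Sketch only: assumed option setup around the parsing shown above.
    #include <QCoreApplication>
    #include <QCommandLineParser>
    #include <QDebug>

    int main(int argc, char* argv[]) {
        QCoreApplication app(argc, argv);

        QCommandLineParser parser;
        parser.addHelpOption();
        // Hypothetical option definitions; the real client defines its own.
        QCommandLineOption portOption("p", "listen port", "port");
        QCommandLineOption numChildsOption("n", "number of forked children", "count");
        QCommandLineOption minForksOption("min", "minimum number of forked children", "count");
        parser.addOption(portOption);
        parser.addOption(numChildsOption);
        parser.addOption(minForksOption);
        parser.process(app);

        quint16 listenPort = 0;
        if (parser.isSet(portOption)) {
            listenPort = parser.value(portOption).toUInt();   // the flag overrides any earlier default
        }

        int numForks = parser.isSet(numChildsOption) ? parser.value(numChildsOption).toInt() : 0;
        int minForks = parser.isSet(minForksOption) ? parser.value(minForksOption).toInt() : 0;
        if (minForks && minForks > numForks) {
            qCritical() << "--min can't be more than -n";
            return 1;
        }

        qDebug() << "listen port:" << listenPort << "forks:" << numForks;
        return 0;
    }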
@@ -132,7 +132,7 @@ void AvatarMixer::start() {
             auto start = usecTimestampNow();
             nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
                 std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
-                    manageDisplayName(node);
+                    manageIdentityData(node);
                     ++_sumListeners;
                 });
             }, &lockWait, &nodeTransform, &functor);
@@ -183,8 +183,9 @@ void AvatarMixer::start() {

 // NOTE: nodeData->getAvatar() might be side effected, must be called when access to node/nodeData
 // is guaranteed to not be accessed by other thread
-void AvatarMixer::manageDisplayName(const SharedNodePointer& node) {
+void AvatarMixer::manageIdentityData(const SharedNodePointer& node) {
     AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(node->getLinkedData());
+    bool sendIdentity = false;
     if (nodeData && nodeData->getAvatarSessionDisplayNameMustChange()) {
         AvatarData& avatar = nodeData->getAvatar();
         const QString& existingBaseDisplayName = nodeData->getBaseDisplayName();
@@ -210,9 +211,39 @@ void AvatarMixer::manageDisplayName(const SharedNodePointer& node) {
         soFar.second++; // refcount
         nodeData->flagIdentityChange();
         nodeData->setAvatarSessionDisplayNameMustChange(false);
-        sendIdentityPacket(nodeData, node); // Tell node whose name changed about its new session display name.
+        sendIdentity = true;
         qCDebug(avatars) << "Giving session display name" << sessionDisplayName << "to node with ID" << node->getUUID();
     }
+    if (nodeData && nodeData->getAvatarSkeletonModelUrlMustChange()) { // never true for an empty _avatarWhitelist
+        nodeData->setAvatarSkeletonModelUrlMustChange(false);
+        AvatarData& avatar = nodeData->getAvatar();
+        static const QUrl emptyURL("");
+        QUrl url = avatar.cannonicalSkeletonModelURL(emptyURL);
+        if (!isAvatarInWhitelist(url)) {
+            qCDebug(avatars) << "Forbidden avatar" << nodeData->getNodeID() << avatar.getSkeletonModelURL() << "replaced with" << (_replacementAvatar.isEmpty() ? "default" : _replacementAvatar);
+            avatar.setSkeletonModelURL(_replacementAvatar);
+            sendIdentity = true;
+        }
+    }
+    if (sendIdentity) {
+        sendIdentityPacket(nodeData, node); // Tell node whose name changed about its new session display name or avatar.
+    }
 }

+bool AvatarMixer::isAvatarInWhitelist(const QUrl& url) {
+    // The avatar is in the whitelist if:
+    // 1. The avatar's URL's host matches one of the hosts of the URLs in the whitelist AND
+    // 2. The avatar's URL's path starts with the path of that same URL in the whitelist
+    for (const auto& whiteListedPrefix : _avatarWhitelist) {
+        auto whiteListURL = QUrl::fromUserInput(whiteListedPrefix);
+        // check if this script URL matches the whitelist domain and, optionally, is beneath the path
+        if (url.host().compare(whiteListURL.host(), Qt::CaseInsensitive) == 0 &&
+            url.path().startsWith(whiteListURL.path(), Qt::CaseInsensitive)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
 void AvatarMixer::throttle(std::chrono::microseconds duration, int frame) {
@@ -402,13 +433,17 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
     AvatarData::parseAvatarIdentityPacket(message->getMessage(), identity);
     bool identityChanged = false;
     bool displayNameChanged = false;
-    avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged);
+    bool skeletonModelUrlChanged = false;
+    avatar.processAvatarIdentity(identity, identityChanged, displayNameChanged, skeletonModelUrlChanged);
     if (identityChanged) {
         QMutexLocker nodeDataLocker(&nodeData->getMutex());
         nodeData->flagIdentityChange();
         if (displayNameChanged) {
             nodeData->setAvatarSessionDisplayNameMustChange(true);
         }
+        if (skeletonModelUrlChanged && !_avatarWhitelist.isEmpty()) {
+            nodeData->setAvatarSkeletonModelUrlMustChange(true);
+        }
     }
     }
 }
@@ -764,4 +799,19 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
     qCDebug(avatars) << "This domain requires a minimum avatar scale of" << _domainMinimumScale
         << "and a maximum avatar scale of" << _domainMaximumScale;

+    const QString AVATAR_WHITELIST_DEFAULT{ "" };
+    static const QString AVATAR_WHITELIST_OPTION = "avatar_whitelist";
+    _avatarWhitelist = domainSettings[AVATARS_SETTINGS_KEY].toObject()[AVATAR_WHITELIST_OPTION].toString(AVATAR_WHITELIST_DEFAULT).split(',', QString::KeepEmptyParts);
+
+    static const QString REPLACEMENT_AVATAR_OPTION = "replacement_avatar";
+    _replacementAvatar = domainSettings[AVATARS_SETTINGS_KEY].toObject()[REPLACEMENT_AVATAR_OPTION].toString(REPLACEMENT_AVATAR_DEFAULT);
+
+    if ((_avatarWhitelist.count() == 1) && _avatarWhitelist[0].isEmpty()) {
+        _avatarWhitelist.clear(); // KeepEmptyParts above will parse "," as ["", ""] (which is ok), but "" as [""] (which is not ok).
+    }
+    if (_avatarWhitelist.isEmpty()) {
+        qCDebug(avatars) << "All avatars are allowed.";
+    } else {
+        qCDebug(avatars) << "Avatars other than" << _avatarWhitelist << "will be replaced by" << (_replacementAvatar.isEmpty() ? "default" : _replacementAvatar);
+    }
 }
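The whitelist check above keys on two conditions: the avatar URL's host must match a whitelisted entry's host case-insensitively, and its path must start with that entry's path. Below is a small standalone sketch of the same comparison, using QUrl::fromUserInput as the mixer code does; the sample whitelist entries and avatar URLs are made up purely to exercise the check.

    #include <QUrl>
    #include <QStringList>
    #include <QDebug>

    // Same host + path-prefix test as AvatarMixer::isAvatarInWhitelist above.
    static bool isAvatarInWhitelist(const QUrl& url, const QStringList& whitelist) {
        for (const auto& whiteListedPrefix : whitelist) {
            auto whiteListURL = QUrl::fromUserInput(whiteListedPrefix);
            if (url.host().compare(whiteListURL.host(), Qt::CaseInsensitive) == 0 &&
                url.path().startsWith(whiteListURL.path(), Qt::CaseInsensitive)) {
                return true;
            }
        }
        return false;
    }

    int main() {
        // Hypothetical whitelist entries and avatar URLs for illustration only.
        QStringList whitelist { "example.com/avatars", "models.example.org" };
        qDebug() << isAvatarInWhitelist(QUrl("https://example.com/avatars/robot.fst"), whitelist); // true
        qDebug() << isAvatarInWhitelist(QUrl("https://example.com/other/robot.fst"), whitelist);   // false: path prefix differs
        qDebug() << isAvatarInWhitelist(QUrl("https://MODELS.example.org/x.fst"), whitelist);      // true: host match is case-insensitive
        return 0;
    }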
@@ -59,7 +59,12 @@ private:
     void parseDomainServerSettings(const QJsonObject& domainSettings);
     void sendIdentityPacket(AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode);

-    void manageDisplayName(const SharedNodePointer& node);
+    void manageIdentityData(const SharedNodePointer& node);
+    bool isAvatarInWhitelist(const QUrl& url);
+
+    const QString REPLACEMENT_AVATAR_DEFAULT{ "" };
+    QStringList _avatarWhitelist { };
+    QString _replacementAvatar { REPLACEMENT_AVATAR_DEFAULT };

     p_high_resolution_clock::time_point _lastFrameTimestamp;
@@ -65,6 +65,8 @@ public:
     void flagIdentityChange() { _identityChangeTimestamp = usecTimestampNow(); }
     bool getAvatarSessionDisplayNameMustChange() const { return _avatarSessionDisplayNameMustChange; }
     void setAvatarSessionDisplayNameMustChange(bool set = true) { _avatarSessionDisplayNameMustChange = set; }
+    bool getAvatarSkeletonModelUrlMustChange() const { return _avatarSkeletonModelUrlMustChange; }
+    void setAvatarSkeletonModelUrlMustChange(bool set = true) { _avatarSkeletonModelUrlMustChange = set; }

     void resetNumAvatarsSentLastFrame() { _numAvatarsSentLastFrame = 0; }
     void incrementNumAvatarsSentLastFrame() { ++_numAvatarsSentLastFrame; }
@@ -146,6 +148,7 @@ private:

     uint64_t _identityChangeTimestamp;
     bool _avatarSessionDisplayNameMustChange{ true };
+    bool _avatarSkeletonModelUrlMustChange{ false };

     int _numAvatarsSentLastFrame = 0;
     int _numFramesSinceAdjustment = 0;
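Taken together with the mixer hunks above, these two flags implement a simple producer/consumer handshake: handleAvatarIdentityPacket raises a "must change" flag when an identity packet arrives, and manageIdentityData later consumes it on the mixer's own schedule, resetting the flag and sending at most one identity packet if anything changed. The following is a stripped-down sketch of that handshake with plain structs and no networking; names are shortened and the surrounding details are omitted, so it is an illustration of the flow rather than the real AvatarMixerClientData.

    #include <iostream>

    // Condensed model of the per-client flags added above.
    struct ClientData {
        bool displayNameMustChange { true };       // mirrors _avatarSessionDisplayNameMustChange
        bool skeletonModelUrlMustChange { false }; // mirrors _avatarSkeletonModelUrlMustChange
    };

    // Producer side: a packet handler marks what needs attention.
    void onIdentityPacket(ClientData& data, bool displayNameChanged, bool skeletonUrlChanged, bool whitelistActive) {
        if (displayNameChanged) {
            data.displayNameMustChange = true;
        }
        if (skeletonUrlChanged && whitelistActive) {   // flag is only raised when a whitelist exists
            data.skeletonModelUrlMustChange = true;
        }
    }

    // Consumer side: the frame loop clears the flags and sends at most one identity update.
    void manageIdentityData(ClientData& data) {
        bool sendIdentity = false;
        if (data.displayNameMustChange) {
            data.displayNameMustChange = false;
            sendIdentity = true;                       // the real code also re-derives the session display name here
        }
        if (data.skeletonModelUrlMustChange) {
            data.skeletonModelUrlMustChange = false;
            sendIdentity = true;                       // the real code also validates the URL against the whitelist here
        }
        if (sendIdentity) {
            std::cout << "sendIdentityPacket()\n";     // one packet even if both changed
        }
    }

    int main() {
        ClientData data;
        onIdentityPacket(data, /*displayNameChanged=*/true, /*skeletonUrlChanged=*/true, /*whitelistActive=*/true);
        manageIdentityData(data);   // prints once
        manageIdentityData(data);   // prints nothing; flags already consumed
        return 0;
    }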
@@ -866,6 +866,22 @@
         "help": "Limits the scale of avatars in your domain. Cannot be greater than 1000.",
         "placeholder": 3.0,
         "default": 3.0
       },
+      {
+        "name": "avatar_whitelist",
+        "label": "Avatars Allowed from:",
+        "help": "Comma separated list of URLs (with optional paths) that avatar .fst files are allowed from. If someone attempts to use an avatar with a different domain, it will be rejected and the replacement avatar will be used. If left blank, any domain is allowed.",
+        "placeholder": "",
+        "default": "",
+        "advanced": true
+      },
+      {
+        "name": "replacement_avatar",
+        "label": "Replacement Avatar for disallowed avatars",
+        "help": "A URL for an avatar .fst to be used when someone tries to use an avatar that is not allowed. If left blank, the generic default avatar is used.",
+        "placeholder": "",
+        "default": "",
+        "advanced": true
+      }
     ]
   },
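The avatar_whitelist setting above arrives at the mixer as a single comma-separated string, and the parsing code in AvatarMixer::parseDomainServerSettings has to special-case the empty value: QString::split with KeepEmptyParts turns "," into ["", ""] but turns "" into [""] rather than an empty list, so a lone empty element is cleared to mean "allow everything". A small sketch that demonstrates exactly that behavior, with hypothetical URLs in the last call:

    #include <QString>
    #include <QStringList>
    #include <QDebug>

    // Parse a comma-separated whitelist the same way the mixer does above.
    static QStringList parseWhitelist(const QString& setting) {
        QStringList whitelist = setting.split(',', QString::KeepEmptyParts);
        // "" splits to [""] (a single empty entry), which is not a usable whitelist,
        // so treat it as "no whitelist at all", exactly as the mixer code does.
        if (whitelist.count() == 1 && whitelist[0].isEmpty()) {
            whitelist.clear();
        }
        return whitelist;
    }

    int main() {
        qDebug() << parseWhitelist("");                                       // () -> all avatars allowed
        qDebug() << parseWhitelist(",");                                      // ("", "") -> kept, as the code comment above notes
        qDebug() << parseWhitelist("example.com/avatars,models.example.org"); // two entries (hypothetical URLs)
        return 0;
    }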
@@ -18,6 +18,7 @@ Original.CheckBox {
     id: checkBox

     property int colorScheme: hifi.colorSchemes.light
+    property string color: hifi.colors.lightGray
     readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light
     property bool isRedCheck: false
     property int boxSize: 14
@@ -89,7 +90,7 @@ Original.CheckBox {

     label: Label {
         text: control.text
-        colorScheme: checkBox.colorScheme
+        color: control.color
         x: 2
         wrapMode: Text.Wrap
         enabled: checkBox.enabled
@ -1,266 +0,0 @@
|
|||
//
|
||||
// Audio.qml
|
||||
// qml/hifi
|
||||
//
|
||||
// Audio setup
|
||||
//
|
||||
// Created by Vlad Stelmahovsky on 03/22/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import QtGraphicalEffects 1.0
|
||||
|
||||
import "../styles-uit"
|
||||
import "../controls-uit" as HifiControls
|
||||
|
||||
import "components"
|
||||
|
||||
Rectangle {
|
||||
id: audio;
|
||||
|
||||
//put info text here
|
||||
property alias infoText: infoArea.text
|
||||
|
||||
color: "#404040";
|
||||
|
||||
HifiConstants { id: hifi; }
|
||||
objectName: "AudioWindow"
|
||||
|
||||
property string title: "Audio Options"
|
||||
signal sendToScript(var message);
|
||||
|
||||
|
||||
Component {
|
||||
id: separator
|
||||
LinearGradient {
|
||||
start: Qt.point(0, 0)
|
||||
end: Qt.point(0, 4)
|
||||
gradient: Gradient {
|
||||
GradientStop { position: 0.0; color: "#303030" }
|
||||
GradientStop { position: 0.33; color: "#252525" } // Equivalent of darkGray0 over baseGray background.
|
||||
GradientStop { position: 0.5; color: "#303030" }
|
||||
GradientStop { position: 0.6; color: "#454a49" }
|
||||
GradientStop { position: 1.0; color: "#454a49" }
|
||||
}
|
||||
cached: true
|
||||
}
|
||||
}
|
||||
|
||||
Column {
|
||||
anchors { left: parent.left; right: parent.right }
|
||||
spacing: 8
|
||||
|
||||
RalewayRegular {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
|
||||
height: 45
|
||||
size: 20
|
||||
color: "white"
|
||||
text: audio.title
|
||||
}
|
||||
|
||||
Loader {
|
||||
width: parent.width
|
||||
height: 5
|
||||
sourceComponent: separator
|
||||
}
|
||||
|
||||
//connections required to syncronize with Menu
|
||||
Connections {
|
||||
target: AudioDevice
|
||||
onMuteToggled: {
|
||||
audioMute.checkbox.checked = AudioDevice.getMuted()
|
||||
}
|
||||
}
|
||||
|
||||
Connections {
|
||||
target: AvatarInputs !== undefined ? AvatarInputs : null
|
||||
onShowAudioToolsChanged: {
|
||||
audioTools.checkbox.checked = showAudioTools
|
||||
}
|
||||
}
|
||||
|
||||
AudioCheckbox {
|
||||
id: audioMute
|
||||
width: parent.width
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
|
||||
checkbox.checked: AudioDevice.muted
|
||||
text.text: qsTr("Mute microphone")
|
||||
onCheckBoxClicked: {
|
||||
AudioDevice.muted = checked
|
||||
}
|
||||
}
|
||||
|
||||
AudioCheckbox {
|
||||
id: audioTools
|
||||
width: parent.width
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
|
||||
checkbox.checked: AvatarInputs !== undefined ? AvatarInputs.showAudioTools : false
|
||||
text.text: qsTr("Show audio level meter")
|
||||
onCheckBoxClicked: {
|
||||
if (AvatarInputs !== undefined) {
|
||||
AvatarInputs.showAudioTools = checked
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Loader {
|
||||
width: parent.width
|
||||
height: 5
|
||||
sourceComponent: separator
|
||||
}
|
||||
|
||||
Row {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
|
||||
height: 40
|
||||
spacing: 8
|
||||
|
||||
HiFiGlyphs {
|
||||
text: hifi.glyphs.mic
|
||||
color: hifi.colors.primaryHighlight
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
size: 32
|
||||
}
|
||||
RalewayRegular {
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
size: 16
|
||||
color: "#AFAFAF"
|
||||
text: qsTr("CHOOSE INPUT DEVICE")
|
||||
}
|
||||
}
|
||||
|
||||
ListView {
|
||||
id: inputAudioListView
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
|
||||
height: 125
|
||||
spacing: 0
|
||||
clip: true
|
||||
snapMode: ListView.SnapToItem
|
||||
model: AudioDevice
|
||||
delegate: Item {
|
||||
width: parent.width
|
||||
visible: devicemode === 0
|
||||
height: visible ? 36 : 0
|
||||
|
||||
AudioCheckbox {
|
||||
id: cbin
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
Binding {
|
||||
target: cbin.checkbox
|
||||
property: 'checked'
|
||||
value: devicechecked
|
||||
}
|
||||
|
||||
width: parent.width
|
||||
cbchecked: devicechecked
|
||||
text.text: devicename
|
||||
onCheckBoxClicked: {
|
||||
if (checked) {
|
||||
if (devicename.length > 0) {
|
||||
console.log("Audio.qml about to call AudioDevice.setInputDeviceAsync().devicename:" + devicename);
|
||||
AudioDevice.setInputDeviceAsync(devicename);
|
||||
} else {
|
||||
console.log("Audio.qml attempted to set input device to empty device name.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Loader {
|
||||
width: parent.width
|
||||
height: 5
|
||||
sourceComponent: separator
|
||||
}
|
||||
|
||||
Row {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
|
||||
height: 40
|
||||
spacing: 8
|
||||
|
||||
HiFiGlyphs {
|
||||
text: hifi.glyphs.unmuted
|
||||
color: hifi.colors.primaryHighlight
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
size: 32
|
||||
}
|
||||
RalewayRegular {
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
size: 16
|
||||
color: "#AFAFAF"
|
||||
text: qsTr("CHOOSE OUTPUT DEVICE")
|
||||
}
|
||||
}
|
||||
|
||||
ListView {
|
||||
id: outputAudioListView
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
|
||||
height: 250
|
||||
spacing: 0
|
||||
clip: true
|
||||
snapMode: ListView.SnapToItem
|
||||
model: AudioDevice
|
||||
delegate: Item {
|
||||
width: parent.width
|
||||
visible: devicemode === 1
|
||||
height: visible ? 36 : 0
|
||||
AudioCheckbox {
|
||||
id: cbout
|
||||
width: parent.width
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
Binding {
|
||||
target: cbout.checkbox
|
||||
property: 'checked'
|
||||
value: devicechecked
|
||||
}
|
||||
text.text: devicename
|
||||
onCheckBoxClicked: {
|
||||
if (checked) {
|
||||
if (devicename.length > 0) {
|
||||
console.log("Audio.qml about to call AudioDevice.setOutputDeviceAsync().devicename:" + devicename);
|
||||
AudioDevice.setOutputDeviceAsync(devicename);
|
||||
} else {
|
||||
console.log("Audio.qml attempted to set output device to empty device name.");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Loader {
|
||||
id: lastSeparator
|
||||
width: parent.width
|
||||
height: 6
|
||||
sourceComponent: separator
|
||||
}
|
||||
|
||||
Row {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
|
||||
height: 40
|
||||
spacing: 8
|
||||
|
||||
HiFiGlyphs {
|
||||
id: infoSign
|
||||
text: hifi.glyphs.info
|
||||
color: "#AFAFAF"
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
size: 60
|
||||
}
|
||||
RalewayRegular {
|
||||
id: infoArea
|
||||
width: parent.width - infoSign.implicitWidth - parent.spacing - 10
|
||||
wrapMode: Text.WordWrap
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
size: 12
|
||||
color: hifi.colors.baseGrayHighlight
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
interface/resources/qml/hifi/audio/Audio.qml (new file, 162 lines)
@ -0,0 +1,162 @@
|
|||
//
|
||||
// Audio.qml
|
||||
// qml/hifi/audio
|
||||
//
|
||||
// Audio setup
|
||||
//
|
||||
// Created by Vlad Stelmahovsky on 03/22/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import QtQuick.Layouts 1.3
|
||||
|
||||
import "../../styles-uit"
|
||||
import "../../controls-uit" as HifiControls
|
||||
import "../../windows"
|
||||
import "./" as Audio
|
||||
|
||||
Rectangle {
|
||||
id: root;
|
||||
|
||||
HifiConstants { id: hifi; }
|
||||
|
||||
property var eventBridge;
|
||||
property string title: "Audio Settings - " + Audio.context;
|
||||
signal sendToScript(var message);
|
||||
|
||||
color: hifi.colors.baseGray;
|
||||
|
||||
// only show the title if loaded through a "loader"
|
||||
function showTitle() {
|
||||
return root.parent.objectName == "loader";
|
||||
}
|
||||
|
||||
Column {
|
||||
y: 16; // padding does not work
|
||||
spacing: 16;
|
||||
width: parent.width;
|
||||
|
||||
RalewayRegular {
|
||||
x: 16; // padding does not work
|
||||
size: 16;
|
||||
color: "white";
|
||||
text: root.title;
|
||||
|
||||
visible: root.showTitle();
|
||||
}
|
||||
|
||||
Separator { visible: root.showTitle() }
|
||||
|
||||
Grid {
|
||||
columns: 2;
|
||||
x: 16; // padding does not work
|
||||
spacing: 16;
|
||||
|
||||
Audio.CheckBox {
|
||||
text: qsTr("Mute microphone");
|
||||
checked: Audio.muted;
|
||||
onClicked: {
|
||||
Audio.muted = checked;
|
||||
checked = Qt.binding(function() { return Audio.muted; }); // restore binding
|
||||
}
|
||||
}
|
||||
Audio.CheckBox {
|
||||
text: qsTr("Enable noise reduction");
|
||||
checked: Audio.noiseReduction;
|
||||
onClicked: {
|
||||
Audio.noiseReduction = checked;
|
||||
checked = Qt.binding(function() { return Audio.noiseReduction; }); // restore binding
|
||||
}
|
||||
}
|
||||
Audio.CheckBox {
|
||||
text: qsTr("Show audio level meter");
|
||||
checked: AvatarInputs.showAudioTools;
|
||||
onClicked: {
|
||||
AvatarInputs.showAudioTools = checked;
|
||||
checked = Qt.binding(function() { return AvatarInputs.showAudioTools; }); // restore binding
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Separator {}
|
||||
|
||||
RowLayout {
|
||||
HiFiGlyphs {
|
||||
text: hifi.glyphs.mic;
|
||||
color: hifi.colors.primaryHighlight;
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
size: 28;
|
||||
}
|
||||
RalewayRegular {
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
size: 16;
|
||||
color: hifi.colors.lightGrayText;
|
||||
text: qsTr("CHOOSE INPUT DEVICE");
|
||||
}
|
||||
}
|
||||
|
||||
ListView {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
|
||||
height: 125;
|
||||
spacing: 0;
|
||||
snapMode: ListView.SnapToItem;
|
||||
clip: true;
|
||||
model: Audio.devices.input;
|
||||
delegate: Item {
|
||||
width: parent.width;
|
||||
height: 36;
|
||||
Audio.CheckBox {
|
||||
text: display;
|
||||
checked: selected;
|
||||
onClicked: {
|
||||
selected = checked;
|
||||
checked = Qt.binding(function() { return selected; }); // restore binding
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Separator {}
|
||||
|
||||
RowLayout {
|
||||
HiFiGlyphs {
|
||||
text: hifi.glyphs.unmuted;
|
||||
color: hifi.colors.primaryHighlight;
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
size: 36;
|
||||
}
|
||||
RalewayRegular {
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
size: 16;
|
||||
color: hifi.colors.lightGrayText;
|
||||
text: qsTr("CHOOSE OUTPUT DEVICE");
|
||||
}
|
||||
}
|
||||
|
||||
ListView {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
|
||||
height: 125;
|
||||
spacing: 0;
|
||||
snapMode: ListView.SnapToItem;
|
||||
clip: true;
|
||||
model: Audio.devices.output;
|
||||
delegate: Item {
|
||||
width: parent.width;
|
||||
height: 36;
|
||||
Audio.CheckBox {
|
||||
text: display;
|
||||
checked: selected;
|
||||
onClicked: {
|
||||
selected = checked;
|
||||
checked = Qt.binding(function() { return selected; }); // restore binding
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
interface/resources/qml/hifi/audio/CheckBox.qml (new file, 18 lines)
@@ -0,0 +1,18 @@
+//
+//  CheckBox.qml
+//  qml/hifi/audio
+//
+//  Created by Zach Pomerantz on 6/12/2017
+//  Copyright 2017 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+import QtQuick 2.5
+
+import "../../controls-uit" as HifiControls
+
+HifiControls.CheckBox {
+    color: "white"
+}
@@ -1,30 +0,0 @@
-import QtQuick 2.5
-import QtQuick.Controls 1.4
-
-import "../../styles-uit"
-import "../../controls-uit" as HifiControls
-
-Row {
-    id: row
-    spacing: 16
-    property alias checkbox: cb
-    property alias cbchecked: cb.checked
-    property alias text: txt
-    signal checkBoxClicked(bool checked)
-
-    HifiControls.CheckBox {
-        id: cb
-        boxSize: 20
-        colorScheme: hifi.colorSchemes.dark
-        anchors.verticalCenter: parent.verticalCenter
-        onClicked: checkBoxClicked(cb.checked)
-    }
-    RalewayBold {
-        id: txt
-        wrapMode: Text.WordWrap
-        width: parent.width - cb.boxSize - 30
-        anchors.verticalCenter: parent.verticalCenter
-        size: 16
-        color: "white"
-    }
-}
interface/resources/qml/hifi/dialogs/Audio.qml (new file, 27 lines)
@@ -0,0 +1,27 @@
+//
+//  Audio.qml
+//
+//  Created by Zach Pomerantz on 6/12/2017
+//  Copyright 2017 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+import "../../windows"
+import "../audio"
+
+ScrollingWindow {
+    id: root;
+
+    resizable: true;
+    destroyOnHidden: true;
+    width: 400;
+    height: 577;
+    minSize: Qt.vector2d(400, 500);
+
+    Audio { id: audio; width: root.width }
+
+    objectName: "AudioDialog";
+    title: audio.title;
+}
@@ -5,9 +5,9 @@ import "../../dialogs"

 PreferencesDialog {
     id: root
-    objectName: "AudioPreferencesDialog"
+    objectName: "AudioBuffersDialog"
     title: "Audio Settings"
-    showCategories: ["Audio"]
+    showCategories: ["Audio Buffers"]
     property var settings: Settings {
         category: root.objectName
         property alias x: root.x
@@ -16,4 +16,3 @@ PreferencesDialog {
         property alias height: root.height
     }
 }
-
@ -6,16 +6,10 @@ Item {
|
|||
id: tablet
|
||||
objectName: "tablet"
|
||||
property double micLevel: 0.8
|
||||
property bool micEnabled: true
|
||||
property int rowIndex: 0
|
||||
property int columnIndex: 0
|
||||
property int count: (flowMain.children.length - 1)
|
||||
|
||||
// called by C++ code to keep mic state updated
|
||||
function setMicEnabled(newMicEnabled) {
|
||||
tablet.micEnabled = newMicEnabled;
|
||||
}
|
||||
|
||||
// called by C++ code to keep audio bar updated
|
||||
function setMicLevel(newMicLevel) {
|
||||
tablet.micLevel = newMicLevel;
|
||||
|
@ -121,8 +115,8 @@ Item {
|
|||
}
|
||||
|
||||
Item {
|
||||
visible: (!tablet.micEnabled && !toggleMuteMouseArea.containsMouse)
|
||||
|| (tablet.micEnabled && toggleMuteMouseArea.containsMouse)
|
||||
visible: (Audio.muted && !toggleMuteMouseArea.containsMouse)
|
||||
|| (!Audio.muted && toggleMuteMouseArea.containsMouse)
|
||||
|
||||
Image {
|
||||
id: muteIcon
|
||||
|
@ -201,7 +195,7 @@ Item {
|
|||
preventStealing: true
|
||||
propagateComposedEvents: false
|
||||
scrollGestureEnabled: false
|
||||
onClicked: tabletRoot.toggleMicEnabled()
|
||||
onClicked: { Audio.muted = !Audio.muted }
|
||||
}
|
||||
|
||||
RalewaySemiBold {
|
||||
|
@ -271,7 +265,7 @@ Item {
|
|||
|
||||
PropertyChanges {
|
||||
target: muteIcon
|
||||
visible: micEnabled
|
||||
visible: !Audio.muted
|
||||
}
|
||||
|
||||
PropertyChanges {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//
|
||||
// TabletAudioPreferences.qml
|
||||
// TabletAudioBuffers.qml
|
||||
//
|
||||
// Created by Davd Rowe on 7 Mar 2017.
|
||||
// Created by Zach Pomerantz on 6/5/2017.
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
|
@ -17,7 +17,9 @@ StackView {
|
|||
id: profileRoot
|
||||
initialItem: root
|
||||
objectName: "stack"
|
||||
property string title: "Audio Settings"
|
||||
property string title: "Audio Buffers"
|
||||
property alias gotoPreviousApp: root.gotoPreviousApp;
|
||||
property var eventBridge;
|
||||
|
||||
signal sendToScript(var message);
|
||||
|
||||
|
@ -31,7 +33,7 @@ StackView {
|
|||
|
||||
TabletPreferencesDialog {
|
||||
id: root
|
||||
objectName: "TabletAudioPreferences"
|
||||
showCategories: ["Audio"]
|
||||
objectName: "TabletAudioBuffersDialog"
|
||||
showCategories: ["Audio Buffers"]
|
||||
}
|
||||
}
|
|
@ -147,10 +147,6 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
function toggleMicEnabled() {
|
||||
ApplicationInterface.toggleMuteAudio();
|
||||
}
|
||||
|
||||
function setUsername(newUsername) {
|
||||
username = newUsername;
|
||||
}
|
||||
|
|
|
@ -71,10 +71,6 @@ Windows.ScrollingWindow {
|
|||
}
|
||||
}
|
||||
|
||||
function toggleMicEnabled() {
|
||||
ApplicationInterface.toggleMuteAudio();
|
||||
}
|
||||
|
||||
function setUsername(newUsername) {
|
||||
username = newUsername;
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
// Separator.qml
|
||||
//
|
||||
// Created by Dante Ruiz on 6/1/17.
|
||||
// Created by Zach Fox on 2017-06-06
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
|
@ -7,20 +8,33 @@
|
|||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtGraphicalEffects 1.0
|
||||
import "../styles-uit"
|
||||
|
||||
Item {
|
||||
// Size
|
||||
height: 2;
|
||||
width: parent.width;
|
||||
|
||||
Rectangle {
|
||||
id: shadows
|
||||
width: parent.width
|
||||
height: 2
|
||||
color: hifi.colors.baseGrayShadow
|
||||
// Size
|
||||
width: parent.width;
|
||||
height: 1;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: height;
|
||||
// Style
|
||||
color: hifi.colors.baseGrayShadow;
|
||||
}
|
||||
|
||||
|
||||
Rectangle {
|
||||
width: parent.width
|
||||
height: 1
|
||||
anchors.top: shadows.bottom
|
||||
color: hifi.colors.baseGrayHighlight
|
||||
// Size
|
||||
width: parent.width;
|
||||
height: 1;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.bottom: parent.bottom;
|
||||
// Style
|
||||
color: hifi.colors.baseGrayHighlight;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -151,11 +151,11 @@
|
|||
#include "InterfaceLogging.h"
|
||||
#include "LODManager.h"
|
||||
#include "ModelPackager.h"
|
||||
#include "scripting/Audio.h"
|
||||
#include "networking/CloseEventSender.h"
|
||||
#include "scripting/TestScriptingInterface.h"
|
||||
#include "scripting/AccountScriptingInterface.h"
|
||||
#include "scripting/AssetMappingsScriptingInterface.h"
|
||||
#include "scripting/AudioDeviceScriptingInterface.h"
|
||||
#include "scripting/ClipboardScriptingInterface.h"
|
||||
#include "scripting/DesktopScriptingInterface.h"
|
||||
#include "scripting/GlobalServicesScriptingInterface.h"
|
||||
|
@ -252,6 +252,8 @@ static const QString MARKETPLACE_CDN_HOSTNAME = "mpassets.highfidelity.com";
|
|||
static const int INTERVAL_TO_CHECK_HMD_WORN_STATUS = 500; // milliseconds
|
||||
static const QString DESKTOP_DISPLAY_PLUGIN_NAME = "Desktop";
|
||||
|
||||
static const QString SYSTEM_TABLET = "com.highfidelity.interface.tablet.system";
|
||||
|
||||
const QHash<QString, Application::AcceptURLMethod> Application::_acceptedExtensions {
|
||||
{ SVO_EXTENSION, &Application::importSVOFromURL },
|
||||
{ SVO_JSON_EXTENSION, &Application::importSVOFromURL },
|
||||
|
@ -713,9 +715,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
recorder->recordFrame(AUDIO_FRAME_TYPE, audio);
|
||||
}
|
||||
});
|
||||
audioIO->startThread();
|
||||
|
||||
auto audioScriptingInterface = DependencyManager::set<AudioScriptingInterface>();
|
||||
connect(audioIO.data(), &AudioClient::muteToggled, this, &Application::audioMuteToggled);
|
||||
auto audioScriptingInterface = DependencyManager::set<AudioScriptingInterface, scripting::Audio>();
|
||||
connect(audioIO.data(), &AudioClient::mutedByMixer, audioScriptingInterface.data(), &AudioScriptingInterface::mutedByMixer);
|
||||
connect(audioIO.data(), &AudioClient::receivedFirstPacket, audioScriptingInterface.data(), &AudioScriptingInterface::receivedFirstPacket);
|
||||
connect(audioIO.data(), &AudioClient::disconnected, audioScriptingInterface.data(), &AudioScriptingInterface::disconnected);
|
||||
|
@ -731,8 +733,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
audioScriptingInterface->environmentMuted();
|
||||
}
|
||||
});
|
||||
|
||||
audioIO->startThread();
|
||||
connect(this, &Application::activeDisplayPluginChanged,
|
||||
reinterpret_cast<scripting::Audio*>(audioScriptingInterface.data()), &scripting::Audio::onContextChanged);
|
||||
|
||||
ResourceManager::init();
|
||||
// Make sure we don't time out during slow operations at startup
|
||||
|
@ -1607,12 +1609,12 @@ QString Application::getUserAgent() {
|
|||
return userAgent;
|
||||
}
|
||||
|
||||
void Application::toggleTabletUI() const {
|
||||
void Application::toggleTabletUI(bool shouldOpen) const {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
bool messageOpen = tablet->isMessageDialogOpen();
|
||||
if (!messageOpen || (messageOpen && !hmd->getShouldShowTablet())) {
|
||||
if ((!messageOpen || (messageOpen && !hmd->getShouldShowTablet())) && !(shouldOpen && hmd->getShouldShowTablet())) {
|
||||
auto HMD = DependencyManager::get<HMDScriptingInterface>();
|
||||
HMD->toggleShouldShowTablet();
|
||||
}
|
||||
|
@ -1989,7 +1991,6 @@ void Application::initializeUi() {
|
|||
surfaceContext->setContextProperty("Stats", Stats::getInstance());
|
||||
surfaceContext->setContextProperty("Settings", SettingsScriptingInterface::getInstance());
|
||||
surfaceContext->setContextProperty("ScriptDiscoveryService", DependencyManager::get<ScriptEngines>().data());
|
||||
surfaceContext->setContextProperty("AudioDevice", AudioDeviceScriptingInterface::getInstance());
|
||||
surfaceContext->setContextProperty("AvatarBookmarks", DependencyManager::get<AvatarBookmarks>().data());
|
||||
surfaceContext->setContextProperty("LocationBookmarks", DependencyManager::get<LocationBookmarks>().data());
|
||||
|
||||
|
@ -2318,12 +2319,6 @@ void Application::runTests() {
|
|||
runUnitTests();
|
||||
}
|
||||
|
||||
void Application::audioMuteToggled() const {
|
||||
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteAudio);
|
||||
Q_CHECK_PTR(muteAction);
|
||||
muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted());
|
||||
}
|
||||
|
||||
void Application::faceTrackerMuteToggled() {
|
||||
|
||||
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteFaceTracking);
|
||||
|
@ -2393,7 +2388,7 @@ void Application::showHelp() {
|
|||
queryString.addQueryItem("handControllerName", handControllerName);
|
||||
queryString.addQueryItem("defaultTab", defaultTab);
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
tablet->gotoWebScreen(INFO_HELP_PATH + "?" + queryString.toString());
|
||||
//InfoView::show(INFO_HELP_PATH, false, queryString.toString());
|
||||
}
|
||||
|
@ -2849,7 +2844,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
if (isShifted && isMeta) {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->togglePinned();
|
||||
//offscreenUi->getRootContext()->engine()->clearComponentCache();
|
||||
//offscreenUi->getSurfaceContext()->engine()->clearComponentCache();
|
||||
//OffscreenUi::information("Debugging", "Component cache cleared");
|
||||
// placeholder for dialogs being converted to QML.
|
||||
}
|
||||
|
@ -2890,6 +2885,12 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
Menu::getInstance()->triggerOption(MenuOption::DefaultSkybox);
|
||||
break;
|
||||
|
||||
case Qt::Key_M:
|
||||
if (isMeta) {
|
||||
DependencyManager::get<AudioClient>()->toggleMute();
|
||||
}
|
||||
break;
|
||||
|
||||
case Qt::Key_N:
|
||||
if (!isOption && !isShifted && isMeta) {
|
||||
DependencyManager::get<NodeList>()->toggleIgnoreRadius();
|
||||
|
@ -4474,10 +4475,11 @@ void Application::update(float deltaTime) {
|
|||
} else {
|
||||
const quint64 MUTE_MICROPHONE_AFTER_USECS = 5000000; //5 secs
|
||||
Menu* menu = Menu::getInstance();
|
||||
if (menu->isOptionChecked(MenuOption::AutoMuteAudio) && !menu->isOptionChecked(MenuOption::MuteAudio)) {
|
||||
auto audioClient = DependencyManager::get<AudioClient>();
|
||||
if (menu->isOptionChecked(MenuOption::AutoMuteAudio) && !audioClient->isMuted()) {
|
||||
if (_lastFaceTrackerUpdate > 0
|
||||
&& ((usecTimestampNow() - _lastFaceTrackerUpdate) > MUTE_MICROPHONE_AFTER_USECS)) {
|
||||
menu->triggerOption(MenuOption::MuteAudio);
|
||||
audioClient->toggleMute();
|
||||
_lastFaceTrackerUpdate = 0;
|
||||
}
|
||||
} else {
|
||||
|
@ -5298,6 +5300,11 @@ void Application::nodeActivated(SharedNodePointer node) {
|
|||
|
||||
if (node->getType() == NodeType::AvatarMixer) {
|
||||
// new avatar mixer, send off our identity packet on next update loop
|
||||
// Reset skeletonModelUrl if the last server modified our choice.
|
||||
static const QUrl empty{};
|
||||
if (getMyAvatar()->getFullAvatarURLFromPreferences() != getMyAvatar()->cannonicalSkeletonModelURL(empty)) {
|
||||
getMyAvatar()->resetFullAvatarURL();
|
||||
}
|
||||
getMyAvatar()->markIdentityDataChanged();
|
||||
getMyAvatar()->resetLastSent();
|
||||
}
|
||||
|
@ -5518,7 +5525,6 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Stats", Stats::getInstance());
|
||||
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("Snapshot", DependencyManager::get<Snapshot>().data());
|
||||
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
scriptEngine->registerGlobalObject("AudioScope", DependencyManager::get<AudioScope>().data());
|
||||
scriptEngine->registerGlobalObject("AvatarBookmarks", DependencyManager::get<AvatarBookmarks>().data());
|
||||
|
@ -5797,38 +5803,24 @@ bool Application::displayAvatarAttachmentConfirmationDialog(const QString& name)
|
|||
}
|
||||
}
|
||||
|
||||
void Application::toggleRunningScriptsWidget() const {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
bool scriptsRunning = !scriptEngines->getRunningScripts().isEmpty();
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
void Application::showDialog(const QUrl& widgetUrl, const QUrl& tabletUrl, const QString& name) const {
|
||||
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet(SYSTEM_TABLET);
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
bool onTablet = false;
|
||||
|
||||
if (tablet->getToolbarMode() || false == scriptsRunning) {
|
||||
static const QUrl url("hifi/dialogs/RunningScripts.qml");
|
||||
DependencyManager::get<OffscreenUi>()->show(url, "RunningScripts");
|
||||
} else {
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
if (!hmd->getShouldShowTablet() && !isHMDMode()) {
|
||||
static const QUrl url("hifi/dialogs/RunningScripts.qml");
|
||||
DependencyManager::get<OffscreenUi>()->show(url, "RunningScripts");
|
||||
} else {
|
||||
static const QUrl url("../../hifi/dialogs/TabletRunningScripts.qml");
|
||||
tablet->pushOntoStack(url);
|
||||
if (!tablet->getToolbarMode()) {
|
||||
onTablet = tablet->pushOntoStack(tabletUrl);
|
||||
if (onTablet) {
|
||||
toggleTabletUI(true);
|
||||
}
|
||||
}
|
||||
//DependencyManager::get<OffscreenUi>()->show(url, "RunningScripts");
|
||||
//if (_runningScriptsWidget->isVisible()) {
|
||||
// if (_runningScriptsWidget->hasFocus()) {
|
||||
// _runningScriptsWidget->hide();
|
||||
// } else {
|
||||
// _runningScriptsWidget->raise();
|
||||
// setActiveWindow(_runningScriptsWidget);
|
||||
// _runningScriptsWidget->setFocus();
|
||||
// }
|
||||
//} else {
|
||||
// _runningScriptsWidget->show();
|
||||
// _runningScriptsWidget->setFocus();
|
||||
//}
|
||||
|
||||
if (!onTablet) {
|
||||
DependencyManager::get<OffscreenUi>()->show(widgetUrl, name);
|
||||
}
|
||||
if (tablet->getToolbarMode()) {
|
||||
DependencyManager::get<OffscreenUi>()->show(widgetUrl, name);
|
||||
}
|
||||
}
|
||||
|
||||
void Application::showScriptLogs() {
|
||||
|
@ -5850,7 +5842,7 @@ void Application::showAssetServerWidget(QString filePath) {
|
|||
}
|
||||
};
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
if (tablet->getToolbarMode()) {
|
||||
DependencyManager::get<OffscreenUi>()->show(url, "AssetServer", startUpload);
|
||||
|
@ -5885,21 +5877,6 @@ void Application::addAssetToWorldFromURL(QString url) {
|
|||
request->send();
|
||||
}
|
||||
|
||||
void Application::showDialog(const QString& desktopURL, const QString& tabletURL, const QString& name) const {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
if (tablet->getToolbarMode()) {
|
||||
DependencyManager::get<OffscreenUi>()->show(desktopURL, name);
|
||||
} else {
|
||||
tablet->pushOntoStack(tabletURL);
|
||||
if (!hmd->getShouldShowTablet() && !isHMDMode()) {
|
||||
hmd->openTablet();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
void Application::addAssetToWorldFromURLRequestFinished() {
|
||||
auto request = qobject_cast<ResourceRequest*>(sender());
|
||||
auto url = request->getUrl().toString();
|
||||
|
@ -6383,7 +6360,7 @@ void Application::loadScriptURLDialog() const {
|
|||
|
||||
void Application::loadLODToolsDialog() {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
if (tablet->getToolbarMode() || (!tablet->getTabletRoot() && !isHMDMode())) {
|
||||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
dialogsManager->lodTools();
|
||||
|
@ -6395,7 +6372,7 @@ void Application::loadLODToolsDialog() {
|
|||
|
||||
void Application::loadEntityStatisticsDialog() {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
if (tablet->getToolbarMode() || (!tablet->getTabletRoot() && !isHMDMode())) {
|
||||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
dialogsManager->octreeStatsDetails();
|
||||
|
@ -6406,7 +6383,7 @@ void Application::loadEntityStatisticsDialog() {
|
|||
|
||||
void Application::loadDomainConnectionDialog() {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
if (tablet->getToolbarMode() || (!tablet->getTabletRoot() && !isHMDMode())) {
|
||||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
dialogsManager->showDomainConnectionDialog();
|
||||
|
@ -7063,11 +7040,6 @@ void Application::updateSystemTabletMode() {
|
|||
}
|
||||
}
|
||||
|
||||
void Application::toggleMuteAudio() {
|
||||
auto menu = Menu::getInstance();
|
||||
menu->setIsOptionChecked(MenuOption::MuteAudio, !menu->isOptionChecked(MenuOption::MuteAudio));
|
||||
}
|
||||
|
||||
OverlayID Application::getTabletScreenID() const {
|
||||
auto HMD = DependencyManager::get<HMDScriptingInterface>();
|
||||
return HMD->getCurrentTabletScreenID();
|
||||
|
|
|
@ -318,11 +318,10 @@ public slots:
|
|||
Q_INVOKABLE void loadScriptURLDialog() const;
|
||||
void toggleLogDialog();
|
||||
void toggleEntityScriptServerLogDialog();
|
||||
void toggleRunningScriptsWidget() const;
|
||||
Q_INVOKABLE void showAssetServerWidget(QString filePath = "");
|
||||
Q_INVOKABLE void loadAddAvatarBookmarkDialog() const;
|
||||
|
||||
void showDialog(const QString& desktopURL, const QString& tabletURL, const QString& name) const;
|
||||
void showDialog(const QUrl& widgetUrl, const QUrl& tabletUrl, const QString& name) const;
|
||||
|
||||
// FIXME: Move addAssetToWorld* methods to own class?
|
||||
void addAssetToWorldFromURL(QString url);
|
||||
|
@ -391,7 +390,6 @@ public slots:
|
|||
|
||||
void addAssetToWorldMessageClose();
|
||||
|
||||
Q_INVOKABLE void toggleMuteAudio();
|
||||
void loadLODToolsDialog();
|
||||
void loadEntityStatisticsDialog();
|
||||
void loadDomainConnectionDialog();
|
||||
|
@ -405,7 +403,6 @@ private slots:
|
|||
|
||||
void resettingDomain();
|
||||
|
||||
void audioMuteToggled() const;
|
||||
void faceTrackerMuteToggled();
|
||||
|
||||
void activeChanged(Qt::ApplicationState state);
|
||||
|
@ -503,7 +500,7 @@ private:
|
|||
static void dragEnterEvent(QDragEnterEvent* event);
|
||||
|
||||
void maybeToggleMenuVisible(QMouseEvent* event) const;
|
||||
void toggleTabletUI() const;
|
||||
void toggleTabletUI(bool shouldOpen = false) const;
|
||||
|
||||
MainWindow* _window;
|
||||
QElapsedTimer& _sessionRunTimer;
|
||||
|
|
|
@ -95,8 +95,13 @@ Menu::Menu() {
|
|||
addActionToQMenuAndActionHash(editMenu, redoAction);
|
||||
|
||||
// Edit > Running Scripts
|
||||
addActionToQMenuAndActionHash(editMenu, MenuOption::RunningScripts, Qt::CTRL | Qt::Key_J,
|
||||
qApp, SLOT(toggleRunningScriptsWidget()));
|
||||
auto action = addActionToQMenuAndActionHash(editMenu, MenuOption::RunningScripts, Qt::CTRL | Qt::Key_J);
|
||||
connect(action, &QAction::triggered, [] {
|
||||
static const QUrl widgetUrl("hifi/dialogs/RunningScripts.qml");
|
||||
static const QUrl tabletUrl("../../hifi/dialogs/TabletRunningScripts.qml");
|
||||
static const QString name("RunningScripts");
|
||||
qApp->showDialog(widgetUrl, tabletUrl, name);
|
||||
});
|
||||
|
||||
// Edit > Open and Run Script from File... [advanced]
|
||||
addActionToQMenuAndActionHash(editMenu, MenuOption::LoadScript, Qt::CTRL | Qt::Key_O,
|
||||
|
@ -144,28 +149,13 @@ Menu::Menu() {
|
|||
addActionToQMenuAndActionHash(editMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()),
|
||||
QAction::NoRole, UNSPECIFIED_POSITION, "Advanced");
|
||||
|
||||
|
||||
// Audio menu ----------------------------------
|
||||
MenuWrapper* audioMenu = addMenu("Audio");
|
||||
auto audioIO = DependencyManager::get<AudioClient>();
|
||||
|
||||
// Audio > Mute
|
||||
addCheckableActionToQMenuAndActionHash(audioMenu, MenuOption::MuteAudio, Qt::CTRL | Qt::Key_M, false,
|
||||
audioIO.data(), SLOT(toggleMute()));
|
||||
|
||||
// Audio > Show Level Meter
|
||||
addCheckableActionToQMenuAndActionHash(audioMenu, MenuOption::AudioTools, 0, false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(audioMenu, MenuOption::AudioNoiseReduction, 0, true,
|
||||
audioIO.data(), SLOT(toggleAudioNoiseReduction()));
|
||||
|
||||
// Avatar menu ----------------------------------
|
||||
MenuWrapper* avatarMenu = addMenu("Avatar");
|
||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||
auto avatar = avatarManager->getMyAvatar();
|
||||
|
||||
// Avatar > Attachments...
|
||||
auto action = addActionToQMenuAndActionHash(avatarMenu, MenuOption::Attachments);
|
||||
action = addActionToQMenuAndActionHash(avatarMenu, MenuOption::Attachments);
|
||||
connect(action, &QAction::triggered, [] {
|
||||
qApp->showDialog(QString("hifi/dialogs/AttachmentsDialog.qml"),
|
||||
QString("../../hifi/tablet/TabletAttachmentsDialog.qml"), "AttachmentsDialog");
|
||||
|
@ -298,6 +288,14 @@ Menu::Menu() {
|
|||
QString("../../hifi/tablet/TabletGeneralPreferences.qml"), "GeneralPreferencesDialog");
|
||||
});
|
||||
|
||||
action = addActionToQMenuAndActionHash(settingsMenu, "Audio...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
static const QUrl widgetUrl("hifi/dialogs/Audio.qml");
|
||||
static const QUrl tabletUrl("../../hifi/audio/Audio.qml");
|
||||
static const QString name("AudioDialog");
|
||||
qApp->showDialog(widgetUrl, tabletUrl, name);
|
||||
});
|
||||
|
||||
// Settings > Avatar...
|
||||
action = addActionToQMenuAndActionHash(settingsMenu, "Avatar...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
|
@ -629,10 +627,11 @@ Menu::Menu() {
|
|||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Buffers...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
qApp->showDialog(QString("hifi/dialogs/AudioPreferencesDialog.qml"),
|
||||
QString("../../hifi/tablet/TabletAudioPreferences.qml"), "AudioPreferencesDialog");
|
||||
qApp->showDialog(QString("hifi/dialogs/AudioBuffers.qml"),
|
||||
QString("../../hifi/tablet/TabletAudioBuffers.qml"), "AudioBuffersDialog");
|
||||
});
|
||||
|
||||
auto audioIO = DependencyManager::get<AudioClient>();
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio, 0, false,
|
||||
audioIO.data(), SLOT(toggleServerEcho()));
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio, 0, false,
|
||||
|
|
|
@ -36,7 +36,6 @@ namespace MenuOption {
|
|||
const QString AssetMigration = "ATP Asset Migration";
|
||||
const QString AssetServer = "Asset Browser";
|
||||
const QString Attachments = "Attachments...";
|
||||
const QString AudioNoiseReduction = "Noise Reduction";
|
||||
const QString AudioScope = "Show Scope";
|
||||
const QString AudioScopeFiftyFrames = "Fifty";
|
||||
const QString AudioScopeFiveFrames = "Five";
|
||||
|
@ -44,7 +43,6 @@ namespace MenuOption {
|
|||
const QString AudioScopePause = "Pause Scope";
|
||||
const QString AudioScopeTwentyFrames = "Twenty";
|
||||
const QString AudioStatsShowInjectedStreams = "Audio Stats Show Injected Streams";
|
||||
const QString AudioTools = "Show Level Meter";
|
||||
const QString AutoMuteAudio = "Auto Mute Microphone";
|
||||
const QString AvatarReceiveStats = "Show Receive Stats";
|
||||
const QString AvatarBookmarks = "Avatar Bookmarks";
|
||||
|
@ -124,7 +122,6 @@ namespace MenuOption {
|
|||
const QString LogExtraTimings = "Log Extra Timing Details";
|
||||
const QString LowVelocityFilter = "Low Velocity Filter";
|
||||
const QString MeshVisible = "Draw Mesh";
|
||||
const QString MuteAudio = "Mute Microphone";
|
||||
const QString MuteEnvironment = "Mute Environment";
|
||||
const QString MuteFaceTracking = "Mute Face Tracking";
|
||||
const QString NamesAboveHeads = "Names Above Heads";
|
||||
|
|
interface/src/scripting/Audio.cpp (new file, 109 lines)
@ -0,0 +1,109 @@
|
|||
//
|
||||
// Audio.cpp
|
||||
// interface/src/scripting
|
||||
//
|
||||
// Created by Zach Pomerantz on 28/5/2017.
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Audio.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include "AudioClient.h"
|
||||
#include "ui/AvatarInputs.h"
|
||||
|
||||
using namespace scripting;
|
||||
|
||||
QString Audio::AUDIO { "Audio" };
|
||||
QString Audio::DESKTOP { "Desktop" };
|
||||
QString Audio::HMD { "VR" };
|
||||
|
||||
Setting::Handle<bool> enableNoiseReductionSetting { QStringList { Audio::AUDIO, "NoiseReduction" }, true };
|
||||
|
||||
Audio::Audio() : _devices(_contextIsHMD) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
connect(client.data(), &AudioClient::muteToggled, this, &Audio::onMutedChanged);
|
||||
connect(this, &Audio::contextChanged, &_devices, &AudioDevices::onContextChanged);
|
||||
connect(&_devices._inputs, &AudioDeviceList::deviceChanged, this, &Audio::onInputChanged);
|
||||
enableNoiseReduction(enableNoiseReductionSetting.get());
|
||||
}
|
||||
|
||||
void Audio::setMuted(bool isMuted) {
|
||||
if (_isMuted != isMuted) {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
QMetaObject::invokeMethod(client, "toggleMute", Qt::BlockingQueuedConnection);
|
||||
|
||||
_isMuted = isMuted;
|
||||
emit mutedChanged(_isMuted);
|
||||
}
|
||||
}
|
||||
|
||||
void Audio::onMutedChanged() {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
bool isMuted;
|
||||
QMetaObject::invokeMethod(client, "isMuted", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, isMuted));
|
||||
|
||||
if (_isMuted != isMuted) {
|
||||
_isMuted = isMuted;
|
||||
emit mutedChanged(_isMuted);
|
||||
}
|
||||
}
|
||||
|
||||
void Audio::enableNoiseReduction(bool enable) {
|
||||
if (_enableNoiseReduction != enable) {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
QMetaObject::invokeMethod(client, "setNoiseReduction", Qt::BlockingQueuedConnection, Q_ARG(bool, enable));
|
||||
|
||||
enableNoiseReductionSetting.set(enable);
|
||||
_enableNoiseReduction = enable;
|
||||
emit noiseReductionChanged(enable);
|
||||
}
|
||||
}
|
||||
|
||||
void Audio::setInputVolume(float volume) {
|
||||
// getInputVolume will not reflect changes synchronously, so clamp beforehand
|
||||
volume = glm::clamp(volume, 0.0f, 1.0f);
|
||||
|
||||
if (_inputVolume != volume) {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
QMetaObject::invokeMethod(client, "setInputVolume", Qt::BlockingQueuedConnection, Q_ARG(float, volume));
|
||||
|
||||
_inputVolume = volume;
|
||||
emit inputVolumeChanged(_inputVolume);
|
||||
}
|
||||
}
|
||||
|
||||
// different audio input devices may have different volumes
|
||||
void Audio::onInputChanged() {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
float volume;
|
||||
QMetaObject::invokeMethod(client, "getInputVolume", Qt::BlockingQueuedConnection, Q_RETURN_ARG(float, volume));
|
||||
|
||||
if (_inputVolume != volume) {
|
||||
_inputVolume = volume;
|
||||
emit inputVolumeChanged(_inputVolume);
|
||||
}
|
||||
}
|
||||
|
||||
QString Audio::getContext() const {
|
||||
return _contextIsHMD ? Audio::HMD : Audio::DESKTOP;
|
||||
}
|
||||
|
||||
void Audio::onContextChanged() {
|
||||
bool isHMD = qApp->isHMDMode();
|
||||
if (_contextIsHMD != isHMD) {
|
||||
_contextIsHMD = isHMD;
|
||||
emit contextChanged(getContext());
|
||||
}
|
||||
}
|
||||
|
||||
void Audio::setReverb(bool enable) {
|
||||
DependencyManager::get<AudioClient>()->setReverb(enable);
|
||||
}
|
||||
|
||||
void Audio::setReverbOptions(const AudioEffectOptions* options) {
|
||||
DependencyManager::get<AudioClient>()->setReverbOptions(options);
|
||||
}
|
interface/src/scripting/Audio.h (new file, 81 lines)
@ -0,0 +1,81 @@
|
|||
//
|
||||
// Audio.h
|
||||
// interface/src/scripting
|
||||
//
|
||||
// Created by Zach Pomerantz on 28/5/2017.
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_scripting_Audio_h
|
||||
#define hifi_scripting_Audio_h
|
||||
|
||||
#include "AudioScriptingInterface.h"
|
||||
#include "AudioDevices.h"
|
||||
#include "AudioEffectOptions.h"
|
||||
#include "SettingHandle.h"
|
||||
|
||||
namespace scripting {
|
||||
|
||||
class Audio : public AudioScriptingInterface {
|
||||
Q_OBJECT
|
||||
SINGLETON_DEPENDENCY
|
||||
|
||||
Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
|
||||
Q_PROPERTY(bool noiseReduction READ noiseReductionEnabled WRITE enableNoiseReduction NOTIFY noiseReductionChanged)
|
||||
Q_PROPERTY(float inputVolume READ getInputVolume WRITE setInputVolume NOTIFY inputVolumeChanged)
|
||||
Q_PROPERTY(QString context READ getContext NOTIFY contextChanged)
|
||||
Q_PROPERTY(AudioDevices* devices READ getDevices NOTIFY nop)
|
||||
|
||||
public:
|
||||
static QString AUDIO;
|
||||
static QString HMD;
|
||||
static QString DESKTOP;
|
||||
|
||||
virtual ~Audio() {}
|
||||
|
||||
bool isMuted() const { return _isMuted; }
|
||||
bool noiseReductionEnabled() const { return _enableNoiseReduction; }
|
||||
float getInputVolume() const { return _inputVolume; }
|
||||
QString getContext() const;
|
||||
|
||||
void setMuted(bool muted);
|
||||
void enableNoiseReduction(bool enable);
|
||||
void showMicMeter(bool show);
|
||||
void setInputVolume(float volume);
|
||||
|
||||
Q_INVOKABLE void setReverb(bool enable);
|
||||
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
|
||||
|
||||
signals:
|
||||
void nop();
|
||||
void mutedChanged(bool isMuted);
|
||||
void noiseReductionChanged(bool isEnabled);
|
||||
void inputVolumeChanged(float volume);
|
||||
void contextChanged(const QString& context);
|
||||
|
||||
public slots:
|
||||
void onMutedChanged();
|
||||
void onContextChanged();
|
||||
void onInputChanged();
|
||||
|
||||
protected:
|
||||
// Audio must live on a separate thread from AudioClient to avoid deadlocks
|
||||
Audio();
|
||||
|
||||
private:
|
||||
|
||||
float _inputVolume { 1.0f };
|
||||
bool _isMuted { false };
|
||||
bool _enableNoiseReduction;
|
||||
bool _contextIsHMD { false };
|
||||
|
||||
AudioDevices* getDevices() { return &_devices; }
|
||||
AudioDevices _devices;
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_scripting_Audio_h
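Audio.h above exposes the audio state to QML and scripts through Q_PROPERTY declarations with READ/WRITE/NOTIFY accessors, which is what lets QML bind to Audio.muted and react to mutedChanged. The following is a minimal sketch of that property pattern using a stand-in class, not the real scripting::Audio, and again assuming a moc-processed main.cpp.

    #include <QCoreApplication>
    #include <QObject>
    #include <QDebug>

    // Stand-in object exposing a 'muted' property the way scripting::Audio does above.
    class AudioLike : public QObject {
        Q_OBJECT
        Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
    public:
        bool isMuted() const { return _muted; }
        void setMuted(bool muted) {
            if (_muted != muted) {          // only notify on a real change, as in Audio::setMuted()
                _muted = muted;
                emit mutedChanged(_muted);
            }
        }
    signals:
        void mutedChanged(bool isMuted);
    private:
        bool _muted { false };
    };

    int main(int argc, char* argv[]) {
        QCoreApplication app(argc, argv);

        AudioLike audio;
        QObject::connect(&audio, &AudioLike::mutedChanged,
                         [](bool isMuted) { qDebug() << "mutedChanged:" << isMuted; });

        // The property is reachable both through the setter and the generic
        // property system, which is the hook QML and the script engine use.
        audio.setMuted(true);                // prints mutedChanged: true
        audio.setProperty("muted", false);   // prints mutedChanged: false
        qDebug() << audio.property("muted"); // QVariant(bool, false)
        return 0;
    }

    #include "main.moc"   // assumed file name for this sketch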
|
|
@@ -1,301 +0,0 @@
//
//  AudioDeviceScriptingInterface.cpp
//  interface/src/scripting
//
//  Created by Brad Hefta-Gaub on 3/23/14.
//  Copyright 2014 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <AudioClient.h>
#include <AudioClientLogging.h>

#include "AudioDeviceScriptingInterface.h"
#include "SettingsScriptingInterface.h"

AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() {
    static AudioDeviceScriptingInterface sharedInstance;
    return &sharedInstance;
}

QStringList AudioDeviceScriptingInterface::inputAudioDevices() const {
    return _inputAudioDevices;
}

QStringList AudioDeviceScriptingInterface::outputAudioDevices() const {
    return _outputAudioDevices;
}

bool AudioDeviceScriptingInterface::muted()
{
    return getMuted();
}

AudioDeviceScriptingInterface::AudioDeviceScriptingInterface(): QAbstractListModel(nullptr) {
    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::muteToggled,
            this, &AudioDeviceScriptingInterface::muteToggled);
    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::deviceChanged,
            this, &AudioDeviceScriptingInterface::onDeviceChanged, Qt::QueuedConnection);
    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentInputDeviceChanged,
            this, &AudioDeviceScriptingInterface::onCurrentInputDeviceChanged, Qt::QueuedConnection);
    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentOutputDeviceChanged,
            this, &AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged, Qt::QueuedConnection);
    //fill up model
    onDeviceChanged();
    //set up previously saved device
    SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
    const QString inDevice = settings->getValue("audio_input_device", _currentInputDevice).toString();
    if (inDevice != _currentInputDevice) {
        qCDebug(audioclient) << __FUNCTION__ << "about to call setInputDeviceAsync() device: [" << inDevice << "] _currentInputDevice:" << _currentInputDevice;
        setInputDeviceAsync(inDevice);
    }

    // If the audio_output_device setting is not available, use the _currentOutputDevice
    auto outDevice = settings->getValue("audio_output_device", _currentOutputDevice).toString();
    if (outDevice != _currentOutputDevice) {
        qCDebug(audioclient) << __FUNCTION__ << "about to call setOutputDeviceAsync() outDevice: [" << outDevice << "] _currentOutputDevice:" << _currentOutputDevice;
        setOutputDeviceAsync(outDevice);
    }
}

bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
    qCDebug(audioclient) << __FUNCTION__ << "deviceName:" << deviceName;

    bool result;
    QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchInputToAudioDevice",
                              Qt::BlockingQueuedConnection,
                              Q_RETURN_ARG(bool, result),
                              Q_ARG(const QString&, deviceName));
    return result;
}

bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {

    qCDebug(audioclient) << __FUNCTION__ << "deviceName:" << deviceName;

    bool result;
    QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchOutputToAudioDevice",
                              Qt::BlockingQueuedConnection,
                              Q_RETURN_ARG(bool, result),
                              Q_ARG(const QString&, deviceName));
    return result;
}

bool AudioDeviceScriptingInterface::setDeviceFromMenu(const QString& deviceMenuName) {
    QAudio::Mode mode;

    if (deviceMenuName.indexOf("for Output") != -1) {
        mode = QAudio::AudioOutput;
    } else if (deviceMenuName.indexOf("for Input") != -1) {
        mode = QAudio::AudioInput;
    } else {
        return false;
    }

    for (ScriptingAudioDeviceInfo di: _devices) {
        if (mode == di.mode && deviceMenuName.contains(di.name)) {
            if (mode == QAudio::AudioOutput) {
                qCDebug(audioclient) << __FUNCTION__ << "about to call setOutputDeviceAsync() device: [" << di.name << "]";
                setOutputDeviceAsync(di.name);
            } else {
                qCDebug(audioclient) << __FUNCTION__ << "about to call setInputDeviceAsync() device: [" << di.name << "]";
                setInputDeviceAsync(di.name);
            }
            return true;
        }
    }

    return false;
}

void AudioDeviceScriptingInterface::setInputDeviceAsync(const QString& deviceName) {
    qCDebug(audioclient) << __FUNCTION__ << "deviceName:" << deviceName;

    if (deviceName.isEmpty()) {
        qCDebug(audioclient) << __FUNCTION__ << "attempt to set empty deviceName:" << deviceName << "... ignoring!";
        return;
    }

    QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchInputToAudioDevice",
                              Qt::QueuedConnection,
                              Q_ARG(const QString&, deviceName));
}

void AudioDeviceScriptingInterface::setOutputDeviceAsync(const QString& deviceName) {
    qCDebug(audioclient) << __FUNCTION__ << "deviceName:" << deviceName;

    if (deviceName.isEmpty()) {
        qCDebug(audioclient) << __FUNCTION__ << "attempt to set empty deviceName:" << deviceName << "... ignoring!";
        return;
    }

    QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchOutputToAudioDevice",
                              Qt::QueuedConnection,
                              Q_ARG(const QString&, deviceName));
}

QString AudioDeviceScriptingInterface::getInputDevice() {
    return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioInput);
}

QString AudioDeviceScriptingInterface::getOutputDevice() {
    return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioOutput);
}

QString AudioDeviceScriptingInterface::getDefaultInputDevice() {
    return DependencyManager::get<AudioClient>()->getDefaultDeviceName(QAudio::AudioInput);
}

QString AudioDeviceScriptingInterface::getDefaultOutputDevice() {
    return DependencyManager::get<AudioClient>()->getDefaultDeviceName(QAudio::AudioOutput);
}

QVector<QString> AudioDeviceScriptingInterface::getInputDevices() {
    return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioInput);
}

QVector<QString> AudioDeviceScriptingInterface::getOutputDevices() {
    return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioOutput);
}

float AudioDeviceScriptingInterface::getInputVolume() {
    return DependencyManager::get<AudioClient>()->getInputVolume();
}

void AudioDeviceScriptingInterface::setInputVolume(float volume) {
    DependencyManager::get<AudioClient>()->setInputVolume(volume);
}

void AudioDeviceScriptingInterface::setReverb(bool reverb) {
    DependencyManager::get<AudioClient>()->setReverb(reverb);
}

void AudioDeviceScriptingInterface::setReverbOptions(const AudioEffectOptions* options) {
    DependencyManager::get<AudioClient>()->setReverbOptions(options);
}

void AudioDeviceScriptingInterface::toggleMute() {
    DependencyManager::get<AudioClient>()->toggleMute();
}

void AudioDeviceScriptingInterface::setMuted(bool muted)
{
    bool lMuted = getMuted();
    if (lMuted == muted)
        return;

    toggleMute();
    lMuted = getMuted();
    emit mutedChanged(lMuted);
}

bool AudioDeviceScriptingInterface::getMuted() {
    return DependencyManager::get<AudioClient>()->isMuted();
}

QVariant AudioDeviceScriptingInterface::data(const QModelIndex& index, int role) const {
    //sanity
    if (!index.isValid() || index.row() >= _devices.size())
        return QVariant();

    if (role == Qt::DisplayRole || role == DisplayNameRole) {
        return _devices.at(index.row()).name;
    } else if (role == SelectedRole) {
        return _devices.at(index.row()).selected;
    } else if (role == AudioModeRole) {
        return (int)_devices.at(index.row()).mode;
    }
    return QVariant();
}

int AudioDeviceScriptingInterface::rowCount(const QModelIndex& parent) const {
    Q_UNUSED(parent)
    return _devices.size();
}

QHash<int, QByteArray> AudioDeviceScriptingInterface::roleNames() const {
    QHash<int, QByteArray> roles;
    roles.insert(DisplayNameRole, "devicename");
    roles.insert(SelectedRole, "devicechecked");
    roles.insert(AudioModeRole, "devicemode");
    return roles;
}

void AudioDeviceScriptingInterface::onDeviceChanged()
{
    beginResetModel();
    _outputAudioDevices.clear();
    _devices.clear();
    _currentOutputDevice = getOutputDevice();
    for (QString name: getOutputDevices()) {
        ScriptingAudioDeviceInfo di;
        di.name = name;
        di.selected = (name == _currentOutputDevice);
        di.mode = QAudio::AudioOutput;
        _devices.append(di);
        _outputAudioDevices.append(name);
    }
    emit outputAudioDevicesChanged(_outputAudioDevices);

    _inputAudioDevices.clear();
    _currentInputDevice = getInputDevice();
    for (QString name: getInputDevices()) {
        ScriptingAudioDeviceInfo di;
        di.name = name;
        di.selected = (name == _currentInputDevice);
        di.mode = QAudio::AudioInput;
        _devices.append(di);
        _inputAudioDevices.append(name);
    }
    emit inputAudioDevicesChanged(_inputAudioDevices);

    endResetModel();
    emit deviceChanged();
}

void AudioDeviceScriptingInterface::onCurrentInputDeviceChanged(const QString& name)
{
    currentDeviceUpdate(name, QAudio::AudioInput);
    //we got a signal that device changed. Save it now
    SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
    settings->setValue("audio_input_device", name);
    emit currentInputDeviceChanged(name);
}

void AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged(const QString& name)
{
    currentDeviceUpdate(name, QAudio::AudioOutput);

    // FIXME - this is kinda janky to set the setting on the result of a signal
    //we got a signal that device changed. Save it now
    SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
    qCDebug(audioclient) << __FUNCTION__ << "about to call settings->setValue('audio_output_device', name); name:" << name;
    settings->setValue("audio_output_device", name);
    emit currentOutputDeviceChanged(name);
}

void AudioDeviceScriptingInterface::currentDeviceUpdate(const QString& name, QAudio::Mode mode)
{
    QVector<int> role;
    role.append(SelectedRole);

    for (int i = 0; i < _devices.size(); i++) {
        ScriptingAudioDeviceInfo di = _devices.at(i);
        if (di.mode != mode) {
            continue;
        }
        if (di.selected && di.name != name ) {
            di.selected = false;
            _devices[i] = di;
            emit dataChanged(index(i, 0), index(i, 0), role);
        }
        if (di.name == name) {
            di.selected = true;
            _devices[i] = di;
            emit dataChanged(index(i, 0), index(i, 0), role);
        }
    }
}
@@ -1,107 +0,0 @@
//
//  AudioDeviceScriptingInterface.h
//  interface/src/scripting
//
//  Created by Brad Hefta-Gaub on 3/22/14.
//  Copyright 2014 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AudioDeviceScriptingInterface_h
#define hifi_AudioDeviceScriptingInterface_h

#include <QObject>
#include <QString>
#include <QVector>
#include <QAbstractListModel>
#include <QAudio>

class AudioEffectOptions;

struct ScriptingAudioDeviceInfo {
    QString name;
    bool selected;
    QAudio::Mode mode;
};

class AudioDeviceScriptingInterface : public QAbstractListModel {
    Q_OBJECT

    Q_PROPERTY(QStringList inputAudioDevices READ inputAudioDevices NOTIFY inputAudioDevicesChanged)
    Q_PROPERTY(QStringList outputAudioDevices READ outputAudioDevices NOTIFY outputAudioDevicesChanged)
    Q_PROPERTY(bool muted READ muted WRITE setMuted NOTIFY mutedChanged)

public:
    static AudioDeviceScriptingInterface* getInstance();

    QStringList inputAudioDevices() const;
    QStringList outputAudioDevices() const;
    bool muted();

    QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const override;
    int rowCount(const QModelIndex& parent = QModelIndex()) const override;
    QHash<int, QByteArray> roleNames() const override;

    enum Roles {
        DisplayNameRole = Qt::UserRole,
        SelectedRole,
        AudioModeRole
    };

private slots:
    void onDeviceChanged();
    void onCurrentInputDeviceChanged(const QString& name);
    void onCurrentOutputDeviceChanged(const QString& name);
    void currentDeviceUpdate(const QString& name, QAudio::Mode mode);

public slots:
    bool setInputDevice(const QString& deviceName);
    bool setOutputDevice(const QString& deviceName);
    bool setDeviceFromMenu(const QString& deviceMenuName);

    QString getInputDevice();
    QString getOutputDevice();

    QString getDefaultInputDevice();
    QString getDefaultOutputDevice();

    QVector<QString> getInputDevices();
    QVector<QString> getOutputDevices();

    float getInputVolume();
    void setInputVolume(float volume);
    void setReverb(bool reverb);
    void setReverbOptions(const AudioEffectOptions* options);

    bool getMuted();
    void toggleMute();

    void setMuted(bool muted);

    void setInputDeviceAsync(const QString& deviceName);
    void setOutputDeviceAsync(const QString& deviceName);
private:
    AudioDeviceScriptingInterface();

signals:
    void muteToggled();
    void deviceChanged();
    void currentInputDeviceChanged(const QString& name);
    void currentOutputDeviceChanged(const QString& name);
    void mutedChanged(bool muted);
    void inputAudioDevicesChanged(QStringList inputAudioDevices);
    void outputAudioDevicesChanged(QStringList outputAudioDevices);

private:
    QVector<ScriptingAudioDeviceInfo> _devices;

    QStringList _inputAudioDevices;
    QStringList _outputAudioDevices;

    QString _currentInputDevice;
    QString _currentOutputDevice;
};

#endif // hifi_AudioDeviceScriptingInterface_h
242  interface/src/scripting/AudioDevices.cpp  Normal file
@@ -0,0 +1,242 @@
//
//  AudioDevices.cpp
//  interface/src/scripting
//
//  Created by Zach Pomerantz on 28/5/2017.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AudioDevices.h"

#include "Application.h"
#include "AudioClient.h"
#include "Audio.h"

using namespace scripting;

Setting::Handle<QString> inputDeviceDesktop { QStringList { Audio::AUDIO, Audio::DESKTOP, "INPUT" }};
Setting::Handle<QString> outputDeviceDesktop { QStringList { Audio::AUDIO, Audio::DESKTOP, "OUTPUT" }};
Setting::Handle<QString> inputDeviceHMD { QStringList { Audio::AUDIO, Audio::HMD, "INPUT" }};
Setting::Handle<QString> outputDeviceHMD { QStringList { Audio::AUDIO, Audio::HMD, "OUTPUT" }};

QHash<int, QByteArray> AudioDeviceList::_roles {
    { Qt::DisplayRole, "display" },
    { Qt::CheckStateRole, "selected" }
};
Qt::ItemFlags AudioDeviceList::_flags { Qt::ItemIsSelectable | Qt::ItemIsEnabled };

QVariant AudioDeviceList::data(const QModelIndex& index, int role) const {
    if (!index.isValid() || index.row() >= _devices.size()) {
        return QVariant();
    }

    if (role == Qt::DisplayRole) {
        return _devices.at(index.row()).display;
    } else if (role == Qt::CheckStateRole) {
        return _devices.at(index.row()).selected;
    } else {
        return QVariant();
    }
}

bool AudioDeviceList::setData(const QModelIndex& index, const QVariant& value, int role) {
    if (!index.isValid() || index.row() >= _devices.size()) {
        return false;
    }

    bool success = false;

    if (role == Qt::CheckStateRole) {
        auto selected = value.toBool();
        auto& device = _devices[index.row()];

        // only allow switching to a new device, not deactivating an in-use device
        if (selected
            // skip if already selected
            && selected != device.selected) {

            auto client = DependencyManager::get<AudioClient>();
            QMetaObject::invokeMethod(client.data(), "switchAudioDevice", Qt::BlockingQueuedConnection,
                Q_RETURN_ARG(bool, success),
                Q_ARG(QAudio::Mode, _mode),
                Q_ARG(const QAudioDeviceInfo&, device.info));

            if (success) {
                device.selected = true;
                emit deviceSelected(device.info);
                emit deviceChanged(device.info);
            }
        }
    }

    emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
    return success;
}

void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
    bool success { false };

    // try to set the last selected device
    if (!device.isNull()) {
        auto i = 0;
        for (; i < rowCount(); ++i) {
            if (device == _devices[i].info.deviceName()) {
                break;
            }
        }
        if (i < rowCount()) {
            success = setData(createIndex(i, 0), true, Qt::CheckStateRole);
        }

        // the selection failed - reset it
        if (!success) {
            emit deviceSelected(QAudioDeviceInfo());
        }
    }

    // try to set to the default device for this mode
    if (!success) {
        auto client = DependencyManager::get<AudioClient>().data();
        if (contextIsHMD) {
            QString deviceName;
            if (_mode == QAudio::AudioInput) {
                deviceName = qApp->getActiveDisplayPlugin()->getPreferredAudioInDevice();
            } else { // if (_mode == QAudio::AudioOutput)
                deviceName = qApp->getActiveDisplayPlugin()->getPreferredAudioOutDevice();
            }
            if (!deviceName.isNull()) {
                QMetaObject::invokeMethod(client, "switchAudioDevice", Q_ARG(QAudio::Mode, _mode), Q_ARG(QString, deviceName));
            }
        } else {
            // use the system default
            QMetaObject::invokeMethod(client, "switchAudioDevice", Q_ARG(QAudio::Mode, _mode));
        }
    }
}

void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
    _selectedDevice = device;
    QModelIndex index;

    for (auto i = 0; i < _devices.size(); ++i) {
        AudioDevice& device = _devices[i];

        if (device.selected && device.info != _selectedDevice) {
            device.selected = false;
        } else if (device.info == _selectedDevice) {
            device.selected = true;
            index = createIndex(i, 0);
        }
    }

    emit deviceChanged(_selectedDevice);
    emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
}

void AudioDeviceList::onDevicesChanged(const QList<QAudioDeviceInfo>& devices) {
    beginResetModel();

    _devices.clear();

    foreach(const QAudioDeviceInfo& deviceInfo, devices) {
        AudioDevice device;
        device.info = deviceInfo;
        device.display = device.info.deviceName()
            .replace("High Definition", "HD")
            .remove("Device")
            .replace(" )", ")");
        device.selected = (device.info == _selectedDevice);
        _devices.push_back(device);
    }

    endResetModel();
}

AudioDevices::AudioDevices(bool& contextIsHMD) : _contextIsHMD(contextIsHMD) {
    auto client = DependencyManager::get<AudioClient>();

    connect(client.data(), &AudioClient::deviceChanged, this, &AudioDevices::onDeviceChanged, Qt::QueuedConnection);
    connect(client.data(), &AudioClient::devicesChanged, this, &AudioDevices::onDevicesChanged, Qt::QueuedConnection);

    // connections are made after client is initialized, so we must also fetch the devices
    _inputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioInput));
    _outputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioOutput));
    _inputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioInput));
    _outputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioOutput));

    connect(&_inputs, &AudioDeviceList::deviceSelected, this, &AudioDevices::onInputDeviceSelected);
    connect(&_outputs, &AudioDeviceList::deviceSelected, this, &AudioDevices::onOutputDeviceSelected);
}

void AudioDevices::onContextChanged(const QString& context) {
    QString input;
    QString output;
    if (_contextIsHMD) {
        input = inputDeviceHMD.get();
        output = outputDeviceHMD.get();
    } else {
        input = inputDeviceDesktop.get();
        output = outputDeviceDesktop.get();
    }

    _inputs.resetDevice(_contextIsHMD, input);
    _outputs.resetDevice(_contextIsHMD, output);
}

void AudioDevices::onInputDeviceSelected(const QAudioDeviceInfo& device) {
    QString deviceName;
    if (!device.isNull()) {
        deviceName = device.deviceName();
    }

    if (_contextIsHMD) {
        inputDeviceHMD.set(deviceName);
    } else {
        inputDeviceDesktop.set(deviceName);
    }
}

void AudioDevices::onOutputDeviceSelected(const QAudioDeviceInfo& device) {
    QString deviceName;
    if (!device.isNull()) {
        deviceName = device.deviceName();
    }

    if (_contextIsHMD) {
        outputDeviceHMD.set(deviceName);
    } else {
        outputDeviceDesktop.set(deviceName);
    }
}

void AudioDevices::onDeviceChanged(QAudio::Mode mode, const QAudioDeviceInfo& device) {
    if (mode == QAudio::AudioInput) {
        _inputs.onDeviceChanged(device);
    } else { // if (mode == QAudio::AudioOutput)
        _outputs.onDeviceChanged(device);
    }
}

void AudioDevices::onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceInfo>& devices) {
    static bool initialized { false };
    auto initialize = [&]{
        if (initialized) {
            onContextChanged(QString());
        } else {
            initialized = true;
        }
    };

    if (mode == QAudio::AudioInput) {
        _inputs.onDevicesChanged(devices);
        static std::once_flag inputFlag;
        std::call_once(inputFlag, initialize);
    } else { // if (mode == QAudio::AudioOutput)
        _outputs.onDevicesChanged(devices);
        static std::once_flag outputFlag;
        std::call_once(outputFlag, initialize);
    }
}
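As an aside, the once-per-mode initialization in AudioDevices::onDevicesChanged is easy to miss: the first mode to report its device list only records that it arrived, and onContextChanged runs only once the second mode has also reported. A standalone sketch of that idiom, with hypothetical names:

// Minimal sketch of the "run once both sources have reported" idiom used above.
#include <mutex>
#include <iostream>

static void onBothReady() { std::cout << "both device lists present\n"; }

static void reportDevices(bool isInput) {
    static bool firstReportSeen { false };
    auto initialize = [&] {
        if (firstReportSeen) {
            onBothReady();           // second distinct source -> both lists exist
        } else {
            firstReportSeen = true;  // first source only records that it arrived
        }
    };

    if (isInput) {
        static std::once_flag inputFlag;
        std::call_once(inputFlag, initialize);   // fires at most once for inputs
    } else {
        static std::once_flag outputFlag;
        std::call_once(outputFlag, initialize);  // fires at most once for outputs
    }
}

int main() {
    reportDevices(true);   // marks initialization
    reportDevices(true);   // ignored -- call_once already fired for inputs
    reportDevices(false);  // second source -> onBothReady()
}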
98  interface/src/scripting/AudioDevices.h  Normal file
@@ -0,0 +1,98 @@
//
//  AudioDevices.h
//  interface/src/scripting
//
//  Created by Zach Pomerantz on 28/5/2017.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_scripting_AudioDevices_h
#define hifi_scripting_AudioDevices_h

#include <QObject>
#include <QAbstractListModel>
#include <QAudioDeviceInfo>

namespace scripting {

class AudioDevice {
public:
    QAudioDeviceInfo info;
    QString display;
    bool selected { false };
};

class AudioDeviceList : public QAbstractListModel {
    Q_OBJECT

public:
    AudioDeviceList(QAudio::Mode mode) : _mode(mode) {}

    int rowCount(const QModelIndex& parent = QModelIndex()) const override { Q_UNUSED(parent); return _devices.size(); }
    QHash<int, QByteArray> roleNames() const override { return _roles; }
    Qt::ItemFlags flags(const QModelIndex& index) const override { return _flags; }

    // get/set devices through a QML ListView
    QVariant data(const QModelIndex& index, int role) const override;
    bool setData(const QModelIndex& index, const QVariant &value, int role) override;

    // reset device to the last selected device in this context, or the default
    void resetDevice(bool contextIsHMD, const QString& device);

signals:
    void deviceSelected(const QAudioDeviceInfo& device);
    void deviceChanged(const QAudioDeviceInfo& device);

private slots:
    void onDeviceChanged(const QAudioDeviceInfo& device);
    void onDevicesChanged(const QList<QAudioDeviceInfo>& devices);

private:
    friend class AudioDevices;

    static QHash<int, QByteArray> _roles;
    static Qt::ItemFlags _flags;

    QAudio::Mode _mode;
    QAudioDeviceInfo _selectedDevice;
    QList<AudioDevice> _devices;
};

class Audio;

class AudioDevices : public QObject {
    Q_OBJECT
    Q_PROPERTY(AudioDeviceList* input READ getInputList NOTIFY nop)
    Q_PROPERTY(AudioDeviceList* output READ getOutputList NOTIFY nop)

public:
    AudioDevices(bool& contextIsHMD);

signals:
    void nop();

private slots:
    void onContextChanged(const QString& context);
    void onInputDeviceSelected(const QAudioDeviceInfo& device);
    void onOutputDeviceSelected(const QAudioDeviceInfo& device);
    void onDeviceChanged(QAudio::Mode mode, const QAudioDeviceInfo& device);
    void onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceInfo>& devices);

private:
    friend class Audio;

    AudioDeviceList* getInputList() { return &_inputs; }
    AudioDeviceList* getOutputList() { return &_outputs; }

    AudioDeviceList _inputs { QAudio::AudioInput };
    AudioDeviceList _outputs { QAudio::AudioOutput };

    bool& _contextIsHMD;
};

};

#endif // hifi_scripting_AudioDevices_h
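A brief sketch (not in the commit) of how a caller, whether C++ or the QML ListView mentioned above, selects a device purely through the QAbstractListModel interface declared here; selectDeviceByRow is a hypothetical helper:

#include <QModelIndex>
#include <QVariant>
#include "scripting/AudioDevices.h"

void selectDeviceByRow(scripting::AudioDeviceList* list, int row) {  // hypothetical helper
    if (row < 0 || row >= list->rowCount()) {
        return;
    }
    // DisplayRole carries the cleaned-up device name shown in the UI
    QString label = list->data(list->index(row, 0), Qt::DisplayRole).toString();
    Q_UNUSED(label);
    // checking a row asks AudioClient (blocking) to switch to that device;
    // setData returns false if the switch failed or the row was already selected
    list->setData(list->index(row, 0), true, Qt::CheckStateRole);
}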
@@ -18,9 +18,10 @@

HIFI_QML_DEF(AvatarInputs)

static AvatarInputs* INSTANCE{ nullptr };

Setting::Handle<bool> showAudioToolsSetting { QStringList { "AvatarInputs", "showAudioTools" }, false };

AvatarInputs* AvatarInputs::getInstance() {
    if (!INSTANCE) {
        AvatarInputs::registerType();

@@ -32,6 +33,7 @@ AvatarInputs* AvatarInputs::getInstance() {

AvatarInputs::AvatarInputs(QQuickItem* parent) : QQuickItem(parent) {
    INSTANCE = this;
    _showAudioTools = showAudioToolsSetting.get();
}

#define AI_UPDATE(name, src) \

@@ -43,16 +45,6 @@ AvatarInputs::AvatarInputs(QQuickItem* parent) : QQuickItem(parent) {
    } \
}

#define AI_UPDATE_WRITABLE(name, src) \
    { \
        auto val = src; \
        if (_##name != val) { \
            _##name = val; \
            qDebug() << "AvatarInputs" << val; \
            emit name##Changed(val); \
        } \
    }

#define AI_UPDATE_FLOAT(name, src, epsilon) \
    { \
        float val = src; \

@@ -94,8 +86,6 @@ void AvatarInputs::update() {
    AI_UPDATE(cameraMuted, Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking));
    AI_UPDATE(isHMD, qApp->isHMDMode());

    AI_UPDATE_WRITABLE(showAudioTools, Menu::getInstance()->isOptionChecked(MenuOption::AudioTools));

    auto audioIO = DependencyManager::get<AudioClient>();

    const float audioLevel = loudnessToAudioLevel(DependencyManager::get<AudioClient>()->getLastInputLoudness());

@@ -122,8 +112,9 @@ void AvatarInputs::setShowAudioTools(bool showAudioTools) {
    if (_showAudioTools == showAudioTools)
        return;

    Menu::getInstance()->setIsOptionChecked(MenuOption::AudioTools, showAudioTools);
    update();
    _showAudioTools = showAudioTools;
    showAudioToolsSetting.set(_showAudioTools);
    emit showAudioToolsChanged(_showAudioTools);
}

void AvatarInputs::toggleCameraMute() {
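For readers skimming the macros above: AI_UPDATE_WRITABLE(showAudioTools, expr) expands to roughly the following, where expr stands for the source expression passed as the second argument:

{
    auto val = expr;                           // 'expr' is the second macro argument
    if (_showAudioTools != val) {
        _showAudioTools = val;
        qDebug() << "AvatarInputs" << val;
        emit showAudioToolsChanged(val);       // name##Changed -> showAudioToolsChanged
    }
}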
@@ -49,7 +49,7 @@ signals:
    void audioClippingChanged();
    void audioLevelChanged();
    void isHMDChanged();
    void showAudioToolsChanged(bool showAudioTools);
    void showAudioToolsChanged(bool show);

protected:
    Q_INVOKABLE void resetSensors();
@@ -227,17 +227,17 @@ void setupPreferences() {
        preferences->addPreference(preference);
    }

    static const QString AUDIO("Audio");
    static const QString AUDIO_BUFFERS("Audio Buffers");
    {
        auto getter = []()->bool { return !DependencyManager::get<AudioClient>()->getReceivedAudioStream().dynamicJitterBufferEnabled(); };
        auto setter = [](bool value) { DependencyManager::get<AudioClient>()->getReceivedAudioStream().setDynamicJitterBufferEnabled(!value); };
        auto preference = new CheckPreference(AUDIO, "Disable dynamic jitter buffer", getter, setter);
        auto preference = new CheckPreference(AUDIO_BUFFERS, "Disable dynamic jitter buffer", getter, setter);
        preferences->addPreference(preference);
    }
    {
        auto getter = []()->float { return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getStaticJitterBufferFrames(); };
        auto setter = [](float value) { DependencyManager::get<AudioClient>()->getReceivedAudioStream().setStaticJitterBufferFrames(value); };
        auto preference = new SpinnerPreference(AUDIO, "Static jitter buffer frames", getter, setter);
        auto preference = new SpinnerPreference(AUDIO_BUFFERS, "Static jitter buffer frames", getter, setter);
        preference->setMin(0);
        preference->setMax(2000);
        preference->setStep(1);

@@ -246,13 +246,13 @@ void setupPreferences() {
    {
        auto getter = []()->bool { return !DependencyManager::get<AudioClient>()->getOutputStarveDetectionEnabled(); };
        auto setter = [](bool value) { DependencyManager::get<AudioClient>()->setOutputStarveDetectionEnabled(!value); };
        auto preference = new CheckPreference(AUDIO, "Disable output starve detection", getter, setter);
        auto preference = new CheckPreference(AUDIO_BUFFERS, "Disable output starve detection", getter, setter);
        preferences->addPreference(preference);
    }
    {
        auto getter = []()->float { return DependencyManager::get<AudioClient>()->getOutputBufferSize(); };
        auto setter = [](float value) { DependencyManager::get<AudioClient>()->setOutputBufferSize(value); };
        auto preference = new SpinnerPreference(AUDIO, "Output buffer initial frames", getter, setter);
        auto preference = new SpinnerPreference(AUDIO_BUFFERS, "Output buffer initial frames", getter, setter);
        preference->setMin(AudioClient::MIN_BUFFER_FRAMES);
        preference->setMax(AudioClient::MAX_BUFFER_FRAMES);
        preference->setStep(1);

@@ -262,13 +262,13 @@ void setupPreferences() {
    {
        auto getter = []()->bool { return DependencyManager::get<AudioClient>()->isSimulatingJitter(); };
        auto setter = [](bool value) { return DependencyManager::get<AudioClient>()->setIsSimulatingJitter(value); };
        auto preference = new CheckPreference(AUDIO, "Packet jitter simulator", getter, setter);
        auto preference = new CheckPreference(AUDIO_BUFFERS, "Packet jitter simulator", getter, setter);
        preferences->addPreference(preference);
    }
    {
        auto getter = []()->float { return DependencyManager::get<AudioClient>()->getGateThreshold(); };
        auto setter = [](float value) { return DependencyManager::get<AudioClient>()->setGateThreshold(value); };
        auto preference = new SpinnerPreference(AUDIO, "Packet throttle threshold", getter, setter);
        auto preference = new SpinnerPreference(AUDIO_BUFFERS, "Packet throttle threshold", getter, setter);
        preference->setMin(1);
        preference->setMax(200);
        preference->setStep(1);
@@ -47,7 +47,6 @@
#include "LODManager.h"
#include "ui/OctreeStatsProvider.h"
#include "ui/DomainConnectionModel.h"
#include "scripting/AudioDeviceScriptingInterface.h"
#include "ui/AvatarInputs.h"
#include "avatar/AvatarManager.h"
#include "scripting/GlobalServicesScriptingInterface.h"

@@ -187,6 +186,7 @@ void Web3DOverlay::loadSourceURL() {
        _webSurface->getSurfaceContext()->setContextProperty("offscreenFlags", flags);
        _webSurface->getSurfaceContext()->setContextProperty("AddressManager", DependencyManager::get<AddressManager>().data());
        _webSurface->getSurfaceContext()->setContextProperty("Account", AccountScriptingInterface::getInstance());
        _webSurface->getSurfaceContext()->setContextProperty("Audio", DependencyManager::get<AudioScriptingInterface>().data());
        _webSurface->getSurfaceContext()->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
        _webSurface->getSurfaceContext()->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
        _webSurface->getSurfaceContext()->setContextProperty("fileDialogHelper", new FileDialogHelper());

@@ -197,7 +197,6 @@ void Web3DOverlay::loadSourceURL() {
        _webSurface->getSurfaceContext()->setContextProperty("LODManager", DependencyManager::get<LODManager>().data());
        _webSurface->getSurfaceContext()->setContextProperty("OctreeStats", DependencyManager::get<OctreeStatsProvider>().data());
        _webSurface->getSurfaceContext()->setContextProperty("DCModel", DependencyManager::get<DomainConnectionModel>().data());
        _webSurface->getSurfaceContext()->setContextProperty("AudioDevice", AudioDeviceScriptingInterface::getInstance());
        _webSurface->getSurfaceContext()->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
        _webSurface->getSurfaceContext()->setContextProperty("GlobalServices", GlobalServicesScriptingInterface::getInstance());
        _webSurface->getSurfaceContext()->setContextProperty("AvatarList", DependencyManager::get<AvatarManager>().data());
@@ -1164,9 +1164,32 @@ void Rig::updateFromHandAndFeetParameters(const HandAndFeetParameters& params, f
    bool bodySensorTrackingEnabled = params.isLeftFootEnabled || params.isRightFootEnabled;

    const float RELAX_DURATION = 0.6f;
    const float CONTROL_DURATION = 0.4f;
    const bool TO_CONTROLLED = true;
    const bool FROM_CONTROLLED = false;
    const bool LEFT_HAND = true;
    const bool RIGHT_HAND = false;

    if (params.isLeftEnabled) {
        if (!_isLeftHandControlled) {
            _leftHandControlTimeRemaining = CONTROL_DURATION;
            _isLeftHandControlled = true;
        }

        glm::vec3 handPosition = params.leftPosition;
        glm::quat handRotation = params.leftOrientation;

        if (_leftHandControlTimeRemaining > 0.0f) {
            // Move hand from non-controlled position to controlled position.
            _leftHandControlTimeRemaining = std::max(_leftHandControlTimeRemaining - dt, 0.0f);
            AnimPose handPose(Vectors::ONE, handRotation, handPosition);
            if (transitionHandPose(_leftHandControlTimeRemaining, CONTROL_DURATION, handPose, LEFT_HAND, TO_CONTROLLED,
                                   handPose)) {
                handPosition = handPose.trans();
                handRotation = handPose.rot();
            }
        }

        if (!bodySensorTrackingEnabled) {
            // prevent the hand IK targets from intersecting the body capsule
            glm::vec3 displacement;

@@ -1176,27 +1199,22 @@ void Rig::updateFromHandAndFeetParameters(const HandAndFeetParameters& params, f
        }

        _animVars.set("leftHandPosition", handPosition);
        _animVars.set("leftHandRotation", params.leftOrientation);
        _animVars.set("leftHandRotation", handRotation);
        _animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);

        _isLeftHandControlled = true;
        _lastLeftHandControlledPose = AnimPose(glm::vec3(1.0f), params.leftOrientation, handPosition);
        _lastLeftHandControlledPose = AnimPose(Vectors::ONE, handRotation, handPosition);
    } else {
        if (_isLeftHandControlled) {
            _leftHandRelaxDuration = RELAX_DURATION;
            _leftHandRelaxTimeRemaining = RELAX_DURATION;
            _isLeftHandControlled = false;
        }

        if (_leftHandRelaxDuration > 0) {
        if (_leftHandRelaxTimeRemaining > 0.0f) {
            // Move hand from controlled position to non-controlled position.
            _leftHandRelaxDuration = std::max(_leftHandRelaxDuration - dt, 0.0f);
            auto ikNode = getAnimInverseKinematicsNode();
            if (ikNode) {
                float alpha = 1.0f - _leftHandRelaxDuration / RELAX_DURATION;
                const AnimPose geometryToRigTransform(_geometryToRigTransform);
                AnimPose uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledLeftHandPose();
                AnimPose handPose;
                ::blend(1, &_lastLeftHandControlledPose, &uncontrolledHandPose, alpha, &handPose);
            _leftHandRelaxTimeRemaining = std::max(_leftHandRelaxTimeRemaining - dt, 0.0f);
            AnimPose handPose;
            if (transitionHandPose(_leftHandRelaxTimeRemaining, RELAX_DURATION, _lastLeftHandControlledPose, LEFT_HAND,
                                   FROM_CONTROLLED, handPose)) {
                _animVars.set("leftHandPosition", handPose.trans());
                _animVars.set("leftHandRotation", handPose.rot());
                _animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);

@@ -1209,7 +1227,25 @@ void Rig::updateFromHandAndFeetParameters(const HandAndFeetParameters& params, f
    }

    if (params.isRightEnabled) {
        if (!_isRightHandControlled) {
            _rightHandControlTimeRemaining = CONTROL_DURATION;
            _isRightHandControlled = true;
        }

        glm::vec3 handPosition = params.rightPosition;
        glm::quat handRotation = params.rightOrientation;

        if (_rightHandControlTimeRemaining > 0.0f) {
            // Move hand from non-controlled position to controlled position.
            _rightHandControlTimeRemaining = std::max(_rightHandControlTimeRemaining - dt, 0.0f);
            AnimPose handPose(Vectors::ONE, handRotation, handPosition);
            if (transitionHandPose(_rightHandControlTimeRemaining, CONTROL_DURATION, handPose, RIGHT_HAND, TO_CONTROLLED,
                                   handPose)) {
                handPosition = handPose.trans();
                handRotation = handPose.rot();
            }
        }

        if (!bodySensorTrackingEnabled) {
            // prevent the hand IK targets from intersecting the body capsule
            glm::vec3 displacement;

@@ -1219,27 +1255,22 @@ void Rig::updateFromHandAndFeetParameters(const HandAndFeetParameters& params, f
        }

        _animVars.set("rightHandPosition", handPosition);
        _animVars.set("rightHandRotation", params.rightOrientation);
        _animVars.set("rightHandRotation", handRotation);
        _animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);

        _isRightHandControlled = true;
        _lastRightHandControlledPose = AnimPose(glm::vec3(1.0f), params.rightOrientation, handPosition);
        _lastRightHandControlledPose = AnimPose(Vectors::ONE, handRotation, handPosition);
    } else {
        if (_isRightHandControlled) {
            _rightHandRelaxDuration = RELAX_DURATION;
            _rightHandRelaxTimeRemaining = RELAX_DURATION;
            _isRightHandControlled = false;
        }

        if (_rightHandRelaxDuration > 0) {
        if (_rightHandRelaxTimeRemaining > 0.0f) {
            // Move hand from controlled position to non-controlled position.
            _rightHandRelaxDuration = std::max(_rightHandRelaxDuration - dt, 0.0f);
            auto ikNode = getAnimInverseKinematicsNode();
            if (ikNode) {
                float alpha = 1.0f - _rightHandRelaxDuration / RELAX_DURATION;
                const AnimPose geometryToRigTransform(_geometryToRigTransform);
                AnimPose uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledRightHandPose();
                AnimPose handPose;
                ::blend(1, &_lastRightHandControlledPose, &uncontrolledHandPose, alpha, &handPose);
            _rightHandRelaxTimeRemaining = std::max(_rightHandRelaxTimeRemaining - dt, 0.0f);
            AnimPose handPose;
            if (transitionHandPose(_rightHandRelaxTimeRemaining, RELAX_DURATION, _lastRightHandControlledPose, RIGHT_HAND,
                                   FROM_CONTROLLED, handPose)) {
                _animVars.set("rightHandPosition", handPose.trans());
                _animVars.set("rightHandRotation", handPose.rot());
                _animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);

@@ -1545,3 +1576,25 @@ void Rig::computeAvatarBoundingCapsule(
    glm::vec3 rigCenter = (geometryToRig * (0.5f * (totalExtents.maximum + totalExtents.minimum)));
    localOffsetOut = rigCenter - (geometryToRig * rootPosition);
}

bool Rig::transitionHandPose(float deltaTime, float totalDuration, AnimPose& controlledHandPose, bool isLeftHand,
                             bool isToControlled, AnimPose& returnHandPose) {
    auto ikNode = getAnimInverseKinematicsNode();
    if (ikNode) {
        float alpha = 1.0f - deltaTime / totalDuration;
        const AnimPose geometryToRigTransform(_geometryToRigTransform);
        AnimPose uncontrolledHandPose;
        if (isLeftHand) {
            uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledLeftHandPose();
        } else {
            uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledRightHandPose();
        }
        if (isToControlled) {
            ::blend(1, &uncontrolledHandPose, &controlledHandPose, alpha, &returnHandPose);
        } else {
            ::blend(1, &controlledHandPose, &uncontrolledHandPose, alpha, &returnHandPose);
        }
        return true;
    }
    return false;
}
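The math inside Rig::transitionHandPose is a straight linear blend driven by the remaining transition time: alpha rises from 0 to 1 as the remaining time falls to zero, pulling the result from one pose toward the other. A scalar sketch of the same formula, with floats standing in for AnimPose and illustrative names:

// Minimal scalar sketch of the hand-pose transition used above.
#include <algorithm>
#include <cassert>

float transitionValue(float timeRemaining, float totalDuration, float from, float to) {
    float alpha = 1.0f - timeRemaining / totalDuration;   // 0 at transition start, 1 when time runs out
    alpha = std::max(0.0f, std::min(1.0f, alpha));
    return from + alpha * (to - from);                    // linear blend, like ::blend for poses
}

int main() {
    const float RELAX_DURATION = 0.6f;
    assert(transitionValue(RELAX_DURATION, RELAX_DURATION, 1.0f, 2.0f) == 1.0f); // start of transition
    assert(transitionValue(0.0f, RELAX_DURATION, 1.0f, 2.0f) == 2.0f);           // end of transition
}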
@@ -352,10 +352,15 @@ private:
    int _nextStateHandlerId { 0 };
    QMutex _stateMutex;

    bool transitionHandPose(float deltaTime, float totalDuration, AnimPose& controlledHandPose, bool isLeftHand,
                            bool isToControlled, AnimPose& returnHandPose);

    bool _isLeftHandControlled { false };
    bool _isRightHandControlled { false };
    float _leftHandRelaxDuration { 0.0f };
    float _rightHandRelaxDuration { 0.0f };
    float _leftHandControlTimeRemaining { 0.0f };
    float _rightHandControlTimeRemaining { 0.0f };
    float _leftHandRelaxTimeRemaining { 0.0f };
    float _rightHandRelaxTimeRemaining { 0.0f };
    AnimPose _lastLeftHandControlledPose;
    AnimPose _lastRightHandControlledPose;
};
@@ -79,6 +79,49 @@ using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
static Mutex _deviceMutex;

// thread-safe
QList<QAudioDeviceInfo> getAvailableDevices(QAudio::Mode mode) {
    // NOTE: availableDevices() clobbers the Qt internal device list
    Lock lock(_deviceMutex);
    return QAudioDeviceInfo::availableDevices(mode);
}

// now called from a background thread, to keep blocking operations off the audio thread
void AudioClient::checkDevices() {
    auto inputDevices = getAvailableDevices(QAudio::AudioInput);
    auto outputDevices = getAvailableDevices(QAudio::AudioOutput);

    if (inputDevices != _inputDevices) {
        _inputDevices.swap(inputDevices);
        emit devicesChanged(QAudio::AudioInput, _inputDevices);
    }

    if (outputDevices != _outputDevices) {
        _outputDevices.swap(outputDevices);
        emit devicesChanged(QAudio::AudioOutput, _outputDevices);
    }
}

QAudioDeviceInfo AudioClient::getActiveAudioDevice(QAudio::Mode mode) const {
    Lock lock(_deviceMutex);

    if (mode == QAudio::AudioInput) {
        return _inputDeviceInfo;
    } else { // if (mode == QAudio::AudioOutput)
        return _outputDeviceInfo;
    }
}

QList<QAudioDeviceInfo> AudioClient::getAudioDevices(QAudio::Mode mode) const {
    Lock lock(_deviceMutex);

    if (mode == QAudio::AudioInput) {
        return _inputDevices;
    } else { // if (mode == QAudio::AudioOutput)
        return _outputDevices;
    }
}

static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
    for (int i = 0; i < numSamples/2; i++) {

@@ -173,8 +216,9 @@ AudioClient::AudioClient() :

    connect(&_receivedAudioStream, &InboundAudioStream::mismatchedAudioCodec, this, &AudioClient::handleMismatchAudioFormat);

    _inputDevices = getDeviceNames(QAudio::AudioInput);
    _outputDevices = getDeviceNames(QAudio::AudioOutput);
    // initialize wasapi; if getAvailableDevices is called from the CheckDevicesThread before this, it will crash
    getAvailableDevices(QAudio::AudioInput);
    getAvailableDevices(QAudio::AudioOutput);


    // start a thread to detect any device changes

@@ -241,13 +285,6 @@ void AudioClient::audioMixerKilled() {
    emit disconnected();
}

// thread-safe
QList<QAudioDeviceInfo> getAvailableDevices(QAudio::Mode mode) {
    // NOTE: availableDevices() clobbers the Qt internal device list
    Lock lock(_deviceMutex);
    return QAudioDeviceInfo::availableDevices(mode);
}

QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& deviceName) {
    QAudioDeviceInfo result;
    foreach(QAudioDeviceInfo audioDevice, getAvailableDevices(mode)) {

@@ -261,7 +298,7 @@ QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& de
}

#ifdef Q_OS_WIN
QString friendlyNameForAudioDevice(IMMDevice* pEndpoint) {
QString getWinDeviceName(IMMDevice* pEndpoint) {
    QString deviceName;
    IPropertyStore* pPropertyStore;
    pEndpoint->OpenPropertyStore(STGM_READ, &pPropertyStore);

@@ -282,7 +319,7 @@ QString friendlyNameForAudioDevice(IMMDevice* pEndpoint) {
    return deviceName;
}

QString AudioClient::friendlyNameForAudioDevice(wchar_t* guid) {
QString AudioClient::getWinDeviceName(wchar_t* guid) {
    QString deviceName;
    HRESULT hr = S_OK;
    CoInitialize(nullptr);

@@ -294,7 +331,7 @@ QString AudioClient::friendlyNameForAudioDevice(wchar_t* guid) {
        printf("Audio Error: device not found\n");
        deviceName = QString("NONE");
    } else {
        deviceName = ::friendlyNameForAudioDevice(pEndpoint);
        deviceName = ::getWinDeviceName(pEndpoint);
    }
    pMMDeviceEnumerator->Release();
    pMMDeviceEnumerator = nullptr;

@@ -377,7 +414,7 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
        printf("Audio Error: device not found\n");
        deviceName = QString("NONE");
    } else {
        deviceName = friendlyNameForAudioDevice(pEndpoint);
        deviceName = getWinDeviceName(pEndpoint);
    }
    pMMDeviceEnumerator->Release();
    pMMDeviceEnumerator = NULL;

@@ -395,6 +432,11 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
    return (mode == QAudio::AudioInput) ? QAudioDeviceInfo::defaultInputDevice() : QAudioDeviceInfo::defaultOutputDevice();
}

bool AudioClient::getNamedAudioDeviceForModeExists(QAudio::Mode mode, const QString& deviceName) {
    return (getNamedAudioDeviceForMode(mode, deviceName).deviceName() == deviceName);
}


// attempt to use the native sample rate and channel count
bool nativeFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
                                QAudioFormat& audioFormat) {

@@ -742,29 +784,22 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {

}

bool AudioClient::switchAudioDevice(QAudio::Mode mode, const QAudioDeviceInfo& deviceInfo) {
    auto device = deviceInfo;

QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) {
    QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
    return deviceInfo.deviceName();
}

QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) {
    QVector<QString> deviceNames;
    const QList<QAudioDeviceInfo> &availableDevice = getAvailableDevices(mode);
    foreach(const QAudioDeviceInfo &audioDevice, availableDevice) {
        deviceNames << audioDevice.deviceName().trimmed();
    if (device.isNull()) {
        device = defaultAudioDeviceForMode(mode);
    }

    if (mode == QAudio::AudioInput) {
        return switchInputToAudioDevice(device);
    } else { // if (mode == QAudio::AudioOutput)
        return switchOutputToAudioDevice(device);
    }
    return deviceNames;
}

bool AudioClient::switchInputToAudioDevice(const QString& inputDeviceName) {
    qCDebug(audioclient) << "switchInputToAudioDevice [" << inputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName).deviceName() << "]";
    return switchInputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName));
}

bool AudioClient::switchOutputToAudioDevice(const QString& outputDeviceName) {
    qCDebug(audioclient) << "switchOutputToAudioDevice [" << outputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName).deviceName() << "]";
    return switchOutputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName));
bool AudioClient::switchAudioDevice(QAudio::Mode mode, const QString& deviceName) {
    return switchAudioDevice(mode, getNamedAudioDeviceForMode(mode, deviceName));
}

void AudioClient::configureReverb() {

@@ -1308,8 +1343,7 @@ void AudioClient::setIsStereoInput(bool isStereoInput) {
    }

        // change in channel count for desired input format, restart the input device
        qCDebug(audioclient) << __FUNCTION__ << "about to call switchInputToAudioDevice:" << _inputAudioDeviceName;
        switchInputToAudioDevice(_inputAudioDeviceName);
        switchInputToAudioDevice(_inputDeviceInfo);
    }
}

@@ -1348,6 +1382,9 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
    qCDebug(audioclient) << __FUNCTION__ << "inputDeviceInfo: [" << inputDeviceInfo.deviceName() << "]";
    bool supportedFormat = false;

    // NOTE: device start() uses the Qt internal device list
    Lock lock(_deviceMutex);

    // cleanup any previously initialized device
    if (_audioInput) {
        // The call to stop() causes _inputDevice to be destructed.

@@ -1360,7 +1397,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
        _audioInput = NULL;
        _numInputCallbackBytes = 0;

        _inputAudioDeviceName = "";
        _inputDeviceInfo = QAudioDeviceInfo();
    }

    if (_inputToNetworkResampler) {

@@ -1375,8 +1412,8 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn

    if (!inputDeviceInfo.isNull()) {
        qCDebug(audioclient) << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
        _inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();
        emit currentInputDeviceChanged(_inputAudioDeviceName);
        _inputDeviceInfo = inputDeviceInfo;
        emit deviceChanged(QAudio::AudioInput, inputDeviceInfo);

        if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
            qCDebug(audioclient) << "The format to be used for audio input is" << _inputFormat;

@@ -1411,10 +1448,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
                int numFrameSamples = calculateNumberOfFrameSamples(_numInputCallbackBytes);
                _inputRingBuffer.resizeForFrameSize(numFrameSamples);

                // NOTE: device start() uses the Qt internal device list
                Lock lock(_deviceMutex);
                _inputDevice = _audioInput->start();
                lock.unlock();

                if (_inputDevice) {
                    connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleMicAudioInput()));

@@ -1464,6 +1498,9 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice

    bool supportedFormat = false;

    // NOTE: device start() uses the Qt internal device list
    Lock lock(_deviceMutex);

    Lock localAudioLock(_localAudioMutex);
    _localSamplesAvailable.exchange(0, std::memory_order_release);

@@ -1489,6 +1526,8 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice

        delete[] _localOutputMixBuffer;
        _localOutputMixBuffer = NULL;

        _outputDeviceInfo = QAudioDeviceInfo();
    }

    if (_networkToOutputResampler) {

@@ -1502,8 +1541,8 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice

    if (!outputDeviceInfo.isNull()) {
        qCDebug(audioclient) << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
        _outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed();
        emit currentOutputDeviceChanged(_outputAudioDeviceName);
        _outputDeviceInfo = outputDeviceInfo;
        emit deviceChanged(QAudio::AudioOutput, outputDeviceInfo);

        if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
            qCDebug(audioclient) << "The format to be used for audio output is" << _outputFormat;

@@ -1578,10 +1617,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice

        _audioOutputIODevice.start();

        // NOTE: device start() uses the Qt internal device list
        Lock lock(_deviceMutex);
        _audioOutput->start(&_audioOutputIODevice);
        lock.unlock();

        // setup a loopback audio output device
        _loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);

@@ -1781,19 +1817,6 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
    return bytesWritten;
}

// now called from a background thread, to keep blocking operations off the audio thread
void AudioClient::checkDevices() {
    QVector<QString> inputDevices = getDeviceNames(QAudio::AudioInput);
    QVector<QString> outputDevices = getDeviceNames(QAudio::AudioOutput);

    if (inputDevices != _inputDevices || outputDevices != _outputDevices) {
        _inputDevices = inputDevices;
        _outputDevices = outputDevices;

        emit deviceChanged();
    }
}

void AudioClient::loadSettings() {
    _receivedAudioStream.setDynamicJitterBufferEnabled(dynamicJitterBufferEnabled.get());
    _receivedAudioStream.setStaticJitterBufferFrames(staticJitterBufferFrames.get());
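A minimal sketch of the locking convention these hunks introduce: every call that can touch Qt's internal audio device list, namely QAudioDeviceInfo::availableDevices() and the device start() calls, is serialized through one mutex. In the sketch below gDeviceMutex and listDevices are hypothetical stand-ins for the file-scope _deviceMutex and getAvailableDevices above:

#include <mutex>
#include <QAudio>
#include <QAudioDeviceInfo>

static std::mutex gDeviceMutex;  // stand-in for AudioClient's file-scope _deviceMutex

QList<QAudioDeviceInfo> listDevices(QAudio::Mode mode) {  // hypothetical helper
    // availableDevices() clobbers Qt's internal device list, so serialize access to it
    std::lock_guard<std::mutex> lock(gDeviceMutex);
    return QAudioDeviceInfo::availableDevices(mode);
}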
@ -123,8 +123,6 @@ public:
float getTimeSinceLastClip() const { return _timeSinceLastClip; }
float getAudioAverageInputLoudness() const { return _lastInputLoudness; }

bool isMuted() { return _muted; }

const AudioIOStats& getStats() const { return _stats; }

int getOutputBufferSize() { return _outputBufferSizeFrames.get(); }

@ -147,10 +145,15 @@ public:
bool outputLocalInjector(AudioInjector* injector) override;

QAudioDeviceInfo getActiveAudioDevice(QAudio::Mode mode) const;
QList<QAudioDeviceInfo> getAudioDevices(QAudio::Mode mode) const;

static const float CALLBACK_ACCELERATOR_RATIO;

bool getNamedAudioDeviceForModeExists(QAudio::Mode mode, const QString& deviceName);

#ifdef Q_OS_WIN
static QString friendlyNameForAudioDevice(wchar_t* guid);
static QString getWinDeviceName(wchar_t* guid);
#endif

public slots:

@ -170,11 +173,14 @@ public slots:
void handleRecordedAudioInput(const QByteArray& audio);
void reset();
void audioMixerKilled();

void toggleMute();
bool isMuted() { return _muted; }

virtual void setIsStereoInput(bool stereo) override;

void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }
void setNoiseReduction(bool isNoiseGateEnabled) { _isNoiseGateEnabled = isNoiseGateEnabled; }

void toggleLocalEcho() { _shouldEchoLocally = !_shouldEchoLocally; }
void toggleServerEcho() { _shouldEchoToServer = !_shouldEchoToServer; }

@ -186,12 +192,9 @@ public slots:
bool shouldLoopbackInjectors() override { return _shouldEchoToServer; }

bool switchInputToAudioDevice(const QString& inputDeviceName);
bool switchOutputToAudioDevice(const QString& outputDeviceName);
QString getDeviceName(QAudio::Mode mode) const { return (mode == QAudio::AudioInput) ?
_inputAudioDeviceName : _outputAudioDeviceName; }
QString getDefaultDeviceName(QAudio::Mode mode);
QVector<QString> getDeviceNames(QAudio::Mode mode);
// calling with a null QAudioDevice will use the system default
bool switchAudioDevice(QAudio::Mode mode, const QAudioDeviceInfo& deviceInfo = QAudioDeviceInfo());
bool switchAudioDevice(QAudio::Mode mode, const QString& deviceName);

float getInputVolume() const { return (_audioInput) ? (float)_audioInput->volume() : 0.0f; }
void setInputVolume(float volume) { if (_audioInput) _audioInput->setVolume(volume); }

@ -213,7 +216,9 @@ signals:
void noiseGateClosed();

void changeDevice(const QAudioDeviceInfo& outputDeviceInfo);
void deviceChanged();

void deviceChanged(QAudio::Mode mode, const QAudioDeviceInfo& device);
void devicesChanged(QAudio::Mode mode, const QList<QAudioDeviceInfo>& devices);

void receivedFirstPacket();
void disconnected();

@ -222,9 +227,6 @@ signals:
void muteEnvironmentRequested(glm::vec3 position, float radius);

void currentOutputDeviceChanged(const QString& name);
void currentInputDeviceChanged(const QString& name);

protected:
AudioClient();
~AudioClient();

@ -291,9 +293,6 @@ private:
MixedProcessedAudioStream _receivedAudioStream;
bool _isStereoInput;

QString _inputAudioDeviceName;
QString _outputAudioDeviceName;

quint64 _outputStarveDetectionStartTimeMsec;
int _outputStarveDetectionCount;

@ -369,8 +368,11 @@ private:
glm::vec3 avatarBoundingBoxCorner;
glm::vec3 avatarBoundingBoxScale;

QVector<QString> _inputDevices;
QVector<QString> _outputDevices;
QAudioDeviceInfo _inputDeviceInfo;
QAudioDeviceInfo _outputDeviceInfo;

QList<QAudioDeviceInfo> _inputDevices;
QList<QAudioDeviceInfo> _outputDevices;

bool _hasReceivedFirstPacket { false };

@ -1504,7 +1504,7 @@ QUrl AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) const {
return _skeletonModelURL.scheme() == "file" ? emptyURL : _skeletonModelURL;
}

void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged) {
void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, bool& skeletonModelUrlChanged) {

if (identity.sequenceId < _identitySequenceId) {
qCDebug(avatars) << "Ignoring older identity packet for avatar" << getSessionUUID()

@ -1517,6 +1517,7 @@ void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityC
if (_firstSkeletonCheck || (identity.skeletonModelURL != cannonicalSkeletonModelURL(emptyURL))) {
setSkeletonModelURL(identity.skeletonModelURL);
identityChanged = true;
skeletonModelUrlChanged = true;
if (_firstSkeletonCheck) {
displayNameChanged = true;
}

@ -368,6 +368,7 @@ public:
virtual ~AvatarData();

static const QUrl& defaultFullAvatarModelUrl();
QUrl cannonicalSkeletonModelURL(const QUrl& empty) const;

virtual bool isMyAvatar() const { return false; }

@ -536,9 +537,8 @@ public:
static void parseAvatarIdentityPacket(const QByteArray& data, Identity& identityOut);

// identityChanged returns true if identity has changed, false otherwise.
// displayNameChanged returns true if displayName has changed, false otherwise.
void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged);
// identityChanged returns true if identity has changed, false otherwise. Similarly for displayNameChanged and skeletonModelUrlChange.
void processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged, bool& skeletonModelUrlChanged);

QByteArray identityByteArray() const;

@ -697,7 +697,6 @@ protected:
QVector<AttachmentData> _attachmentData;
QString _displayName;
QString _sessionDisplayName { };
QUrl cannonicalSkeletonModelURL(const QUrl& empty) const;

QHash<QString, int> _jointIndices; ///< 1-based, since zero is returned for missing keys
QStringList _jointNames; ///< in order of depth-first traversal

@ -148,8 +148,9 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
auto avatar = newOrExistingAvatar(identity.uuid, sendingNode);
bool identityChanged = false;
bool displayNameChanged = false;
bool skeletonModelUrlChanged = false;
// In this case, the "sendingNode" is the Avatar Mixer.
avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged);
avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged, skeletonModelUrlChanged);
}
}

@ -19,12 +19,6 @@ void registerAudioMetaTypes(QScriptEngine* engine) {
qScriptRegisterMetaType(engine, soundSharedPointerToScriptValue, soundSharedPointerFromScriptValue);
}

AudioScriptingInterface::AudioScriptingInterface() :
_localAudioInterface(NULL)
{

}

ScriptAudioInjector* AudioScriptingInterface::playSound(SharedSoundPointer sound, const AudioInjectorOptions& injectorOptions) {
if (QThread::currentThread() != thread()) {
ScriptAudioInjector* injector = NULL;

@ -28,6 +28,7 @@ public:
void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }

protected:
AudioScriptingInterface() {}

// this method is protected to stop C++ callers from calling, but invokable from script
Q_INVOKABLE ScriptAudioInjector* playSound(SharedSoundPointer sound, const AudioInjectorOptions& injectorOptions = AudioInjectorOptions());

@ -44,9 +45,7 @@ signals:
void inputReceived(const QByteArray& inputSamples); /// a frame of mic input audio has been received and processed

private:
AudioScriptingInterface();

AbstractAudioInterface* _localAudioInterface;
AbstractAudioInterface* _localAudioInterface { nullptr };
};

void registerAudioMetaTypes(QScriptEngine* engine);

@ -439,7 +439,7 @@ void TabletProxy::loadQMLSource(const QVariant& path) {
}
}

void TabletProxy::pushOntoStack(const QVariant& path) {
bool TabletProxy::pushOntoStack(const QVariant& path) {
QObject* root = nullptr;
if (!_toolbarMode && _qmlTabletRoot) {
root = _qmlTabletRoot;

@ -457,6 +457,8 @@ void TabletProxy::pushOntoStack(const QVariant& path) {
} else {
qCDebug(scriptengine) << "tablet cannot push QML because _qmlTabletRoot or _desktopWindow is null";
}

return root;
}

void TabletProxy::popFromStack() {

@ -612,15 +614,6 @@ void TabletProxy::removeButton(QObject* tabletButtonProxy) {
}
}

void TabletProxy::updateMicEnabled(const bool micOn) {
auto tablet = getQmlTablet();
if (!tablet) {
//qCCritical(scriptengine) << "Could not find tablet in TabletRoot.qml";
} else {
QMetaObject::invokeMethod(tablet, "setMicEnabled", Qt::AutoConnection, Q_ARG(QVariant, QVariant(micOn)));
}
}

void TabletProxy::updateAudioBar(const double micLevel) {
auto tablet = getQmlTablet();
if (!tablet) {

@ -119,7 +119,9 @@ public:
Q_INVOKABLE void gotoWebScreen(const QString& url, const QString& injectedJavaScriptUrl);

Q_INVOKABLE void loadQMLSource(const QVariant& path);
Q_INVOKABLE void pushOntoStack(const QVariant& path);
// FIXME: This currently relies on a script initializing the tablet (hence the bool denoting success);
// it should be initialized internally so it cannot fail
Q_INVOKABLE bool pushOntoStack(const QVariant& path);
Q_INVOKABLE void popFromStack();

Q_INVOKABLE void loadQMLOnTop(const QVariant& path);

@ -150,13 +152,6 @@ public:
*/
Q_INVOKABLE void removeButton(QObject* tabletButtonProxy);

/**jsdoc
* Updates the tablet's mic enabled state
* @function TabletProxy#updateMicEnabled
* @param micEnabled {bool} mic enabled state
*/
Q_INVOKABLE void updateMicEnabled(const bool micEnabled);

/**jsdoc
* Updates the audio bar in tablet to reflect latest mic level
* @function TabletProxy#updateAudioBar

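A minimal usage sketch (not part of this commit) of the new bool return from pushOntoStack; the QML path is illustrative only, and the tablet name is the one used by the system scripts later in this diff.

// Sketch only: react to the success flag that pushOntoStack() now returns.
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var pushed = tablet.pushOntoStack("TabletAddressDialog.qml"); // hypothetical path
if (!pushed) {
    // The tablet root was not initialized yet; a caller could retry later.
    print("pushOntoStack failed; tablet not ready");
}
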
@ -244,7 +244,7 @@ QString OculusDisplayPlugin::getPreferredAudioInDevice() const {
if (!OVR_SUCCESS(ovr_GetAudioDeviceInGuidStr(buffer))) {
return QString();
}
return AudioClient::friendlyNameForAudioDevice(buffer);
return AudioClient::getWinDeviceName(buffer);
}

QString OculusDisplayPlugin::getPreferredAudioOutDevice() const {

@ -252,7 +252,7 @@ QString OculusDisplayPlugin::getPreferredAudioOutDevice() const {
if (!OVR_SUCCESS(ovr_GetAudioDeviceOutGuidStr(buffer))) {
return QString();
}
return AudioClient::friendlyNameForAudioDevice(buffer);
return AudioClient::getWinDeviceName(buffer);
}

OculusDisplayPlugin::~OculusDisplayPlugin() {

@ -725,7 +725,7 @@ QString OpenVrDisplayPlugin::getPreferredAudioInDevice() const {
std::vector<WCHAR> deviceW;
deviceW.assign(size, INIT);
device.toWCharArray(deviceW.data());
device = AudioClient::friendlyNameForAudioDevice(deviceW.data());
device = AudioClient::getWinDeviceName(deviceW.data());
}
return device;
}

@ -738,7 +738,7 @@ QString OpenVrDisplayPlugin::getPreferredAudioOutDevice() const {
std::vector<WCHAR> deviceW;
deviceW.assign(size, INIT);
device.toWCharArray(deviceW.data());
device = AudioClient::friendlyNameForAudioDevice(deviceW.data());
device = AudioClient::getWinDeviceName(deviceW.data());
}
return device;
}

@ -25,7 +25,6 @@ var DEFAULT_SCRIPTS_COMBINED = [
"system/tablet-goto.js",
"system/marketplaces/marketplaces.js",
"system/edit.js",
"system/selectAudioDevice.js",
"system/notifications.js",
"system/dialTone.js",
"system/firstPersonHMD.js",

@ -33,7 +32,7 @@ var DEFAULT_SCRIPTS_COMBINED = [
];
var DEFAULT_SCRIPTS_SEPARATE = [
"system/controllers/controllerScripts.js",
"system/chat.js"
// "system/chat.js"
];

// add a menu item for debugging

@ -27,7 +27,7 @@ var UNMUTE_ICONS = {
};

function onMuteToggled() {
if (AudioDevice.getMuted()) {
if (Audio.muted) {
button.editProperties(MUTE_ICONS);
} else {
button.editProperties(UNMUTE_ICONS);

@ -46,7 +46,7 @@ function onClicked() {
Entities.editEntity(entity, { textures: JSON.stringify({ "tex.close": HOME_BUTTON_TEXTURE }) });
shouldActivateButton = true;
shouldActivateButton = true;
tablet.loadQMLSource("../Audio.qml");
tablet.loadQMLSource("../audio/Audio.qml");
onAudioScreen = true;
}
}

@ -60,8 +60,8 @@ function onScreenChanged(type, url) {
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var button = tablet.addButton({
icon: AudioDevice.getMuted() ? MUTE_ICONS.icon : UNMUTE_ICONS.icon,
activeIcon: AudioDevice.getMuted() ? MUTE_ICONS.activeIcon : UNMUTE_ICONS.activeIcon,
icon: Audio.muted ? MUTE_ICONS.icon : UNMUTE_ICONS.icon,
activeIcon: Audio.muted ? MUTE_ICONS.activeIcon : UNMUTE_ICONS.activeIcon,
text: TABLET_BUTTON_NAME,
sortOrder: 1
});

@ -70,7 +70,7 @@ onMuteToggled();
button.clicked.connect(onClicked);
tablet.screenChanged.connect(onScreenChanged);
AudioDevice.muteToggled.connect(onMuteToggled);
Audio.mutedChanged.connect(onMuteToggled);

Script.scriptEnding.connect(function () {
if (onAudioScreen) {

@ -78,7 +78,7 @@ Script.scriptEnding.connect(function () {
}
button.clicked.disconnect(onClicked);
tablet.screenChanged.disconnect(onScreenChanged);
AudioDevice.muteToggled.disconnect(onMuteToggled);
Audio.mutedChanged.disconnect(onMuteToggled);
tablet.removeButton(button);
});

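The script changes above and below all follow the same migration: the old AudioDevice mute accessors give way to the Audio object's muted property and mutedChanged signal. A minimal sketch of the pattern, using only the API surface visible in these diffs:

// Old API                        New API (this commit)
// AudioDevice.getMuted()     ->  Audio.muted
// AudioDevice.toggleMute()   ->  Audio.muted = !Audio.muted
// AudioDevice.muteToggled    ->  Audio.mutedChanged
function onMuteToggled() {
    print("Microphone is now " + (Audio.muted ? "muted" : "unmuted"));
}
Audio.mutedChanged.connect(onMuteToggled);
Script.scriptEnding.connect(function () {
    Audio.mutedChanged.disconnect(onMuteToggled);
});
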
@ -39,7 +39,7 @@
Script.scriptEnding.connect(cleanup);

function update(dt) {
if (!AudioDevice.getMuted()) {
if (!Audio.muted) {
if (hasOverlay()) {
deleteOverlay();
}

@ -47,7 +47,7 @@
createOverlay();
} else {
updateOverlay();
}
}
}

function getOffsetPosition() {

@ -98,7 +98,7 @@
function cleanup() {
deleteOverlay();
AudioDevice.muteToggled.disconnect(onMuteToggled);
Audio.muted.disconnect(onMuteToggled);
Script.update.disconnect(update);
}
}()); // END LOCAL_SCOPE

@ -196,9 +196,9 @@ MyAvatar.wentActive.connect(setActiveProperties)

function setAwayProperties() {
isAway = true;
wasMuted = AudioDevice.getMuted();
wasMuted = Audio.muted;
if (!wasMuted) {
AudioDevice.toggleMute();
Audio.muted = !Audio.muted;
}
MyAvatar.setEnableMeshVisible(false); // just for our own display, without changing point of view
playAwayAnimation(); // animation is still seen by others

@ -221,7 +221,7 @@ function setAwayProperties() {
function setActiveProperties() {
isAway = false;
if (!wasMuted) {
AudioDevice.toggleMute();
Audio.muted = !Audio.muted;
}
MyAvatar.setEnableMeshVisible(true); // IWBNI we respected Developer->Avatar->Draw Mesh setting.
stopAwayAnimation();

@ -33,8 +33,8 @@ Audio.disconnected.connect(function(){
Audio.playSound(disconnectSound, soundOptions);
});

AudioDevice.muteToggled.connect(function () {
if (AudioDevice.getMuted()) {
Audio.mutedChanged.connect(function () {
if (Audio.muted) {
Audio.playSound(micMutedSound, soundOptions);
}
});

@ -26,7 +26,7 @@
//
// 1. Set the Event Connector at the bottom of the script.
// example:
// AudioDevice.muteToggled.connect(onMuteStateChanged);
// Audio.mutedChanged.connect(onMuteStateChanged);
//
// 2. Create a new function to produce a text string, do not include new line returns.
// example:

@ -34,7 +34,7 @@
// var muteState,
// muteString;
//
// muteState = AudioDevice.getMuted() ? "muted" : "unmuted";
// muteState = Audio.muted ? "muted" : "unmuted";
// muteString = "Microphone is now " + muteState;
// createNotification(muteString, NotificationType.MUTE_TOGGLE);
// }

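Assembled from the comment above, a sketch of the handler it describes, wired to the renamed signal; createNotification and NotificationType are assumed to be the helpers defined elsewhere in the notifications script.

function onMuteStateChanged() {
    var muteState = Audio.muted ? "muted" : "unmuted";
    var muteString = "Microphone is now " + muteState;
    createNotification(muteString, NotificationType.MUTE_TOGGLE);
}
Audio.mutedChanged.connect(onMuteStateChanged);
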
@ -1,215 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
//
|
||||
// audioDeviceExample.js
|
||||
// examples
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 3/22/14
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example script that demonstrates use of the Menu object
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
(function() { // BEGIN LOCAL_SCOPE
|
||||
|
||||
const INPUT = "Input";
|
||||
const OUTPUT = "Output";
|
||||
const SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT = 300;
|
||||
//
|
||||
// VAR DEFINITIONS
|
||||
//
|
||||
var debugPrintStatements = true;
|
||||
const INPUT_DEVICE_SETTING = "audio_input_device";
|
||||
const OUTPUT_DEVICE_SETTING = "audio_output_device";
|
||||
var audioDevicesList = []; // placeholder for menu items
|
||||
var wasHmdActive = false; // assume it's not active to start
|
||||
var switchedAudioInputToHMD = false;
|
||||
var switchedAudioOutputToHMD = false;
|
||||
var previousSelectedInputAudioDevice = "";
|
||||
var previousSelectedOutputAudioDevice = "";
|
||||
|
||||
var interfaceInputDevice = "";
|
||||
var interfaceOutputDevice = "";
|
||||
|
||||
//
|
||||
// BEGIN FUNCTION DEFINITIONS
|
||||
//
|
||||
function debug() {
|
||||
if (debugPrintStatements) {
|
||||
print.apply(null, [].concat.apply(["selectAudioDevice.js:"], [].map.call(arguments, JSON.stringify)));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function setupAudioMenus() {
|
||||
// menu events can be triggered asynchronously; skip them for 200ms to avoid recursion and false switches
|
||||
removeAudioMenus();
|
||||
|
||||
// Setup audio input devices
|
||||
Menu.addSeparator("Audio", "Input Audio Device");
|
||||
var currentInputDevice = AudioDevice.getInputDevice()
|
||||
for (var i = 0; i < AudioDevice.inputAudioDevices.length; i++) {
|
||||
var audioDeviceMenuString = "Use " + AudioDevice.inputAudioDevices[i] + " for Input";
|
||||
Menu.addMenuItem({
|
||||
menuName: "Audio",
|
||||
menuItemName: audioDeviceMenuString,
|
||||
isCheckable: true,
|
||||
isChecked: AudioDevice.inputAudioDevices[i] == currentInputDevice
|
||||
});
|
||||
audioDevicesList.push(audioDeviceMenuString);
|
||||
}
|
||||
|
||||
// Setup audio output devices
|
||||
Menu.addSeparator("Audio", "Output Audio Device");
|
||||
var currentOutputDevice = AudioDevice.getOutputDevice()
|
||||
for (var i = 0; i < AudioDevice.outputAudioDevices.length; i++) {
|
||||
var audioDeviceMenuString = "Use " + AudioDevice.outputAudioDevices[i] + " for Output";
|
||||
Menu.addMenuItem({
|
||||
menuName: "Audio",
|
||||
menuItemName: audioDeviceMenuString,
|
||||
isCheckable: true,
|
||||
isChecked: AudioDevice.outputAudioDevices[i] == currentOutputDevice
|
||||
});
|
||||
audioDevicesList.push(audioDeviceMenuString);
|
||||
}
|
||||
}
|
||||
|
||||
function removeAudioMenus() {
|
||||
Menu.removeSeparator("Audio", "Input Audio Device");
|
||||
Menu.removeSeparator("Audio", "Output Audio Device");
|
||||
|
||||
for (var index = 0; index < audioDevicesList.length; index++) {
|
||||
if (Menu.menuItemExists("Audio", audioDevicesList[index])) {
|
||||
Menu.removeMenuItem("Audio", audioDevicesList[index]);
|
||||
}
|
||||
}
|
||||
|
||||
Menu.removeMenu("Audio > Devices");
|
||||
|
||||
audioDevicesList = [];
|
||||
}
|
||||
|
||||
function onDevicechanged() {
|
||||
debug("System audio devices changed. Removing and replacing Audio Menus...");
|
||||
setupAudioMenus();
|
||||
}
|
||||
|
||||
function onMenuEvent(audioDeviceMenuString) {
|
||||
if (Menu.isOptionChecked(audioDeviceMenuString) &&
|
||||
(audioDeviceMenuString !== interfaceInputDevice &&
|
||||
audioDeviceMenuString !== interfaceOutputDevice)) {
|
||||
AudioDevice.setDeviceFromMenu(audioDeviceMenuString)
|
||||
}
|
||||
}
|
||||
|
||||
function onCurrentDeviceChanged() {
|
||||
debug("System audio device switched. ");
|
||||
interfaceInputDevice = "Use " + AudioDevice.getInputDevice() + " for Input";
|
||||
interfaceOutputDevice = "Use " + AudioDevice.getOutputDevice() + " for Output";
|
||||
for (var index = 0; index < audioDevicesList.length; index++) {
|
||||
if (audioDevicesList[index] === interfaceInputDevice ||
|
||||
audioDevicesList[index] === interfaceOutputDevice) {
|
||||
if (Menu.isOptionChecked(audioDevicesList[index]) === false)
|
||||
Menu.setIsOptionChecked(audioDevicesList[index], true);
|
||||
} else {
|
||||
if (Menu.isOptionChecked(audioDevicesList[index]) === true)
|
||||
Menu.setIsOptionChecked(audioDevicesList[index], false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function restoreAudio() {
|
||||
if (switchedAudioInputToHMD) {
|
||||
debug("Switching back from HMD preferred audio input to: " + previousSelectedInputAudioDevice);
|
||||
AudioDevice.setInputDeviceAsync(previousSelectedInputAudioDevice)
|
||||
switchedAudioInputToHMD = false;
|
||||
}
|
||||
if (switchedAudioOutputToHMD) {
|
||||
debug("Switching back from HMD preferred audio output to: " + previousSelectedOutputAudioDevice);
|
||||
AudioDevice.setOutputDeviceAsync(previousSelectedOutputAudioDevice)
|
||||
switchedAudioOutputToHMD = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Some HMDs (like Oculus CV1) have a built in audio device. If they
|
||||
// do, then this function will handle switching to that device automatically
|
||||
// when you goActive with the HMD active.
|
||||
function checkHMDAudio() {
|
||||
// HMD Active state is changing; handle switching
|
||||
if (HMD.active != wasHmdActive) {
|
||||
debug("HMD Active state changed!");
|
||||
|
||||
// We're putting the HMD on; switch to those devices
|
||||
if (HMD.active) {
|
||||
debug("HMD is now Active.");
|
||||
var hmdPreferredAudioInput = HMD.preferredAudioInput();
|
||||
var hmdPreferredAudioOutput = HMD.preferredAudioOutput();
|
||||
debug("hmdPreferredAudioInput: " + hmdPreferredAudioInput);
|
||||
debug("hmdPreferredAudioOutput: " + hmdPreferredAudioOutput);
|
||||
|
||||
if (hmdPreferredAudioInput !== "") {
|
||||
debug("HMD has preferred audio input device.");
|
||||
previousSelectedInputAudioDevice = Settings.getValue(INPUT_DEVICE_SETTING);
|
||||
debug("previousSelectedInputAudioDevice: " + previousSelectedInputAudioDevice);
|
||||
if (hmdPreferredAudioInput != previousSelectedInputAudioDevice) {
|
||||
switchedAudioInputToHMD = true;
|
||||
AudioDevice.setInputDeviceAsync(hmdPreferredAudioInput)
|
||||
}
|
||||
}
|
||||
if (hmdPreferredAudioOutput !== "") {
|
||||
debug("HMD has preferred audio output device.");
|
||||
previousSelectedOutputAudioDevice = Settings.getValue(OUTPUT_DEVICE_SETTING);
|
||||
debug("previousSelectedOutputAudioDevice: " + previousSelectedOutputAudioDevice);
|
||||
if (hmdPreferredAudioOutput != previousSelectedOutputAudioDevice) {
|
||||
switchedAudioOutputToHMD = true;
|
||||
AudioDevice.setOutputDeviceAsync(hmdPreferredAudioOutput)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
debug("HMD no longer active. Restoring audio I/O devices...");
|
||||
restoreAudio();
|
||||
}
|
||||
}
|
||||
wasHmdActive = HMD.active;
|
||||
}
|
||||
|
||||
//
|
||||
// END FUNCTION DEFINITIONS
|
||||
//
|
||||
|
||||
//
|
||||
// BEGIN SCRIPT BODY
|
||||
//
|
||||
// Wait for the C++ systems to fire up before trying to do anything with audio devices
|
||||
Script.setTimeout(function () {
|
||||
debug("Connecting deviceChanged(), displayModeChanged(), and switchAudioDevice()...");
|
||||
AudioDevice.deviceChanged.connect(onDevicechanged);
|
||||
AudioDevice.currentInputDeviceChanged.connect(onCurrentDeviceChanged);
|
||||
AudioDevice.currentOutputDeviceChanged.connect(onCurrentDeviceChanged);
|
||||
HMD.displayModeChanged.connect(checkHMDAudio);
|
||||
Menu.menuItemEvent.connect(onMenuEvent);
|
||||
debug("Setting up Audio I/O menu for the first time...");
|
||||
setupAudioMenus();
|
||||
debug("Checking HMD audio status...")
|
||||
checkHMDAudio();
|
||||
}, SELECT_AUDIO_SCRIPT_STARTUP_TIMEOUT);
|
||||
|
||||
debug("Connecting scriptEnding()");
|
||||
Script.scriptEnding.connect(function () {
|
||||
restoreAudio();
|
||||
removeAudioMenus();
|
||||
Menu.menuItemEvent.disconnect(onMenuEvent);
|
||||
HMD.displayModeChanged.disconnect(checkHMDAudio);
|
||||
AudioDevice.currentInputDeviceChanged.disconnect(onCurrentDeviceChanged);
|
||||
AudioDevice.currentOutputDeviceChanged.disconnect(onCurrentDeviceChanged);
|
||||
AudioDevice.deviceChanged.disconnect(onDevicechanged);
|
||||
});
|
||||
|
||||
//
|
||||
// END SCRIPT BODY
|
||||
//
|
||||
|
||||
}()); // END LOCAL_SCOPE
|
|
@ -144,7 +144,9 @@ function onMessage(message) {
isDomainOpen(Settings.getValue("previousSnapshotDomainID"), function (canShare) {
var isGif = fileExtensionMatches(message.data, "gif");
isLoggedIn = Account.isLoggedIn();
isUploadingPrintableStill = canShare && Account.isLoggedIn() && !isGif;
if (!isGif) {
isUploadingPrintableStill = canShare && Account.isLoggedIn();
}
if (canShare) {
if (isLoggedIn) {
print('Sharing snapshot with audience "for_url":', message.data);

@ -185,10 +185,7 @@

//TODO: move to tablet qml?
if (tabletShown) {
var currentMicEnabled = !Menu.isOptionChecked(MUTE_MICROPHONE_MENU_ITEM);
var currentMicLevel = getMicLevel();
gTablet.updateMicEnabled(currentMicEnabled);
gTablet.updateAudioBar(currentMicLevel);
gTablet.updateAudioBar(getMicLevel());
}

if (now - validCheckTime > MSECS_PER_SEC) {

@ -1,308 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
/*jslint nomen: true, plusplus: true, vars: true*/
|
||||
/*global AvatarList, Entities, EntityViewer, Script, SoundCache, Audio, print, randFloat*/
|
||||
//
|
||||
// ACAudioSearchAndInject.js
|
||||
// audio
|
||||
//
|
||||
// Created by Eric Levin and Howard Stearns 2/1/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Keeps track of all sounds within QUERY_RADIUS of an avatar, where a "sound" is specified in entity userData.
|
||||
// Inject as many as practical into the audio mixer.
|
||||
// See acAudioSearchAndCompatibilityEntitySpawner.js.
|
||||
//
|
||||
// This implementation takes some precautions to scale well:
|
||||
// - It doesn't hastle the entity server because it issues at most one octree query every UPDATE_TIME period, regardless of the number of avatars.
|
||||
// - It does not load itself because it only gathers entities once every UPDATE_TIME period, and only
|
||||
// checks entity properties for those small number of entities that are currently playing (plus a RECHECK_TIME period examination of all entities).
|
||||
// This implementation tries to use all the available injectors.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
var MSEC_PER_SEC = 1000;
|
||||
var SOUND_DATA_KEY = "io.highfidelity.soundKey"; // Sound data is specified in userData under this key.
|
||||
var old_sound_data_key = "soundKey"; // For backwards compatibility.
|
||||
var QUERY_RADIUS = 50; // meters
|
||||
var UPDATE_TIME = 100; // ms. We'll update just one thing on this period.
|
||||
var EXPIRATION_TIME = 5 * MSEC_PER_SEC; // ms. Remove sounds that have been out of range for this time.
|
||||
var RECHECK_TIME = 10 * MSEC_PER_SEC; // ms. Check for new userData properties this often when not currently playing.
|
||||
// (By not checking most of the time when not playing, we can efficiently go through all entities without getEntityProperties.)
|
||||
var UPDATES_PER_STATS_LOG = RECHECK_TIME / UPDATE_TIME; // (It's nice to smooth out the results by straddling a recheck.)
|
||||
|
||||
var DEFAULT_SOUND_DATA = {
|
||||
volume: 0.5, // userData cannot specify zero volume with our current method of defaulting.
|
||||
loop: false, // Default must be false with our current method of defaulting, else there's no way to get a false value.
|
||||
playbackGap: MSEC_PER_SEC, // in ms
|
||||
playbackGapRange: 0 // in ms
|
||||
};
|
||||
|
||||
//var AGENT_AVATAR_POSITION = { x: -1.5327, y: 0.672515, z: 5.91573 };
|
||||
var AGENT_AVATAR_POSITION = { x: -2.83785, y: 1.45243, z: -13.6042 };
|
||||
|
||||
//var isACScript = this.EntityViewer !== undefined;
|
||||
var isACScript = true;
|
||||
|
||||
if (isACScript) {
|
||||
Agent.isAvatar = true; // This puts a robot at 0,0,0, but is currently necessary in order to use AvatarList.
|
||||
Avatar.skeletonModelURL = "http://hifi-content.s3.amazonaws.com/ozan/dev/avatars/invisible_avatar/invisible_avatar.fst";
|
||||
Avatar.position = AGENT_AVATAR_POSITION;
|
||||
Agent.isListeningToAudioStream = true;
|
||||
}
|
||||
function ignore() {}
|
||||
function debug() { // Display the arguments not just [Object object].
|
||||
//print.apply(null, [].map.call(arguments, JSON.stringify));
|
||||
}
|
||||
|
||||
function randFloat(low, high) {
|
||||
return low + Math.random() * (high - low);
|
||||
}
|
||||
|
||||
if (isACScript) {
|
||||
EntityViewer.setCenterRadius(QUERY_RADIUS);
|
||||
}
|
||||
|
||||
// ENTITY DATA CACHE
|
||||
//
|
||||
var entityCache = {}; // A dictionary of unexpired EntityData objects.
|
||||
var entityInvalidUserDataCache = {}; // A cache containing the entity IDs that have
|
||||
// previously been identified as containing non-JSON userData.
|
||||
// We use a dictionary here so id lookups are constant time.
|
||||
var examinationCount = 0;
|
||||
function EntityDatum(entityIdentifier) { // Just the data of an entity that we need to know about.
|
||||
// This data is only use for our sound injection. There is no need to store such info in the replicated entity on everyone's computer.
|
||||
var that = this;
|
||||
that.lastUserDataUpdate = 0; // new entity is in need of rechecking user data
|
||||
// State Transitions:
|
||||
// no data => no data | sound data | expired
|
||||
// expired => stop => remove
|
||||
// sound data => downloading
|
||||
// downloading => downloading | waiting
|
||||
// waiting => playing | waiting (if too many already playing)
|
||||
// playing => update position etc | no data
|
||||
that.stop = function stop() {
|
||||
if (!that.sound) {
|
||||
return;
|
||||
}
|
||||
print("stopping sound", entityIdentifier, that.url);
|
||||
delete that.sound;
|
||||
delete that.url;
|
||||
if (!that.injector) {
|
||||
return;
|
||||
}
|
||||
that.injector.stop();
|
||||
delete that.injector;
|
||||
};
|
||||
this.update = function stateTransitions(expirationCutoff, userDataCutoff, now) {
|
||||
if (that.timestamp < expirationCutoff) { // EXPIRED => STOP => REMOVE
|
||||
that.stop(); // Alternatively, we could fade out and then stop...
|
||||
delete entityCache[entityIdentifier];
|
||||
return;
|
||||
}
|
||||
var properties, soundData; // Latest data, pulled from local octree.
|
||||
|
||||
// getEntityProperties locks the tree, which competes with the asynchronous processing of queryOctree results.
|
||||
// Most entity updates are fast and only a very few do getEntityProperties.
|
||||
function ensureSoundData() { // We only getEntityProperities when we need to.
|
||||
if (properties) {
|
||||
return;
|
||||
}
|
||||
properties = Entities.getEntityProperties(entityIdentifier, ['userData', 'position']);
|
||||
examinationCount++; // Collect statistics on how many getEntityProperties we do.
|
||||
debug("updating", that, properties);
|
||||
try {
|
||||
var userData = properties.userData && JSON.parse(properties.userData);
|
||||
soundData = userData && (userData[SOUND_DATA_KEY] || userData[old_sound_data_key]); // Don't store soundData yet. Let state changes compare.
|
||||
that.lastUserDataUpdate = now; // But do update these ...
|
||||
that.url = soundData && soundData.url;
|
||||
that.playAfter = that.url && now;
|
||||
} catch (err) {
|
||||
if (!(entityIdentifier in entityInvalidUserDataCache)) {
|
||||
print(err, properties.userData);
|
||||
entityInvalidUserDataCache[entityIdentifier] = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Stumbling on big new pile of entities will do a lot of getEntityProperties. Once.
|
||||
if (that.lastUserDataUpdate < userDataCutoff) { // NO DATA => SOUND DATA
|
||||
ensureSoundData();
|
||||
}
|
||||
|
||||
if (!that.url) { // NO DATA => NO DATA
|
||||
return that.stop();
|
||||
}
|
||||
|
||||
if (!that.sound) { // SOUND DATA => DOWNLOADING
|
||||
that.sound = SoundCache.getSound(soundData.url); // SoundCache can manage duplicates better than we can.
|
||||
}
|
||||
|
||||
if (!that.sound.downloaded) { // DOWNLOADING => DOWNLOADING
|
||||
return;
|
||||
}
|
||||
|
||||
if (that.playAfter > now) { // DOWNLOADING | WAITING => WAITING
|
||||
return;
|
||||
}
|
||||
|
||||
ensureSoundData(); // We'll try to play/setOptions and will need position, so we might as well get soundData, too.
|
||||
if (soundData.url !== that.url) { // WAITING => NO DATA (update next time around)
|
||||
return that.stop();
|
||||
}
|
||||
|
||||
var options = {
|
||||
position: properties.position,
|
||||
loop: soundData.loop || DEFAULT_SOUND_DATA.loop,
|
||||
volume: soundData.volume || DEFAULT_SOUND_DATA.volume
|
||||
};
|
||||
|
||||
function repeat() {
|
||||
return !options.loop && (soundData.playbackGap >= 0);
|
||||
}
|
||||
|
||||
function randomizedNextPlay() { // time of next play or recheck, randomized to distribute the work
|
||||
var range = soundData.playbackGapRange || DEFAULT_SOUND_DATA.playbackGapRange,
|
||||
base = repeat() ? ((that.sound.duration * MSEC_PER_SEC) + (soundData.playbackGap || DEFAULT_SOUND_DATA.playbackGap)) : RECHECK_TIME;
|
||||
return now + base + randFloat(-Math.min(base, range), range);
|
||||
}
|
||||
|
||||
if (that.injector && soundData.playing === false) {
|
||||
that.injector.stop();
|
||||
that.injector = null;
|
||||
}
|
||||
|
||||
if (!that.injector) {
|
||||
if (soundData.playing === false) { // WAITING => PLAYING | WAITING
|
||||
return;
|
||||
}
|
||||
debug("starting", that, options);
|
||||
that.injector = Audio.playSound(that.sound, options); // Might be null if at at injector limit. Will try again later.
|
||||
if (that.injector) {
|
||||
print("started", entityIdentifier, that.url);
|
||||
} else { // Don't hammer ensureSoundData or injector manager.
|
||||
that.playAfter = randomizedNextPlay();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
that.injector.setOptions(options); // PLAYING => UPDATE POSITION ETC
|
||||
if (!that.injector.playing) { // Subtle: a looping sound will not check playbackGap.
|
||||
if (repeat()) { // WAITING => PLAYING
|
||||
// Setup next play just once, now. Changes won't be looked at while we wait.
|
||||
that.playAfter = randomizedNextPlay();
|
||||
// Subtle: if the restart fails b/c we're at injector limit, we won't try again until next playAfter.
|
||||
that.injector.restart();
|
||||
} else { // PLAYING => NO DATA
|
||||
that.playAfter = Infinity; // was one-shot and we're finished
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar) {
|
||||
ignore(avatarPosition, avatar); // We could use avatars and/or avatarPositions to prioritize which ones to play.
|
||||
var entitySound = entityCache[entityIdentifier];
|
||||
if (!entitySound) {
|
||||
entitySound = entityCache[entityIdentifier] = new EntityDatum(entityIdentifier);
|
||||
}
|
||||
entitySound.timestamp = timestamp; // Might be updated for multiple avatars. That's fine.
|
||||
}
|
||||
|
||||
var nUpdates = UPDATES_PER_STATS_LOG, lastStats = Date.now();
|
||||
|
||||
function updateAllEntityData() { // A fast update of all entities we know about. A few make sounds.
|
||||
var now = Date.now(),
|
||||
expirationCutoff = now - EXPIRATION_TIME,
|
||||
userDataRecheckCutoff = now - RECHECK_TIME;
|
||||
Object.keys(entityCache).forEach(function (entityIdentifier) {
|
||||
entityCache[entityIdentifier].update(expirationCutoff, userDataRecheckCutoff, now);
|
||||
});
|
||||
if (nUpdates-- <= 0) { // Report statistics.
|
||||
// For example, with:
|
||||
// injector-limit = 40 (in C++ code)
|
||||
// N_SOUNDS = 1000 (from userData in, e.g., acAudioSearchCompatibleEntitySpawner.js)
|
||||
// replay-period = 3 + 20 = 23 (seconds, ditto)
|
||||
// stats-period = UPDATES_PER_STATS_LOG * UPDATE_TIME / MSEC_PER_SEC = 10 seconds
|
||||
// The log should show between each stats report:
|
||||
// "start" lines ~= injector-limit * P(finish) = injector-limit * stats-period/replay-period = 17 ?
|
||||
// total attempts at starting ("start" lines + "could not thread" lines) ~= N_SOUNDS = 1000 ?
|
||||
// entities > N_SOUNDS * (1+ N_SILENT_ENTITIES_PER_SOUND) = 11000 + whatever was in the scene before running spawner
|
||||
// sounds = N_SOUNDS = 1000
|
||||
// getEntityPropertiesPerUpdate ~= playing + failed-starts/UPDATES_PER_STATS_LOG + other-rechecks-each-update
|
||||
// = injector-limit + (total attempts - "start" lines)/UPDATES_PER_STATS__LOG
|
||||
// + (entities - playing - failed-starts/UPDATES_PER_STATS_LOG) * P(recheck-in-update)
|
||||
// where failed-starts/UPDATES_PER_STATS_LOG = (1000-17)/100 = 10
|
||||
// = 40 + 10 + (11000 - 40 - 10)*UPDATE_TIME/RECHECK_TIME
|
||||
// = 40 + 10 + 10950*0.01 = 159 (mostly proportional to enties/RECHECK_TIME)
|
||||
// millisecondsPerUpdate ~= UPDATE_TIME = 100 (+ some timer machinery time)
|
||||
// this assignment client activity monitor < 100% cpu
|
||||
var stats = {
|
||||
entities: 0,
|
||||
sounds: 0,
|
||||
playing: 0,
|
||||
getEntityPropertiesPerUpdate: examinationCount / UPDATES_PER_STATS_LOG,
|
||||
millisecondsPerUpdate: (now - lastStats) / UPDATES_PER_STATS_LOG
|
||||
};
|
||||
nUpdates = UPDATES_PER_STATS_LOG;
|
||||
lastStats = now;
|
||||
examinationCount = 0;
|
||||
Object.keys(entityCache).forEach(function (entityIdentifier) {
|
||||
var datum = entityCache[entityIdentifier];
|
||||
stats.entities++;
|
||||
if (datum.url) {
|
||||
stats.sounds++;
|
||||
if (datum.injector && datum.injector.playing) {
|
||||
stats.playing++;
|
||||
}
|
||||
}
|
||||
});
|
||||
print(JSON.stringify(stats));
|
||||
}
|
||||
}
|
||||
|
||||
// Update the set of which EntityData we know about.
|
||||
//
|
||||
function updateEntiesForAvatar(avatarIdentifier) { // Just one piece of update work.
|
||||
// This does at most:
|
||||
// one queryOctree request of the entity server, and
|
||||
// one findEntities geometry query of our own octree, and
|
||||
// a quick internEntityDatum of each of what may be a large number of entityIdentifiers.
|
||||
// The idea is that this is a nice bounded piece of work that should not be done too frequently.
|
||||
// However, it means that we won't learn about new entities until, on average (nAvatars * UPDATE_TIME) + query round trip.
|
||||
var avatar = AvatarList.getAvatar(avatarIdentifier), avatarPosition = avatar && avatar.position;
|
||||
if (!avatarPosition) { // No longer here.
|
||||
return;
|
||||
}
|
||||
var timestamp = Date.now();
|
||||
if (isACScript) {
|
||||
EntityViewer.setPosition(avatarPosition);
|
||||
EntityViewer.queryOctree(); // Requests an update, but there's no telling when we'll actually see different results.
|
||||
}
|
||||
var entities = Entities.findEntities(avatarPosition, QUERY_RADIUS);
|
||||
debug("found", entities.length, "entities near", avatar.name || "unknown", "at", avatarPosition);
|
||||
entities.forEach(function (entityIdentifier) {
|
||||
internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar);
|
||||
});
|
||||
}
|
||||
|
||||
// Slowly update the set of data we have to work with.
|
||||
//
|
||||
var workQueue = [];
|
||||
function updateWorkQueueForAvatarsPresent() { // when nothing else to do, fill queue with individual avatar updates
|
||||
workQueue = AvatarList.getAvatarIdentifiers().map(function (avatarIdentifier) {
|
||||
return function () {
|
||||
updateEntiesForAvatar(avatarIdentifier);
|
||||
};
|
||||
});
|
||||
}
|
||||
Script.setInterval(function () {
|
||||
// There might be thousands of EntityData known to us, but only a few will require any work to update.
|
||||
updateAllEntityData(); // i.e., this better be pretty fast.
|
||||
// Each interval, we do no more than one updateEntitiesforAvatar.
|
||||
if (!workQueue.length) {
|
||||
workQueue = [updateWorkQueueForAvatarsPresent];
|
||||
}
|
||||
workQueue.pop()(); // There's always one
|
||||
}, UPDATE_TIME);
|
|
@ -1,3 +0,0 @@
* home-tutorial-34
* Update tutorial to only start if `HMD.active`
* Update builder's grid to use "Good - Sub-meshes" for collision shape type

@ -1,131 +0,0 @@
|
|||
fireworkURLs = [
|
||||
"atp:/tutorial_models/bomb1.fbx",
|
||||
"atp:/tutorial_models/bomb2.fbx",
|
||||
"atp:/tutorial_models/bomb3.fbx",
|
||||
"atp:/tutorial_models/bomb4.fbx",
|
||||
"atp:/tutorial_models/bomb5.fbx",
|
||||
"atp:/tutorial_models/bomb6.fbx",
|
||||
];
|
||||
|
||||
fireworkBaseProps = {
|
||||
"collisionsWillMove": 1,
|
||||
velocity: {
|
||||
x: 0,
|
||||
y: -0.2,
|
||||
z: 0
|
||||
},
|
||||
"dynamic": 1,
|
||||
"gravity": {
|
||||
"x": 0,
|
||||
"y": -10,
|
||||
"z": 0
|
||||
},
|
||||
"id": "{1c4061bc-b2e7-4435-bc47-3fcc39ae6624}",
|
||||
"modelURL": "atp:/tutorial_models/bomb1.fbx",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"x": 0.11612319946289062,
|
||||
"y": 0,
|
||||
"z": 0.21749019622802734
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 0.24519434571266174,
|
||||
"x": -0.0064739733934402466,
|
||||
"y": -0.12259717285633087,
|
||||
"z": 0.094893023371696472
|
||||
},
|
||||
"rotation": {
|
||||
"w": -0.083054840564727783,
|
||||
"x": 0.93615627288818359,
|
||||
"y": 0.34154272079467773,
|
||||
"z": -0.0073701143264770508
|
||||
},
|
||||
"shapeType": "simple-hull",
|
||||
"type": "Model",
|
||||
"userData": "{\n \"hifiHomeKey\": {\n \"reset\": true\n }\n}"
|
||||
} ;
|
||||
|
||||
|
||||
birdFirework2 = {
|
||||
"collisionsWillMove": 1,
|
||||
velocity: {
|
||||
x: 0,
|
||||
y: -0.01,
|
||||
z: 0
|
||||
},
|
||||
"dynamic": 1,
|
||||
"gravity": {
|
||||
"x": 0,
|
||||
"y": -10,
|
||||
"z": 0
|
||||
},
|
||||
"id": "{ba067084-8d0f-4eeb-a8a1-c6814527c1bb}",
|
||||
"modelURL": "atp:/tutorial_models/bomb2.fbx",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0.014694660902023315,
|
||||
"z": 0
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 0.24011452496051788,
|
||||
"x": -0.12005726248025894,
|
||||
"y": -0.10536260157823563,
|
||||
"z": -0.12005726248025894
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.55410087108612061,
|
||||
"x": 0.36000609397888184,
|
||||
"y": -0.33641564846038818,
|
||||
"z": -0.67092394828796387
|
||||
},
|
||||
"shapeType": "simple-compound",
|
||||
"type": "Model",
|
||||
"userData": "{\n \"hifiHomeKey\": {\n \"reset\": true\n }\n}"
|
||||
};
|
||||
|
||||
|
||||
Step1BlockData = {
|
||||
"clientOnly": 0,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 255
|
||||
},
|
||||
"created": "2016-08-22T22:54:07Z",
|
||||
"dimensions": {
|
||||
"x": 0.20000000298023224,
|
||||
"y": 0.20000000298023224,
|
||||
"z": 0.20000000298023224
|
||||
},
|
||||
name: "tutorial/block",
|
||||
"collisionsWillMove": 1,
|
||||
velocity: {
|
||||
x: 0,
|
||||
y: -0.2,
|
||||
z: 0
|
||||
},
|
||||
"dynamic": 1,
|
||||
"gravity": {
|
||||
"x": 0,
|
||||
"y": -10,
|
||||
"z": 0
|
||||
},
|
||||
"id": "{5c7223f8-3bc5-4cb4-913c-0e93f5994ca2}",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"queryAACube": {
|
||||
"scale": 0.34641015529632568,
|
||||
"x": -0.17320507764816284,
|
||||
"y": -0.17320507764816284,
|
||||
"z": -0.17320507764816284
|
||||
},
|
||||
"rotation": {
|
||||
"w": 1,
|
||||
"x": -0.0001373291015625,
|
||||
"y": -7.62939453125e-05,
|
||||
"z": -0.0003204345703125
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": JSON.stringify({ hifiHomeKey: { reset: true } }),
|
||||
};
|
|
@ -1,181 +0,0 @@
|
|||
// Originally written by James Pollack, modified by Ryan Huffman for the tutorial
|
||||
//
|
||||
// this script turns an entity into an exploder -- anything that collides with it will be vaporized!
|
||||
|
||||
(function() {
|
||||
|
||||
function debug() {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
args.unshift("fire.js | ");
|
||||
print.apply(this, args);
|
||||
}
|
||||
|
||||
var _this = this;
|
||||
|
||||
function Fire() {
|
||||
_this = this;
|
||||
}
|
||||
|
||||
var RED = {
|
||||
red: 255,
|
||||
green: 0,
|
||||
blue: 0
|
||||
};
|
||||
|
||||
var ORANGE = {
|
||||
red: 255,
|
||||
green: 165,
|
||||
blue: 0
|
||||
};
|
||||
|
||||
var YELLOW = {
|
||||
red: 255,
|
||||
green: 255,
|
||||
blue: 0
|
||||
};
|
||||
|
||||
var GREEN = {
|
||||
red: 0,
|
||||
green: 255,
|
||||
blue: 0
|
||||
};
|
||||
|
||||
var BLUE = {
|
||||
red: 0,
|
||||
green: 0,
|
||||
blue: 255
|
||||
};
|
||||
|
||||
var INDIGO = {
|
||||
red: 128,
|
||||
green: 0,
|
||||
blue: 128
|
||||
};
|
||||
|
||||
var VIOLET = {
|
||||
red: 75,
|
||||
green: 0,
|
||||
blue: 130
|
||||
};
|
||||
|
||||
var colors = [RED, ORANGE, YELLOW, GREEN, BLUE, INDIGO, VIOLET];
|
||||
|
||||
var firePitSoundURL = Script.resolvePath("fire_burst.wav");
|
||||
debug("Firepit burst sound url is: ", firePitSoundURL);
|
||||
|
||||
var explodeTextureURL = Script.resolvePath("explode.png");
|
||||
debug("Firepit explode texture url is: ", explodeTextureURL);
|
||||
|
||||
Fire.prototype = {
|
||||
preload: function(entityID) {
|
||||
debug("Preload");
|
||||
this.entityID = entityID;
|
||||
this.EXPLOSION_SOUND = SoundCache.getSound(firePitSoundURL);
|
||||
},
|
||||
collisionWithEntity: function(myID, otherID, collisionInfo) {
|
||||
debug("Collided with entity: ", myID, otherID);
|
||||
var otherProps = Entities.getEntityProperties(otherID);
|
||||
var data = null;
|
||||
try {
|
||||
data = JSON.parse(otherProps.userData);
|
||||
} catch (err) {
|
||||
debug('ERROR GETTING USERDATA!');
|
||||
}
|
||||
if (data === null || "") {
|
||||
debug("Data is null or empty", data);
|
||||
return;
|
||||
} else {
|
||||
debug("Got data", data);
|
||||
if (data.hasOwnProperty('hifiHomeKey')) {
|
||||
debug("Has hifiHomeKey");
|
||||
if (data.hifiHomeKey.reset === true) {
|
||||
debug("Reset is true");
|
||||
_this.playSoundAtCurrentPosition();
|
||||
_this.explodeWithColor();
|
||||
Entities.deleteEntity(otherID)
|
||||
debug("Sending local message");
|
||||
Messages.sendLocalMessage('Entity-Exploded', JSON.stringify({
|
||||
entityID: otherID,
|
||||
position: Entities.getEntityProperties(this.entityID).position
|
||||
}));
|
||||
debug("Done sending local message");
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
explodeWithColor: function() {
|
||||
var myProps = Entities.getEntityProperties(this.entityID);
|
||||
var color = colors[Math.floor(Math.random() * colors.length)];
|
||||
var explosionParticleProperties = {
|
||||
"color": color,
|
||||
"isEmitting": 1,
|
||||
"maxParticles": 1000,
|
||||
"lifespan": 0.25,
|
||||
"emitRate": 1,
|
||||
"emitSpeed": 0.1,
|
||||
"speedSpread": 1,
|
||||
"emitOrientation": Quat.getUp(myProps.rotation),
|
||||
"emitDimensions": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"polarStart": 0,
|
||||
"polarFinish": 0,
|
||||
"azimuthStart": 0,
|
||||
"azimuthFinish": 0,
|
||||
"emitAcceleration": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"accelerationSpread": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"particleRadius": 0.829,
|
||||
"radiusSpread": 0,
|
||||
"radiusStart": 0.361,
|
||||
"radiusFinish": 0.294,
|
||||
"colorSpread": {
|
||||
"red": 0,
|
||||
"green": 0,
|
||||
"blue": 0
|
||||
},
|
||||
"colorStart": {
|
||||
"red": 255,
|
||||
"green": 255,
|
||||
"blue": 255
|
||||
},
|
||||
"colorFinish": {
|
||||
"red": 255,
|
||||
"green": 255,
|
||||
"blue": 255
|
||||
},
|
||||
"alpha": 1,
|
||||
"alphaSpread": 0,
|
||||
"alphaStart": -0.2,
|
||||
"alphaFinish": 0.5,
|
||||
"emitterShouldTrail": 0,
|
||||
"textures": explodeTextureURL,
|
||||
"type": "ParticleEffect",
|
||||
lifetime: 1,
|
||||
position: myProps.position
|
||||
};
|
||||
|
||||
var explosion = Entities.addEntity(explosionParticleProperties);
|
||||
},
|
||||
playSoundAtCurrentPosition: function() {
|
||||
|
||||
var audioProperties = {
|
||||
volume: 0.5,
|
||||
position: Entities.getEntityProperties(this.entityID).position
|
||||
};
|
||||
|
||||
Audio.playSound(this.EXPLOSION_SOUND, audioProperties);
|
||||
},
|
||||
}
|
||||
|
||||
return new Fire();
|
||||
});
|
|
@ -1,52 +0,0 @@
|
|||
// Originally written for the Home content set. Pulled into the tutorial by Ryan Huffman
|
||||
(function() {
|
||||
|
||||
var MINIMUM_LIGHT_INTENSITY = 50.0;
|
||||
var MAXIMUM_LIGHT_INTENSITY = 200.0;
|
||||
var LIGHT_FALLOFF_RADIUS = 0.1;
|
||||
var LIGHT_INTENSITY_RANDOMNESS = 0.1;
|
||||
|
||||
function randFloat(low, high) {
|
||||
return low + Math.random() * (high - low);
|
||||
}
|
||||
|
||||
var _this;
|
||||
|
||||
function FlickeringFlame() {
|
||||
_this = this;
|
||||
}
|
||||
|
||||
var totalTime = 0;
|
||||
var spacer = 2;
|
||||
FlickeringFlame.prototype = {
|
||||
preload: function(entityID) {
|
||||
this.entityID = entityID;
|
||||
Script.update.connect(this.update);
|
||||
},
|
||||
update: function(deltaTime) {
|
||||
|
||||
totalTime += deltaTime;
|
||||
if (totalTime > spacer) {
|
||||
var howManyAvatars = AvatarList.getAvatarIdentifiers().length;
|
||||
var intensity = (MINIMUM_LIGHT_INTENSITY + (MAXIMUM_LIGHT_INTENSITY + (Math.sin(totalTime) * MAXIMUM_LIGHT_INTENSITY)));
|
||||
intensity += randFloat(-LIGHT_INTENSITY_RANDOMNESS, LIGHT_INTENSITY_RANDOMNESS);
|
||||
|
||||
Entities.editEntity(_this.entityID, {
|
||||
intensity: intensity
|
||||
});
|
||||
|
||||
spacer = Math.random(0, 100) * (2 / howManyAvatars);
|
||||
totalTime = 0;
|
||||
} else {
|
||||
//just keep counting
|
||||
}
|
||||
},
|
||||
unload: function() {
|
||||
Script.update.disconnect(this.update)
|
||||
}
|
||||
}
|
||||
|
||||
return new FlickeringFlame
|
||||
|
||||
|
||||
});
|
tutorial/fuse.js
|
@ -1,119 +0,0 @@
|
|||
//
|
||||
// fuse.js
|
||||
//
|
||||
// Created by Ryan Huffman on 9/1/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
(function() {
|
||||
Script.include('utils.js');
|
||||
|
||||
var DEBUG = true;
|
||||
function debug() {
|
||||
if (DEBUG) {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
args.unshift("fuse.js | ");
|
||||
print.apply(this, args);
|
||||
}
|
||||
}
|
||||
|
||||
var active = false;
|
||||
|
||||
var fuseSound = SoundCache.getSound("atp:/tutorial_sounds/fuse.wav");
|
||||
function getChildProperties(entityID, propertyNames) {
|
||||
var childEntityIDs = Entities.getChildrenIDs(entityID);
|
||||
var results = {}
|
||||
for (var i = 0; i < childEntityIDs.length; ++i) {
|
||||
var childEntityID = childEntityIDs[i];
|
||||
var properties = Entities.getEntityProperties(childEntityID, propertyNames);
|
||||
results[childEntityID] = properties;
|
||||
}
|
||||
return results;
|
||||
}
|
||||
var Fuse = function() {
|
||||
};
|
||||
Fuse.prototype = {
|
||||
light: function() {
|
||||
debug("Received light()", this.entityID);
|
||||
|
||||
var visible = Entities.getEntityProperties(this.entityID, ['visible']).visible;
|
||||
if (!visible) {
|
||||
debug("Fuse is not visible, returning");
|
||||
return;
|
||||
}
|
||||
|
||||
if (active) {
|
||||
debug("Fuse is active, returning");
|
||||
return;
|
||||
}
|
||||
active = true;
|
||||
|
||||
Entities.editEntity(this.entityID, {
|
||||
animation: {
|
||||
currentFrame: 1,
|
||||
lastFrame: 150,
|
||||
running: 1,
|
||||
url: "atp:/tutorial_models/fuse.fbx",
|
||||
loop: 0
|
||||
},
|
||||
});
|
||||
var injector = Audio.playSound(fuseSound, {
|
||||
position: Entities.getEntityProperties(this.entityID, 'position').position,
|
||||
volume: 0.7,
|
||||
loop: true
|
||||
});
|
||||
|
||||
var childrenProps = getChildProperties(this.entityID, ['type']);
|
||||
for (var childEntityID in childrenProps) {
|
||||
debug("Updating: ", childEntityID);
|
||||
var props = childrenProps[childEntityID];
|
||||
if (props.type == "ParticleEffect") {
|
||||
Entities.editEntity(childEntityID, {
|
||||
emitRate: 140,
|
||||
});
|
||||
} else if (props.type == "Light") {
|
||||
Entities.editEntity(childEntityID, {
|
||||
visible: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var self = this;
|
||||
Script.setTimeout(function() {
|
||||
debug("Setting off fireworks");
|
||||
var spinnerID = "{dd13fcd5-616f-4749-ab28-2e1e8bc512e9}";
|
||||
Entities.callEntityMethod(spinnerID, "onLit");
|
||||
injector.stop();
|
||||
|
||||
var childrenProps = getChildProperties(self.entityID, ['type']);
|
||||
for (var childEntityID in childrenProps) {
|
||||
debug("Updating: ", childEntityID);
|
||||
var props = childrenProps[childEntityID];
|
||||
if (props.type == "ParticleEffect") {
|
||||
Entities.editEntity(childEntityID, {
|
||||
emitRate: 0,
|
||||
});
|
||||
} else if (props.type == "Light") {
|
||||
Entities.editEntity(childEntityID, {
|
||||
visible: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
}, 4900);
|
||||
|
||||
Script.setTimeout(function() {
|
||||
debug("Setting fuse to inactive");
|
||||
active = false;
|
||||
}, 14000);
|
||||
},
|
||||
preload: function(entityID) {
|
||||
debug("Preload");
|
||||
this.entityID = entityID;
|
||||
},
|
||||
};
|
||||
return new Fuse();
|
||||
});
|
|
@ -1,18 +0,0 @@
|
|||
(function() {
|
||||
Script.include('utils.js');
|
||||
|
||||
var Fuse = function() {
|
||||
};
|
||||
Fuse.prototype = {
|
||||
onLit: function() {
|
||||
print("fuseCollider.js | Lit", this.entityID);
|
||||
var fuseID = "{c8944a13-9acb-4d77-b1ee-851845e98357}"
|
||||
Entities.callEntityMethod(fuseID, "light");
|
||||
},
|
||||
preload: function(entityID) {
|
||||
print("fuseCollider.js | preload");
|
||||
this.entityID = entityID;
|
||||
},
|
||||
};
|
||||
return new Fuse();
|
||||
});
|
|
@ -1,192 +0,0 @@
|
|||
//
|
||||
// Created by Thijs Wenker on September 14, 2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
(function() {
|
||||
var _this;
|
||||
|
||||
function getResourceURL(file) {
|
||||
return 'atp:/' + file;
|
||||
};
|
||||
|
||||
const LIGHTER_ON_SOUND_URL = getResourceURL('tutorial_sounds/lighter_on.wav');
|
||||
const BUTANE_SOUND_URL = getResourceURL('tutorial_sounds/butane.wav');
|
||||
|
||||
// TODO: fix this in the client, changing the sound volume while a sound is playing doesn't seem to work right now
|
||||
const DYNAMIC_SOUND_VOLUME = false;
|
||||
const BUTANE_MIN_SOUND_VOLUME = 0.05;
|
||||
|
||||
const FLAME_LENGTH = 0.05;
|
||||
|
||||
const BUTANE_SOUND_SETTINGS = {
|
||||
volume: 0.4,
|
||||
loop: true,
|
||||
playbackGap: 1000,
|
||||
playbackGapRange: 1000
|
||||
};
|
||||
|
||||
const LIGHTER_ON_SOUND_SETTINGS = {
|
||||
volume: 0.5,
|
||||
loop: false
|
||||
};
|
||||
|
||||
function RemoteLogSender(channel, identifier) {
|
||||
this.channel = channel;
|
||||
this.identifier = identifier;
|
||||
}
|
||||
|
||||
RemoteLogSender.prototype = {
|
||||
channel: null,
|
||||
identifier: null,
|
||||
debug: function(message) {
|
||||
Messages.sendLocalMessage(this.channel, JSON.stringify({
|
||||
message: '[DEBUG ' + this.identifier + '] ' + message
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
var remoteLogSender = null;
|
||||
|
||||
function debugPrint(message) {
|
||||
if (remoteLogSender !== null) {
|
||||
remoteLogSender.debug(message);
|
||||
}
|
||||
}
|
||||
|
||||
ButaneLighter = function() {
|
||||
_this = this;
|
||||
_this.triggerValue = 0.0; // be sure to set this value in the constructor, otherwise it will be a shared value
|
||||
_this.isFiring = false;
|
||||
}
|
||||
|
||||
ButaneLighter.prototype = {
|
||||
entityID: null,
|
||||
lighterOnSound: null,
|
||||
butaneSound: null,
|
||||
lighterOnSoundInjector: null,
|
||||
butaneSoundInjector: null,
|
||||
butaneSoundInjectorOptions: null,
|
||||
lighterParticleEntity: null,
|
||||
buttonBeingPressed: null,
|
||||
triggerValue: null,
|
||||
isFiring: null,
|
||||
getLighterParticleEntity: function() {
|
||||
var result = null;
|
||||
Entities.getChildrenIDs(_this.entityID).forEach(function(entity) {
|
||||
var name = Entities.getEntityProperties(entity, ['name']).name;
|
||||
if (name === 'lighter_particle') {
|
||||
result = entity;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
},
|
||||
preload: function(entityID) {
|
||||
_this.entityID = entityID;
|
||||
_this.lighterOnSound = SoundCache.getSound(LIGHTER_ON_SOUND_URL);
|
||||
_this.butaneSound = SoundCache.getSound(BUTANE_SOUND_URL);
|
||||
var properties = Entities.getEntityProperties(_this.entityID, ['userData']);
|
||||
try {
|
||||
var userData = JSON.parse(properties.userData);
|
||||
if (userData['debug'] !== undefined && userData['debug']['sessionUUID'] === MyAvatar.sessionUUID &&
|
||||
userData['debug']['channel'] !== undefined)
|
||||
{
|
||||
remoteLogSender = new RemoteLogSender(userData['debug']['channel'], _this.entityID);
|
||||
remoteLogSender.debug('Debugger initialized');
|
||||
}
|
||||
} catch (e) {
|
||||
//failed to detect if we have a debugger
|
||||
}
|
||||
debugPrint(_this.getLighterParticleEntity());
|
||||
},
|
||||
startEquip: function(entityID, args) {
|
||||
_this.lighterParticleEntity = _this.getLighterParticleEntity();
|
||||
},
|
||||
continueEquip: function(entityID, args) {
|
||||
_this.triggerValue = Controller.getValue(args[0] === 'left' ? Controller.Standard.LT : Controller.Standard.RT);
|
||||
if (_this.triggerValue > 0.2) {
|
||||
if (!_this.isFiring) {
|
||||
_this.startFiring();
|
||||
}
|
||||
_this.tryToIgnite();
|
||||
_this.updateButaneSound();
|
||||
return;
|
||||
}
|
||||
_this.stopFiring();
|
||||
},
|
||||
startFiring: function() {
|
||||
if (_this.isFiring) {
|
||||
return;
|
||||
}
|
||||
_this.isFiring = true;
|
||||
if (_this.lighterOnSound.downloaded) {
|
||||
// We don't want to override the default volume setting, so lets clone the default SETTINGS object
|
||||
var lighterOnOptions = JSON.parse(JSON.stringify(LIGHTER_ON_SOUND_SETTINGS));
|
||||
lighterOnOptions['position'] = Entities.getEntityProperties(_this.entityID, ['position']).position;
|
||||
_this.lighterOnSoundInjector = Audio.playSound(_this.lighterOnSound, lighterOnOptions);
|
||||
}
|
||||
if (_this.butaneSound.downloaded) {
|
||||
_this.butaneSoundInjectorOptions = JSON.parse(JSON.stringify(BUTANE_SOUND_SETTINGS));
|
||||
_this.butaneSoundInjectorOptions['position'] = Entities.getEntityProperties(_this.lighterParticleEntity, ['position']).position;
|
||||
if (DYNAMIC_SOUND_VOLUME) {
|
||||
_this.butaneSoundInjectorOptions['volume'] = BUTANE_MIN_SOUND_VOLUME;
|
||||
}
|
||||
_this.butaneSoundInjector = Audio.playSound(_this.butaneSound, _this.butaneSoundInjectorOptions);
|
||||
}
|
||||
Entities.editEntity(_this.lighterParticleEntity, {isEmitting: _this.isFiring});
|
||||
|
||||
},
|
||||
stopFiring: function() {
|
||||
if (!_this.isFiring) {
|
||||
return;
|
||||
}
|
||||
_this.isFiring = false;
|
||||
Entities.editEntity(_this.lighterParticleEntity, {isEmitting: _this.isFiring});
|
||||
_this.stopButaneSound();
|
||||
},
|
||||
tryToIgnite: function() {
|
||||
var flameProperties = Entities.getEntityProperties(_this.lighterParticleEntity, ['position', 'rotation']);
|
||||
var pickRay = {
|
||||
origin: flameProperties.position,
|
||||
direction: Quat.inverse(Quat.getFront(flameProperties.rotation))
|
||||
};
|
||||
var intersection = Entities.findRayIntersection(pickRay, true, [], [_this.entityID, _this.lighterParticleEntity]);
|
||||
if (intersection.intersects && intersection.distance <= FLAME_LENGTH) {
|
||||
var properties = Entities.getEntityProperties(intersection.entityID, 'script');
|
||||
if (properties.script !== '') {
|
||||
Entities.callEntityMethod(intersection.entityID, 'onLit', [_this.triggerValue]);
|
||||
debugPrint('Light it up! found: ' + intersection.entityID);
|
||||
}
|
||||
}
|
||||
},
|
||||
releaseEquip: function(entityID, args) {
|
||||
_this.stopFiring();
|
||||
// reset trigger value;
|
||||
_this.triggerValue = 0.0;
|
||||
},
|
||||
updateButaneSound: function() {
|
||||
if (_this.butaneSoundInjector !== null && _this.butaneSoundInjector.isPlaying()) {
|
||||
_this.butaneSoundInjectorOptions = _this.butaneSoundInjector.options;
|
||||
_this.butaneSoundInjectorOptions['position'] = Entities.getEntityProperties(_this.entityID, ['position']).position;
|
||||
if (DYNAMIC_SOUND_VOLUME) {
|
||||
_this.butaneSoundInjectorOptions['volume'] = ((BUTANE_SOUND_SETTINGS.volume - BUTANE_MIN_SOUND_VOLUME) *
|
||||
_this.triggerValue) + BUTANE_MIN_SOUND_VOLUME;
|
||||
}
|
||||
_this.butaneSoundInjector.options = _this.butaneSoundInjectorOptions;
|
||||
}
|
||||
},
|
||||
stopButaneSound: function() {
|
||||
if (_this.butaneSoundInjector !== null && _this.butaneSoundInjector.isPlaying()) {
|
||||
_this.butaneSoundInjector.stop();
|
||||
}
|
||||
_this.butaneSoundInjector = null;
|
||||
},
|
||||
unload: function() {
|
||||
_this.stopButaneSound();
|
||||
},
|
||||
};
|
||||
return new ButaneLighter();
|
||||
})
|
|
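The RemoteLogSender above publishes its debug lines with Messages.sendLocalMessage on whatever channel was stored in the entity's userData.debug.channel. A minimal sketch of a consumer running on the same client, assuming a hypothetical channel name:

var DEBUG_CHANNEL = "butane-lighter-debug"; // assumption; the real channel comes from userData.debug.channel
Messages.subscribe(DEBUG_CHANNEL);
Messages.messageReceived.connect(function(channel, message, senderID, localOnly) {
    if (channel !== DEBUG_CHANNEL) {
        return;
    }
    // Each message is a JSON payload of the form { message: "[DEBUG <entityID>] ..." }.
    print(JSON.parse(message).message);
});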
@ -1,220 +0,0 @@
|
|||
//
|
||||
// Created by Thijs Wenker on September 14, 2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
const TEST_MODE = false;
|
||||
const SCRIPT_URL = 'atp:/tutorial/lighter/butaneLighter.js';
|
||||
|
||||
//Creates an entity and returns a mixed object of the creation properties and the assigned entityID
|
||||
var createEntity = function(entityProperties, parent) {
|
||||
if (parent.rotation !== undefined) {
|
||||
if (entityProperties.rotation !== undefined) {
|
||||
entityProperties.rotation = Quat.multiply(parent.rotation, entityProperties.rotation);
|
||||
} else {
|
||||
entityProperties.rotation = parent.rotation;
|
||||
}
|
||||
}
|
||||
if (parent.position !== undefined) {
|
||||
var localPosition = (parent.rotation !== undefined) ? Vec3.multiplyQbyV(parent.rotation, entityProperties.position) : entityProperties.position;
|
||||
entityProperties.position = Vec3.sum(localPosition, parent.position);
|
||||
}
|
||||
if (parent.id !== undefined) {
|
||||
entityProperties.parentID = parent.id;
|
||||
}
|
||||
entityProperties.id = Entities.addEntity(entityProperties);
|
||||
return entityProperties;
|
||||
};
|
||||
|
||||
createButaneLighter = function(transform) {
|
||||
var entityProperties = {
|
||||
collisionsWillMove: true,
|
||||
dimensions: {
|
||||
x: 0.025599999353289604,
|
||||
y: 0.057399999350309372,
|
||||
z: 0.37419998645782471
|
||||
},
|
||||
dynamic: true,
|
||||
gravity: {
|
||||
x: 0,
|
||||
y: -9.8,
|
||||
z: 0
|
||||
},
|
||||
velocity: {
|
||||
x: 0,
|
||||
y: -0.01,
|
||||
z: 0
|
||||
},
|
||||
modelURL: 'atp:/tutorial_models/lighterIceCreamSandwich.fbx',
|
||||
name: 'BrutaneLighter',
|
||||
shapeType: 'simple-compound',
|
||||
type: 'Model',
|
||||
userData: JSON.stringify({
|
||||
"tag": "equip-temporary",
|
||||
"grabbableKey": {
|
||||
"invertSolidWhileHeld": true
|
||||
},
|
||||
"wearable": {
|
||||
"joints": {
|
||||
"RightHand": [
|
||||
{
|
||||
"x": 0.049671292304992676,
|
||||
"y": 0.09825992584228516,
|
||||
"z": 0.03760027885437012
|
||||
},
|
||||
{
|
||||
"x": 0.6562752723693848,
|
||||
"y": 0.27598991990089417,
|
||||
"z": 0.6638742685317993,
|
||||
"w": -0.22890058159828186
|
||||
}
|
||||
],
|
||||
"LeftHand": [
|
||||
{
|
||||
"x": -0.028073370456695557,
|
||||
"y": 0.09609812498092651,
|
||||
"z": 0.039550721645355225
|
||||
},
|
||||
{
|
||||
"x": -0.6697965264320374,
|
||||
"y": 0.22050897777080536,
|
||||
"z": 0.6544681191444397,
|
||||
"w": 0.27283111214637756
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}),
|
||||
script: SCRIPT_URL
|
||||
};
|
||||
return createEntity(entityProperties, transform);
|
||||
}
|
||||
|
||||
function createFireParticle(butaneLighter) {
|
||||
var entityProperties = {
|
||||
userData: JSON.stringify({ tag: "equip-temporary" }),
|
||||
accelerationSpread: {
|
||||
x: 0.1,
|
||||
y: 0,
|
||||
z: 0.1
|
||||
},
|
||||
alpha: 0.039999999105930328,
|
||||
alphaFinish: 0.039999999105930328,
|
||||
alphaStart: 0.039999999105930328,
|
||||
azimuthFinish: 0.039999999105930328,
|
||||
azimuthStart: 0,
|
||||
dimensions: {
|
||||
x: 0.49194091558456421,
|
||||
y: 0.49194091558456421,
|
||||
z: 0.49194091558456421
|
||||
},
|
||||
emitAcceleration: {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
},
|
||||
emitOrientation: {
|
||||
w: 1,
|
||||
x: -1.52587890625e-05,
|
||||
y: -1.52587890625e-05,
|
||||
z: -1.52587890625e-05
|
||||
},
|
||||
emitRate: 770,
|
||||
emitSpeed: 0.014000000432133675,
|
||||
isEmitting: false,
|
||||
lifespan: 0.37000000476837158,
|
||||
maxParticles: 820,
|
||||
name: 'lighter_particle',
|
||||
particleRadius: 0.0027000000700354576,
|
||||
position: {
|
||||
x: -0.00044769048690795898,
|
||||
y: 0.016354814171791077,
|
||||
z: 0.19217036664485931
|
||||
},
|
||||
radiusFinish: 0.0027000000700354576,
|
||||
radiusSpread: 3,
|
||||
radiusStart: 0.0027000000700354576,
|
||||
rotation: {
|
||||
w: 1,
|
||||
x: -0.0001678466796875,
|
||||
y: -1.52587890625e-05,
|
||||
z: -1.52587890625e-05
|
||||
},
|
||||
speedSpread: 0.56999999284744263,
|
||||
textures: 'atp:/textures/fire3.png',
|
||||
type: 'ParticleEffect',
|
||||
|
||||
|
||||
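// The quoted keys below repeat several of the camelCase keys above with different values;
// in a JavaScript object literal the later value wins, so these override the earlier ones.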
"color": {
|
||||
"red": 255,
|
||||
"green": 255,
|
||||
"blue": 255
|
||||
},
|
||||
"isEmitting": 0,
|
||||
"maxParticles": 820,
|
||||
"lifespan": 0.28,
|
||||
"emitRate": 1100,
|
||||
"emitSpeed": 0.007,
|
||||
"speedSpread": 0.5699999928474426,
|
||||
"emitOrientation": {
|
||||
"x": -0.0000152587890625,
|
||||
"y": -0.0000152587890625,
|
||||
"z": -0.0000152587890625,
|
||||
"w": 1
|
||||
},
|
||||
"emitDimensions": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"polarStart": 0,
|
||||
"polarFinish": 0,
|
||||
"azimuthStart": 0,
|
||||
"azimuthFinish": 0.03999999910593033,
|
||||
"emitAcceleration": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"accelerationSpread": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"particleRadius": 0.0037,
|
||||
"radiusSpread": 3,
|
||||
"radiusStart": 0.008,
|
||||
"radiusFinish": 0.0004,
|
||||
"colorSpread": {
|
||||
"red": 0,
|
||||
"green": 0,
|
||||
"blue": 0
|
||||
},
|
||||
"colorStart": {
|
||||
"red": 255,
|
||||
"green": 255,
|
||||
"blue": 255
|
||||
},
|
||||
"colorFinish": {
|
||||
"red": 255,
|
||||
"green": 255,
|
||||
"blue": 255
|
||||
},
|
||||
"alpha": 0.03999999910593033,
|
||||
"alphaSpread": 0,
|
||||
"alphaStart": 0.141,
|
||||
"alphaFinish": 0.02,
|
||||
"emitterShouldTrail": 0,
|
||||
"textures": "atp:/textures/fire3.png"
|
||||
};
|
||||
return createEntity(entityProperties, butaneLighter);
|
||||
}
|
||||
|
||||
doCreateButaneLighter = function(transform) {
|
||||
var butaneLighter = createButaneLighter(transform);
|
||||
createFireParticle(butaneLighter);
|
||||
return butaneLighter;
|
||||
}
|
|
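A hedged usage sketch for the spawner above: doCreateButaneLighter takes a transform whose optional position, rotation and id fields are consumed by createEntity. The spawn offset below is an illustrative value, not something taken from the tutorial content.

// Spawn a lighter roughly one meter in front of the avatar (illustrative offset).
var spawnTransform = {
    position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.5, z: -1 })),
    rotation: MyAvatar.orientation
};
var lighter = doCreateButaneLighter(spawnTransform);
print("Spawned lighter entity:", lighter.id);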
@ -1,188 +0,0 @@
|
|||
if (!Function.prototype.bind) {
|
||||
Function.prototype.bind = function(oThis) {
|
||||
if (typeof this !== 'function') {
|
||||
// closest thing possible to the ECMAScript 5
|
||||
// internal IsCallable function
|
||||
throw new TypeError('Function.prototype.bind - what is trying to be bound is not callable');
|
||||
}
|
||||
|
||||
var aArgs = Array.prototype.slice.call(arguments, 1),
|
||||
fToBind = this,
|
||||
fNOP = function() {},
|
||||
fBound = function() {
|
||||
return fToBind.apply(this instanceof fNOP
|
||||
? this
|
||||
: oThis,
|
||||
aArgs.concat(Array.prototype.slice.call(arguments)));
|
||||
};
|
||||
|
||||
if (this.prototype) {
|
||||
// Function.prototype doesn't have a prototype property
|
||||
fNOP.prototype = this.prototype;
|
||||
}
|
||||
fBound.prototype = new fNOP();
|
||||
|
||||
return fBound;
|
||||
};
|
||||
}
|
||||
|
||||
function getOption(options, key, defaultValue) {
|
||||
if (options.hasOwnProperty(key)) {
|
||||
return options[key];
|
||||
}
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
var TOKEN_NAME_PREFIX = "ownership_token-";
|
||||
|
||||
function getOwnershipTokenID(parentEntityID) {
|
||||
var childEntityIDs = Entities.getChildrenIDs(parentEntityID);
|
||||
var ownerID = null;
|
||||
var ownerName = '';
|
||||
for (var i = 0; i < childEntityIDs.length; ++i) {
|
||||
var childID = childEntityIDs[i];
|
||||
var properties = Entities.getEntityProperties(childID, ['name', 'userData', 'lifetime', 'age']);
|
||||
var childName = properties.name;
|
||||
if (childName.indexOf(TOKEN_NAME_PREFIX) == 0) {
|
||||
if (ownerID === null || childName < ownerName) {
|
||||
ownerID = childID;
|
||||
ownerName = childName;
|
||||
}
|
||||
}
|
||||
}
|
||||
return ownerID;
|
||||
}
|
||||
|
||||
function createOwnershipToken(name, parentEntityID) {
|
||||
return Entities.addEntity({
|
||||
type: "Box",
|
||||
name: TOKEN_NAME_PREFIX + name,
|
||||
visible: false,
|
||||
parentID: parentEntityID,
|
||||
locationPosition: { x: 0, y: 0, z: 0 },
|
||||
dimensions: { x: 100, y: 100, z: 100 },
|
||||
collisionless: true,
|
||||
lifetime: 5
|
||||
});
|
||||
}
|
||||
|
||||
var DEBUG = true;
|
||||
function debug() {
|
||||
if (DEBUG) {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
print.apply(this, args);
|
||||
}
|
||||
}
|
||||
|
||||
var TOKEN_STATE_DESTROYED = -1;
|
||||
var TOKEN_STATE_UNOWNED = 0;
|
||||
var TOKEN_STATE_REQUESTING_OWNERSHIP = 1;
|
||||
var TOKEN_STATE_OWNED = 2;
|
||||
|
||||
OwnershipToken = function(name, parentEntityID, options) {
|
||||
this.name = MyAvatar.sessionUUID + "-" + Math.floor(Math.random() * 10000000);
|
||||
this.parentEntityID = parentEntityID;
|
||||
|
||||
// How often to check whether the token is available if we don't currently own it
|
||||
this.checkEverySeconds = getOption(options, 'checkEverySeconds', 1000);
|
||||
this.updateTokenLifetimeEvery = getOption(options, 'updateTokenLifetimeEvery', 2000);
|
||||
|
||||
this.onGainedOwnership = getOption(options, 'onGainedOwnership', function() { });
|
||||
this.onLostOwnership = getOption(options, 'onLostOwnership', function() { });
|
||||
|
||||
this.ownershipTokenID = null;
|
||||
this.setState(TOKEN_STATE_UNOWNED);
|
||||
};
|
||||
|
||||
OwnershipToken.prototype = {
|
||||
destroy: function() {
|
||||
debug(this.name, "Destroying token");
|
||||
this.setState(TOKEN_STATE_DESTROYED);
|
||||
},
|
||||
|
||||
setState: function(newState) {
|
||||
if (this.state == newState) {
|
||||
debug(this.name, "Warning: Trying to set state to the current state");
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.updateLifetimeID) {
|
||||
debug(this.name, "Clearing update lifetime interval");
|
||||
Script.clearInterval(this.updateLifetimeID);
|
||||
this.updateLifetimeID = null;
|
||||
}
|
||||
|
||||
if (this.checkOwnershipAvailableID) {
|
||||
Script.clearInterval(this.checkOwnershipAvailableID);
|
||||
this.checkOwnershipAvailableID = null;
|
||||
}
|
||||
|
||||
if (this.state == TOKEN_STATE_OWNED) {
|
||||
this.onLostOwnership(this);
|
||||
}
|
||||
|
||||
if (newState == TOKEN_STATE_UNOWNED) {
|
||||
this.checkOwnershipAvailableID = Script.setInterval(
|
||||
this.tryRequestingOwnership.bind(this), this.checkEverySeconds);
|
||||
|
||||
} else if (newState == TOKEN_STATE_REQUESTING_OWNERSHIP) {
|
||||
|
||||
} else if (newState == TOKEN_STATE_OWNED) {
|
||||
this.onGainedOwnership(this);
|
||||
this.updateLifetimeID = Script.setInterval(
|
||||
this.updateTokenLifetime.bind(this), this.updateTokenLifetimeEvery);
|
||||
} else if (newState == TOKEN_STATE_DESTROYED) {
|
||||
Entities.deleteEntity(this.ownershipTokenID);
|
||||
}
|
||||
|
||||
debug(this.name, "Info: Switching to state:", newState);
|
||||
this.state = newState;
|
||||
},
|
||||
updateTokenLifetime: function() {
|
||||
if (this.state != TOKEN_STATE_OWNED) {
|
||||
debug(this.name, "Error: Trying to update token while it is unowned");
|
||||
return;
|
||||
}
|
||||
|
||||
debug(this.name, "Updating entity lifetime");
|
||||
var age = Entities.getEntityProperties(this.ownershipTokenID, 'age').age;
|
||||
Entities.editEntity(this.ownershipTokenID, {
|
||||
lifetime: age + 5
|
||||
});
|
||||
},
|
||||
tryRequestingOwnership: function() {
|
||||
if (this.state == TOKEN_STATE_REQUESTING_OWNERSHIP || this.state == TOKEN_STATE_OWNED) {
|
||||
debug(this.name, "We already have or are requesting ownership");
|
||||
return;
|
||||
}
|
||||
|
||||
var ownerID = getOwnershipTokenID(this.parentEntityID);
|
||||
if (ownerID !== null) {
|
||||
// Already owned, return
|
||||
debug(this.name, "Token already owned by another client, returning. Owner: " + owenerID + ", Us: " + this.name);
|
||||
return;
|
||||
}
|
||||
|
||||
this.ownershipTokenID = createOwnershipToken(this.name, this.parentEntityID);
|
||||
this.setState(TOKEN_STATE_REQUESTING_OWNERSHIP);
|
||||
|
||||
function checkOwnershipRequest() {
|
||||
var ownerID = getOwnershipTokenID(this.parentEntityID);
|
||||
if (ownerID == this.ownershipTokenID) {
|
||||
debug(this.name, "Info: Obtained ownership");
|
||||
this.setState(TOKEN_STATE_OWNED);
|
||||
} else {
|
||||
if (ownerID === null) {
|
||||
debug(this.name, "Warning: Checked ownership request and no tokens existed");
|
||||
}
|
||||
debug(this.name, "Info: Lost ownership request")
|
||||
this.ownershipTokenID = null;
|
||||
this.setState(TOKEN_STATE_UNOWNED);
|
||||
}
|
||||
}
|
||||
|
||||
Script.setTimeout(checkOwnershipRequest.bind(this), 2000);
|
||||
},
|
||||
};
|
||||
|
||||
debug("Returning from ownershipToken");
|
|
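For context, the OwnershipToken above is consumed by the zone script at the end of this diff; a minimal sketch of that usage pattern, with a placeholder parent entity ID and illustrative callbacks:

var token = new OwnershipToken("tutorial", someParentEntityID, { // someParentEntityID is a placeholder
    onGainedOwnership: function(token) {
        print("This client now owns the shared resource");
    },
    onLostOwnership: function(token) {
        print("Another client took ownership");
    }
});
// When leaving the zone (or on script unload), release the claim:
token.destroy();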
@ -1,84 +0,0 @@
|
|||
//
|
||||
// spinner.js
|
||||
//
|
||||
// Created by Ryan Huffman on 9/1/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
(function() {
|
||||
var DEBUG = true;
|
||||
function debug() {
|
||||
if (DEBUG) {
|
||||
print.apply(this, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
var spinnerSound = SoundCache.getSound("atp:/tutorial_sounds/Pinwheel.L.wav");
|
||||
var Spinner = function() {
|
||||
};
|
||||
function getChildProperties(entityID, propertyNames) {
|
||||
var childEntityIDs = Entities.getChildrenIDs(entityID);
|
||||
var results = {};
|
||||
for (var i = 0; i < childEntityIDs.length; ++i) {
|
||||
var childEntityID = childEntityIDs[i];
|
||||
var properties = Entities.getEntityProperties(childEntityID, propertyNames);
|
||||
results[childEntityID] = properties;
|
||||
}
|
||||
return results;
|
||||
}
|
||||
Spinner.prototype = {
|
||||
onLit: function() {
|
||||
debug("spinner.js | Spinner lit");
|
||||
Entities.editEntity(this.entityID, {
|
||||
"angularDamping": 0.1,
|
||||
"angularVelocity": {
|
||||
"x": 20.471975326538086,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
});
|
||||
var injector = Audio.playSound(spinnerSound, {
|
||||
position: Entities.getEntityProperties(this.entityID, 'position').position,
|
||||
volume: 1.0,
|
||||
loop: false
|
||||
});
|
||||
|
||||
var childrenProps = getChildProperties(this.entityID, ['type']);
|
||||
for (var childEntityID in childrenProps) {
|
||||
var props = childrenProps[childEntityID];
|
||||
if (props.type == "ParticleEffect") {
|
||||
debug("spinner.js | Modifying: ", childEntityID);
|
||||
Entities.editEntity(childEntityID, {
|
||||
emitRate: 35,
|
||||
});
|
||||
}
|
||||
}
|
||||
Messages.sendLocalMessage("Tutorial-Spinner", "wasLit");
|
||||
|
||||
var self = this;
|
||||
Script.setTimeout(function() {
|
||||
debug("spinner.js | Finishing spinner");
|
||||
injector.stop();
|
||||
|
||||
var childrenProps = getChildProperties(self.entityID, ['type']);
|
||||
for (var childEntityID in childrenProps) {
|
||||
var props = childrenProps[childEntityID];
|
||||
if (props.type == "ParticleEffect") {
|
||||
debug("spinner.js | Modifying: ", childEntityID);
|
||||
Entities.editEntity(childEntityID, {
|
||||
emitRate: 0,
|
||||
});
|
||||
}
|
||||
}
|
||||
}, 4900);
|
||||
},
|
||||
preload: function(entityID) {
|
||||
debug("spinner.js | Preload");
|
||||
this.entityID = entityID;
|
||||
},
|
||||
};
|
||||
return new Spinner();
|
||||
});
|
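The spinner reports completion with Messages.sendLocalMessage("Tutorial-Spinner", "wasLit"). The listener lives in the suppressed tutorial.js, so the consumer below is only a sketch of the pattern:

Messages.subscribe("Tutorial-Spinner");
Messages.messageReceived.connect(function(channel, message) {
    if (channel === "Tutorial-Spinner" && message === "wasLit") {
        print("Spinner was lit; advance to the next tutorial step");
    }
});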
Binary file not shown.
Binary file not shown.
1202
tutorial/tutorial.js
File diff suppressed because it is too large
|
@ -1,360 +0,0 @@
|
|||
TUTORIAL_TAG_TO_ENTITY_IDS_MAP = {
|
||||
"teleport-vive": {
|
||||
"{7df1abc4-1b7c-4352-985c-f3f6ad8d65b7}": {
|
||||
"tag": "teleport-vive"
|
||||
}
|
||||
},
|
||||
"teleport-touch": {
|
||||
"{ff064b9e-7fa4-4693-a386-a67b9f92a948}": {
|
||||
"tag": "teleport-touch"
|
||||
}
|
||||
},
|
||||
"turnAround-vive": {
|
||||
"{9b14f224-b2f6-447f-bb86-f5d875cf4c33}": {
|
||||
"tag": "turnAround-vive"
|
||||
}
|
||||
},
|
||||
"turnAround-touch": {
|
||||
"{ce74b3ca-d1c7-4980-bd98-2d488095a39e}": {
|
||||
"tag": "turnAround-touch"
|
||||
}
|
||||
},
|
||||
"teleport": {
|
||||
"{4478f7b5-d3ac-4213-9a7b-ad8cd69575b8}": {
|
||||
"tag": "teleport"
|
||||
}
|
||||
},
|
||||
"finish": {
|
||||
"{340e05b5-88df-4b2b-b43c-756dd714d6d8}": {
|
||||
"tag": "finish"
|
||||
}
|
||||
},
|
||||
"door": {
|
||||
"{9c5b0fee-e695-4516-94cd-153371e3857b}": {
|
||||
"tag": "door"
|
||||
}
|
||||
},
|
||||
"farGrab": {
|
||||
"{70fcd96c-cd59-4f23-9ca5-a167f2f85680}": {
|
||||
"visible": false,
|
||||
"tag": "farGrab"
|
||||
},
|
||||
"{ff7b9793-0d94-4f18-bc09-4ab589126e60}": {
|
||||
"tag": "farGrab"
|
||||
},
|
||||
"{fdd77d2c-af36-41c1-ba57-74b7ae79d996}": {
|
||||
"tag": "farGrab"
|
||||
},
|
||||
"{e11700f6-bc9a-411f-9ddc-bf265d4e3ccf}": {
|
||||
"tag": "farGrab"
|
||||
},
|
||||
"{95850c56-cd1c-42b9-ab6b-a163a6f2878f}": {
|
||||
"tag": "farGrab"
|
||||
}
|
||||
},
|
||||
"nearGrab-vive": {
|
||||
"{88221a22-b710-4d35-852b-5257b0aa77dc}": {
|
||||
"tag": "nearGrab-vive"
|
||||
}
|
||||
},
|
||||
"nearGrab-touch": {
|
||||
"{7c0f2fde-6c5c-459b-bf82-421979cebf2e}": {
|
||||
"tag": "nearGrab-touch"
|
||||
}
|
||||
},
|
||||
"nearGrab": {
|
||||
"{55c861ef-60ca-4722-a6c5-9c6967966ec5}": {
|
||||
"tag": "nearGrab"
|
||||
},
|
||||
"{644d655b-ae66-43b1-9bab-a44b9a8ad632}": {
|
||||
"tag": "nearGrab"
|
||||
},
|
||||
"{8bf0baa1-88d0-448a-a782-100d4413bd82}": {
|
||||
"tag": "nearGrab"
|
||||
},
|
||||
"{5cf22b9c-fb22-4854-8821-554422980b24}": {
|
||||
"visible": false,
|
||||
"tag": "nearGrab"
|
||||
}
|
||||
},
|
||||
"equip-part1-touch": {
|
||||
"{470f0634-8be7-4b52-a8bd-5183d489fcb6}": {
|
||||
"tag": "equip-part1-touch"
|
||||
}
|
||||
},
|
||||
"equip-part1-vive": {
|
||||
"{97ced5e7-fc81-40f9-a9e8-f85b4b30f24c}": {
|
||||
"tag": "equip-part1-vive"
|
||||
}
|
||||
},
|
||||
"equip-part1": {
|
||||
"{d73822ca-0a34-4cf4-a530-3258ac459a14}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{8572d991-5777-45df-97bf-7243d7b12f81}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{da5ea72e-54b6-41ac-b711-742b062b6968}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{c8944a13-9acb-4d77-b1ee-851845e98357}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{e9481c78-1a21-43f7-b54c-58f2efdf3c8f}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{ca3c28f3-15fc-4349-a85e-eaca0fad6434}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{09ddcb94-52a7-4f50-a5a2-db9db28fc519}": {
|
||||
"tag": "equip-part1"
|
||||
},
|
||||
"{dd13fcd5-616f-4749-ab28-2e1e8bc512e9}": {
|
||||
"tag": "equip-part1"
|
||||
}
|
||||
},
|
||||
"equip-part2-vive": {
|
||||
"{b5d17eda-90ab-40cf-b973-efcecb2e992e}": {
|
||||
"tag": "equip-part2-vive"
|
||||
},
|
||||
"{6307cd16-dd1d-4988-a339-578178436b45}": {
|
||||
"tag": "equip-part2-vive"
|
||||
}
|
||||
},
|
||||
"equip-part2-touch": {
|
||||
"{69195139-e020-4739-bb2c-50faebc6860a}": {
|
||||
"tag": "equip-part2-touch"
|
||||
},
|
||||
"{9b0a99ae-221b-4e59-ba3c-d8e64a083774}": {
|
||||
"tag": "equip-part2-touch"
|
||||
}
|
||||
},
|
||||
"bothGrab": {
|
||||
"{14792a6e-dc6f-4e7a-843f-4b109b06b5a4}": {
|
||||
"visible": false,
|
||||
"tag": "bothGrab",
|
||||
"collidable": true
|
||||
},
|
||||
"{215dcd14-88fc-4604-9033-cbd2a660178a}": {
|
||||
"tag": "bothGrab"
|
||||
},
|
||||
"{fbc2e40d-0633-45ac-b1c9-97fc8465f93b}": {
|
||||
"tag": "bothGrab"
|
||||
},
|
||||
"{6752dad6-109d-4dc5-aef7-dc8509468cf4}": {
|
||||
"tag": "bothGrab"
|
||||
},
|
||||
"{178e2c71-dff5-4231-8d28-df47fddf4709}": {
|
||||
"soundKey": {
|
||||
"playbackGapRange": 0,
|
||||
"url": "atp:/sounds/crackling_fire.L.wav",
|
||||
"volume": 0.5,
|
||||
"playbackGap": 5,
|
||||
"playing": false,
|
||||
"loop": true
|
||||
},
|
||||
"tag": "bothGrab"
|
||||
},
|
||||
"{52445ac5-8730-4457-827e-6c076d2c609c}": {
|
||||
"tag": "bothGrab"
|
||||
}
|
||||
},
|
||||
"raiseHands": {
|
||||
"{7139e45d-25cf-470b-b133-c0fda0099d2b}": {
|
||||
"tag": "raiseHands"
|
||||
}
|
||||
},
|
||||
"equip": {
|
||||
"{e7897c9c-f4fa-4989-a383-28af56c2e544}": {
|
||||
"visible": false,
|
||||
"tag": "equip"
|
||||
},
|
||||
"{9df518da-9e65-4b76-8a79-eeefdb0b7310}": {
|
||||
"visible": false,
|
||||
"tag": "equip"
|
||||
},
|
||||
"{1a77c20e-5d9b-4b54-bf20-1416141a7ca8}": {
|
||||
"tag": "equip"
|
||||
}
|
||||
},
|
||||
"orient-vive": {
|
||||
"{95d233ab-ed0a-46e1-b047-1c542688ef3f}": {
|
||||
"tag": "orient-vive"
|
||||
}
|
||||
},
|
||||
"orient-touch": {
|
||||
"{1c95f945-ec46-4aac-b0f1-e64e073dbfaa}": {
|
||||
"tag": "orient-touch"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
TUTORIAL_NAME_TO_ENTITY_PROPERTIES_MAP = {
|
||||
"tutorial/gun_spawn": {
|
||||
"clientOnly": 0,
|
||||
"collisionless": 1,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 255
|
||||
},
|
||||
"created": "2016-09-08T18:38:24Z",
|
||||
"dimensions": {
|
||||
"x": 0.0649842768907547,
|
||||
"y": 0.0649842768907547,
|
||||
"z": 0.0649842768907547
|
||||
},
|
||||
"id": "{9df518da-9e65-4b76-8a79-eeefdb0b7310}",
|
||||
"ignoreForCollisions": 1,
|
||||
"lastEdited": 1481926907366120,
|
||||
"lastEditedBy": "{b80185ea-0936-4397-a5a4-3a64004f545f}",
|
||||
"name": "tutorial/gun_spawn",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"x": 0.60231781005859375,
|
||||
"y": 0.68465065956115723,
|
||||
"z": 0.39223569631576538
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 0.11255607008934021,
|
||||
"x": 0.54603976011276245,
|
||||
"y": 0.62837260961532593,
|
||||
"z": 0.33595764636993408
|
||||
},
|
||||
"rotation": {
|
||||
"w": -0.025101065635681152,
|
||||
"x": 0.70666050910949707,
|
||||
"y": 0.70666050910949707,
|
||||
"z": -0.025131583213806152
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"visible\":false,\"tag\":\"equip\"}",
|
||||
"visible": 0
|
||||
},
|
||||
"tutorial/nearGrab/box_spawn": {
|
||||
"clientOnly": 0,
|
||||
"collisionless": 1,
|
||||
"color": {
|
||||
"blue": 255,
|
||||
"green": 0,
|
||||
"red": 255
|
||||
},
|
||||
"created": "2016-09-08T18:38:24Z",
|
||||
"dimensions": {
|
||||
"x": 0.082253716886043549,
|
||||
"y": 0.082253716886043549,
|
||||
"z": 0.082253716886043549
|
||||
},
|
||||
"id": "{5cf22b9c-fb22-4854-8821-554422980b24}",
|
||||
"ignoreForCollisions": 1,
|
||||
"lastEdited": 1481926907334206,
|
||||
"lastEditedBy": "{b80185ea-0936-4397-a5a4-3a64004f545f}",
|
||||
"name": "tutorial/nearGrab/box_spawn",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"x": 0.61857688426971436,
|
||||
"y": 0.80955326557159424,
|
||||
"z": 0.36191046237945557
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 0.14246761798858643,
|
||||
"x": 0.54734307527542114,
|
||||
"y": 0.73831945657730103,
|
||||
"z": 0.29067665338516235
|
||||
},
|
||||
"rotation": {
|
||||
"w": 1,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": -1.52587890625e-05,
|
||||
"z": -1.52587890625e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"visible\":false,\"tag\":\"nearGrab\"}",
|
||||
"visible": 0
|
||||
},
|
||||
"tutorial/farGrab/box_spawn": {
|
||||
"clientOnly": 0,
|
||||
"collisionless": 1,
|
||||
"color": {
|
||||
"blue": 255,
|
||||
"green": 0,
|
||||
"red": 255
|
||||
},
|
||||
"created": "2016-09-08T18:38:24Z",
|
||||
"dimensions": {
|
||||
"x": 0.16850528120994568,
|
||||
"y": 0.16850528120994568,
|
||||
"z": 0.16850528120994568
|
||||
},
|
||||
"id": "{70fcd96c-cd59-4f23-9ca5-a167f2f85680}",
|
||||
"ignoreForCollisions": 1,
|
||||
"lastEdited": 1481926908795578,
|
||||
"lastEditedBy": "{b80185ea-0936-4397-a5a4-3a64004f545f}",
|
||||
"name": "tutorial/farGrab/box_spawn",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"x": 3.4866282939910889,
|
||||
"y": 0.67159509658813477,
|
||||
"z": 0.47892442345619202
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 0.64707136154174805,
|
||||
"x": 3.2037394046783447,
|
||||
"y": 0.33042514324188232,
|
||||
"z": 0.14542555809020996
|
||||
},
|
||||
"rotation": {
|
||||
"w": 1,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": -1.52587890625e-05,
|
||||
"z": -1.52587890625e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"visible\":false,\"tag\":\"farGrab\"}",
|
||||
"visible": 0
|
||||
},
|
||||
"tutorial/teleport/pad": {
|
||||
"userData": "{\"tag\":\"teleport\"}",
|
||||
"rotation": {
|
||||
"y": -0.9702650308609009,
|
||||
"x": -2.1246911273919977e-05,
|
||||
"z": -4.222852112434339e-06,
|
||||
"w": 0.2420452982187271
|
||||
},
|
||||
"dimensions": {
|
||||
"y": 0.4365682601928711,
|
||||
"x": 2.1751723289489746,
|
||||
"z": 2.175173044204712
|
||||
},
|
||||
"collisionless": 1,
|
||||
"created": "2016-09-08T18:38:24Z",
|
||||
"queryAACube": {
|
||||
"y": -1.7979401350021362,
|
||||
"x": 7.5136213302612305,
|
||||
"scale": 3.106983184814453,
|
||||
"z": -1.4602710008621216
|
||||
},
|
||||
"visible": 0,
|
||||
"angularVelocity": {
|
||||
"y": -0.5235987901687622,
|
||||
"x": 0,
|
||||
"z": 0
|
||||
},
|
||||
"clientOnly": 0,
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"angularDamping": 0,
|
||||
"position": {
|
||||
"y": -0.2444484978914261,
|
||||
"x": 9.067112922668457,
|
||||
"z": 0.09322060644626617
|
||||
},
|
||||
"modelURL": "atp:/alan/dev/Teleport-Pad.fbx",
|
||||
"ignoreForCollisions": 1,
|
||||
"type": "Model",
|
||||
"id": "{4478f7b5-d3ac-4213-9a7b-ad8cd69575b8}",
|
||||
"name": "tutorial/teleport/pad"
|
||||
}
|
||||
};
|
|
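A sketch of how TUTORIAL_TAG_TO_ENTITY_IDS_MAP could be applied per tutorial step. The real consumer is the suppressed tutorial.js, so the helper below is an assumption about the pattern, not the actual implementation:

// Hypothetical helper: show the entities registered for one tutorial tag and apply their overrides.
function showEntitiesForTag(tag) {
    var entities = TUTORIAL_TAG_TO_ENTITY_IDS_MAP[tag] || {};
    for (var entityID in entities) {
        var overrides = entities[entityID];
        Entities.editEntity(entityID, {
            visible: overrides.visible !== undefined ? overrides.visible : true
        });
    }
}
showEntitiesForTag("farGrab");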
@ -1,51 +0,0 @@
|
|||
(function() {
|
||||
var TutorialStartZone = function() {
|
||||
print("TutorialStartZone | Creating");
|
||||
};
|
||||
|
||||
TutorialStartZone.prototype = {
|
||||
preload: function(entityID) {
|
||||
print("TutorialStartZone | Preload");
|
||||
this.entityID = entityID;
|
||||
this.sendStartIntervalID = null;
|
||||
},
|
||||
enterEntity: function() {
|
||||
var self = this;
|
||||
// send message to outer zone
|
||||
print("TutorialStartZone | Entered the tutorial start area");
|
||||
if (HMD.isHMDAvailable() && HMD.isHandControllerAvailable() && HMD.active) {
|
||||
function sendStart() {
|
||||
print("TutorialStartZone | Checking parent ID");
|
||||
var parentID = Entities.getEntityProperties(self.entityID, 'parentID').parentID;
|
||||
print("TutorialStartZone | Parent ID is: ", parentID);
|
||||
if (parentID) {
|
||||
print("TutorialStartZone | Sending start");
|
||||
Entities.callEntityMethod(parentID, 'onEnteredStartZone');
|
||||
} else {
|
||||
print("TutorialStartZone | ERROR: No parent id found on tutorial start zone");
|
||||
}
|
||||
}
|
||||
this.sendStartIntervalID = Script.setInterval(sendStart, 1500);
|
||||
sendStart();
|
||||
} else {
|
||||
print("TutorialStartZone | User tried to go to tutorial without active HMD and hand controllers, sending back to /");
|
||||
Window.alert("To proceed with this tutorial, please connect your Vive or Oculus headset and hand controllers.");
|
||||
location = "/";
|
||||
}
|
||||
},
|
||||
leaveEntity: function() {
|
||||
print("TutorialStartZone | Exited the tutorial start area");
|
||||
if (this.sendStartIntervalID) {
|
||||
Script.clearInterval(this.sendStartIntervalID);
|
||||
}
|
||||
var parentID = Entities.getEntityProperties(this.entityID, 'parentID').parentID;
|
||||
print("TutorialStartZone | Parent ID is: ", parentID);
|
||||
if (parentID) {
|
||||
print("TutorialStartZone | Sending onLeftStartZone");
|
||||
Entities.callEntityMethod(parentID, 'onLeftStartZone');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return new TutorialStartZone();
|
||||
});
|
|
@ -1,142 +0,0 @@
|
|||
if (!Function.prototype.bind) {
|
||||
Function.prototype.bind = function(oThis) {
|
||||
if (typeof this !== 'function') {
|
||||
// closest thing possible to the ECMAScript 5
|
||||
// internal IsCallable function
|
||||
throw new TypeError('Function.prototype.bind - what is trying to be bound is not callable');
|
||||
}
|
||||
|
||||
var aArgs = Array.prototype.slice.call(arguments, 1),
|
||||
fToBind = this,
|
||||
fNOP = function() {},
|
||||
fBound = function() {
|
||||
return fToBind.apply(this instanceof fNOP
|
||||
? this
|
||||
: oThis,
|
||||
aArgs.concat(Array.prototype.slice.call(arguments)));
|
||||
};
|
||||
|
||||
if (this.prototype) {
|
||||
// Function.prototype doesn't have a prototype property
|
||||
fNOP.prototype = this.prototype;
|
||||
}
|
||||
fBound.prototype = new fNOP();
|
||||
|
||||
return fBound;
|
||||
};
|
||||
}
|
||||
|
||||
(function() {
|
||||
Script.include("ownershipToken.js");
|
||||
Script.include("tutorial.js");
|
||||
|
||||
var CHANNEL_AWAY_ENABLE = "Hifi-Away-Enable";
|
||||
function setAwayEnabled(value) {
|
||||
var message = value ? 'enable' : 'disable';
|
||||
Messages.sendLocalMessage(CHANNEL_AWAY_ENABLE, message);
|
||||
}
|
||||
|
||||
var TutorialZone = function() {
|
||||
print("TutorialZone | Creating");
|
||||
this.token = null;
|
||||
};
|
||||
|
||||
TutorialZone.prototype = {
|
||||
keyReleaseHandler: function(event) {
|
||||
print(event.text);
|
||||
if (event.isShifted && event.isAlt) {
|
||||
if (event.text == "F12") {
|
||||
if (!this.tutorialManager.startNextStep()) {
|
||||
this.tutorialManager.startTutorial();
|
||||
}
|
||||
} else if (event.text == "F11") {
|
||||
this.tutorialManager.restartStep();
|
||||
} else if (event.text == "F10") {
|
||||
MyAvatar.shouldRenderLocally = !MyAvatar.shouldRenderLocally;
|
||||
} else if (event.text == "r") {
|
||||
this.tutorialManager.stopTutorial();
|
||||
this.tutorialManager.startTutorial();
|
||||
}
|
||||
}
|
||||
},
|
||||
preload: function(entityID) {
|
||||
print("TutorialZone | Preload");
|
||||
this.entityID = entityID;
|
||||
},
|
||||
onEnteredStartZone: function() {
|
||||
print("TutorialZone | Got onEnteredStartZone");
|
||||
var self = this;
|
||||
if (!this.token) {
|
||||
print("TutorialZone | Creating token");
|
||||
// The start zone has been entered, hide the overlays immediately
|
||||
setAwayEnabled(false);
|
||||
Menu.setIsOptionChecked("Overlays", false);
|
||||
MyAvatar.shouldRenderLocally = false;
|
||||
Toolbars.getToolbar("com.highfidelity.interface.toolbar.system").writeProperty("visible", false);
|
||||
this.token = new OwnershipToken(Math.random() * 100000, this.entityID, {
|
||||
onGainedOwnership: function(token) {
|
||||
print("TutorialZone | GOT OWNERSHIP");
|
||||
if (!self.tutorialManager) {
|
||||
self.tutorialManager = new TutorialManager();
|
||||
}
|
||||
self.tutorialManager.startTutorial();
|
||||
print("TutorialZone | making bound release handler");
|
||||
self.keyReleaseHandlerBound = self.keyReleaseHandler.bind(self);
|
||||
print("TutorialZone | binding");
|
||||
Controller.keyReleaseEvent.connect(self.keyReleaseHandlerBound);
|
||||
print("TutorialZone | done");
|
||||
},
|
||||
onLostOwnership: function(token) {
|
||||
print("TutorialZone | LOST OWNERSHIP");
|
||||
if (self.tutorialManager) {
|
||||
print("TutorialZone | stopping tutorial..");
|
||||
self.tutorialManager.stopTutorial();
|
||||
print("TutorialZone | done");
|
||||
Controller.keyReleaseEvent.disconnect(self.keyReleaseHandlerBound);
|
||||
} else {
|
||||
print("TutorialZone | no tutorial manager...");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
onLeftStartZone: function() {
|
||||
print("TutorialZone | Got onLeftStartZone");
|
||||
|
||||
// If the start zone was exited, and the tutorial hasn't started, go ahead and
|
||||
// re-enable the HUD/Overlays
|
||||
if (!this.tutorialManager) {
|
||||
Menu.setIsOptionChecked("Overlays", true);
|
||||
MyAvatar.shouldRenderLocally = true;
|
||||
setAwayEnabled(true);
|
||||
Toolbars.getToolbar("com.highfidelity.interface.toolbar.system").writeProperty("visible", true);
|
||||
}
|
||||
},
|
||||
|
||||
onEnteredEntryPortal: function() {
|
||||
print("TutorialZone | Got onEnteredEntryPortal");
|
||||
if (this.tutorialManager) {
|
||||
print("TutorialZone | Calling enteredEntryPortal");
|
||||
this.tutorialManager.enteredEntryPortal();
|
||||
}
|
||||
},
|
||||
|
||||
enterEntity: function() {
|
||||
print("TutorialZone | ENTERED THE TUTORIAL AREA");
|
||||
},
|
||||
leaveEntity: function() {
|
||||
print("TutorialZone | EXITED THE TUTORIAL AREA");
|
||||
if (this.token) {
|
||||
print("TutorialZone | Destroying token");
|
||||
this.token.destroy();
|
||||
this.token = null;
|
||||
}
|
||||
if (this.tutorialManager) {
|
||||
this.tutorialManager.stopTutorial();
|
||||
//this.tutorialManager = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return new TutorialZone();
|
||||
});
|