Merge branch 'PAL_v2' of https://github.com/highfidelity/hifi into dk/newMessageStuff

David Kelly 2017-03-16 09:01:13 -07:00
commit f0fad5981a
33 changed files with 702 additions and 156 deletions

View file

@ -17,6 +17,7 @@ module.exports = {
"Clipboard": false,
"Controller": false,
"DialogsManager": false,
"DebugDraw": false,
"Entities": false,
"FaceTracker": false,
"GlobalServices": false,

View file

@ -37,7 +37,6 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
// FIXME - what we'd actually like to do is send to users at ~50% of their present rate down to 30hz. Assume 90 for now.
const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
const unsigned int AVATAR_DATA_SEND_INTERVAL_MSECS = (1.0f / (float) AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND) * 1000;
AvatarMixer::AvatarMixer(ReceivedMessage& message) :
ThreadedAssignment(message)

View file

@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<g id="Layer_2">
</g>
<g>
<path class="st0" d="M25.3,23.7c-0.6,0-1.2,0.2-1.6,0.7S23,25.4,23,26c0,0.6,0.2,1.1,0.7,1.6c0.5,0.5,1,0.7,1.6,0.7
c0.6,0,1.2-0.2,1.6-0.7c0.5-0.5,0.7-1,0.7-1.6c0-0.6-0.2-1.2-0.7-1.6C26.5,23.9,26,23.7,25.3,23.7z"/>
<path class="st0" d="M25.3,17.2c-5,0-9,4-9,9c0,5,4,9,9,9s9-4,9-9C34.4,21.2,30.3,17.2,25.3,17.2z M31.5,26c0,0.3-0.1,0.6-0.3,0.8
c-0.2,0.2-0.5,0.3-0.8,0.3c-0.2,0-0.3,0-0.5-0.1c-0.1,0-0.3,0-0.5-0.1c-0.2,0.6-0.3,1.1-0.6,1.4c0.2,0.1,0.3,0.2,0.4,0.3h0.1
c0.2,0.1,0.4,0.2,0.4,0.2c0.5,0.5,0.5,1.1,0,1.6c-0.2,0.2-0.5,0.3-0.8,0.3c-0.3,0-0.6-0.1-0.8-0.3c0,0-0.1-0.2-0.2-0.4
c0,0,0-0.1-0.1-0.1c-0.1-0.1-0.2-0.2-0.2-0.4c-0.4,0.3-0.9,0.5-1.4,0.6c0.1,0.2,0.1,0.4,0.1,0.5c0.1,0.2,0.1,0.3,0.1,0.5
c0,0.3-0.1,0.6-0.3,0.8c-0.2,0.2-0.5,0.3-0.8,0.3c-0.3,0-0.6-0.1-0.8-0.3c-0.2-0.2-0.3-0.5-0.3-0.8c0-0.2,0-0.3,0.1-0.5
c0-0.1,0-0.3,0.1-0.5c-0.5-0.1-1-0.3-1.4-0.6c-0.1,0.2-0.2,0.3-0.2,0.4L22.8,30c0,0.1-0.1,0.2-0.2,0.4c-0.2,0.2-0.5,0.3-0.8,0.3
c-0.3,0-0.6-0.1-0.8-0.3c-0.5-0.5-0.5-1.1,0-1.6c0.1-0.1,0.2-0.2,0.5-0.2c0-0.1,0.2-0.2,0.4-0.3c-0.2-0.3-0.4-0.8-0.6-1.4
C21.1,27,20.9,27,20.7,27V27c-0.1,0.1-0.2,0.1-0.5,0.1c-0.3,0-0.6-0.1-0.8-0.3c-0.2-0.2-0.3-0.5-0.3-0.8c0-0.3,0.1-0.6,0.3-0.8
c0.2-0.2,0.5-0.3,0.8-0.3c0.2,0,0.3,0,0.5,0.1c0.1,0,0.3,0,0.5,0.1c0.1-0.5,0.3-1,0.6-1.4c-0.1,0-0.2-0.1-0.4-0.2h-0.1
c-0.2-0.1-0.3-0.2-0.4-0.3c-0.5-0.5-0.5-1,0-1.5c0.5-0.5,1.1-0.5,1.6,0c0.1,0.1,0.2,0.2,0.2,0.4c0.1,0.1,0.2,0.2,0.3,0.5
c0.4-0.3,0.9-0.5,1.4-0.6c-0.1-0.2-0.1-0.3-0.1-0.5v-0.1c-0.1-0.2-0.1-0.3-0.1-0.5c0-0.3,0.1-0.6,0.3-0.8c0.2-0.2,0.5-0.3,0.8-0.3
c0.3,0,0.6,0.1,0.8,0.3c0.2,0.2,0.3,0.5,0.3,0.8c0,0.2,0,0.3-0.1,0.5v0.1c0,0.2,0,0.3-0.1,0.5c0.4,0.1,0.9,0.3,1.4,0.6
c0-0.2,0.1-0.3,0.2-0.5c0-0.1,0.1-0.2,0.3-0.4c0.2-0.2,0.4-0.3,0.8-0.3c0.3,0,0.6,0.1,0.8,0.3c0.5,0.5,0.5,1.1,0,1.6
c-0.2,0.2-0.3,0.2-0.4,0.3c-0.1,0.1-0.2,0.2-0.5,0.2c0.3,0.5,0.5,1,0.6,1.4c0.2-0.1,0.3-0.1,0.5-0.1H30c0.2-0.1,0.3-0.1,0.5-0.1
c0.3,0,0.6,0.1,0.8,0.3C31.4,25.5,31.5,25.7,31.5,26L31.5,26z"/>
</g>
<path class="st0" d="M22.3,15.4v-2.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v3.5C20.7,15.9,21.5,15.6,22.3,15.4z"/>
<path class="st0" d="M25.3,15c0.4,0,0.8,0,1.1,0.1V8.6c0-0.6-0.5-1.2-1.2-1.2S24.1,8,24.1,8.6V15C24.5,15,24.9,15,25.3,15z"/>
<path class="st0" d="M30.6,16.3V3.6c0-0.6-0.5-1.2-1.2-1.2S28.3,3,28.3,3.6v11.7C29.1,15.6,29.9,15.9,30.6,16.3z"/>
<path class="st0" d="M48,24.9h-0.6v-2.1c0-0.6-0.5-1.2-1.2-1.2S45,22.2,45,22.9v2.1h-1.8V12c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2
v12.9H39v-9.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2V36c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-8.7h1.8v12.9
c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V27.3H45v2.9c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.9H48c0.6,0,1.2-0.5,1.2-1.2
S48.6,24.9,48,24.9z"/>
<path class="st0" d="M13.9,12c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v12.9H9.7v-4.6c0-0.6-0.5-1.2-1.2-1.2
c-0.6,0-1.2,0.5-1.2,1.2v4.6H5.5v-2.1c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2v2.1H2.6c-0.6,0-1.2,0.5-1.2,1.2s0.5,1.2,1.2,1.2h0.6
v2.1c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.1h1.8v6.2c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-6.2h1.8v12.1
c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V12z"/>
<path class="st0" d="M28.3,37v9.8c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V36.1C29.9,36.5,29.1,36.8,28.3,37z"/>
<path class="st0" d="M25.3,37.5c-0.4,0-0.8,0-1.2-0.1v4.5c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-4.5
C26.1,37.4,25.7,37.5,25.3,37.5z"/>
<path class="st0" d="M19.9,36.1v3.3c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V37C21.5,36.8,20.7,36.5,19.9,36.1z"/>
<rect x="12" y="24.6" class="st0" width="6.9" height="3"/>
<rect x="32.9" y="24.6" class="st0" width="5.1" height="3"/>
</svg>

View file

@ -0,0 +1,30 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<g id="Layer_2">
</g>
<path class="st0" d="M22.3,15.4v-2.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v3.5C20.7,15.9,21.5,15.6,22.3,15.4z"/>
<path class="st0" d="M25.3,15c0.4,0,0.8,0,1.1,0.1V8.6c0-0.6-0.5-1.2-1.2-1.2S24.1,8,24.1,8.6V15C24.5,15,24.9,15,25.3,15z"/>
<path class="st0" d="M30.6,16.3V3.6c0-0.6-0.5-1.2-1.2-1.2S28.3,3,28.3,3.6v11.7C29.1,15.6,29.9,15.9,30.6,16.3z"/>
<path class="st0" d="M48,24.9h-0.6v-2.1c0-0.6-0.5-1.2-1.2-1.2S45,22.2,45,22.9v2.1h-1.8V12c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2
v12.9H39v-9.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2V36c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-8.7h1.8v12.9
c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V27.3H45v2.9c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.9H48c0.6,0,1.2-0.5,1.2-1.2
S48.6,24.9,48,24.9z"/>
<path class="st0" d="M13.9,12c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v12.9H9.7v-4.6c0-0.6-0.5-1.2-1.2-1.2
c-0.6,0-1.2,0.5-1.2,1.2v4.6H5.5v-2.1c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2v2.1H2.6c-0.6,0-1.2,0.5-1.2,1.2s0.5,1.2,1.2,1.2h0.6
v2.1c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.1h1.8v6.2c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-6.2h1.8v12.1
c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V12z"/>
<path class="st0" d="M28.3,37v9.8c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V36.1C29.9,36.5,29.1,36.8,28.3,37z"/>
<path class="st0" d="M25.3,37.5c-0.4,0-0.8,0-1.2-0.1v4.5c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-4.5
C26.1,37.4,25.7,37.5,25.3,37.5z"/>
<path class="st0" d="M19.9,36.1v3.3c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V37C21.5,36.8,20.7,36.5,19.9,36.1z"/>
<rect x="12" y="24.6" class="st0" width="7.3" height="3"/>
<rect x="32.9" y="24.6" class="st0" width="5.3" height="3"/>
<path class="st0" d="M25.3,17c-5,0-9,4-9,9c0,5,4,9,9,9s9-4,9-9C34.4,21,30.3,17,25.3,17z M24.1,29.7c0,0.5-0.6,1-1.4,1
s-1.4-0.4-1.4-1v-7.3c0-0.5,0.6-1,1.4-1s1.4,0.4,1.4,1V29.7z M29.3,29.7c0,0.5-0.6,1-1.4,1c-0.8,0-1.4-0.4-1.4-1v-7.3
c0-0.5,0.6-1,1.4-1c0.8,0,1.4,0.4,1.4,1V29.7z"/>
</svg>

View file

@ -0,0 +1,30 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<g id="Layer_2">
</g>
<path class="st0" d="M22.3,15.4v-2.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v3.5C20.7,15.9,21.5,15.6,22.3,15.4z"/>
<path class="st0" d="M25.3,15c0.4,0,0.8,0,1.1,0.1V8.6c0-0.6-0.5-1.2-1.2-1.2S24.1,8,24.1,8.6V15C24.5,15,24.9,15,25.3,15z"/>
<path class="st0" d="M30.6,16.3V3.6c0-0.6-0.5-1.2-1.2-1.2S28.3,3,28.3,3.6v11.7C29.1,15.6,29.9,15.9,30.6,16.3z"/>
<path class="st0" d="M48,24.9h-0.6v-2.1c0-0.6-0.5-1.2-1.2-1.2S45,22.2,45,22.9v2.1h-1.8V12c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2
v12.9H39v-9.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2V36c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-8.7h1.8v12.9
c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V27.3H45v2.9c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.9H48c0.6,0,1.2-0.5,1.2-1.2
S48.6,24.9,48,24.9z"/>
<path class="st0" d="M13.9,12c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v12.9H9.7v-4.6c0-0.6-0.5-1.2-1.2-1.2
c-0.6,0-1.2,0.5-1.2,1.2v4.6H5.5v-2.1c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2v2.1H2.6c-0.6,0-1.2,0.5-1.2,1.2s0.5,1.2,1.2,1.2h0.6
v2.1c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.1h1.8v6.2c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-6.2h1.8v12.1
c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V12z"/>
<path class="st0" d="M28.3,37v9.8c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V36.1C29.9,36.5,29.1,36.8,28.3,37z"/>
<path class="st0" d="M25.3,37.5c-0.4,0-0.8,0-1.2-0.1v4.5c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-4.5
C26.1,37.4,25.7,37.5,25.3,37.5z"/>
<path class="st0" d="M19.9,36.1v3.3c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V37C21.5,36.8,20.7,36.5,19.9,36.1z"/>
<path class="st0" d="M25.3,17.2c-5,0-9,4-9,9c0,5,4,9,9,9s9-4,9-9C34.4,21.2,30.3,17.2,25.3,17.2z M30.3,26.1l-6,5.4
c-0.1,0.1-0.2,0.1-0.4,0.1c-0.1,0-0.3,0-0.3-0.1c-0.3-0.1-0.5-0.4-0.5-0.5V20.9c0-0.1,0.2-0.4,0.4-0.5c0.1,0,0.2-0.1,0.3-0.1
c0.2,0,0.3,0,0.4,0.1l6,4.8c0.1,0.1,0.2,0.3,0.2,0.4C30.5,25.8,30.4,26,30.3,26.1z"/>
<rect x="12" y="24.6" class="st0" width="7.3" height="3"/>
<rect x="32.9" y="24.6" class="st0" width="7.3" height="3"/>
</svg>

View file

@ -181,6 +181,31 @@ Item {
root.avatarMixerOutPps + "pps, " +
root.myAvatarSendRate.toFixed(2) + "hz";
}
StatText {
visible: root.expanded;
text: "Audio Mixer In: " + root.audioMixerInKbps + " kbps, " +
root.audioMixerInPps + "pps";
}
StatText {
visible: root.expanded;
text: "Audio In Audio: " + root.audioAudioInboundPPS + " pps, " +
"Silent: " + root.audioSilentInboundPPS + " pps";
}
StatText {
visible: root.expanded;
text: "Audio Mixer Out: " + root.audioMixerOutKbps + " kbps, " +
root.audioMixerOutPps + "pps";
}
StatText {
visible: root.expanded;
text: "Audio Out Mic: " + root.audioMicOutboundPPS + " pps, " +
"Silent: " + root.audioSilentOutboundPPS + " pps";
}
StatText {
visible: root.expanded;
text: "Audio Codec: " + root.audioCodec + " Noise Gate: " +
root.audioNoiseGate;
}
StatText {
visible: root.expanded;
text: "Downloads: " + root.downloads + "/" + root.downloadLimit +

View file

@ -27,7 +27,7 @@ Item {
// Properties
property string profileUrl: "";
property string defaultBaseUrl: "http://highfidelity.com";
property string defaultBaseUrl: location.metaverseServerUrl;
property string connectionStatus : ""
property string uuid: ""
property string displayName: ""
@ -79,21 +79,22 @@ Item {
}
StateImage {
id: infoHoverImage;
visible: avatarImageMouseArea.containsMouse ? true : false;
visible: false;
imageURL: "../../images/info-icon-2-state.svg";
size: 32;
buttonState: 1;
anchors.centerIn: parent;
}
MouseArea {
id: avatarImageMouseArea;
anchors.fill: parent
enabled: selected || isMyCard;
hoverEnabled: enabled
onClicked: {
userInfoViewer.url = "http://highfidelity.com/users/" + userName;
userInfoViewer.url = defaultBaseUrl + "/users/" + userName;
userInfoViewer.visible = true;
}
onEntered: infoHoverImage.visible = true;
onExited: infoHoverImage.visible = false;
}
}
@ -116,15 +117,15 @@ Item {
id: myDisplayName
visible: isMyCard
// Size
width: parent.width - avatarImage.width - anchors.leftMargin*2 - anchors.rightMargin;
width: parent.width - avatarImage.width - anchors.leftMargin - anchors.rightMargin*2;
height: 40
// Anchors
anchors.top: avatarImage.top
anchors.left: avatarImage.right
anchors.leftMargin: 5;
anchors.leftMargin: avatarImage.visible ? 5 : 0;
anchors.rightMargin: 5;
// Style
color: myDisplayNameMouseArea.containsMouse ? hifi.colors.lightGrayText : hifi.colors.textFieldLightBackground
color: hifi.colors.textFieldLightBackground
border.color: hifi.colors.blueHighlight
border.width: 0
TextInput {
@ -165,7 +166,6 @@ Item {
}
}
MouseArea {
id: myDisplayNameMouseArea;
anchors.fill: parent
hoverEnabled: true
onClicked: {
@ -182,6 +182,8 @@ Item {
pal.currentlyEditingDisplayName = true
myDisplayNameText.autoScroll = true;
}
onEntered: myDisplayName.color = hifi.colors.lightGrayText;
onExited: myDisplayName.color = hifi.colors.textFieldLightBackground;
}
// Edit pencil glyph
HiFiGlyphs {
@ -226,13 +228,20 @@ Item {
// Text Positioning
verticalAlignment: Text.AlignTop
// Style
color: (displayNameTextMouseArea.containsMouse || userNameTextMouseArea.containsMouse) ? hifi.colors.blueHighlight : hifi.colors.darkGray;
color: hifi.colors.darkGray;
MouseArea {
id: displayNameTextMouseArea;
anchors.fill: parent
enabled: selected && pal.activeTab == "nearbyTab" && thisNameCard.userName !== "";
hoverEnabled: enabled
onClicked: pal.sendToScript({method: 'goToUser', params: thisNameCard.userName});
onEntered: {
displayNameText.color = hifi.colors.blueHighlight;
userNameText.color = hifi.colors.blueHighlight;
}
onExited: {
displayNameText.color = hifi.colors.darkGray
userNameText.color = hifi.colors.greenShadow;
}
}
}
TextMetrics {
@ -293,7 +302,7 @@ Item {
FiraSansRegular {
id: userNameText
// Properties
text: thisNameCard.userName
text: thisNameCard.userName === "Unknown user" ? "not logged in" : thisNameCard.userName;
elide: Text.ElideRight
visible: thisNameCard.userName !== "";
// Size
@ -309,14 +318,20 @@ Item {
// Text Positioning
verticalAlignment: Text.AlignBottom
// Style
color: (pal.activeTab == "nearbyTab" && (displayNameTextMouseArea.containsMouse || userNameTextMouseArea.containsMouse))
? hifi.colors.blueHighlight : hifi.colors.greenShadow;
color: hifi.colors.greenShadow;
MouseArea {
id: userNameTextMouseArea;
anchors.fill: parent
enabled: selected && pal.activeTab == "nearbyTab" && thisNameCard.userName !== "";
hoverEnabled: enabled
onClicked: pal.sendToScript({method: 'goToUser', params: thisNameCard.userName});
onEntered: {
displayNameText.color = hifi.colors.blueHighlight;
userNameText.color = hifi.colors.blueHighlight;
}
onExited: {
displayNameText.color = hifi.colors.darkGray;
userNameText.color = hifi.colors.greenShadow;
}
}
}
// VU Meter

View file

@ -17,6 +17,7 @@ import QtGraphicalEffects 1.0
import Qt.labs.settings 1.0
import "../styles-uit"
import "../controls-uit" as HifiControls
import HFWebEngineProfile 1.0
// references HMD, Users, UserActivityLogger from root context
@ -285,7 +286,9 @@ Rectangle {
pal.sendToScript({method: 'refreshConnections'});
}
activeTab = "connectionsTab";
connectionsLoading.visible = false;
connectionsLoading.visible = true;
connectionsRefreshProblemText.visible = false;
}
}
@ -307,6 +310,7 @@ Rectangle {
width: reloadConnections.height;
glyph: hifi.glyphs.reload;
onClicked: {
connectionsLoading.visible = false;
connectionsLoading.visible = true;
pal.sendToScript({method: 'refreshConnections'});
}
@ -346,13 +350,12 @@ Rectangle {
text: "[?]";
size: connectionsTabSelectorText.size + 6;
font.capitalization: Font.AllUppercase;
color: connectionsTabSelectorMouseArea.containsMouse ? hifi.colors.redAccent : hifi.colors.redHighlight;
color: hifi.colors.redHighlight;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
anchors.fill: parent;
}
MouseArea {
id: connectionsTabSelectorMouseArea;
anchors.fill: parent;
hoverEnabled: true;
onClicked: letterbox(hifi.glyphs.question,
@ -361,6 +364,8 @@ Rectangle {
"When your availability is set to Everyone, Connections can see your username and location.<br><br>" +
"<font color='green'>Green borders around profile pictures are <b>Friends</b>.</font><br>" +
"When your availability is set to Friends, only Friends can see your username and location.");
onEntered: connectionsHelpText.color = hifi.colors.redAccent;
onExited: connectionsHelpText.color = hifi.colors.redHighlight;
}
}
}
@ -703,7 +708,6 @@ Rectangle {
}
// This Rectangle refers to the [?] popup button next to "NAMES"
Rectangle {
id: helpText;
color: hifi.colors.tableBackgroundLight;
width: 20;
height: hifi.dimensions.tableHeaderHeight - 2;
@ -712,16 +716,16 @@ Rectangle {
anchors.topMargin: 1;
anchors.leftMargin: actionButtonWidth + nearbyNameCardWidth/2 + displayNameHeaderMetrics.width/2 + 6;
RalewayRegular {
id: helpText;
text: "[?]";
size: hifi.fontSizes.tableHeading + 2;
font.capitalization: Font.AllUppercase;
color: helpTextMouseArea.containsMouse ? hifi.colors.baseGrayHighlight : hifi.colors.darkGray;
color: hifi.colors.darkGray;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
anchors.fill: parent;
}
MouseArea {
id: helpTextMouseArea;
anchors.fill: parent;
hoverEnabled: true;
onClicked: letterbox(hifi.glyphs.question,
@ -733,6 +737,8 @@ Rectangle {
"If you can see someone's username, you can GoTo them by selecting them in the PAL, then clicking their name.<br>" +
"<br>If someone's display name isn't set, a unique <b>session display name</b> is assigned to them.<br>" +
"<br>Administrators of this domain can also see the <b>username</b> or <b>machine ID</b> associated with each avatar present.");
onEntered: helpText.color = hifi.colors.baseGrayHighlight;
onExited: helpText.color = hifi.colors.darkGray;
}
}
// This Rectangle refers to the [?] popup button next to "ADMIN"
@ -750,19 +756,20 @@ Rectangle {
text: "[?]";
size: hifi.fontSizes.tableHeading + 2;
font.capitalization: Font.AllUppercase;
color: adminHelpTextMouseArea.containsMouse ? "#94132e" : hifi.colors.redHighlight;
color: hifi.colors.redHighlight;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
anchors.fill: parent;
}
MouseArea {
id: adminHelpTextMouseArea;
anchors.fill: parent;
hoverEnabled: true;
onClicked: letterbox(hifi.glyphs.question,
"Admin Actions",
"<b>Silence</b> mutes a user's microphone. Silenced users can unmute themselves by clicking &quot;UNMUTE&quot; on their toolbar.<br><br>" +
"<b>Ban</b> removes a user from this domain and prevents them from returning. Admins can un-ban users from the Sandbox Domain Settings page.");
onEntered: adminHelpText.color = "#94132e";
onExited: adminHelpText.color = hifi.colors.redHighlight;
}
}
} // "Nearby" Tab
@ -792,6 +799,35 @@ Rectangle {
height: width;
anchors.centerIn: parent;
visible: true;
onVisibleChanged: {
if (visible) {
connectionsTimeoutTimer.start();
} else {
connectionsTimeoutTimer.stop();
connectionsRefreshProblemText.visible = false;
}
}
}
// "This is taking too long..." text
FiraSansSemiBold {
id: connectionsRefreshProblemText
// Properties
text: "This is taking longer than normal.\nIf you get stuck, try refreshing the Connections tab.";
// Anchors
anchors.top: connectionsLoading.bottom;
anchors.topMargin: 10;
anchors.left: parent.left;
anchors.bottom: parent.bottom;
width: parent.width;
// Text Size
size: 16;
// Text Positioning
verticalAlignment: Text.AlignTop;
horizontalAlignment: Text.AlignHCenter;
wrapMode: Text.WordWrap;
// Style
color: hifi.colors.darkGray;
}
// This TableView refers to the Connections Table (on the "Connections" tab below the current user's NameCard)
@ -887,13 +923,14 @@ Rectangle {
// Text Positioning
verticalAlignment: Text.AlignVCenter
// Style
color: connectionsLocationDataMouseArea.containsMouse ? hifi.colors.blueHighlight : hifi.colors.darkGray;
color: hifi.colors.darkGray;
MouseArea {
id: connectionsLocationDataMouseArea;
anchors.fill: parent
hoverEnabled: enabled
enabled: connectionsNameCard.selected && pal.activeTab == "connectionsTab"
onClicked: pal.sendToScript({method: 'goToUser', params: model.userName});
onEntered: connectionsLocationData.color = hifi.colors.blueHighlight;
onExited: connectionsLocationData.color = hifi.colors.darkGray;
}
}
@ -941,7 +978,7 @@ Rectangle {
Rectangle {
id: navigationContainer;
visible: userInfoViewer.visible;
height: 75;
height: 60;
anchors {
top: parent.top;
left: parent.left;
@ -955,7 +992,7 @@ Rectangle {
top: parent.top;
left: parent.left;
}
height: parent.height - urlBar.height;
height: parent.height - addressBar.height;
width: parent.width/2;
FiraSansSemiBold {
@ -975,21 +1012,26 @@ Rectangle {
id: backButtonMouseArea;
anchors.fill: parent
hoverEnabled: enabled
onClicked: userInfoViewer.goBack();
onClicked: {
if (userInfoViewer.canGoBack) {
userInfoViewer.goBack();
}
}
}
}
}
Item {
id: closeButton
id: closeButtonContainer
anchors {
top: parent.top;
right: parent.right;
}
height: parent.height - urlBar.height;
height: parent.height - addressBar.height;
width: parent.width/2;
FiraSansSemiBold {
id: closeButton;
// Properties
text: "CLOSE";
elide: Text.ElideRight;
@ -1001,24 +1043,25 @@ Rectangle {
verticalAlignment: Text.AlignVCenter
horizontalAlignment: Text.AlignHCenter;
// Style
color: closeButtonMouseArea.containsMouse ? hifi.colors.redAccent : hifi.colors.redHighlight;
color: hifi.colors.redHighlight;
MouseArea {
id: closeButtonMouseArea;
anchors.fill: parent
hoverEnabled: enabled
onClicked: userInfoViewer.visible = false;
onEntered: closeButton.color = hifi.colors.redAccent;
onExited: closeButton.color = hifi.colors.redHighlight;
}
}
}
Item {
id: urlBar
id: addressBar
anchors {
top: closeButton.bottom;
top: closeButtonContainer.bottom;
left: parent.left;
right: parent.right;
}
height: 25;
height: 30;
width: parent.width;
FiraSansRegular {
@ -1027,17 +1070,14 @@ Rectangle {
elide: Text.ElideRight;
// Anchors
anchors.fill: parent;
anchors.leftMargin: 5;
// Text Size
size: 14;
// Text Positioning
verticalAlignment: Text.AlignVCenter
horizontalAlignment: Text.AlignHCenter;
horizontalAlignment: Text.AlignLeft;
// Style
color: hifi.colors.lightGray;
MouseArea {
anchors.fill: parent
onClicked: userInfoViewer.visible = false;
}
}
}
}
@ -1056,9 +1096,11 @@ Rectangle {
HifiControls.WebView {
id: userInfoViewer;
profile: HFWebEngineProfile {
storageName: "qmlWebEngine"
}
anchors {
top: navigationContainer.bottom;
topMargin: 5;
bottom: parent.bottom;
left: parent.left;
right: parent.right;
@ -1087,6 +1129,15 @@ Rectangle {
}
}
// Timer used when refreshing the Connections tab
Timer {
id: connectionsTimeoutTimer;
interval: 3000; // 3 seconds
onTriggered: {
connectionsRefreshProblemText.visible = true;
}
}
function rowColor(selected, alternate) {
return selected ? hifi.colors.orangeHighlight : alternate ? hifi.colors.tableRowLightEven : hifi.colors.tableRowLightOdd;
}
@ -1125,9 +1176,11 @@ Rectangle {
break;
case 'connections':
var data = message.params;
console.log('Got connection data: ', JSON.stringify(data));
connectionsUserModelData = data;
sortConnectionsModel();
connectionsLoading.visible = false;
connectionsRefreshProblemText.visible = false;
break;
case 'select':
var sessionIds = message.params[0];

View file

@ -608,6 +608,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
}
}
// make sure the debug draw singleton is initialized on the main thread.
DebugDraw::getInstance().removeMarker("");
_runningMarker.startRunningMarker();
@ -1182,6 +1184,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// set the local loopback interface for local sounds
AudioInjector::setLocalAudioInterface(audioIO.data());
AudioScriptingInterface::getInstance().setLocalAudioInterface(audioIO.data());
connect(audioIO.data(), &AudioClient::noiseGateOpened, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateOpened);
connect(audioIO.data(), &AudioClient::noiseGateClosed, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateClosed);
connect(audioIO.data(), &AudioClient::inputReceived, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::inputReceived);
this->installEventFilter(this);
@ -1947,6 +1953,8 @@ void Application::initializeUi() {
rootContext->setContextProperty("ApplicationInterface", this);
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
rootContext->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
rootContext->setContextProperty("AudioScope", DependencyManager::get<AudioScope>().data());
rootContext->setContextProperty("Controller", DependencyManager::get<controller::ScriptingInterface>().data());
rootContext->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
_fileDownload = new FileScriptingInterface(engine);
@ -2003,6 +2011,7 @@ void Application::initializeUi() {
rootContext->setContextProperty("Scene", DependencyManager::get<SceneScriptingInterface>().data());
rootContext->setContextProperty("Render", _renderEngine->getConfiguration().get());
rootContext->setContextProperty("Reticle", getApplicationCompositor().getReticleInterface());
rootContext->setContextProperty("location", DependencyManager::get<AddressManager>().data());
rootContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor());
@ -3174,7 +3183,23 @@ void Application::mousePressEvent(QMouseEvent* event) {
}
}
void Application::mouseDoublePressEvent(QMouseEvent* event) const {
void Application::mouseDoublePressEvent(QMouseEvent* event) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition, _glWidget);
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
event->buttons(), event->modifiers());
if (!_aboutToQuit) {
getOverlays().mouseDoublePressEvent(&mappedEvent);
if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
getEntities()->mouseDoublePressEvent(&mappedEvent);
}
}
// if one of our scripts have asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isMouseCaptured()) {
return;
@ -5521,6 +5546,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
scriptEngine->registerGlobalObject("AudioScope", DependencyManager::get<AudioScope>().data());
// Caches
scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());

View file

@ -494,7 +494,7 @@ private:
void mouseMoveEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
void mouseDoublePressEvent(QMouseEvent* event) const;
void mouseDoublePressEvent(QMouseEvent* event);
void mouseReleaseEvent(QMouseEvent* event);
void touchBeginEvent(QTouchEvent* event);

View file

@ -52,12 +52,14 @@ AudioScope::AudioScope() :
connect(audioIO.data(), &AudioClient::inputReceived, this, &AudioScope::addInputToScope);
}
void AudioScope::toggle() {
_isEnabled = !_isEnabled;
if (_isEnabled) {
allocateScope();
} else {
freeScope();
void AudioScope::setVisible(bool visible) {
if (_isEnabled != visible) {
_isEnabled = visible;
if (_isEnabled) {
allocateScope();
} else {
freeScope();
}
}
}

View file

@ -34,8 +34,14 @@ public:
void render(RenderArgs* renderArgs, int width, int height);
public slots:
void toggle();
void toggle() { setVisible(!_isEnabled); }
void setVisible(bool visible);
bool getVisible() const { return _isEnabled; }
void togglePause() { _isPaused = !_isPaused; }
void setPause(bool paused) { _isPaused = paused; }
bool getPause() { return _isPaused; }
void selectAudioScopeFiveFrames();
void selectAudioScopeTwentyFrames();
void selectAudioScopeFiftyFrames();
@ -74,7 +80,6 @@ private:
int _inputID;
int _outputLeftID;
int _outputRightD;
};
#endif // hifi_AudioScope_h
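
A minimal script-side sketch of the new AudioScope visibility/pause API (the AudioScope global is registered in Application.cpp earlier in this commit, and the audioScope.js script below uses the same calls; the restore step here is only illustrative):

var wasVisible = AudioScope.getVisible();
AudioScope.setVisible(true);   // allocates the scope buffers and shows it
AudioScope.setPause(false);    // resume capturing if it was paused
// ... later, put things back the way they were
AudioScope.setVisible(wasVisible);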

View file

@ -198,15 +198,16 @@ void Stats::updateStats(bool force) {
STAT_UPDATE(avatarMixerInPps, roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(avatarMixerOutKbps, roundf(bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(avatarMixerOutPps, roundf(bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
} else {
STAT_UPDATE(avatarMixerInKbps, -1);
STAT_UPDATE(avatarMixerInPps, -1);
STAT_UPDATE(avatarMixerOutKbps, -1);
STAT_UPDATE(avatarMixerOutPps, -1);
STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
}
STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
auto audioClient = DependencyManager::get<AudioClient>();
if (audioMixerNode || force) {
STAT_UPDATE(audioMixerKbps, roundf(
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
@ -214,10 +215,30 @@ void Stats::updateStats(bool force) {
STAT_UPDATE(audioMixerPps, roundf(
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMixerInKbps, roundf(bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMixerInPps, roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMixerOutKbps, roundf(bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMixerOutPps, roundf(bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMicOutboundPPS, audioClient->getMicAudioOutboundPPS());
STAT_UPDATE(audioSilentOutboundPPS, audioClient->getSilentOutboundPPS());
STAT_UPDATE(audioAudioInboundPPS, audioClient->getAudioInboundPPS());
STAT_UPDATE(audioSilentInboundPPS, audioClient->getSilentInboundPPS());
} else {
STAT_UPDATE(audioMixerKbps, -1);
STAT_UPDATE(audioMixerPps, -1);
STAT_UPDATE(audioMixerInKbps, -1);
STAT_UPDATE(audioMixerInPps, -1);
STAT_UPDATE(audioMixerOutKbps, -1);
STAT_UPDATE(audioMixerOutPps, -1);
STAT_UPDATE(audioMicOutboundPPS, -1);
STAT_UPDATE(audioSilentOutboundPPS, -1);
STAT_UPDATE(audioAudioInboundPPS, -1);
STAT_UPDATE(audioSilentInboundPPS, -1);
}
STAT_UPDATE(audioCodec, audioClient->getSelectedAudioFormat());
STAT_UPDATE(audioNoiseGate, audioClient->getNoiseGateOpen() ? "Open" : "Closed");
auto loadingRequests = ResourceCache::getLoadingRequests();
STAT_UPDATE(downloads, loadingRequests.size());

View file

@ -70,8 +70,20 @@ class Stats : public QQuickItem {
STATS_PROPERTY(int, avatarMixerOutKbps, 0)
STATS_PROPERTY(int, avatarMixerOutPps, 0)
STATS_PROPERTY(float, myAvatarSendRate, 0)
STATS_PROPERTY(int, audioMixerInKbps, 0)
STATS_PROPERTY(int, audioMixerInPps, 0)
STATS_PROPERTY(int, audioMixerOutKbps, 0)
STATS_PROPERTY(int, audioMixerOutPps, 0)
STATS_PROPERTY(int, audioMixerKbps, 0)
STATS_PROPERTY(int, audioMixerPps, 0)
STATS_PROPERTY(int, audioMicOutboundPPS, 0)
STATS_PROPERTY(int, audioSilentOutboundPPS, 0)
STATS_PROPERTY(int, audioAudioInboundPPS, 0)
STATS_PROPERTY(int, audioSilentInboundPPS, 0)
STATS_PROPERTY(QString, audioCodec, QString())
STATS_PROPERTY(QString, audioNoiseGate, QString())
STATS_PROPERTY(int, downloads, 0)
STATS_PROPERTY(int, downloadLimit, 0)
STATS_PROPERTY(int, downloadsPending, 0)
@ -180,8 +192,19 @@ signals:
void avatarMixerOutKbpsChanged();
void avatarMixerOutPpsChanged();
void myAvatarSendRateChanged();
void audioMixerInKbpsChanged();
void audioMixerInPpsChanged();
void audioMixerOutKbpsChanged();
void audioMixerOutPpsChanged();
void audioMixerKbpsChanged();
void audioMixerPpsChanged();
void audioMicOutboundPPSChanged();
void audioSilentOutboundPPSChanged();
void audioAudioInboundPPSChanged();
void audioSilentInboundPPSChanged();
void audioCodecChanged();
void audioNoiseGateChanged();
void downloadsChanged();
void downloadLimitChanged();
void downloadsPendingChanged();

View file

@ -769,6 +769,26 @@ bool Overlays::mousePressEvent(QMouseEvent* event) {
return false;
}
bool Overlays::mouseDoublePressEvent(QMouseEvent* event) {
PerformanceTimer perfTimer("Overlays::mouseDoublePressEvent");
PickRay ray = qApp->computePickRay(event->x(), event->y());
RayToOverlayIntersectionResult rayPickResult = findRayIntersectionForMouseEvent(ray);
if (rayPickResult.intersects) {
_currentClickingOnOverlayID = rayPickResult.overlayID;
// Only Web overlays can have focus.
auto thisOverlay = std::dynamic_pointer_cast<Web3DOverlay>(getOverlay(_currentClickingOnOverlayID));
if (thisOverlay) {
auto pointerEvent = calculatePointerEvent(thisOverlay, ray, rayPickResult, event, PointerEvent::Press);
emit mouseDoublePressOnOverlay(_currentClickingOnOverlayID, pointerEvent);
return true;
}
}
emit mouseDoublePressOffOverlay();
return false;
}
bool Overlays::mouseReleaseEvent(QMouseEvent* event) {
PerformanceTimer perfTimer("Overlays::mouseReleaseEvent");

View file

@ -101,6 +101,7 @@ public:
OverlayID addOverlay(Overlay::Pointer overlay);
bool mousePressEvent(QMouseEvent* event);
bool mouseDoublePressEvent(QMouseEvent* event);
bool mouseReleaseEvent(QMouseEvent* event);
bool mouseMoveEvent(QMouseEvent* event);
@ -300,9 +301,11 @@ signals:
void panelDeleted(OverlayID id);
void mousePressOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mouseDoublePressOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mouseReleaseOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mouseMoveOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mousePressOffOverlay();
void mouseDoublePressOffOverlay();
void hoverEnterOverlay(OverlayID overlayID, const PointerEvent& event);
void hoverOverOverlay(OverlayID overlayID, const PointerEvent& event);
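
A hedged sketch of listening for the new overlay double-press signals from a script (Overlays is the existing scripting global; the handlers below are only illustrative):

Overlays.mouseDoublePressOnOverlay.connect(function (overlayID, event) {
    print("Double-pressed overlay " + overlayID);
});
Overlays.mouseDoublePressOffOverlay.connect(function () {
    print("Double press missed every overlay");
});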

View file

@ -608,6 +608,13 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessag
}
void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message) {
if (message->getType() == PacketType::SilentAudioFrame) {
_silentInbound.increment();
} else {
_audioInbound.increment();
}
auto nodeList = DependencyManager::get<NodeList>();
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);
@ -1021,9 +1028,10 @@ void AudioClient::handleAudioInput() {
// if we performed the noise gate we can get values from it instead of enumerating the samples again
_lastInputLoudness = _inputGate.getLastLoudness();
if (_inputGate.clippedInLastFrame()) {
if (_inputGate.clippedInLastBlock()) {
_timeSinceLastClip = 0.0f;
}
} else {
float loudness = 0.0f;
@ -1041,6 +1049,12 @@ void AudioClient::handleAudioInput() {
emit inputReceived({ reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes });
if (_inputGate.openedInLastBlock()) {
emit noiseGateOpened();
} else if (_inputGate.closedInLastBlock()) {
emit noiseGateClosed();
}
} else {
// our input loudness is 0, since we're muted
_lastInputLoudness = 0;
@ -1057,9 +1071,13 @@ void AudioClient::handleAudioInput() {
// the output from the input gate (eventually, this could be crossfaded)
// and allow the codec to properly encode down to silent/zero. If we still
// have _lastInputLoudness of 0 in our NEXT frame, we will send a silent packet
if (_lastInputLoudness == 0 && !_inputGate.closedInLastFrame()) {
if (_lastInputLoudness == 0 && !_inputGate.closedInLastBlock()) {
packetType = PacketType::SilentAudioFrame;
_silentOutbound.increment();
} else {
_micAudioOutbound.increment();
}
Transform audioTransform;
audioTransform.setTranslation(_positionGetter());
audioTransform.setRotation(_orientationGetter());
@ -1084,6 +1102,7 @@ void AudioClient::handleAudioInput() {
}
}
// FIXME - should this go through the noise gate and honor mute and echo?
void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
Transform audioTransform;
audioTransform.setTranslation(_positionGetter());
@ -1096,6 +1115,8 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
encodedBuffer = audio;
}
_micAudioOutbound.increment();
// FIXME check a flag to see if we should echo audio?
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber,
audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,

View file

@ -45,11 +45,13 @@
#include <AudioReverb.h>
#include <AudioLimiter.h>
#include <AudioConstants.h>
#include <AudioNoiseGate.h>
#include <shared/RateCounter.h>
#include <plugins/CodecPlugin.h>
#include "AudioIOStats.h"
#include "AudioNoiseGate.h"
#ifdef _WIN32
#pragma warning( push )
@ -121,6 +123,13 @@ public:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);
Q_INVOKABLE QString getSelectedAudioFormat() const { return _selectedCodecName; }
Q_INVOKABLE bool getNoiseGateOpen() const { return _inputGate.isOpen(); }
Q_INVOKABLE float getSilentOutboundPPS() const { return _silentOutbound.rate(); }
Q_INVOKABLE float getMicAudioOutboundPPS() const { return _micAudioOutbound.rate(); }
Q_INVOKABLE float getSilentInboundPPS() const { return _silentInbound.rate(); }
Q_INVOKABLE float getAudioInboundPPS() const { return _audioInbound.rate(); }
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
@ -218,6 +227,8 @@ signals:
void inputReceived(const QByteArray& inputSamples);
void outputBytesToNetwork(int numBytes);
void inputBytesFromNetwork(int numBytes);
void noiseGateOpened();
void noiseGateClosed();
void changeDevice(const QAudioDeviceInfo& outputDeviceInfo);
void deviceChanged();
@ -382,6 +393,11 @@ private:
Encoder* _encoder { nullptr }; // for outbound mic stream
QThread* _checkDevicesThread { nullptr };
RateCounter<> _silentOutbound;
RateCounter<> _micAudioOutbound;
RateCounter<> _silentInbound;
RateCounter<> _audioInbound;
};

View file

@ -1,6 +1,6 @@
//
// AudioNoiseGate.cpp
// interface/src/audio
// libraries/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
@ -9,35 +9,29 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AudioNoiseGate.h"
#include <cstdlib>
#include <string.h>
#include <AudioConstants.h>
#include "AudioNoiseGate.h"
#include "AudioConstants.h"
const float AudioNoiseGate::CLIPPING_THRESHOLD = 0.90f;
AudioNoiseGate::AudioNoiseGate() :
_inputFrameCounter(0),
_lastLoudness(0.0f),
_quietestFrame(std::numeric_limits<float>::max()),
_loudestFrame(0.0f),
_didClipInLastFrame(false),
_didClipInLastBlock(false),
_dcOffset(0.0f),
_measuredFloor(0.0f),
_sampleCounter(0),
_isOpen(false),
_framesToClose(0)
{
}
_blocksToClose(0) {}
void AudioNoiseGate::removeDCOffset(int16_t* samples, int numSamples) {
//
// DC Offset correction
//
// Measure the DC offset over a trailing number of frames, and remove it from the input signal.
// Measure the DC offset over a trailing number of blocks, and remove it from the input signal.
// This causes the noise background measurements and server muting to be more accurate. Many off-board
// ADC's have a noticeable DC offset.
//
@ -51,7 +45,7 @@ void AudioNoiseGate::removeDCOffset(int16_t* samples, int numSamples) {
// Update measured DC offset
measuredDcOffset /= numSamples;
if (_dcOffset == 0.0f) {
// On first frame, copy over measured offset
// On first block, copy over measured offset
_dcOffset = measuredDcOffset;
} else {
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
@ -69,89 +63,102 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
//
// NOISE_GATE_HEIGHT: How loud you have to speak relative to noise background to open the gate.
// Make this value lower for more sensitivity and less rejection of noise.
// NOISE_GATE_WIDTH: The number of samples in an audio frame for which the height must be exceeded
// NOISE_GATE_WIDTH: The number of samples in an audio block for which the height must be exceeded
// to open the gate.
// NOISE_GATE_CLOSE_FRAME_DELAY: Once the noise is below the gate height for the frame, how many frames
// NOISE_GATE_CLOSE_BLOCK_DELAY: Once the noise is below the gate height for the block, how many blocks
// will we wait before closing the gate.
// NOISE_GATE_FRAMES_TO_AVERAGE: How many audio frames should we average together to compute noise floor.
// NOISE_GATE_BLOCKS_TO_AVERAGE: How many audio blocks should we average together to compute noise floor.
// More means better rejection but also can reject continuous things like singing.
// NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?
// NUMBER_OF_NOISE_SAMPLE_BLOCKS: How often should we re-evaluate the noise floor?
float loudness = 0;
int thisSample = 0;
int samplesOverNoiseGate = 0;
const float NOISE_GATE_HEIGHT = 7.0f;
const int NOISE_GATE_WIDTH = 5;
const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
const int NOISE_GATE_CLOSE_BLOCK_DELAY = 5;
const int NOISE_GATE_BLOCKS_TO_AVERAGE = 5;
// Check clipping, and check if should open noise gate
_didClipInLastFrame = false;
_didClipInLastBlock = false;
for (int i = 0; i < numSamples; i++) {
thisSample = std::abs(samples[i]);
if (thisSample >= ((float) AudioConstants::MAX_SAMPLE_VALUE * CLIPPING_THRESHOLD)) {
_didClipInLastFrame = true;
_didClipInLastBlock = true;
}
loudness += thisSample;
// Noise Reduction: Count peaks above the average loudness
if (thisSample > (_measuredFloor * NOISE_GATE_HEIGHT)) {
samplesOverNoiseGate++;
}
}
_lastLoudness = fabs(loudness / numSamples);
if (_quietestFrame > _lastLoudness) {
_quietestFrame = _lastLoudness;
}
if (_loudestFrame < _lastLoudness) {
_loudestFrame = _lastLoudness;
}
const int FRAMES_FOR_NOISE_DETECTION = 400;
if (_inputFrameCounter++ > FRAMES_FOR_NOISE_DETECTION) {
_quietestFrame = std::numeric_limits<float>::max();
_loudestFrame = 0.0f;
_inputFrameCounter = 0;
}
// If Noise Gate is enabled, check and turn the gate on and off
float averageOfAllSampleFrames = 0.0f;
_sampleFrames[_sampleCounter++] = _lastLoudness;
if (_sampleCounter == NUMBER_OF_NOISE_SAMPLE_FRAMES) {
float averageOfAllSampleBlocks = 0.0f;
_sampleBlocks[_sampleCounter++] = _lastLoudness;
if (_sampleCounter == NUMBER_OF_NOISE_SAMPLE_BLOCKS) {
float smallestSample = std::numeric_limits<float>::max();
for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_FRAMES - NOISE_GATE_FRAMES_TO_AVERAGE; i += NOISE_GATE_FRAMES_TO_AVERAGE) {
for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_BLOCKS - NOISE_GATE_BLOCKS_TO_AVERAGE; i += NOISE_GATE_BLOCKS_TO_AVERAGE) {
float thisAverage = 0.0f;
for (int j = i; j < i + NOISE_GATE_FRAMES_TO_AVERAGE; j++) {
thisAverage += _sampleFrames[j];
averageOfAllSampleFrames += _sampleFrames[j];
for (int j = i; j < i + NOISE_GATE_BLOCKS_TO_AVERAGE; j++) {
thisAverage += _sampleBlocks[j];
averageOfAllSampleBlocks += _sampleBlocks[j];
}
thisAverage /= NOISE_GATE_FRAMES_TO_AVERAGE;
thisAverage /= NOISE_GATE_BLOCKS_TO_AVERAGE;
if (thisAverage < smallestSample) {
smallestSample = thisAverage;
}
}
averageOfAllSampleFrames /= NUMBER_OF_NOISE_SAMPLE_FRAMES;
averageOfAllSampleBlocks /= NUMBER_OF_NOISE_SAMPLE_BLOCKS;
_measuredFloor = smallestSample;
_sampleCounter = 0;
}
_closedInLastBlock = false;
_openedInLastBlock = false;
if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
_openedInLastBlock = !_isOpen;
_isOpen = true;
_framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
_blocksToClose = NOISE_GATE_CLOSE_BLOCK_DELAY;
} else {
if (--_framesToClose == 0) {
_closedInLastFrame = !_isOpen;
if (--_blocksToClose == 0) {
_closedInLastBlock = _isOpen;
_isOpen = false;
}
}
if (!_isOpen) {
memset(samples, 0, numSamples * sizeof(int16_t));
// First block after being closed gets faded to silence, we fade across
// the entire block on fading out. All subsequent blocks are muted by being slammed
// to zeros
if (_closedInLastBlock) {
float fadeSlope = (1.0f / numSamples);
for (int i = 0; i < numSamples; i++) {
float fadedSample = (1.0f - ((float)i * fadeSlope)) * (float)samples[i];
samples[i] = (int16_t)fadedSample;
}
} else {
memset(samples, 0, numSamples * sizeof(int16_t));
}
_lastLoudness = 0;
}
if (_openedInLastBlock) {
// would be nice to do a little crossfade from silence, but we only want to fade
// across the first 1/10th of the block, because we don't want to miss early
// transients.
int fadeSamples = numSamples / 10; // fade over 1/10th of the samples
float fadeSlope = (1.0f / fadeSamples);
for (int i = 0; i < fadeSamples; i++) {
float fadedSample = (float)i * fadeSlope * (float)samples[i];
samples[i] = (int16_t)fadedSample;
}
}
}

View file

@ -1,6 +1,6 @@
//
// AudioNoiseGate.h
// interface/src/audio
// libraries/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
@ -14,35 +14,35 @@
#include <stdint.h>
const int NUMBER_OF_NOISE_SAMPLE_FRAMES = 300;
const int NUMBER_OF_NOISE_SAMPLE_BLOCKS = 300;
class AudioNoiseGate {
public:
AudioNoiseGate();
void gateSamples(int16_t* samples, int numSamples);
void removeDCOffset(int16_t* samples, int numSamples);
bool clippedInLastFrame() const { return _didClipInLastFrame; }
bool closedInLastFrame() const { return _closedInLastFrame; }
bool clippedInLastBlock() const { return _didClipInLastBlock; }
bool closedInLastBlock() const { return _closedInLastBlock; }
bool openedInLastBlock() const { return _openedInLastBlock; }
bool isOpen() const { return _isOpen; }
float getMeasuredFloor() const { return _measuredFloor; }
float getLastLoudness() const { return _lastLoudness; }
static const float CLIPPING_THRESHOLD;
private:
int _inputFrameCounter;
float _lastLoudness;
float _quietestFrame;
float _loudestFrame;
bool _didClipInLastFrame;
bool _didClipInLastBlock;
float _dcOffset;
float _measuredFloor;
float _sampleFrames[NUMBER_OF_NOISE_SAMPLE_FRAMES];
float _sampleBlocks[NUMBER_OF_NOISE_SAMPLE_BLOCKS];
int _sampleCounter;
bool _isOpen;
bool _closedInLastFrame { false };
int _framesToClose;
bool _closedInLastBlock { false };
bool _openedInLastBlock { false };
int _blocksToClose;
};
#endif // hifi_AudioNoiseGate_h
#endif // hifi_AudioNoiseGate_h

View file

@ -735,6 +735,52 @@ void EntityTreeRenderer::mousePressEvent(QMouseEvent* event) {
}
}
void EntityTreeRenderer::mouseDoublePressEvent(QMouseEvent* event) {
// If we don't have a tree, or we're in the process of shutting down, then don't
// process these events.
if (!_tree || _shuttingDown) {
return;
}
PerformanceTimer perfTimer("EntityTreeRenderer::mouseDoublePressEvent");
PickRay ray = _viewState->computePickRay(event->x(), event->y());
bool precisionPicking = !_dontDoPrecisionPicking;
RayToEntityIntersectionResult rayPickResult = findRayIntersectionWorker(ray, Octree::Lock, precisionPicking);
if (rayPickResult.intersects) {
//qCDebug(entitiesrenderer) << "mouseDoublePressEvent over entity:" << rayPickResult.entityID;
QString urlString = rayPickResult.properties.getHref();
QUrl url = QUrl(urlString, QUrl::StrictMode);
if (url.isValid() && !url.isEmpty()){
DependencyManager::get<AddressManager>()->handleLookupString(urlString);
}
glm::vec2 pos2D = projectOntoEntityXYPlane(rayPickResult.entity, ray, rayPickResult);
PointerEvent pointerEvent(PointerEvent::Press, MOUSE_POINTER_ID,
pos2D, rayPickResult.intersection,
rayPickResult.surfaceNormal, ray.direction,
toPointerButton(*event), toPointerButtons(*event));
emit mouseDoublePressOnEntity(rayPickResult.entityID, pointerEvent);
if (_entitiesScriptEngine) {
_entitiesScriptEngine->callEntityScriptMethod(rayPickResult.entityID, "mouseDoublePressOnEntity", pointerEvent);
}
_currentClickingOnEntityID = rayPickResult.entityID;
emit clickDownOnEntity(_currentClickingOnEntityID, pointerEvent);
if (_entitiesScriptEngine) {
_entitiesScriptEngine->callEntityScriptMethod(_currentClickingOnEntityID, "doubleclickOnEntity", pointerEvent);
}
_lastPointerEvent = pointerEvent;
_lastPointerEventValid = true;
} else {
emit mouseDoublePressOffEntity();
}
}
void EntityTreeRenderer::mouseReleaseEvent(QMouseEvent* event) {
// If we don't have a tree, or we're in the process of shutting down, then don't
// process these events.

View file

@ -90,6 +90,7 @@ public:
// event handles which may generate entity related events
void mouseReleaseEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
void mouseDoublePressEvent(QMouseEvent* event);
void mouseMoveEvent(QMouseEvent* event);
/// connect our signals to anEntityScriptingInterface for firing of events related clicking,
@ -103,9 +104,11 @@ public:
signals:
void mousePressOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mouseDoublePressOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mouseMoveOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mouseReleaseOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mousePressOffEntity();
void mouseDoublePressOffEntity();
void clickDownOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void holdingClickOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);

View file

@ -143,12 +143,35 @@ void PhysicsEngine::addObjectToDynamicsWorld(ObjectMotionState* motionState) {
}
void PhysicsEngine::removeObjects(const VectorOfMotionStates& objects) {
// first bump and prune contacts for all objects in the list
// bump and prune contacts for all objects in the list
for (auto object : objects) {
bumpAndPruneContacts(object);
}
// then remove them
if (_activeStaticBodies.size() > 0) {
// _activeStaticBodies was not cleared last frame.
// The only way to get here is if a static object were moved but we did not actually step the simulation last
// frame (because the framerate is faster than our physics simulation rate). When this happens we must scan
// _activeStaticBodies for objects that were recently deleted so we don't try to access a dangling pointer.
for (auto object : objects) {
btRigidBody* body = object->getRigidBody();
std::vector<btRigidBody*>::reverse_iterator itr = _activeStaticBodies.rbegin();
while (itr != _activeStaticBodies.rend()) {
if (body == *itr) {
if (*itr != *(_activeStaticBodies.rbegin())) {
// swap with rbegin
*itr = *(_activeStaticBodies.rbegin());
}
_activeStaticBodies.pop_back();
break;
}
++itr;
}
}
}
// remove bodies
for (auto object : objects) {
btRigidBody* body = object->getRigidBody();
if (body) {

View file

@ -346,7 +346,9 @@ void AnimDebugDraw::update() {
numVerts += (int)markerMap.size() * VERTICES_PER_BONE;
auto myAvatarMarkerMap = DebugDraw::getInstance().getMyAvatarMarkerMap();
numVerts += (int)myAvatarMarkerMap.size() * VERTICES_PER_BONE;
numVerts += (int)DebugDraw::getInstance().getRays().size() * VERTICES_PER_RAY;
auto rays = DebugDraw::getInstance().getRays();
DebugDraw::getInstance().clearRays();
numVerts += (int)rays.size() * VERTICES_PER_RAY;
// allocate verts!
std::vector<AnimDebugDrawData::Vertex> vertices;
@ -398,10 +400,9 @@ void AnimDebugDraw::update() {
}
// draw rays from shared DebugDraw singleton
for (auto& iter : DebugDraw::getInstance().getRays()) {
for (auto& iter : rays) {
addLine(std::get<0>(iter), std::get<1>(iter), std::get<2>(iter), v);
}
DebugDraw::getInstance().clearRays();
data._vertexBuffer->resize(sizeof(AnimDebugDrawData::Vertex) * numVerts);
data._vertexBuffer->setSubData<AnimDebugDrawData::Vertex>(0, vertices);

View file

@ -32,10 +32,13 @@ protected:
Q_INVOKABLE void setStereoInput(bool stereo);
signals:
void mutedByMixer();
void environmentMuted();
void receivedFirstPacket();
void disconnected();
void mutedByMixer(); /// the client has been muted by the mixer
void environmentMuted(); /// the entire environment has been muted by the mixer
void receivedFirstPacket(); /// the client has received its first packet from the audio mixer
void disconnected(); /// the client has been disconnected from the audio mixer
void noiseGateOpened(); /// the noise gate has opened
void noiseGateClosed(); /// the noise gate has closed
void inputReceived(const QByteArray& inputSamples); /// a frame of mic input audio has been received and processed
private:
AudioScriptingInterface();
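
A minimal script-side sketch of the new noise gate signals (the Audio global is this AudioScriptingInterface singleton; the print messages are only illustrative, and the audioScope.js script later in this commit connects to the same signals to auto-pause the scope):

Audio.noiseGateOpened.connect(function () {
    print("Noise gate opened: mic audio is going out");
});
Audio.noiseGateClosed.connect(function () {
    print("Noise gate closed: silent frames are going out");
});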

View file

@ -142,7 +142,7 @@ QString encodeEntityIdIntoEntityUrl(const QString& url, const QString& entityID)
QString ScriptEngine::logException(const QScriptValue& exception) {
auto message = formatException(exception);
scriptErrorMessage(qPrintable(message));
scriptErrorMessage(message);
return message;
}
@ -453,7 +453,7 @@ void ScriptEngine::loadURL(const QUrl& scriptURL, bool reload) {
}
void ScriptEngine::scriptErrorMessage(const QString& message) {
qCCritical(scriptengine) << message;
qCCritical(scriptengine) << qPrintable(message);
emit errorMessage(message);
}

View file

@ -143,7 +143,7 @@ void XMLHttpRequestClass::open(const QString& method, const QString& url, bool a
if (url.toLower().left(METAVERSE_API_URL.length()) == METAVERSE_API_URL) {
auto accountManager = DependencyManager::get<AccountManager>();
if (_url.scheme() == "https" && accountManager->hasValidAccessToken()) {
if (accountManager->hasValidAccessToken()) {
static const QString HTTP_AUTHORIZATION_HEADER = "Authorization";
QString bearerString = "Bearer " + accountManager->getAccountInfo().getAccessToken().token;
_request.setRawHeader(HTTP_AUTHORIZATION_HEADER.toLocal8Bit(), bearerString.toLocal8Bit());

View file

@ -47,6 +47,9 @@ QScriptValue PointerEvent::toScriptValue(QScriptEngine* engine, const PointerEve
case Press:
obj.setProperty("type", "Press");
break;
case DoublePress:
obj.setProperty("type", "DoublePress");
break;
case Release:
obj.setProperty("type", "Release");
break;
@ -128,6 +131,8 @@ void PointerEvent::fromScriptValue(const QScriptValue& object, PointerEvent& eve
QString typeStr = type.isString() ? type.toString() : "Move";
if (typeStr == "Press") {
event._type = Press;
} else if (typeStr == "DoublePress") {
event._type = DoublePress;
} else if (typeStr == "Release") {
event._type = Release;
} else {

View file

@ -26,9 +26,10 @@ public:
};
enum EventType {
Press, // A button has just been pressed
Release, // A button has just been released
Move // The pointer has just moved
Press, // A button has just been pressed
DoublePress, // A button has just been double pressed
Release, // A button has just been released
Move // The pointer has just moved
};
PointerEvent();

View file

@ -24,29 +24,34 @@ public:
RateCounter() { _rate = 0; } // avoid use of std::atomic copy ctor
void increment(size_t count = 1) {
auto now = usecTimestampNow();
float currentIntervalMs = (now - _start) / (float) USECS_PER_MSEC;
if (currentIntervalMs > (float) INTERVAL) {
float currentCount = _count;
float intervalSeconds = currentIntervalMs / (float) MSECS_PER_SECOND;
_rate = roundf(currentCount / intervalSeconds * _scale) / _scale;
_start = now;
_count = 0;
};
checkRate();
_count += count;
}
float rate() const { return _rate; }
float rate() const { checkRate(); return _rate; }
uint8_t precision() const { return PRECISION; }
uint32_t interval() const { return INTERVAL; }
private:
uint64_t _start { usecTimestampNow() };
size_t _count { 0 };
mutable uint64_t _start { usecTimestampNow() };
mutable size_t _count { 0 };
const float _scale { powf(10, PRECISION) };
std::atomic<float> _rate;
mutable std::atomic<float> _rate;
void checkRate() const {
auto now = usecTimestampNow();
float currentIntervalMs = (now - _start) / (float)USECS_PER_MSEC;
if (currentIntervalMs > (float)INTERVAL) {
float currentCount = _count;
float intervalSeconds = currentIntervalMs / (float)MSECS_PER_SECOND;
_rate = roundf(currentCount / intervalSeconds * _scale) / _scale;
_start = now;
_count = 0;
};
}
};
#endif

View file

@ -0,0 +1,19 @@
(function() {
var _this;
function DoubleClickExample() {
_this = this;
return;
}
DoubleClickExample.prototype = {
clickDownOnEntity: function() {
print("clickDownOnEntity");
},
doubleclickOnEntity: function() {
print("doubleclickOnEntity");
}
};
return new DoubleClickExample();
});
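
One way to try the entity script above (a hedged sketch; the box properties and resolved path are illustrative, not part of the commit):

var boxID = Entities.addEntity({
    type: "Box",
    position: Vec3.sum(MyAvatar.position, { x: 0, y: 0, z: -2 }),
    dimensions: { x: 0.5, y: 0.5, z: 0.5 },
    script: Script.resolvePath("doubleClickExample.js")
});
// single clicks print "clickDownOnEntity"; a double click also prints "doubleclickOnEntity"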

View file

@ -0,0 +1,95 @@
"use strict";
//
// audioScope.js
// scripts/system/
//
// Created by Brad Hefta-Gaub on 3/10/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* global Script, Tablet, AudioScope, Audio */
(function () { // BEGIN LOCAL_SCOPE
var scopeVisibile = AudioScope.getVisible();
var scopePaused = AudioScope.getPause();
var autoPause = false;
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var showScopeButton = tablet.addButton({
icon: "icons/tablet-icons/scope.svg",
text: "Audio Scope",
isActive: scopeVisibile
});
var scopePauseImage = "icons/tablet-icons/scope-pause.svg";
var scopePlayImage = "icons/tablet-icons/scope-play.svg";
var pauseScopeButton = tablet.addButton({
icon: scopePaused ? scopePlayImage : scopePauseImage,
text: scopePaused ? "Unpause" : "Pause",
isActive: scopePaused
});
var autoPauseScopeButton = tablet.addButton({
icon: "icons/tablet-icons/scope-auto.svg",
text: "Auto Pause",
isActive: autoPause
});
function setScopePause(paused) {
scopePaused = paused;
pauseScopeButton.editProperties({
isActive: scopePaused,
icon: scopePaused ? scopePlayImage : scopePauseImage,
text: scopePaused ? "Unpause" : "Pause"
});
AudioScope.setPause(scopePaused);
}
showScopeButton.clicked.connect(function () {
// toggle button active state
scopeVisibile = !scopeVisibile;
showScopeButton.editProperties({
isActive: scopeVisibile
});
AudioScope.setVisible(scopeVisibile);
});
pauseScopeButton.clicked.connect(function () {
// toggle button active state
setScopePause(!scopePaused);
});
autoPauseScopeButton.clicked.connect(function () {
// toggle button active state
autoPause = !autoPause;
autoPauseScopeButton.editProperties({
isActive: autoPause,
text: autoPause ? "Auto Pause" : "Manual"
});
});
Script.scriptEnding.connect(function () {
tablet.removeButton(showScopeButton);
tablet.removeButton(pauseScopeButton);
tablet.removeButton(autoPauseScopeButton);
});
Audio.noiseGateOpened.connect(function(){
if (autoPause) {
setScopePause(false);
}
});
Audio.noiseGateClosed.connect(function(){
// noise gate closed
if (autoPause) {
setScopePause(true);
}
});
}()); // END LOCAL_SCOPE

View file

@ -261,6 +261,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
UserActivityLogger.palAction("refresh_nearby", "");
break;
case 'refreshConnections':
print('Refreshing Connections...');
getConnectionData();
UserActivityLogger.palAction("refresh_connections", "");
break;
@ -298,7 +299,8 @@ function updateUser(data) {
// User management services
//
// These are prototype versions that will be changed when the back end changes.
var METAVERSE_BASE = 'https://metaverse.highfidelity.com';
var METAVERSE_BASE = location.metaverseServerUrl;
function request(url, callback) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
var httpRequest = new XMLHttpRequest();