diff --git a/assignment-client/src/avatars/AvatarMixer.cpp b/assignment-client/src/avatars/AvatarMixer.cpp
index c10a616818..d2bfdde7ea 100644
--- a/assignment-client/src/avatars/AvatarMixer.cpp
+++ b/assignment-client/src/avatars/AvatarMixer.cpp
@@ -37,7 +37,6 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
// FIXME - what we'd actually like to do is send to users at ~50% of their present rate down to 30hz. Assume 90 for now.
const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
-const unsigned int AVATAR_DATA_SEND_INTERVAL_MSECS = (1.0f / (float) AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND) * 1000;
AvatarMixer::AvatarMixer(ReceivedMessage& message) :
ThreadedAssignment(message)
diff --git a/interface/resources/icons/tablet-icons/scope-auto.svg b/interface/resources/icons/tablet-icons/scope-auto.svg
new file mode 100644
index 0000000000..85ef3f0e38
--- /dev/null
+++ b/interface/resources/icons/tablet-icons/scope-auto.svg
@@ -0,0 +1,46 @@
+
+
+
diff --git a/interface/resources/icons/tablet-icons/scope-pause.svg b/interface/resources/icons/tablet-icons/scope-pause.svg
new file mode 100644
index 0000000000..3fe74fcc9f
--- /dev/null
+++ b/interface/resources/icons/tablet-icons/scope-pause.svg
@@ -0,0 +1,30 @@
+
+
+
diff --git a/interface/resources/icons/tablet-icons/scope-play.svg b/interface/resources/icons/tablet-icons/scope-play.svg
new file mode 100644
index 0000000000..56d90ef38a
--- /dev/null
+++ b/interface/resources/icons/tablet-icons/scope-play.svg
@@ -0,0 +1,30 @@
+
+
+
diff --git a/interface/resources/qml/Stats.qml b/interface/resources/qml/Stats.qml
index 58d589b667..564c74b526 100644
--- a/interface/resources/qml/Stats.qml
+++ b/interface/resources/qml/Stats.qml
@@ -181,6 +181,31 @@ Item {
root.avatarMixerOutPps + "pps, " +
root.myAvatarSendRate.toFixed(2) + "hz";
}
+ StatText {
+ visible: root.expanded;
+ text: "Audio Mixer In: " + root.audioMixerInKbps + " kbps, " +
+ root.audioMixerInPps + "pps";
+ }
+ StatText {
+ visible: root.expanded;
+ text: "Audio In Audio: " + root.audioAudioInboundPPS + " pps, " +
+ "Silent: " + root.audioSilentInboundPPS + " pps";
+ }
+ StatText {
+ visible: root.expanded;
+ text: "Audio Mixer Out: " + root.audioMixerOutKbps + " kbps, " +
+ root.audioMixerOutPps + "pps";
+ }
+ StatText {
+ visible: root.expanded;
+ text: "Audio Out Mic: " + root.audioMicOutboundPPS + " pps, " +
+ "Silent: " + root.audioSilentOutboundPPS + " pps";
+ }
+ StatText {
+ visible: root.expanded;
+ text: "Audio Codec: " + root.audioCodec + " Noise Gate: " +
+ root.audioNoiseGate;
+ }
StatText {
visible: root.expanded;
text: "Downloads: " + root.downloads + "/" + root.downloadLimit +
diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp
index 141b168f82..3eb316b0dc 100644
--- a/interface/src/Application.cpp
+++ b/interface/src/Application.cpp
@@ -1188,6 +1188,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// set the local loopback interface for local sounds
AudioInjector::setLocalAudioInterface(audioIO.data());
AudioScriptingInterface::getInstance().setLocalAudioInterface(audioIO.data());
+ connect(audioIO.data(), &AudioClient::noiseGateOpened, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateOpened);
+ connect(audioIO.data(), &AudioClient::noiseGateClosed, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateClosed);
+ connect(audioIO.data(), &AudioClient::inputReceived, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::inputReceived);
+
this->installEventFilter(this);
@@ -1950,6 +1954,8 @@ void Application::initializeUi() {
rootContext->setContextProperty("ApplicationInterface", this);
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
rootContext->setContextProperty("AudioStats", DependencyManager::get()->getStats().data());
+ rootContext->setContextProperty("AudioScope", DependencyManager::get().data());
+
rootContext->setContextProperty("Controller", DependencyManager::get().data());
rootContext->setContextProperty("Entities", DependencyManager::get().data());
_fileDownload = new FileScriptingInterface(engine);
@@ -3178,7 +3184,23 @@ void Application::mousePressEvent(QMouseEvent* event) {
}
}
-void Application::mouseDoublePressEvent(QMouseEvent* event) const {
+void Application::mouseDoublePressEvent(QMouseEvent* event) {
+ auto offscreenUi = DependencyManager::get<OffscreenUi>();
+ auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
+ QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition, _glWidget);
+ QMouseEvent mappedEvent(event->type(),
+ transformedPos,
+ event->screenPos(), event->button(),
+ event->buttons(), event->modifiers());
+
+ if (!_aboutToQuit) {
+ getOverlays().mouseDoublePressEvent(&mappedEvent);
+ if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
+ getEntities()->mouseDoublePressEvent(&mappedEvent);
+ }
+ }
+
+
// if one of our scripts have asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isMouseCaptured()) {
return;
@@ -5525,6 +5547,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get()->getStats().data());
+ scriptEngine->registerGlobalObject("AudioScope", DependencyManager::get().data());
// Caches
scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get().data());
diff --git a/interface/src/Application.h b/interface/src/Application.h
index 5c72f0fa90..31a719f1f1 100644
--- a/interface/src/Application.h
+++ b/interface/src/Application.h
@@ -497,7 +497,7 @@ private:
void mouseMoveEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
- void mouseDoublePressEvent(QMouseEvent* event) const;
+ void mouseDoublePressEvent(QMouseEvent* event);
void mouseReleaseEvent(QMouseEvent* event);
void touchBeginEvent(QTouchEvent* event);
diff --git a/interface/src/audio/AudioScope.cpp b/interface/src/audio/AudioScope.cpp
index 346fbd11f4..cf9984e32b 100644
--- a/interface/src/audio/AudioScope.cpp
+++ b/interface/src/audio/AudioScope.cpp
@@ -52,12 +52,14 @@ AudioScope::AudioScope() :
connect(audioIO.data(), &AudioClient::inputReceived, this, &AudioScope::addInputToScope);
}
-void AudioScope::toggle() {
- _isEnabled = !_isEnabled;
- if (_isEnabled) {
- allocateScope();
- } else {
- freeScope();
+void AudioScope::setVisible(bool visible) {
+ if (_isEnabled != visible) {
+ _isEnabled = visible;
+ if (_isEnabled) {
+ allocateScope();
+ } else {
+ freeScope();
+ }
}
}
diff --git a/interface/src/audio/AudioScope.h b/interface/src/audio/AudioScope.h
index 0b716d7666..615bdaf17f 100644
--- a/interface/src/audio/AudioScope.h
+++ b/interface/src/audio/AudioScope.h
@@ -34,8 +34,14 @@ public:
void render(RenderArgs* renderArgs, int width, int height);
public slots:
- void toggle();
+ void toggle() { setVisible(!_isEnabled); }
+ void setVisible(bool visible);
+ bool getVisible() const { return _isEnabled; }
+
void togglePause() { _isPaused = !_isPaused; }
+ void setPause(bool paused) { _isPaused = paused; }
+ bool getPause() { return _isPaused; }
+
void selectAudioScopeFiveFrames();
void selectAudioScopeTwentyFrames();
void selectAudioScopeFiftyFrames();
@@ -74,7 +80,6 @@ private:
int _inputID;
int _outputLeftID;
int _outputRightD;
-
};
#endif // hifi_AudioScope_h
diff --git a/interface/src/ui/Stats.cpp b/interface/src/ui/Stats.cpp
index 1075bbdaa4..923d9f642d 100644
--- a/interface/src/ui/Stats.cpp
+++ b/interface/src/ui/Stats.cpp
@@ -198,15 +198,16 @@ void Stats::updateStats(bool force) {
STAT_UPDATE(avatarMixerInPps, roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(avatarMixerOutKbps, roundf(bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(avatarMixerOutPps, roundf(bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AvatarMixer)));
- STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
} else {
STAT_UPDATE(avatarMixerInKbps, -1);
STAT_UPDATE(avatarMixerInPps, -1);
STAT_UPDATE(avatarMixerOutKbps, -1);
STAT_UPDATE(avatarMixerOutPps, -1);
- STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
}
+ STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
+
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
+ auto audioClient = DependencyManager::get<AudioClient>();
if (audioMixerNode || force) {
STAT_UPDATE(audioMixerKbps, roundf(
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
@@ -214,10 +215,30 @@ void Stats::updateStats(bool force) {
STAT_UPDATE(audioMixerPps, roundf(
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
+
+ STAT_UPDATE(audioMixerInKbps, roundf(bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer)));
+ STAT_UPDATE(audioMixerInPps, roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer)));
+ STAT_UPDATE(audioMixerOutKbps, roundf(bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer)));
+ STAT_UPDATE(audioMixerOutPps, roundf(bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
+ STAT_UPDATE(audioMicOutboundPPS, audioClient->getMicAudioOutboundPPS());
+ STAT_UPDATE(audioSilentOutboundPPS, audioClient->getSilentOutboundPPS());
+ STAT_UPDATE(audioAudioInboundPPS, audioClient->getAudioInboundPPS());
+ STAT_UPDATE(audioSilentInboundPPS, audioClient->getSilentInboundPPS());
} else {
STAT_UPDATE(audioMixerKbps, -1);
STAT_UPDATE(audioMixerPps, -1);
+ STAT_UPDATE(audioMixerInKbps, -1);
+ STAT_UPDATE(audioMixerInPps, -1);
+ STAT_UPDATE(audioMixerOutKbps, -1);
+ STAT_UPDATE(audioMixerOutPps, -1);
+ STAT_UPDATE(audioMicOutboundPPS, -1);
+ STAT_UPDATE(audioSilentOutboundPPS, -1);
+ STAT_UPDATE(audioAudioInboundPPS, -1);
+ STAT_UPDATE(audioSilentInboundPPS, -1);
}
+ STAT_UPDATE(audioCodec, audioClient->getSelectedAudioFormat());
+ STAT_UPDATE(audioNoiseGate, audioClient->getNoiseGateOpen() ? "Open" : "Closed");
+
auto loadingRequests = ResourceCache::getLoadingRequests();
STAT_UPDATE(downloads, loadingRequests.size());
diff --git a/interface/src/ui/Stats.h b/interface/src/ui/Stats.h
index 6be084100c..0ce113e0a0 100644
--- a/interface/src/ui/Stats.h
+++ b/interface/src/ui/Stats.h
@@ -70,8 +70,20 @@ class Stats : public QQuickItem {
STATS_PROPERTY(int, avatarMixerOutKbps, 0)
STATS_PROPERTY(int, avatarMixerOutPps, 0)
STATS_PROPERTY(float, myAvatarSendRate, 0)
+
+ STATS_PROPERTY(int, audioMixerInKbps, 0)
+ STATS_PROPERTY(int, audioMixerInPps, 0)
+ STATS_PROPERTY(int, audioMixerOutKbps, 0)
+ STATS_PROPERTY(int, audioMixerOutPps, 0)
STATS_PROPERTY(int, audioMixerKbps, 0)
STATS_PROPERTY(int, audioMixerPps, 0)
+ STATS_PROPERTY(int, audioMicOutboundPPS, 0)
+ STATS_PROPERTY(int, audioSilentOutboundPPS, 0)
+ STATS_PROPERTY(int, audioAudioInboundPPS, 0)
+ STATS_PROPERTY(int, audioSilentInboundPPS, 0)
+ STATS_PROPERTY(QString, audioCodec, QString())
+ STATS_PROPERTY(QString, audioNoiseGate, QString())
+
STATS_PROPERTY(int, downloads, 0)
STATS_PROPERTY(int, downloadLimit, 0)
STATS_PROPERTY(int, downloadsPending, 0)
@@ -180,8 +192,19 @@ signals:
void avatarMixerOutKbpsChanged();
void avatarMixerOutPpsChanged();
void myAvatarSendRateChanged();
+ void audioMixerInKbpsChanged();
+ void audioMixerInPpsChanged();
+ void audioMixerOutKbpsChanged();
+ void audioMixerOutPpsChanged();
void audioMixerKbpsChanged();
void audioMixerPpsChanged();
+ void audioMicOutboundPPSChanged();
+ void audioSilentOutboundPPSChanged();
+ void audioAudioInboundPPSChanged();
+ void audioSilentInboundPPSChanged();
+ void audioCodecChanged();
+ void audioNoiseGateChanged();
+
void downloadsChanged();
void downloadLimitChanged();
void downloadsPendingChanged();
diff --git a/interface/src/ui/overlays/Overlays.cpp b/interface/src/ui/overlays/Overlays.cpp
index 9847961f5a..97a263c11d 100644
--- a/interface/src/ui/overlays/Overlays.cpp
+++ b/interface/src/ui/overlays/Overlays.cpp
@@ -768,6 +768,26 @@ bool Overlays::mousePressEvent(QMouseEvent* event) {
return false;
}
+bool Overlays::mouseDoublePressEvent(QMouseEvent* event) {
+ PerformanceTimer perfTimer("Overlays::mouseDoublePressEvent");
+
+ PickRay ray = qApp->computePickRay(event->x(), event->y());
+ RayToOverlayIntersectionResult rayPickResult = findRayIntersectionForMouseEvent(ray);
+ if (rayPickResult.intersects) {
+ _currentClickingOnOverlayID = rayPickResult.overlayID;
+
+ // Only Web overlays can have focus.
+ auto thisOverlay = std::dynamic_pointer_cast<Web3DOverlay>(getOverlay(_currentClickingOnOverlayID));
+ if (thisOverlay) {
+ auto pointerEvent = calculatePointerEvent(thisOverlay, ray, rayPickResult, event, PointerEvent::Press);
+ emit mouseDoublePressOnOverlay(_currentClickingOnOverlayID, pointerEvent);
+ return true;
+ }
+ }
+ emit mouseDoublePressOffOverlay();
+ return false;
+}
+
bool Overlays::mouseReleaseEvent(QMouseEvent* event) {
PerformanceTimer perfTimer("Overlays::mouseReleaseEvent");
diff --git a/interface/src/ui/overlays/Overlays.h b/interface/src/ui/overlays/Overlays.h
index 5c22e46880..c35c7c1ced 100644
--- a/interface/src/ui/overlays/Overlays.h
+++ b/interface/src/ui/overlays/Overlays.h
@@ -101,6 +101,7 @@ public:
OverlayID addOverlay(Overlay::Pointer overlay);
bool mousePressEvent(QMouseEvent* event);
+ bool mouseDoublePressEvent(QMouseEvent* event);
bool mouseReleaseEvent(QMouseEvent* event);
bool mouseMoveEvent(QMouseEvent* event);
@@ -300,9 +301,11 @@ signals:
void panelDeleted(OverlayID id);
void mousePressOnOverlay(OverlayID overlayID, const PointerEvent& event);
+ void mouseDoublePressOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mouseReleaseOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mouseMoveOnOverlay(OverlayID overlayID, const PointerEvent& event);
void mousePressOffOverlay();
+ void mouseDoublePressOffOverlay();
void hoverEnterOverlay(OverlayID overlayID, const PointerEvent& event);
void hoverOverOverlay(OverlayID overlayID, const PointerEvent& event);
diff --git a/libraries/audio-client/src/AudioClient.cpp b/libraries/audio-client/src/AudioClient.cpp
index 9cd87d2e70..c32b5600d9 100644
--- a/libraries/audio-client/src/AudioClient.cpp
+++ b/libraries/audio-client/src/AudioClient.cpp
@@ -608,6 +608,13 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer message) {
+
+ if (message->getType() == PacketType::SilentAudioFrame) {
+ _silentInbound.increment();
+ } else {
+ _audioInbound.increment();
+ }
+
auto nodeList = DependencyManager::get<NodeList>();
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);
@@ -1021,9 +1028,10 @@ void AudioClient::handleAudioInput() {
// if we performed the noise gate we can get values from it instead of enumerating the samples again
_lastInputLoudness = _inputGate.getLastLoudness();
- if (_inputGate.clippedInLastFrame()) {
+ if (_inputGate.clippedInLastBlock()) {
_timeSinceLastClip = 0.0f;
}
+
} else {
float loudness = 0.0f;
@@ -1041,6 +1049,12 @@ void AudioClient::handleAudioInput() {
emit inputReceived({ reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes });
+ if (_inputGate.openedInLastBlock()) {
+ emit noiseGateOpened();
+ } else if (_inputGate.closedInLastBlock()) {
+ emit noiseGateClosed();
+ }
+
} else {
// our input loudness is 0, since we're muted
_lastInputLoudness = 0;
@@ -1057,9 +1071,13 @@ void AudioClient::handleAudioInput() {
// the output from the input gate (eventually, this could be crossfaded)
// and allow the codec to properly encode down to silent/zero. If we still
// have _lastInputLoudness of 0 in our NEXT frame, we will send a silent packet
- if (_lastInputLoudness == 0 && !_inputGate.closedInLastFrame()) {
+ if (_lastInputLoudness == 0 && !_inputGate.closedInLastBlock()) {
packetType = PacketType::SilentAudioFrame;
+ _silentOutbound.increment();
+ } else {
+ _micAudioOutbound.increment();
}
+
Transform audioTransform;
audioTransform.setTranslation(_positionGetter());
audioTransform.setRotation(_orientationGetter());
@@ -1084,6 +1102,7 @@ void AudioClient::handleAudioInput() {
}
}
+// FIXME - should this go through the noise gate and honor mute and echo?
void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
Transform audioTransform;
audioTransform.setTranslation(_positionGetter());
@@ -1096,6 +1115,8 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
encodedBuffer = audio;
}
+ _micAudioOutbound.increment();
+
// FIXME check a flag to see if we should echo audio?
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber,
audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
diff --git a/libraries/audio-client/src/AudioClient.h b/libraries/audio-client/src/AudioClient.h
index 5619051eaf..512b4bb3c1 100644
--- a/libraries/audio-client/src/AudioClient.h
+++ b/libraries/audio-client/src/AudioClient.h
@@ -46,6 +46,8 @@
#include
#include
+#include <shared/RateCounter.h>
+
#include
#include "AudioIOStats.h"
@@ -121,6 +123,13 @@ public:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);
+ Q_INVOKABLE QString getSelectedAudioFormat() const { return _selectedCodecName; }
+ Q_INVOKABLE bool getNoiseGateOpen() const { return _inputGate.isOpen(); }
+ Q_INVOKABLE float getSilentOutboundPPS() const { return _silentOutbound.rate(); }
+ Q_INVOKABLE float getMicAudioOutboundPPS() const { return _micAudioOutbound.rate(); }
+ Q_INVOKABLE float getSilentInboundPPS() const { return _silentInbound.rate(); }
+ Q_INVOKABLE float getAudioInboundPPS() const { return _audioInbound.rate(); }
+
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
@@ -218,6 +227,8 @@ signals:
void inputReceived(const QByteArray& inputSamples);
void outputBytesToNetwork(int numBytes);
void inputBytesFromNetwork(int numBytes);
+ void noiseGateOpened();
+ void noiseGateClosed();
void changeDevice(const QAudioDeviceInfo& outputDeviceInfo);
void deviceChanged();
@@ -382,6 +393,11 @@ private:
Encoder* _encoder { nullptr }; // for outbound mic stream
QThread* _checkDevicesThread { nullptr };
+
+ RateCounter<> _silentOutbound;
+ RateCounter<> _micAudioOutbound;
+ RateCounter<> _silentInbound;
+ RateCounter<> _audioInbound;
};
diff --git a/libraries/audio-client/src/AudioNoiseGate.cpp b/libraries/audio-client/src/AudioNoiseGate.cpp
index 98ce8cc9e8..8a9134b5dc 100644
--- a/libraries/audio-client/src/AudioNoiseGate.cpp
+++ b/libraries/audio-client/src/AudioNoiseGate.cpp
@@ -19,16 +19,16 @@
const float AudioNoiseGate::CLIPPING_THRESHOLD = 0.90f;
AudioNoiseGate::AudioNoiseGate() :
- _inputFrameCounter(0),
+ _inputBlockCounter(0),
_lastLoudness(0.0f),
- _quietestFrame(std::numeric_limits<float>::max()),
- _loudestFrame(0.0f),
- _didClipInLastFrame(false),
+ _quietestBlock(std::numeric_limits<float>::max()),
+ _loudestBlock(0.0f),
+ _didClipInLastBlock(false),
_dcOffset(0.0f),
_measuredFloor(0.0f),
_sampleCounter(0),
_isOpen(false),
- _framesToClose(0)
+ _blocksToClose(0)
{
}
@@ -37,7 +37,7 @@ void AudioNoiseGate::removeDCOffset(int16_t* samples, int numSamples) {
//
// DC Offset correction
//
- // Measure the DC offset over a trailing number of frames, and remove it from the input signal.
+ // Measure the DC offset over a trailing number of blocks, and remove it from the input signal.
// This causes the noise background measurements and server muting to be more accurate. Many off-board
// ADC's have a noticeable DC offset.
//
@@ -51,7 +51,7 @@ void AudioNoiseGate::removeDCOffset(int16_t* samples, int numSamples) {
// Update measured DC offset
measuredDcOffset /= numSamples;
if (_dcOffset == 0.0f) {
- // On first frame, copy over measured offset
+ // On first block, copy over measured offset
_dcOffset = measuredDcOffset;
} else {
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
@@ -69,13 +69,13 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
//
// NOISE_GATE_HEIGHT: How loud you have to speak relative to noise background to open the gate.
// Make this value lower for more sensitivity and less rejection of noise.
- // NOISE_GATE_WIDTH: The number of samples in an audio frame for which the height must be exceeded
+ // NOISE_GATE_WIDTH: The number of samples in an audio block for which the height must be exceeded
// to open the gate.
- // NOISE_GATE_CLOSE_FRAME_DELAY: Once the noise is below the gate height for the frame, how many frames
+ // NOISE_GATE_CLOSE_BLOCK_DELAY: Once the noise is below the gate height for the block, how many blocks
// will we wait before closing the gate.
- // NOISE_GATE_FRAMES_TO_AVERAGE: How many audio frames should we average together to compute noise floor.
+ // NOISE_GATE_BLOCKS_TO_AVERAGE: How many audio blocks should we average together to compute noise floor.
// More means better rejection but also can reject continuous things like singing.
- // NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?
+ // NUMBER_OF_NOISE_SAMPLE_BLOCKS: How often should we re-evaluate the noise floor?
float loudness = 0;
int thisSample = 0;
@@ -83,16 +83,16 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
const float NOISE_GATE_HEIGHT = 7.0f;
const int NOISE_GATE_WIDTH = 5;
- const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
- const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
+ const int NOISE_GATE_CLOSE_BLOCK_DELAY = 5;
+ const int NOISE_GATE_BLOCKS_TO_AVERAGE = 5;
// Check clipping, and check if should open noise gate
- _didClipInLastFrame = false;
+ _didClipInLastBlock = false;
for (int i = 0; i < numSamples; i++) {
thisSample = std::abs(samples[i]);
if (thisSample >= ((float) AudioConstants::MAX_SAMPLE_VALUE * CLIPPING_THRESHOLD)) {
- _didClipInLastFrame = true;
+ _didClipInLastBlock = true;
}
loudness += thisSample;
@@ -104,54 +104,81 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
_lastLoudness = fabs(loudness / numSamples);
- if (_quietestFrame > _lastLoudness) {
- _quietestFrame = _lastLoudness;
+ if (_quietestBlock > _lastLoudness) {
+ _quietestBlock = _lastLoudness;
}
- if (_loudestFrame < _lastLoudness) {
- _loudestFrame = _lastLoudness;
+ if (_loudestBlock < _lastLoudness) {
+ _loudestBlock = _lastLoudness;
}
const int FRAMES_FOR_NOISE_DETECTION = 400;
- if (_inputFrameCounter++ > FRAMES_FOR_NOISE_DETECTION) {
- _quietestFrame = std::numeric_limits<float>::max();
- _loudestFrame = 0.0f;
- _inputFrameCounter = 0;
+ if (_inputBlockCounter++ > FRAMES_FOR_NOISE_DETECTION) {
+ _quietestBlock = std::numeric_limits<float>::max();
+ _loudestBlock = 0.0f;
+ _inputBlockCounter = 0;
}
// If Noise Gate is enabled, check and turn the gate on and off
- float averageOfAllSampleFrames = 0.0f;
- _sampleFrames[_sampleCounter++] = _lastLoudness;
- if (_sampleCounter == NUMBER_OF_NOISE_SAMPLE_FRAMES) {
+ float averageOfAllSampleBlocks = 0.0f;
+ _sampleBlocks[_sampleCounter++] = _lastLoudness;
+ if (_sampleCounter == NUMBER_OF_NOISE_SAMPLE_BLOCKS) {
float smallestSample = std::numeric_limits<float>::max();
- for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_FRAMES - NOISE_GATE_FRAMES_TO_AVERAGE; i += NOISE_GATE_FRAMES_TO_AVERAGE) {
+ for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_BLOCKS - NOISE_GATE_BLOCKS_TO_AVERAGE; i += NOISE_GATE_BLOCKS_TO_AVERAGE) {
float thisAverage = 0.0f;
- for (int j = i; j < i + NOISE_GATE_FRAMES_TO_AVERAGE; j++) {
- thisAverage += _sampleFrames[j];
- averageOfAllSampleFrames += _sampleFrames[j];
+ for (int j = i; j < i + NOISE_GATE_BLOCKS_TO_AVERAGE; j++) {
+ thisAverage += _sampleBlocks[j];
+ averageOfAllSampleBlocks += _sampleBlocks[j];
}
- thisAverage /= NOISE_GATE_FRAMES_TO_AVERAGE;
+ thisAverage /= NOISE_GATE_BLOCKS_TO_AVERAGE;
if (thisAverage < smallestSample) {
smallestSample = thisAverage;
}
}
- averageOfAllSampleFrames /= NUMBER_OF_NOISE_SAMPLE_FRAMES;
+ averageOfAllSampleBlocks /= NUMBER_OF_NOISE_SAMPLE_BLOCKS;
_measuredFloor = smallestSample;
_sampleCounter = 0;
}
+ _closedInLastBlock = false;
+ _openedInLastBlock = false;
+
if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
+ _openedInLastBlock = !_isOpen;
_isOpen = true;
- _framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
+ _blocksToClose = NOISE_GATE_CLOSE_BLOCK_DELAY;
} else {
- if (--_framesToClose == 0) {
- _closedInLastFrame = !_isOpen;
+ if (--_blocksToClose == 0) {
+ _closedInLastBlock = _isOpen;
_isOpen = false;
}
}
if (!_isOpen) {
- memset(samples, 0, numSamples * sizeof(int16_t));
+ // First block after being closed gets faded to silence, we fade across
+ // the entire block on fading out. All subsequent blocks are muted by being slammed
+ // to zeros
+ if (_closedInLastBlock) {
+ float fadeSlope = (1.0f / numSamples);
+ for (int i = 0; i < numSamples; i++) {
+ float fadedSample = (1.0f - ((float)i * fadeSlope)) * (float)samples[i];
+ samples[i] = (int16_t)fadedSample;
+ }
+ } else {
+ memset(samples, 0, numSamples * sizeof(int16_t));
+ }
_lastLoudness = 0;
}
+
+ if (_openedInLastBlock) {
+ // would be nice to do a little crossfade from silence, but we only want to fade
+ // across the first 1/10th of the block, because we don't want to miss early
+ // transients.
+ int fadeSamples = numSamples / 10; // fade over 1/10th of the samples
+ float fadeSlope = (1.0f / fadeSamples);
+ for (int i = 0; i < fadeSamples; i++) {
+ float fadedSample = (float)i * fadeSlope * (float)samples[i];
+ samples[i] = (int16_t)fadedSample;
+ }
+ }
}
diff --git a/libraries/audio-client/src/AudioNoiseGate.h b/libraries/audio-client/src/AudioNoiseGate.h
index 774a4157bb..f72e92b0d5 100644
--- a/libraries/audio-client/src/AudioNoiseGate.h
+++ b/libraries/audio-client/src/AudioNoiseGate.h
@@ -14,7 +14,7 @@
#include
-const int NUMBER_OF_NOISE_SAMPLE_FRAMES = 300;
+const int NUMBER_OF_NOISE_SAMPLE_BLOCKS = 300;
class AudioNoiseGate {
public:
@@ -23,26 +23,29 @@ public:
void gateSamples(int16_t* samples, int numSamples);
void removeDCOffset(int16_t* samples, int numSamples);
- bool clippedInLastFrame() const { return _didClipInLastFrame; }
- bool closedInLastFrame() const { return _closedInLastFrame; }
+ bool clippedInLastBlock() const { return _didClipInLastBlock; }
+ bool closedInLastBlock() const { return _closedInLastBlock; }
+ bool openedInLastBlock() const { return _openedInLastBlock; }
+ bool isOpen() const { return _isOpen; }
float getMeasuredFloor() const { return _measuredFloor; }
float getLastLoudness() const { return _lastLoudness; }
static const float CLIPPING_THRESHOLD;
private:
- int _inputFrameCounter;
+ int _inputBlockCounter;
float _lastLoudness;
- float _quietestFrame;
- float _loudestFrame;
- bool _didClipInLastFrame;
+ float _quietestBlock;
+ float _loudestBlock;
+ bool _didClipInLastBlock;
float _dcOffset;
float _measuredFloor;
- float _sampleFrames[NUMBER_OF_NOISE_SAMPLE_FRAMES];
+ float _sampleBlocks[NUMBER_OF_NOISE_SAMPLE_BLOCKS];
int _sampleCounter;
bool _isOpen;
- bool _closedInLastFrame { false };
- int _framesToClose;
+ bool _closedInLastBlock { false };
+ bool _openedInLastBlock { false };
+ int _blocksToClose;
};
#endif // hifi_AudioNoiseGate_h
\ No newline at end of file
diff --git a/libraries/entities-renderer/src/EntityTreeRenderer.cpp b/libraries/entities-renderer/src/EntityTreeRenderer.cpp
index 7e2d78a837..8ed9cc23c7 100644
--- a/libraries/entities-renderer/src/EntityTreeRenderer.cpp
+++ b/libraries/entities-renderer/src/EntityTreeRenderer.cpp
@@ -736,6 +736,52 @@ void EntityTreeRenderer::mousePressEvent(QMouseEvent* event) {
}
}
+void EntityTreeRenderer::mouseDoublePressEvent(QMouseEvent* event) {
+ // If we don't have a tree, or we're in the process of shutting down, then don't
+ // process these events.
+ if (!_tree || _shuttingDown) {
+ return;
+ }
+ PerformanceTimer perfTimer("EntityTreeRenderer::mouseDoublePressEvent");
+ PickRay ray = _viewState->computePickRay(event->x(), event->y());
+
+ bool precisionPicking = !_dontDoPrecisionPicking;
+ RayToEntityIntersectionResult rayPickResult = findRayIntersectionWorker(ray, Octree::Lock, precisionPicking);
+ if (rayPickResult.intersects) {
+ //qCDebug(entitiesrenderer) << "mouseDoublePressEvent over entity:" << rayPickResult.entityID;
+
+ QString urlString = rayPickResult.properties.getHref();
+ QUrl url = QUrl(urlString, QUrl::StrictMode);
+ if (url.isValid() && !url.isEmpty()){
+ DependencyManager::get<AddressManager>()->handleLookupString(urlString);
+ }
+
+ glm::vec2 pos2D = projectOntoEntityXYPlane(rayPickResult.entity, ray, rayPickResult);
+ PointerEvent pointerEvent(PointerEvent::Press, MOUSE_POINTER_ID,
+ pos2D, rayPickResult.intersection,
+ rayPickResult.surfaceNormal, ray.direction,
+ toPointerButton(*event), toPointerButtons(*event));
+
+ emit mouseDoublePressOnEntity(rayPickResult.entityID, pointerEvent);
+
+ if (_entitiesScriptEngine) {
+ _entitiesScriptEngine->callEntityScriptMethod(rayPickResult.entityID, "mouseDoublePressOnEntity", pointerEvent);
+ }
+
+ _currentClickingOnEntityID = rayPickResult.entityID;
+ emit clickDownOnEntity(_currentClickingOnEntityID, pointerEvent);
+ if (_entitiesScriptEngine) {
+ _entitiesScriptEngine->callEntityScriptMethod(_currentClickingOnEntityID, "doubleclickOnEntity", pointerEvent);
+ }
+
+ _lastPointerEvent = pointerEvent;
+ _lastPointerEventValid = true;
+
+ } else {
+ emit mouseDoublePressOffEntity();
+ }
+}
+
void EntityTreeRenderer::mouseReleaseEvent(QMouseEvent* event) {
// If we don't have a tree, or we're in the process of shutting down, then don't
// process these events.
diff --git a/libraries/entities-renderer/src/EntityTreeRenderer.h b/libraries/entities-renderer/src/EntityTreeRenderer.h
index c11738c459..753f25310c 100644
--- a/libraries/entities-renderer/src/EntityTreeRenderer.h
+++ b/libraries/entities-renderer/src/EntityTreeRenderer.h
@@ -90,6 +90,7 @@ public:
// event handles which may generate entity related events
void mouseReleaseEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
+ void mouseDoublePressEvent(QMouseEvent* event);
void mouseMoveEvent(QMouseEvent* event);
/// connect our signals to an EntityScriptingInterface for firing of events related to clicking,
@@ -103,9 +104,11 @@ public:
signals:
void mousePressOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
+ void mouseDoublePressOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mouseMoveOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mouseReleaseOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void mousePressOffEntity();
+ void mouseDoublePressOffEntity();
void clickDownOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
void holdingClickOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
diff --git a/libraries/script-engine/src/AudioScriptingInterface.h b/libraries/script-engine/src/AudioScriptingInterface.h
index 07a6b171f4..6cce78d48f 100644
--- a/libraries/script-engine/src/AudioScriptingInterface.h
+++ b/libraries/script-engine/src/AudioScriptingInterface.h
@@ -32,10 +32,13 @@ protected:
Q_INVOKABLE void setStereoInput(bool stereo);
signals:
- void mutedByMixer();
- void environmentMuted();
- void receivedFirstPacket();
- void disconnected();
+ void mutedByMixer(); /// the client has been muted by the mixer
+ void environmentMuted(); /// the entire environment has been muted by the mixer
+ void receivedFirstPacket(); /// the client has received its first packet from the audio mixer
+ void disconnected(); /// the client has been disconnected from the audio mixer
+ void noiseGateOpened(); /// the noise gate has opened
+ void noiseGateClosed(); /// the noise gate has closed
+ void inputReceived(const QByteArray& inputSamples); /// a frame of mic input audio has been received and processed
private:
AudioScriptingInterface();
diff --git a/libraries/shared/src/PointerEvent.cpp b/libraries/shared/src/PointerEvent.cpp
index 0833657886..1a64a5ddb1 100644
--- a/libraries/shared/src/PointerEvent.cpp
+++ b/libraries/shared/src/PointerEvent.cpp
@@ -48,6 +48,9 @@ QScriptValue PointerEvent::toScriptValue(QScriptEngine* engine, const PointerEve
case Press:
obj.setProperty("type", "Press");
break;
+ case DoublePress:
+ obj.setProperty("type", "DoublePress");
+ break;
case Release:
obj.setProperty("type", "Release");
break;
@@ -131,6 +134,8 @@ void PointerEvent::fromScriptValue(const QScriptValue& object, PointerEvent& eve
QString typeStr = type.isString() ? type.toString() : "Move";
if (typeStr == "Press") {
event._type = Press;
+ } else if (typeStr == "DoublePress") {
+ event._type = DoublePress;
} else if (typeStr == "Release") {
event._type = Release;
} else {
diff --git a/libraries/shared/src/PointerEvent.h b/libraries/shared/src/PointerEvent.h
index 980510b091..4c00ba3e69 100644
--- a/libraries/shared/src/PointerEvent.h
+++ b/libraries/shared/src/PointerEvent.h
@@ -28,9 +28,10 @@ public:
};
enum EventType {
- Press, // A button has just been pressed
- Release, // A button has just been released
- Move // The pointer has just moved
+ Press, // A button has just been pressed
+ DoublePress, // A button has just been double pressed
+ Release, // A button has just been released
+ Move // The pointer has just moved
};
PointerEvent();
diff --git a/libraries/shared/src/shared/RateCounter.h b/libraries/shared/src/shared/RateCounter.h
index d04d87493a..3cf509b6bf 100644
--- a/libraries/shared/src/shared/RateCounter.h
+++ b/libraries/shared/src/shared/RateCounter.h
@@ -24,29 +24,34 @@ public:
RateCounter() { _rate = 0; } // avoid use of std::atomic copy ctor
void increment(size_t count = 1) {
- auto now = usecTimestampNow();
- float currentIntervalMs = (now - _start) / (float) USECS_PER_MSEC;
- if (currentIntervalMs > (float) INTERVAL) {
- float currentCount = _count;
- float intervalSeconds = currentIntervalMs / (float) MSECS_PER_SECOND;
- _rate = roundf(currentCount / intervalSeconds * _scale) / _scale;
- _start = now;
- _count = 0;
- };
+ checkRate();
_count += count;
}
- float rate() const { return _rate; }
+ float rate() const { checkRate(); return _rate; }
uint8_t precision() const { return PRECISION; }
uint32_t interval() const { return INTERVAL; }
private:
- uint64_t _start { usecTimestampNow() };
- size_t _count { 0 };
+ mutable uint64_t _start { usecTimestampNow() };
+ mutable size_t _count { 0 };
const float _scale { powf(10, PRECISION) };
- std::atomic<float> _rate;
+ mutable std::atomic<float> _rate;
+
+ void checkRate() const {
+ auto now = usecTimestampNow();
+ float currentIntervalMs = (now - _start) / (float)USECS_PER_MSEC;
+ if (currentIntervalMs > (float)INTERVAL) {
+ float currentCount = _count;
+ float intervalSeconds = currentIntervalMs / (float)MSECS_PER_SECOND;
+ _rate = roundf(currentCount / intervalSeconds * _scale) / _scale;
+ _start = now;
+ _count = 0;
+ };
+ }
+
};
#endif
diff --git a/script-archive/entityScripts/doubleClickExample.js b/script-archive/entityScripts/doubleClickExample.js
new file mode 100644
index 0000000000..daff2668ed
--- /dev/null
+++ b/script-archive/entityScripts/doubleClickExample.js
@@ -0,0 +1,19 @@
+(function() {
+ var _this;
+ function DoubleClickExample() {
+ _this = this;
+ return;
+ }
+
+ DoubleClickExample.prototype = {
+ clickDownOnEntity: function() {
+ print("clickDownOnEntity");
+ },
+
+ doubleclickOnEntity: function() {
+ print("doubleclickOnEntity");
+ }
+
+ };
+ return new DoubleClickExample();
+});
\ No newline at end of file
diff --git a/scripts/system/audioScope.js b/scripts/system/audioScope.js
new file mode 100644
index 0000000000..81d8e8fbd4
--- /dev/null
+++ b/scripts/system/audioScope.js
@@ -0,0 +1,95 @@
+"use strict";
+//
+// audioScope.js
+// scripts/system/
+//
+// Created by Brad Hefta-Gaub on 3/10/2016
+// Copyright 2016 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+/* global Script, Tablet, AudioScope, Audio */
+
+(function () { // BEGIN LOCAL_SCOPE
+
+ var scopeVisibile = AudioScope.getVisible();
+ var scopePaused = AudioScope.getPause();
+ var autoPause = false;
+
+ var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
+ var showScopeButton = tablet.addButton({
+ icon: "icons/tablet-icons/scope.svg",
+ text: "Audio Scope",
+ isActive: scopeVisibile
+ });
+
+ var scopePauseImage = "icons/tablet-icons/scope-pause.svg";
+ var scopePlayImage = "icons/tablet-icons/scope-play.svg";
+
+ var pauseScopeButton = tablet.addButton({
+ icon: scopePaused ? scopePlayImage : scopePauseImage,
+ text: scopePaused ? "Unpause" : "Pause",
+ isActive: scopePaused
+ });
+
+ var autoPauseScopeButton = tablet.addButton({
+ icon: "icons/tablet-icons/scope-auto.svg",
+ text: "Auto Pause",
+ isActive: autoPause
+ });
+
+ function setScopePause(paused) {
+ scopePaused = paused;
+ pauseScopeButton.editProperties({
+ isActive: scopePaused,
+ icon: scopePaused ? scopePlayImage : scopePauseImage,
+ text: scopePaused ? "Unpause" : "Pause"
+ });
+ AudioScope.setPause(scopePaused);
+ }
+
+ showScopeButton.clicked.connect(function () {
+ // toggle button active state
+ scopeVisibile = !scopeVisibile;
+ showScopeButton.editProperties({
+ isActive: scopeVisibile
+ });
+
+ AudioScope.setVisible(scopeVisibile);
+ });
+
+ pauseScopeButton.clicked.connect(function () {
+ // toggle button active state
+ setScopePause(!scopePaused);
+ });
+
+ autoPauseScopeButton.clicked.connect(function () {
+ // toggle button active state
+ autoPause = !autoPause;
+ autoPauseScopeButton.editProperties({
+ isActive: autoPause,
+ text: autoPause ? "Auto Pause" : "Manual"
+ });
+ });
+
+ Script.scriptEnding.connect(function () {
+ tablet.removeButton(showScopeButton);
+ tablet.removeButton(pauseScopeButton);
+ tablet.removeButton(autoPauseScopeButton);
+ });
+
+ Audio.noiseGateOpened.connect(function(){
+ if (autoPause) {
+ setScopePause(false);
+ }
+ });
+
+ Audio.noiseGateClosed.connect(function(){
+ // noise gate closed
+ if (autoPause) {
+ setScopePause(true);
+ }
+ });
+
+}()); // END LOCAL_SCOPE
\ No newline at end of file
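
For reference, a minimal client-script sketch (not part of this patch) of how the new double-press overlay signals could be consumed, assuming the script-facing "Overlays" global exposes mouseDoublePressOnOverlay / mouseDoublePressOffOverlay the same way it already exposes mousePressOnOverlay:

// doublePressLogger.js -- hypothetical example, not included in this change.
// Listens for the double-press signals added to Overlays above.
(function () { // BEGIN LOCAL_SCOPE

    function onOverlayDoublePress(overlayID, event) {
        // event is the PointerEvent built by Overlays::mouseDoublePressEvent()
        print("Double press on overlay " + overlayID);
    }

    function offOverlayDoublePress() {
        print("Double press, but not over any overlay");
    }

    Overlays.mouseDoublePressOnOverlay.connect(onOverlayDoublePress);
    Overlays.mouseDoublePressOffOverlay.connect(offOverlayDoublePress);

    Script.scriptEnding.connect(function () {
        Overlays.mouseDoublePressOnOverlay.disconnect(onOverlayDoublePress);
        Overlays.mouseDoublePressOffOverlay.disconnect(offOverlayDoublePress);
    });

}()); // END LOCAL_SCOPE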