mirror of
https://github.com/overte-org/overte.git
synced 2025-04-23 03:33:38 +02:00
add various scripting interfaces for managing the audio scope and detecting noise gate transitions
This commit is contained in:
parent
69fdea3621
commit
eaea718de1
8 changed files with 47 additions and 16 deletions
interface/src
libraries
audio-client/src
script-engine/src
|
@ -1182,6 +1182,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// set the local loopback interface for local sounds
|
||||
AudioInjector::setLocalAudioInterface(audioIO.data());
|
||||
AudioScriptingInterface::getInstance().setLocalAudioInterface(audioIO.data());
|
||||
connect(audioIO.data(), &AudioClient::noiseGateOpened, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateOpened);
|
||||
connect(audioIO.data(), &AudioClient::noiseGateClosed, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateClosed);
|
||||
connect(audioIO.data(), &AudioClient::inputReceived, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::inputReceived);
|
||||
|
||||
|
||||
this->installEventFilter(this);
|
||||
|
||||
|
@ -1947,6 +1951,9 @@ void Application::initializeUi() {
|
|||
rootContext->setContextProperty("ApplicationInterface", this);
|
||||
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
|
||||
rootContext->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
//rootContext->setContextProperty("AudioScope", DependencyManager::get<AudioScope>.data());
|
||||
|
||||
|
||||
rootContext->setContextProperty("Controller", DependencyManager::get<controller::ScriptingInterface>().data());
|
||||
rootContext->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
|
||||
_fileDownload = new FileScriptingInterface(engine);
|
||||
|
@ -5521,6 +5528,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
scriptEngine->registerGlobalObject("AudioScope", DependencyManager::get<AudioScope>().data());
|
||||
|
||||
// Caches
|
||||
scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());
|
||||
|
|
|
@ -52,12 +52,14 @@ AudioScope::AudioScope() :
|
|||
connect(audioIO.data(), &AudioClient::inputReceived, this, &AudioScope::addInputToScope);
|
||||
}
|
||||
|
||||
void AudioScope::toggle() {
|
||||
_isEnabled = !_isEnabled;
|
||||
if (_isEnabled) {
|
||||
allocateScope();
|
||||
} else {
|
||||
freeScope();
|
||||
void AudioScope::setVisible(bool visible) {
|
||||
if (_isEnabled != visible) {
|
||||
_isEnabled = visible;
|
||||
if (_isEnabled) {
|
||||
allocateScope();
|
||||
} else {
|
||||
freeScope();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,8 +34,14 @@ public:
|
|||
void render(RenderArgs* renderArgs, int width, int height);
|
||||
|
||||
public slots:
|
||||
void toggle();
|
||||
void toggle() { setVisible(!_isEnabled); }
|
||||
void setVisible(bool visible);
|
||||
bool getVisible() const { return _isEnabled; }
|
||||
|
||||
void togglePause() { _isPaused = !_isPaused; }
|
||||
void setPause(bool paused) { _isPaused = paused; }
|
||||
bool getPause() { return _isPaused; }
|
||||
|
||||
void selectAudioScopeFiveFrames();
|
||||
void selectAudioScopeTwentyFrames();
|
||||
void selectAudioScopeFiftyFrames();
|
||||
|
@ -74,7 +80,6 @@ private:
|
|||
int _inputID;
|
||||
int _outputLeftID;
|
||||
int _outputRightD;
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_AudioScope_h
|
||||
|
|
|
@ -1024,6 +1024,7 @@ void AudioClient::handleAudioInput() {
|
|||
if (_inputGate.clippedInLastFrame()) {
|
||||
_timeSinceLastClip = 0.0f;
|
||||
}
|
||||
|
||||
} else {
|
||||
float loudness = 0.0f;
|
||||
|
||||
|
@ -1041,6 +1042,13 @@ void AudioClient::handleAudioInput() {
|
|||
|
||||
emit inputReceived({ reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes });
|
||||
|
||||
if (_inputGate.openedInLastFrame()) {
|
||||
emit noiseGateOpened();
|
||||
}
|
||||
if (_inputGate.closedInLastFrame()) {
|
||||
emit noiseGateClosed();
|
||||
}
|
||||
|
||||
} else {
|
||||
// our input loudness is 0, since we're muted
|
||||
_lastInputLoudness = 0;
|
||||
|
|
|
@ -218,6 +218,8 @@ signals:
|
|||
void inputReceived(const QByteArray& inputSamples);
|
||||
void outputBytesToNetwork(int numBytes);
|
||||
void inputBytesFromNetwork(int numBytes);
|
||||
void noiseGateOpened();
|
||||
void noiseGateClosed();
|
||||
|
||||
void changeDevice(const QAudioDeviceInfo& outputDeviceInfo);
|
||||
void deviceChanged();
|
||||
|
|
|
@ -141,15 +141,16 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
|
|||
|
||||
}
|
||||
|
||||
bool isOpeningGate = false;
|
||||
_closedInLastFrame = false;
|
||||
_openedInLastFrame = false;
|
||||
|
||||
if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
|
||||
isOpeningGate = !_isOpen;
|
||||
_openedInLastFrame = !_isOpen;
|
||||
_isOpen = true;
|
||||
_framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
|
||||
} else {
|
||||
if (--_framesToClose == 0) {
|
||||
_closedInLastFrame = !_isOpen;
|
||||
_closedInLastFrame = _isOpen;
|
||||
_isOpen = false;
|
||||
}
|
||||
}
|
||||
|
@ -166,7 +167,7 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
|
|||
_lastLoudness = 0;
|
||||
}
|
||||
|
||||
if (isOpeningGate) {
|
||||
if (_openedInLastFrame) {
|
||||
// would be nice to do a little crossfade from silence
|
||||
for (int i = 0; i < numSamples; i++) {
|
||||
float fadedSample = ((float)i / (float)numSamples) * (float)samples[i];
|
||||
|
|
|
@ -25,6 +25,7 @@ public:
|
|||
|
||||
bool clippedInLastFrame() const { return _didClipInLastFrame; }
|
||||
bool closedInLastFrame() const { return _closedInLastFrame; }
|
||||
bool openedInLastFrame() const { return _openedInLastFrame; }
|
||||
float getMeasuredFloor() const { return _measuredFloor; }
|
||||
float getLastLoudness() const { return _lastLoudness; }
|
||||
|
||||
|
@ -42,6 +43,7 @@ private:
|
|||
int _sampleCounter;
|
||||
bool _isOpen;
|
||||
bool _closedInLastFrame { false };
|
||||
bool _openedInLastFrame { false };
|
||||
int _framesToClose;
|
||||
};
|
||||
|
||||
|
|
|
@ -32,10 +32,13 @@ protected:
|
|||
Q_INVOKABLE void setStereoInput(bool stereo);
|
||||
|
||||
signals:
|
||||
void mutedByMixer();
|
||||
void environmentMuted();
|
||||
void receivedFirstPacket();
|
||||
void disconnected();
|
||||
void mutedByMixer(); /// the client has been muted by the mixer
|
||||
void environmentMuted(); /// the entire environment has been muted by the mixer
|
||||
void receivedFirstPacket(); /// the client has received its first packet from the audio mixer
|
||||
void disconnected(); /// the client has been disconnected from the audio mixer
|
||||
void noiseGateOpened(); /// the noise gate has opened
|
||||
void noiseGateClosed(); /// the noise gate has closed
|
||||
void inputReceived(const QByteArray& inputSamples); /// a frame of mic input audio has been received and processed
|
||||
|
||||
private:
|
||||
AudioScriptingInterface();
|
||||
|
|
Loading…
Reference in a new issue