Merge pull request #14880 from kencooke/audio-meter-improvements

Case 21091: Improved audio metering

Commit a23f436a9f
7 changed files with 237 additions and 100 deletions
@@ -16,7 +16,13 @@ import TabletScriptingInterface 1.0
 Rectangle {
     readonly property var level: AudioScriptingInterface.inputLevel;
 
+    property bool gated: false;
+    Component.onCompleted: {
+        AudioScriptingInterface.noiseGateOpened.connect(function() { gated = false; });
+        AudioScriptingInterface.noiseGateClosed.connect(function() { gated = true; });
+    }
+
     property bool standalone: false;
     property var dragTarget: null;
@@ -77,6 +83,7 @@ Rectangle {
         readonly property string gutter: "#575757";
         readonly property string greenStart: "#39A38F";
         readonly property string greenEnd: "#1FC6A6";
+        readonly property string yellow: "#C0C000";
         readonly property string red: colors.muted;
         readonly property string fill: "#55000000";
         readonly property string border: standalone ? "#80FFFFFF" : "#55FFFFFF";
@@ -189,7 +196,7 @@ Rectangle {
 
         Rectangle { // mask
             id: mask;
-            width: parent.width * level;
+            width: gated ? 0 : parent.width * level;
             radius: 5;
             anchors {
                 bottom: parent.bottom;
@@ -212,18 +219,42 @@
                     color: colors.greenStart;
                 }
                 GradientStop {
-                    position: 0.8;
+                    position: 0.5;
                     color: colors.greenEnd;
                 }
                 GradientStop {
                     position: 0.81;
                     color: colors.red;
                 }
                 GradientStop {
                     position: 1;
-                    color: colors.red;
+                    color: colors.yellow;
                }
            }
        }
+
+        Rectangle {
+            id: gatedIndicator;
+            visible: gated && !AudioScriptingInterface.clipping
+
+            radius: 4;
+            width: 2 * radius;
+            height: 2 * radius;
+            color: "#0080FF";
+            anchors {
+                right: parent.left;
+                verticalCenter: parent.verticalCenter;
+            }
+        }
+
+        Rectangle {
+            id: clippingIndicator;
+            visible: AudioScriptingInterface.clipping
+
+            radius: 4;
+            width: 2 * radius;
+            height: 2 * radius;
+            color: colors.red;
+            anchors {
+                left: parent.right;
+                verticalCenter: parent.verticalCenter;
+            }
+        }
    }
 }
@@ -15,6 +15,7 @@
 
 #include "Application.h"
 #include "AudioClient.h"
+#include "AudioHelpers.h"
 #include "ui/AvatarInputs.h"
 
 using namespace scripting;
@@ -26,26 +27,9 @@ QString Audio::HMD { "VR" };
 Setting::Handle<bool> enableNoiseReductionSetting { QStringList { Audio::AUDIO, "NoiseReduction" }, true };
 
 float Audio::loudnessToLevel(float loudness) {
-    const float LOG2 = log(2.0f);
-    const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
-    const float LOG2_LOUDNESS_FLOOR = 11.0f;
-
-    float level = 0.0f;
-
-    loudness += 1.0f;
-    float log2loudness = logf(loudness) / LOG2;
-
-    if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
-        level = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE;
-    } else {
-        level = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE;
-    }
-
-    if (level > 1.0f) {
-        level = 1.0;
-    }
-
-    return level;
+    float level = 6.02059991f * fastLog2f(loudness);   // level in dBFS
+    level = (level + 48.0f) * (1/39.0f);                // map [-48, -9] dBFS to [0, 1]
+    return glm::clamp(level, 0.0f, 1.0f);
 }
 
 Audio::Audio() : _devices(_contextIsHMD) {
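For reference, 6.02059991 is 20 * log10(2), so the first new line is 20 * log10(loudness), i.e. the block loudness (a mean absolute sample value normalized to full scale, see computeLoudness later in this change) expressed in dBFS; the second line maps the 39 dB window from -48 dBFS to -9 dBFS onto the 0-1 meter range. A minimal standalone sketch of the same mapping, with std::log2 standing in for the engine's fastLog2f (assumed here to be a fast log2 approximation):

#include <algorithm>
#include <cmath>
#include <cstdio>

// Same mapping as the new Audio::loudnessToLevel(), written against the
// standard library only.
static float loudnessToLevelSketch(float loudness) {
    float dbfs = 6.02059991f * std::log2(loudness);   // == 20 * log10(loudness)
    float level = (dbfs + 48.0f) / 39.0f;             // -48 dBFS -> 0.0, -9 dBFS -> 1.0
    return std::clamp(level, 0.0f, 1.0f);
}

int main() {
    // A mean absolute amplitude of 1/16 of full scale is about -24.1 dBFS,
    // which lands near 0.61 on the meter.
    std::printf("%.2f\n", loudnessToLevelSketch(0.0625f));
    return 0;
}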
@@ -150,18 +134,33 @@ float Audio::getInputLevel() const {
     });
 }
 
-void Audio::onInputLoudnessChanged(float loudness) {
+bool Audio::isClipping() const {
+    return resultWithReadLock<bool>([&] {
+        return _isClipping;
+    });
+}
+
+void Audio::onInputLoudnessChanged(float loudness, bool isClipping) {
     float level = loudnessToLevel(loudness);
-    bool changed = false;
+    bool levelChanged = false;
+    bool isClippingChanged = false;
 
     withWriteLock([&] {
         if (_inputLevel != level) {
             _inputLevel = level;
-            changed = true;
+            levelChanged = true;
         }
+        if (_isClipping != isClipping) {
+            _isClipping = isClipping;
+            isClippingChanged = true;
+        }
     });
-    if (changed) {
+    if (levelChanged) {
         emit inputLevelChanged(level);
     }
+    if (isClippingChanged) {
+        emit clippingChanged(isClipping);
+    }
 }
 
 QString Audio::getContext() const {
@@ -41,6 +41,7 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
  *     above the noise floor.
  * @property {number} inputLevel - The loudness of the audio input, range <code>0.0</code> (no sound) –
  *     <code>1.0</code> (the onset of clipping). <em>Read-only.</em>
+ * @property {boolean} clipping - <code>true</code> if the audio input is clipping, otherwise <code>false</code>.
  * @property {number} inputVolume - Adjusts the volume of the input audio; range <code>0.0</code> – <code>1.0</code>.
  *     If set to a value, the resulting value depends on the input device: for example, the volume can't be changed on some
  *     devices, and others might only support values of <code>0.0</code> and <code>1.0</code>.
@@ -58,6 +59,7 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
     Q_PROPERTY(bool noiseReduction READ noiseReductionEnabled WRITE enableNoiseReduction NOTIFY noiseReductionChanged)
     Q_PROPERTY(float inputVolume READ getInputVolume WRITE setInputVolume NOTIFY inputVolumeChanged)
     Q_PROPERTY(float inputLevel READ getInputLevel NOTIFY inputLevelChanged)
+    Q_PROPERTY(bool clipping READ isClipping NOTIFY clippingChanged)
     Q_PROPERTY(QString context READ getContext NOTIFY contextChanged)
     Q_PROPERTY(AudioDevices* devices READ getDevices NOTIFY nop)
@@ -74,6 +76,7 @@ public:
     bool noiseReductionEnabled() const;
     float getInputVolume() const;
     float getInputLevel() const;
+    bool isClipping() const;
     QString getContext() const;
 
     void showMicMeter(bool show);
@@ -217,6 +220,14 @@ signals:
      */
     void inputLevelChanged(float level);
 
+    /**jsdoc
+     * Triggered when the clipping state of the input audio changes.
+     * @function Audio.clippingChanged
+     * @param {boolean} isClipping - <code>true</code> if the audio input is clipping, otherwise <code>false</code>.
+     * @returns {Signal}
+     */
+    void clippingChanged(bool isClipping);
+
     /**jsdoc
      * Triggered when the current context of the audio changes.
      * @function Audio.contextChanged
@@ -237,7 +248,7 @@ private slots:
     void setMuted(bool muted);
     void enableNoiseReduction(bool enable);
     void setInputVolume(float volume);
-    void onInputLoudnessChanged(float loudness);
+    void onInputLoudnessChanged(float loudness, bool isClipping);
 
 protected:
     // Audio must live on a separate thread from AudioClient to avoid deadlocks
@@ -247,6 +258,7 @@ private:
 
     float _inputVolume { 1.0f };
     float _inputLevel { 0.0f };
+    bool _isClipping { false };
     bool _isMuted { false };
     bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
     bool _contextIsHMD { false };
@@ -170,6 +170,57 @@ static void channelDownmix(int16_t* source, int16_t* dest, int numSamples) {
     }
 }
 
+static float computeLoudness(int16_t* samples, int numSamples, int numChannels, bool& isClipping) {
+
+    const int32_t CLIPPING_THRESHOLD = 32392;   // -0.1 dBFS
+    const int32_t CLIPPING_DETECTION = 3;       // consecutive samples over threshold
+
+    float scale = numSamples ? 1.0f / (numSamples * 32768.0f) : 0.0f;
+
+    int32_t loudness = 0;
+    isClipping = false;
+
+    if (numChannels == 2) {
+        int32_t oversLeft = 0;
+        int32_t oversRight = 0;
+
+        for (int i = 0; i < numSamples/2; i++) {
+            int32_t left = std::abs((int32_t)samples[2*i+0]);
+            int32_t right = std::abs((int32_t)samples[2*i+1]);
+
+            loudness += left;
+            loudness += right;
+
+            if (left > CLIPPING_THRESHOLD) {
+                isClipping |= (++oversLeft >= CLIPPING_DETECTION);
+            } else {
+                oversLeft = 0;
+            }
+            if (right > CLIPPING_THRESHOLD) {
+                isClipping |= (++oversRight >= CLIPPING_DETECTION);
+            } else {
+                oversRight = 0;
+            }
+        }
+    } else {
+        int32_t overs = 0;
+
+        for (int i = 0; i < numSamples; i++) {
+            int32_t sample = std::abs((int32_t)samples[i]);
+
+            loudness += sample;
+
+            if (sample > CLIPPING_THRESHOLD) {
+                isClipping |= (++overs >= CLIPPING_DETECTION);
+            } else {
+                overs = 0;
+            }
+        }
+    }
+
+    return (float)loudness * scale;
+}
+
 static inline float convertToFloat(int16_t sample) {
     return (float)sample * (1 / 32768.0f);
 }
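Two constants here deserve a quick sanity check: 32392 is -0.1 dBFS expressed as a 16-bit sample magnitude (32768 * 10^(-0.1/20) ≈ 32392.9, truncated), and CLIPPING_DETECTION = 3 means a single stray full-scale sample does not count as clipping; only a run of three consecutive over-threshold samples does. A small verification sketch:

#include <cmath>
#include <cstdio>

int main() {
    // -0.1 dBFS as a 16-bit sample magnitude: 32768 * 10^(-0.1 / 20)
    double threshold = 32768.0 * std::pow(10.0, -0.1 / 20.0);
    std::printf("%.1f\n", threshold);   // prints 32392.9; the diff uses the truncated 32392
    return 0;
}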
@@ -1075,45 +1126,25 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
 
 void AudioClient::handleAudioInput(QByteArray& audioBuffer) {
     if (!_audioPaused) {
-        if (_muted) {
-            _lastInputLoudness = 0.0f;
-            _timeSinceLastClip = 0.0f;
-        } else {
+
+        bool audioGateOpen = false;
+
+        if (!_muted) {
             int16_t* samples = reinterpret_cast<int16_t*>(audioBuffer.data());
             int numSamples = audioBuffer.size() / AudioConstants::SAMPLE_SIZE;
             int numFrames = numSamples / (_isStereoInput ? AudioConstants::STEREO : AudioConstants::MONO);
 
             if (_isNoiseGateEnabled) {
                 // The audio gate includes DC removal
-                _audioGate->render(samples, samples, numFrames);
+                audioGateOpen = _audioGate->render(samples, samples, numFrames);
             } else {
-                _audioGate->removeDC(samples, samples, numFrames);
+                audioGateOpen = _audioGate->removeDC(samples, samples, numFrames);
             }
 
-            int32_t loudness = 0;
-            assert(numSamples < 65536); // int32_t loudness cannot overflow
-            bool didClip = false;
-            for (int i = 0; i < numSamples; ++i) {
-                const int32_t CLIPPING_THRESHOLD = (int32_t)(AudioConstants::MAX_SAMPLE_VALUE * 0.9f);
-                int32_t sample = std::abs((int32_t)samples[i]);
-                loudness += sample;
-                didClip |= (sample > CLIPPING_THRESHOLD);
-            }
-            _lastInputLoudness = (float)loudness / numSamples;
-
-            if (didClip) {
-                _timeSinceLastClip = 0.0f;
-            } else if (_timeSinceLastClip >= 0.0f) {
-                _timeSinceLastClip += (float)numSamples / (float)AudioConstants::SAMPLE_RATE;
-            }
-
             emit inputReceived(audioBuffer);
         }
 
-        emit inputLoudnessChanged(_lastInputLoudness);
-
-        // state machine to detect gate opening and closing
-        bool audioGateOpen = (_lastInputLoudness != 0.0f);
+        // detect gate opening and closing
         bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
         bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
         _audioGateOpen = audioGateOpen;
@@ -1186,10 +1217,27 @@ void AudioClient::handleMicAudioInput() {
     static int16_t networkAudioSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
 
     while (_inputRingBuffer.samplesAvailable() >= inputSamplesRequired) {
-        if (_muted) {
-            _inputRingBuffer.shiftReadPosition(inputSamplesRequired);
-        } else {
-            _inputRingBuffer.readSamples(inputAudioSamples.get(), inputSamplesRequired);
 
+        _inputRingBuffer.readSamples(inputAudioSamples.get(), inputSamplesRequired);
+
+        // detect loudness and clipping on the raw input
+        bool isClipping = false;
+        float inputLoudness = computeLoudness(inputAudioSamples.get(), inputSamplesRequired, _inputFormat.channelCount(), isClipping);
+
+        float tc = (inputLoudness > _lastInputLoudness) ? 0.378f : 0.967f;  // 10ms attack, 300ms release @ 100Hz
+        inputLoudness += tc * (_lastInputLoudness - inputLoudness);
+        _lastInputLoudness = inputLoudness;
+
+        if (isClipping) {
+            _timeSinceLastClip = 0.0f;
+        } else if (_timeSinceLastClip >= 0.0f) {
+            _timeSinceLastClip += AudioConstants::NETWORK_FRAME_SECS;
+        }
+        isClipping = (_timeSinceLastClip >= 0.0f) && (_timeSinceLastClip < 2.0f);  // 2 second hold time
+
+        emit inputLoudnessChanged(_lastInputLoudness, isClipping);
+
+        if (!_muted) {
             possibleResampling(_inputToNetworkResampler,
                                inputAudioSamples.get(), networkAudioSamples,
                                inputSamplesRequired, numNetworkSamples,
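The smoother is a one-pole filter, y = x + tc * (yPrev - x), run once per network block. Blocks arrive at roughly 100 Hz (10 ms), so a coefficient tc = exp(-blockPeriod / timeConstant) gives exp(-10/300) ≈ 0.967 for the 300 ms release, matching the constant above; the 0.378 attack value is close to exp(-1) ≈ 0.368 for a 10 ms time constant (the exact derivation may differ slightly). The separate 2-second hold then keeps isClipping true for two seconds after the last detected clip so the indicator stays visible. A sketch of the coefficient arithmetic, under the 100 Hz block-rate assumption:

#include <cmath>
#include <cstdio>

int main() {
    // One-pole smoother: y = x + tc * (yPrev - x), with tc = exp(-T / tau).
    const double T = 0.010;   // 10 ms network block period (assumed 100 Hz block rate)
    std::printf("attack  (tau = 10 ms):  %.3f\n", std::exp(-T / 0.010));   // ~0.368
    std::printf("release (tau = 300 ms): %.3f\n", std::exp(-T / 0.300));   // ~0.967
    return 0;
}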
@@ -248,7 +248,7 @@ signals:
     void noiseReductionChanged(bool noiseReductionEnabled);
     void mutedByMixer();
     void inputReceived(const QByteArray& inputSamples);
-    void inputLoudnessChanged(float loudness);
+    void inputLoudnessChanged(float loudness, bool isClipping);
     void outputBytesToNetwork(int numBytes);
     void inputBytesFromNetwork(int numBytes);
     void noiseGateOpened();
@@ -138,8 +138,8 @@ public:
     int32_t hysteresis(int32_t peak);
     int32_t envelope(int32_t attn);
 
-    virtual void process(int16_t* input, int16_t* output, int numFrames) = 0;
-    virtual void removeDC(int16_t* input, int16_t* output, int numFrames) = 0;
+    virtual bool process(int16_t* input, int16_t* output, int numFrames) = 0;
+    virtual bool removeDC(int16_t* input, int16_t* output, int numFrames) = 0;
 };
 
 GateImpl::GateImpl(int sampleRate) {
@@ -403,14 +403,15 @@ public:
     GateMono(int sampleRate) : GateImpl(sampleRate) {}
 
     // mono input/output (in-place is allowed)
-    void process(int16_t* input, int16_t* output, int numFrames) override;
-    void removeDC(int16_t* input, int16_t* output, int numFrames) override;
+    bool process(int16_t* input, int16_t* output, int numFrames) override;
+    bool removeDC(int16_t* input, int16_t* output, int numFrames) override;
 };
 
 template<int N>
-void GateMono<N>::process(int16_t* input, int16_t* output, int numFrames) {
+bool GateMono<N>::process(int16_t* input, int16_t* output, int numFrames) {
 
     clearHistogram();
+    int32_t mask = 0;
 
     for (int n = 0; n < numFrames; n++) {
@@ -453,15 +454,21 @@ void GateMono<N>::process(int16_t* input, int16_t* output, int numFrames) {
         x = MULQ31(x, attn);
 
         // store 16-bit output
-        output[n] = (int16_t)saturateQ30(x);
+        x = saturateQ30(x);
+        output[n] = (int16_t)x;
+
+        mask |= x;
     }
 
     // update adaptive threshold
     processHistogram(numFrames);
+    return mask != 0;
 }
 
 template<int N>
-void GateMono<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
+bool GateMono<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
 
+    int32_t mask = 0;
+
     for (int n = 0; n < numFrames; n++) {
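The return value is built with a simple trick: every saturated output sample is OR-ed into mask, so mask is non-zero exactly when at least one sample in the block is non-zero, i.e. when the gate passed any audio. The same idiom on a plain buffer, as a minimal sketch:

#include <cstdint>
#include <cstdio>

// Returns true when any sample in the block is non-zero -- the same
// accumulate-into-a-mask idiom the gate uses to report its open state.
static bool blockHasSignal(const int16_t* samples, int numSamples) {
    int32_t mask = 0;
    for (int i = 0; i < numSamples; i++) {
        mask |= samples[i];
    }
    return mask != 0;
}

int main() {
    int16_t silent[4] = { 0, 0, 0, 0 };
    int16_t active[4] = { 0, -3, 0, 0 };
    std::printf("%d %d\n", blockHasSignal(silent, 4), blockHasSignal(active, 4));   // 0 1
    return 0;
}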
@@ -471,8 +478,13 @@ void GateMono<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
         _dc.process(x);
 
         // store 16-bit output
-        output[n] = (int16_t)saturateQ30(x);
+        x = saturateQ30(x);
+        output[n] = (int16_t)x;
+
+        mask |= x;
     }
 
+    return mask != 0;
 }
 
 //
@@ -489,14 +501,15 @@ public:
     GateStereo(int sampleRate) : GateImpl(sampleRate) {}
 
     // interleaved stereo input/output (in-place is allowed)
-    void process(int16_t* input, int16_t* output, int numFrames) override;
-    void removeDC(int16_t* input, int16_t* output, int numFrames) override;
+    bool process(int16_t* input, int16_t* output, int numFrames) override;
+    bool removeDC(int16_t* input, int16_t* output, int numFrames) override;
 };
 
 template<int N>
-void GateStereo<N>::process(int16_t* input, int16_t* output, int numFrames) {
+bool GateStereo<N>::process(int16_t* input, int16_t* output, int numFrames) {
 
     clearHistogram();
+    int32_t mask = 0;
 
     for (int n = 0; n < numFrames; n++) {
@@ -541,16 +554,23 @@ void GateStereo<N>::process(int16_t* input, int16_t* output, int numFrames) {
         x1 = MULQ31(x1, attn);
 
         // store 16-bit output
-        output[2*n+0] = (int16_t)saturateQ30(x0);
-        output[2*n+1] = (int16_t)saturateQ30(x1);
+        x0 = saturateQ30(x0);
+        x1 = saturateQ30(x1);
+        output[2*n+0] = (int16_t)x0;
+        output[2*n+1] = (int16_t)x1;
+
+        mask |= (x0 | x1);
     }
 
     // update adaptive threshold
     processHistogram(numFrames);
+    return mask != 0;
 }
 
 template<int N>
-void GateStereo<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
+bool GateStereo<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
 
+    int32_t mask = 0;
+
     for (int n = 0; n < numFrames; n++) {
@@ -561,9 +581,15 @@ void GateStereo<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
         _dc.process(x0, x1);
 
         // store 16-bit output
-        output[2*n+0] = (int16_t)saturateQ30(x0);
-        output[2*n+1] = (int16_t)saturateQ30(x1);
+        x0 = saturateQ30(x0);
+        x1 = saturateQ30(x1);
+        output[2*n+0] = (int16_t)x0;
+        output[2*n+1] = (int16_t)x1;
+
+        mask |= (x0 | x1);
     }
 
+    return mask != 0;
 }
 
 //
@@ -580,14 +606,15 @@ public:
     GateQuad(int sampleRate) : GateImpl(sampleRate) {}
 
     // interleaved quad input/output (in-place is allowed)
-    void process(int16_t* input, int16_t* output, int numFrames) override;
-    void removeDC(int16_t* input, int16_t* output, int numFrames) override;
+    bool process(int16_t* input, int16_t* output, int numFrames) override;
+    bool removeDC(int16_t* input, int16_t* output, int numFrames) override;
 };
 
 template<int N>
-void GateQuad<N>::process(int16_t* input, int16_t* output, int numFrames) {
+bool GateQuad<N>::process(int16_t* input, int16_t* output, int numFrames) {
 
     clearHistogram();
+    int32_t mask = 0;
 
     for (int n = 0; n < numFrames; n++) {
@@ -636,18 +663,27 @@ void GateQuad<N>::process(int16_t* input, int16_t* output, int numFrames) {
         x3 = MULQ31(x3, attn);
 
         // store 16-bit output
-        output[4*n+0] = (int16_t)saturateQ30(x0);
-        output[4*n+1] = (int16_t)saturateQ30(x1);
-        output[4*n+2] = (int16_t)saturateQ30(x2);
-        output[4*n+3] = (int16_t)saturateQ30(x3);
+        x0 = saturateQ30(x0);
+        x1 = saturateQ30(x1);
+        x2 = saturateQ30(x2);
+        x3 = saturateQ30(x3);
+        output[4*n+0] = (int16_t)x0;
+        output[4*n+1] = (int16_t)x1;
+        output[4*n+2] = (int16_t)x2;
+        output[4*n+3] = (int16_t)x3;
+
+        mask |= (x0 | x1 | x2 | x3);
     }
 
     // update adaptive threshold
     processHistogram(numFrames);
+    return mask != 0;
 }
 
 template<int N>
-void GateQuad<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
+bool GateQuad<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
 
+    int32_t mask = 0;
+
     for (int n = 0; n < numFrames; n++) {
@@ -660,11 +696,19 @@ void GateQuad<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
         _dc.process(x0, x1, x2, x3);
 
         // store 16-bit output
-        output[4*n+0] = (int16_t)saturateQ30(x0);
-        output[4*n+1] = (int16_t)saturateQ30(x1);
-        output[4*n+2] = (int16_t)saturateQ30(x2);
-        output[4*n+3] = (int16_t)saturateQ30(x3);
+        x0 = saturateQ30(x0);
+        x1 = saturateQ30(x1);
+        x2 = saturateQ30(x2);
+        x3 = saturateQ30(x3);
+        output[4*n+0] = (int16_t)x0;
+        output[4*n+1] = (int16_t)x1;
+        output[4*n+2] = (int16_t)x2;
+        output[4*n+3] = (int16_t)x3;
+
+        mask |= (x0 | x1 | x2 | x3);
     }
 
+    return mask != 0;
 }
 
 //
@@ -721,12 +765,12 @@ AudioGate::~AudioGate() {
     delete _impl;
 }
 
-void AudioGate::render(int16_t* input, int16_t* output, int numFrames) {
-    _impl->process(input, output, numFrames);
+bool AudioGate::render(int16_t* input, int16_t* output, int numFrames) {
+    return _impl->process(input, output, numFrames);
 }
 
-void AudioGate::removeDC(int16_t* input, int16_t* output, int numFrames) {
-    _impl->removeDC(input, output, numFrames);
+bool AudioGate::removeDC(int16_t* input, int16_t* output, int numFrames) {
+    return _impl->removeDC(input, output, numFrames);
 }
 
 void AudioGate::setThreshold(float threshold) {
@@ -18,9 +18,12 @@ public:
     AudioGate(int sampleRate, int numChannels);
     ~AudioGate();
 
-    // interleaved int16_t input/output (in-place is allowed)
-    void render(int16_t* input, int16_t* output, int numFrames);
-    void removeDC(int16_t* input, int16_t* output, int numFrames);
+    //
+    // Process interleaved int16_t input/output (in-place is allowed).
+    // Returns true when output is non-zero.
+    //
+    bool render(int16_t* input, int16_t* output, int numFrames);
+    bool removeDC(int16_t* input, int16_t* output, int numFrames);
 
     void setThreshold(float threshold);
     void setRelease(float release);
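With the boolean return in place, a caller no longer needs the old "loudness != 0" heuristic to decide whether the gate is open; it can edge-detect the return value directly, which is what handleAudioInput above now does. A hypothetical caller sketch (not part of this diff), assuming the AudioGate declaration from this header is available:

#include <cstdint>

// Tracks gate state across blocks and reports open/close transitions,
// mirroring the openedInLastBlock / closedInLastBlock logic in AudioClient.
struct GateWatcher {
    bool wasOpen = false;

    // Returns +1 when the gate just opened, -1 when it just closed, 0 otherwise.
    int update(AudioGate& gate, int16_t* samples, int numFrames) {
        bool open = gate.render(samples, samples, numFrames);   // in-place is allowed
        int edge = (open && !wasOpen) ? +1 : (!open && wasOpen) ? -1 : 0;
        wasOpen = open;
        return edge;
    }
};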