Merge pull request #8936 from zzmp/clean/audio-mixer

clean audio mixer
Howard Stearns 2016-10-27 15:57:16 -07:00 committed by GitHub
commit d2b83b47ba
4 changed files with 82 additions and 81 deletions


@@ -90,8 +90,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
                                               PacketType::InjectAudio, PacketType::SilentAudioFrame,
                                               PacketType::AudioStreamStats },
                                             this, "handleNodeAudioPacket");
-    packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
     packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
+    packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
     packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");
 
     connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
@@ -481,6 +481,7 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
 }
 
 void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
+    getOrCreateClientData(sendingNode.data());
     DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
 }
@@ -579,18 +580,8 @@ void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> mess
         }
     }
 
-    auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
-
-    // FIXME - why would we not have client data at this point??
-    if (!clientData) {
-        qDebug() << "UNEXPECTED -- didn't have node linked data in " << __FUNCTION__;
-        sendingNode->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(sendingNode->getUUID()) });
-        clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
-        connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
-    }
+    auto clientData = getOrCreateClientData(sendingNode.data());
 
     clientData->setupCodec(selectedCodec, selectedCodecName);
     qDebug() << "selectedCodecName:" << selectedCodecName;
     clientData->sendSelectAudioFormat(sendingNode, selectedCodecName);
 }
@@ -709,17 +700,24 @@ void AudioMixer::run() {
     ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
 }
 
+AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
+    auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
+
+    if (!clientData) {
+        node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
+        clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
+        connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
+    }
+
+    return clientData;
+}
+
 void AudioMixer::domainSettingsRequestComplete() {
     auto nodeList = DependencyManager::get<NodeList>();
 
     nodeList->addNodeTypeToInterestSet(NodeType::Agent);
-    nodeList->linkedDataCreateCallback = [&](Node* node) {
-        node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
-        auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
-        connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
-    };
+    nodeList->linkedDataCreateCallback = [&](Node* node) { getOrCreateClientData(node); };
 
     DomainHandler& domainHandler = nodeList->getDomainHandler();
     const QJsonObject& settingsObject = domainHandler.getSettingsObject();
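
For context, the getOrCreateClientData helper added above folds three copies of the same "create linked data if missing, then connect its signal" block into one place. Below is a minimal sketch of that lazy get-or-create idiom, using hypothetical stand-in types rather than the real Node/NodeData/AudioMixerClientData classes, and omitting the Qt signal connection:

#include <iostream>
#include <memory>

// Hypothetical stand-ins for the real node / linked-data types.
struct ClientData {
    int nodeID;
    explicit ClientData(int id) : nodeID(id) {}
};

struct Node {
    int id;
    std::unique_ptr<ClientData> linkedData;
};

// Return the per-node state, allocating it exactly once on first use.
// The real helper also connects injectorStreamFinished at creation time.
ClientData* getOrCreateClientData(Node* node) {
    if (!node->linkedData) {
        node->linkedData = std::make_unique<ClientData>(node->id);
    }
    return node->linkedData.get();
}

int main() {
    Node node { 42, nullptr };
    ClientData* first = getOrCreateClientData(&node);   // allocates
    ClientData* second = getOrCreateClientData(&node);  // reuses
    std::cout << std::boolalpha << (first == second) << std::endl;  // prints "true"
    return 0;
}

Centralizing the allocation means the packet handlers and the linkedDataCreateCallback all go through the same setup path instead of three slightly different ones.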
@@ -732,72 +730,64 @@ void AudioMixer::domainSettingsRequestComplete() {
 }
 
 void AudioMixer::broadcastMixes() {
+    const int TRAILING_AVERAGE_FRAMES = 100;
+    const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
+    const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
+    const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
+    const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
+    const float RATIO_BACK_OFF = 0.02f;
+
     auto nodeList = DependencyManager::get<NodeList>();
 
     auto nextFrameTimestamp = p_high_resolution_clock::now();
     auto timeToSleep = std::chrono::microseconds(0);
 
-    const int TRAILING_AVERAGE_FRAMES = 100;
+    int currentFrame = 1;
+    int numFramesPerSecond = (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC);
     int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
 
-    int currentFrame { 1 };
-    int numFramesPerSecond { (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC) };
-
     while (!_isFinished) {
-        const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
-        const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
-
-        const float RATIO_BACK_OFF = 0.02f;
-
-        const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
-        const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
-
-        if (timeToSleep.count() < 0) {
-            timeToSleep = std::chrono::microseconds(0);
-        }
-
-        _trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
-            + (timeToSleep.count() * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);
-
-        float lastCutoffRatio = _performanceThrottlingRatio;
-        bool hasRatioChanged = false;
-
-        if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
-            if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
-                // we're struggling - change our min required loudness to reduce some load
-                _performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
-
-                qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
-                    << lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
-                hasRatioChanged = true;
-            } else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
-                // we've recovered and can back off the required loudness
-                _performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
-
-                if (_performanceThrottlingRatio < 0) {
-                    _performanceThrottlingRatio = 0;
-                }
-
-                qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
-                    << lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
-                hasRatioChanged = true;
-            }
-
-            if (hasRatioChanged) {
-                // set out min audability threshold from the new ratio
-                _minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
-                qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
-
-                framesSinceCutoffEvent = 0;
-            }
-        }
-
-        if (!hasRatioChanged) {
-            ++framesSinceCutoffEvent;
-        }
-
+        // manage mixer load
+        {
+            _trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio) +
+                // ratio of frame spent sleeping / total frame time
+                ((CURRENT_FRAME_RATIO * timeToSleep.count()) / (float) AudioConstants::NETWORK_FRAME_USECS);
+
+            bool hasRatioChanged = false;
+
+            if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
+                if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
+                    qDebug() << "Mixer is struggling";
+                    // change our min required loudness to reduce some load
+                    _performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
+                    hasRatioChanged = true;
+                } else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
+                    qDebug() << "Mixer is recovering";
+                    // back off the required loudness
+                    _performanceThrottlingRatio = std::max(0.0f, _performanceThrottlingRatio - RATIO_BACK_OFF);
+                    hasRatioChanged = true;
+                }
+
+                if (hasRatioChanged) {
+                    // set out min audability threshold from the new ratio
+                    _minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
+                    framesSinceCutoffEvent = 0;
+
+                    qDebug() << "Sleeping" << _trailingSleepRatio << "of frame";
+                    qDebug() << "Cutoff is" << _performanceThrottlingRatio;
+                    qDebug() << "Minimum audibility to be mixed is" << _minAudibilityThreshold;
+                }
+            }
+
+            if (!hasRatioChanged) {
+                ++framesSinceCutoffEvent;
+            }
+        }
+
+        // mix
         nodeList->eachNode([&](const SharedNodePointer& node) {
             if (node->getLinkedData()) {
                 AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
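
To unpack the load-management block above: _trailingSleepRatio is an exponentially weighted moving average of the fraction of each frame spent sleeping (weight 1/TRAILING_AVERAGE_FRAMES per frame), and the two thresholds decide when to tighten or relax the loudness cutoff. Below is a rough, self-contained sketch of that arithmetic, with the member variables turned into locals and a made-up frame budget in place of AudioConstants::NETWORK_FRAME_USECS:

#include <algorithm>
#include <cstdio>

int main() {
    const int TRAILING_AVERAGE_FRAMES = 100;
    const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
    const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
    const float STRUGGLE_THRESHOLD = 0.10f;   // sleeping <= 10% of the frame: struggling
    const float BACK_OFF_THRESHOLD = 0.20f;   // sleeping >= 20% of the frame: recovering
    const float RATIO_BACK_OFF = 0.02f;
    const float FRAME_USECS = 10000.0f;       // placeholder frame budget

    float trailingSleepRatio = 1.0f;          // assume the mixer starts fully idle
    float throttlingRatio = 0.0f;

    // Pretend the mixer only manages to sleep 500us of every frame for a while.
    for (int frame = 0; frame < 400; ++frame) {
        float sleptUsecs = 500.0f;
        trailingSleepRatio = PREVIOUS_FRAMES_RATIO * trailingSleepRatio +
                             CURRENT_FRAME_RATIO * (sleptUsecs / FRAME_USECS);
    }

    // The average has decayed below the struggle threshold, so the mixer would
    // halve the remaining distance to full throttling...
    if (trailingSleepRatio <= STRUGGLE_THRESHOLD) {
        throttlingRatio += 0.5f * (1.0f - throttlingRatio);
    } else if (trailingSleepRatio >= BACK_OFF_THRESHOLD) {
        // ...and later step back down by RATIO_BACK_OFF once it sleeps enough again.
        throttlingRatio = std::max(0.0f, throttlingRatio - RATIO_BACK_OFF);
    }

    printf("trailing sleep ratio: %.3f, throttling ratio: %.2f\n",
           trailingSleepRatio, throttlingRatio);
    return 0;
}

In the real loop the check only fires once framesSinceCutoffEvent reaches TRAILING_AVERAGE_FRAMES, and the resulting throttling ratio feeds _minAudibilityThreshold, which decides which streams are quiet enough to drop from the mix.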
@@ -878,24 +868,32 @@ void AudioMixer::broadcastMixes() {
 
         ++_numStatFrames;
 
-        // since we're a while loop we need to help Qt's event processing
-        QCoreApplication::processEvents();
+        // play nice with qt event-looping
+        {
+            // since we're a while loop we need to help qt's event processing
+            QCoreApplication::processEvents();
 
-        if (_isFinished) {
-            // at this point the audio-mixer is done
-            // check if we have a deferred delete event to process (which we should once finished)
-            QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
-            break;
+            if (_isFinished) {
+                // alert qt that this is finished
+                QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
+                break;
+            }
         }
 
-        // push the next frame timestamp to when we should send the next
-        nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
+        // sleep until the next frame, if necessary
+        {
+            nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
 
-        // sleep as long as we need until next frame, if we can
-        auto now = p_high_resolution_clock::now();
-        timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);
+            auto now = p_high_resolution_clock::now();
+            timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);
 
-        std::this_thread::sleep_for(timeToSleep);
+            if (timeToSleep.count() < 0) {
+                nextFrameTimestamp = now;
+                timeToSleep = std::chrono::microseconds(0);
+            }
+
+            std::this_thread::sleep_for(timeToSleep);
+        }
     }
 }
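
The reworked sleep block above adds a guard the old loop lacked at this point: when the frame runs over budget and timeToSleep goes negative, the next-frame timestamp is re-anchored to "now" instead of letting the deficit accumulate (the old code only clamped timeToSleep on the next pass and never reset nextFrameTimestamp). Below is a self-contained sketch of that pacing pattern, with a placeholder frame length standing in for AudioConstants::NETWORK_FRAME_USECS and the mixing work elided:

#include <chrono>
#include <thread>

int main() {
    using namespace std::chrono;
    const auto FRAME = microseconds(10000);   // placeholder frame budget

    auto nextFrameTimestamp = steady_clock::now();
    auto timeToSleep = microseconds(0);

    for (int frame = 0; frame < 100; ++frame) {
        // ... per-frame mixing work would happen here ...

        // aim for the next frame boundary
        nextFrameTimestamp += FRAME;
        auto now = steady_clock::now();
        timeToSleep = duration_cast<microseconds>(nextFrameTimestamp - now);

        if (timeToSleep.count() < 0) {
            // the frame overran its budget: restart the clock rather than
            // trying to "catch up" with ever-larger negative sleep times
            nextFrameTimestamp = now;
            timeToSleep = microseconds(0);
        }

        std::this_thread::sleep_for(timeToSleep);
    }
    return 0;
}

Keeping timeToSleep non-negative also keeps the trailing sleep-ratio average honest, since the next iteration treats that value as the time actually slept.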


@@ -52,6 +52,7 @@ private slots:
     void removeHRTFsForFinishedInjector(const QUuid& streamID);
 
 private:
+    AudioMixerClientData* getOrCreateClientData(Node* node);
     void domainSettingsRequestComplete();
 
     /// adds one stream to the mix for a listening node


@@ -357,7 +357,10 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
 }
 
 void AudioMixerClientData::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
-    qDebug() << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
+    qDebug() << __FUNCTION__ <<
+        "sendingNode:" << *node <<
+        "currentCodec:" << currentCodec <<
+        "receivedCodec:" << recievedCodec;
     sendSelectAudioFormat(node, currentCodec);
 }


@@ -344,7 +344,6 @@ bool OffscreenQmlSurface::allowNewFrame(uint8_t fps) {
 OffscreenQmlSurface::OffscreenQmlSurface() {
 }
 
-static const uint64_t MAX_SHUTDOWN_WAIT_SECS = 2;
 OffscreenQmlSurface::~OffscreenQmlSurface() {
     QObject::disconnect(&_updateTimer);
     QObject::disconnect(qApp);