disable sending avatar and audio packets during interstitial mode

Dante Ruiz 2018-08-02 11:39:06 -07:00
parent 478bb88c69
commit 048196ec6f
6 changed files with 125 additions and 98 deletions

View file

@@ -1372,6 +1372,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
         });
         connect(this, &Application::activeDisplayPluginChanged,
             reinterpret_cast<scripting::Audio*>(audioScriptingInterface.data()), &scripting::Audio::onContextChanged);
+        connect(this, &Application::interstitialModeChanged, audioIO.data(), &AudioClient::setInterstitialStatus);
     }
 
     // Create the rendering engine. This can be slow on some machines due to lots of
@@ -2252,6 +2253,25 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
     // Preload Tablet sounds
     DependencyManager::get<TabletScriptingInterface>()->preloadSounds();
 
+    connect(this, &Application::interstitialModeChanged, this, [this] (bool interstitialMode) {
+        if (!interstitialMode) {
+            DependencyManager::get<AudioClient>()->negotiateAudioFormat();
+            _queryExpiry = SteadyClock::now();
+            if (_avatarOverrideUrl.isValid()) {
+                getMyAvatar()->useFullAvatarURL(_avatarOverrideUrl);
+            }
+            static const QUrl empty{};
+            if (getMyAvatar()->getFullAvatarURLFromPreferences() != getMyAvatar()->cannonicalSkeletonModelURL(empty)) {
+                getMyAvatar()->resetFullAvatarURL();
+            }
+            getMyAvatar()->markIdentityDataChanged();
+            getMyAvatar()->resetLastSent();
+
+            // transmit a "sendAll" packet to the AvatarMixer we just connected to.
+            getMyAvatar()->sendAvatarDataPacket(true);
+        }
+    });
+
     _pendingIdleEvent = false;
     _pendingRenderEvent = false;
@@ -3412,13 +3432,14 @@ bool Application::isServerlessMode() const {
     return false;
 }
 
-bool Application::isInterstitialPage() {
+bool Application::isInterstitialMode() const {
     return _interstitialMode;
 }
 
-void Application::setInterstitialMode(bool interstitialMode) {
+void Application::setIsInterstitialMode(bool interstitialMode) {
     if (_interstitialMode != interstitialMode) {
         _interstitialMode = interstitialMode;
+        emit interstitialModeChanged(_interstitialMode);
     }
 }
@@ -5481,8 +5502,6 @@ static bool domainLoadingInProgress = false;
 void Application::update(float deltaTime) {
     PROFILE_RANGE_EX(app, __FUNCTION__, 0xffff0000, (uint64_t)_renderFrameCount + 1);
 
-    auto audioClient = DependencyManager::get<AudioClient>();
-    audioClient->setMuted(true);
     if (!_physicsEnabled) {
         if (!domainLoadingInProgress) {
             PROFILE_ASYNC_BEGIN(app, "Scene Loading", "");
@@ -5504,6 +5523,7 @@ void Application::update(float deltaTime) {
                 // scene is ready to compute its collision shape.
                 if (nearbyEntitiesAreReadyForPhysics() && getMyAvatar()->isReadyForPhysics()) {
                     _physicsEnabled = true;
+                    setIsInterstitialMode(false);
                     getMyAvatar()->updateMotionBehaviorFromMenu();
                 }
             }
@@ -5909,7 +5929,7 @@ void Application::update(float deltaTime) {
     // send packet containing downstream audio stats to the AudioMixer
     {
         quint64 sinceLastNack = now - _lastSendDownstreamAudioStats;
-        if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS) {
+        if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS && !isInterstitialMode()) {
             _lastSendDownstreamAudioStats = now;
 
             QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
@@ -6072,21 +6092,23 @@ void Application::updateRenderArgs(float deltaTime) {
 }
 
 void Application::queryAvatars() {
-    auto avatarPacket = NLPacket::create(PacketType::AvatarQuery);
-    auto destinationBuffer = reinterpret_cast<unsigned char*>(avatarPacket->getPayload());
-    unsigned char* bufferStart = destinationBuffer;
-
-    uint8_t numFrustums = (uint8_t)_conicalViews.size();
-    memcpy(destinationBuffer, &numFrustums, sizeof(numFrustums));
-    destinationBuffer += sizeof(numFrustums);
-
-    for (const auto& view : _conicalViews) {
-        destinationBuffer += view.serialize(destinationBuffer);
-    }
-
-    avatarPacket->setPayloadSize(destinationBuffer - bufferStart);
-
-    DependencyManager::get<NodeList>()->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
+    if (!isInterstitialMode()) {
+        auto avatarPacket = NLPacket::create(PacketType::AvatarQuery);
+        auto destinationBuffer = reinterpret_cast<unsigned char*>(avatarPacket->getPayload());
+        unsigned char* bufferStart = destinationBuffer;
+
+        uint8_t numFrustums = (uint8_t)_conicalViews.size();
+        memcpy(destinationBuffer, &numFrustums, sizeof(numFrustums));
+        destinationBuffer += sizeof(numFrustums);
+
+        for (const auto& view : _conicalViews) {
+            destinationBuffer += view.serialize(destinationBuffer);
+        }
+
+        avatarPacket->setPayloadSize(destinationBuffer - bufferStart);
+
+        DependencyManager::get<NodeList>()->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
+    }
 }
@@ -6293,6 +6315,7 @@ void Application::clearDomainOctreeDetails() {
     qCDebug(interfaceapp) << "Clearing domain octree details...";
 
     resetPhysicsReadyInformation();
+    setIsInterstitialMode(true);
 
     _octreeServerSceneStats.withWriteLock([&] {
         _octreeServerSceneStats.clear();
@@ -6367,11 +6390,11 @@ void Application::nodeActivated(SharedNodePointer node) {
         _octreeQuery.incrementConnectionID();
     }
 
-    if (node->getType() == NodeType::AudioMixer) {
+    if (node->getType() == NodeType::AudioMixer && !isInterstitialMode()) {
         DependencyManager::get<AudioClient>()->negotiateAudioFormat();
     }
 
-    if (node->getType() == NodeType::AvatarMixer) {
+    if (node->getType() == NodeType::AvatarMixer && !isInterstitialMode()) {
        _queryExpiry = SteadyClock::now();
 
        // new avatar mixer, send off our identity packet on next update loop
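A note on the pattern used throughout the Application.cpp changes above: a single _interstitialMode flag feeds both a getter that the send paths check (isInterstitialMode()) and a Qt signal (interstitialModeChanged) that pushes the new state to subsystems such as AudioClient. The sketch below is a minimal, Qt-free illustration of that flag-plus-notification pattern; the class and listener names are hypothetical and not part of the interface code.

#include <functional>
#include <iostream>
#include <vector>

// Hypothetical stand-in for the Application-side flag. The real code emits the
// Qt signal interstitialModeChanged(bool) instead of invoking listeners directly.
class InterstitialState {
public:
    void setIsInterstitialMode(bool interstitialMode) {
        if (_interstitialMode != interstitialMode) {    // notify only on a real change
            _interstitialMode = interstitialMode;
            for (auto& listener : _listeners) {
                listener(_interstitialMode);
            }
        }
    }
    bool isInterstitialMode() const { return _interstitialMode; }
    void addListener(std::function<void(bool)> listener) { _listeners.push_back(std::move(listener)); }

private:
    bool _interstitialMode { true };    // starts gated, like AudioClient::_interstitialMode
    std::vector<std::function<void(bool)>> _listeners;
};

int main() {
    InterstitialState state;

    // Rough equivalent of connect(this, &Application::interstitialModeChanged, ...).
    state.addListener([](bool interstitial) {
        std::cout << (interstitial ? "gating packets\n" : "resuming packets\n");
    });

    // queryAvatars() / updateMyAvatar() style check before sending anything.
    auto trySend = [&state] {
        if (!state.isInterstitialMode()) {
            std::cout << "packet sent\n";
        }
    };

    trySend();                            // suppressed: still in interstitial mode
    state.setIsInterstitialMode(false);   // e.g. physics became ready
    trySend();                            // now sends
    state.setIsInterstitialMode(true);    // e.g. domain octree details were cleared
    trySend();                            // suppressed again
    return 0;
}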

View file

@@ -328,6 +328,7 @@ signals:
     void activeDisplayPluginChanged();
     void uploadRequest(QString path);
+    void interstitialModeChanged(bool interstitialMode);
 
 public slots:
     QVector<EntityItemID> pasteEntities(float x, float y, float z);

View file

@@ -121,7 +121,7 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
     quint64 now = usecTimestampNow();
     quint64 dt = now - _lastSendAvatarDataTime;
 
-    if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS) {
+    if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !qApp->isInterstitialMode()) {
         // send head/hand data to the avatar mixer and voxel server
         PerformanceTimer perfTimer("send");
         _myAvatar->sendAvatarDataPacket();
@@ -755,13 +755,13 @@ void AvatarManager::setAvatarSortCoefficient(const QString& name, const QScriptV
             QString currentSessionUUID = avatar->getSessionUUID().toString();
             if (specificAvatarIdentifiers.isEmpty() || specificAvatarIdentifiers.contains(currentSessionUUID)) {
                 QJsonObject thisAvatarPalData;
 
                 auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
 
                 if (currentSessionUUID == myAvatar->getSessionUUID().toString()) {
                     currentSessionUUID = "";
                 }
 
                 thisAvatarPalData.insert("sessionUUID", currentSessionUUID);
                 thisAvatarPalData.insert("sessionDisplayName", avatar->getSessionDisplayName());
                 thisAvatarPalData.insert("audioLoudness", avatar->getAudioLoudness());

View file

@@ -2212,6 +2212,7 @@ void MyAvatar::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
         // send a forced avatarData update to make sure the script can send neutal blendshapes on unload
         // without having to wait for the update loop, make sure _hasScriptedBlendShapes is still true
         // before sending the update, or else it won't send the neutal blendshapes to the receiving clients
         sendAvatarDataPacket(true);
     }
     _hasScriptedBlendShapes = hasScriptedBlendshapes;

View file

@@ -667,8 +667,7 @@ void AudioClient::stop() {
 }
 
 void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message) {
-    /*char bitset;
+    char bitset;
     message->readPrimitive(&bitset);
 
     bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);
@@ -680,11 +679,10 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message) {
         _receivedAudioStream.setReverb(reverbTime, wetLevel);
     } else {
         _receivedAudioStream.clearReverb();
-    }*/
+    }
 }
 
 void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message) {
-    /*
     if (message->getType() == PacketType::SilentAudioFrame) {
         _silentInbound.increment();
     } else {
@@ -709,7 +707,7 @@ void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message)
 
         // Audio output must exist and be correctly set up if we're going to process received audio
         _receivedAudioStream.parseData(*message);
 #endif
-    }*/
+    }
 }
AudioClient::Gate::Gate(AudioClient* audioClient) : AudioClient::Gate::Gate(AudioClient* audioClient) :
@@ -1042,80 +1040,82 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
 }
 
 void AudioClient::handleAudioInput(QByteArray& audioBuffer) {
-    if (_muted) {
-        _lastInputLoudness = 0.0f;
-        _timeSinceLastClip = 0.0f;
-    } else {
-        int16_t* samples = reinterpret_cast<int16_t*>(audioBuffer.data());
-        int numSamples = audioBuffer.size() / AudioConstants::SAMPLE_SIZE;
-        int numFrames = numSamples / (_isStereoInput ? AudioConstants::STEREO : AudioConstants::MONO);
-
-        if (_isNoiseGateEnabled) {
-            // The audio gate includes DC removal
-            _audioGate->render(samples, samples, numFrames);
-        } else {
-            _audioGate->removeDC(samples, samples, numFrames);
-        }
-
-        int32_t loudness = 0;
-        assert(numSamples < 65536); // int32_t loudness cannot overflow
-        bool didClip = false;
-        for (int i = 0; i < numSamples; ++i) {
-            const int32_t CLIPPING_THRESHOLD = (int32_t)(AudioConstants::MAX_SAMPLE_VALUE * 0.9f);
-            int32_t sample = std::abs((int32_t)samples[i]);
-            loudness += sample;
-            didClip |= (sample > CLIPPING_THRESHOLD);
-        }
-        _lastInputLoudness = (float)loudness / numSamples;
-
-        if (didClip) {
-            _timeSinceLastClip = 0.0f;
-        } else if (_timeSinceLastClip >= 0.0f) {
-            _timeSinceLastClip += (float)numSamples / (float)AudioConstants::SAMPLE_RATE;
-        }
-
-        emit inputReceived(audioBuffer);
-    }
-
-    emit inputLoudnessChanged(_lastInputLoudness);
-
-    // state machine to detect gate opening and closing
-    bool audioGateOpen = (_lastInputLoudness != 0.0f);
-    bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
-    bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
-    _audioGateOpen = audioGateOpen;
-
-    if (openedInLastBlock) {
-        emit noiseGateOpened();
-    } else if (closedInLastBlock) {
-        emit noiseGateClosed();
-    }
-
-    // the codec must be flushed to silence before sending silent packets,
-    // so delay the transition to silent packets by one packet after becoming silent.
-    auto packetType = _shouldEchoToServer ? PacketType::MicrophoneAudioWithEcho : PacketType::MicrophoneAudioNoEcho;
-    if (!audioGateOpen && !closedInLastBlock) {
-        packetType = PacketType::SilentAudioFrame;
-        _silentOutbound.increment();
-    } else {
-        _audioOutbound.increment();
-    }
-
-    Transform audioTransform;
-    audioTransform.setTranslation(_positionGetter());
-    audioTransform.setRotation(_orientationGetter());
-
-    QByteArray encodedBuffer;
-    if (_encoder) {
-        _encoder->encode(audioBuffer, encodedBuffer);
-    } else {
-        encodedBuffer = audioBuffer;
-    }
-
-    emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, _isStereoInput,
-                    audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
-                    packetType, _selectedCodecName);
-    _stats.sentPacket();
+    if (!_interstitialMode) {
+        if (_muted) {
+            _lastInputLoudness = 0.0f;
+            _timeSinceLastClip = 0.0f;
+        } else {
+            int16_t* samples = reinterpret_cast<int16_t*>(audioBuffer.data());
+            int numSamples = audioBuffer.size() / AudioConstants::SAMPLE_SIZE;
+            int numFrames = numSamples / (_isStereoInput ? AudioConstants::STEREO : AudioConstants::MONO);
+
+            if (_isNoiseGateEnabled) {
+                // The audio gate includes DC removal
+                _audioGate->render(samples, samples, numFrames);
+            } else {
+                _audioGate->removeDC(samples, samples, numFrames);
+            }
+
+            int32_t loudness = 0;
+            assert(numSamples < 65536); // int32_t loudness cannot overflow
+            bool didClip = false;
+            for (int i = 0; i < numSamples; ++i) {
+                const int32_t CLIPPING_THRESHOLD = (int32_t)(AudioConstants::MAX_SAMPLE_VALUE * 0.9f);
+                int32_t sample = std::abs((int32_t)samples[i]);
+                loudness += sample;
+                didClip |= (sample > CLIPPING_THRESHOLD);
+            }
+            _lastInputLoudness = (float)loudness / numSamples;
+
+            if (didClip) {
+                _timeSinceLastClip = 0.0f;
+            } else if (_timeSinceLastClip >= 0.0f) {
+                _timeSinceLastClip += (float)numSamples / (float)AudioConstants::SAMPLE_RATE;
+            }
+
+            emit inputReceived(audioBuffer);
+        }
+
+        emit inputLoudnessChanged(_lastInputLoudness);
+
+        // state machine to detect gate opening and closing
+        bool audioGateOpen = (_lastInputLoudness != 0.0f);
+        bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
+        bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
+        _audioGateOpen = audioGateOpen;
+
+        if (openedInLastBlock) {
+            emit noiseGateOpened();
+        } else if (closedInLastBlock) {
+            emit noiseGateClosed();
+        }
+
+        // the codec must be flushed to silence before sending silent packets,
+        // so delay the transition to silent packets by one packet after becoming silent.
+        auto packetType = _shouldEchoToServer ? PacketType::MicrophoneAudioWithEcho : PacketType::MicrophoneAudioNoEcho;
+        if (!audioGateOpen && !closedInLastBlock) {
+            packetType = PacketType::SilentAudioFrame;
+            _silentOutbound.increment();
+        } else {
+            _audioOutbound.increment();
+        }
+
+        Transform audioTransform;
+        audioTransform.setTranslation(_positionGetter());
+        audioTransform.setRotation(_orientationGetter());
+
+        QByteArray encodedBuffer;
+        if (_encoder) {
+            _encoder->encode(audioBuffer, encodedBuffer);
+        } else {
+            encodedBuffer = audioBuffer;
+        }
+
+        emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, _isStereoInput,
+                        audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
+                        packetType, _selectedCodecName);
+        _stats.sentPacket();
+    }
 }
 
 void AudioClient::handleMicAudioInput() {
@@ -2017,7 +2017,7 @@ void AudioClient::loadSettings() {
     _receivedAudioStream.setDynamicJitterBufferEnabled(dynamicJitterBufferEnabled.get());
     _receivedAudioStream.setStaticJitterBufferFrames(staticJitterBufferFrames.get());
 
+    qCDebug(audioclient) << "---- Initializing Audio Client ----";
     auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
     for (auto& plugin : codecPlugins) {
         qCDebug(audioclient) << "Codec available:" << plugin->getName();

View file

@@ -188,6 +188,7 @@ public slots:
     void handleRecordedAudioInput(const QByteArray& audio);
     void reset();
     void audioMixerKilled();
+    void setInterstitialStatus(bool interstitialMode) { _interstitialMode = interstitialMode; }
 
     void setMuted(bool muted, bool emitSignal = true);
     bool isMuted() { return _muted; }
@@ -417,6 +418,7 @@ private:
     QVector<AudioInjectorPointer> _activeLocalAudioInjectors;
     bool _isPlayingBackRecording { false };
+    bool _interstitialMode { true };
 
     CodecPluginPointer _codec;
     QString _selectedCodecName;
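One observation on the new AudioClient members: setInterstitialStatus() runs as a slot on whichever thread delivers the signal, while _interstitialMode is read inside handleAudioInput() on the audio path, and the patch uses a plain bool defaulted to true so sending stays disabled until the application flips it. If explicit cross-thread visibility were wanted, std::atomic<bool> would be a drop-in substitute; the sketch below shows that variant with hypothetical names and is not the actual AudioClient code.

#include <atomic>

// Hypothetical variant of the new members using std::atomic<bool> to make the
// cross-thread read/write of the flag explicit. The actual patch uses a plain
// bool initialized to true.
class AudioSendGate {
public:
    // Slot-style setter, invoked when interstitialModeChanged(bool) fires.
    void setInterstitialStatus(bool interstitialMode) {
        _interstitialMode.store(interstitialMode, std::memory_order_relaxed);
    }

    // Checked at the top of the audio input handler before encoding or sending.
    bool shouldSendAudio() const {
        return !_interstitialMode.load(std::memory_order_relaxed);
    }

private:
    std::atomic<bool> _interstitialMode { true };    // gated until the domain finishes loading
};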