Mirror of https://github.com/HifiExperiments/overte.git, synced 2025-08-08 14:29:24 +02:00
disable sending avatar and audio packets during interstitial mode
parent 478bb88c69
commit 048196ec6f

6 changed files with 125 additions and 98 deletions
Application.cpp

@@ -1372,6 +1372,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
     });
     connect(this, &Application::activeDisplayPluginChanged,
         reinterpret_cast<scripting::Audio*>(audioScriptingInterface.data()), &scripting::Audio::onContextChanged);
+    connect(this, &Application::interstitialModeChanged, audioIO.data(), &AudioClient::setInterstitialStatus);
 }

 // Create the rendering engine. This can be slow on some machines due to lots of
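
The new connect() here mirrors the application-level interstitial flag into AudioClient, so the audio code can consult its own copy of the state instead of reaching back into Application. A minimal stand-in for that wiring, using std::function in place of Qt's signal/slot machinery (all names below are illustrative, not from the repository):

// Sketch only: models the signal/slot hookup above with a plain callback list.
#include <functional>
#include <iostream>
#include <vector>

struct AudioClientStub {
    bool interstitialMode { true };                 // mirrors AudioClient::_interstitialMode
    void setInterstitialStatus(bool mode) { interstitialMode = mode; }
};

struct ApplicationStub {
    std::vector<std::function<void(bool)>> interstitialModeChanged;   // stand-in for the Qt signal

    void setIsInterstitialMode(bool mode) {
        if (_interstitialMode != mode) {
            _interstitialMode = mode;
            for (auto& slot : interstitialModeChanged) { slot(mode); } // "emit"
        }
    }

private:
    bool _interstitialMode { true };
};

int main() {
    ApplicationStub app;
    AudioClientStub audio;
    // equivalent of: connect(this, &Application::interstitialModeChanged,
    //                         audioIO.data(), &AudioClient::setInterstitialStatus);
    app.interstitialModeChanged.push_back([&audio](bool mode) { audio.setInterstitialStatus(mode); });

    app.setIsInterstitialMode(false);               // domain finished loading
    std::cout << std::boolalpha << audio.interstitialMode << "\n";    // prints: false
}

In the real code Qt picks direct or queued delivery based on the receiver's thread; the stub simply calls the slot synchronously.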

@@ -2252,6 +2253,25 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
     // Preload Tablet sounds
     DependencyManager::get<TabletScriptingInterface>()->preloadSounds();

+    connect(this, &Application::interstitialModeChanged, this, [this] (bool interstitialMode) {
+        if (!interstitialMode) {
+            DependencyManager::get<AudioClient>()->negotiateAudioFormat();
+            _queryExpiry = SteadyClock::now();
+            if (_avatarOverrideUrl.isValid()) {
+                getMyAvatar()->useFullAvatarURL(_avatarOverrideUrl);
+            }
+            static const QUrl empty{};
+            if (getMyAvatar()->getFullAvatarURLFromPreferences() != getMyAvatar()->cannonicalSkeletonModelURL(empty)) {
+                getMyAvatar()->resetFullAvatarURL();
+            }
+            getMyAvatar()->markIdentityDataChanged();
+            getMyAvatar()->resetLastSent();
+
+            // transmit a "sendAll" packet to the AvatarMixer we just connected to.
+            getMyAvatar()->sendAvatarDataPacket(true);
+        }
+    });
+
     _pendingIdleEvent = false;
     _pendingRenderEvent = false;
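
The lambda runs whenever interstitial mode flips; when it flips off it renegotiates the audio codec, restarts the avatar-query timer, re-applies any avatar override URL, marks identity data dirty, and forces a full "sendAll" avatar packet to the AvatarMixer that was kept waiting. A condensed sketch of that ordering with stub types (it omits the preferences/canonical URL comparison); every name here is illustrative:

// Sketch only: the order of work done when interstitial mode ends.
#include <chrono>
#include <iostream>
#include <string>

struct MyAvatarStub {
    std::string overrideUrl;       // stand-in for _avatarOverrideUrl
    void useFullAvatarURL(const std::string& url) { std::cout << "use avatar " << url << "\n"; }
    void markIdentityDataChanged() { std::cout << "identity marked dirty\n"; }
    void resetLastSent()           { std::cout << "last-sent state cleared\n"; }
    void sendAvatarDataPacket(bool sendAll) {
        std::cout << (sendAll ? "full" : "incremental") << " avatar packet sent\n";
    }
};

struct InterstitialExit {
    MyAvatarStub avatar;
    std::chrono::steady_clock::time_point queryExpiry;

    void onInterstitialModeChanged(bool interstitialMode) {
        if (interstitialMode) { return; }                 // only act when leaving the interstitial
        std::cout << "renegotiate audio codec\n";         // AudioClient::negotiateAudioFormat()
        queryExpiry = std::chrono::steady_clock::now();   // restart avatar-query scheduling
        if (!avatar.overrideUrl.empty()) {
            avatar.useFullAvatarURL(avatar.overrideUrl);
        }
        avatar.markIdentityDataChanged();
        avatar.resetLastSent();
        avatar.sendAvatarDataPacket(true);                // "sendAll" to the new AvatarMixer
    }
};

int main() {
    InterstitialExit exitHandler;
    exitHandler.onInterstitialModeChanged(false);
}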

@@ -3412,13 +3432,14 @@ bool Application::isServerlessMode() const {
     return false;
 }

-bool Application::isInterstitialPage() {
+bool Application::isInterstitialMode() const {
     return _interstitialMode;
 }

-void Application::setInterstitialMode(bool interstitialMode) {
+void Application::setIsInterstitialMode(bool interstitialMode) {
     if (_interstitialMode != interstitialMode) {
         _interstitialMode = interstitialMode;
+        emit interstitialModeChanged(_interstitialMode);
     }
 }
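
The renamed accessors pair a const getter with a setter that only mutates and notifies on a real change, so redundant calls never re-fire the signal. The pattern is small enough to sketch without Qt; the callback stands in for the interstitialModeChanged signal and every name is illustrative:

// Sketch only: change-detecting setter with a notification hook and a const getter.
#include <functional>
#include <iostream>

class InterstitialState {
public:
    explicit InterstitialState(std::function<void(bool)> onChanged)
        : _onChanged(std::move(onChanged)) {}

    bool isInterstitialMode() const { return _interstitialMode; }

    void setIsInterstitialMode(bool interstitialMode) {
        if (_interstitialMode != interstitialMode) {   // no-op unless the value actually changes
            _interstitialMode = interstitialMode;
            _onChanged(_interstitialMode);             // stands in for emit interstitialModeChanged(...)
        }
    }

private:
    bool _interstitialMode { true };                   // sends stay suppressed until this is cleared
    std::function<void(bool)> _onChanged;
};

int main() {
    InterstitialState state([](bool mode) { std::cout << "changed to " << std::boolalpha << mode << "\n"; });
    state.setIsInterstitialMode(true);    // already true: no notification
    state.setIsInterstitialMode(false);   // prints: changed to false
}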

@@ -5481,8 +5502,6 @@ static bool domainLoadingInProgress = false;
 void Application::update(float deltaTime) {
     PROFILE_RANGE_EX(app, __FUNCTION__, 0xffff0000, (uint64_t)_renderFrameCount + 1);

-    auto audioClient = DependencyManager::get<AudioClient>();
-    audioClient->setMuted(true);
     if (!_physicsEnabled) {
         if (!domainLoadingInProgress) {
             PROFILE_ASYNC_BEGIN(app, "Scene Loading", "");

@@ -5504,6 +5523,7 @@ void Application::update(float deltaTime) {
             // scene is ready to compute its collision shape.
             if (nearbyEntitiesAreReadyForPhysics() && getMyAvatar()->isReadyForPhysics()) {
                 _physicsEnabled = true;
+                setIsInterstitialMode(false);
                 getMyAvatar()->updateMotionBehaviorFromMenu();
             }
         }

@@ -5909,7 +5929,7 @@ void Application::update(float deltaTime) {
     // send packet containing downstream audio stats to the AudioMixer
     {
         quint64 sinceLastNack = now - _lastSendDownstreamAudioStats;
-        if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS) {
+        if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS && !isInterstitialMode()) {
            _lastSendDownstreamAudioStats = now;

            QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
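
This hunk stacks the interstitial check on top of a throttle that already existed: downstream audio stats only go out when the previous send is old enough and the client is actually in the world. A self-contained sketch of that double condition using std::chrono; the interval constant and the names are placeholders, not the engine's:

// Sketch only: time-threshold throttle combined with the interstitial gate.
#include <chrono>
#include <iostream>

using Clock = std::chrono::steady_clock;

struct StatsSender {
    // illustrative stand-in for TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS
    static constexpr std::chrono::milliseconds kResendInterval { 250 };

    Clock::time_point lastSend { Clock::now() - kResendInterval };
    bool interstitialMode { true };

    void maybeSendDownstreamAudioStats() {
        auto sinceLast = Clock::now() - lastSend;
        if (sinceLast > kResendInterval && !interstitialMode) {
            lastSend = Clock::now();
            std::cout << "downstream audio stats packet sent\n";
        }
    }
};

int main() {
    StatsSender sender;
    sender.maybeSendDownstreamAudioStats();   // suppressed: still interstitial
    sender.interstitialMode = false;
    sender.maybeSendDownstreamAudioStats();   // interval elapsed and in-world: sends
}

The same shape recurs in the avatar-send path further down.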

@@ -6072,6 +6092,7 @@ void Application::updateRenderArgs(float deltaTime) {
 }

 void Application::queryAvatars() {
+    if (!isInterstitialMode()) {
     auto avatarPacket = NLPacket::create(PacketType::AvatarQuery);
     auto destinationBuffer = reinterpret_cast<unsigned char*>(avatarPacket->getPayload());
     unsigned char* bufferStart = destinationBuffer;

@@ -6087,6 +6108,7 @@ void Application::queryAvatars() {
     avatarPacket->setPayloadSize(destinationBuffer - bufferStart);

     DependencyManager::get<NodeList>()->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
+    }
 }

@@ -6293,6 +6315,7 @@ void Application::clearDomainOctreeDetails() {
     qCDebug(interfaceapp) << "Clearing domain octree details...";

     resetPhysicsReadyInformation();
+    setIsInterstitialMode(true);

     _octreeServerSceneStats.withWriteLock([&] {
         _octreeServerSceneStats.clear();

@@ -6367,11 +6390,11 @@ void Application::nodeActivated(SharedNodePointer node) {
         _octreeQuery.incrementConnectionID();
     }

-    if (node->getType() == NodeType::AudioMixer) {
+    if (node->getType() == NodeType::AudioMixer && !isInterstitialMode()) {
         DependencyManager::get<AudioClient>()->negotiateAudioFormat();
     }

-    if (node->getType() == NodeType::AvatarMixer) {
+    if (node->getType() == NodeType::AvatarMixer && !isInterstitialMode()) {
         _queryExpiry = SteadyClock::now();

         // new avatar mixer, send off our identity packet on next update loop

Application.h

@@ -328,6 +328,7 @@ signals:
     void activeDisplayPluginChanged();

     void uploadRequest(QString path);
+    void interstitialModeChanged(bool interstitialMode);

 public slots:
     QVector<EntityItemID> pasteEntities(float x, float y, float z);

AvatarManager.cpp

@@ -121,7 +121,7 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
     quint64 now = usecTimestampNow();
     quint64 dt = now - _lastSendAvatarDataTime;

-    if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS) {
+    if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !qApp->isInterstitialMode()) {
         // send head/hand data to the avatar mixer and voxel server
         PerformanceTimer perfTimer("send");
         _myAvatar->sendAvatarDataPacket();
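
The avatar path gets the same treatment: the existing MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS throttle now also requires that the app has left interstitial mode, so no avatar packets leave the client while the domain is still loading. A reduced sketch with microsecond timestamps, as in the original; the constant's name and value here are placeholders:

// Sketch only: per-update avatar send, throttled and gated on interstitial mode.
#include <chrono>
#include <cstdint>
#include <iostream>

// placeholder value; the real constant lives in the avatars library
constexpr uint64_t MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS_USECS = 1000000 / 60;  // ~60 Hz

uint64_t usecTimestampNow() {
    using namespace std::chrono;
    return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
}

struct AvatarSender {
    uint64_t lastSendAvatarDataTime { 0 };
    bool interstitialMode { true };

    void updateMyAvatar() {
        uint64_t now = usecTimestampNow();
        uint64_t dt = now - lastSendAvatarDataTime;
        if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS_USECS && !interstitialMode) {
            lastSendAvatarDataTime = now;
            std::cout << "avatar data packet sent\n";
        }
    }
};

int main() {
    AvatarSender sender;
    sender.updateMyAvatar();          // suppressed during the interstitial
    sender.interstitialMode = false;
    sender.updateMyAvatar();          // now sends
}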

MyAvatar.cpp

@@ -2212,6 +2212,7 @@ void MyAvatar::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
         // send a forced avatarData update to make sure the script can send neutal blendshapes on unload
         // without having to wait for the update loop, make sure _hasScriptedBlendShapes is still true
         // before sending the update, or else it won't send the neutal blendshapes to the receiving clients

         sendAvatarDataPacket(true);
     }
     _hasScriptedBlendShapes = hasScriptedBlendshapes;

AudioClient.cpp

@@ -667,8 +667,7 @@ void AudioClient::stop() {
 }

 void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message) {
-
-/*char bitset;
+    char bitset;
     message->readPrimitive(&bitset);

     bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);

@@ -680,11 +679,10 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessag
         _receivedAudioStream.setReverb(reverbTime, wetLevel);
     } else {
         _receivedAudioStream.clearReverb();
-    }*/
+    }
 }

 void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message) {
-/*
     if (message->getType() == PacketType::SilentAudioFrame) {
         _silentInbound.increment();
     } else {

@@ -709,7 +707,7 @@ void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message)
         // Audio output must exist and be correctly set up if we're going to process received audio
         _receivedAudioStream.parseData(*message);
 #endif
-    }*/
+    }
 }

 AudioClient::Gate::Gate(AudioClient* audioClient) :

@@ -1042,6 +1040,7 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
 }

 void AudioClient::handleAudioInput(QByteArray& audioBuffer) {
+    if (!_interstitialMode) {
     if (_muted) {
         _lastInputLoudness = 0.0f;
         _timeSinceLastClip = 0.0f;

@@ -1116,6 +1115,7 @@ void AudioClient::handleAudioInput(QByteArray& audioBuffer) {
                 audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
                 packetType, _selectedCodecName);
         _stats.sentPacket();
+    }
 }

 void AudioClient::handleMicAudioInput() {
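
Gating handleAudioInput() on !_interstitialMode replaces the earlier stopgap of force-muting the client every frame in Application::update() (the two lines removed in the 5481 hunk above): instead of silencing captured audio after the fact, the client never encodes or sends input packets while the interstitial screen is up. A minimal sketch of gating at the send site; the types and names are stand-ins:

// Sketch only: drop microphone processing at the source while interstitial, instead of muting.
#include <cstdint>
#include <iostream>
#include <vector>

struct AudioInputGate {
    bool interstitialMode { true };   // set via the slot wired up in the Application constructor
    bool muted { false };
    int packetsSent { 0 };

    void handleAudioInput(const std::vector<int16_t>& audioBuffer) {
        if (!interstitialMode) {
            if (muted) {
                // the real muted branch resets loudness tracking (see the 1042 hunk above)
                std::cout << "muted: loudness reset, nothing encoded\n";
            } else {
                ++packetsSent;
                std::cout << "encoded " << audioBuffer.size() << " samples into a packet\n";
            }
        }
        // while interstitial: nothing is processed or sent at all
    }
};

int main() {
    AudioInputGate gate;
    std::vector<int16_t> frame(240, 0);
    gate.handleAudioInput(frame);     // interstitial: no packet
    gate.interstitialMode = false;
    gate.handleAudioInput(frame);     // in-world: packet goes out
    std::cout << "packets sent: " << gate.packetsSent << "\n";
}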

@@ -2017,7 +2017,7 @@ void AudioClient::loadSettings() {
     _receivedAudioStream.setDynamicJitterBufferEnabled(dynamicJitterBufferEnabled.get());
     _receivedAudioStream.setStaticJitterBufferFrames(staticJitterBufferFrames.get());

     qCDebug(audioclient) << "---- Initializing Audio Client ----";

     auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
     for (auto& plugin : codecPlugins) {
         qCDebug(audioclient) << "Codec available:" << plugin->getName();

AudioClient.h

@@ -188,6 +188,7 @@ public slots:
     void handleRecordedAudioInput(const QByteArray& audio);
     void reset();
     void audioMixerKilled();
+    void setInterstitialStatus(bool interstitialMode) { _interstitialMode = interstitialMode; }

     void setMuted(bool muted, bool emitSignal = true);
     bool isMuted() { return _muted; }

@@ -417,6 +418,7 @@ private:
     QVector<AudioInjectorPointer> _activeLocalAudioInjectors;

     bool _isPlayingBackRecording { false };
+    bool _interstitialMode { true };

     CodecPluginPointer _codec;
     QString _selectedCodecName;
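
_interstitialMode defaults to true, so AudioClient boots into the suppressed state and only starts sending once Application pushes false into the setInterstitialStatus slot added above. A trimmed, Qt-free sketch of the header-side shape with stand-in names:

// Sketch only: the flag and its setter slot as they would look stripped of Qt.
#include <iostream>

class AudioClientSketch {
public:
    // target of Application::interstitialModeChanged in the real wiring
    void setInterstitialStatus(bool interstitialMode) { _interstitialMode = interstitialMode; }
    bool readyToSend() const { return !_interstitialMode; }   // illustrative helper, not in the repo

private:
    bool _interstitialMode { true };   // default true: silent until the domain has fully loaded
};

int main() {
    AudioClientSketch client;
    std::cout << std::boolalpha << client.readyToSend() << "\n";  // false at startup
    client.setInterstitialStatus(false);                          // domain loaded
    std::cout << std::boolalpha << client.readyToSend() << "\n";  // true
}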