Mirror of https://github.com/overte-org/overte.git (synced 2025-04-22 17:53:32 +02:00)

Commit 52a14bf5c3: Merge branch 'master' of github.com:highfidelity/hifi into dk/moreACAudioArtifacts

26 changed files with 327 additions and 199 deletions
@@ -15,19 +15,23 @@
// this should send a signal every 10ms, with pretty good precision. Hardcoding
// to 10ms since that's what you'd want for audio.
void AvatarAudioTimer::start() {
qDebug() << "AvatarAudioTimer::start called";
qDebug() << __FUNCTION__;
auto startTime = usecTimestampNow();
quint64 frameCounter = 0;
const int TARGET_INTERVAL_USEC = 10000; // 10ms
while (!_quit) {
frameCounter++;
// simplest possible timer
++frameCounter;

// tick every 10ms from startTime
quint64 targetTime = startTime + frameCounter * TARGET_INTERVAL_USEC;
quint64 interval = std::max((quint64)0, targetTime - usecTimestampNow());
usleep(interval);
quint64 now = usecTimestampNow();

// avoid quint64 underflow
if (now < targetTime) {
usleep(targetTime - now);
}

emit avatarTick();
}
qDebug() << "AvatarAudioTimer is finished";
}
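The rewritten loop above derives every tick's target time from startTime, so late wakeups do not accumulate drift, and it only sleeps when the target is still in the future, which avoids the unsigned underflow that the old std::max((quint64)0, targetTime - usecTimestampNow()) expression could not prevent (the subtraction itself wraps). A minimal standalone sketch of the same idea, using std::chrono in place of the usecTimestampNow()/usleep() helpers; names and the hardcoded 10 ms interval are illustrative only:

// Sketch (not part of this commit): drift-free fixed-interval ticker.
#include <atomic>
#include <chrono>
#include <cstdint>
#include <thread>

void runFixedIntervalTicker(std::atomic<bool>& quit) {
    using clock = std::chrono::steady_clock;
    constexpr auto interval = std::chrono::milliseconds(10);
    const auto startTime = clock::now();
    uint64_t frameCounter = 0;

    while (!quit) {
        ++frameCounter;
        // each target is derived from startTime, so timing error never accumulates
        const auto targetTime = startTime + frameCounter * interval;
        const auto now = clock::now();
        if (now < targetTime) { // only sleep if we are early
            std::this_thread::sleep_for(targetTime - now);
        }
        // the real code emits avatarTick() here
    }
}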
@@ -90,8 +90,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
PacketType::InjectAudio, PacketType::SilentAudioFrame,
PacketType::AudioStreamStats },
this, "handleNodeAudioPacket");
packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");

connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
@@ -481,6 +481,7 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
}

void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
getOrCreateClientData(sendingNode.data());
DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
}
@@ -579,18 +580,8 @@ void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> mess
}
}

auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());

// FIXME - why would we not have client data at this point??
if (!clientData) {
qDebug() << "UNEXPECTED -- didn't have node linked data in " << __FUNCTION__;
sendingNode->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(sendingNode->getUUID()) });
clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}

auto clientData = getOrCreateClientData(sendingNode.data());
clientData->setupCodec(selectedCodec, selectedCodecName);

qDebug() << "selectedCodecName:" << selectedCodecName;
clientData->sendSelectAudioFormat(sendingNode, selectedCodecName);
}
@@ -646,7 +637,8 @@ void AudioMixer::sendStatsPacket() {
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;

statsObject["avg_listeners_per_frame"] = (float) _sumListeners / (float) _numStatFrames;
statsObject["avg_streams_per_frame"] = (float)_sumStreams / (float)_numStatFrames;
statsObject["avg_listeners_per_frame"] = (float)_sumListeners / (float)_numStatFrames;

QJsonObject mixStats;
mixStats["%_hrtf_mixes"] = percentageForMixStats(_hrtfRenders);

@@ -660,6 +652,7 @@ void AudioMixer::sendStatsPacket() {

statsObject["mix_stats"] = mixStats;

_sumStreams = 0;
_sumListeners = 0;
_hrtfRenders = 0;
_hrtfSilentRenders = 0;
@@ -707,17 +700,24 @@ void AudioMixer::run() {
ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
}

AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());

if (!clientData) {
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}

return clientData;
}

void AudioMixer::domainSettingsRequestComplete() {
auto nodeList = DependencyManager::get<NodeList>();

nodeList->addNodeTypeToInterestSet(NodeType::Agent);

nodeList->linkedDataCreateCallback = [&](Node* node) {
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());

connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
};
nodeList->linkedDataCreateCallback = [&](Node* node) { getOrCreateClientData(node); };

DomainHandler& domainHandler = nodeList->getDomainHandler();
const QJsonObject& settingsObject = domainHandler.getSettingsObject();
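The hunk above funnels every place that needs per-node audio state through getOrCreateClientData(), so the linked data is created and its injectorStreamFinished connection is wired in exactly one spot instead of being duplicated in the packet handler and the linkedDataCreateCallback. A compact sketch of that get-or-create pattern with hypothetical stand-in types (not the actual Node/AudioMixerClientData API):

// Sketch (not part of this commit): create linked data and wire it up in one place.
#include <memory>

struct ClientData { /* per-client audio state */ };

struct Node {
    std::unique_ptr<ClientData> linkedData;
};

ClientData* getOrCreateClientData(Node* node) {
    if (!node->linkedData) {
        node->linkedData = std::make_unique<ClientData>();
        // signal wiring (connect(...)) would happen here, exactly once
    }
    return node->linkedData.get();
}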
@@ -730,79 +730,71 @@ void AudioMixer::domainSettingsRequestComplete() {
}

void AudioMixer::broadcastMixes() {
const int TRAILING_AVERAGE_FRAMES = 100;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;

const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;

const float RATIO_BACK_OFF = 0.02f;

auto nodeList = DependencyManager::get<NodeList>();

auto nextFrameTimestamp = p_high_resolution_clock::now();
auto timeToSleep = std::chrono::microseconds(0);

const int TRAILING_AVERAGE_FRAMES = 100;
int currentFrame = 1;
int numFramesPerSecond = (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC);
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;

int currentFrame { 1 };
int numFramesPerSecond { (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC) };

while (!_isFinished) {
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
// manage mixer load
{
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio) +
// ratio of frame spent sleeping / total frame time
((CURRENT_FRAME_RATIO * timeToSleep.count()) / (float) AudioConstants::NETWORK_FRAME_USECS);

const float RATIO_BACK_OFF = 0.02f;
bool hasRatioChanged = false;

const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;

if (timeToSleep.count() < 0) {
timeToSleep = std::chrono::microseconds(0);
}

_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
+ (timeToSleep.count() * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);

float lastCutoffRatio = _performanceThrottlingRatio;
bool hasRatioChanged = false;

if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
// we're struggling - change our min required loudness to reduce some load
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));

qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
hasRatioChanged = true;
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
// we've recovered and can back off the required loudness
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;

if (_performanceThrottlingRatio < 0) {
_performanceThrottlingRatio = 0;
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
qDebug() << "Mixer is struggling";
// change our min required loudness to reduce some load
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
hasRatioChanged = true;
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
qDebug() << "Mixer is recovering";
// back off the required loudness
_performanceThrottlingRatio = std::max(0.0f, _performanceThrottlingRatio - RATIO_BACK_OFF);
hasRatioChanged = true;
}

qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
hasRatioChanged = true;
if (hasRatioChanged) {
// set out min audability threshold from the new ratio
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
framesSinceCutoffEvent = 0;

qDebug() << "Sleeping" << _trailingSleepRatio << "of frame";
qDebug() << "Cutoff is" << _performanceThrottlingRatio;
qDebug() << "Minimum audibility to be mixed is" << _minAudibilityThreshold;
}
}

if (hasRatioChanged) {
// set out min audability threshold from the new ratio
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;

framesSinceCutoffEvent = 0;
if (!hasRatioChanged) {
++framesSinceCutoffEvent;
}
}

if (!hasRatioChanged) {
++framesSinceCutoffEvent;
}

// mix
nodeList->eachNode([&](const SharedNodePointer& node) {

if (node->getLinkedData()) {
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();

// this function will attempt to pop a frame from each audio stream.
// a pointer to the popped data is stored as a member in InboundAudioStream.
// That's how the popped audio data will be read for mixing (but only if the pop was successful)
nodeData->checkBuffersBeforeFrameSend();
_sumStreams += nodeData->checkBuffersBeforeFrameSend();

// if the stream should be muted, send mute packet
if (nodeData->getAvatarAudioStream()
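The load-management block above keeps an exponential moving average of how much of each frame the mixer spends sleeping and nudges a throttling ratio up when that average falls below the struggle threshold, or back down once it recovers, waiting a full averaging window between adjustments. A self-contained sketch of that averaging and hysteresis, with the constants copied from the hunk and everything else illustrative:

// Sketch (not part of this commit): trailing-average load throttle.
#include <algorithm>

struct MixerLoadThrottle {
    static constexpr int TRAILING_AVERAGE_FRAMES = 100;
    static constexpr float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
    static constexpr float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
    static constexpr float STRUGGLE_THRESHOLD = 0.10f;  // sleeping <= 10% of the frame
    static constexpr float BACK_OFF_THRESHOLD = 0.20f;  // sleeping >= 20% of the frame
    static constexpr float RATIO_BACK_OFF = 0.02f;

    float trailingSleepRatio { 0.0f };
    float throttlingRatio { 0.0f };
    int framesSinceCutoffEvent { TRAILING_AVERAGE_FRAMES };

    // sleepFraction: portion of the last frame spent sleeping, clamped to [0, 1]
    void update(float sleepFraction) {
        trailingSleepRatio = PREVIOUS_FRAMES_RATIO * trailingSleepRatio
                           + CURRENT_FRAME_RATIO * sleepFraction;

        bool hasRatioChanged = false;
        if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
            if (trailingSleepRatio <= STRUGGLE_THRESHOLD) {
                // struggling: take away half of the remaining headroom
                throttlingRatio += 0.5f * (1.0f - throttlingRatio);
                hasRatioChanged = true;
            } else if (trailingSleepRatio >= BACK_OFF_THRESHOLD && throttlingRatio != 0.0f) {
                // recovered: relax the throttle a little
                throttlingRatio = std::max(0.0f, throttlingRatio - RATIO_BACK_OFF);
                hasRatioChanged = true;
            }
        }
        if (hasRatioChanged) {
            framesSinceCutoffEvent = 0;  // wait a full window before adjusting again
        } else {
            ++framesSinceCutoffEvent;
        }
    }
};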
@@ -881,24 +873,32 @@ void AudioMixer::broadcastMixes() {

++_numStatFrames;

// since we're a while loop we need to help Qt's event processing
QCoreApplication::processEvents();
// play nice with qt event-looping
{
// since we're a while loop we need to help qt's event processing
QCoreApplication::processEvents();

if (_isFinished) {
// at this point the audio-mixer is done
// check if we have a deferred delete event to process (which we should once finished)
QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
break;
if (_isFinished) {
// alert qt that this is finished
QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
break;
}
}

// push the next frame timestamp to when we should send the next
nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
// sleep until the next frame, if necessary
{
nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);

// sleep as long as we need until next frame, if we can
auto now = p_high_resolution_clock::now();
timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);
auto now = p_high_resolution_clock::now();
timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);

std::this_thread::sleep_for(timeToSleep);
if (timeToSleep.count() < 0) {
nextFrameTimestamp = now;
timeToSleep = std::chrono::microseconds(0);
}

std::this_thread::sleep_for(timeToSleep);
}
}
}
@@ -52,6 +52,7 @@ private slots:
void removeHRTFsForFinishedInjector(const QUuid& streamID);

private:
AudioMixerClientData* getOrCreateClientData(Node* node);
void domainSettingsRequestComplete();

/// adds one stream to the mix for a listening node

@@ -85,6 +86,7 @@ private:
float _attenuationPerDoublingInDistance;
float _noiseMutingThreshold;
int _numStatFrames { 0 };
int _sumStreams { 0 };
int _sumListeners { 0 };
int _hrtfRenders { 0 };
int _hrtfSilentRenders { 0 };
@@ -180,7 +180,7 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
return 0;
}

void AudioMixerClientData::checkBuffersBeforeFrameSend() {
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
QWriteLocker writeLocker { &_streamsLock };

auto it = _audioStreams.begin();

@@ -208,6 +208,8 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend() {
++it;
}
}

return (int)_audioStreams.size();
}

bool AudioMixerClientData::shouldSendStats(int frameNumber) {

@@ -355,7 +357,10 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
}

void AudioMixerClientData::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
qDebug() << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
qDebug() << __FUNCTION__ <<
"sendingNode:" << *node <<
"currentCodec:" << currentCodec <<
"receivedCodec:" << recievedCodec;
sendSelectAudioFormat(node, currentCodec);
}
@@ -52,7 +52,8 @@ public:

int parseData(ReceivedMessage& message) override;

void checkBuffersBeforeFrameSend();
// attempt to pop a frame from each audio stream, and return the number of streams from this client
int checkBuffersBeforeFrameSend();

void removeDeadInjectedStreams();
cmake/externals/openvr/CMakeLists.txt (vendored, 4 changes)

@@ -7,8 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/ValveSoftware/openvr/archive/v1.0.2.zip
URL_MD5 0d1cf5f579cf092e33f34759967b7046
URL https://github.com/ValveSoftware/openvr/archive/v1.0.3.zip
URL_MD5 b484b12901917cc739e40389583c8b0d
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
@@ -17,6 +17,12 @@ macro(SETUP_HIFI_PLUGIN)
set(PLUGIN_PATH "plugins")
endif()

if (WIN32)
# produce PDB files for plugins as well
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Zi")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /DEBUG")
endif()

if (CMAKE_SYSTEM_NAME MATCHES "Linux" OR CMAKE_GENERATOR STREQUAL "Unix Makefiles")
set(PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/${PLUGIN_PATH}/")
else()
Binary file not shown (image asset changed: 246 KiB before, 106 KiB after).
@@ -534,6 +534,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_maxOctreePPS(maxOctreePacketsPerSecond.get()),
_lastFaceTrackerUpdate(0)
{
setProperty("com.highfidelity.launchedFromSteam", SteamClient::isRunning());

_runningMarker.startRunningMarker();

PluginContainer* pluginContainer = dynamic_cast<PluginContainer*>(this); // set the container for any plugins that care

@@ -569,6 +571,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_deadlockWatchdogThread = new DeadlockWatchdogThread();
_deadlockWatchdogThread->start();

qCDebug(interfaceapp) << "[VERSION] SteamVR buildID:" << SteamClient::getSteamVRBuildID();
qCDebug(interfaceapp) << "[VERSION] Build sequence:" << qPrintable(applicationVersion());
qCDebug(interfaceapp) << "[VERSION] MODIFIED_ORGANIZATION:" << BuildInfo::MODIFIED_ORGANIZATION;
qCDebug(interfaceapp) << "[VERSION] VERSION:" << BuildInfo::VERSION;

@@ -1191,6 +1194,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
properties["dropped_frame_rate"] = displayPlugin->droppedFrameRate();
properties["sim_rate"] = getAverageSimsPerSecond();
properties["avatar_sim_rate"] = getAvatarSimrate();
properties["has_async_reprojection"] = displayPlugin->hasAsyncReprojection();

auto bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
properties["packet_rate_in"] = bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond();

@@ -1234,6 +1238,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
auto glInfo = getGLContextData();
properties["gl_info"] = glInfo;
properties["gpu_free_memory"] = (int)BYTES_TO_MB(gpu::Context::getFreeGPUMemory());
properties["ideal_thread_count"] = QThread::idealThreadCount();

auto hmdHeadPose = getHMDSensorPose();
properties["hmd_head_pose_changed"] = isHMDMode() && (hmdHeadPose != lastHMDHeadPose);
@@ -338,6 +338,9 @@ Menu::Menu() {
// Developer > Render > Throttle FPS If Not Focus
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, true);

// Developer > Render > OpenVR threaded submit
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::OpenVrThreadedSubmit, 0, true);

// Developer > Render > Resolution
MenuWrapper* resolutionMenu = renderOptionsMenu->addMenu(MenuOption::RenderResolution);
QActionGroup* resolutionGroup = new QActionGroup(resolutionMenu);

@@ -617,6 +620,14 @@ Menu::Menu() {
// Developer > Audio >>>
MenuWrapper* audioDebugMenu = developerMenu->addMenu("Audio");

action = addActionToQMenuAndActionHash(audioDebugMenu, "Stats...");
connect(action, &QAction::triggered, [] {
auto scriptEngines = DependencyManager::get<ScriptEngines>();
QUrl defaultScriptsLoc = defaultScriptsLocation();
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/audio/stats.js");
scriptEngines->loadScript(defaultScriptsLoc.toString());
});

action = addActionToQMenuAndActionHash(audioDebugMenu, "Buffers...");
connect(action, &QAction::triggered, [] {
DependencyManager::get<OffscreenUi>()->toggle(QString("hifi/dialogs/AudioPreferencesDialog.qml"), "AudioPreferencesDialog");
@@ -136,6 +136,7 @@ namespace MenuOption {
const QString OctreeStats = "Entity Statistics";
const QString OnePointCalibration = "1 Point Calibration";
const QString OnlyDisplayTopTen = "Only Display Top Ten";
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit";
const QString OutputMenu = "Display";
const QString Overlays = "Overlays";
const QString PackageModel = "Package Model...";
@@ -161,9 +161,6 @@ int main(int argc, const char* argv[]) {
QSettings::setDefaultFormat(QSettings::IniFormat);
Application app(argc, const_cast<char**>(argv), startupTime, runServer, serverContentPathOptionValue);

bool launchedFromSteam = SteamClient::isRunning();
app.setProperty("com.highfidelity.launchedFromSteam", launchedFromSteam);

// If we failed the OpenGLVersion check, log it.
if (override) {
auto accountManager = DependencyManager::get<AccountManager>();
@@ -344,7 +344,6 @@ bool OffscreenQmlSurface::allowNewFrame(uint8_t fps) {
OffscreenQmlSurface::OffscreenQmlSurface() {
}

static const uint64_t MAX_SHUTDOWN_WAIT_SECS = 2;
OffscreenQmlSurface::~OffscreenQmlSurface() {
QObject::disconnect(&_updateTimer);
QObject::disconnect(qApp);
@@ -32,6 +32,7 @@ using namespace udt;
Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
QObject(parent),
_synTimer(new QTimer(this)),
_readyReadBackupTimer(new QTimer(this)),
_shouldChangeSocketOptions(shouldChangeSocketOptions)
{
connect(&_udpSocket, &QUdpSocket::readyRead, this, &Socket::readPendingDatagrams);

@@ -46,6 +47,11 @@ Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
connect(&_udpSocket, SIGNAL(error(QAbstractSocket::SocketError)),
this, SLOT(handleSocketError(QAbstractSocket::SocketError)));
connect(&_udpSocket, &QAbstractSocket::stateChanged, this, &Socket::handleStateChanged);

// in order to help track down the zombie server bug, add a timer to check if we missed a readyRead
const int READY_READ_BACKUP_CHECK_MSECS = 10 * 1000;
connect(_readyReadBackupTimer, &QTimer::timeout, this, &Socket::checkForReadyReadBackup);
_readyReadBackupTimer->start(READY_READ_BACKUP_CHECK_MSECS);
}

void Socket::bind(const QHostAddress& address, quint16 port) {

@@ -296,9 +302,25 @@ void Socket::messageFailed(Connection* connection, Packet::MessageNumber message
}
}

void Socket::checkForReadyReadBackup() {
if (_udpSocket.hasPendingDatagrams()) {
qCDebug(networking) << "Socket::checkForReadyReadBackup() detected blocked readyRead signal. Flushing pending datagrams.";

// drop all of the pending datagrams on the floor
while (_udpSocket.hasPendingDatagrams()) {
_udpSocket.readDatagram(nullptr, 0);
}
}
}

void Socket::readPendingDatagrams() {
int packetSizeWithHeader = -1;

while ((packetSizeWithHeader = _udpSocket.pendingDatagramSize()) != -1) {

// we're reading a packet so re-start the readyRead backup timer
_readyReadBackupTimer->start();

// grab a time point we can mark as the receive time of this packet
auto receiveTime = p_high_resolution_clock::now();
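The new _readyReadBackupTimer above acts as a watchdog: it is restarted on every datagram read, so it only fires after ten seconds with no reads, and checkForReadyReadBackup() then drains whatever is stuck in the socket buffer. A minimal sketch of the same watchdog pattern using plain Qt classes (assumed names, not the actual udt::Socket API):

// Sketch (not part of this commit): readyRead watchdog on a UDP socket.
#include <QtCore/QTimer>
#include <QtNetwork/QUdpSocket>

class WatchdogSocket : public QObject {
    Q_OBJECT
public:
    explicit WatchdogSocket(QObject* parent = nullptr) : QObject(parent) {
        connect(&_udpSocket, &QUdpSocket::readyRead, this, &WatchdogSocket::readPendingDatagrams);

        // fires only if no datagram has been read for 10 seconds
        const int READY_READ_BACKUP_CHECK_MSECS = 10 * 1000;
        connect(&_backupTimer, &QTimer::timeout, this, &WatchdogSocket::checkForReadyReadBackup);
        _backupTimer.start(READY_READ_BACKUP_CHECK_MSECS);
    }

private slots:
    void readPendingDatagrams() {
        while (_udpSocket.hasPendingDatagrams()) {
            _backupTimer.start();  // kick the watchdog on every read
            QByteArray datagram(int(_udpSocket.pendingDatagramSize()), 0);
            _udpSocket.readDatagram(datagram.data(), datagram.size());
            // ... hand the datagram off for processing ...
        }
    }

    void checkForReadyReadBackup() {
        // readyRead appears to have been missed; drop anything still pending
        while (_udpSocket.hasPendingDatagrams()) {
            _udpSocket.readDatagram(nullptr, 0);
        }
    }

private:
    QUdpSocket _udpSocket;
    QTimer _backupTimer;
};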
@@ -101,6 +101,7 @@ public slots:

private slots:
void readPendingDatagrams();
void checkForReadyReadBackup();
void rateControlSync();

void handleSocketError(QAbstractSocket::SocketError socketError);

@@ -136,6 +137,8 @@ private:
int _synInterval { 10 }; // 10ms
QTimer* _synTimer { nullptr };

QTimer* _readyReadBackupTimer { nullptr };

int _maxBandwidth { -1 };

std::unique_ptr<CongestionControlVirtualFactory> _ccFactory { new CongestionControlFactory<TCPVegasCC>() };
@@ -139,6 +139,7 @@ public:
virtual bool isStereo() const { return isHmd(); }
virtual bool isThrottled() const { return false; }
virtual float getTargetFrameRate() const { return 0.0f; }
virtual bool hasAsyncReprojection() const { return false; }

/// Returns a boolean value indicating whether the display is currently visible
/// to the user. For monitor displays, false might indicate that a screensaver,
@@ -44,33 +44,40 @@ void BatchLoader::start() {
return;
}


for (const auto& rawURL : _urls) {
QUrl url = expandScriptUrl(normalizeScriptURL(rawURL));

qCDebug(scriptengine) << "Loading script at " << url;

QPointer<BatchLoader> self = this;
DependencyManager::get<ScriptCache>()->getScriptContents(url.toString(), [this, self](const QString& url, const QString& contents, bool isURL, bool success) {
if (!self) {
return;
}
auto scriptCache = DependencyManager::get<ScriptCache>();

// Because the ScriptCache may call this callback from differents threads,
// we need to make sure this is thread-safe.
std::lock_guard<std::mutex> lock(_dataLock);
// Use a proxy callback to handle the call and emit the signal in a thread-safe way.
// If BatchLoader is deleted before the callback is called, the subsequent "emit" call will not do
// anything.
ScriptCacheSignalProxy* proxy = new ScriptCacheSignalProxy(scriptCache.data());
scriptCache->getScriptContents(url.toString(), [proxy](const QString& url, const QString& contents, bool isURL, bool success) {
proxy->receivedContent(url, contents, isURL, success);
proxy->deleteLater();
}, false);

connect(proxy, &ScriptCacheSignalProxy::contentAvailable, this, [this](const QString& url, const QString& contents, bool isURL, bool success) {
if (isURL && success) {
_data.insert(url, contents);
qCDebug(scriptengine) << "Loaded: " << url;
} else {
_data.insert(url, QString());
qCDebug(scriptengine) << "Could not load" << url;
qCDebug(scriptengine) << "Could not load: " << url;
}

if (!_finished && _urls.size() == _data.size()) {
_finished = true;
emit finished(_data);
}
}, false);
});
}
}

void ScriptCacheSignalProxy::receivedContent(const QString& url, const QString& contents, bool isURL, bool success) {
emit contentAvailable(url, contents, isURL, success);
}
|
@ -21,10 +21,20 @@
|
|||
|
||||
#include <mutex>
|
||||
|
||||
class ScriptCacheSignalProxy : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
ScriptCacheSignalProxy(QObject* parent) : QObject(parent) { }
|
||||
void receivedContent(const QString& url, const QString& contents, bool isURL, bool success);
|
||||
|
||||
signals:
|
||||
void contentAvailable(const QString& url, const QString& contents, bool isURL, bool success);
|
||||
};
|
||||
|
||||
class BatchLoader : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
BatchLoader(const QList<QUrl>& urls) ;
|
||||
BatchLoader(const QList<QUrl>& urls);
|
||||
|
||||
void start();
|
||||
bool isFinished() const { return _finished; };
|
||||
|
@ -39,7 +49,6 @@ private:
|
|||
bool _finished;
|
||||
QSet<QUrl> _urls;
|
||||
QMap<QUrl, QString> _data;
|
||||
std::mutex _dataLock;
|
||||
};
|
||||
|
||||
#endif // hifi_BatchLoader_h
|
||||
|
|
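Together with the BatchLoader.cpp hunk, the header above replaces the mutex-guarded capture of this with a small QObject proxy: the cache callback, which may run on any thread, only touches the proxy, and the proxy's signal is delivered to BatchLoader through Qt's normal cross-thread (queued) connection, which is also dropped automatically if BatchLoader is destroyed first. A stripped-down sketch of that proxy pattern with hypothetical names (the real classes are the ones declared above):

// Sketch (not part of this commit): marshal a worker-thread callback into a Qt signal.
#include <QtCore/QObject>
#include <QtCore/QString>

class ResultSignalProxy : public QObject {
    Q_OBJECT
public:
    explicit ResultSignalProxy(QObject* parent = nullptr) : QObject(parent) {}

    // Called from the worker callback, on whatever thread it happens to run.
    void receivedResult(const QString& url, const QString& contents, bool success) {
        emit resultAvailable(url, contents, success);
    }

signals:
    void resultAvailable(const QString& url, const QString& contents, bool success);
};

The worker callback captures only the proxy, forwards its arguments through receivedResult(), and then calls deleteLater() on the proxy, which is exactly how the getScriptContents() lambda in the BatchLoader.cpp hunk uses ScriptCacheSignalProxy.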
|
@ -245,6 +245,32 @@ void SteamClient::shutdown() {
|
|||
steamCallbackManager.getTicketRequests().stopAll();
|
||||
}
|
||||
|
||||
int SteamClient::getSteamVRBuildID() {
|
||||
if (initialized) {
|
||||
static const int MAX_PATH_SIZE = 512;
|
||||
static const int STEAMVR_APPID = 250820;
|
||||
char rawPath[MAX_PATH_SIZE];
|
||||
SteamApps()->GetAppInstallDir(STEAMVR_APPID, rawPath, MAX_PATH_SIZE);
|
||||
|
||||
QString path(rawPath);
|
||||
path += "\\bin\\version.txt";
|
||||
qDebug() << "SteamVR version file path:" << path;
|
||||
|
||||
QFile file(path);
|
||||
if (file.open(QIODevice::ReadOnly)) {
|
||||
QString buildIDString = file.readLine();
|
||||
|
||||
bool ok = false;
|
||||
int buildID = buildIDString.toInt(&ok);
|
||||
if (ok) {
|
||||
return buildID;
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
void SteamClient::runCallbacks() {
|
||||
if (!initialized) {
|
||||
return;
|
||||
|
|
|
@ -37,6 +37,7 @@ public:
|
|||
static void openInviteOverlay();
|
||||
static void joinLobby(QString lobbyId);
|
||||
|
||||
static int getSteamVRBuildID();
|
||||
};
|
||||
|
||||
class SteamScriptingInterface : public QObject {
|
||||
|
|
|
@ -19,6 +19,9 @@ public:
|
|||
~OculusBaseDisplayPlugin();
|
||||
bool isSupported() const override;
|
||||
|
||||
bool hasAsyncReprojection() const override { return true; }
|
||||
|
||||
|
||||
// Stereo specific methods
|
||||
void resetSensors() override final;
|
||||
bool beginFrameRender(uint32_t frameIndex) override;
|
||||
|
|
|
@ -35,6 +35,7 @@ Q_DECLARE_LOGGING_CATEGORY(displayplugins)
|
|||
|
||||
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
|
||||
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit"; // this probably shouldn't be hardcoded here
|
||||
|
||||
PoseData _nextRenderPoseData;
|
||||
PoseData _nextSimPoseData;
|
||||
|
@ -42,15 +43,12 @@ PoseData _nextSimPoseData;
|
|||
#define MIN_CORES_FOR_NORMAL_RENDER 5
|
||||
bool forceInterleavedReprojection = (QThread::idealThreadCount() < MIN_CORES_FOR_NORMAL_RENDER);
|
||||
|
||||
|
||||
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
|
||||
bool _openVrDisplayActive { false };
|
||||
// Flip y-axis since GL UV coords are backwards.
|
||||
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_LEFT{ 0, 0, 0.5f, 1 };
|
||||
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_RIGHT{ 0.5f, 0, 1, 1 };
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
|
||||
#define REPROJECTION_BINDING 1
|
||||
|
||||
static const char* HMD_REPROJECTION_VERT = R"SHADER(
|
||||
|
@ -351,12 +349,17 @@ public:
|
|||
OpenVrDisplayPlugin& _plugin;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
bool OpenVrDisplayPlugin::isSupported() const {
|
||||
return openVrSupported();
|
||||
}
|
||||
|
||||
float OpenVrDisplayPlugin::getTargetFrameRate() const {
|
||||
if (forceInterleavedReprojection && !_asyncReprojectionActive) {
|
||||
return TARGET_RATE_OpenVr / 2.0f;
|
||||
}
|
||||
return TARGET_RATE_OpenVr;
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::init() {
|
||||
Plugin::init();
|
||||
|
||||
|
@@ -376,6 +379,9 @@ void OpenVrDisplayPlugin::init() {
emit deviceConnected(getName());
}

// FIXME remove once OpenVR header is updated
#define VRCompositor_ReprojectionAsync 0x04

bool OpenVrDisplayPlugin::internalActivate() {
if (!_system) {
_system = acquireOpenVrSystem();

@@ -394,6 +400,16 @@ bool OpenVrDisplayPlugin::internalActivate() {
return false;
}

vr::Compositor_FrameTiming timing;
memset(&timing, 0, sizeof(timing));
timing.m_nSize = sizeof(vr::Compositor_FrameTiming);
vr::VRCompositor()->GetFrameTiming(&timing);
_asyncReprojectionActive = timing.m_nReprojectionFlags & VRCompositor_ReprojectionAsync;

_threadedSubmit = !_asyncReprojectionActive;
qDebug() << "OpenVR Async Reprojection active: " << _asyncReprojectionActive;
qDebug() << "OpenVR Threaded submit enabled: " << _threadedSubmit;

_openVrDisplayActive = true;
_container->setIsOptionChecked(StandingHMDSensorMode, true);

@@ -434,16 +450,16 @@ bool OpenVrDisplayPlugin::internalActivate() {
#endif
}

#if OPENVR_THREADED_SUBMIT
_submitThread = std::make_shared<OpenVrSubmitThread>(*this);
if (!_submitCanvas) {
withMainThreadContext([&] {
_submitCanvas = std::make_shared<gl::OffscreenContext>();
_submitCanvas->create();
_submitCanvas->doneCurrent();
});
if (_threadedSubmit) {
_submitThread = std::make_shared<OpenVrSubmitThread>(*this);
if (!_submitCanvas) {
withMainThreadContext([&] {
_submitCanvas = std::make_shared<gl::OffscreenContext>();
_submitCanvas->create();
_submitCanvas->doneCurrent();
});
}
}
#endif

return Parent::internalActivate();
}
@@ -473,27 +489,27 @@ void OpenVrDisplayPlugin::customizeContext() {

Parent::customizeContext();

#if OPENVR_THREADED_SUBMIT
_compositeInfos[0].texture = _compositeFramebuffer->getRenderBuffer(0);
for (size_t i = 0; i < COMPOSITING_BUFFER_SIZE; ++i) {
if (0 != i) {
_compositeInfos[i].texture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, _renderTargetSize.x, _renderTargetSize.y, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT)));
if (_threadedSubmit) {
_compositeInfos[0].texture = _compositeFramebuffer->getRenderBuffer(0);
for (size_t i = 0; i < COMPOSITING_BUFFER_SIZE; ++i) {
if (0 != i) {
_compositeInfos[i].texture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, _renderTargetSize.x, _renderTargetSize.y, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT)));
}
_compositeInfos[i].textureID = getGLBackend()->getTextureID(_compositeInfos[i].texture, false);
}
_compositeInfos[i].textureID = getGLBackend()->getTextureID(_compositeInfos[i].texture, false);
_submitThread->_canvas = _submitCanvas;
_submitThread->start(QThread::HighPriority);
}
_submitThread->_canvas = _submitCanvas;
_submitThread->start(QThread::HighPriority);
#endif
}

void OpenVrDisplayPlugin::uncustomizeContext() {
Parent::uncustomizeContext();

#if OPENVR_THREADED_SUBMIT
_submitThread->_quit = true;
_submitThread->wait();
_submitThread.reset();
#endif
if (_threadedSubmit) {
_submitThread->_quit = true;
_submitThread->wait();
_submitThread.reset();
}
}

void OpenVrDisplayPlugin::resetSensors() {
@@ -582,75 +598,76 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
}

void OpenVrDisplayPlugin::compositeLayers() {
#if OPENVR_THREADED_SUBMIT
++_renderingIndex;
_renderingIndex %= COMPOSITING_BUFFER_SIZE;
if (_threadedSubmit) {
++_renderingIndex;
_renderingIndex %= COMPOSITING_BUFFER_SIZE;

auto& newComposite = _compositeInfos[_renderingIndex];
newComposite.pose = _currentPresentFrameInfo.presentPose;
_compositeFramebuffer->setRenderBuffer(0, newComposite.texture);
#endif
auto& newComposite = _compositeInfos[_renderingIndex];
newComposite.pose = _currentPresentFrameInfo.presentPose;
_compositeFramebuffer->setRenderBuffer(0, newComposite.texture);
}

Parent::compositeLayers();

#if OPENVR_THREADED_SUBMIT
newComposite.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// https://www.opengl.org/registry/specs/ARB/sync.txt:
// > The simple flushing behavior defined by
// > SYNC_FLUSH_COMMANDS_BIT will not help when waiting for a fence
// > command issued in another context's command stream to complete.
// > Applications which block on a fence sync object must take
// > additional steps to assure that the context from which the
// > corresponding fence command was issued has flushed that command
// > to the graphics pipeline.
glFlush();
if (_threadedSubmit) {
auto& newComposite = _compositeInfos[_renderingIndex];
newComposite.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// https://www.opengl.org/registry/specs/ARB/sync.txt:
// > The simple flushing behavior defined by
// > SYNC_FLUSH_COMMANDS_BIT will not help when waiting for a fence
// > command issued in another context's command stream to complete.
// > Applications which block on a fence sync object must take
// > additional steps to assure that the context from which the
// > corresponding fence command was issued has flushed that command
// > to the graphics pipeline.
glFlush();

if (!newComposite.textureID) {
newComposite.textureID = getGLBackend()->getTextureID(newComposite.texture, false);
if (!newComposite.textureID) {
newComposite.textureID = getGLBackend()->getTextureID(newComposite.texture, false);
}
withPresentThreadLock([&] {
_submitThread->update(newComposite);
});
}
withPresentThreadLock([&] {
_submitThread->update(newComposite);
});
#endif
}

void OpenVrDisplayPlugin::hmdPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)

#if OPENVR_THREADED_SUBMIT
_submitThread->waitForPresent();
#else
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0), false);
vr::Texture_t vrTexture{ (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
vr::VRCompositor()->PostPresentHandoff();
#endif
if (_threadedSubmit) {
_submitThread->waitForPresent();
} else {
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0), false);
vr::Texture_t vrTexture { (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
vr::VRCompositor()->PostPresentHandoff();
_presentRate.increment();
}
}

void OpenVrDisplayPlugin::postPreview() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
PoseData nextRender, nextSim;
nextRender.frameIndex = presentCount();
#if !OPENVR_THREADED_SUBMIT
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);

glm::mat4 resetMat;
withPresentThreadLock([&] {
resetMat = _sensorResetMat;
});
nextRender.update(resetMat);
nextSim.update(resetMat);
withPresentThreadLock([&] {
_nextSimPoseData = nextSim;
});
_nextRenderPoseData = nextRender;

// FIXME - this looks wrong!
_hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
#else
_hmdActivityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
#endif

if (!_threadedSubmit) {
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);

glm::mat4 resetMat;
withPresentThreadLock([&] {
resetMat = _sensorResetMat;
});
nextRender.update(resetMat);
nextSim.update(resetMat);
withPresentThreadLock([&] {
_nextSimPoseData = nextSim;
});
_nextRenderPoseData = nextRender;

}
}

bool OpenVrDisplayPlugin::isHmdMounted() const {
@@ -684,3 +701,7 @@ void OpenVrDisplayPlugin::unsuppressKeyboard() {
bool OpenVrDisplayPlugin::isKeyboardVisible() {
return isOpenVrKeyboardShown();
}

int OpenVrDisplayPlugin::getRequiredThreadCount() const {
return Parent::getRequiredThreadCount() + (_threadedSubmit ? 1 : 0);
}
@@ -15,9 +15,6 @@

const float TARGET_RATE_OpenVr = 90.0f; // FIXME: get from sdk tracked device property? This number is vive-only.

#define OPENVR_THREADED_SUBMIT 1

#if OPENVR_THREADED_SUBMIT
namespace gl {
class OffscreenContext;
}

@@ -34,7 +31,6 @@ struct CompositeInfo {
glm::mat4 pose;
GLsync fence{ 0 };
};
#endif

class OpenVrDisplayPlugin : public HmdDisplayPlugin {
using Parent = HmdDisplayPlugin;

@@ -44,7 +40,8 @@ public:

void init() override;

float getTargetFrameRate() const override { return TARGET_RATE_OpenVr; }
float getTargetFrameRate() const override;
bool hasAsyncReprojection() const override { return _asyncReprojectionActive; }

void customizeContext() override;
void uncustomizeContext() override;

@@ -58,8 +55,8 @@ public:
void unsuppressKeyboard() override;
bool isKeyboardVisible() override;

// Needs an additional thread for VR submission
int getRequiredThreadCount() const override { return Parent::getRequiredThreadCount() + 1; }
// Possibly needs an additional thread for VR submission
int getRequiredThreadCount() const override;

protected:
bool internalActivate() override;

@@ -71,7 +68,6 @@ protected:
bool isHmdMounted() const override;
void postPreview() override;

private:
vr::IVRSystem* _system { nullptr };
std::atomic<vr::EDeviceActivityLevel> _hmdActivityLevel { vr::k_EDeviceActivityLevel_Unknown };

@@ -80,12 +76,13 @@ private:

vr::HmdMatrix34_t _lastGoodHMDPose;
mat4 _sensorResetMat;
bool _threadedSubmit { true };

#if OPENVR_THREADED_SUBMIT
CompositeInfo::Array _compositeInfos;
size_t _renderingIndex { 0 };
std::shared_ptr<OpenVrSubmitThread> _submitThread;
std::shared_ptr<gl::OffscreenContext> _submitCanvas;
friend class OpenVrSubmitThread;
#endif

bool _asyncReprojectionActive { false };
};
@@ -210,6 +210,11 @@ void ViveControllerManager::renderHand(const controller::Pose& pose, gpu::Batch&


void ViveControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {

if (!_system) {
return;
}

auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
handleOpenVrEvents();
if (openVrQuitRequested()) {
@@ -304,7 +304,7 @@ function setEnabled(value) {

var CHANNEL_AWAY_ENABLE = "Hifi-Away-Enable";
var handleMessage = function(channel, message, sender) {
if (channel === CHANNEL_AWAY_ENABLE) {
if (channel === CHANNEL_AWAY_ENABLE && sender === MyAvatar.sessionUUID) {
print("away.js | Got message on Hifi-Away-Enable: ", message);
setEnabled(message === 'enable');
}

@@ -344,6 +344,7 @@ Script.scriptEnding.connect(function () {
Controller.mousePressEvent.disconnect(goActive);
Controller.keyPressEvent.disconnect(maybeGoActive);
Messages.messageReceived.disconnect(handleMessage);
Messages.unsubscribe(CHANNEL_AWAY_ENABLE);
});

if (HMD.active && !HMD.mounted) {
@@ -812,7 +812,8 @@ for (var key in trayIcons) {
const notificationIcon = path.join(__dirname, '../resources/console-notification.png');

function onContentLoaded() {
maybeShowSplash();
// Disable splash window for now.
// maybeShowSplash();

if (buildInfo.releaseType == 'PRODUCTION') {
var currentVersion = null;