Merge branch 'master' of https://github.com/highfidelity/hifi into purple
Commit: c65269e72d
86 changed files with 1300 additions and 587 deletions

@@ -361,10 +361,6 @@ void Agent::executeScript() {
// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", this);

// FIXME -we shouldn't be calling this directly, it's normally called by run(), not sure why
// viewers would need this called.
//_scriptEngine->init(); // must be done before we set up the viewers

_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());

QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);

@@ -478,9 +474,24 @@ void Agent::processAgentAvatar() {
nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
}
}
void Agent::flushEncoder() {
_flushEncoder = false;
static QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);
static QByteArray encodedZeros;
if (_encoder) {
_encoder->encode(zeros, encodedZeros);
}
}

void Agent::processAgentAvatarAudio() {
if (_isAvatar && (_isListeningToAudioStream || _avatarSound)) {
// after sound is done playing, encoder has a bit of state in it,
// and needs some 0s to forget or you get a little click next time
// you play something
if (_flushEncoder) {
flushEncoder();
}

// if we have an avatar audio stream then send it out to our audio-mixer
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
bool silentFrame = true;

@@ -513,6 +524,7 @@ void Agent::processAgentAvatarAudio() {
// and our sent bytes back to zero
_avatarSound.clear();
_numAvatarSoundSentBytes = 0;
_flushEncoder = true;
}
}

@@ -529,14 +541,16 @@ void Agent::processAgentAvatarAudio() {
return;
}

// write the codec
audioPacket->writeString(_selectedCodecName);

// write the number of silent samples so the audio-mixer can uphold timing
audioPacket->writePrimitive(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
audioPacket->writePrimitive(numAvailableSamples);

// use the orientation and position of this avatar for the source of this audio
audioPacket->writePrimitive(scriptedAvatar->getPosition());
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
audioPacket->writePrimitive(headOrientation);

} else if (nextSoundOutput) {

// write the codec

@@ -550,30 +564,28 @@ void Agent::processAgentAvatarAudio() {
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
audioPacket->writePrimitive(headOrientation);

QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
QByteArray encodedBuffer;
// encode it
if(_encoder) {
QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
QByteArray encodedBuffer;
_encoder->encode(decodedBuffer, encodedBuffer);
audioPacket->write(encodedBuffer.data(), encodedBuffer.size());
} else {
audioPacket->write(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
} else {
encodedBuffer = decodedBuffer;
}

audioPacket->write(encodedBuffer.constData(), encodedBuffer.size());
}

// write audio packet to AudioMixer nodes
// write audio packet to AudioMixer nodes
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node) {
// only send to nodes of type AudioMixer
if (node->getType() == NodeType::AudioMixer) {
// pack sequence number
quint16 sequence = _outgoingScriptAudioSequenceNumbers[node->getUUID()]++;
audioPacket->seek(0);
audioPacket->writePrimitive(sequence);

// send audio packet
nodeList->sendUnreliablePacket(*audioPacket, *node);
// pack sequence number
quint16 sequence = _outgoingScriptAudioSequenceNumbers[node->getUUID()]++;
audioPacket->seek(0);
audioPacket->writePrimitive(sequence);
// send audio packet
nodeList->sendUnreliablePacket(*audioPacket, *node);
}
});
}

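The flushEncoder() hunk above pushes one frame of zeros through the codec so leftover encoder state decays before the next sound plays; otherwise the next playback starts with an audible click. A minimal sketch of the same idea, assuming a hypothetical Encoder interface with an encode(decoded, encoded) method standing in for the codec plugin class used here:

#include <QByteArray>

// Hypothetical stand-in for the codec plugin interface used in the hunk above.
class Encoder {
public:
    virtual ~Encoder() = default;
    virtual void encode(const QByteArray& decoded, QByteArray& encoded) = 0;
};

// Push one frame of silence through the encoder so stale state from the
// previous sound cannot produce an audible click on the next one.
void flushEncoderState(Encoder* encoder, int frameBytes) {
    if (!encoder) {
        return;
    }
    QByteArray zeros(frameBytes, 0);   // a full network frame of silence
    QByteArray encodedZeros;           // output is discarded; only the side effect matters
    encoder->encode(zeros, encodedZeros);
}
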
@@ -81,7 +81,8 @@ signals:
private:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);

void flushEncoder();

std::unique_ptr<ScriptEngine> _scriptEngine;
EntityEditPacketSender _entityEditSender;
EntityTreeHeadlessViewer _entityViewer;

@@ -107,6 +108,7 @@ private:
QString _selectedCodecName;
Encoder* _encoder { nullptr };
QThread _avatarAudioTimerThread;
bool _flushEncoder { false };
};

#endif // hifi_Agent_h

cmake/externals/openvr/CMakeLists.txt (vendored, 4 changes)

@@ -7,8 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/ValveSoftware/openvr/archive/v1.0.2.zip
URL_MD5 0d1cf5f579cf092e33f34759967b7046
URL https://github.com/ValveSoftware/openvr/archive/v1.0.3.zip
URL_MD5 b484b12901917cc739e40389583c8b0d
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

@@ -17,6 +17,12 @@ macro(SETUP_HIFI_PLUGIN)
set(PLUGIN_PATH "plugins")
endif()

if (WIN32)
# produce PDB files for plugins as well
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Zi")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /DEBUG")
endif()

if (CMAKE_SYSTEM_NAME MATCHES "Linux" OR CMAKE_GENERATOR STREQUAL "Unix Makefiles")
set(PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/${PLUGIN_PATH}/")
else()

@@ -30,7 +30,7 @@ const int PEER_SILENCE_THRESHOLD_MSECS = 5 * 1000;
IceServer::IceServer(int argc, char* argv[]) :
QCoreApplication(argc, argv),
_id(QUuid::createUuid()),
_serverSocket(),
_serverSocket(0, false),
_activePeers()
{
// start the ice-server socket

@@ -3,6 +3,11 @@
"channels": [
{ "from": "GamePad.LY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateZ" },
{ "from": "GamePad.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateX" },

{ "from": "GamePad.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "GamePad.LT", "to": "Standard.LT" },
{ "from": "GamePad.LB", "to": "Standard.LB" },
{ "from": "GamePad.LS", "to": "Standard.LS" },

@@ -31,6 +36,10 @@
]
},

{ "from": "GamePad.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "GamePad.RT", "to": "Standard.RT" },
{ "from": "GamePad.RB", "to": "Standard.RB" },
{ "from": "GamePad.RS", "to": "Standard.RS" },

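The new LTClick/RTClick mappings route each trigger through a hysteresis filter, so the click output only turns on above 0.9 and back off below 0.85 rather than chattering around a single threshold. A minimal C++ sketch of that gating behaviour (not the controller library's actual filter class):

// Minimal hysteresis gate: output latches on above 'max' and off below 'min',
// so trigger jitter around one threshold cannot toggle the click repeatedly.
class HysteresisGate {
public:
    HysteresisGate(float min, float max) : _min(min), _max(max) {}

    bool update(float value) {
        if (value >= _max) {
            _on = true;
        } else if (value <= _min) {
            _on = false;
        }
        return _on;   // unchanged while value sits between min and max
    }

private:
    float _min;
    float _max;
    bool _on { false };
};

// Usage: HysteresisGate gate(0.85f, 0.9f); bool click = gate.update(triggerValue);
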
interface/resources/qml/ConnectionFailureDialog.qml (new file, 14 lines)

@@ -0,0 +1,14 @@
import QtQuick.Dialogs 1.2 as OriginalDialogs

import "dialogs"

MessageDialog {
id: root
objectName: "ConnectionFailureDialog"

title: "No Connection"
text: "Unable to connect to this domain. Click the 'GO TO' button on the toolbar to visit another domain."
buttons: OriginalDialogs.StandardButton.Ok
icon: OriginalDialogs.StandardIcon.Warning
defaultButton: OriginalDialogs.StandardButton.NoButton;
}

@@ -56,6 +56,10 @@ Windows.ScrollingWindow {
onWidthChanged: notifyResized();
onHeightChanged: notifyResized();

onShownChanged: {
keyboardEnabled = HMD.active;
}

Item {
width: pane.contentWidth
implicitHeight: pane.scrollHeight

@@ -205,6 +205,12 @@ Item {
StatText {
text: " Count: " + root.gpuTextures;
}
StatText {
text: " Rectified: " + root.rectifiedTextureCount;
}
StatText {
text: " Decimated: " + root.decimatedTextureCount;
}
StatText {
text: " Sparse Count: " + root.gpuTexturesSparse;
visible: 0 != root.gpuSparseTextureEnabled;

@@ -215,6 +221,9 @@ Item {
StatText {
text: " Commited Memory: " + root.gpuTextureMemory + " MB";
}
StatText {
text: " Framebuffer Memory: " + root.gpuTextureFramebufferMemory + " MB";
}
StatText {
text: " Sparse Memory: " + root.gpuTextureSparseMemory + " MB";
visible: 0 != root.gpuSparseTextureEnabled;

@@ -225,6 +234,12 @@ Item {
StatText {
text: " Count: " + root.gpuTextures;
}
StatText {
text: " Memory: " + root.gpuBufferMemory;
}
StatText {
text: "GL Swapchain Memory: " + root.glContextSwapchainMemory + " MB";
}
StatText {
text: "QML Texture Memory: " + root.qmlTextureMemory + " MB";
}

@@ -13,6 +13,9 @@ struct OverlayData {
vec4 glowPoints;
vec4 glowColors[2];
vec4 resolutionRadiusAlpha;

vec4 extraGlowColor;
vec2 extraGlowPoint;
};

layout(std140) uniform overlayBuffer {

@@ -25,6 +28,9 @@ float alpha = overlay.resolutionRadiusAlpha.w;
vec4 glowPoints = overlay.glowPoints;
vec4 glowColors[2] = overlay.glowColors;

vec2 extraGlowPoint = overlay.extraGlowPoint;
vec4 extraGlowColor = overlay.extraGlowColor;

in vec3 vPosition;
in vec2 vTexCoord;

@@ -48,11 +54,16 @@ void main() {
float glowIntensity = 0.0;
float dist1 = distance(vTexCoord * aspect, glowPoints.xy * aspect);
float dist2 = distance(vTexCoord * aspect, glowPoints.zw * aspect);
float dist = min(dist1, dist2);
float dist3 = distance(vTexCoord * aspect, extraGlowPoint * aspect);
float distX = min(dist1, dist2);
float dist = min(distX, dist3);
vec3 glowColor = glowColors[0].rgb;
if (dist2 < dist1) {
glowColor = glowColors[1].rgb;
}
if (dist3 < dist2) {
glowColor = extraGlowColor.rgb;
}

if (dist <= radius) {
glowIntensity = 1.0 - (dist / radius);

@@ -11,6 +11,9 @@ struct OverlayData {
vec4 glowPoints;
vec4 glowColors[2];
vec4 resolutionRadiusAlpha;

vec4 extraGlowColor;
vec2 extraGlowPoint;
};

layout(std140) uniform overlayBuffer {

@@ -858,7 +858,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
{ "gl_version", glContextData["version"] },
{ "gl_vender", glContextData["vendor"] },
{ "gl_sl_version", glContextData["slVersion"] },
{ "gl_sl_version", glContextData["sl_version"] },
{ "gl_renderer", glContextData["renderer"] },
{ "ideal_thread_count", QThread::idealThreadCount() }
};

@@ -882,8 +882,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo

UserActivityLogger::getInstance().logAction("launch", properties);

_connectionMonitor.init();

// Tell our entity edit sender about our known jurisdictions
_entityEditSender.setServerJurisdictions(&_entityServerJurisdictions);
_entityEditSender.setMyAvatar(myAvatar.get());

@@ -1175,10 +1173,19 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
properties["process_memory_used"] = static_cast<qint64>(memInfo.processUsedMemoryBytes);
}

// content location and build info - useful for filtering stats
auto addressManager = DependencyManager::get<AddressManager>();
auto currentDomain = addressManager->currentShareableAddress(true).toString(); // domain only
auto currentPath = addressManager->currentPath(true); // with orientation
properties["current_domain"] = currentDomain;
properties["current_path"] = currentPath;
properties["build_version"] = BuildInfo::VERSION;

auto displayPlugin = qApp->getActiveDisplayPlugin();

properties["fps"] = _frameCounter.rate();
properties["target_frame_rate"] = getTargetFrameRate();
properties["render_rate"] = displayPlugin->renderRate();
properties["present_rate"] = displayPlugin->presentRate();
properties["new_frame_present_rate"] = displayPlugin->newFramePresentRate();
properties["dropped_frame_rate"] = displayPlugin->droppedFrameRate();

@@ -1224,6 +1231,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
properties["active_display_plugin"] = getActiveDisplayPlugin()->getName();
properties["using_hmd"] = isHMDMode();

auto glInfo = getGLContextData();
properties["gl_info"] = glInfo;
properties["gpu_free_memory"] = (int)BYTES_TO_MB(gpu::Context::getFreeGPUMemory());

auto hmdHeadPose = getHMDSensorPose();
properties["hmd_head_pose_changed"] = isHMDMode() && (hmdHeadPose != lastHMDHeadPose);
lastHMDHeadPose = hmdHeadPose;

@@ -1376,6 +1387,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
}
}

_connectionMonitor.init();

// After all of the constructor is completed, then set firstRun to false.
firstRun.set(false);
}

@@ -1478,6 +1491,7 @@ void Application::updateHeartbeat() const {

void Application::aboutToQuit() {
emit beforeAboutToQuit();
DependencyManager::get<AudioClient>()->beforeAboutToQuit();

foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
if (inputPlugin->isActive()) {

@@ -1556,17 +1570,6 @@ void Application::cleanupBeforeQuit() {
saveSettings();
_window->saveGeometry();

// stop the AudioClient
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
"stop", Qt::BlockingQueuedConnection);

// destroy the AudioClient so it and its thread have a chance to go down safely
DependencyManager::destroy<AudioClient>();

// destroy the AudioInjectorManager so it and its thread have a chance to go down safely
// this will also stop any ongoing network injectors
DependencyManager::destroy<AudioInjectorManager>();

// Destroy third party processes after scripts have finished using them.
#ifdef HAVE_DDE
DependencyManager::destroy<DdeFaceTracker>();

@@ -1575,10 +1578,29 @@ void Application::cleanupBeforeQuit() {
DependencyManager::destroy<EyeTracker>();
#endif

// stop QML
DependencyManager::destroy<OffscreenUi>();

// stop audio after QML, as there are unexplained audio crashes originating in qtwebengine

// stop the AudioClient, synchronously
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
"stop", Qt::BlockingQueuedConnection);

// destroy Audio so it and its threads have a chance to go down safely
DependencyManager::destroy<AudioClient>();
DependencyManager::destroy<AudioInjectorManager>();

// shutdown render engine
_main3DScene = nullptr;
_renderEngine = nullptr;

qCDebug(interfaceapp) << "Application::cleanupBeforeQuit() complete";
}

Application::~Application() {
DependencyManager::destroy<Preferences>();

_entityClipboard->eraseAllOctreeElements();
_entityClipboard.reset();

@@ -1596,7 +1618,6 @@ Application::~Application() {
DependencyManager::get<AvatarManager>()->getObjectsToRemoveFromPhysics(motionStates);
_physicsEngine->removeObjects(motionStates);

DependencyManager::destroy<OffscreenUi>();
DependencyManager::destroy<AvatarManager>();
DependencyManager::destroy<AnimationCache>();
DependencyManager::destroy<FramebufferCache>();

@@ -2165,13 +2186,10 @@ void Application::resizeGL() {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto uiSize = displayPlugin->getRecommendedUiSize();
// Bit of a hack since there's no device pixel ratio change event I can find.
static qreal lastDevicePixelRatio = 0;
qreal devicePixelRatio = _window->devicePixelRatio();
if (offscreenUi->size() != fromGlm(uiSize) || devicePixelRatio != lastDevicePixelRatio) {
if (offscreenUi->size() != fromGlm(uiSize)) {
qCDebug(interfaceapp) << "Device pixel ratio changed, triggering resize to " << uiSize;
offscreenUi->resize(fromGlm(uiSize), true);
_offscreenContext->makeCurrent();
lastDevicePixelRatio = devicePixelRatio;
}
}

@@ -3972,8 +3990,6 @@ void Application::update(float deltaTime) {
auto collisionEvents = _physicsEngine->getCollisionEvents();
avatarManager->handleCollisionEvents(collisionEvents);

_physicsEngine->dumpStatsIfNecessary();

if (!_aboutToQuit) {
PerformanceTimer perfTimer("entities");
// Collision events (and their scripts) must not be handled when we're locked, above. (That would risk

@@ -3986,6 +4002,13 @@ void Application::update(float deltaTime) {
}

myAvatar->harvestResultsFromPhysicsSimulation(deltaTime);

if (Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails) &&
Menu::getInstance()->isOptionChecked(MenuOption::ExpandPhysicsSimulationTiming)) {
_physicsEngine->harvestPerformanceStats();
}
// NOTE: the PhysicsEngine stats are written to stdout NOT to Qt log framework
_physicsEngine->dumpStatsIfNecessary();
}
}
}

@@ -13,34 +13,42 @@

#include "ui/DialogsManager.h"

#include <NodeList.h>
#include <DependencyManager.h>
#include <DomainHandler.h>
#include <AddressManager.h>
#include <NodeList.h>

// Because the connection monitor is created at startup, the time we wait on initial load
// should be longer to allow the application to initialize.
static const int ON_INITIAL_LOAD_DISPLAY_AFTER_DISCONNECTED_FOR_X_MS = 10000;
static const int DISPLAY_AFTER_DISCONNECTED_FOR_X_MS = 5000;

void ConnectionMonitor::init() {
// Connect to domain disconnected message
auto nodeList = DependencyManager::get<NodeList>();
const DomainHandler& domainHandler = nodeList->getDomainHandler();
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &ConnectionMonitor::disconnectedFromDomain);
connect(&domainHandler, &DomainHandler::connectedToDomain, this, &ConnectionMonitor::connectedToDomain);
connect(&domainHandler, &DomainHandler::resetting, this, &ConnectionMonitor::startTimer);
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &ConnectionMonitor::startTimer);
connect(&domainHandler, &DomainHandler::connectedToDomain, this, &ConnectionMonitor::stopTimer);
connect(&domainHandler, &DomainHandler::domainConnectionRefused, this, &ConnectionMonitor::stopTimer);

_timer.setSingleShot(true);
_timer.setInterval(DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
if (!domainHandler.isConnected()) {
_timer.start();
_timer.start(ON_INITIAL_LOAD_DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
}

auto dialogsManager = DependencyManager::get<DialogsManager>();
connect(&_timer, &QTimer::timeout, dialogsManager.data(), &DialogsManager::indicateDomainConnectionFailure);
connect(&_timer, &QTimer::timeout, this, []() {
qDebug() << "ConnectionMonitor: Showing connection failure window";
DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(true);
});
}

void ConnectionMonitor::disconnectedFromDomain() {
_timer.start();
void ConnectionMonitor::startTimer() {
qDebug() << "ConnectionMonitor: Starting timer";
_timer.start(DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
}

void ConnectionMonitor::connectedToDomain(const QString& name) {
void ConnectionMonitor::stopTimer() {
qDebug() << "ConnectionMonitor: Stopping timer";
_timer.stop();
DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(false);
}

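The rewritten ConnectionMonitor drives the failure dialog from a single-shot QTimer: resets and disconnects (re)start it, a successful connection or a connection refusal stops it, and only a timeout that survives the whole interval shows the dialog, with a longer grace period on first load. A reduced sketch of that pattern, with the dialog call replaced by a log line:

#include <QObject>
#include <QTimer>
#include <QDebug>

// Reduced version of the pattern above: a single-shot timer restarted on every
// disconnect and cancelled on connect, so only an uninterrupted wait fires.
class DisconnectWatcher {
public:
    DisconnectWatcher(int initialMs, int laterMs) : _laterMs(laterMs) {
        _timer.setSingleShot(true);
        QObject::connect(&_timer, &QTimer::timeout, [] {
            qDebug() << "still disconnected after the grace period, show the failure dialog here";
        });
        _timer.start(initialMs);   // longer grace period while the application starts up
    }

    void onDisconnected() { _timer.start(_laterMs); }   // restart the countdown
    void onConnected() { _timer.stop(); }               // connected in time, nothing shown

private:
    QTimer _timer;
    int _laterMs;
};
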
@@ -23,8 +23,8 @@ public:
void init();

private slots:
void disconnectedFromDomain();
void connectedToDomain(const QString& name);
void startTimer();
void stopTimer();

private:
QTimer _timer;

@@ -338,6 +338,9 @@ Menu::Menu() {
// Developer > Render > Throttle FPS If Not Focus
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, true);

// Developer > Render > OpenVR threaded submit
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::OpenVrThreadedSubmit, 0, true);

// Developer > Render > Resolution
MenuWrapper* resolutionMenu = renderOptionsMenu->addMenu(MenuOption::RenderResolution);
QActionGroup* resolutionGroup = new QActionGroup(resolutionMenu);

@@ -605,6 +608,7 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandMyAvatarSimulateTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandOtherAvatarTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandPaintGLTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandPhysicsSimulationTiming, 0, false);

addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::FrameTimer);
addActionToQMenuAndActionHash(timingMenu, MenuOption::RunTimingTests, 0, qApp, SLOT(runTests()));

@@ -104,6 +104,7 @@ namespace MenuOption {
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
const QString ExpandPaintGLTiming = "Expand /paintGL";
const QString ExpandPhysicsSimulationTiming = "Expand /physics";
const QString ExpandUpdateTiming = "Expand /update";
const QString Faceshift = "Faceshift";
const QString FirstPerson = "First Person";

@@ -135,6 +136,7 @@ namespace MenuOption {
const QString OctreeStats = "Entity Statistics";
const QString OnePointCalibration = "1 Point Calibration";
const QString OnlyDisplayTopTen = "Only Display Top Ten";
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit";
const QString OutputMenu = "Display";
const QString Overlays = "Overlays";
const QString PackageModel = "Package Model...";

@@ -36,6 +36,7 @@
#include "Application.h"
#include "Avatar.h"
#include "AvatarManager.h"
#include "InterfaceLogging.h"
#include "Menu.h"
#include "MyAvatar.h"
#include "SceneScriptingInterface.h"

@@ -208,11 +209,15 @@ AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWe
auto rawRenderableAvatar = std::static_pointer_cast<Avatar>(newAvatar);

render::ScenePointer scene = qApp->getMain3DScene();
render::PendingChanges pendingChanges;
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
if (scene) {
render::PendingChanges pendingChanges;
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(interfaceapp) << "AvatarManager::addAvatar() : Unexpected null scene, possibly during application shutdown";
}
scene->enqueuePendingChanges(pendingChanges);

return newAvatar;
}

@@ -161,6 +161,9 @@ int main(int argc, const char* argv[]) {
QSettings::setDefaultFormat(QSettings::IniFormat);
Application app(argc, const_cast<char**>(argv), startupTime, runServer, serverContentPathOptionValue);

bool launchedFromSteam = SteamClient::isRunning();
app.setProperty("com.highfidelity.launchedFromSteam", launchedFromSteam);

// If we failed the OpenGLVersion check, log it.
if (override) {
auto accountManager = DependencyManager::get<AccountManager>();

@@ -144,6 +144,27 @@ bool HMDScriptingInterface::setHandLasers(int hands, bool enabled, const glm::ve
color, direction);
}

bool HMDScriptingInterface::setExtraLaser(const glm::vec3& worldStart, bool enabled, const glm::vec4& color, const glm::vec3& direction) const {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->executeOnUiThread([offscreenUi, enabled] {
offscreenUi->getDesktop()->setProperty("hmdHandMouseActive", enabled);
});

auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto sensorToWorld = myAvatar->getSensorToWorldMatrix();
auto worldToSensor = glm::inverse(sensorToWorld);
auto sensorStart = ::transformPoint(worldToSensor, worldStart);
auto sensorDirection = ::transformVectorFast(worldToSensor, direction);

return qApp->getActiveDisplayPlugin()->setExtraLaser(enabled ? DisplayPlugin::HandLaserMode::Overlay : DisplayPlugin::HandLaserMode::None,
color, sensorStart, sensorDirection);
}

void HMDScriptingInterface::disableExtraLaser() const {
setExtraLaser(vec3(0), false, vec4(0), vec3(0));
}

void HMDScriptingInterface::disableHandLasers(int hands) const {
setHandLasers(hands, false, vec4(0), vec3(0));
}

@@ -46,8 +46,12 @@ public:
Q_INVOKABLE bool shouldShowHandControllers() const;

Q_INVOKABLE bool setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;

Q_INVOKABLE void disableHandLasers(int hands) const;

Q_INVOKABLE bool setExtraLaser(const glm::vec3& worldStart, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;
Q_INVOKABLE void disableExtraLaser() const;

/// Suppress the activation of any on-screen keyboard so that a script operation will
/// not be interrupted by a keyboard popup
/// Returns false if there is already an active keyboard displayed.

interface/src/ui/ConnectionFailureDialog.cpp (new file, 3 lines)

@@ -0,0 +1,3 @@
#include "ConnectionFailureDialog.h"

HIFI_QML_DEF(ConnectionFailureDialog)

interface/src/ui/ConnectionFailureDialog.h (new file, 8 lines)

@@ -0,0 +1,8 @@
#pragma once

#include <OffscreenQmlDialog.h>

class ConnectionFailureDialog : public OffscreenQmlDialog {
Q_OBJECT
HIFI_QML_DECL
};

@@ -21,6 +21,7 @@
#include "AddressBarDialog.h"
#include "BandwidthDialog.h"
#include "CachesSizeDialog.h"
#include "ConnectionFailureDialog.h"
#include "DiskCacheEditor.h"
#include "DomainConnectionDialog.h"
#include "HMDToolsDialog.h"

@@ -59,8 +60,12 @@ void DialogsManager::showFeed() {
emit setUseFeed(true);
}

void DialogsManager::indicateDomainConnectionFailure() {
OffscreenUi::information("No Connection", "Unable to connect to this domain. Click the 'GO TO' button on the toolbar to visit another domain.");
void DialogsManager::setDomainConnectionFailureVisibility(bool visible) {
if (visible) {
ConnectionFailureDialog::show();
} else {
ConnectionFailureDialog::hide();
}
}

void DialogsManager::toggleDiskCacheEditor() {

@@ -44,7 +44,7 @@ public slots:
void toggleAddressBar();
void showAddressBar();
void showFeed();
void indicateDomainConnectionFailure();
void setDomainConnectionFailureVisibility(bool visible);
void toggleDiskCacheEditor();
void toggleLoginDialog();
void showLoginDialog();

@@ -25,6 +25,8 @@
#include <PerfStat.h>
#include <plugins/DisplayPlugin.h>

#include <gl/Context.h>

#include "BandwidthRecorder.h"
#include "Menu.h"
#include "Util.h"

@@ -55,7 +57,9 @@ Stats::Stats(QQuickItem* parent) : QQuickItem(parent) {
bool Stats::includeTimingRecord(const QString& name) {
if (Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails)) {
if (name.startsWith("/idle/update/")) {
if (name.startsWith("/idle/update/myAvatar/")) {
if (name.startsWith("/idle/update/physics/")) {
return Menu::getInstance()->isOptionChecked(MenuOption::ExpandPhysicsSimulationTiming);
} else if (name.startsWith("/idle/update/myAvatar/")) {
if (name.startsWith("/idle/update/myAvatar/simulate/")) {
return Menu::getInstance()->isOptionChecked(MenuOption::ExpandMyAvatarSimulateTiming);
}

@@ -92,6 +96,8 @@ bool Stats::includeTimingRecord(const QString& name) {
} \
}

extern std::atomic<size_t> DECIMATED_TEXTURE_COUNT;
extern std::atomic<size_t> RECTIFIED_TEXTURE_COUNT;

void Stats::updateStats(bool force) {
if (!force) {

@@ -285,14 +291,21 @@ void Stats::updateStats(bool force) {
}

STAT_UPDATE(gpuBuffers, (int)gpu::Context::getBufferGPUCount());
STAT_UPDATE(gpuBufferMemory, (int)BYTES_TO_MB(gpu::Context::getBufferGPUMemoryUsage()));
STAT_UPDATE(gpuTextures, (int)gpu::Context::getTextureGPUCount());
STAT_UPDATE(gpuTexturesSparse, (int)gpu::Context::getTextureGPUSparseCount());

STAT_UPDATE(glContextSwapchainMemory, (int)BYTES_TO_MB(gl::Context::getSwapchainMemoryUsage()));

STAT_UPDATE(qmlTextureMemory, (int)BYTES_TO_MB(OffscreenQmlSurface::getUsedTextureMemory()));
STAT_UPDATE(gpuTextureMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUMemoryUsage()));
STAT_UPDATE(gpuTextureVirtualMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUVirtualMemoryUsage()));
STAT_UPDATE(gpuTextureFramebufferMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUFramebufferMemoryUsage()));
STAT_UPDATE(gpuTextureSparseMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUSparseMemoryUsage()));
STAT_UPDATE(gpuSparseTextureEnabled, gpu::Texture::getEnableSparseTextures() ? 1 : 0);
STAT_UPDATE(gpuFreeMemory, (int)BYTES_TO_MB(gpu::Context::getFreeGPUMemory()));
STAT_UPDATE(rectifiedTextureCount, (int)RECTIFIED_TEXTURE_COUNT.load());
STAT_UPDATE(decimatedTextureCount, (int)DECIMATED_TEXTURE_COUNT.load());

// Incoming packets
QLocale locale(QLocale::English);

@@ -369,7 +382,7 @@ void Stats::updateStats(bool force) {
QString functionName = j.value();
const PerformanceTimerRecord& record = allRecords.value(functionName);
perfLines += QString("%1: %2 [%3]\n").
arg(QString(qPrintable(functionName)), 90, noBreakingSpace).
arg(QString(qPrintable(functionName)), -80, noBreakingSpace).
arg((float)record.getMovingAverage() / (float)USECS_PER_MSEC, 8, 'f', 3, noBreakingSpace).
arg((int)record.getCount(), 6, 10, noBreakingSpace);
linesDisplayed++;

@@ -87,12 +87,17 @@ class Stats : public QQuickItem {
STATS_PROPERTY(int, localElements, 0)
STATS_PROPERTY(int, localInternal, 0)
STATS_PROPERTY(int, localLeaves, 0)
STATS_PROPERTY(int, rectifiedTextureCount, 0)
STATS_PROPERTY(int, decimatedTextureCount, 0)
STATS_PROPERTY(int, gpuBuffers, 0)
STATS_PROPERTY(int, gpuBufferMemory, 0)
STATS_PROPERTY(int, gpuTextures, 0)
STATS_PROPERTY(int, gpuTexturesSparse, 0)
STATS_PROPERTY(int, glContextSwapchainMemory, 0)
STATS_PROPERTY(int, qmlTextureMemory, 0)
STATS_PROPERTY(int, gpuTextureMemory, 0)
STATS_PROPERTY(int, gpuTextureVirtualMemory, 0)
STATS_PROPERTY(int, gpuTextureFramebufferMemory, 0)
STATS_PROPERTY(int, gpuTextureSparseMemory, 0)
STATS_PROPERTY(int, gpuSparseTextureEnabled, 0)
STATS_PROPERTY(int, gpuFreeMemory, 0)

@@ -181,15 +186,20 @@ signals:
void localInternalChanged();
void localLeavesChanged();
void timingStatsChanged();
void glContextSwapchainMemoryChanged();
void qmlTextureMemoryChanged();
void gpuBuffersChanged();
void gpuBufferMemoryChanged();
void gpuTexturesChanged();
void gpuTexturesSparseChanged();
void gpuTextureMemoryChanged();
void gpuTextureVirtualMemoryChanged();
void gpuTextureFramebufferMemoryChanged();
void gpuTextureSparseMemoryChanged();
void gpuSparseTextureEnabledChanged();
void gpuFreeMemoryChanged();
void rectifiedTextureCountChanged();
void decimatedTextureCountChanged();

private:
int _recentMaxPackets{ 0 } ; // recent max incoming voxel packets to process

@@ -82,10 +82,10 @@ public:

CheckDevicesThread(AudioClient* audioClient)
: _audioClient(audioClient) {
}

connect(qApp, &QCoreApplication::aboutToQuit, [this] {
_quit = true;
});
void beforeAboutToQuit() {
_quit = true;
}

void run() override {

@@ -159,10 +159,10 @@ AudioClient::AudioClient() :
_outputDevices = getDeviceNames(QAudio::AudioOutput);

// start a thread to detect any device changes
QThread* checkDevicesThread = new CheckDevicesThread(this);
checkDevicesThread->setObjectName("CheckDevices Thread");
checkDevicesThread->setPriority(QThread::LowPriority);
checkDevicesThread->start();
_checkDevicesThread = new CheckDevicesThread(this);
_checkDevicesThread->setObjectName("CheckDevices Thread");
_checkDevicesThread->setPriority(QThread::LowPriority);
_checkDevicesThread->start();

configureReverb();

@@ -177,6 +177,7 @@ AudioClient::AudioClient() :
}

AudioClient::~AudioClient() {
delete _checkDevicesThread;
stop();
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);

@@ -184,6 +185,11 @@ AudioClient::~AudioClient() {
}
}

void AudioClient::beforeAboutToQuit() {
static_cast<CheckDevicesThread*>(_checkDevicesThread)->beforeAboutToQuit();
}

void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
qCDebug(audioclient) << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
selectAudioFormat(recievedCodec);

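The AudioClient changes keep the device-polling thread in a member and stop it explicitly from beforeAboutToQuit() instead of relying on an aboutToQuit lambda. A hedged sketch of that polling-loop shape, using a plain std::thread and an atomic flag in place of the QThread subclass so the example stays self-contained:

#include <atomic>
#include <chrono>
#include <thread>

// Sketch of a device-polling worker that the owner can ask to stop before shutdown.
// The real code uses a QThread subclass; std::thread keeps this example standalone.
class DevicePoller {
public:
    void start() {
        _worker = std::thread([this] {
            while (!_quit.load()) {
                // ... compare the current audio devices against the cached list here ...
                std::this_thread::sleep_for(std::chrono::seconds(2));
            }
        });
    }

    void beforeAboutToQuit() { _quit.store(true); }   // called by the owner during shutdown

    ~DevicePoller() {
        beforeAboutToQuit();
        if (_worker.joinable()) {
            _worker.join();
        }
    }

private:
    std::atomic<bool> _quit { false };
    std::thread _worker;
};
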
@@ -155,6 +155,8 @@ public slots:
void audioMixerKilled();
void toggleMute();

void beforeAboutToQuit();

virtual void setIsStereoInput(bool stereo) override;

void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }

@@ -332,6 +334,8 @@ private:
CodecPluginPointer _codec;
QString _selectedCodecName;
Encoder* _encoder { nullptr }; // for outbound mic stream

QThread* _checkDevicesThread { nullptr };
};

@@ -111,6 +111,8 @@ public:
void setDisplayPlugin(const DisplayPluginPointer& displayPlugin) { _currentDisplayPlugin = displayPlugin; }
void setFrameInfo(uint32_t frame, const glm::mat4& camera) { _currentCamera = camera; }

float getHmdUiRadius() const { return _hmdUIRadius; }

signals:
void allowMouseCaptureChanged();
void alphaChanged();

@@ -116,6 +116,7 @@ void HmdDisplayPlugin::customizeContext() {
for (size_t i = 0; i < _geometryIds.size(); ++i) {
_geometryIds[i] = geometryCache->allocateID();
}
_extraLaserID = geometryCache->allocateID();
}

void HmdDisplayPlugin::uncustomizeContext() {

@@ -135,6 +136,7 @@ void HmdDisplayPlugin::uncustomizeContext() {
for (size_t i = 0; i < _geometryIds.size(); ++i) {
geometryCache->releaseID(_geometryIds[i]);
}
geometryCache->releaseID(_extraLaserID);
Parent::uncustomizeContext();
}

@@ -359,11 +361,18 @@ void HmdDisplayPlugin::updateFrameData() {
_presentHandLasers = _handLasers;
_presentHandPoses = _handPoses;
_presentUiModelTransform = _uiModelTransform;

_presentExtraLaser = _extraLaser;
_presentExtraLaserStart = _extraLaserStart;
});

auto compositorHelper = DependencyManager::get<CompositorHelper>();
glm::mat4 modelMat = compositorHelper->getModelTransform().getMatrix();
std::array<vec2, NUMBER_OF_HANDS> handGlowPoints{ { vec2(-1), vec2(-1) } };
static const float OUT_OF_BOUNDS = -1;
std::array<vec2, NUMBER_OF_HANDS> handGlowPoints { { vec2(OUT_OF_BOUNDS), vec2(OUT_OF_BOUNDS) } };
vec2 extraGlowPoint(OUT_OF_BOUNDS);

float uiRadius = compositorHelper->getHmdUiRadius();

// compute the glow point interesections
for (size_t i = 0; i < NUMBER_OF_HANDS; ++i) {

@@ -390,9 +399,6 @@ void HmdDisplayPlugin::updateFrameData() {
}
castStart += glm::quat_cast(model) * grabPointOffset;

// FIXME fetch the actual UI radius from... somewhere?
float uiRadius = 1.0f;

// Find the intersection of the laser with he UI and use it to scale the model matrix
float distance;
if (!glm::intersectRaySphere(castStart, castDirection,

@@ -425,6 +431,42 @@ void HmdDisplayPlugin::updateFrameData() {
handGlowPoints[i] = yawPitch;
}

// compute the glow point interesections
if (_presentExtraLaser.valid()) {
const vec3& laserDirection = _presentExtraLaser.direction;
vec3 castStart = _presentExtraLaserStart;
vec3 castDirection = laserDirection;

// Find the intersection of the laser with he UI and use it to scale the model matrix
float distance;
if (glm::intersectRaySphere(castStart, castDirection,
_presentUiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {

_presentExtraLaserPoints.first = castStart;
_presentExtraLaserPoints.second = _presentExtraLaserPoints.first + (castDirection * distance);

vec3 intersectionPosition = castStart + (castDirection * distance) - _presentUiModelTransform.getTranslation();
intersectionPosition = glm::inverse(_presentUiModelTransform.getRotation()) * intersectionPosition;

// Take the interesection normal and convert it to a texture coordinate
vec2 yawPitch;
{
vec2 xdir = glm::normalize(vec2(intersectionPosition.x, -intersectionPosition.z));
yawPitch.x = glm::atan(xdir.x, xdir.y);
yawPitch.y = (acosf(intersectionPosition.y) * -1.0f) + (float)M_PI_2;
}
vec2 halfFov = CompositorHelper::VIRTUAL_UI_TARGET_FOV / 2.0f;

// Are we out of range
if (!glm::any(glm::greaterThan(glm::abs(yawPitch), halfFov))) {
yawPitch /= CompositorHelper::VIRTUAL_UI_TARGET_FOV;
yawPitch += 0.5f;
extraGlowPoint = yawPitch;
}
}
}

for_each_eye([&](Eye eye) {
auto modelView = glm::inverse(_currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye)) * modelMat;

@@ -438,6 +480,8 @@ void HmdDisplayPlugin::updateFrameData() {
uniforms.glowPoints = vec4(handGlowPoints[0], handGlowPoints[1]);
uniforms.glowColors[0] = _presentHandLasers[0].color;
uniforms.glowColors[1] = _presentHandLasers[1].color;
uniforms.extraGlowPoint = extraGlowPoint;
uniforms.extraGlowColor = _presentExtraLaser.color;
}
}

@@ -613,13 +657,29 @@ bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const ve
return true;
}

bool HmdDisplayPlugin::setExtraLaser(HandLaserMode mode, const vec4& color, const glm::vec3& sensorSpaceStart, const vec3& sensorSpaceDirection) {
HandLaserInfo info;
info.mode = mode;
info.color = color;
info.direction = sensorSpaceDirection;
withNonPresentThreadLock([&] {
_extraLaser = info;
_extraLaserStart = sensorSpaceStart;
});

// FIXME defer to a child class plugin to determine if hand lasers are actually
// available based on the presence or absence of hand controllers
return true;
}

void HmdDisplayPlugin::compositeExtra() {
// If neither hand laser is activated, exit
if (!_presentHandLasers[0].valid() && !_presentHandLasers[1].valid()) {
if (!_presentHandLasers[0].valid() && !_presentHandLasers[1].valid() && !_presentExtraLaser.valid()) {
return;
}

if (_presentHandPoses[0] == IDENTITY_MATRIX && _presentHandPoses[1] == IDENTITY_MATRIX) {
if (_presentHandPoses[0] == IDENTITY_MATRIX && _presentHandPoses[1] == IDENTITY_MATRIX && !_presentExtraLaser.valid()) {
return;
}

@@ -639,6 +699,11 @@ void HmdDisplayPlugin::compositeExtra() {
geometryCache->renderGlowLine(batch, points.first, points.second, laser.color, _geometryIds[index]);
}
});

if (_presentExtraLaser.valid()) {
const auto& points = _presentExtraLaserPoints;
geometryCache->renderGlowLine(batch, points.first, points.second, _presentExtraLaser.color, _extraLaserID);
}
});
}

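The extra-laser block added to updateFrameData() reuses the hand-laser math: intersect the laser ray with the UI sphere, rotate the hit point into the sphere's local frame, and convert it to a yaw/pitch texture coordinate for the glow shader. A standalone sketch of that projection with glm; uiCenter, uiRotation, uiRadius and targetFov are hypothetical stand-ins for _presentUiModelTransform and the CompositorHelper constants:

#define GLM_ENABLE_EXPERIMENTAL
#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/intersect.hpp>

// Project a laser ray onto the spherical HMD UI and return a [0,1]^2 texture
// coordinate, mirroring the yaw/pitch mapping in the hunks above.
// 'direction' is assumed normalized; returns false if the ray misses the sphere
// or the hit lands outside the overlay's field of view.
bool laserToUiCoordinate(const glm::vec3& start, const glm::vec3& direction,
                         const glm::vec3& uiCenter, const glm::quat& uiRotation,
                         float uiRadius, const glm::vec2& targetFov, glm::vec2& uv) {
    float distance;
    if (!glm::intersectRaySphere(start, direction, uiCenter, uiRadius * uiRadius, distance)) {
        return false;   // the laser misses the UI sphere entirely
    }
    // Hit point relative to the sphere center, rotated into the UI's local frame.
    glm::vec3 local = glm::normalize(glm::inverse(uiRotation) * (start + direction * distance - uiCenter));
    glm::vec2 xdir = glm::normalize(glm::vec2(local.x, -local.z));
    glm::vec2 yawPitch(std::atan2(xdir.x, xdir.y),
                       -std::acos(local.y) + glm::half_pi<float>());
    if (glm::any(glm::greaterThan(glm::abs(yawPitch), targetFov * 0.5f))) {
        return false;   // outside the overlay's field of view
    }
    uv = yawPitch / targetFov + 0.5f;   // remap from angle range to texture space
    return true;
}
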
@@ -38,6 +38,7 @@ public:
virtual glm::mat4 getHeadPose() const override;

bool setHandLaser(uint32_t hands, HandLaserMode mode, const vec4& color, const vec3& direction) override;
bool setExtraLaser(HandLaserMode mode, const vec4& color, const glm::vec3& sensorSpaceStart, const vec3& sensorSpaceDirection) override;

bool wantVsync() const override {
return false;

@@ -78,8 +79,16 @@ protected:
Transform _presentUiModelTransform;
std::array<HandLaserInfo, 2> _presentHandLasers;
std::array<int, 2> _geometryIds;
int _extraLaserID;
std::array<mat4, 2> _presentHandPoses;
std::array<std::pair<vec3, vec3>, 2> _presentHandLaserPoints;

HandLaserInfo _extraLaser;
HandLaserInfo _presentExtraLaser;
vec3 _extraLaserStart;
vec3 _presentExtraLaserStart;
std::pair<vec3, vec3> _presentExtraLaserPoints;

std::array<mat4, 2> _eyeOffsets;
std::array<mat4, 2> _eyeProjections;
std::array<mat4, 2> _eyeInverseProjections;

@@ -130,6 +139,9 @@ private:
vec2 resolution { CompositorHelper::VIRTUAL_SCREEN_SIZE };
float radius { 0.005f };
float alpha { 1.0f };

vec4 extraGlowColor;
vec2 extraGlowPoint { -1 };
} uniforms;

struct Vertex {

@@ -1,75 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

uniform sampler2D sampler;

struct OverlayData {
mat4 mvp;
vec4 glowPoints;
vec4 glowColors[2];
vec4 resolutionRadiusAlpha;
};

layout(std140) uniform overlayBuffer {
OverlayData overlay;
};

vec2 resolution = overlay.resolutionRadiusAlpha.xy;
float radius = overlay.resolutionRadiusAlpha.z;
float alpha = overlay.resolutionRadiusAlpha.w;
vec4 glowPoints = overlay.glowPoints;
vec4 glowColors[2] = overlay.glowColors;

in vec3 vPosition;
in vec2 vTexCoord;

out vec4 FragColor;

float easeInOutCubic(float f) {
const float d = 1.0;
const float b = 0.0;
const float c = 1.0;
float t = f;
if ((t /= d / 2.0) < 1.0) return c / 2.0 * t * t * t + b;
return c / 2.0 * ((t -= 2.0) * t * t + 2.0) + b;
}

void main() {
FragColor = texture(sampler, vTexCoord);

vec2 aspect = resolution;
aspect /= resolution.x;

float glowIntensity = 0.0;
float dist1 = distance(vTexCoord * aspect, glowPoints.xy * aspect);
float dist2 = distance(vTexCoord * aspect, glowPoints.zw * aspect);
float dist = min(dist1, dist2);
vec3 glowColor = glowColors[0].rgb;
if (dist2 < dist1) {
glowColor = glowColors[1].rgb;
}

if (dist <= radius) {
glowIntensity = 1.0 - (dist / radius);
glowColor.rgb = pow(glowColor, vec3(1.0 - glowIntensity));
glowIntensity = easeInOutCubic(glowIntensity);
glowIntensity = pow(glowIntensity, 0.5);
}

if (alpha <= 0.0) {
if (glowIntensity <= 0.0) {
discard;
}

FragColor = vec4(glowColor, glowIntensity);
return;
}

FragColor.rgb = mix(FragColor.rgb, glowColor.rgb, glowIntensity);
FragColor.a *= alpha;
}

@@ -1,32 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

struct OverlayData {
mat4 mvp;
vec4 glowPoints;
vec4 glowColors[2];
vec4 resolutionRadiusAlpha;
};

layout(std140) uniform overlayBuffer {
OverlayData overlay;
};

mat4 mvp = overlay.mvp;

layout(location = 0) in vec3 Position;
layout(location = 3) in vec2 TexCoord;

out vec3 vPosition;
out vec2 vTexCoord;

void main() {
gl_Position = mvp * vec4(Position, 1);
vTexCoord = TexCoord;
vPosition = Position;
}

@@ -128,11 +128,15 @@ void EntityTreeRenderer::clear() {

// remove all entities from the scene
auto scene = _viewState->getMain3DScene();
render::PendingChanges pendingChanges;
foreach(auto entity, _entitiesInScene) {
entity->removeFromScene(entity, scene, pendingChanges);
if (scene) {
render::PendingChanges pendingChanges;
foreach(auto entity, _entitiesInScene) {
entity->removeFromScene(entity, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(entitiesrenderer) << "EntitityTreeRenderer::clear(), Unexpected null scene, possibly during application shutdown";
}
scene->enqueuePendingChanges(pendingChanges);
_entitiesInScene.clear();

// reset the zone to the default (while we load the next scene)

@@ -901,8 +905,12 @@ void EntityTreeRenderer::deletingEntity(const EntityItemID& entityID) {
auto entity = _entitiesInScene.take(entityID);
render::PendingChanges pendingChanges;
auto scene = _viewState->getMain3DScene();
entity->removeFromScene(entity, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
if (scene) {
entity->removeFromScene(entity, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(entitiesrenderer) << "EntityTreeRenderer::deletingEntity(), Unexpected null scene, possibly during application shutdown";
}
}
}

@@ -919,10 +927,14 @@ void EntityTreeRenderer::addEntityToScene(EntityItemPointer entity) {
// here's where we add the entity payload to the scene
render::PendingChanges pendingChanges;
auto scene = _viewState->getMain3DScene();
if (entity->addToScene(entity, scene, pendingChanges)) {
_entitiesInScene.insert(entity->getEntityItemID(), entity);
if (scene) {
if (entity->addToScene(entity, scene, pendingChanges)) {
_entitiesInScene.insert(entity->getEntityItemID(), entity);
}
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(entitiesrenderer) << "EntityTreeRenderer::addEntityToScene(), Unexpected null scene, possibly during application shutdown";
}
scene->enqueuePendingChanges(pendingChanges);
}

@@ -15,6 +15,7 @@
#include <render/Scene.h>
#include <EntityItem.h>
#include "AbstractViewStateInterface.h"
#include "EntitiesRendererLogging.h"

// These or the icon "name" used by the render item status value, they correspond to the atlas texture used by the DrawItemStatus
@@ -79,10 +80,14 @@ public:
render::PendingChanges pendingChanges;
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();

pendingChanges.updateItem<RenderableEntityItemProxy>(_myItem, [](RenderableEntityItemProxy& data) {
});
if (scene) {
pendingChanges.updateItem<RenderableEntityItemProxy>(_myItem, [](RenderableEntityItemProxy& data) {
});

scene->enqueuePendingChanges(pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(entitiesrenderer) << "SimpleRenderableEntityItem::notifyChanged(), Unexpected null scene, possibly during application shutdown";
}
}

private:

@@ -248,9 +248,12 @@ void RenderableZoneEntityItem::notifyBoundChanged() {
}
render::PendingChanges pendingChanges;
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
if (scene) {
pendingChanges.updateItem<RenderableZoneEntityItemMeta>(_myMetaItem, [](RenderableZoneEntityItemMeta& data) {
});

pendingChanges.updateItem<RenderableZoneEntityItemMeta>(_myMetaItem, [](RenderableZoneEntityItemMeta& data) {
});

scene->enqueuePendingChanges(pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(entitiesrenderer) << "RenderableZoneEntityItem::notifyBoundChanged(), Unexpected null scene, possibly during application shutdown";
}
}

@@ -40,6 +40,27 @@ static bool enableDebugLogger = QProcessEnvironment::systemEnvironment().contain

using namespace gl;

std::atomic<size_t> Context::_totalSwapchainMemoryUsage { 0 };

size_t Context::getSwapchainMemoryUsage() { return _totalSwapchainMemoryUsage.load(); }

size_t Context::evalSurfaceMemoryUsage(uint32_t width, uint32_t height, uint32_t pixelSize) {
return width * height * pixelSize;
}

void Context::updateSwapchainMemoryUsage(size_t prevSize, size_t newSize) {
if (prevSize == newSize) {
return;
}
if (newSize > prevSize) {
_totalSwapchainMemoryUsage.fetch_add(newSize - prevSize);
} else {
_totalSwapchainMemoryUsage.fetch_sub(prevSize - newSize);
}
}

Context* Context::PRIMARY = nullptr;

Context::Context() {}

@@ -78,18 +99,35 @@ void Context::release() {
if (PRIMARY == this) {
PRIMARY = nullptr;
}
}
updateSwapchainMemoryCounter();
}

Context::~Context() {
release();
}

void Context::updateSwapchainMemoryCounter() {
if (_window) {
auto newSize = _window->size();
auto newMemSize = gl::Context::evalSurfaceMemoryUsage(newSize.width(), newSize.height(), (uint32_t) _swapchainPixelSize);
gl::Context::updateSwapchainMemoryUsage(_swapchainMemoryUsage, newMemSize);
_swapchainMemoryUsage = newMemSize;
} else {
// No window ? no more swapchain
gl::Context::updateSwapchainMemoryUsage(_swapchainMemoryUsage, 0);
_swapchainMemoryUsage = 0;
}
}

void Context::setWindow(QWindow* window) {
release();
_window = window;

#ifdef Q_OS_WIN
_hwnd = (HWND)window->winId();
#endif

updateSwapchainMemoryCounter();
}

#ifdef Q_OS_WIN

@@ -98,6 +136,8 @@ static const char* PRIMARY_CONTEXT_PROPERTY_NAME = "com.highfidelity.gl.primaryC
bool Context::makeCurrent() {
BOOL result = wglMakeCurrent(_hdc, _hglrc);
assert(result);
updateSwapchainMemoryCounter();

return result;
}

@@ -217,6 +257,11 @@ void Context::create() {
wglChoosePixelFormatARB(_hdc, &formatAttribs[0], NULL, 1, &pixelFormat, &numFormats);
DescribePixelFormat(_hdc, pixelFormat, sizeof(pfd), &pfd);
}
// The swap chain pixel size for swap chains is : rgba32 + depth24stencil8
// We don't apply the length of the swap chain into this pixelSize since it is not vsible for the Process (on windows).
_swapchainPixelSize = 32 + 32;
updateSwapchainMemoryCounter();

SetPixelFormat(_hdc, pixelFormat, &pfd);
{
std::vector<int> contextAttribs;

@@ -277,6 +322,8 @@ void OffscreenContext::create() {
_window->setSurfaceType(QSurface::OpenGLSurface);
_window->create();
setWindow(_window);
QSize windowSize = _window->size() * _window->devicePixelRatio();
qCDebug(glLogging) << "New Offscreen GLContext, window size = " << windowSize.width() << " , " << windowSize.height();
QGuiApplication::processEvents();
}
Parent::create();

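The new swapchain accounting keeps one process-wide atomic counter and moves it by the difference between the old and new surface estimate, so several contexts can resize concurrently without a lock. A small sketch of that delta-update pattern under the same width * height * pixelSize estimate:

#include <atomic>
#include <cstddef>
#include <cstdint>

// Process-wide counter shared by every context; adjusted by deltas through an
// atomic so concurrent updates never need a lock (same shape as gl::Context above).
static std::atomic<size_t> g_totalSwapchainMemory { 0 };

size_t surfaceMemory(uint32_t width, uint32_t height, uint32_t pixelSize) {
    // surface area times the per-pixel cost of the swapchain (color + depth/stencil)
    return static_cast<size_t>(width) * height * pixelSize;
}

void trackSwapchainResize(size_t& perContextUsage, uint32_t width, uint32_t height, uint32_t pixelSize) {
    size_t newUsage = surfaceMemory(width, height, pixelSize);
    if (newUsage > perContextUsage) {
        g_totalSwapchainMemory.fetch_add(newUsage - perContextUsage);
    } else {
        g_totalSwapchainMemory.fetch_sub(perContextUsage - newUsage);
    }
    perContextUsage = newUsage;   // remember what this context currently accounts for
}
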
@ -11,6 +11,7 @@
|
|||
|
||||
#include <stdint.h>
|
||||
#include <QtGlobal>
|
||||
#include <atomic>
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
#include <Windows.h>
|
||||
|
@ -23,7 +24,7 @@ class QThread;
|
|||
|
||||
namespace gl {
|
||||
|
||||
class Context {
|
||||
class Context {
|
||||
protected:
|
||||
QWindow* _window { nullptr };
|
||||
static Context* PRIMARY;
|
||||
|
@ -57,6 +58,17 @@ class Context {
|
|||
virtual void create();
|
||||
QOpenGLContext* qglContext();
|
||||
void moveToThread(QThread* thread);
|
||||
|
||||
static size_t evalSurfaceMemoryUsage(uint32_t width, uint32_t height, uint32_t pixelSize);
|
||||
static size_t getSwapchainMemoryUsage();
|
||||
static void updateSwapchainMemoryUsage(size_t prevSize, size_t newSize);
|
||||
|
||||
private:
|
||||
static std::atomic<size_t> _totalSwapchainMemoryUsage;
|
||||
|
||||
size_t _swapchainMemoryUsage { 0 };
|
||||
size_t _swapchainPixelSize { 0 };
|
||||
void updateSwapchainMemoryCounter();
|
||||
};
|
||||
|
||||
class OffscreenContext : public Context {
|
||||
|
@ -67,6 +79,7 @@ class Context {
|
|||
virtual ~OffscreenContext();
|
||||
void create() override;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // hifi_gpu_GPUConfig_h
|
||||
|
|
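Note on the swapchain accounting introduced above: each context keeps a per-surface byte estimate and folds the delta into a process-wide atomic total. A minimal standalone sketch of that pattern, assuming the surface estimate is simply width * height * bits-per-pixel / 8 (the real evalSurfaceMemoryUsage may differ):

#include <atomic>
#include <cstddef>
#include <cstdint>

// Process-wide running total, analogous to _totalSwapchainMemoryUsage above.
static std::atomic<size_t> totalSwapchainBytes { 0 };

// Assumed estimate: color + depth/stencil bits per pixel, converted to bytes.
size_t evalSurfaceBytes(uint32_t width, uint32_t height, uint32_t pixelSizeBits) {
    return static_cast<size_t>(width) * height * pixelSizeBits / 8;
}

// Fold the difference between the previous and new per-surface estimates into the total.
void updateSwapchainBytes(size_t prevSize, size_t newSize) {
    if (newSize > prevSize) {
        totalSwapchainBytes.fetch_add(newSize - prevSize);
    } else {
        totalSwapchainBytes.fetch_sub(prevSize - newSize);
    }
}

The same delta-update idiom is reused further below for the GPU texture and framebuffer memory counters.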
|
@ -15,6 +15,8 @@
|
|||
#include <QtPlatformHeaders/QWGLNativeContext>
|
||||
#endif
|
||||
|
||||
#include "GLHelpers.h"
|
||||
|
||||
using namespace gl;
|
||||
|
||||
void Context::destroyContext(QOpenGLContext* context) {
|
||||
|
@ -45,6 +47,7 @@ void Context::moveToThread(QThread* thread) {
|
|||
|
||||
#ifndef Q_OS_WIN
|
||||
bool Context::makeCurrent() {
|
||||
updateSwapchainMemoryCounter();
|
||||
return _context->makeCurrent(_window);
|
||||
}
|
||||
|
||||
|
@ -70,6 +73,9 @@ void Context::create() {
|
|||
}
|
||||
_context->setFormat(getDefaultOpenGLSurfaceFormat());
|
||||
_context->create();
|
||||
|
||||
_swapchainPixelSize = evalGLFormatSwapchainPixelSize(_context->format());
|
||||
updateSwapchainMemoryCounter();
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
@ -13,6 +13,17 @@
|
|||
|
||||
#include <QtOpenGL/QGL>
|
||||
|
||||
size_t evalGLFormatSwapchainPixelSize(const QSurfaceFormat& format) {
|
||||
size_t pixelSize = format.redBufferSize() + format.greenBufferSize() + format.blueBufferSize() + format.alphaBufferSize();
|
||||
// We don't include the length of the swap chain in this pixelSize since it is not visible to the process (on Windows).
|
||||
// Let's keep this here to remember that:
|
||||
// if (format.swapBehavior() > 0) {
|
||||
// pixelSize *= format.swapBehavior(); // multiply the color buffer pixel size by the actual swapchain depth
|
||||
// }
|
||||
pixelSize += format.stencilBufferSize() + format.depthBufferSize();
|
||||
return pixelSize;
|
||||
}
|
||||
|
||||
const QSurfaceFormat& getDefaultOpenGLSurfaceFormat() {
|
||||
static QSurfaceFormat format;
|
||||
static std::once_flag once;
|
||||
|
@ -35,17 +46,22 @@ int glVersionToInteger(QString glVersion) {
|
|||
}
|
||||
|
||||
QJsonObject getGLContextData() {
|
||||
QString glVersion = QString((const char*)glGetString(GL_VERSION));
|
||||
QString glslVersion = QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
|
||||
QString glVendor = QString((const char*) glGetString(GL_VENDOR));
|
||||
QString glRenderer = QString((const char*)glGetString(GL_RENDERER));
|
||||
static QJsonObject result;
|
||||
static std::once_flag once;
|
||||
std::call_once(once, [] {
|
||||
QString glVersion = QString((const char*)glGetString(GL_VERSION));
|
||||
QString glslVersion = QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
|
||||
QString glVendor = QString((const char*) glGetString(GL_VENDOR));
|
||||
QString glRenderer = QString((const char*)glGetString(GL_RENDERER));
|
||||
|
||||
return QJsonObject {
|
||||
{ "version", glVersion },
|
||||
{ "slVersion", glslVersion },
|
||||
{ "vendor", glVendor },
|
||||
{ "renderer", glRenderer },
|
||||
};
|
||||
result = QJsonObject {
|
||||
{ "version", glVersion },
|
||||
{ "sl_version", glslVersion },
|
||||
{ "vendor", glVendor },
|
||||
{ "renderer", glRenderer },
|
||||
};
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
QThread* RENDER_THREAD = nullptr;
|
||||
|
|
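The getGLContextData() change above computes the QJsonObject once and caches it. A small sketch of that std::call_once memoization pattern in isolation, with placeholder values standing in for the real glGetString queries:

#include <mutex>
#include <QtCore/QJsonObject>

// Build the object exactly once, even if called concurrently from several threads.
const QJsonObject& cachedContextData() {
    static QJsonObject result;
    static std::once_flag once;
    std::call_once(once, [] {
        // placeholder values; the real code reads GL_VERSION, GL_VENDOR, etc.
        result = QJsonObject {
            { "version", "4.5" },
            { "vendor", "example-vendor" },
        };
    });
    return result;
}

One caveat with this pattern: the values are captured from whatever GL context is current on the first call, so callers must make a context current before the first query.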
|
@ -26,6 +26,8 @@ class QGLFormat;
|
|||
template<class F>
|
||||
void setGLFormatVersion(F& format, int major = 4, int minor = 5) { format.setVersion(major, minor); }
|
||||
|
||||
size_t evalGLFormatSwapchainPixelSize(const QSurfaceFormat& format);
|
||||
|
||||
const QSurfaceFormat& getDefaultOpenGLSurfaceFormat();
|
||||
QJsonObject getGLContextData();
|
||||
int glVersionToInteger(QString glVersion);
|
||||
|
|
|
@ -17,11 +17,15 @@
|
|||
#include <QtGui/QOffscreenSurface>
|
||||
#include <QtGui/QOpenGLContext>
|
||||
|
||||
#include "Context.h"
|
||||
#include "GLHelpers.h"
|
||||
#include "GLLogging.h"
|
||||
|
||||
|
||||
OffscreenGLCanvas::OffscreenGLCanvas() : _context(new QOpenGLContext), _offscreenSurface(new QOffscreenSurface){
|
||||
OffscreenGLCanvas::OffscreenGLCanvas() :
|
||||
_context(new QOpenGLContext),
|
||||
_offscreenSurface(new QOffscreenSurface)
|
||||
{
|
||||
}
|
||||
|
||||
OffscreenGLCanvas::~OffscreenGLCanvas() {
|
||||
|
@ -56,7 +60,6 @@ bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {
|
|||
bool OffscreenGLCanvas::makeCurrent() {
|
||||
bool result = _context->makeCurrent(_offscreenSurface);
|
||||
Q_ASSERT(result);
|
||||
|
||||
std::call_once(_reportOnce, [this]{
|
||||
qCDebug(glLogging) << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
|
||||
qCDebug(glLogging) << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
|
||||
|
|
|
@ -430,26 +430,23 @@ void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
|
|||
rootContext->setContextProperty("resourceDirectoryUrl", QUrl::fromLocalFile(PathUtils::resourcesPath()));
|
||||
}
|
||||
|
||||
static uvec2 clampSize(const uvec2& size, uint32_t maxDimension) {
|
||||
return glm::clamp(size, glm::uvec2(1), glm::uvec2(maxDimension));
|
||||
}
|
||||
|
||||
static QSize clampSize(const QSize& qsize, uint32_t maxDimension) {
|
||||
return fromGlm(clampSize(toGlm(qsize), maxDimension));
|
||||
}
|
||||
|
||||
void OffscreenQmlSurface::resize(const QSize& newSize_, bool forceResize) {
|
||||
|
||||
if (!_quickWindow) {
|
||||
return;
|
||||
}
|
||||
|
||||
const float MAX_OFFSCREEN_DIMENSION = 4096;
|
||||
QSize newSize = newSize_;
|
||||
|
||||
if (newSize.width() > MAX_OFFSCREEN_DIMENSION || newSize.height() > MAX_OFFSCREEN_DIMENSION) {
|
||||
float scale = std::min(
|
||||
((float)newSize.width() / MAX_OFFSCREEN_DIMENSION),
|
||||
((float)newSize.height() / MAX_OFFSCREEN_DIMENSION));
|
||||
newSize = QSize(
|
||||
std::max(static_cast<int>(scale * newSize.width()), 10),
|
||||
std::max(static_cast<int>(scale * newSize.height()), 10));
|
||||
}
|
||||
|
||||
QSize currentSize = _quickWindow->geometry().size();
|
||||
if (newSize == currentSize && !forceResize) {
|
||||
const uint32_t MAX_OFFSCREEN_DIMENSION = 4096;
|
||||
const QSize newSize = clampSize(newSize_, MAX_OFFSCREEN_DIMENSION);
|
||||
if (!forceResize && newSize == _quickWindow->geometry().size()) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -465,17 +462,12 @@ void OffscreenQmlSurface::resize(const QSize& newSize_, bool forceResize) {
|
|||
|
||||
// Qt bug in 5.4 forces this check of pixel ratio,
|
||||
// even though we're rendering offscreen.
|
||||
qreal pixelRatio = 1.0;
|
||||
if (_renderControl && _renderControl->_renderWindow) {
|
||||
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
|
||||
}
|
||||
|
||||
uvec2 newOffscreenSize = toGlm(newSize * pixelRatio);
|
||||
uvec2 newOffscreenSize = toGlm(newSize);
|
||||
if (newOffscreenSize == _size) {
|
||||
return;
|
||||
}
|
||||
|
||||
qCDebug(glLogging) << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
|
||||
qCDebug(glLogging) << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height();
|
||||
|
||||
_canvas->makeCurrent();
|
||||
|
||||
|
|
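The resize() rewrite above replaces the aspect-preserving scale-down with a per-component clamp. A tiny sketch of that clamp with glm, using local QSize/uvec2 conversions in place of the codebase's toGlm/fromGlm helpers:

#include <glm/glm.hpp>
#include <QtCore/QSize>

// Clamp each dimension into [1, maxDimension], mirroring the clampSize helpers above.
glm::uvec2 clampSize(const glm::uvec2& size, uint32_t maxDimension) {
    return glm::clamp(size, glm::uvec2(1), glm::uvec2(maxDimension));
}

QSize clampSize(const QSize& size, uint32_t maxDimension) {
    glm::uvec2 clamped = clampSize(glm::uvec2(size.width(), size.height()), maxDimension);
    return QSize(static_cast<int>(clamped.x), static_cast<int>(clamped.y));
}

Note the design difference: the removed code scaled both dimensions by the same factor, while glm::clamp limits each axis independently, so very wide or very tall surfaces no longer keep their aspect ratio once they hit the 4096-pixel limit.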
|
@ -13,7 +13,7 @@ using namespace gpu;
|
|||
using namespace gpu::gl;
|
||||
|
||||
GLFramebuffer::~GLFramebuffer() {
|
||||
if (_id) {
|
||||
if (_id) {
|
||||
auto backend = _backend.lock();
|
||||
if (backend) {
|
||||
backend->releaseFramebuffer(_id);
|
||||
|
|
|
@ -18,22 +18,6 @@ using namespace gpu::gl;
|
|||
|
||||
std::shared_ptr<GLTextureTransferHelper> GLTexture::_textureTransferHelper;
|
||||
|
||||
// FIXME placeholder for texture memory over-use
|
||||
#define DEFAULT_MAX_MEMORY_MB 256
|
||||
#define OVER_MEMORY_PRESSURE 2.0f
|
||||
|
||||
// FIXME other apps show things like Oculus home consuming large amounts of GPU memory
|
||||
// which causes us to blur textures needlessly (since other app GPU memory usage will likely
|
||||
// be swapped out and not cause any actual impact)
|
||||
//#define CHECK_MIN_FREE_GPU_MEMORY
|
||||
#ifdef CHECK_MIN_FREE_GPU_MEMORY
|
||||
#define MIN_FREE_GPU_MEMORY_PERCENTAGE 0.25f
|
||||
#endif
|
||||
|
||||
// Allow 65% of all available GPU memory to be consumed by textures
|
||||
// FIXME overly conservative?
|
||||
#define MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE 0.65f
|
||||
|
||||
const GLenum GLTexture::CUBE_FACE_LAYOUT[6] = {
|
||||
GL_TEXTURE_CUBE_MAP_POSITIVE_X, GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
|
||||
GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
|
||||
|
@ -105,37 +89,30 @@ const std::vector<GLenum>& GLTexture::getFaceTargets(GLenum target) {
|
|||
return faceTargets;
|
||||
}
|
||||
|
||||
// Default texture memory = GPU total memory - 2GB
|
||||
#define GPU_MEMORY_RESERVE_BYTES MB_TO_BYTES(2048)
|
||||
// Minimum texture memory = 1GB
|
||||
#define TEXTURE_MEMORY_MIN_BYTES MB_TO_BYTES(1024)
|
||||
|
||||
|
||||
float GLTexture::getMemoryPressure() {
|
||||
// Check for an explicit memory limit
|
||||
auto availableTextureMemory = Texture::getAllowedGPUMemoryUsage();
|
||||
|
||||
|
||||
// If no memory limit has been set, use a percentage of the total dedicated memory
|
||||
if (!availableTextureMemory) {
|
||||
auto totalGpuMemory = getDedicatedMemory();
|
||||
|
||||
if (!totalGpuMemory) {
|
||||
// If we can't query the dedicated memory just use a fallback fixed value of 256 MB
|
||||
totalGpuMemory = MB_TO_BYTES(DEFAULT_MAX_MEMORY_MB);
|
||||
#if 0
|
||||
auto totalMemory = getDedicatedMemory();
|
||||
if ((GPU_MEMORY_RESERVE_BYTES + TEXTURE_MEMORY_MIN_BYTES) > totalMemory) {
|
||||
availableTextureMemory = TEXTURE_MEMORY_MIN_BYTES;
|
||||
} else {
|
||||
#ifdef CHECK_MIN_FREE_GPU_MEMORY
|
||||
// Check the global free GPU memory
|
||||
auto freeGpuMemory = getFreeDedicatedMemory();
|
||||
if (freeGpuMemory) {
|
||||
static gpu::Size lastFreeGpuMemory = 0;
|
||||
auto freePercentage = (float)freeGpuMemory / (float)totalGpuMemory;
|
||||
if (freeGpuMemory != lastFreeGpuMemory) {
|
||||
lastFreeGpuMemory = freeGpuMemory;
|
||||
if (freePercentage < MIN_FREE_GPU_MEMORY_PERCENTAGE) {
|
||||
qCDebug(gpugllogging) << "Exceeded min free GPU memory " << freePercentage;
|
||||
return OVER_MEMORY_PRESSURE;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
availableTextureMemory = totalMemory - GPU_MEMORY_RESERVE_BYTES;
|
||||
}
|
||||
|
||||
availableTextureMemory = static_cast<gpu::Size>(totalGpuMemory * MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE);
|
||||
#else
|
||||
// Hardcode texture limit for sparse textures at 1 GB for now
|
||||
availableTextureMemory = GPU_MEMORY_RESERVE_BYTES;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Return the consumed texture memory divided by the available texture memory.
|
||||
|
@ -209,6 +186,10 @@ GLTexture::~GLTexture() {
|
|||
// the GL45Texture destructor for doing any required work tracking GPU stats
|
||||
backend->releaseTexture(_id, _size);
|
||||
}
|
||||
|
||||
if (!_external && !_transferrable) {
|
||||
Backend::updateTextureGPUFramebufferMemoryUsage(_size, 0);
|
||||
}
|
||||
}
|
||||
Backend::updateTextureGPUVirtualMemoryUsage(_virtualSize, 0);
|
||||
}
|
||||
|
@ -245,6 +226,9 @@ void GLTexture::withPreservedTexture(std::function<void()> f) const {
|
|||
}
|
||||
|
||||
void GLTexture::setSize(GLuint size) const {
|
||||
if (!_external && !_transferrable) {
|
||||
Backend::updateTextureGPUFramebufferMemoryUsage(_size, size);
|
||||
}
|
||||
Backend::updateTextureGPUMemoryUsage(_size, size);
|
||||
const_cast<GLuint&>(_size) = size;
|
||||
}
|
||||
|
|
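The reworked getMemoryPressure() above (see the disabled #if 0 branch) budgets texture memory as dedicated GPU memory minus a fixed reserve, with a floor, and expresses pressure as consumed divided by the budget. A standalone sketch of that rule, reusing the 2 GB reserve and 1 GB minimum as assumptions:

#include <cstddef>

constexpr size_t MB = 1024u * 1024u;
constexpr size_t GPU_MEMORY_RESERVE = size_t(2048) * MB;  // leave ~2 GB for everything else
constexpr size_t TEXTURE_MEMORY_MIN = size_t(1024) * MB;  // never budget less than 1 GB

// Texture byte budget for a GPU with the given amount of dedicated memory.
size_t textureBudget(size_t dedicatedGpuMemory) {
    if (dedicatedGpuMemory < GPU_MEMORY_RESERVE + TEXTURE_MEMORY_MIN) {
        return TEXTURE_MEMORY_MIN;
    }
    return dedicatedGpuMemory - GPU_MEMORY_RESERVE;
}

// Pressure above 1.0 means textures are over budget and should start downscaling.
float memoryPressure(size_t consumedBytes, size_t dedicatedGpuMemory) {
    return static_cast<float>(consumedBytes) / static_cast<float>(textureBudget(dedicatedGpuMemory));
}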
|
@ -170,7 +170,8 @@ std::atomic<Buffer::Size> Context::_bufferGPUMemoryUsage { 0 };
|
|||
std::atomic<uint32_t> Context::_textureGPUCount{ 0 };
|
||||
std::atomic<uint32_t> Context::_textureGPUSparseCount { 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUMemoryUsage { 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUVirtualMemoryUsage{ 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUVirtualMemoryUsage { 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUFramebufferMemoryUsage { 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUSparseMemoryUsage { 0 };
|
||||
std::atomic<uint32_t> Context::_textureGPUTransferCount { 0 };
|
||||
|
||||
|
@ -262,6 +263,17 @@ void Context::updateTextureGPUVirtualMemoryUsage(Size prevObjectSize, Size newOb
|
|||
}
|
||||
}
|
||||
|
||||
void Context::updateTextureGPUFramebufferMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (newObjectSize > prevObjectSize) {
|
||||
_textureGPUFramebufferMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
} else {
|
||||
_textureGPUFramebufferMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
void Context::updateTextureGPUSparseMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
|
@ -310,6 +322,10 @@ Context::Size Context::getTextureGPUVirtualMemoryUsage() {
|
|||
return _textureGPUVirtualMemoryUsage.load();
|
||||
}
|
||||
|
||||
Context::Size Context::getTextureGPUFramebufferMemoryUsage() {
|
||||
return _textureGPUFramebufferMemoryUsage.load();
|
||||
}
|
||||
|
||||
Context::Size Context::getTextureGPUSparseMemoryUsage() {
|
||||
return _textureGPUSparseMemoryUsage.load();
|
||||
}
|
||||
|
@ -329,6 +345,7 @@ void Backend::incrementTextureGPUSparseCount() { Context::incrementTextureGPUSpa
|
|||
void Backend::decrementTextureGPUSparseCount() { Context::decrementTextureGPUSparseCount(); }
|
||||
void Backend::updateTextureGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateTextureGPUMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
void Backend::updateTextureGPUVirtualMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateTextureGPUVirtualMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
void Backend::updateTextureGPUFramebufferMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateTextureGPUFramebufferMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
void Backend::updateTextureGPUSparseMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateTextureGPUSparseMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
void Backend::incrementTextureGPUTransferCount() { Context::incrementTextureGPUTransferCount(); }
|
||||
void Backend::decrementTextureGPUTransferCount() { Context::decrementTextureGPUTransferCount(); }
|
||||
|
|
|
@ -101,6 +101,7 @@ public:
|
|||
static void updateTextureGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
static void updateTextureGPUSparseMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
static void updateTextureGPUVirtualMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
static void updateTextureGPUFramebufferMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
static void incrementTextureGPUTransferCount();
|
||||
static void decrementTextureGPUTransferCount();
|
||||
|
||||
|
@ -210,6 +211,7 @@ public:
|
|||
static Size getFreeGPUMemory();
|
||||
static Size getTextureGPUMemoryUsage();
|
||||
static Size getTextureGPUVirtualMemoryUsage();
|
||||
static Size getTextureGPUFramebufferMemoryUsage();
|
||||
static Size getTextureGPUSparseMemoryUsage();
|
||||
static uint32_t getTextureGPUTransferCount();
|
||||
|
||||
|
@ -249,6 +251,7 @@ protected:
|
|||
static void updateTextureGPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
static void updateTextureGPUSparseMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
static void updateTextureGPUVirtualMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
static void updateTextureGPUFramebufferMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
static void incrementTextureGPUTransferCount();
|
||||
static void decrementTextureGPUTransferCount();
|
||||
|
||||
|
@ -264,6 +267,7 @@ protected:
|
|||
static std::atomic<Size> _textureGPUMemoryUsage;
|
||||
static std::atomic<Size> _textureGPUSparseMemoryUsage;
|
||||
static std::atomic<Size> _textureGPUVirtualMemoryUsage;
|
||||
static std::atomic<Size> _textureGPUFramebufferMemoryUsage;
|
||||
static std::atomic<uint32_t> _textureGPUTransferCount;
|
||||
|
||||
|
||||
|
|
|
@ -102,6 +102,11 @@ Texture::Size Texture::getTextureGPUVirtualMemoryUsage() {
|
|||
return Context::getTextureGPUVirtualMemoryUsage();
|
||||
}
|
||||
|
||||
|
||||
Texture::Size Texture::getTextureGPUFramebufferMemoryUsage() {
|
||||
return Context::getTextureGPUFramebufferMemoryUsage();
|
||||
}
|
||||
|
||||
Texture::Size Texture::getTextureGPUSparseMemoryUsage() {
|
||||
return Context::getTextureGPUSparseMemoryUsage();
|
||||
}
|
||||
|
|
|
@ -154,6 +154,7 @@ public:
|
|||
static uint32_t getTextureGPUSparseCount();
|
||||
static Size getTextureGPUMemoryUsage();
|
||||
static Size getTextureGPUVirtualMemoryUsage();
|
||||
static Size getTextureGPUFramebufferMemoryUsage();
|
||||
static Size getTextureGPUSparseMemoryUsage();
|
||||
static uint32_t getTextureGPUTransferCount();
|
||||
static Size getAllowedGPUMemoryUsage();
|
||||
|
|
|
@ -21,12 +21,59 @@ using namespace gpu;
|
|||
|
||||
// FIXME: Declare this to enable compression
|
||||
//#define COMPRESS_TEXTURES
|
||||
|
||||
static const uvec2 SPARSE_PAGE_SIZE(128);
|
||||
static const uvec2 MAX_TEXTURE_SIZE(4096);
|
||||
bool DEV_DECIMATE_TEXTURES = false;
|
||||
QImage processSourceImage(const QImage& srcImage) {
|
||||
if (DEV_DECIMATE_TEXTURES) {
|
||||
return srcImage.scaled(srcImage.size() * 0.5f);
|
||||
|
||||
bool needsSparseRectification(const uvec2& size) {
|
||||
// Don't attempt to rectify small textures (textures less than the sparse page size in any dimension)
|
||||
if (glm::any(glm::lessThan(size, SPARSE_PAGE_SIZE))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Don't rectify textures that are already an exact multiple of sparse page size
|
||||
if (uvec2(0) == (size % SPARSE_PAGE_SIZE)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Texture is not sparse compatible, but is bigger than the sparse page size in both dimensions, rectify!
|
||||
return true;
|
||||
}
|
||||
|
||||
uvec2 rectifyToSparseSize(const uvec2& size) {
|
||||
uvec2 pages = ((size / SPARSE_PAGE_SIZE) + glm::clamp(size % SPARSE_PAGE_SIZE, uvec2(0), uvec2(1)));
|
||||
uvec2 result = pages * SPARSE_PAGE_SIZE;
|
||||
return result;
|
||||
}
|
||||
|
||||
std::atomic<size_t> DECIMATED_TEXTURE_COUNT { 0 };
|
||||
std::atomic<size_t> RECTIFIED_TEXTURE_COUNT { 0 };
|
||||
|
||||
QImage processSourceImage(const QImage& srcImage, bool cubemap) {
|
||||
const uvec2 srcImageSize = toGlm(srcImage.size());
|
||||
uvec2 targetSize = srcImageSize;
|
||||
|
||||
while (glm::any(glm::greaterThan(targetSize, MAX_TEXTURE_SIZE))) {
|
||||
targetSize /= 2;
|
||||
}
|
||||
if (targetSize != srcImageSize) {
|
||||
++DECIMATED_TEXTURE_COUNT;
|
||||
}
|
||||
|
||||
if (!cubemap && needsSparseRectification(targetSize)) {
|
||||
++RECTIFIED_TEXTURE_COUNT;
|
||||
targetSize = rectifyToSparseSize(targetSize);
|
||||
}
|
||||
|
||||
if (DEV_DECIMATE_TEXTURES && glm::all(glm::greaterThanEqual(targetSize / SPARSE_PAGE_SIZE, uvec2(2)))) {
|
||||
targetSize /= 2;
|
||||
}
|
||||
|
||||
if (targetSize != srcImageSize) {
|
||||
qDebug() << "Resizing texture from " << srcImageSize.x << "x" << srcImageSize.y << " to " << targetSize.x << "x" << targetSize.y;
|
||||
return srcImage.scaled(fromGlm(targetSize));
|
||||
}
|
||||
|
||||
return srcImage;
|
||||
}
|
||||
|
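The image preprocessing above does two things: repeatedly halve a texture until it fits under MAX_TEXTURE_SIZE, and round non-cubemap sizes up to a multiple of the 128-pixel sparse page. A compact sketch of both rules, using glm and the same constants as assumptions:

#include <glm/glm.hpp>

using glm::uvec2;

static const uvec2 SPARSE_PAGE_SIZE(128);
static const uvec2 MAX_TEXTURE_SIZE(4096);

// Round each dimension up to the next multiple of the sparse page size.
uvec2 rectifyToSparseSize(const uvec2& size) {
    uvec2 pages = (size / SPARSE_PAGE_SIZE) + glm::clamp(size % SPARSE_PAGE_SIZE, uvec2(0), uvec2(1));
    return pages * SPARSE_PAGE_SIZE;
}

// Halve the size until both dimensions fit within the maximum texture size.
uvec2 limitToMaxSize(uvec2 size) {
    while (glm::any(glm::greaterThan(size, MAX_TEXTURE_SIZE))) {
        size /= 2u;
    }
    return size;
}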
||||
|
@ -60,7 +107,7 @@ void TextureMap::setLightmapOffsetScale(float offset, float scale) {
|
|||
}
|
||||
|
||||
const QImage TextureUsage::process2DImageColor(const QImage& srcImage, bool& validAlpha, bool& alphaAsMask) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, false);
|
||||
validAlpha = false;
|
||||
alphaAsMask = true;
|
||||
const uint8 OPAQUE_ALPHA = 255;
|
||||
|
@ -233,7 +280,7 @@ gpu::Texture* TextureUsage::createLightmapTextureFromImage(const QImage& srcImag
|
|||
|
||||
|
||||
gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& srcImage, const std::string& srcImageName) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, false);
|
||||
|
||||
if (image.format() != QImage::Format_RGB888) {
|
||||
image = image.convertToFormat(QImage::Format_RGB888);
|
||||
|
@ -267,7 +314,7 @@ double mapComponent(double sobelValue) {
|
|||
}
|
||||
|
||||
gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcImage, const std::string& srcImageName) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, false);
|
||||
|
||||
if (image.format() != QImage::Format_RGB888) {
|
||||
image = image.convertToFormat(QImage::Format_RGB888);
|
||||
|
@ -339,7 +386,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
}
|
||||
|
||||
gpu::Texture* TextureUsage::createRoughnessTextureFromImage(const QImage& srcImage, const std::string& srcImageName) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, false);
|
||||
if (!image.hasAlphaChannel()) {
|
||||
if (image.format() != QImage::Format_RGB888) {
|
||||
image = image.convertToFormat(QImage::Format_RGB888);
|
||||
|
@ -373,7 +420,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromImage(const QImage& srcIma
|
|||
}
|
||||
|
||||
gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& srcImage, const std::string& srcImageName) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, false);
|
||||
if (!image.hasAlphaChannel()) {
|
||||
if (image.format() != QImage::Format_RGB888) {
|
||||
image = image.convertToFormat(QImage::Format_RGB888);
|
||||
|
@ -411,7 +458,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
|
|||
}
|
||||
|
||||
gpu::Texture* TextureUsage::createMetallicTextureFromImage(const QImage& srcImage, const std::string& srcImageName) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, false);
|
||||
if (!image.hasAlphaChannel()) {
|
||||
if (image.format() != QImage::Format_RGB888) {
|
||||
image = image.convertToFormat(QImage::Format_RGB888);
|
||||
|
@ -704,7 +751,7 @@ const int CubeLayout::NUM_CUBEMAP_LAYOUTS = sizeof(CubeLayout::CUBEMAP_LAYOUTS)
|
|||
gpu::Texture* TextureUsage::processCubeTextureColorFromImage(const QImage& srcImage, const std::string& srcImageName, bool isLinear, bool doCompress, bool generateMips, bool generateIrradiance) {
|
||||
gpu::Texture* theTexture = nullptr;
|
||||
if ((srcImage.width() > 0) && (srcImage.height() > 0)) {
|
||||
QImage image = processSourceImage(srcImage);
|
||||
QImage image = processSourceImage(srcImage, true);
|
||||
if (image.format() != QImage::Format_RGB888) {
|
||||
image = image.convertToFormat(QImage::Format_RGB888);
|
||||
}
|
||||
|
@ -714,7 +761,8 @@ gpu::Texture* TextureUsage::processCubeTextureColorFromImage(const QImage& srcIm
|
|||
defineColorTexelFormats(formatGPU, formatMip, image, isLinear, doCompress);
|
||||
|
||||
// Find the layout of the cubemap in the 2D image
|
||||
int foundLayout = CubeLayout::findLayout(image.width(), image.height());
|
||||
// Use the original image size since processSourceImage may have altered the size / aspect ratio
|
||||
int foundLayout = CubeLayout::findLayout(srcImage.width(), srcImage.height());
|
||||
|
||||
std::vector<QImage> faces;
|
||||
// If found, go extract the faces as separate images
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
#include <QRegExp>
|
||||
#include <QStringList>
|
||||
|
||||
#include <BuildInfo.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <NumericalConstants.h>
|
||||
#include <SettingHandle.h>
|
||||
|
@ -27,6 +28,11 @@
|
|||
#include "UserActivityLogger.h"
|
||||
#include "udt/PacketHeaders.h"
|
||||
|
||||
#if USE_STABLE_GLOBAL_SERVICES
|
||||
const QString DEFAULT_HIFI_ADDRESS = "hifi://welcome";
|
||||
#else
|
||||
const QString DEFAULT_HIFI_ADDRESS = "hifi://dev-welcome";
|
||||
#endif
|
||||
|
||||
const QString ADDRESS_MANAGER_SETTINGS_GROUP = "AddressManager";
|
||||
const QString SETTINGS_CURRENT_ADDRESS_KEY = "address";
|
||||
|
@ -47,7 +53,7 @@ bool AddressManager::isConnected() {
|
|||
return DependencyManager::get<NodeList>()->getDomainHandler().isConnected();
|
||||
}
|
||||
|
||||
QUrl AddressManager::currentAddress() const {
|
||||
QUrl AddressManager::currentAddress(bool domainOnly) const {
|
||||
QUrl hifiURL;
|
||||
|
||||
hifiURL.setScheme(HIFI_URL_SCHEME);
|
||||
|
@ -57,7 +63,9 @@ QUrl AddressManager::currentAddress() const {
|
|||
hifiURL.setPort(_port);
|
||||
}
|
||||
|
||||
hifiURL.setPath(currentPath());
|
||||
if (!domainOnly) {
|
||||
hifiURL.setPath(currentPath());
|
||||
}
|
||||
|
||||
return hifiURL;
|
||||
}
|
||||
|
@ -69,8 +77,7 @@ QUrl AddressManager::currentFacingAddress() const {
|
|||
return hifiURL;
|
||||
}
|
||||
|
||||
|
||||
QUrl AddressManager::currentShareableAddress() const {
|
||||
QUrl AddressManager::currentShareableAddress(bool domainOnly) const {
|
||||
if (!_shareablePlaceName.isEmpty()) {
|
||||
// if we have a shareable place name use that instead of whatever the current host is
|
||||
QUrl hifiURL;
|
||||
|
@ -78,11 +85,13 @@ QUrl AddressManager::currentShareableAddress() const {
|
|||
hifiURL.setScheme(HIFI_URL_SCHEME);
|
||||
hifiURL.setHost(_shareablePlaceName);
|
||||
|
||||
hifiURL.setPath(currentPath());
|
||||
if (!domainOnly) {
|
||||
hifiURL.setPath(currentPath());
|
||||
}
|
||||
|
||||
return hifiURL;
|
||||
} else {
|
||||
return currentAddress();
|
||||
return currentAddress(domainOnly);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
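currentAddress() above now takes a domainOnly flag that skips the path. A small standalone sketch of building such a URL with QUrl, with placeholder host and path values:

#include <QtCore/QString>
#include <QtCore/QUrl>

// Build a hifi:// URL; when domainOnly is true the path (position/orientation) is omitted.
QUrl buildAddress(const QString& host, quint16 port, const QString& path, bool domainOnly = false) {
    QUrl url;
    url.setScheme("hifi");
    url.setHost(host);
    if (port != 0) {
        url.setPort(port);
    }
    if (!domainOnly) {
        url.setPath(path);
    }
    return url;
}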
|
@ -24,11 +24,7 @@
|
|||
|
||||
const QString HIFI_URL_SCHEME = "hifi";
|
||||
|
||||
#if USE_STABLE_GLOBAL_SERVICES
|
||||
const QString DEFAULT_HIFI_ADDRESS = "hifi://welcome";
|
||||
#else
|
||||
const QString DEFAULT_HIFI_ADDRESS = "hifi://dev-welcome";
|
||||
#endif
|
||||
extern const QString DEFAULT_HIFI_ADDRESS;
|
||||
|
||||
const QString SANDBOX_HIFI_ADDRESS = "hifi://localhost";
|
||||
const QString INDEX_PATH = "/";
|
||||
|
@ -63,9 +59,9 @@ public:
|
|||
bool isConnected();
|
||||
const QString& getProtocol() { return HIFI_URL_SCHEME; };
|
||||
|
||||
QUrl currentAddress() const;
|
||||
QUrl currentAddress(bool domainOnly = false) const;
|
||||
QUrl currentFacingAddress() const;
|
||||
QUrl currentShareableAddress() const;
|
||||
QUrl currentShareableAddress(bool domainOnly = false) const;
|
||||
QUrl currentFacingShareableAddress() const;
|
||||
QString currentPath(bool withOrientation = true) const;
|
||||
QString currentFacingPath() const;
|
||||
|
|
|
@ -350,7 +350,7 @@ void NodeList::sendDomainServerCheckIn() {
|
|||
|
||||
// pack our data to send to the domain-server including
|
||||
// the hostname information (so the domain-server can see which place name we came in on)
|
||||
packetStream << _ownerType << _publicSockAddr << _localSockAddr << _nodeTypesOfInterest.toList();
|
||||
packetStream << _ownerType.load() << _publicSockAddr << _localSockAddr << _nodeTypesOfInterest.toList();
|
||||
packetStream << DependencyManager::get<AddressManager>()->getPlaceName();
|
||||
|
||||
if (!_domainHandler.isConnected()) {
|
||||
|
|
|
@ -51,8 +51,8 @@ class NodeList : public LimitedNodeList {
|
|||
SINGLETON_DEPENDENCY
|
||||
|
||||
public:
|
||||
NodeType_t getOwnerType() const { return _ownerType; }
|
||||
void setOwnerType(NodeType_t ownerType) { _ownerType = ownerType; }
|
||||
NodeType_t getOwnerType() const { return _ownerType.load(); }
|
||||
void setOwnerType(NodeType_t ownerType) { _ownerType.store(ownerType); }
|
||||
|
||||
Q_INVOKABLE qint64 sendStats(QJsonObject statsObject, HifiSockAddr destination);
|
||||
Q_INVOKABLE qint64 sendStatsToDomainServer(QJsonObject statsObject);
|
||||
|
@ -134,7 +134,7 @@ private:
|
|||
|
||||
bool sockAddrBelongsToDomainOrNode(const HifiSockAddr& sockAddr);
|
||||
|
||||
NodeType_t _ownerType;
|
||||
std::atomic<NodeType_t> _ownerType;
|
||||
NodeSet _nodeTypesOfInterest;
|
||||
DomainHandler _domainHandler;
|
||||
int _numNoReplyDomainCheckIns;
|
||||
|
|
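The _ownerType member above becomes a std::atomic, so reads and writes from different threads are safe, but it can no longer be streamed directly; the explicit .load() in sendDomainServerCheckIn reflects that. A minimal sketch of the same idea, assuming NodeType_t is a one-byte code (the real typedef may differ):

#include <atomic>
#include <QtCore/QByteArray>
#include <QtCore/QDataStream>
#include <QtCore/QIODevice>

using NodeType_t = quint8;  // assumption: the node type is a one-byte code

static std::atomic<NodeType_t> ownerType { 0 };

// QDataStream cannot stream a std::atomic directly, so stream the loaded value,
// mirroring the packetStream << _ownerType.load() change above.
QByteArray packOwnerType() {
    QByteArray payload;
    QDataStream stream(&payload, QIODevice::WriteOnly);
    stream << ownerType.load();
    return payload;
}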
|
@ -29,9 +29,11 @@
|
|||
|
||||
using namespace udt;
|
||||
|
||||
Socket::Socket(QObject* parent) :
|
||||
Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
|
||||
QObject(parent),
|
||||
_synTimer(new QTimer(this))
|
||||
_synTimer(new QTimer(this)),
|
||||
_readyReadBackupTimer(new QTimer(this)),
|
||||
_shouldChangeSocketOptions(shouldChangeSocketOptions)
|
||||
{
|
||||
connect(&_udpSocket, &QUdpSocket::readyRead, this, &Socket::readPendingDatagrams);
|
||||
|
||||
|
@ -45,21 +47,29 @@ Socket::Socket(QObject* parent) :
|
|||
connect(&_udpSocket, SIGNAL(error(QAbstractSocket::SocketError)),
|
||||
this, SLOT(handleSocketError(QAbstractSocket::SocketError)));
|
||||
connect(&_udpSocket, &QAbstractSocket::stateChanged, this, &Socket::handleStateChanged);
|
||||
|
||||
// in order to help track down the zombie server bug, add a timer to check if we missed a readyRead
|
||||
const int READY_READ_BACKUP_CHECK_MSECS = 10 * 1000;
|
||||
connect(_readyReadBackupTimer, &QTimer::timeout, this, &Socket::checkForReadyReadBackup);
|
||||
_readyReadBackupTimer->start(READY_READ_BACKUP_CHECK_MSECS);
|
||||
}
|
||||
|
||||
void Socket::bind(const QHostAddress& address, quint16 port) {
|
||||
_udpSocket.bind(address, port);
|
||||
setSystemBufferSizes();
|
||||
|
||||
if (_shouldChangeSocketOptions) {
|
||||
setSystemBufferSizes();
|
||||
|
||||
#if defined(Q_OS_LINUX)
|
||||
auto sd = _udpSocket.socketDescriptor();
|
||||
int val = IP_PMTUDISC_DONT;
|
||||
setsockopt(sd, IPPROTO_IP, IP_MTU_DISCOVER, &val, sizeof(val));
|
||||
auto sd = _udpSocket.socketDescriptor();
|
||||
int val = IP_PMTUDISC_DONT;
|
||||
setsockopt(sd, IPPROTO_IP, IP_MTU_DISCOVER, &val, sizeof(val));
|
||||
#elif defined(Q_OS_WINDOWS)
|
||||
auto sd = _udpSocket.socketDescriptor();
|
||||
int val = 0; // false
|
||||
setsockopt(sd, IPPROTO_IP, IP_DONTFRAGMENT, &val, sizeof(val));
|
||||
auto sd = _udpSocket.socketDescriptor();
|
||||
int val = 0; // false
|
||||
setsockopt(sd, IPPROTO_IP, IP_DONTFRAGMENT, &val, sizeof(val));
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
void Socket::rebind() {
|
||||
|
@ -292,9 +302,25 @@ void Socket::messageFailed(Connection* connection, Packet::MessageNumber message
|
|||
}
|
||||
}
|
||||
|
||||
void Socket::checkForReadyReadBackup() {
|
||||
if (_udpSocket.hasPendingDatagrams()) {
|
||||
qCDebug(networking) << "Socket::checkForReadyReadBackup() detected blocked readyRead signal. Flushing pending datagrams.";
|
||||
|
||||
// drop all of the pending datagrams on the floor
|
||||
while (_udpSocket.hasPendingDatagrams()) {
|
||||
_udpSocket.readDatagram(nullptr, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Socket::readPendingDatagrams() {
|
||||
int packetSizeWithHeader = -1;
|
||||
|
||||
while ((packetSizeWithHeader = _udpSocket.pendingDatagramSize()) != -1) {
|
||||
|
||||
// we're reading a packet so re-start the readyRead backup timer
|
||||
_readyReadBackupTimer->start();
|
||||
|
||||
// grab a time point we can mark as the receive time of this packet
|
||||
auto receiveTime = p_high_resolution_clock::now();
|
||||
|
||||
|
|
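The readyRead backup timer above is a watchdog for the case where QUdpSocket stops emitting readyRead while datagrams are still queued. A minimal sketch of that watchdog, assuming a 10-second check interval like the one in the diff:

#include <QtCore/QTimer>
#include <QtNetwork/QUdpSocket>

// Periodically check for datagrams that arrived without a readyRead signal and drop them,
// so the socket does not stay wedged behind a stuck queue.
void installReadyReadWatchdog(QUdpSocket* socket, QObject* parent) {
    const int CHECK_INTERVAL_MSECS = 10 * 1000;
    QTimer* watchdog = new QTimer(parent);
    QObject::connect(watchdog, &QTimer::timeout, socket, [socket] {
        if (socket->hasPendingDatagrams()) {
            // drain the queue; a real implementation might instead re-enter the read loop
            while (socket->hasPendingDatagrams()) {
                socket->readDatagram(nullptr, 0);
            }
        }
    });
    watchdog->start(CHECK_INTERVAL_MSECS);
}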
|
@ -54,7 +54,7 @@ class Socket : public QObject {
|
|||
public:
|
||||
using StatsVector = std::vector<std::pair<HifiSockAddr, ConnectionStats::Stats>>;
|
||||
|
||||
Socket(QObject* object = 0);
|
||||
Socket(QObject* object = 0, bool shouldChangeSocketOptions = true);
|
||||
|
||||
quint16 localPort() const { return _udpSocket.localPort(); }
|
||||
|
||||
|
@ -101,6 +101,7 @@ public slots:
|
|||
|
||||
private slots:
|
||||
void readPendingDatagrams();
|
||||
void checkForReadyReadBackup();
|
||||
void rateControlSync();
|
||||
|
||||
void handleSocketError(QAbstractSocket::SocketError socketError);
|
||||
|
@ -136,9 +137,13 @@ private:
|
|||
int _synInterval { 10 }; // 10ms
|
||||
QTimer* _synTimer { nullptr };
|
||||
|
||||
QTimer* _readyReadBackupTimer { nullptr };
|
||||
|
||||
int _maxBandwidth { -1 };
|
||||
|
||||
std::unique_ptr<CongestionControlVirtualFactory> _ccFactory { new CongestionControlFactory<TCPVegasCC>() };
|
||||
|
||||
bool _shouldChangeSocketOptions { true };
|
||||
|
||||
friend UDTTest;
|
||||
};
|
||||
|
|
|
@ -11,6 +11,8 @@
|
|||
|
||||
#include <PhysicsCollisionGroups.h>
|
||||
|
||||
#include <PerfStat.h>
|
||||
|
||||
#include "CharacterController.h"
|
||||
#include "ObjectMotionState.h"
|
||||
#include "PhysicsEngine.h"
|
||||
|
@ -286,6 +288,47 @@ void PhysicsEngine::stepSimulation() {
|
|||
}
|
||||
}
|
||||
|
||||
void PhysicsEngine::harvestPerformanceStats() {
|
||||
// unfortunately the full context names get too long for our stats presentation format
|
||||
//QString contextName = PerformanceTimer::getContextName(); // TODO: how to show full context name?
|
||||
QString contextName("...");
|
||||
|
||||
CProfileIterator* profileIterator = CProfileManager::Get_Iterator();
|
||||
if (profileIterator) {
|
||||
// hunt for stepSimulation context
|
||||
profileIterator->First();
|
||||
for (int32_t childIndex = 0; !profileIterator->Is_Done(); ++childIndex) {
|
||||
if (QString(profileIterator->Get_Current_Name()) == "stepSimulation") {
|
||||
profileIterator->Enter_Child(childIndex);
|
||||
recursivelyHarvestPerformanceStats(profileIterator, contextName);
|
||||
break;
|
||||
}
|
||||
profileIterator->Next();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void PhysicsEngine::recursivelyHarvestPerformanceStats(CProfileIterator* profileIterator, QString contextName) {
|
||||
QString parentContextName = contextName + QString("/") + QString(profileIterator->Get_Current_Parent_Name());
|
||||
// get the stats for the children
|
||||
int32_t numChildren = 0;
|
||||
profileIterator->First();
|
||||
while (!profileIterator->Is_Done()) {
|
||||
QString childContextName = parentContextName + QString("/") + QString(profileIterator->Get_Current_Name());
|
||||
uint64_t time = (uint64_t)((btScalar)MSECS_PER_SECOND * profileIterator->Get_Current_Total_Time());
|
||||
PerformanceTimer::addTimerRecord(childContextName, time);
|
||||
profileIterator->Next();
|
||||
++numChildren;
|
||||
}
|
||||
// recurse the children
|
||||
for (int32_t i = 0; i < numChildren; ++i) {
|
||||
profileIterator->Enter_Child(i);
|
||||
recursivelyHarvestPerformanceStats(profileIterator, contextName);
|
||||
}
|
||||
// return to the parent
|
||||
profileIterator->Enter_Parent();
|
||||
}
|
||||
|
||||
void PhysicsEngine::doOwnershipInfection(const btCollisionObject* objectA, const btCollisionObject* objectB) {
|
||||
BT_PROFILE("ownershipInfection");
|
||||
|
||||
|
|
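recursivelyHarvestPerformanceStats() above walks Bullet's profile tree, building slash-separated context names and recording each node's time. A simplified analogue of that traversal over a plain struct tree (no Bullet dependency), to show the naming and recursion pattern:

#include <cstdint>
#include <functional>
#include <string>
#include <vector>

struct ProfileNode {
    std::string name;
    uint64_t timeUsecs { 0 };
    std::vector<ProfileNode> children;
};

// Visit every node, reporting "parent/child/..." paths the way the harvester builds
// its context names before handing them to PerformanceTimer::addTimerRecord.
void harvest(const ProfileNode& node, const std::string& parentContext,
             const std::function<void(const std::string&, uint64_t)>& record) {
    std::string context = parentContext + "/" + node.name;
    record(context, node.timeUsecs);
    for (const auto& child : node.children) {
        harvest(child, context, record);
    }
}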
|
@ -59,6 +59,7 @@ public:
|
|||
void reinsertObject(ObjectMotionState* object);
|
||||
|
||||
void stepSimulation();
|
||||
void harvestPerformanceStats();
|
||||
void updateContactMap();
|
||||
|
||||
bool hasOutgoingChanges() const { return _hasOutgoingChanges; }
|
||||
|
@ -89,6 +90,7 @@ public:
|
|||
|
||||
private:
|
||||
void addObjectToDynamicsWorld(ObjectMotionState* motionState);
|
||||
void recursivelyHarvestPerformanceStats(CProfileIterator* profileIterator, QString contextName);
|
||||
|
||||
/// \brief bump any objects that touch this one, then remove contact info
|
||||
void bumpAndPruneContacts(ObjectMotionState* motionState);
|
||||
|
|
|
@ -115,6 +115,10 @@ public:
|
|||
return false;
|
||||
}
|
||||
|
||||
virtual bool setExtraLaser(HandLaserMode mode, const vec4& color, const glm::vec3& sensorSpaceStart, const vec3& sensorSpaceDirection) {
|
||||
return false;
|
||||
}
|
||||
|
||||
virtual bool suppressKeyboard() { return false; }
|
||||
virtual void unsuppressKeyboard() {};
|
||||
virtual bool isKeyboardVisible() { return false; }
|
||||
|
|
|
@ -862,8 +862,12 @@ void Model::setURL(const QUrl& url) {
|
|||
{
|
||||
render::PendingChanges pendingChanges;
|
||||
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
|
||||
removeFromScene(scene, pendingChanges);
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
if (scene) {
|
||||
removeFromScene(scene, pendingChanges);
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
} else {
|
||||
qCWarning(renderutils) << "Model::setURL(), Unexpected null scene, possibly during application shutdown";
|
||||
}
|
||||
}
|
||||
|
||||
_needsReload = true;
|
||||
|
|
|
@ -48,6 +48,10 @@ Scene::Scene(glm::vec3 origin, float size) :
|
|||
_items.push_back(Item()); // add the itemID #0 to nothing
|
||||
}
|
||||
|
||||
Scene::~Scene() {
|
||||
qDebug() << "Scene::~Scene()";
|
||||
}
|
||||
|
||||
ItemID Scene::allocateID() {
|
||||
// Just increment and return the previous value, initialized at 0
|
||||
return _IDAllocator.fetch_add(1);
|
||||
|
|
|
@ -55,7 +55,7 @@ typedef std::queue<PendingChanges> PendingChangesQueue;
|
|||
class Scene {
|
||||
public:
|
||||
Scene(glm::vec3 origin, float size);
|
||||
~Scene() {}
|
||||
~Scene();
|
||||
|
||||
// This call is thread safe, can be called from anywhere to allocate a new ID
|
||||
ItemID allocateID();
|
||||
|
|
|
@ -31,8 +31,8 @@ bool PerformanceWarning::_suppressShortTimings = false;
|
|||
// Destructor handles recording all of our stats
|
||||
PerformanceWarning::~PerformanceWarning() {
|
||||
quint64 end = usecTimestampNow();
|
||||
quint64 elapsedusec = (end - _start);
|
||||
double elapsedmsec = elapsedusec / 1000.0;
|
||||
quint64 elapsedUsec = (end - _start);
|
||||
double elapsedmsec = elapsedUsec / 1000.0;
|
||||
if ((_alwaysDisplay || _renderWarningsOn) && elapsedmsec > 1) {
|
||||
if (elapsedmsec > 1000) {
|
||||
double elapsedsec = (end - _start) / 1000000.0;
|
||||
|
@ -53,7 +53,7 @@ PerformanceWarning::~PerformanceWarning() {
|
|||
}
|
||||
// if the caller gave us a pointer to store the running total, track it now.
|
||||
if (_runningTotal) {
|
||||
*_runningTotal += elapsedusec;
|
||||
*_runningTotal += elapsedUsec;
|
||||
}
|
||||
if (_totalCalls) {
|
||||
*_totalCalls += 1;
|
||||
|
@ -65,11 +65,11 @@ PerformanceWarning::~PerformanceWarning() {
|
|||
// ----------------------------------------------------------------------------
|
||||
const quint64 STALE_STAT_PERIOD = 4 * USECS_PER_SECOND;
|
||||
|
||||
void PerformanceTimerRecord::tallyResult(const quint64& now) {
|
||||
void PerformanceTimerRecord::tallyResult(const quint64& now) {
|
||||
if (_numAccumulations > 0) {
|
||||
_numTallies++;
|
||||
_movingAverage.updateAverage(_runningTotal - _lastTotal);
|
||||
_lastTotal = _runningTotal;
|
||||
_numTallies++;
|
||||
_movingAverage.updateAverage(_runningTotal - _lastTotal);
|
||||
_lastTotal = _runningTotal;
|
||||
_numAccumulations = 0;
|
||||
_expiry = now + STALE_STAT_PERIOD;
|
||||
}
|
||||
|
@ -96,10 +96,10 @@ PerformanceTimer::PerformanceTimer(const QString& name) {
|
|||
|
||||
PerformanceTimer::~PerformanceTimer() {
|
||||
if (_isActive && _start != 0) {
|
||||
quint64 elapsedusec = (usecTimestampNow() - _start);
|
||||
quint64 elapsedUsec = (usecTimestampNow() - _start);
|
||||
QString& fullName = _fullNames[QThread::currentThread()];
|
||||
PerformanceTimerRecord& namedRecord = _records[fullName];
|
||||
namedRecord.accumulateResult(elapsedusec);
|
||||
namedRecord.accumulateResult(elapsedUsec);
|
||||
fullName.resize(fullName.size() - (_name.size() + 1));
|
||||
}
|
||||
}
|
||||
|
@ -109,6 +109,17 @@ bool PerformanceTimer::isActive() {
|
|||
return _isActive;
|
||||
}
|
||||
|
||||
// static
|
||||
QString PerformanceTimer::getContextName() {
|
||||
return _fullNames[QThread::currentThread()];
|
||||
}
|
||||
|
||||
// static
|
||||
void PerformanceTimer::addTimerRecord(const QString& fullName, quint64 elapsedUsec) {
|
||||
PerformanceTimerRecord& namedRecord = _records[fullName];
|
||||
namedRecord.accumulateResult(elapsedUsec);
|
||||
}
|
||||
|
||||
// static
|
||||
void PerformanceTimer::setActive(bool active) {
|
||||
if (active != _isActive) {
|
||||
|
@ -117,7 +128,7 @@ void PerformanceTimer::setActive(bool active) {
|
|||
_fullNames.clear();
|
||||
_records.clear();
|
||||
}
|
||||
|
||||
|
||||
qDebug() << "PerformanceTimer has been turned" << ((active) ? "on" : "off");
|
||||
}
|
||||
}
|
||||
|
@ -142,7 +153,7 @@ void PerformanceTimer::dumpAllTimerRecords() {
|
|||
QMapIterator<QString, PerformanceTimerRecord> i(_records);
|
||||
while (i.hasNext()) {
|
||||
i.next();
|
||||
qCDebug(shared) << i.key() << ": average " << i.value().getAverage()
|
||||
qCDebug(shared) << i.key() << ": average " << i.value().getAverage()
|
||||
<< " [" << i.value().getMovingAverage() << "]"
|
||||
<< "usecs over" << i.value().getCount() << "calls";
|
||||
}
|
||||
|
|
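addTimerRecord() above lets external code (here the physics engine) feed elapsed times into the shared records map by full context name. A standalone sketch of that accumulate-by-name idea using a plain map, as a rough stand-in for the PerformanceTimerRecord machinery:

#include <cstdint>
#include <map>
#include <string>

struct TimerRecord {
    uint64_t runningTotalUsecs { 0 };
    uint64_t count { 0 };
    uint64_t average() const { return count ? runningTotalUsecs / count : 0; }
};

static std::map<std::string, TimerRecord> records;

// Accumulate an externally measured duration under a full context name,
// the way PhysicsEngine::harvestPerformanceStats feeds Bullet timings in.
void addTimerRecord(const std::string& fullName, uint64_t elapsedUsecs) {
    TimerRecord& record = records[fullName];
    record.runningTotalUsecs += elapsedUsecs;
    ++record.count;
}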
|
@ -45,7 +45,7 @@ public:
|
|||
_alwaysDisplay(alwaysDisplay),
|
||||
_runningTotal(runningTotal),
|
||||
_totalCalls(totalCalls) { }
|
||||
|
||||
|
||||
quint64 elapsed() const { return (usecTimestampNow() - _start); };
|
||||
|
||||
~PerformanceWarning();
|
||||
|
@ -56,14 +56,14 @@ public:
|
|||
class PerformanceTimerRecord {
|
||||
public:
|
||||
PerformanceTimerRecord() : _runningTotal(0), _lastTotal(0), _numAccumulations(0), _numTallies(0), _expiry(0) {}
|
||||
|
||||
|
||||
void accumulateResult(const quint64& elapsed) { _runningTotal += elapsed; ++_numAccumulations; }
|
||||
void tallyResult(const quint64& now);
|
||||
bool isStale(const quint64& now) const { return now > _expiry; }
|
||||
quint64 getAverage() const { return (_numTallies == 0) ? 0 : _runningTotal / _numTallies; }
|
||||
quint64 getMovingAverage() const { return (_numTallies == 0) ? 0 : _movingAverage.getAverage(); }
|
||||
quint64 getCount() const { return _numTallies; }
|
||||
|
||||
|
||||
private:
|
||||
quint64 _runningTotal;
|
||||
quint64 _lastTotal;
|
||||
|
@ -81,7 +81,9 @@ public:
|
|||
|
||||
static bool isActive();
|
||||
static void setActive(bool active);
|
||||
|
||||
|
||||
static QString getContextName();
|
||||
static void addTimerRecord(const QString& fullName, quint64 elapsedUsec);
|
||||
static const PerformanceTimerRecord& getTimerRecord(const QString& name) { return _records[name]; };
|
||||
static const QMap<QString, PerformanceTimerRecord>& getAllTimerRecords() { return _records; };
|
||||
static void tallyAllTimerRecords();
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
#include "NumericalConstants.h"
|
||||
#include "PathUtils.h"
|
||||
|
||||
|
||||
RunningMarker::RunningMarker(QObject* parent, QString name) :
|
||||
_parent(parent),
|
||||
_name(name)
|
||||
|
@ -30,24 +29,28 @@ void RunningMarker::startRunningMarker() {
|
|||
static const int RUNNING_STATE_CHECK_IN_MSECS = MSECS_PER_SECOND;
|
||||
|
||||
// start the running marker thread so its event loop is running
|
||||
QThread* runningMarkerThread = new QThread(_parent);
|
||||
runningMarkerThread->setObjectName("Running Marker Thread");
|
||||
runningMarkerThread->start();
|
||||
_runningMarkerThread = new QThread(_parent);
|
||||
_runningMarkerThread->setObjectName("Running Marker Thread");
|
||||
_runningMarkerThread->start();
|
||||
|
||||
writeRunningMarkerFiler(); // write the first file, even before timer
|
||||
|
||||
QTimer* runningMarkerTimer = new QTimer(_parent);
|
||||
QObject::connect(runningMarkerTimer, &QTimer::timeout, [=](){
|
||||
_runningMarkerTimer = new QTimer();
|
||||
QObject::connect(_runningMarkerTimer, &QTimer::timeout, [=](){
|
||||
writeRunningMarkerFiler();
|
||||
});
|
||||
runningMarkerTimer->start(RUNNING_STATE_CHECK_IN_MSECS);
|
||||
_runningMarkerTimer->start(RUNNING_STATE_CHECK_IN_MSECS);
|
||||
|
||||
// put the timer on the thread
|
||||
runningMarkerTimer->moveToThread(runningMarkerThread);
|
||||
_runningMarkerTimer->moveToThread(_runningMarkerThread);
|
||||
}
|
||||
|
||||
RunningMarker::~RunningMarker() {
|
||||
deleteRunningMarkerFile();
|
||||
QMetaObject::invokeMethod(_runningMarkerTimer, "stop", Qt::BlockingQueuedConnection);
|
||||
_runningMarkerThread->quit();
|
||||
_runningMarkerTimer->deleteLater();
|
||||
_runningMarkerThread->deleteLater();
|
||||
}
|
||||
|
||||
void RunningMarker::writeRunningMarkerFiler() {
|
||||
|
|
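The RunningMarker changes above keep the timer and thread as members so they can be stopped and deleted in the destructor. A minimal sketch of that lifecycle (create a QTimer, move it to a worker QThread, and tear both down), with a placeholder tick handler:

#include <QtCore/QDebug>
#include <QtCore/QMetaObject>
#include <QtCore/QThread>
#include <QtCore/QTimer>

class PeriodicWorker {
public:
    void start(QObject* parent, int intervalMsecs) {
        _thread = new QThread(parent);
        _thread->setObjectName("Periodic Worker Thread");
        _thread->start();

        _timer = new QTimer();  // no parent, so it can be moved to the worker thread
        QObject::connect(_timer, &QTimer::timeout, [] {
            qDebug() << "tick";  // placeholder for the real periodic work
        });
        _timer->start(intervalMsecs);
        _timer->moveToThread(_thread);  // QObject::moveToThread restarts active timers in the new thread
    }

    void stop() {
        // stop the timer on its own thread, then shut the thread down
        // (mirrors the teardown added in ~RunningMarker above, plus an explicit wait);
        // a stricter cleanup could delete the timer from within the worker thread before quitting
        QMetaObject::invokeMethod(_timer, "stop", Qt::BlockingQueuedConnection);
        _thread->quit();
        _thread->wait();
        _timer->deleteLater();
        _thread->deleteLater();
    }

private:
    QThread* _thread { nullptr };
    QTimer* _timer { nullptr };
};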
|
@ -15,6 +15,9 @@
|
|||
#include <QObject>
|
||||
#include <QString>
|
||||
|
||||
class QThread;
|
||||
class QTimer;
|
||||
|
||||
class RunningMarker {
|
||||
public:
|
||||
RunningMarker(QObject* parent, QString name);
|
||||
|
@ -30,6 +33,8 @@ protected:
|
|||
|
||||
QObject* _parent { nullptr };
|
||||
QString _name;
|
||||
QThread* _runningMarkerThread { nullptr };
|
||||
QTimer* _runningMarkerTimer { nullptr };
|
||||
};
|
||||
|
||||
#endif // hifi_RunningMarker_h
|
||||
|
|
|
@ -22,6 +22,7 @@ private: \
|
|||
public: \
|
||||
static void registerType(); \
|
||||
static void show(std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {}); \
|
||||
static void hide(); \
|
||||
static void toggle(std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {}); \
|
||||
static void load(std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {}); \
|
||||
private:
|
||||
|
@ -33,6 +34,7 @@ protected: \
|
|||
public: \
|
||||
static void registerType(); \
|
||||
static void show(); \
|
||||
static void hide(); \
|
||||
static void toggle(); \
|
||||
static void load(); \
|
||||
private:
|
||||
|
@ -50,6 +52,11 @@ private:
|
|||
offscreenUi->show(QML, NAME, f); \
|
||||
} \
|
||||
\
|
||||
void x::hide() { \
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>(); \
|
||||
offscreenUi->hide(NAME); \
|
||||
} \
|
||||
\
|
||||
void x::toggle(std::function<void(QQmlContext*, QObject*)> f) { \
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>(); \
|
||||
offscreenUi->toggle(QML, NAME, f); \
|
||||
|
@ -70,6 +77,11 @@ private:
|
|||
auto offscreenUi = DependencyManager::get<OffscreenUi>(); \
|
||||
offscreenUi->show(QML, NAME, f); \
|
||||
} \
|
||||
void x::hide() { \
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>(); \
|
||||
offscreenUi->hide(NAME); \
|
||||
} \
|
||||
\
|
||||
void x::toggle() { \
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>(); \
|
||||
offscreenUi->toggle(QML, NAME, f); \
|
||||
|
|
|
@ -35,6 +35,7 @@ Q_DECLARE_LOGGING_CATEGORY(displayplugins)
|
|||
|
||||
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
|
||||
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit"; // this probably shouldn't be hardcoded here
|
||||
|
||||
PoseData _nextRenderPoseData;
|
||||
PoseData _nextSimPoseData;
|
||||
|
@ -49,8 +50,6 @@ bool _openVrDisplayActive { false };
|
|||
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_LEFT{ 0, 0, 0.5f, 1 };
|
||||
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_RIGHT{ 0.5f, 0, 1, 1 };
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
|
||||
#define REPROJECTION_BINDING 1
|
||||
|
||||
static const char* HMD_REPROJECTION_VERT = R"SHADER(
|
||||
|
@ -351,8 +350,6 @@ public:
|
|||
OpenVrDisplayPlugin& _plugin;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
bool OpenVrDisplayPlugin::isSupported() const {
|
||||
return openVrSupported();
|
||||
}
|
||||
|
@ -376,6 +373,9 @@ void OpenVrDisplayPlugin::init() {
|
|||
emit deviceConnected(getName());
|
||||
}
|
||||
|
||||
// FIXME remove once OpenVR header is updated
|
||||
#define VRCompositor_ReprojectionAsync 0x04
|
||||
|
||||
bool OpenVrDisplayPlugin::internalActivate() {
|
||||
if (!_system) {
|
||||
_system = acquireOpenVrSystem();
|
||||
|
@ -394,6 +394,15 @@ bool OpenVrDisplayPlugin::internalActivate() {
|
|||
return false;
|
||||
}
|
||||
|
||||
vr::Compositor_FrameTiming timing;
|
||||
memset(&timing, 0, sizeof(timing));
|
||||
timing.m_nSize = sizeof(vr::Compositor_FrameTiming);
|
||||
vr::VRCompositor()->GetFrameTiming(&timing);
|
||||
bool asyncReprojectionActive = timing.m_nReprojectionFlags & VRCompositor_ReprojectionAsync;
|
||||
|
||||
_threadedSubmit = !asyncReprojectionActive;
|
||||
qDebug() << "OpenVR Threaded submit enabled: " << _threadedSubmit;
|
||||
|
||||
_openVrDisplayActive = true;
|
||||
_container->setIsOptionChecked(StandingHMDSensorMode, true);
|
||||
|
||||
|
@ -434,16 +443,16 @@ bool OpenVrDisplayPlugin::internalActivate() {
|
|||
#endif
|
||||
}
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_submitThread = std::make_shared<OpenVrSubmitThread>(*this);
|
||||
if (!_submitCanvas) {
|
||||
withMainThreadContext([&] {
|
||||
_submitCanvas = std::make_shared<gl::OffscreenContext>();
|
||||
_submitCanvas->create();
|
||||
_submitCanvas->doneCurrent();
|
||||
});
|
||||
if (_threadedSubmit) {
|
||||
_submitThread = std::make_shared<OpenVrSubmitThread>(*this);
|
||||
if (!_submitCanvas) {
|
||||
withMainThreadContext([&] {
|
||||
_submitCanvas = std::make_shared<gl::OffscreenContext>();
|
||||
_submitCanvas->create();
|
||||
_submitCanvas->doneCurrent();
|
||||
});
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return Parent::internalActivate();
|
||||
}
|
||||
|
@ -473,27 +482,27 @@ void OpenVrDisplayPlugin::customizeContext() {
|
|||
|
||||
Parent::customizeContext();
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_compositeInfos[0].texture = _compositeFramebuffer->getRenderBuffer(0);
|
||||
for (size_t i = 0; i < COMPOSITING_BUFFER_SIZE; ++i) {
|
||||
if (0 != i) {
|
||||
_compositeInfos[i].texture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, _renderTargetSize.x, _renderTargetSize.y, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT)));
|
||||
if (_threadedSubmit) {
|
||||
_compositeInfos[0].texture = _compositeFramebuffer->getRenderBuffer(0);
|
||||
for (size_t i = 0; i < COMPOSITING_BUFFER_SIZE; ++i) {
|
||||
if (0 != i) {
|
||||
_compositeInfos[i].texture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, _renderTargetSize.x, _renderTargetSize.y, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT)));
|
||||
}
|
||||
_compositeInfos[i].textureID = getGLBackend()->getTextureID(_compositeInfos[i].texture, false);
|
||||
}
|
||||
_compositeInfos[i].textureID = getGLBackend()->getTextureID(_compositeInfos[i].texture, false);
|
||||
_submitThread->_canvas = _submitCanvas;
|
||||
_submitThread->start(QThread::HighPriority);
|
||||
}
|
||||
_submitThread->_canvas = _submitCanvas;
|
||||
_submitThread->start(QThread::HighPriority);
|
||||
#endif
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::uncustomizeContext() {
|
||||
Parent::uncustomizeContext();
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_submitThread->_quit = true;
|
||||
_submitThread->wait();
|
||||
_submitThread.reset();
|
||||
#endif
|
||||
if (_threadedSubmit) {
|
||||
_submitThread->_quit = true;
|
||||
_submitThread->wait();
|
||||
_submitThread.reset();
|
||||
}
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::resetSensors() {
|
||||
|
@ -582,75 +591,76 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
|
|||
}
|
||||
|
||||
void OpenVrDisplayPlugin::compositeLayers() {
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
++_renderingIndex;
|
||||
_renderingIndex %= COMPOSITING_BUFFER_SIZE;
|
||||
if (_threadedSubmit) {
|
||||
++_renderingIndex;
|
||||
_renderingIndex %= COMPOSITING_BUFFER_SIZE;
|
||||
|
||||
auto& newComposite = _compositeInfos[_renderingIndex];
|
||||
newComposite.pose = _currentPresentFrameInfo.presentPose;
|
||||
_compositeFramebuffer->setRenderBuffer(0, newComposite.texture);
|
||||
#endif
|
||||
auto& newComposite = _compositeInfos[_renderingIndex];
|
||||
newComposite.pose = _currentPresentFrameInfo.presentPose;
|
||||
_compositeFramebuffer->setRenderBuffer(0, newComposite.texture);
|
||||
}
|
||||
|
||||
Parent::compositeLayers();
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
newComposite.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
// https://www.opengl.org/registry/specs/ARB/sync.txt:
|
||||
// > The simple flushing behavior defined by
|
||||
// > SYNC_FLUSH_COMMANDS_BIT will not help when waiting for a fence
|
||||
// > command issued in another context's command stream to complete.
|
||||
// > Applications which block on a fence sync object must take
|
||||
// > additional steps to assure that the context from which the
|
||||
// > corresponding fence command was issued has flushed that command
|
||||
// > to the graphics pipeline.
|
||||
glFlush();
|
||||
if (_threadedSubmit) {
|
||||
auto& newComposite = _compositeInfos[_renderingIndex];
|
||||
newComposite.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
// https://www.opengl.org/registry/specs/ARB/sync.txt:
|
||||
// > The simple flushing behavior defined by
|
||||
// > SYNC_FLUSH_COMMANDS_BIT will not help when waiting for a fence
|
||||
// > command issued in another context's command stream to complete.
|
||||
// > Applications which block on a fence sync object must take
|
||||
// > additional steps to assure that the context from which the
|
||||
// > corresponding fence command was issued has flushed that command
|
||||
// > to the graphics pipeline.
|
||||
glFlush();
|
||||
|
||||
if (!newComposite.textureID) {
|
||||
newComposite.textureID = getGLBackend()->getTextureID(newComposite.texture, false);
|
||||
if (!newComposite.textureID) {
|
||||
newComposite.textureID = getGLBackend()->getTextureID(newComposite.texture, false);
|
||||
}
|
||||
withPresentThreadLock([&] {
|
||||
_submitThread->update(newComposite);
|
||||
});
|
||||
}
|
||||
withPresentThreadLock([&] {
|
||||
_submitThread->update(newComposite);
|
||||
});
|
||||
#endif
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::hmdPresent() {
|
||||
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_submitThread->waitForPresent();
|
||||
#else
|
||||
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0), false);
|
||||
vr::Texture_t vrTexture{ (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
|
||||
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
|
||||
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
|
||||
vr::VRCompositor()->PostPresentHandoff();
|
||||
#endif
|
||||
if (_threadedSubmit) {
|
||||
_submitThread->waitForPresent();
|
||||
} else {
|
||||
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0), false);
|
||||
vr::Texture_t vrTexture { (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
|
||||
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
|
||||
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
|
||||
vr::VRCompositor()->PostPresentHandoff();
|
||||
_presentRate.increment();
|
||||
}
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::postPreview() {
|
||||
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
|
||||
PoseData nextRender, nextSim;
|
||||
nextRender.frameIndex = presentCount();
|
||||
#if !OPENVR_THREADED_SUBMIT
|
||||
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);
|
||||
|
||||
glm::mat4 resetMat;
|
||||
withPresentThreadLock([&] {
|
||||
resetMat = _sensorResetMat;
|
||||
});
|
||||
nextRender.update(resetMat);
|
||||
nextSim.update(resetMat);
|
||||
withPresentThreadLock([&] {
|
||||
_nextSimPoseData = nextSim;
|
||||
});
|
||||
_nextRenderPoseData = nextRender;
|
||||
|
||||
// FIXME - this looks wrong!
|
||||
_hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
|
||||
#else
|
||||
_hmdActivityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
|
||||
#endif
|
||||
|
||||
if (!_threadedSubmit) {
|
||||
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);
|
||||
|
||||
glm::mat4 resetMat;
|
||||
withPresentThreadLock([&] {
|
||||
resetMat = _sensorResetMat;
|
||||
});
|
||||
nextRender.update(resetMat);
|
||||
nextSim.update(resetMat);
|
||||
withPresentThreadLock([&] {
|
||||
_nextSimPoseData = nextSim;
|
||||
});
|
||||
_nextRenderPoseData = nextRender;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
bool OpenVrDisplayPlugin::isHmdMounted() const {

@ -684,3 +694,7 @@ void OpenVrDisplayPlugin::unsuppressKeyboard() {
bool OpenVrDisplayPlugin::isKeyboardVisible() {
return isOpenVrKeyboardShown();
}

int OpenVrDisplayPlugin::getRequiredThreadCount() const {
return Parent::getRequiredThreadCount() + (_threadedSubmit ? 1 : 0);
}

@ -15,9 +15,6 @@

const float TARGET_RATE_OpenVr = 90.0f; // FIXME: get from sdk tracked device property? This number is vive-only.

#define OPENVR_THREADED_SUBMIT 1

#if OPENVR_THREADED_SUBMIT
namespace gl {
class OffscreenContext;
}

@ -34,7 +31,6 @@ struct CompositeInfo {
glm::mat4 pose;
GLsync fence{ 0 };
};
#endif

class OpenVrDisplayPlugin : public HmdDisplayPlugin {
using Parent = HmdDisplayPlugin;

@ -58,8 +54,8 @@ public:
void unsuppressKeyboard() override;
bool isKeyboardVisible() override;

// Needs an additional thread for VR submission
int getRequiredThreadCount() const override { return Parent::getRequiredThreadCount() + 1; }
// Possibly needs an additional thread for VR submission
int getRequiredThreadCount() const override;

protected:
bool internalActivate() override;

@ -71,7 +67,6 @@ protected:
bool isHmdMounted() const override;
void postPreview() override;


private:
vr::IVRSystem* _system { nullptr };
std::atomic<vr::EDeviceActivityLevel> _hmdActivityLevel { vr::k_EDeviceActivityLevel_Unknown };

@ -80,12 +75,11 @@ private:

vr::HmdMatrix34_t _lastGoodHMDPose;
mat4 _sensorResetMat;
bool _threadedSubmit { true };

#if OPENVR_THREADED_SUBMIT
CompositeInfo::Array _compositeInfos;
size_t _renderingIndex { 0 };
std::shared_ptr<OpenVrSubmitThread> _submitThread;
std::shared_ptr<gl::OffscreenContext> _submitCanvas;
friend class OpenVrSubmitThread;
#endif
};

@ -210,6 +210,11 @@ void ViveControllerManager::renderHand(const controller::Pose& pose, gpu::Batch&

void ViveControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {

if (!_system) {
return;
}

auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
handleOpenVrEvents();
if (openVrQuitRequested()) {

@ -16,7 +16,7 @@

var MESSAGE_CHANNEL = "io.highfidelity.summon-crowd";

print('crowd-agent version 2');
print('crowd-agent version 3');

/* Observations:
- File urls for AC scripts silently fail. Use a local server (e.g., python SimpleHTTPServer) for development.

@ -102,6 +102,7 @@ function messageHandler(channel, messageString, senderID) {
messageSend({key: 'hello'}); // Allow the coordinator to count responses and make assignments.
break;
case 'hello': // ignore responses (e.g., from other agents)
case 'finishedSound':
break;
case "SUMMON":
if (message.rcpt === Agent.sessionUUID) {

@ -1,6 +1,6 @@
"use strict";
/*jslint vars: true, plusplus: true*/
/*globals Script, MyAvatar, Quat, Render, ScriptDiscoveryService, Window, LODManager, Entities, print*/
/*globals Script, MyAvatar, Quat, Vec3, Render, ScriptDiscoveryService, Window, LODManager, Entities, Messages, AvatarList, Menu, Stats, HMD, location, print*/
//
// loadedMachine.js
// scripts/developer/tests/

@ -17,37 +17,157 @@ var MINIMUM_DESKTOP_FRAMERATE = 57; // frames per second
var MINIMUM_HMD_FRAMERATE = 86;
var EXPECTED_DESKTOP_FRAMERATE = 60;
var EXPECTED_HMD_FRAMERATE = 90;
var MAXIMUM_LOAD_TIME = 60; // seconds
var MINIMUM_AVATARS = 25; // FIXME: not implemented yet. Requires agent scripts. Idea is to have them organize themselves to the right number.
var NOMINAL_LOAD_TIME = 30; // seconds
var MAXIMUM_LOAD_TIME = NOMINAL_LOAD_TIME * 2;
var MINIMUM_AVATARS = 25; // changeable by prompt

var version = 2;
var DENSITY = 0.3; // square meters per person. Some say 10 sq ft is arm's length (0.9m^2), 4.5 is crowd (0.4m^2), 2.5 is mosh pit (0.2m^2).
var SOUND_DATA = {url: "http://hifi-content.s3.amazonaws.com/howard/sounds/piano1.wav"};
var AVATARS_CHATTERING_AT_ONCE = 4; // How many of the agents should we request to play SOUND at once.
var NEXT_SOUND_SPREAD = 500; // millisecond range of how long to wait after one sound finishes, before playing the next
var ANIMATION_DATA = {
"url": "http://hifi-content.s3.amazonaws.com/howard/resources/avatar/animations/idle.fbx",
// "url": "http://hifi-content.s3.amazonaws.com/howard/resources/avatar/animations/walk_fwd.fbx", // alternative example
"startFrame": 0.0,
"endFrame": 300.0,
"timeScale": 1.0,
"loopFlag": true
};

var version = 3;
function debug() {
print.apply(null, [].concat.apply(['hrs fixme', version], [].map.call(arguments, JSON.stringify)));
}

function canonicalizePlacename(name) {
var prefix = 'dev-';
name = name.toLowerCase();
if (name.indexOf(prefix) === 0) {
name = name.slice(prefix.length);
}
return name;
}
var cachePlaces = ['localhost', 'welcome'].map(canonicalizePlacename); // For now, list the lighter weight one first.
var defaultPlace = location.hostname;
var prompt = "domain-check.js version " + version + "\n\nWhat place should we enter?";
debug(cachePlaces, defaultPlace, prompt);
var entryPlace = Window.prompt(prompt, defaultPlace);
var runTribbles = Window.confirm("Run tribbles?\n\n\
At most, only one participant should say yes.");
MINIMUM_AVATARS = parseInt(Window.prompt("Total avatars (including yourself and any already present)?", MINIMUM_AVATARS.toString()) || "0", 10);
AVATARS_CHATTERING_AT_ONCE = MINIMUM_AVATARS ? parseInt(Window.prompt("Number making sound?", Math.min(MINIMUM_AVATARS - 1, AVATARS_CHATTERING_AT_ONCE).toString()) || "0", 10) : 0;

function placesMatch(a, b) { // handling case and 'dev-' variations
return canonicalizePlacename(a) === canonicalizePlacename(b);
}
function isNowIn(place) { // true if currently in specified place
return location.hostname.toLowerCase() === place.toLowerCase();
placesMatch(location.hostname, place);
}

var cachePlaces = ['dev-Welcome', 'localhost']; // For now, list the lighter weight one first.
var isInCachePlace = cachePlaces.some(isNowIn);
var defaultPlace = isInCachePlace ? 'dev-Playa' : location.hostname;
var prompt = "domain-check.js version " + version + "\n\nWhat place should we enter?";
debug(cachePlaces, isInCachePlace, defaultPlace, prompt);
var entryPlace = Window.prompt(prompt, defaultPlace);
function go(place) { // handle (dev-)welcome in the appropriate version-specific way
debug('go', place);
if (placesMatch(place, 'welcome')) {
location.goToEntry();
} else {
location.handleLookupString(place);
}
}

var spread = Math.sqrt(MINIMUM_AVATARS * DENSITY); // meters
var turnSpread = 90; // How many degrees should turn from front range over.

function coord() { return (Math.random() * spread) - (spread / 2); } // randomly distribute a coordinate zero += spread/2.
function contains(array, item) { return array.indexOf(item) >= 0; }
function without(array, itemsToRemove) { return array.filter(function (item) { return !contains(itemsToRemove, item); }); }
function nextAfter(array, id) { // Wrapping next element in array after id.
var index = array.indexOf(id) + 1;
return array[(index >= array.length) ? 0 : index];
}
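// Worked example (illustrative, derived from the defaults above; the prompts can change these numbers):
// spread = Math.sqrt(MINIMUM_AVATARS * DENSITY) = Math.sqrt(25 * 0.3) ≈ 2.74 meters,
// so coord() scatters each summoned agent within about ±1.37 m of the coordinator on x and z,
// giving roughly the intended 0.3 square meters per avatar.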

var summonedAgents = [];
var chattering = [];
var MESSAGE_CHANNEL = "io.highfidelity.summon-crowd";
function messageSend(message) {
Messages.sendMessage(MESSAGE_CHANNEL, JSON.stringify(message));
}
function messageHandler(channel, messageString, senderID) {
if (channel !== MESSAGE_CHANNEL) {
return;
}
debug('message', channel, messageString, senderID);
if (MyAvatar.sessionUUID === senderID) { // ignore my own
return;
}
var message = {}, avatarIdentifiers;
try {
message = JSON.parse(messageString);
} catch (e) {
print(e);
}
switch (message.key) {
case "hello":
// There can be avatars we've summoned that do not yet appear in the AvatarList.
avatarIdentifiers = without(AvatarList.getAvatarIdentifiers(), summonedAgents);
debug('present', avatarIdentifiers, summonedAgents);
if ((summonedAgents.length + avatarIdentifiers.length) < MINIMUM_AVATARS) {
var chatter = chattering.length < AVATARS_CHATTERING_AT_ONCE;
if (chatter) {
chattering.push(senderID);
}
summonedAgents.push(senderID);
messageSend({
key: 'SUMMON',
rcpt: senderID,
position: Vec3.sum(MyAvatar.position, {x: coord(), y: 0, z: coord()}),
orientation: Quat.fromPitchYawRollDegrees(0, Quat.safeEulerAngles(MyAvatar.orientation).y + (turnSpread * (Math.random() - 0.5)), 0),
soundData: chatter && SOUND_DATA,
skeletonModelURL: "http://hifi-content.s3.amazonaws.com/howard/resources/meshes/defaultAvatar_full.fst",
animationData: ANIMATION_DATA
});
}
break;
case "finishedSound": // Give someone else a chance.
chattering = without(chattering, [senderID]);
Script.setTimeout(function () {
messageSend({
key: 'SUMMON',
rcpt: nextAfter(without(summonedAgents, chattering), senderID),
soundData: SOUND_DATA
});
}, Math.random() * NEXT_SOUND_SPREAD);
break;
case "HELO":
Window.alert("Someone else is summoning avatars.");
break;
default:
print("crowd-agent received unrecognized message:", messageString);
}
}
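// For reference, a rough sketch of what the 'SUMMON' assignment above looks like on the wire once
// JSON-stringified (field values here are invented placeholders, not taken from this commit):
// {"key": "SUMMON", "rcpt": "<agent session UUID>",
//  "position": {"x": 1.2, "y": 0, "z": -0.8},          // MyAvatar.position plus a random offset within `spread`
//  "orientation": {...},                               // coordinator's heading nudged by up to ±turnSpread/2 degrees
//  "soundData": {"url": ".../piano1.wav"},             // or false once AVATARS_CHATTERING_AT_ONCE is reached
//  "skeletonModelURL": ".../defaultAvatar_full.fst",
//  "animationData": {"url": ".../idle.fbx", "startFrame": 0, "endFrame": 300, "timeScale": 1, "loopFlag": true}}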
Messages.subscribe(MESSAGE_CHANNEL);
Messages.messageReceived.connect(messageHandler);
Script.scriptEnding.connect(function () {
debug('stopping agents', summonedAgents);
summonedAgents.forEach(function (id) { messageSend({key: 'STOP', rcpt: id}); });
debug('agents stopped');
Script.setTimeout(function () {
Messages.messageReceived.disconnect(messageHandler);
Messages.unsubscribe(MESSAGE_CHANNEL);
debug('unsubscribed');
}, 500);
});

var fail = false, results = "";
function addResult(label, actual, minimum, maximum) {
function addResult(label, actual, nominal, minimum, maximum) {
if ((minimum !== undefined) && (actual < minimum)) {
fail = true;
fail = ' FAILED: ' + label + ' below ' + minimum;
}
if ((maximum !== undefined) && (actual > maximum)) {
fail = true;
fail = ' FAILED: ' + label + ' above ' + maximum;
}
results += "\n" + label + ": " + actual + " (" + ((100 * actual) / (maximum || minimum)).toFixed(0) + "%)";
results += "\n" + label + ": " + actual.toFixed(0) + " (" + ((100 * actual) / nominal).toFixed(0) + "%)";
}
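// Illustrative use of the new signature (label, actual, nominal, minimum, maximum); hypothetical call, not from the commit:
// addResult('frame rate', 58.2, EXPECTED_DESKTOP_FRAMERATE, MINIMUM_DESKTOP_FRAMERATE);
// appends "frame rate: 58 (97%)" (58.2 against the 60 fps nominal) and leaves `fail` alone, since 58.2 >= 57;
// a 55 fps result would instead set fail to " FAILED: frame rate below 57".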
function giveReport() {
Window.alert(entryPlace + (fail ? " FAILED" : " OK") + "\n" + results);
Window.alert(entryPlace + (fail || " OK") + "\n" + results + "\nwith " + summonedAgents.length + " avatars added,\nand " + AVATARS_CHATTERING_AT_ONCE + " making noise.");
}

// Tests are performed domain-wide, at full LOD

@ -122,9 +242,8 @@ function doLoad(place, continuationWithLoadTime) { // Go to place and call conti
}
};

debug('go', place);
location.hostChanged.connect(waitForLoad);
location.handleLookupString(place);
go(place);
}

var config = Render.getConfig("Stats");

@ -133,48 +252,59 @@ function doRender(continuation) {
function onNewStats() { // Accumulates frames on signal during load test
frames++;
}
if (MINIMUM_AVATARS) {
messageSend({key: 'HELO'}); // Ask agents to report in now.
}

config.newStats.connect(onNewStats);
startTwirl(720, 1, 15, 0.08, function () {
var end = Date.now();
config.newStats.disconnect(onNewStats);
addResult('frame rate', 1000 * frames / (end - start),
HMD.active ? MINIMUM_HMD_FRAMERATE : MINIMUM_DESKTOP_FRAMERATE,
HMD.active ? EXPECTED_HMD_FRAMERATE : EXPECTED_DESKTOP_FRAMERATE);
HMD.active ? EXPECTED_HMD_FRAMERATE : EXPECTED_DESKTOP_FRAMERATE,
HMD.active ? MINIMUM_HMD_FRAMERATE : MINIMUM_DESKTOP_FRAMERATE);
var total = AvatarList.getAvatarIdentifiers().length;
if (MINIMUM_AVATARS && !fail) {
if (0 === summonedAgents.length) {
fail = "FAIL: No agents reported.\nPlease run " + MINIMUM_AVATARS + " instances of\n\
http://hifi-content.s3.amazonaws.com/howard/scripts/tests/performance/crowd-agent.js?v=3\n\
on your domain server.";
} else if (total < MINIMUM_AVATARS) {
fail = "FAIL: Only " + summonedAgents.length + " avatars reported. Missing " + (MINIMUM_AVATARS - total) + ".";
}
}
continuation();
});
}

var TELEPORT_PAUSE = 500;
function maybePrepareCache(continuation) {
var prepareCache = Window.confirm("Prepare cache?\n\n\
Should we start with all and only those items cached that are encountered when visiting:\n" + cachePlaces.join(', ') + "\n\
If 'yes', cache will be cleared and we will visit these two, with a turn in each, and wait for everything to be loaded.\n\
You would want to say 'no' (and make other preparations) if you were testing these places.");

if (prepareCache) {
function loadNext() {
var place = cachePlaces.shift();
doLoad(place, function (prepTime) {
debug(place, 'ready', prepTime);
if (cachePlaces.length) {
loadNext();
} else {
continuation();
}
});
}
location.handleLookupString(cachePlaces[cachePlaces.length - 1]);
function prepareCache(continuation) {
function loadNext() {
var place = cachePlaces.shift();
doLoad(place, function (prepTime) {
debug(place, 'ready', prepTime);
if (cachePlaces.length) {
loadNext();
} else {
continuation();
}
});
}
// remove entryPlace target from cachePlaces
var targetInCache = cachePlaces.indexOf(canonicalizePlacename(entryPlace));
if (targetInCache !== -1) {
cachePlaces.splice(targetInCache, 1);
}
debug('cachePlaces', cachePlaces);
go(cachePlaces[1] || entryPlace); // Not quite right for entryPlace case (allows some qt pre-caching), but close enough.
Script.setTimeout(function () {
Menu.triggerOption("Reload Content (Clears all caches)");
Script.setTimeout(loadNext, TELEPORT_PAUSE);
} else {
location.handleLookupString(isNowIn(cachePlaces[0]) ? cachePlaces[1] : cachePlaces[0]);
Script.setTimeout(continuation, TELEPORT_PAUSE);
}
}, TELEPORT_PAUSE);
}

function maybeRunTribbles(continuation) {
if (Window.confirm("Run tribbles?\n\n\
At most, only one participant should say yes.")) {
if (runTribbles) {
Script.load('http://cdn.highfidelity.com/davidkelly/production/scripts/tests/performance/tribbles.js');
Script.setTimeout(continuation, 3000);
} else {

@ -186,10 +316,11 @@ if (!entryPlace) {
Window.alert("domain-check.js cancelled");
Script.stop();
} else {
maybePrepareCache(function (prepTime) {
prepareCache(function (prepTime) {
debug('cache ready', prepTime);
doLoad(entryPlace, function (loadTime) {
addResult("load time", loadTime, undefined, MAXIMUM_LOAD_TIME);
addResult("load time", loadTime, NOMINAL_LOAD_TIME, undefined, MAXIMUM_LOAD_TIME);
LODManager.setAutomaticLODAdjust(initialLodIsAutomatic); // after loading, restore lod.
maybeRunTribbles(function () {
doRender(function () {
giveReport();

@ -304,9 +304,9 @@ function setEnabled(value) {

var CHANNEL_AWAY_ENABLE = "Hifi-Away-Enable";
var handleMessage = function(channel, message, sender) {
print("Got away message");
if (channel == CHANNEL_AWAY_ENABLE) {
setEnabled(message == 'enable');
if (channel === CHANNEL_AWAY_ENABLE) {
print("away.js | Got message on Hifi-Away-Enable: ", message);
setEnabled(message === 'enable');
}
}
Messages.subscribe(CHANNEL_AWAY_ENABLE);
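// Example (hypothetical caller, not part of this commit): another script can toggle away mode over this channel with
//     Messages.sendMessage("Hifi-Away-Enable", "enable");   // or "disable"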

@ -343,6 +343,7 @@ Script.scriptEnding.connect(function () {
Controller.disableMapping(eventMappingName);
Controller.mousePressEvent.disconnect(goActive);
Controller.keyPressEvent.disconnect(maybeGoActive);
Messages.messageReceived.disconnect(handleMessage);
});

if (HMD.active && !HMD.mounted) {

@ -16,6 +16,8 @@
|
|||
Script.include("controllerDisplay.js");
|
||||
Script.include("viveControllerConfiguration.js");
|
||||
|
||||
var HIDE_CONTROLLERS_ON_EQUIP = false;
|
||||
|
||||
//
|
||||
// Management of controller display
|
||||
//
|
||||
|
@ -116,12 +118,14 @@ ControllerDisplayManager = function() {
|
|||
}
|
||||
}
|
||||
} else if (channel === 'Hifi-Object-Manipulation') {
|
||||
data = JSON.parse(message);
|
||||
visible = data.action !== 'equip';
|
||||
if (data.joint === "LeftHand") {
|
||||
self.setLeftVisible(visible);
|
||||
} else if (data.joint === "RightHand") {
|
||||
self.setRightVisible(visible);
|
||||
if (HIDE_CONTROLLERS_ON_EQUIP) {
|
||||
data = JSON.parse(message);
|
||||
visible = data.action !== 'equip';
|
||||
if (data.joint === "LeftHand") {
|
||||
self.setLeftVisible(visible);
|
||||
} else if (data.joint === "RightHand") {
|
||||
self.setRightVisible(visible);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -204,7 +204,7 @@ function overlayFromWorldPoint(point) {
|
|||
}
|
||||
|
||||
function activeHudPoint2d(activeHand) { // if controller is valid, update reticle position and answer 2d point. Otherwise falsey.
|
||||
var controllerPose = getControllerWorldLocation(activeHand, true);
|
||||
var controllerPose = getControllerWorldLocation(activeHand, true); // note: this will return head pose if hand pose is invalid (third eye)
|
||||
if (!controllerPose.valid) {
|
||||
return; // Controller is cradled.
|
||||
}
|
||||
|
@ -447,12 +447,20 @@ function clearSystemLaser() {
|
|||
return;
|
||||
}
|
||||
HMD.disableHandLasers(BOTH_HUD_LASERS);
|
||||
HMD.disableExtraLaser();
|
||||
systemLaserOn = false;
|
||||
weMovedReticle = true;
|
||||
Reticle.position = { x: -1, y: -1 };
|
||||
}
|
||||
function setColoredLaser() { // answer trigger state if lasers supported, else falsey.
|
||||
var color = (activeTrigger.state === 'full') ? LASER_TRIGGER_COLOR_XYZW : LASER_SEARCH_COLOR_XYZW;
|
||||
|
||||
if (!HMD.isHandControllerAvailable()) {
|
||||
var position = MyAvatar.getHeadPosition();
|
||||
var direction = Quat.getUp(Quat.multiply(MyAvatar.headOrientation, Quat.angleAxis(-90, { x: 1, y: 0, z: 0 })));
|
||||
return HMD.setExtraLaser(position, true, color, direction);
|
||||
}
|
||||
|
||||
return HMD.setHandLasers(activeHudLaser, true, color, SYSTEM_LASER_DIRECTION) && activeTrigger.state;
|
||||
}
|
||||
|
||||
|
@ -491,11 +499,21 @@ function update() {
|
|||
if (!hudPoint2d) {
|
||||
return off();
|
||||
}
|
||||
|
||||
|
||||
// If there's a HUD element at the (newly moved) reticle, just make it visible and bail.
|
||||
if (isPointingAtOverlay(hudPoint2d)) {
|
||||
if (HMD.active) {
|
||||
Reticle.depth = hudReticleDistance();
|
||||
|
||||
if (!HMD.isHandControllerAvailable()) {
|
||||
var color = (activeTrigger.state === 'full') ? LASER_TRIGGER_COLOR_XYZW : LASER_SEARCH_COLOR_XYZW;
|
||||
var position = MyAvatar.getHeadPosition();
|
||||
var direction = Quat.getUp(Quat.multiply(MyAvatar.headOrientation, Quat.angleAxis(-90, { x: 1, y: 0, z: 0 })));
|
||||
HMD.setExtraLaser(position, true, color, direction);
|
||||
}
|
||||
}
|
||||
|
||||
if (activeTrigger.state && (!systemLaserOn || (systemLaserOn !== activeTrigger.state))) { // last=>wrong color
|
||||
// If the active plugin doesn't implement hand lasers, show the mouse reticle instead.
|
||||
systemLaserOn = setColoredLaser();
|
||||
|
|
|
@ -312,15 +312,11 @@
|
|||
<input type="number" id="property-lifetime">
|
||||
</div>
|
||||
<hr class="behavior-group" />
|
||||
<div class="behavior-group property url ">
|
||||
<!--
|
||||
FIXME: If reload buttons at the end of each URL continue to work OK during beta, this reload button and associated
|
||||
code should be removed.
|
||||
<div class="behavior-group property url refresh">
|
||||
<input type="hidden" id="property-script-timestamp" class="value">
|
||||
<input type="button" id="reload-script-button" value="Reload">
|
||||
-->
|
||||
<label for="property-script-url">Script URL</label>
|
||||
<input type="text" id="property-script-url">
|
||||
<input type="button" id="reload-script-button" class="glyph" value="F">
|
||||
</div>
|
||||
<div class="section-header model-group model-section zone-section">
|
||||
<label>Model</label><span>M</span>
|
||||
|
|
|
@ -591,10 +591,7 @@ function loaded() {
|
|||
|
||||
var elLifetime = document.getElementById("property-lifetime");
|
||||
var elScriptURL = document.getElementById("property-script-url");
|
||||
/*
|
||||
FIXME: See FIXME for property-script-url.
|
||||
var elScriptTimestamp = document.getElementById("property-script-timestamp");
|
||||
*/
|
||||
var elReloadScriptButton = document.getElementById("reload-script-button");
|
||||
var elUserData = document.getElementById("property-user-data");
|
||||
var elClearUserData = document.getElementById("userdata-clear");
|
||||
|
@ -851,10 +848,7 @@ function loaded() {
|
|||
elCollisionSoundURL.value = properties.collisionSoundURL;
|
||||
elLifetime.value = properties.lifetime;
|
||||
elScriptURL.value = properties.script;
|
||||
/*
|
||||
FIXME: See FIXME for property-script-url.
|
||||
elScriptTimestamp.value = properties.scriptTimestamp;
|
||||
*/
|
||||
|
||||
var json = null;
|
||||
try {
|
||||
|
@ -1150,11 +1144,7 @@ function loaded() {
|
|||
|
||||
elLifetime.addEventListener('change', createEmitNumberPropertyUpdateFunction('lifetime'));
|
||||
elScriptURL.addEventListener('change', createEmitTextPropertyUpdateFunction('script'));
|
||||
/*
|
||||
FIXME: See FIXME for property-script-url.
|
||||
elScriptTimestamp.addEventListener('change', createEmitNumberPropertyUpdateFunction('scriptTimestamp'));
|
||||
*/
|
||||
|
||||
|
||||
elClearUserData.addEventListener("click", function() {
|
||||
deleteJSONEditor();
|
||||
|
@ -1171,11 +1161,8 @@ function loaded() {
|
|||
properties: properties,
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
});
|
||||
|
||||
|
||||
elSaveUserData.addEventListener("click", function() {
|
||||
saveJSONUserData(true);
|
||||
});
|
||||
|
@ -1410,15 +1397,12 @@ function loaded() {
|
|||
percentage: parseInt(elRescaleDimensionsPct.value),
|
||||
}));
|
||||
});
|
||||
/*
|
||||
FIXME: See FIXME for property-script-url.
|
||||
elReloadScriptButton.addEventListener("click", function() {
|
||||
EventBridge.emitWebEvent(JSON.stringify({
|
||||
type: "action",
|
||||
action: "reloadScript"
|
||||
}));
|
||||
});
|
||||
*/
|
||||
|
||||
window.onblur = function() {
|
||||
// Fake a change event
|
||||
|
|
|
@ -34,6 +34,7 @@ getControllerWorldLocation = function (handController, doOffset) {
var orientation;
var position;
var pose = Controller.getPoseValue(handController);
var valid = pose.valid;
if (pose.valid) {
orientation = Quat.multiply(MyAvatar.orientation, pose.rotation);
position = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, pose.translation), MyAvatar.position);

@ -41,10 +42,15 @@ getControllerWorldLocation = function (handController, doOffset) {
if (doOffset) {
position = Vec3.sum(position, Vec3.multiplyQbyV(orientation, getGrabPointSphereOffset(handController)));
}
} else if (!HMD.isHandControllerAvailable()) {
position = MyAvatar.getHeadPosition();
orientation = Quat.multiply(MyAvatar.headOrientation, Quat.angleAxis(-90, { x: 1, y: 0, z: 0 }));
valid = true;
}

return {position: position,
translation: position,
orientation: orientation,
rotation: orientation,
valid: pose.valid};
valid: valid};
};
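// Sketch of how a caller can lean on the new head-pose fallback (hypothetical example, not from this commit):
var rightHand = getControllerWorldLocation(Controller.Standard.RightHand, true);
if (rightHand.valid) {
    // With hand controllers this is the offset hand pose; without them it is the head-based substitute,
    // so HUD/laser code can treat both cases the same way.
    print("laser origin: " + JSON.stringify(rightHand.position));
}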

@ -1,6 +1,6 @@
{
"name": "hf-console",
"description": "High Fidelity Console",
"name": "HighFidelitySandbox",
"description": "High Fidelity Sandbox",
"author": "High Fidelity",
"license": "Apache-2.0",
"version": "1.0.0",

@ -33,6 +33,7 @@
"request": "^2.67.0",
"request-progress": "1.0.2",
"tar-fs": "^1.12.0",
"yargs": "^3.30.0"
"yargs": "^3.30.0",
"electron-log": "1.1.1"
}
}

@ -42,7 +42,7 @@ const appIcon = path.join(__dirname, '../resources/console.png');
|
|||
const DELETE_LOG_FILES_OLDER_THAN_X_SECONDS = 60 * 60 * 24 * 7; // 7 Days
|
||||
const LOG_FILE_REGEX = /(domain-server|ac-monitor|ac)-.*-std(out|err).txt/;
|
||||
|
||||
const HOME_CONTENT_URL = "http://cachefly.highfidelity.com/home-tutorial-9.tar.gz";
|
||||
const HOME_CONTENT_URL = "http://cachefly.highfidelity.com/home-tutorial-release-5572.tar.gz";
|
||||
|
||||
function getBuildInfo() {
|
||||
var buildInfoPath = null;
|
||||
|
@ -64,7 +64,6 @@ function getBuildInfo() {
|
|||
var buildInfo = DEFAULT_BUILD_INFO;
|
||||
|
||||
if (buildInfoPath) {
|
||||
console.log('Build info path:', buildInfoPath);
|
||||
try {
|
||||
buildInfo = JSON.parse(fs.readFileSync(buildInfoPath));
|
||||
} catch (e) {
|
||||
|
@ -74,11 +73,8 @@ function getBuildInfo() {
|
|||
|
||||
return buildInfo;
|
||||
}
|
||||
|
||||
const buildInfo = getBuildInfo();
|
||||
|
||||
console.log("build info", buildInfo);
|
||||
|
||||
function getRootHifiDataDirectory() {
|
||||
var organization = "High Fidelity";
|
||||
if (buildInfo.releaseType != "PRODUCTION") {
|
||||
|
@ -105,16 +101,26 @@ function getApplicationDataDirectory() {
|
|||
return path.join(getRootHifiDataDirectory(), '/Server Console');
|
||||
}
|
||||
|
||||
console.log("Root hifi directory is: ", getRootHifiDataDirectory());
|
||||
// Configure log
|
||||
global.log = require('electron-log');
|
||||
const logFile = getApplicationDataDirectory() + '/log.txt';
|
||||
fs.ensureFileSync(logFile); // Ensure file exists
|
||||
log.transports.file.maxSize = 5 * 1024 * 1024;
|
||||
log.transports.file.file = logFile;
|
||||
|
||||
log.debug("build info", buildInfo);
|
||||
log.debug("Root hifi directory is: ", getRootHifiDataDirectory());
|
||||
|
||||
const ipcMain = electron.ipcMain;
|
||||
|
||||
|
||||
var isShuttingDown = false;
|
||||
function shutdown() {
|
||||
log.debug("Normal shutdown (isShuttingDown: " + isShuttingDown + ")");
|
||||
if (!isShuttingDown) {
|
||||
// if the home server is running, show a prompt before quit to ask if the user is sure
|
||||
if (homeServer.state == ProcessGroupStates.STARTED) {
|
||||
log.debug("Showing shutdown dialog.");
|
||||
dialog.showMessageBox({
|
||||
type: 'question',
|
||||
buttons: ['Yes', 'No'],
|
||||
|
@ -129,21 +135,26 @@ function shutdown() {
|
|||
}
|
||||
|
||||
function forcedShutdown() {
|
||||
log.debug("Forced shutdown (isShuttingDown: " + isShuttingDown + ")");
|
||||
if (!isShuttingDown) {
|
||||
shutdownCallback(0);
|
||||
}
|
||||
}
|
||||
|
||||
function shutdownCallback(idx) {
|
||||
log.debug("Entering shutdown callback.");
|
||||
if (idx == 0 && !isShuttingDown) {
|
||||
isShuttingDown = true;
|
||||
|
||||
log.debug("Saving user config");
|
||||
userConfig.save(configPath);
|
||||
|
||||
if (logWindow) {
|
||||
log.debug("Closing log window");
|
||||
logWindow.close();
|
||||
}
|
||||
if (homeServer) {
|
||||
log.debug("Stoping home server");
|
||||
homeServer.stop();
|
||||
}
|
||||
|
||||
|
@ -151,14 +162,17 @@ function shutdownCallback(idx) {
|
|||
|
||||
if (homeServer.state == ProcessGroupStates.STOPPED) {
|
||||
// if the home server is already down, take down the server console now
|
||||
log.debug("Quitting.");
|
||||
app.quit();
|
||||
} else {
|
||||
// if the home server is still running, wait until we get a state change or timeout
|
||||
// before quitting the app
|
||||
log.debug("Server still shutting down. Waiting");
|
||||
var timeoutID = setTimeout(app.quit, 5000);
|
||||
homeServer.on('state-update', function(processGroup) {
|
||||
if (processGroup.state == ProcessGroupStates.STOPPED) {
|
||||
clearTimeout(timeoutID);
|
||||
log.debug("Quitting.");
|
||||
app.quit();
|
||||
}
|
||||
});
|
||||
|
@ -167,36 +181,36 @@ function shutdownCallback(idx) {
|
|||
}
|
||||
|
||||
function deleteOldFiles(directoryPath, maxAgeInSeconds, filenameRegex) {
|
||||
console.log("Deleting old log files in " + directoryPath);
|
||||
log.debug("Deleting old log files in " + directoryPath);
|
||||
|
||||
var filenames = [];
|
||||
try {
|
||||
filenames = fs.readdirSync(directoryPath);
|
||||
} catch (e) {
|
||||
console.warn("Error reading contents of log file directory", e);
|
||||
log.warn("Error reading contents of log file directory", e);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const filename of filenames) {
|
||||
console.log("Checking", filename);
|
||||
log.debug("Checking", filename);
|
||||
const absolutePath = path.join(directoryPath, filename);
|
||||
var stat = null;
|
||||
try {
|
||||
stat = fs.statSync(absolutePath);
|
||||
} catch (e) {
|
||||
console.log("Error stat'ing file", absolutePath, e);
|
||||
log.debug("Error stat'ing file", absolutePath, e);
|
||||
continue;
|
||||
}
|
||||
const curTime = Date.now();
|
||||
if (stat.isFile() && filename.search(filenameRegex) >= 0) {
|
||||
const ageInSeconds = (curTime - stat.mtime.getTime()) / 1000.0;
|
||||
if (ageInSeconds >= maxAgeInSeconds) {
|
||||
console.log("\tDeleting:", filename, ageInSeconds);
|
||||
log.debug("\tDeleting:", filename, ageInSeconds);
|
||||
try {
|
||||
fs.unlinkSync(absolutePath);
|
||||
} catch (e) {
|
||||
if (e.code != 'EBUSY') {
|
||||
console.warn("\tError deleting:", e);
|
||||
log.warn("\tError deleting:", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -206,8 +220,8 @@ function deleteOldFiles(directoryPath, maxAgeInSeconds, filenameRegex) {
|
|||
|
||||
var logPath = path.join(getApplicationDataDirectory(), '/logs');
|
||||
|
||||
console.log("Log directory:", logPath);
|
||||
console.log("Data directory:", getRootHifiDataDirectory());
|
||||
log.debug("Log directory:", logPath);
|
||||
log.debug("Data directory:", getRootHifiDataDirectory());
|
||||
|
||||
const configPath = path.join(getApplicationDataDirectory(), 'config.json');
|
||||
var userConfig = new Config();
|
||||
|
@ -215,8 +229,8 @@ userConfig.load(configPath);
|
|||
|
||||
// print out uncaught exceptions in the console
|
||||
process.on('uncaughtException', function(err) {
|
||||
console.error(err);
|
||||
console.error(err.stack);
|
||||
log.error(err);
|
||||
log.error(err.stack);
|
||||
});
|
||||
|
||||
var shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory) {
|
||||
|
@ -225,7 +239,7 @@ var shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory)
|
|||
});
|
||||
|
||||
if (shouldQuit) {
|
||||
console.warn("Another instance of the Sandbox is already running - this instance will quit.");
|
||||
log.warn("Another instance of the Sandbox is already running - this instance will quit.");
|
||||
app.quit();
|
||||
return;
|
||||
}
|
||||
|
@ -506,7 +520,7 @@ const httpStatusPort = 60332;
|
|||
function backupResourceDirectories(folder) {
|
||||
try {
|
||||
fs.mkdirSync(folder);
|
||||
console.log("Created directory " + folder);
|
||||
log.debug("Created directory " + folder);
|
||||
|
||||
var dsBackup = path.join(folder, '/domain-server');
|
||||
var acBackup = path.join(folder, '/assignment-client');
|
||||
|
@ -519,7 +533,7 @@ function backupResourceDirectories(folder) {
|
|||
|
||||
return true;
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
log.debug(e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -541,7 +555,7 @@ function openBackupInstructions(folder) {
|
|||
window.setSize(obj.width, obj.height);
|
||||
});
|
||||
electron.ipcMain.on('ready', function() {
|
||||
console.log("got ready");
|
||||
log.debug("got ready");
|
||||
window.webContents.send('update', folder);
|
||||
});
|
||||
}
|
||||
|
@ -575,9 +589,9 @@ function checkNewContent() {
|
|||
|
||||
var wantDebug = false;
|
||||
if (wantDebug) {
|
||||
console.log('Last Modified: ' + response.headers['last-modified']);
|
||||
console.log(localContent + " " + remoteContent + " " + shouldUpdate + " " + new Date());
|
||||
console.log("Remote content is " + (shouldUpdate ? "newer" : "older") + " that local content.");
|
||||
log.debug('Last Modified: ' + response.headers['last-modified']);
|
||||
log.debug(localContent + " " + remoteContent + " " + shouldUpdate + " " + new Date());
|
||||
log.debug("Remote content is " + (shouldUpdate ? "newer" : "older") + " that local content.");
|
||||
}
|
||||
|
||||
if (shouldUpdate) {
|
||||
|
@ -619,46 +633,46 @@ function maybeInstallDefaultContentSet(onComplete) {
|
|||
// Check for existing data
|
||||
const acResourceDirectory = getAssignmentClientResourcesDirectory();
|
||||
|
||||
console.log("Checking for existence of " + acResourceDirectory);
|
||||
log.debug("Checking for existence of " + acResourceDirectory);
|
||||
|
||||
var userHasExistingACData = true;
|
||||
try {
|
||||
fs.accessSync(acResourceDirectory);
|
||||
console.log("Found directory " + acResourceDirectory);
|
||||
log.debug("Found directory " + acResourceDirectory);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
log.debug(e);
|
||||
userHasExistingACData = false;
|
||||
}
|
||||
|
||||
const dsResourceDirectory = getDomainServerClientResourcesDirectory();
|
||||
|
||||
console.log("checking for existence of " + dsResourceDirectory);
|
||||
log.debug("checking for existence of " + dsResourceDirectory);
|
||||
|
||||
var userHasExistingDSData = true;
|
||||
try {
|
||||
fs.accessSync(dsResourceDirectory);
|
||||
console.log("Found directory " + dsResourceDirectory);
|
||||
log.debug("Found directory " + dsResourceDirectory);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
log.debug(e);
|
||||
userHasExistingDSData = false;
|
||||
}
|
||||
|
||||
if (userHasExistingACData || userHasExistingDSData) {
|
||||
console.log("User has existing data, suppressing downloader");
|
||||
log.debug("User has existing data, suppressing downloader");
|
||||
onComplete();
|
||||
|
||||
checkNewContent();
|
||||
return;
|
||||
}
|
||||
|
||||
console.log("Found contentPath:" + argv.contentPath);
|
||||
log.debug("Found contentPath:" + argv.contentPath);
|
||||
if (argv.contentPath) {
|
||||
fs.copy(argv.contentPath, getRootHifiDataDirectory(), function (err) {
|
||||
if (err) {
|
||||
console.log('Could not copy home content: ' + err);
|
||||
return console.error(err)
|
||||
log.debug('Could not copy home content: ' + err);
|
||||
return log.error(err)
|
||||
}
|
||||
console.log('Copied home content over to: ' + getRootHifiDataDirectory());
|
||||
log.debug('Copied home content over to: ' + getRootHifiDataDirectory());
|
||||
userConfig.set('homeContentLastModified', new Date());
|
||||
onComplete();
|
||||
});
|
||||
|
@ -685,11 +699,11 @@ function maybeInstallDefaultContentSet(onComplete) {
|
|||
window.on('closed', onComplete);
|
||||
|
||||
electron.ipcMain.on('ready', function() {
|
||||
console.log("got ready");
|
||||
log.debug("got ready");
|
||||
var currentState = '';
|
||||
|
||||
function sendStateUpdate(state, args) {
|
||||
// console.log(state, window, args);
|
||||
// log.debug(state, window, args);
|
||||
window.webContents.send('update', { state: state, args: args });
|
||||
currentState = state;
|
||||
}
|
||||
|
@ -723,10 +737,10 @@ function maybeInstallDefaultContentSet(onComplete) {
|
|||
});
|
||||
|
||||
function extractError(err) {
|
||||
console.log("Aborting request because gunzip/untar failed");
|
||||
log.debug("Aborting request because gunzip/untar failed");
|
||||
aborted = true;
|
||||
req.abort();
|
||||
console.log("ERROR" + err);
|
||||
log.debug("ERROR" + err);
|
||||
|
||||
sendStateUpdate('error', {
|
||||
message: "Error installing resources."
|
||||
|
@ -738,7 +752,7 @@ function maybeInstallDefaultContentSet(onComplete) {
|
|||
|
||||
req.pipe(gunzip).pipe(tar.extract(getRootHifiDataDirectory())).on('error', extractError).on('finish', function(){
|
||||
// response and decompression complete, return
|
||||
console.log("Finished unarchiving home content set");
|
||||
log.debug("Finished unarchiving home content set");
|
||||
userConfig.set('homeContentLastModified', new Date());
|
||||
sendStateUpdate('complete');
|
||||
});
|
||||
|
@ -824,7 +838,7 @@ function onContentLoaded() {
|
|||
}
|
||||
});
|
||||
notifier.on('click', function(notifierObject, options) {
|
||||
console.log("Got click", options.url);
|
||||
log.debug("Got click", options.url);
|
||||
shell.openExternal(options.url);
|
||||
});
|
||||
}
|
||||
|
@ -855,7 +869,7 @@ function onContentLoaded() {
|
|||
// shutting down. The interface app will regularly update a running state file which we will check.
|
||||
// If the file doesn't exist or stops updating for a significant amount of time, we will shut down.
|
||||
if (argv.shutdownWatcher) {
|
||||
console.log("Shutdown watcher requested... argv.shutdownWatcher:", argv.shutdownWatcher);
|
||||
log.debug("Shutdown watcher requested... argv.shutdownWatcher:", argv.shutdownWatcher);
|
||||
var MAX_TIME_SINCE_EDIT = 5000; // 5 seconds between updates
|
||||
var firstAttemptToCheck = new Date().getTime();
|
||||
var shutdownWatchInterval = setInterval(function(){
|
||||
|
@ -863,14 +877,14 @@ function onContentLoaded() {
|
|||
if (err) {
|
||||
var sinceFirstCheck = new Date().getTime() - firstAttemptToCheck;
|
||||
if (sinceFirstCheck > MAX_TIME_SINCE_EDIT) {
|
||||
console.log("Running state file is missing, assume interface has shutdown... shutting down snadbox.");
|
||||
log.debug("Running state file is missing, assume interface has shutdown... shutting down snadbox.");
|
||||
forcedShutdown();
|
||||
clearTimeout(shutdownWatchInterval);
|
||||
}
|
||||
} else {
|
||||
var sinceEdit = new Date().getTime() - stats.mtime.getTime();
|
||||
if (sinceEdit > MAX_TIME_SINCE_EDIT) {
|
||||
console.log("Running state of interface hasn't updated in MAX time... shutting down.");
|
||||
log.debug("Running state of interface hasn't updated in MAX time... shutting down.");
|
||||
forcedShutdown();
|
||||
clearTimeout(shutdownWatchInterval);
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ Config.prototype = {
|
|||
try {
|
||||
rawData = fs.readFileSync(filePath);
|
||||
} catch(e) {
|
||||
console.log("Config file not found");
|
||||
log.debug("Config file not found");
|
||||
}
|
||||
var configData = {};
|
||||
|
||||
|
@ -21,7 +21,7 @@ Config.prototype = {
|
|||
configData = {};
|
||||
}
|
||||
} catch(e) {
|
||||
console.error("Error parsing config file", filePath)
|
||||
log.error("Error parsing config file", filePath)
|
||||
}
|
||||
|
||||
this.data = {};
|
||||
|
@ -37,7 +37,7 @@ Config.prototype = {
|
|||
return defaultValue;
|
||||
},
|
||||
set: function(key, value) {
|
||||
console.log("Setting", key, "to", value);
|
||||
log.debug("Setting", key, "to", value);
|
||||
this.data[key] = value;
|
||||
}
|
||||
};
|
||||
|

36
server-console/src/modules/hf-process.js
Executable file → Normal file

@ -43,7 +43,7 @@ ProcessGroup.prototype = extend(ProcessGroup.prototype, {
|
|||
},
|
||||
start: function() {
|
||||
if (this.state != ProcessGroupStates.STOPPED) {
|
||||
console.warn("Can't start process group that is not stopped.");
|
||||
log.warn("Can't start process group that is not stopped.");
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -56,7 +56,7 @@ ProcessGroup.prototype = extend(ProcessGroup.prototype, {
|
|||
},
|
||||
stop: function() {
|
||||
if (this.state != ProcessGroupStates.STARTED) {
|
||||
console.warn("Can't stop process group that is not started.");
|
||||
log.warn("Can't stop process group that is not started.");
|
||||
return;
|
||||
}
|
||||
for (let process of this.processes) {
|
||||
|
@ -120,10 +120,10 @@ util.inherits(Process, events.EventEmitter);
|
|||
Process.prototype = extend(Process.prototype, {
|
||||
start: function() {
|
||||
if (this.state != ProcessStates.STOPPED) {
|
||||
console.warn("Can't start process that is not stopped.");
|
||||
log.warn("Can't start process that is not stopped.");
|
||||
return;
|
||||
}
|
||||
console.log("Starting " + this.command + " " + this.commandArgs.join(' '));
|
||||
log.debug("Starting " + this.command + " " + this.commandArgs.join(' '));
|
||||
|
||||
var logStdout = 'ignore',
|
||||
logStderr = 'ignore';
|
||||
|
@ -138,7 +138,7 @@ Process.prototype = extend(Process.prototype, {
|
|||
if (e.code == 'EEXIST') {
|
||||
logDirectoryCreated = true;
|
||||
} else {
|
||||
console.error("Error creating log directory");
|
||||
log.error("Error creating log directory");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -151,13 +151,13 @@ Process.prototype = extend(Process.prototype, {
|
|||
try {
|
||||
logStdout = fs.openSync(tmpLogStdout, 'ax');
|
||||
} catch(e) {
|
||||
console.log("Error creating stdout log file", e);
|
||||
log.debug("Error creating stdout log file", e);
|
||||
logStdout = 'ignore';
|
||||
}
|
||||
try {
|
||||
logStderr = fs.openSync(tmpLogStderr, 'ax');
|
||||
} catch(e) {
|
||||
console.log("Error creating stderr log file", e);
|
||||
log.debug("Error creating stderr log file", e);
|
||||
logStderr = 'ignore';
|
||||
}
|
||||
}
|
||||
|
@ -169,7 +169,7 @@ Process.prototype = extend(Process.prototype, {
|
|||
stdio: ['ignore', logStdout, logStderr]
|
||||
});
|
||||
} catch (e) {
|
||||
console.log("Got error starting child process for " + this.name, e);
|
||||
log.debug("Got error starting child process for " + this.name, e);
|
||||
this.child = null;
|
||||
this.updateState(ProcessStates.STOPPED);
|
||||
return;
|
||||
|
@ -179,7 +179,7 @@ Process.prototype = extend(Process.prototype, {
|
|||
var pidLogStdout = path.resolve(this.logDirectory + '/' + this.name + "-" + this.child.pid + "-" + time + "-stdout.txt");
|
||||
fs.rename(tmpLogStdout, pidLogStdout, function(e) {
|
||||
if (e !== null) {
|
||||
console.log("Error renaming log file from " + tmpLogStdout + " to " + pidLogStdout, e);
|
||||
log.debug("Error renaming log file from " + tmpLogStdout + " to " + pidLogStdout, e);
|
||||
}
|
||||
});
|
||||
this.logStdout = pidLogStdout;
|
||||
|
@ -190,7 +190,7 @@ Process.prototype = extend(Process.prototype, {
|
|||
var pidLogStderr = path.resolve(this.logDirectory + '/' + this.name + "-" + this.child.pid + "-" + time + "-stderr.txt");
|
||||
fs.rename(tmpLogStderr, pidLogStderr, function(e) {
|
||||
if (e !== null) {
|
||||
console.log("Error renaming log file from " + tmpLogStdout + " to " + pidLogStdout, e);
|
||||
log.debug("Error renaming log file from " + tmpLogStdout + " to " + pidLogStdout, e);
|
||||
}
|
||||
});
|
||||
this.logStderr = pidLogStderr;
|
||||
|
@ -201,13 +201,13 @@ Process.prototype = extend(Process.prototype, {
|
|||
this.child.on('error', this.onChildStartError.bind(this));
|
||||
this.child.on('close', this.onChildClose.bind(this));
|
||||
|
||||
console.log("Child process started");
|
||||
log.debug("Child process started");
|
||||
this.updateState(ProcessStates.STARTED);
|
||||
this.emit('logs-updated');
|
||||
},
|
||||
stop: function(force) {
|
||||
if (this.state == ProcessStates.STOPPED) {
|
||||
console.warn("Can't stop process that is not started or stopping.");
|
||||
log.warn("Can't stop process that is not started or stopping.");
|
||||
return;
|
||||
}
|
||||
if (os.type() == "Windows_NT") {
|
||||
|
@ -217,7 +217,7 @@ Process.prototype = extend(Process.prototype, {
|
|||
}
|
||||
childProcess.exec(command, {}, function(error) {
|
||||
if (error) {
|
||||
console.error('Error executing taskkill:', error);
|
||||
log.error('Error executing taskkill:', error);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
|
@ -225,12 +225,12 @@ Process.prototype = extend(Process.prototype, {
|
|||
this.child.kill(signal);
|
||||
}
|
||||
|
||||
console.log("Stopping child process:", this.child.pid, this.name);
|
||||
log.debug("Stopping child process:", this.child.pid, this.name);
|
||||
|
||||
if (!force) {
|
||||
this.stoppingTimeoutID = setTimeout(function() {
|
||||
if (this.state == ProcessStates.STOPPING) {
|
||||
console.log("Force killling", this.name, this.child.pid);
|
||||
log.debug("Force killling", this.name, this.child.pid);
|
||||
this.stop(true);
|
||||
}
|
||||
}.bind(this), 2500);
|
||||
|
@ -257,11 +257,11 @@ Process.prototype = extend(Process.prototype, {
|
|||
|
||||
// Events
|
||||
onChildStartError: function(error) {
|
||||
console.log("Child process error ", error);
|
||||
log.debug("Child process error ", error);
|
||||
this.updateState(ProcessStates.STOPPED);
|
||||
},
|
||||
onChildClose: function(code) {
|
||||
console.log("Child process closed with code ", code, this.name);
|
||||
log.debug("Child process closed with code ", code, this.name);
|
||||
if (this.stoppingTimeoutID) {
|
||||
clearTimeout(this.stoppingTimeoutID);
|
||||
this.stoppingTimeoutID = null;
|
||||
|
@ -332,7 +332,7 @@ ACMonitorProcess.prototype = extend(ACMonitorProcess.prototype, {
|
|||
this.pendingRequest = null;
|
||||
|
||||
if (error) {
|
||||
console.error('ERROR Getting AC Monitor status', error);
|
||||
log.error('ERROR Getting AC Monitor status', error);
|
||||
} else {
|
||||
this.childServers = body.servers;
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ const BUILDS_URL = 'https://highfidelity.com/builds.xml';
|
|||
|
||||
function UpdateChecker(currentVersion, checkForUpdatesEveryXSeconds) {
|
||||
this.currentVersion = currentVersion;
|
||||
console.log('cur', currentVersion);
|
||||
log.debug('cur', currentVersion);
|
||||
|
||||
setInterval(this.checkForUpdates.bind(this), checkForUpdatesEveryXSeconds * 1000);
|
||||
this.checkForUpdates();
|
||||
|
@ -19,10 +19,10 @@ function UpdateChecker(currentVersion, checkForUpdatesEveryXSeconds) {
|
|||
util.inherits(UpdateChecker, events.EventEmitter);
|
||||
UpdateChecker.prototype = extend(UpdateChecker.prototype, {
|
||||
checkForUpdates: function() {
|
||||
console.log("Checking for updates");
|
||||
log.debug("Checking for updates");
|
||||
request(BUILDS_URL, (error, response, body) => {
|
||||
if (error) {
|
||||
console.log("Error", error);
|
||||
log.debug("Error", error);
|
||||
return;
|
||||
}
|
||||
if (response.statusCode == 200) {
|
||||
|
@ -30,13 +30,13 @@ UpdateChecker.prototype = extend(UpdateChecker.prototype, {
|
|||
var $ = cheerio.load(body, { xmlMode: true });
|
||||
const latestBuild = $('project[name="interface"] platform[name="' + platform + '"]').children().first();
|
||||
const latestVersion = parseInt(latestBuild.find('version').text());
|
||||
console.log("Latest version is:", latestVersion, this.currentVersion);
|
||||
log.debug("Latest version is:", latestVersion, this.currentVersion);
|
||||
if (latestVersion > this.currentVersion) {
|
||||
const url = latestBuild.find('url').text();
|
||||
this.emit('update-available', latestVersion, url);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn("Error when checking for updates", e);
|
||||
log.warn("Error when checking for updates", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -72,11 +72,11 @@ exports.discoveredPath = function (name, binaryType, releaseType) {
|
|||
var extension = platformExtension(name);
|
||||
|
||||
if (stats.isFile() || (stats.isDirectory() && extension == ".app")) {
|
||||
console.log("Found " + name + " at " + testPath);
|
||||
log.debug("Found " + name + " at " + testPath);
|
||||
return testPath;
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("Executable with name " + name + " not found at path " + testPath);
|
||||
log.debug("Executable with name " + name + " not found at path " + testPath);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -164,11 +164,59 @@ class MyTestWindow : public TestWindow {
|
|||
}
|
||||
};
|
||||
|
||||
extern bool needsSparseRectification(const uvec2& size);
|
||||
extern uvec2 rectifyToSparseSize(const uvec2& size);
|
||||
|
||||
int main(int argc, char** argv) {
|
||||
void testSparseRectify() {
|
||||
std::vector<std::pair<uvec2, bool>> NEEDS_SPARSE_TESTS {{
|
||||
// Already sparse
|
||||
{ {1024, 1024 }, false },
|
||||
{ { 128, 128 }, false },
|
||||
// Too small in one dimension
|
||||
{ { 127, 127 }, false },
|
||||
{ { 1, 1 }, false },
|
||||
{ { 1000, 1 }, false },
|
||||
{ { 1024, 1 }, false },
|
||||
{ { 100, 100 }, false },
|
||||
// needs rectification
|
||||
{ { 1000, 1000 }, true },
|
||||
{ { 1024, 1000 }, true },
|
||||
} };
|
||||
|
||||
for (const auto& test : NEEDS_SPARSE_TESTS) {
|
||||
const auto& size = test.first;
|
||||
const auto& expected = test.second;
|
||||
auto result = needsSparseRectification(size);
|
||||
Q_ASSERT(expected == result);
|
||||
result = needsSparseRectification(uvec2(size.y, size.x));
|
||||
Q_ASSERT(expected == result);
|
||||
}
|
||||
|
||||
std::vector<std::pair<uvec2, uvec2>> SPARSE_SIZE_TESTS { {
|
||||
// needs rectification
|
||||
{ { 1000, 1000 }, { 1024, 1024 } },
|
||||
{ { 1024, 1000 }, { 1024, 1024 } },
|
||||
} };
|
||||
|
||||
for (const auto& test : SPARSE_SIZE_TESTS) {
|
||||
const auto& size = test.first;
|
||||
const auto& expected = test.second;
|
||||
auto result = rectifyToSparseSize(size);
|
||||
Q_ASSERT(expected == result);
|
||||
result = rectifyToSparseSize(uvec2(size.y, size.x));
|
||||
Q_ASSERT(expected == uvec2(result.y, result.x));
|
||||
}
|
||||
}
|
||||
|
||||
int main(int argc, char** argv) {
|
||||
testSparseRectify();
|
||||
|
||||
// FIXME this test appears to be broken
|
||||
#if 0
|
||||
QGuiApplication app(argc, argv);
|
||||
MyTestWindow window;
|
||||
app.exec();
|
||||
#endif
|
||||
return 0;
|
||||
}

45
unpublishedScripts/DomainContent/Home/portal.js
Normal file

@ -0,0 +1,45 @@
(function(){
var teleport;
var portalDestination;

function playSound() {
Audio.playSound(teleport, { volume: 0.40, localOnly: true });
};

this.preload = function(entityID) {
teleport = SoundCache.getSound("atp:/sounds/teleport.raw");

var properties = Entities.getEntityProperties(entityID);
portalDestination = properties.userData;

print("portal.js | The portal destination is " + portalDestination);
}

this.enterEntity = function(entityID) {
print("portal.js | enterEntity");

var properties = Entities.getEntityProperties(entityID); // in case the userData/portalURL has changed
portalDestination = properties.userData;

print("portal.js | enterEntity() .... The portal destination is " + portalDestination);

if (portalDestination.length > 0) {
if (portalDestination[0] == '/') {
print("Teleporting to " + portalDestination);
Window.location = portalDestination;
} else {
print("Teleporting to hifi://" + portalDestination);
Window.location = "hifi://" + portalDestination;
}
} else {
location.goToEntry(); // going forward: no data means go to appropriate entry point
}

};

this.leaveEntity = function(entityID) {
print("portal.js | leaveEntity");

playSound();
};
})
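// Usage sketch (assumption, not defined by this file): the portal entity's userData is expected to hold the
// destination, e.g. a path such as "/1000,1000,1000" or a place name such as "welcome"; an empty value sends
// the visitor to the domain's entry point via location.goToEntry(). A hypothetical setup call:
// Entities.editEntity(portalEntityID, { userData: "welcome" });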