refactor application.cpp

HifiExperiments 2024-04-02 22:21:22 -07:00
parent 9da839fe48
commit e6d7ffdd76
32 changed files with 9319 additions and 9024 deletions

File diff suppressed because it is too large.

File diff suppressed because it is too large.

interface/src/ApplicationEventHandler.h

@@ -0,0 +1,77 @@
//
// ApplicationEventHandler.h
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#ifndef hifi_ApplicationEventHandler_h
#define hifi_ApplicationEventHandler_h
#include <QtCore/QAbstractNativeEventFilter>
#include <qsystemdetection.h>
#include <MainWindow.h>
#include "Application.h"
#ifdef Q_OS_WIN
static const UINT UWM_IDENTIFY_INSTANCES =
RegisterWindowMessage("UWM_IDENTIFY_INSTANCES_{8AB82783-B74A-4258-955B-8188C22AA0D6}_" + qgetenv("USERNAME"));
static const UINT UWM_SHOW_APPLICATION =
RegisterWindowMessage("UWM_SHOW_APPLICATION_{71123FD6-3DA8-4DC1-9C27-8A12A6250CBA}_" + qgetenv("USERNAME"));
class MyNativeEventFilter : public QAbstractNativeEventFilter {
public:
static MyNativeEventFilter& getInstance() {
static MyNativeEventFilter staticInstance;
return staticInstance;
}
bool nativeEventFilter(const QByteArray &eventType, void* msg, long* result) Q_DECL_OVERRIDE {
if (eventType == "windows_generic_MSG") {
MSG* message = (MSG*)msg;
if (message->message == UWM_IDENTIFY_INSTANCES) {
*result = UWM_IDENTIFY_INSTANCES;
return true;
}
if (message->message == UWM_SHOW_APPLICATION) {
MainWindow* applicationWindow = qApp->getWindow();
if (applicationWindow->isMinimized()) {
applicationWindow->showNormal(); // Restores to windowed or maximized state appropriately.
}
qApp->setActiveWindow(applicationWindow); // Flashes the taskbar icon if the window is not in focus.
return true;
}
// Attempting to close MIDI interfaces of a hot-unplugged device can result in audio-driver deadlock.
// Detection of MIDI devices that are added/removed after Interface starts has been disabled.
// https://support.microsoft.com/en-us/help/4460006/midi-device-app-hangs-when-former-midi-api-is-used
#if 0
if (message->message == WM_DEVICECHANGE) {
const float MIN_DELTA_SECONDS = 2.0f; // de-bounce signal
static float lastTriggerTime = 0.0f;
const float deltaSeconds = secTimestampNow() - lastTriggerTime;
lastTriggerTime = secTimestampNow();
if (deltaSeconds > MIN_DELTA_SECONDS) {
Midi::USBchanged(); // re-scan the MIDI bus
}
}
#endif
}
return false;
}
};
#endif
#endif // hifi_ApplicationEventHandler_h
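A note on wiring: the filter above only receives native messages once it is installed on the application object. A minimal sketch of the assumed registration (the actual call site lives in the Application startup code and is not shown in this commit):

#ifdef Q_OS_WIN
    // Assumed registration: installNativeEventFilter() routes every windows_generic_MSG
    // through MyNativeEventFilter::nativeEventFilter() before Qt dispatches it, which is
    // what makes the UWM_* instance messages above work.
    qApp->installNativeEventFilter(&MyNativeEventFilter::getInstance());
#endif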

interface/src/ApplicationMeshProvider.h

@@ -0,0 +1,67 @@
//
// ApplicationMeshProvider.h
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#ifndef hifi_ApplicationMeshProvider_h
#define hifi_ApplicationMeshProvider_h
#include <graphics/Forward.h>
class ApplicationMeshProvider : public scriptable::ModelProviderFactory {
public:
virtual scriptable::ModelProviderPointer lookupModelProvider(const QUuid& uuid) override {
bool success;
if (auto nestable = DependencyManager::get<SpatialParentFinder>()->find(uuid, success).lock()) {
auto type = nestable->getNestableType();
#ifdef SCRIPTABLE_MESH_DEBUG
qCDebug(interfaceapp) << "ApplicationMeshProvider::lookupModelProvider" << uuid << SpatiallyNestable::nestableTypeToString(type);
#endif
switch (type) {
case NestableType::Entity:
return getEntityModelProvider(static_cast<EntityItemID>(uuid));
case NestableType::Avatar:
return getAvatarModelProvider(uuid);
}
}
return nullptr;
}
private:
scriptable::ModelProviderPointer getEntityModelProvider(EntityItemID entityID) {
scriptable::ModelProviderPointer provider;
auto entityTreeRenderer = qApp->getEntities();
auto entityTree = entityTreeRenderer->getTree();
if (auto entity = entityTree->findEntityByID(entityID)) {
if (auto renderer = entityTreeRenderer->renderableForEntityId(entityID)) {
provider = std::dynamic_pointer_cast<scriptable::ModelProvider>(renderer);
provider->modelProviderType = NestableType::Entity;
} else {
qCWarning(interfaceapp) << "no renderer for entity ID" << entityID.toString();
}
}
return provider;
}
scriptable::ModelProviderPointer getAvatarModelProvider(QUuid sessionUUID) {
scriptable::ModelProviderPointer provider;
auto avatarManager = DependencyManager::get<AvatarManager>();
if (auto avatar = avatarManager->getAvatarBySessionID(sessionUUID)) {
provider = std::dynamic_pointer_cast<scriptable::ModelProvider>(avatar);
provider->modelProviderType = NestableType::Avatar;
}
return provider;
}
};
#endif // hifi_ApplicationMeshProvider_h
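For orientation, the factory above is meant to be reached through DependencyManager rather than instantiated directly. A hedged sketch of the assumed registration and lookup pattern (the real call sites live elsewhere in the interface startup and scripting code; someUuid is a placeholder):

// Assumed wiring, following the project's usual DependencyManager pattern:
DependencyManager::registerInheritance<scriptable::ModelProviderFactory, ApplicationMeshProvider>();
DependencyManager::set<ApplicationMeshProvider>();

// Consumers can then resolve the factory by its interface type and look up a provider:
QUuid someUuid; // placeholder for an entity ID or avatar session UUID
auto factory = DependencyManager::get<scriptable::ModelProviderFactory>();
if (factory) {
    scriptable::ModelProviderPointer provider = factory->lookupModelProvider(someUuid);
}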

interface/src/Application_Assets.cpp

@@ -0,0 +1,523 @@
//
// Application_Assets.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "Application.h"
#include <QQuickItem>
#include <QTemporaryDir>
#include <AddressManager.h>
#include <AssetUpload.h>
#include <MappingRequest.h>
#include "ArchiveDownloadInterface.h"
#include "InterfaceLogging.h"
#include "Menu.h"
#include "ModelPackager.h"
static const QString SVO_EXTENSION = ".svo";
static const QString SVO_JSON_EXTENSION = ".svo.json";
static const QString JSON_EXTENSION = ".json";
static const QString JS_EXTENSION = ".js";
static const QString FST_EXTENSION = ".fst";
static const QString FBX_EXTENSION = ".fbx";
static const QString OBJ_EXTENSION = ".obj";
static const QString JSON_GZ_EXTENSION = ".json.gz";
static const QString CONTENT_ZIP_EXTENSION = ".content.zip";
static const QString ZIP_EXTENSION = ".zip";
static const QString JPG_EXTENSION = ".jpg";
static const QString PNG_EXTENSION = ".png";
static const QString WEB_VIEW_TAG = "noDownload=true";
const std::vector<std::pair<QString, Application::AcceptURLMethod>> Application::_acceptedExtensions {
{ SVO_EXTENSION, &Application::importSVOFromURL },
{ SVO_JSON_EXTENSION, &Application::importSVOFromURL },
{ JSON_EXTENSION, &Application::importJSONFromURL },
{ JS_EXTENSION, &Application::askToLoadScript },
{ FST_EXTENSION, &Application::askToSetAvatarUrl },
{ JSON_GZ_EXTENSION, &Application::askToReplaceDomainContent },
{ CONTENT_ZIP_EXTENSION, &Application::askToReplaceDomainContent },
{ ZIP_EXTENSION, &Application::importFromZIP },
{ JPG_EXTENSION, &Application::importImage },
{ PNG_EXTENSION, &Application::importImage }
};
bool Application::canAcceptURL(const QString& urlString) const {
QUrl url(urlString);
if (url.query().contains(WEB_VIEW_TAG)) {
return false;
} else if (urlString.startsWith(URL_SCHEME_OVERTE)) {
return true;
}
QString lowerPath = url.path().toLower();
for (auto& pair : _acceptedExtensions) {
if (lowerPath.endsWith(pair.first, Qt::CaseInsensitive)) {
return true;
}
}
return false;
}
bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
QUrl url(urlString);
if (url.scheme() == URL_SCHEME_OVERTE) {
// this is a hifi URL - have the AddressManager handle it
QMetaObject::invokeMethod(DependencyManager::get<AddressManager>().data(), "handleLookupString",
Qt::AutoConnection, Q_ARG(const QString&, urlString));
return true;
}
QString lowerPath = url.path().toLower();
for (auto& pair : _acceptedExtensions) {
if (lowerPath.endsWith(pair.first, Qt::CaseInsensitive)) {
AcceptURLMethod method = pair.second;
return (this->*method)(urlString);
}
}
if (defaultUpload && !url.fileName().isEmpty() && url.isLocalFile()) {
showAssetServerWidget(urlString);
}
return defaultUpload;
}
void Application::addAssetToWorldFromURL(QString url) {
QString filename;
if (url.contains("filename")) {
filename = url.section("filename=", 1, 1); // Filename is in "?filename=" parameter at end of URL.
}
if (url.contains("poly.google.com/downloads")) {
filename = url.section('/', -1);
if (url.contains("noDownload")) {
filename.remove(".zip?noDownload=false");
} else {
filename.remove(".zip");
}
}
if (!DependencyManager::get<NodeList>()->getThisNodeCanWriteAssets()) {
QString errorInfo = "You do not have permissions to write to the Asset Server.";
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filename, errorInfo);
return;
}
addAssetToWorldInfo(filename, "Downloading model file " + filename + ".");
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
nullptr, QUrl(url), true, -1, "Application::addAssetToWorldFromURL");
connect(request, &ResourceRequest::finished, this, &Application::addAssetToWorldFromURLRequestFinished);
request->send();
}
void Application::addAssetToWorldFromURLRequestFinished() {
auto request = qobject_cast<ResourceRequest*>(sender());
Q_ASSERT(request != nullptr);
auto url = request->getUrl().toString();
auto result = request->getResult();
QString filename;
bool isBlocks = false;
if (url.contains("filename")) {
filename = url.section("filename=", 1, 1); // Filename is in "?filename=" parameter at end of URL.
}
if (url.contains("poly.google.com/downloads")) {
filename = url.section('/', -1);
if (url.contains("noDownload")) {
filename.remove(".zip?noDownload=false");
} else {
filename.remove(".zip");
}
isBlocks = true;
}
if (result == ResourceRequest::Success) {
QTemporaryDir temporaryDir;
temporaryDir.setAutoRemove(false);
if (temporaryDir.isValid()) {
QString temporaryDirPath = temporaryDir.path();
QString downloadPath = temporaryDirPath + "/" + filename;
QFile tempFile(downloadPath);
if (tempFile.open(QIODevice::WriteOnly)) {
tempFile.write(request->getData());
addAssetToWorldInfoClear(filename); // Remove message from list; next one added will have a different key.
tempFile.close();
qApp->getFileDownloadInterface()->runUnzip(downloadPath, url, true, false, isBlocks);
} else {
QString errorInfo = "Couldn't open temporary file for download";
qWarning(interfaceapp) << errorInfo;
addAssetToWorldError(filename, errorInfo);
}
} else {
QString errorInfo = "Couldn't create temporary directory for download";
qWarning(interfaceapp) << errorInfo;
addAssetToWorldError(filename, errorInfo);
}
} else {
qWarning(interfaceapp) << "Error downloading" << url << ":" << request->getResultString();
addAssetToWorldError(filename, "Error downloading " + filename + " : " + request->getResultString());
}
request->deleteLater();
}
QString filenameFromPath(QString filePath) {
return filePath.right(filePath.length() - filePath.lastIndexOf("/") - 1);
}
void Application::addAssetToWorld(QString path, QString zipFile, bool isZip, bool isBlocks) {
// Automatically upload and add asset to world as an alternative manual process initiated by showAssetServerWidget().
QString mapping;
QString filename = filenameFromPath(path);
if (isZip || isBlocks) {
QString assetName = zipFile.section("/", -1).remove(QRegExp("[.]zip(.*)$"));
QString assetFolder = path.section("model_repo/", -1);
mapping = "/" + assetName + "/" + assetFolder;
} else {
mapping = "/" + filename;
}
// The permission test is repeated because this can be reached via different code paths.
if (!DependencyManager::get<NodeList>()->getThisNodeCanWriteAssets()) {
QString errorInfo = "You do not have permissions to write to the Asset Server.";
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filename, errorInfo);
return;
}
addAssetToWorldInfo(filename, "Adding " + mapping.mid(1) + " to the Asset Server.");
addAssetToWorldWithNewMapping(path, mapping, 0, isZip, isBlocks);
}
void Application::addAssetToWorldUnzipFailure(QString filePath) {
QString filename = filenameFromPath(QUrl(filePath).toLocalFile());
qWarning(interfaceapp) << "Couldn't unzip file" << filePath;
addAssetToWorldError(filename, "Couldn't unzip file " + filename + ".");
}
void Application::addAssetToWorldWithNewMapping(QString filePath, QString mapping, int copy, bool isZip, bool isBlocks) {
auto request = DependencyManager::get<AssetClient>()->createGetMappingRequest(mapping);
QObject::connect(request, &GetMappingRequest::finished, this, [=](GetMappingRequest* request) mutable {
const int MAX_COPY_COUNT = 100; // Limit number of duplicate assets; recursion guard.
auto result = request->getError();
if (result == GetMappingRequest::NotFound) {
addAssetToWorldUpload(filePath, mapping, isZip, isBlocks);
} else if (result != GetMappingRequest::NoError) {
QString errorInfo = "Could not map asset name: "
+ mapping.left(mapping.length() - QString::number(copy).length() - 1);
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
} else if (copy < MAX_COPY_COUNT - 1) {
if (copy > 0) {
mapping = mapping.remove(mapping.lastIndexOf("-"), QString::number(copy).length() + 1);
}
copy++;
mapping = mapping.insert(mapping.lastIndexOf("."), "-" + QString::number(copy));
addAssetToWorldWithNewMapping(filePath, mapping, copy, isZip, isBlocks);
} else {
QString errorInfo = "Too many copies of asset name: "
+ mapping.left(mapping.length() - QString::number(copy).length() - 1);
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
}
request->deleteLater();
});
request->start();
}
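To make the collision-renaming step above concrete, here is the same QString manipulation in isolation; the path is an arbitrary example:

// Illustration only: if "/models/cat.fbx" is already mapped, the loop above produces
// "/models/cat-1.fbx", then "/models/cat-2.fbx", and so on up to MAX_COPY_COUNT.
QString mapping = "/models/cat.fbx";
int copy = 0;
for (int i = 0; i < 3; ++i) {
    if (copy > 0) {
        // strip the previous "-N" suffix before appending the next one
        mapping = mapping.remove(mapping.lastIndexOf("-"), QString::number(copy).length() + 1);
    }
    copy++;
    mapping = mapping.insert(mapping.lastIndexOf("."), "-" + QString::number(copy));
}
// mapping is now "/models/cat-3.fbx"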
void Application::addAssetToWorldUpload(QString filePath, QString mapping, bool isZip, bool isBlocks) {
qInfo(interfaceapp) << "Uploading" << filePath << "to Asset Server as" << mapping;
auto upload = DependencyManager::get<AssetClient>()->createUpload(filePath);
QObject::connect(upload, &AssetUpload::finished, this, [=](AssetUpload* upload, const QString& hash) mutable {
if (upload->getError() != AssetUpload::NoError) {
QString errorInfo = "Could not upload model to the Asset Server.";
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
} else {
addAssetToWorldSetMapping(filePath, mapping, hash, isZip, isBlocks);
}
// Remove the temporary directory created by a Clara.io marketplace download.
int index = filePath.lastIndexOf("/model_repo/");
if (index > 0) {
QString tempDir = filePath.left(index);
qCDebug(interfaceapp) << "Removing temporary directory at: " + tempDir;
QDir(tempDir).removeRecursively();
}
upload->deleteLater();
});
upload->start();
}
void Application::addAssetToWorldSetMapping(QString filePath, QString mapping, QString hash, bool isZip, bool isBlocks) {
auto request = DependencyManager::get<AssetClient>()->createSetMappingRequest(mapping, hash);
connect(request, &SetMappingRequest::finished, this, [=](SetMappingRequest* request) mutable {
if (request->getError() != SetMappingRequest::NoError) {
QString errorInfo = "Could not set asset mapping.";
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
} else {
// to prevent files that aren't models or texture files from being loaded into world automatically
if ((filePath.toLower().endsWith(OBJ_EXTENSION) || filePath.toLower().endsWith(FBX_EXTENSION)) ||
((filePath.toLower().endsWith(JPG_EXTENSION) || filePath.toLower().endsWith(PNG_EXTENSION)) &&
((!isBlocks) && (!isZip)))) {
addAssetToWorldAddEntity(filePath, mapping);
} else {
qCDebug(interfaceapp) << "Zipped contents are not supported entity files";
addAssetToWorldInfoDone(filenameFromPath(filePath));
}
}
request->deleteLater();
});
request->start();
}
void Application::addAssetToWorldAddEntity(QString filePath, QString mapping) {
EntityItemProperties properties;
properties.setName(mapping.right(mapping.length() - 1));
if (filePath.toLower().endsWith(PNG_EXTENSION) || filePath.toLower().endsWith(JPG_EXTENSION)) {
properties.setType(EntityTypes::Image);
properties.setImageURL(QString("atp:" + mapping));
properties.setKeepAspectRatio(false);
} else {
properties.setType(EntityTypes::Model);
properties.setModelURL("atp:" + mapping);
properties.setShapeType(SHAPE_TYPE_SIMPLE_COMPOUND);
}
properties.setCollisionless(true); // Temporarily set so that the entity doesn't collide with the avatar.
properties.setVisible(false); // Temporarily set so that the entity isn't seen at its large, unresized dimensions.
bool grabbable = (Menu::getInstance()->isOptionChecked(MenuOption::CreateEntitiesGrabbable));
properties.setUserData(grabbable ? GRABBABLE_USER_DATA : NOT_GRABBABLE_USER_DATA);
glm::vec3 positionOffset = getMyAvatar()->getWorldOrientation() * (getMyAvatar()->getSensorToWorldScale() * glm::vec3(0.0f, 0.0f, -2.0f));
properties.setPosition(getMyAvatar()->getWorldPosition() + positionOffset);
properties.setRotation(getMyAvatar()->getWorldOrientation());
properties.setGravity(glm::vec3(0.0f, 0.0f, 0.0f));
auto entityID = DependencyManager::get<EntityScriptingInterface>()->addEntity(properties);
// Note: Model dimensions are not available here; model is scaled per FBX mesh in RenderableModelEntityItem::update() later
// on. But FBX dimensions may be in cm, so we monitor for the dimension change and rescale again if warranted.
if (entityID == QUuid()) {
QString errorInfo = "Could not add model " + mapping + " to world.";
qWarning(interfaceapp) << "Could not add model to world: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
} else {
// Monitor when the asset is rendered in the world so that it can be resized if necessary.
_addAssetToWorldResizeList.insert(entityID, 0); // List value is count of checks performed.
if (!_addAssetToWorldResizeTimer.isActive()) {
_addAssetToWorldResizeTimer.start();
}
// Close progress message box.
addAssetToWorldInfoDone(filenameFromPath(filePath));
}
}
void Application::handleUnzip(QString zipFile, QStringList unzipFile, bool autoAdd, bool isZip, bool isBlocks) {
if (autoAdd) {
if (!unzipFile.isEmpty()) {
for (int i = 0; i < unzipFile.length(); i++) {
if (QFileInfo(unzipFile.at(i)).isFile()) {
qCDebug(interfaceapp) << "Preparing file for asset server: " << unzipFile.at(i);
addAssetToWorld(unzipFile.at(i), zipFile, isZip, isBlocks);
}
}
} else {
addAssetToWorldUnzipFailure(zipFile);
}
} else {
showAssetServerWidget(unzipFile.first());
}
}
void Application::packageModel() {
ModelPackager::package();
}
void Application::addAssetToWorldCheckModelSize() {
if (_addAssetToWorldResizeList.size() == 0) {
return;
}
auto item = _addAssetToWorldResizeList.begin();
while (item != _addAssetToWorldResizeList.end()) {
auto entityID = item.key();
EntityPropertyFlags propertyFlags;
propertyFlags += PROP_NAME;
propertyFlags += PROP_DIMENSIONS;
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
auto properties = entityScriptingInterface->getEntityPropertiesInternal(entityID, propertyFlags, false);
auto name = properties.getName();
auto dimensions = properties.getDimensions();
bool doResize = false;
const glm::vec3 DEFAULT_DIMENSIONS = glm::vec3(0.1f, 0.1f, 0.1f);
if (dimensions != DEFAULT_DIMENSIONS) {
// Scale the model so that its largest dimension is exactly the specified size.
const float MAXIMUM_DIMENSION = getMyAvatar()->getSensorToWorldScale();
auto previousDimensions = dimensions;
auto scale = std::min(MAXIMUM_DIMENSION / dimensions.x, std::min(MAXIMUM_DIMENSION / dimensions.y,
MAXIMUM_DIMENSION / dimensions.z));
dimensions *= scale;
qInfo(interfaceapp) << "Model" << name << "auto-resized from" << previousDimensions << " to " << dimensions;
doResize = true;
item = _addAssetToWorldResizeList.erase(item); // Finished with this entity; advance to next.
} else {
// Increment count of checks done.
_addAssetToWorldResizeList[entityID]++;
const int CHECK_MODEL_SIZE_MAX_CHECKS = 300;
if (_addAssetToWorldResizeList[entityID] > CHECK_MODEL_SIZE_MAX_CHECKS) {
// Have done enough checks; model was either the default size or something's gone wrong.
// Rescale all dimensions.
const glm::vec3 UNIT_DIMENSIONS = glm::vec3(1.0f, 1.0f, 1.0f);
dimensions = UNIT_DIMENSIONS;
qInfo(interfaceapp) << "Model" << name << "auto-resize timed out; resized to " << dimensions;
doResize = true;
item = _addAssetToWorldResizeList.erase(item); // Finished with this entity; advance to next.
} else {
// No action on this entity; advance to next.
++item;
}
}
if (doResize) {
EntityItemProperties properties;
properties.setDimensions(dimensions);
properties.setVisible(true);
if (!name.toLower().endsWith(PNG_EXTENSION) && !name.toLower().endsWith(JPG_EXTENSION)) {
properties.setCollisionless(false);
}
bool grabbable = (Menu::getInstance()->isOptionChecked(MenuOption::CreateEntitiesGrabbable));
properties.setUserData(grabbable ? GRABBABLE_USER_DATA : NOT_GRABBABLE_USER_DATA);
properties.setLastEdited(usecTimestampNow());
entityScriptingInterface->editEntity(entityID, properties);
}
}
// Stop timer if nothing in list to check.
if (_addAssetToWorldResizeList.size() == 0) {
_addAssetToWorldResizeTimer.stop();
}
}
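A quick numeric check of the resize rule above, assuming a sensor-to-world scale of 1.0 so MAXIMUM_DIMENSION is 1.0 m:

// A model reported at (2.0, 0.5, 4.0) m gives per-axis ratios (0.5, 2.0, 0.25);
// the smallest ratio, 0.25, is applied uniformly, so the result is (0.5, 0.125, 1.0)
// and the largest axis lands exactly on MAXIMUM_DIMENSION.
glm::vec3 dimensions(2.0f, 0.5f, 4.0f);
const float MAXIMUM_DIMENSION = 1.0f;
float scale = std::min(MAXIMUM_DIMENSION / dimensions.x,
                       std::min(MAXIMUM_DIMENSION / dimensions.y, MAXIMUM_DIMENSION / dimensions.z));
dimensions *= scale; // (0.5, 0.125, 1.0)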
void Application::onAssetToWorldMessageBoxClosed() {
if (_addAssetToWorldMessageBox) {
// User manually closed message box; perhaps because it has become stuck, so reset all messages.
qInfo(interfaceapp) << "User manually closed download status message box";
disconnect(_addAssetToWorldMessageBox);
_addAssetToWorldMessageBox = nullptr;
addAssetToWorldMessageClose();
}
}
void Application::addAssetToWorldInfoTimeout() {
if (_aboutToQuit) {
return;
}
/*
If the list is not empty, display the last message in the list (it may already be displayed) unless an error is being displayed.
If the list is empty, close the message box unless an error is being displayed.
*/
if (!_addAssetToWorldErrorTimer.isActive() && _addAssetToWorldMessageBox) {
if (_addAssetToWorldInfoKeys.length() > 0) {
_addAssetToWorldMessageBox->setProperty("text", "\n" + _addAssetToWorldInfoMessages.last());
} else {
disconnect(_addAssetToWorldMessageBox);
_addAssetToWorldMessageBox->setVisible(false);
_addAssetToWorldMessageBox->deleteLater();
_addAssetToWorldMessageBox = nullptr;
}
}
}
void Application::addAssetToWorldErrorTimeout() {
if (_aboutToQuit) {
return;
}
/*
If the list is not empty, display the message from the last entry.
If the list is empty, close the message box.
*/
if (_addAssetToWorldMessageBox) {
if (_addAssetToWorldInfoKeys.length() > 0) {
_addAssetToWorldMessageBox->setProperty("text", "\n" + _addAssetToWorldInfoMessages.last());
} else {
disconnect(_addAssetToWorldMessageBox);
_addAssetToWorldMessageBox->setVisible(false);
_addAssetToWorldMessageBox->deleteLater();
_addAssetToWorldMessageBox = nullptr;
}
}
}
bool Application::importJSONFromURL(const QString& urlString) {
// we only load files that terminate in just .json (not .svo.json and not .ava.json)
QUrl jsonURL { urlString };
emit svoImportRequested(urlString);
return true;
}
bool Application::importSVOFromURL(const QString& urlString) {
emit svoImportRequested(urlString);
return true;
}
bool Application::importFromZIP(const QString& filePath) {
qDebug() << "A zip file has been dropped in: " << filePath;
QUrl empty;
// handle Blocks download from Marketplace
if (filePath.contains("poly.google.com/downloads")) {
addAssetToWorldFromURL(filePath);
} else {
qApp->getFileDownloadInterface()->runUnzip(filePath, empty, true, true, false);
}
return true;
}
bool Application::importImage(const QString& urlString) {
qCDebug(interfaceapp) << "An image file has been dropped in";
QString filepath(urlString);
#if defined(Q_OS_WIN)
filepath.remove("file:///");
#else
filepath.remove("file://");
#endif
addAssetToWorld(filepath, "", false, false);
return true;
}

interface/src/Application_Camera.cpp

@@ -0,0 +1,374 @@
//
// Application_Camera.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "Application.h"
#include <glm/gtx/transform.hpp>
#include <controllers/UserInputMapper.h>
#include <SecondaryCamera.h>
#include "avatar/MyAvatar.h"
#include "avatar/MyHead.h"
#include "Menu.h"
static const float MIRROR_FULLSCREEN_DISTANCE = 0.789f;
void Application::copyViewFrustum(ViewFrustum& viewOut) const {
QMutexLocker viewLocker(&_viewMutex);
viewOut = _viewFrustum;
}
void Application::copyDisplayViewFrustum(ViewFrustum& viewOut) const {
QMutexLocker viewLocker(&_viewMutex);
viewOut = _displayViewFrustum;
}
void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
PROFILE_RANGE(render, __FUNCTION__);
PerformanceTimer perfTimer("updateCamera");
glm::vec3 boomOffset;
auto myAvatar = getMyAvatar();
boomOffset = myAvatar->getModelScale() * myAvatar->getBoomLength() * -IDENTITY_FORWARD;
// The render mode is DEFAULT unless the camera is in mirror mode, in which case MIRROR is assigned further below
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
// Always use the default eye position, not the actual head eye position.
// Using the latter will cause the camera to wobble with idle animations,
// or with changes from the face tracker
CameraMode mode = _myCamera.getMode();
if (mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
_thirdPersonHMDCameraBoomValid = false;
if (isHMDMode()) {
mat4 camMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
_myCamera.setPosition(extractTranslation(camMat));
_myCamera.setOrientation(glmExtractRotation(camMat));
} else if (mode == CAMERA_MODE_FIRST_PERSON) {
_myCamera.setPosition(myAvatar->getDefaultEyePosition());
_myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
} else {
_myCamera.setPosition(myAvatar->getCameraEyesPosition(deltaTime));
_myCamera.setOrientation(myAvatar->getLookAtRotation());
}
} else if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
if (isHMDMode()) {
if (!_thirdPersonHMDCameraBoomValid) {
const glm::vec3 CAMERA_OFFSET = glm::vec3(0.0f, 0.0f, 0.7f);
_thirdPersonHMDCameraBoom = cancelOutRollAndPitch(myAvatar->getHMDSensorOrientation()) * CAMERA_OFFSET;
_thirdPersonHMDCameraBoomValid = true;
}
glm::mat4 thirdPersonCameraSensorToWorldMatrix = myAvatar->getSensorToWorldMatrix();
const glm::vec3 cameraPos = myAvatar->getHMDSensorPosition() + _thirdPersonHMDCameraBoom * myAvatar->getBoomLength();
glm::mat4 sensorCameraMat = createMatFromQuatAndPos(myAvatar->getHMDSensorOrientation(), cameraPos);
glm::mat4 worldCameraMat = thirdPersonCameraSensorToWorldMatrix * sensorCameraMat;
_myCamera.setOrientation(glm::normalize(glmExtractRotation(worldCameraMat)));
_myCamera.setPosition(extractTranslation(worldCameraMat));
} else {
_thirdPersonHMDCameraBoomValid = false;
if (mode == CAMERA_MODE_THIRD_PERSON) {
_myCamera.setOrientation(myAvatar->getHead()->getOrientation());
if (isOptionChecked(MenuOption::CenterPlayerInView)) {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ _myCamera.getOrientation() * boomOffset);
} else {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ myAvatar->getWorldOrientation() * boomOffset);
}
} else {
glm::quat lookAtRotation = myAvatar->getLookAtRotation();
if (mode == CAMERA_MODE_SELFIE) {
lookAtRotation = lookAtRotation * glm::angleAxis(PI, myAvatar->getWorldOrientation() * Vectors::UP);
}
_myCamera.setPosition(myAvatar->getLookAtPivotPoint()
+ lookAtRotation * boomOffset);
_myCamera.lookAt(myAvatar->getLookAtPivotPoint());
}
}
} else if (mode == CAMERA_MODE_MIRROR) {
_thirdPersonHMDCameraBoomValid = false;
if (isHMDMode()) {
auto mirrorBodyOrientation = myAvatar->getWorldOrientation() * glm::quat(glm::vec3(0.0f, PI + _mirrorYawOffset, 0.0f));
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
// Mirror HMD yaw and roll
glm::vec3 mirrorHmdEulers = glm::eulerAngles(hmdRotation);
mirrorHmdEulers.y = -mirrorHmdEulers.y;
mirrorHmdEulers.z = -mirrorHmdEulers.z;
glm::quat mirrorHmdRotation = glm::quat(mirrorHmdEulers);
glm::quat worldMirrorRotation = mirrorBodyOrientation * mirrorHmdRotation;
_myCamera.setOrientation(worldMirrorRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
// Mirror HMD lateral offsets
hmdOffset.x = -hmdOffset.x;
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getModelScale(), 0)
+ mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ mirrorBodyOrientation * hmdOffset);
} else {
auto userInputMapper = DependencyManager::get<UserInputMapper>();
const float YAW_SPEED = TWO_PI / 5.0f;
float deltaYaw = userInputMapper->getActionState(controller::Action::YAW) * YAW_SPEED * deltaTime;
_mirrorYawOffset += deltaYaw;
_myCamera.setOrientation(myAvatar->getWorldOrientation() * glm::quat(glm::vec3(0.0f, PI + _mirrorYawOffset, 0.0f)));
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getModelScale(), 0)
+ (myAvatar->getWorldOrientation() * glm::quat(glm::vec3(0.0f, _mirrorYawOffset, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * myAvatar->getBoomLength() * _scaleMirror);
}
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
} else if (mode == CAMERA_MODE_ENTITY) {
_thirdPersonHMDCameraBoomValid = false;
EntityItemPointer cameraEntity = _myCamera.getCameraEntityPointer();
if (cameraEntity != nullptr) {
if (isHMDMode()) {
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
_myCamera.setOrientation(cameraEntity->getWorldOrientation() * hmdRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
_myCamera.setPosition(cameraEntity->getWorldPosition() + (hmdRotation * hmdOffset));
} else {
_myCamera.setOrientation(cameraEntity->getWorldOrientation());
_myCamera.setPosition(cameraEntity->getWorldPosition());
}
}
}
// Update camera position
if (!isHMDMode()) {
_myCamera.update();
}
renderArgs._cameraMode = (int8_t)_myCamera.getMode();
}
void Application::updateSecondaryCameraViewFrustum() {
// TODO: Fix this by modeling the way the secondary camera works on how the main camera works,
// i.e. use a camera object stored in the game logic that informs the engine where the secondary
// camera should be.
// Code based on SecondaryCameraJob
auto renderConfig = _graphicsEngine->getRenderEngine()->getConfiguration();
assert(renderConfig);
auto camera = dynamic_cast<SecondaryCameraJobConfig*>(renderConfig->getConfig("SecondaryCamera"));
if (!camera || !camera->isEnabled()) {
return;
}
ViewFrustum secondaryViewFrustum;
if (camera->portalProjection && !camera->attachedEntityId.isNull() && !camera->portalEntranceEntityId.isNull()) {
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
EntityItemPointer portalEntrance = qApp->getEntities()->getTree()->findEntityByID(camera->portalEntranceEntityId);
EntityItemPointer portalExit = qApp->getEntities()->getTree()->findEntityByID(camera->attachedEntityId);
glm::vec3 portalEntrancePropertiesPosition = portalEntrance->getWorldPosition();
glm::quat portalEntrancePropertiesRotation = portalEntrance->getWorldOrientation();
glm::mat4 worldFromPortalEntranceRotation = glm::mat4_cast(portalEntrancePropertiesRotation);
glm::mat4 worldFromPortalEntranceTranslation = glm::translate(portalEntrancePropertiesPosition);
glm::mat4 worldFromPortalEntrance = worldFromPortalEntranceTranslation * worldFromPortalEntranceRotation;
glm::mat4 portalEntranceFromWorld = glm::inverse(worldFromPortalEntrance);
glm::vec3 portalExitPropertiesPosition = portalExit->getWorldPosition();
glm::quat portalExitPropertiesRotation = portalExit->getWorldOrientation();
glm::vec3 portalExitPropertiesDimensions = portalExit->getScaledDimensions();
glm::vec3 halfPortalExitPropertiesDimensions = 0.5f * portalExitPropertiesDimensions;
glm::mat4 worldFromPortalExitRotation = glm::mat4_cast(portalExitPropertiesRotation);
glm::mat4 worldFromPortalExitTranslation = glm::translate(portalExitPropertiesPosition);
glm::mat4 worldFromPortalExit = worldFromPortalExitTranslation * worldFromPortalExitRotation;
glm::vec3 mainCameraPositionWorld = getCamera().getPosition();
glm::vec3 mainCameraPositionPortalEntrance = vec3(portalEntranceFromWorld * vec4(mainCameraPositionWorld, 1.0f));
mainCameraPositionPortalEntrance = vec3(-mainCameraPositionPortalEntrance.x, mainCameraPositionPortalEntrance.y,
-mainCameraPositionPortalEntrance.z);
glm::vec3 portalExitCameraPositionWorld = vec3(worldFromPortalExit * vec4(mainCameraPositionPortalEntrance, 1.0f));
secondaryViewFrustum.setPosition(portalExitCameraPositionWorld);
secondaryViewFrustum.setOrientation(portalExitPropertiesRotation);
float nearClip = mainCameraPositionPortalEntrance.z + portalExitPropertiesDimensions.z * 2.0f;
// `mainCameraPositionPortalEntrance` should technically be `mainCameraPositionPortalExit`,
// but the values are the same.
glm::vec3 upperRight = halfPortalExitPropertiesDimensions - mainCameraPositionPortalEntrance;
glm::vec3 bottomLeft = -halfPortalExitPropertiesDimensions - mainCameraPositionPortalEntrance;
glm::mat4 frustum = glm::frustum(bottomLeft.x, upperRight.x, bottomLeft.y, upperRight.y, nearClip, camera->farClipPlaneDistance);
secondaryViewFrustum.setProjection(frustum);
} else if (camera->mirrorProjection && !camera->attachedEntityId.isNull()) {
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
auto entityProperties = entityScriptingInterface->getEntityProperties(camera->attachedEntityId);
glm::vec3 mirrorPropertiesPosition = entityProperties.getPosition();
glm::quat mirrorPropertiesRotation = entityProperties.getRotation();
glm::vec3 mirrorPropertiesDimensions = entityProperties.getDimensions();
glm::vec3 halfMirrorPropertiesDimensions = 0.5f * mirrorPropertiesDimensions;
// set up mirrorFromWorld as the inverse of the worldFromMirror transformation, inverting x and z to produce the mirrored image
// TODO: we are assuming here that UP is world y-axis
glm::mat4 worldFromMirrorRotation = glm::mat4_cast(mirrorPropertiesRotation) * glm::scale(vec3(-1.0f, 1.0f, -1.0f));
glm::mat4 worldFromMirrorTranslation = glm::translate(mirrorPropertiesPosition);
glm::mat4 worldFromMirror = worldFromMirrorTranslation * worldFromMirrorRotation;
glm::mat4 mirrorFromWorld = glm::inverse(worldFromMirror);
// get mirror camera position by reflecting main camera position's z coordinate in mirror space
glm::vec3 mainCameraPositionWorld = getCamera().getPosition();
glm::vec3 mainCameraPositionMirror = vec3(mirrorFromWorld * vec4(mainCameraPositionWorld, 1.0f));
glm::vec3 mirrorCameraPositionMirror = vec3(mainCameraPositionMirror.x, mainCameraPositionMirror.y,
-mainCameraPositionMirror.z);
glm::vec3 mirrorCameraPositionWorld = vec3(worldFromMirror * vec4(mirrorCameraPositionMirror, 1.0f));
// set frustum position to be mirrored camera and set orientation to mirror's adjusted rotation
glm::quat mirrorCameraOrientation = glm::quat_cast(worldFromMirrorRotation);
secondaryViewFrustum.setPosition(mirrorCameraPositionWorld);
secondaryViewFrustum.setOrientation(mirrorCameraOrientation);
// build frustum using mirror space translation of mirrored camera
float nearClip = mirrorCameraPositionMirror.z + mirrorPropertiesDimensions.z * 2.0f;
glm::vec3 upperRight = halfMirrorPropertiesDimensions - mirrorCameraPositionMirror;
glm::vec3 bottomLeft = -halfMirrorPropertiesDimensions - mirrorCameraPositionMirror;
glm::mat4 frustum = glm::frustum(bottomLeft.x, upperRight.x, bottomLeft.y, upperRight.y, nearClip, camera->farClipPlaneDistance);
secondaryViewFrustum.setProjection(frustum);
} else {
if (!camera->attachedEntityId.isNull()) {
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
auto entityProperties = entityScriptingInterface->getEntityProperties(camera->attachedEntityId);
secondaryViewFrustum.setPosition(entityProperties.getPosition());
secondaryViewFrustum.setOrientation(entityProperties.getRotation());
} else {
secondaryViewFrustum.setPosition(camera->position);
secondaryViewFrustum.setOrientation(camera->orientation);
}
float aspectRatio = (float)camera->textureWidth / (float)camera->textureHeight;
secondaryViewFrustum.setProjection(camera->vFoV,
aspectRatio,
camera->nearClipPlaneDistance,
camera->farClipPlaneDistance);
}
// Without calculating the bound planes, the secondary camera will use the same culling frustum as the main camera,
// which is not what we want here.
secondaryViewFrustum.calculate();
_conicalViews.push_back(secondaryViewFrustum);
}
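The mirror branch above is easier to follow as a standalone reflection: express the main camera in mirror-local space, negate the axis that points out of the mirror, and map back to world space. A minimal sketch with made-up positions:

// Hypothetical mirror entity and main camera positions, for illustration only.
glm::vec3 mirrorPosition(0.0f, 1.5f, -3.0f);
glm::quat mirrorRotation(1.0f, 0.0f, 0.0f, 0.0f); // identity, (w, x, y, z)
glm::mat4 worldFromMirror = glm::translate(mirrorPosition) *
                            glm::mat4_cast(mirrorRotation) * glm::scale(glm::vec3(-1.0f, 1.0f, -1.0f));
glm::mat4 mirrorFromWorld = glm::inverse(worldFromMirror);

glm::vec3 cameraWorld(0.5f, 1.6f, -1.0f);
glm::vec3 cameraMirror = glm::vec3(mirrorFromWorld * glm::vec4(cameraWorld, 1.0f));
cameraMirror.z = -cameraMirror.z; // reflect across the mirror plane
glm::vec3 mirroredCameraWorld = glm::vec3(worldFromMirror * glm::vec4(cameraMirror, 1.0f));
// mirroredCameraWorld is where the secondary (mirror) camera gets placed above.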
void Application::setFieldOfView(float fov) {
if (fov != _fieldOfView.get()) {
_fieldOfView.set(fov);
resizeGL();
}
}
// Called during Application::update immediately before AvatarManager::updateMyAvatar, updating my data that is then sent
// to everyone.
// The principal result is to call updateLookAtTargetAvatar() and then setLookAtPosition().
// Note that it is called BEFORE we update position or joints based on sensors, etc.
void Application::updateMyAvatarLookAtPosition(float deltaTime) {
PerformanceTimer perfTimer("lookAt");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateMyAvatarLookAtPosition()");
auto myAvatar = getMyAvatar();
myAvatar->updateEyesLookAtPosition(deltaTime);
}
void Application::cycleCamera() {
auto menu = Menu::getInstance();
if (menu->isOptionChecked(MenuOption::FirstPersonLookAt)) {
menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, false);
menu->setIsOptionChecked(MenuOption::LookAtCamera, true);
} else if (menu->isOptionChecked(MenuOption::LookAtCamera)) {
menu->setIsOptionChecked(MenuOption::LookAtCamera, false);
if (menu->getActionForOption(MenuOption::SelfieCamera)->isVisible()) {
menu->setIsOptionChecked(MenuOption::SelfieCamera, true);
} else {
menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
}
} else if (menu->isOptionChecked(MenuOption::SelfieCamera)) {
menu->setIsOptionChecked(MenuOption::SelfieCamera, false);
menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
}
cameraMenuChanged(); // handle the menu change
}
void Application::cameraModeChanged() {
switch (_myCamera.getMode()) {
case CAMERA_MODE_FIRST_PERSON_LOOK_AT:
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
break;
case CAMERA_MODE_LOOK_AT:
Menu::getInstance()->setIsOptionChecked(MenuOption::LookAtCamera, true);
break;
case CAMERA_MODE_SELFIE:
Menu::getInstance()->setIsOptionChecked(MenuOption::SelfieCamera, true);
break;
default:
// we don't have menu items for the others, so just leave it alone.
return;
}
cameraMenuChanged();
}
void Application::cameraMenuChanged() {
auto menu = Menu::getInstance();
if (menu->isOptionChecked(MenuOption::FirstPersonLookAt)) {
if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON_LOOK_AT);
getMyAvatar()->setBoomLength(MyAvatar::ZOOM_MIN);
}
} else if (menu->isOptionChecked(MenuOption::LookAtCamera)) {
if (_myCamera.getMode() != CAMERA_MODE_LOOK_AT) {
_myCamera.setMode(CAMERA_MODE_LOOK_AT);
if (getMyAvatar()->getBoomLength() == MyAvatar::ZOOM_MIN) {
getMyAvatar()->setBoomLength(MyAvatar::ZOOM_DEFAULT);
}
}
} else if (menu->isOptionChecked(MenuOption::SelfieCamera)) {
if (_myCamera.getMode() != CAMERA_MODE_SELFIE) {
_myCamera.setMode(CAMERA_MODE_SELFIE);
if (getMyAvatar()->getBoomLength() == MyAvatar::ZOOM_MIN) {
getMyAvatar()->setBoomLength(MyAvatar::ZOOM_DEFAULT);
}
}
}
}
void Application::changeViewAsNeeded(float boomLength) {
// Switch between first and third person views as needed
// This is called when the boom length has changed
bool boomLengthGreaterThanMinimum = (boomLength > MyAvatar::ZOOM_MIN);
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON_LOOK_AT && boomLengthGreaterThanMinimum) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, false);
Menu::getInstance()->setIsOptionChecked(MenuOption::LookAtCamera, true);
cameraMenuChanged();
} else if (_myCamera.getMode() == CAMERA_MODE_LOOK_AT && !boomLengthGreaterThanMinimum) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
Menu::getInstance()->setIsOptionChecked(MenuOption::LookAtCamera, false);
cameraMenuChanged();
}
}

interface/src/Application_Entities.cpp

@@ -0,0 +1,359 @@
//
// Application_Entities.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "Application.h"
#include <AnimationCache.h>
#include <input-plugins/KeyboardMouseDevice.h>
#include <MainWindow.h>
#include <recording/ClipCache.h>
#include <RenderableEntityItem.h>
#include <SoundCache.h>
#include "InterfaceLogging.h"
#include "LODManager.h"
static const float FOCUS_HIGHLIGHT_EXPANSION_FACTOR = 1.05f;
static const float INITIAL_QUERY_RADIUS = 10.0f; // priority radius for entities before physics enabled
void Application::setMaxOctreePacketsPerSecond(int maxOctreePPS) {
if (maxOctreePPS != _maxOctreePPS) {
_maxOctreePPS = maxOctreePPS;
_maxOctreePacketsPerSecond.set(_maxOctreePPS);
}
}
QVector<EntityItemID> Application::pasteEntities(const QString& entityHostType, float x, float y, float z) {
return _entityClipboard->sendEntities(_entityEditSender.get(), getEntities()->getTree(), entityHostType, x, y, z);
}
bool Application::exportEntities(const QString& filename,
const QVector<QUuid>& entityIDs,
const glm::vec3* givenOffset) {
QHash<EntityItemID, EntityItemPointer> entities;
auto nodeList = DependencyManager::get<NodeList>();
const QUuid myAvatarID = nodeList->getSessionUUID();
auto entityTree = getEntities()->getTree();
auto exportTree = std::make_shared<EntityTree>();
exportTree->setMyAvatar(getMyAvatar());
exportTree->createRootElement();
glm::vec3 root(TREE_SCALE, TREE_SCALE, TREE_SCALE);
bool success = true;
entityTree->withReadLock([entityIDs, entityTree, givenOffset, myAvatarID, &root, &entities, &success, &exportTree] {
for (auto entityID : entityIDs) { // Gather entities and properties.
auto entityItem = entityTree->findEntityByEntityItemID(entityID);
if (!entityItem) {
qCWarning(interfaceapp) << "Skipping export of" << entityID << "that is not in scene.";
continue;
}
if (!givenOffset) {
EntityItemID parentID = entityItem->getParentID();
bool parentIsAvatar = (parentID == AVATAR_SELF_ID || parentID == myAvatarID);
if (!parentIsAvatar && (parentID.isInvalidID() ||
!entityIDs.contains(parentID) ||
!entityTree->findEntityByEntityItemID(parentID))) {
// If parent wasn't selected, we want absolute position, which isn't in properties.
auto position = entityItem->getWorldPosition();
root.x = glm::min(root.x, position.x);
root.y = glm::min(root.y, position.y);
root.z = glm::min(root.z, position.z);
}
}
entities[entityID] = entityItem;
}
if (entities.size() == 0) {
success = false;
return;
}
if (givenOffset) {
root = *givenOffset;
}
for (EntityItemPointer& entityDatum : entities) {
auto properties = entityDatum->getProperties();
EntityItemID parentID = properties.getParentID();
bool parentIsAvatar = (parentID == AVATAR_SELF_ID || parentID == myAvatarID);
if (parentIsAvatar) {
properties.setParentID(AVATAR_SELF_ID);
} else {
if (parentID.isInvalidID()) {
properties.setPosition(properties.getPosition() - root);
} else if (!entities.contains(parentID)) {
entityDatum->globalizeProperties(properties, "Parent %3 of %2 %1 is not selected for export.", -root);
} // else valid parent -- don't offset
}
exportTree->addEntity(entityDatum->getEntityItemID(), properties);
}
});
if (success) {
success = exportTree->writeToJSONFile(filename.toLocal8Bit().constData());
// restore the main window's active state
_window->activateWindow();
}
return success;
}
bool Application::exportEntities(const QString& filename, float x, float y, float z, float scale) {
glm::vec3 center(x, y, z);
glm::vec3 minCorner = center - vec3(scale);
float cubeSize = scale * 2;
AACube boundingCube(minCorner, cubeSize);
QVector<QUuid> entities;
auto entityTree = getEntities()->getTree();
entityTree->withReadLock([&] {
entityTree->evalEntitiesInCube(boundingCube, PickFilter(), entities);
});
return exportEntities(filename, entities, &center);
}
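For clarity, the bounding cube constructed above, with example numbers:

// Example: exporting around center (10, 5, 10) with scale 4.0 selects entities inside
// an axis-aligned cube whose minimum corner is (6, 1, 6) and whose edge length is 8.
glm::vec3 center(10.0f, 5.0f, 10.0f);
float scale = 4.0f;
glm::vec3 minCorner = center - glm::vec3(scale); // (6, 1, 6)
AACube boundingCube(minCorner, scale * 2.0f);    // edge length 8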
bool Application::importEntities(const QString& urlOrFilename, const bool isObservable, const qint64 callerId) {
bool success = false;
_entityClipboard->withWriteLock([&] {
_entityClipboard->eraseAllOctreeElements();
// FIXME: readFromURL() can take over the main event loop which may cause problems, especially if downloading the JSON
// from the Web.
success = _entityClipboard->readFromURL(urlOrFilename, isObservable, callerId, true);
if (success) {
_entityClipboard->reaverageOctreeElements();
}
});
return success;
}
void Application::setKeyboardFocusHighlight(const glm::vec3& position, const glm::quat& rotation, const glm::vec3& dimensions) {
if (qApp->getLoginDialogPoppedUp()) {
return;
}
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
if (_keyboardFocusHighlightID == UNKNOWN_ENTITY_ID || !entityScriptingInterface->isAddedEntity(_keyboardFocusHighlightID)) {
EntityItemProperties properties;
properties.setType(EntityTypes::Box);
properties.setAlpha(1.0f);
properties.setColor({ 0xFF, 0xEF, 0x00 });
properties.setPrimitiveMode(PrimitiveMode::LINES);
properties.getPulse().setMin(0.5);
properties.getPulse().setMax(1.0f);
properties.getPulse().setColorMode(PulseMode::IN_PHASE);
properties.setIgnorePickIntersection(true);
_keyboardFocusHighlightID = entityScriptingInterface->addEntityInternal(properties, entity::HostType::LOCAL);
}
// Position focus
EntityItemProperties properties;
properties.setPosition(position);
properties.setRotation(rotation);
properties.setDimensions(dimensions);
properties.setVisible(true);
entityScriptingInterface->editEntity(_keyboardFocusHighlightID, properties);
}
void Application::setKeyboardFocusEntity(const QUuid& id) {
if (_keyboardFocusedEntity.get() != id) {
if (qApp->getLoginDialogPoppedUp() && !_loginDialogID.isNull()) {
if (id == _loginDialogID) {
emit loginDialogFocusEnabled();
} else if (!_keyboardFocusWaitingOnRenderable) {
// that's the only entity we want in focus;
return;
}
}
_keyboardFocusedEntity.set(id);
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
if (id != UNKNOWN_ENTITY_ID) {
EntityPropertyFlags desiredProperties;
desiredProperties += PROP_VISIBLE;
desiredProperties += PROP_SHOW_KEYBOARD_FOCUS_HIGHLIGHT;
auto properties = entityScriptingInterface->getEntityProperties(id);
if (properties.getVisible()) {
auto entities = getEntities();
auto entityId = _keyboardFocusedEntity.get();
auto entityItemRenderable = entities->renderableForEntityId(entityId);
if (!entityItemRenderable) {
_keyboardFocusWaitingOnRenderable = true;
} else if (entityItemRenderable->wantsKeyboardFocus()) {
entities->setProxyWindow(entityId, _window->windowHandle());
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->pluginFocusOutEvent();
}
_lastAcceptedKeyPress = usecTimestampNow();
if (properties.getShowKeyboardFocusHighlight()) {
if (auto entity = entities->getEntity(entityId)) {
setKeyboardFocusHighlight(entity->getWorldPosition(), entity->getWorldOrientation(),
entity->getScaledDimensions() * FOCUS_HIGHLIGHT_EXPANSION_FACTOR);
return;
}
}
}
}
}
EntityItemProperties properties;
properties.setVisible(false);
entityScriptingInterface->editEntity(_keyboardFocusHighlightID, properties);
}
}
void Application::clearDomainOctreeDetails(bool clearAll) {
// if we're about to quit, we really don't need to do the rest of these things...
if (_aboutToQuit) {
return;
}
qCDebug(interfaceapp) << "Clearing domain octree details...";
resetPhysicsReadyInformation();
setIsInterstitialMode(true);
auto octreeServerSceneStats = getOcteeSceneStats();
octreeServerSceneStats->withWriteLock([&] {
octreeServerSceneStats->clear();
});
// reset the model renderer
clearAll ? getEntities()->clear() : getEntities()->clearDomainAndNonOwnedEntities();
DependencyManager::get<AnimationCache>()->clearUnusedResources();
DependencyManager::get<SoundCache>()->clearUnusedResources();
DependencyManager::get<MaterialCache>()->clearUnusedResources();
DependencyManager::get<ModelCache>()->clearUnusedResources();
ShaderCache::instance().clearUnusedResources();
DependencyManager::get<TextureCache>()->clearUnusedResources();
DependencyManager::get<recording::ClipCache>()->clearUnusedResources();
}
void Application::resettingDomain() {
_notifiedPacketVersionMismatchThisDomain = false;
clearDomainOctreeDetails(false);
}
void Application::queryOctree(NodeType_t serverType, PacketType packetType) {
if (!_settingsLoaded) {
return; // bail early if settings are not loaded
}
const bool isModifiedQuery = !_physicsEnabled;
if (isModifiedQuery) {
if (!_octreeProcessor->safeLandingIsActive()) {
// don't send the octreeQuery until SafeLanding knows it has started
return;
}
// Create modified view that is a simple sphere.
bool interstitialModeEnabled = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
ConicalViewFrustum sphericalView;
AABox box = getMyAvatar()->getGlobalBoundingBox();
float radius = glm::max(INITIAL_QUERY_RADIUS, 0.5f * glm::length(box.getDimensions()));
sphericalView.setPositionAndSimpleRadius(box.calcCenter(), radius);
if (interstitialModeEnabled) {
ConicalViewFrustum farView;
farView.set(_viewFrustum);
_octreeQuery.setConicalViews({ sphericalView, farView });
} else {
_octreeQuery.setConicalViews({ sphericalView });
}
_octreeQuery.setOctreeSizeScale(DEFAULT_OCTREE_SIZE_SCALE);
static constexpr float MIN_LOD_ADJUST = -20.0f;
_octreeQuery.setBoundaryLevelAdjust(MIN_LOD_ADJUST);
} else {
_octreeQuery.setConicalViews(_conicalViews);
auto lodManager = DependencyManager::get<LODManager>();
_octreeQuery.setOctreeSizeScale(lodManager->getOctreeSizeScale());
_octreeQuery.setBoundaryLevelAdjust(lodManager->getBoundaryLevelAdjust());
}
_octreeQuery.setReportInitialCompletion(isModifiedQuery);
auto nodeList = DependencyManager::get<NodeList>();
auto node = nodeList->soloNodeOfType(serverType);
if (node && node->getActiveSocket()) {
_octreeQuery.setMaxQueryPacketsPerSecond(getMaxOctreePacketsPerSecond());
auto queryPacket = NLPacket::create(packetType);
// encode the query data
auto packetData = reinterpret_cast<unsigned char*>(queryPacket->getPayload());
int packetSize = _octreeQuery.getBroadcastData(packetData);
queryPacket->setPayloadSize(packetSize);
// make sure we still have an active socket
nodeList->sendUnreliablePacket(*queryPacket, *node);
}
}
int Application::sendNackPackets() {
// iterates through all nodes in NodeList
auto nodeList = DependencyManager::get<NodeList>();
int packetsSent = 0;
nodeList->eachNode([&](const SharedNodePointer& node){
if (node->getActiveSocket() && node->getType() == NodeType::EntityServer) {
auto nackPacketList = NLPacketList::create(PacketType::OctreeDataNack);
QUuid nodeUUID = node->getUUID();
// if there are octree packets from this node that are waiting to be processed,
// don't send a NACK since the missing packets may be among those waiting packets.
if (_octreeProcessor->hasPacketsToProcessFrom(nodeUUID)) {
return;
}
QSet<OCTREE_PACKET_SEQUENCE> missingSequenceNumbers;
auto octreeServerSceneStats = getOcteeSceneStats();
octreeServerSceneStats->withReadLock([&] {
// retrieve octree scene stats of this node
if (octreeServerSceneStats->find(nodeUUID) == octreeServerSceneStats->end()) {
return;
}
// get sequence number stats of node, prune its missing set, and make a copy of the missing set
SequenceNumberStats& sequenceNumberStats = (*octreeServerSceneStats)[nodeUUID].getIncomingOctreeSequenceNumberStats();
sequenceNumberStats.pruneMissingSet();
missingSequenceNumbers = sequenceNumberStats.getMissingSet();
});
_isMissingSequenceNumbers = (missingSequenceNumbers.size() != 0);
// construct nack packet(s) for this node
foreach(const OCTREE_PACKET_SEQUENCE& missingNumber, missingSequenceNumbers) {
nackPacketList->writePrimitive(missingNumber);
}
if (nackPacketList->getNumPackets()) {
packetsSent += (int)nackPacketList->getNumPackets();
// send the packet list
nodeList->sendPacketList(std::move(nackPacketList), *node);
}
}
});
return packetsSent;
}

interface/src/Application_Events.cpp

@@ -0,0 +1,931 @@
//
// Application_Events.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "Application.h"
#include <QtCore/QMimeData>
#include <controllers/InputRecorder.h>
#include <display-plugins/CompositorHelper.h>
#include <graphics/RenderEventHandler.h>
#include <input-plugins/KeyboardMouseDevice.h>
#include <input-plugins/TouchscreenDevice.h>
#include <input-plugins/TouchscreenVirtualPadDevice.h>
#include <MainWindow.h>
#include <OffscreenUi.h>
#include <plugins/PluginManager.h>
#include <ScriptEngines.h>
#include <scripting/Audio.h>
#include <scripting/ControllerScriptingInterface.h>
#include <shared/FileUtils.h>
#include <ui/DialogsManager.h>
#include "AudioClient.h"
#include "GLCanvas.h"
#include "Menu.h"
#if defined(Q_OS_ANDROID)
#include "AndroidHelper.h"
#endif
Q_LOGGING_CATEGORY(trace_app_input_mouse, "trace.app.input.mouse")
static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;
class LambdaEvent : public QEvent {
std::function<void()> _fun;
public:
LambdaEvent(const std::function<void()> & fun) :
QEvent(static_cast<QEvent::Type>(ApplicationEvent::Lambda)), _fun(fun) {}
LambdaEvent(std::function<void()> && fun) :
QEvent(static_cast<QEvent::Type>(ApplicationEvent::Lambda)), _fun(fun) {}
void call() const { _fun(); }
};
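For context, a LambdaEvent is how arbitrary work gets marshalled onto the application thread: post one to the Application instance and the ApplicationEvent::Lambda branch of Application::event() below invokes the wrapped function when the event is dispatched. A hedged sketch of the assumed usage:

// Assumed usage: Qt takes ownership of the posted event, and the lambda runs on the
// application (main) thread when the event is delivered to Application::event().
QCoreApplication::postEvent(qApp, new LambdaEvent([] {
    qDebug() << "runs on the application thread";
}));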
bool Application::notify(QObject* object, QEvent* event) {
if (thread() == QThread::currentThread()) {
PROFILE_RANGE_IF_LONGER(app, "notify", 2)
return QApplication::notify(object, event);
}
return QApplication::notify(object, event);
}
static inline bool isKeyEvent(QEvent::Type type) {
return type == QEvent::KeyPress || type == QEvent::KeyRelease;
}
bool Application::event(QEvent* event) {
if (_aboutToQuit) {
return false;
}
// This helps avoid a deadlock issue early during Application initialization
if (!_isMenuInitialized) {
return QApplication::event(event);
}
if (!Menu::getInstance()) {
return false;
}
if ((event->type() == QEvent::InputMethod || event->type() == QEvent::InputMethodQuery) && handleInputMethodEventForFocusedEntity(event)) {
return true;
}
// Allow focused Entities to handle keyboard input
if (isKeyEvent(event->type()) && handleKeyEventForFocusedEntity(event)) {
return true;
}
int type = event->type();
switch (type) {
case ApplicationEvent::Lambda:
static_cast<LambdaEvent*>(event)->call();
return true;
// An explicit Idle event keeps the idle loop running at a lower interval, but without any rendering
// (see windowMinimizedChanged)
case ApplicationEvent::Idle:
idle();
#ifdef DEBUG_EVENT_QUEUE_DEPTH
// The event queue may very well grow beyond 400, so
// this code should only be enabled on local builds
{
int count = ::hifi::qt::getEventQueueSize(QThread::currentThread());
if (count > 400) {
::hifi::qt::dumpEventQueue(QThread::currentThread());
}
}
#endif // DEBUG_EVENT_QUEUE_DEPTH
_pendingIdleEvent.store(false);
return true;
case QEvent::MouseMove:
mouseMoveEvent(static_cast<QMouseEvent*>(event));
return true;
case QEvent::MouseButtonPress:
mousePressEvent(static_cast<QMouseEvent*>(event));
return true;
case QEvent::MouseButtonDblClick:
mouseDoublePressEvent(static_cast<QMouseEvent*>(event));
return true;
case QEvent::MouseButtonRelease:
mouseReleaseEvent(static_cast<QMouseEvent*>(event));
return true;
case QEvent::KeyPress:
keyPressEvent(static_cast<QKeyEvent*>(event));
return true;
case QEvent::KeyRelease:
keyReleaseEvent(static_cast<QKeyEvent*>(event));
return true;
case QEvent::FocusOut:
focusOutEvent(static_cast<QFocusEvent*>(event));
return true;
case QEvent::FocusIn:
{ // Testing to see if we can set focus when focus is not set to the root window.
_glWidget->activateWindow();
_glWidget->setFocus();
return true;
}
case QEvent::TouchBegin:
touchBeginEvent(static_cast<QTouchEvent*>(event));
event->accept();
return true;
case QEvent::TouchEnd:
touchEndEvent(static_cast<QTouchEvent*>(event));
return true;
case QEvent::TouchUpdate:
touchUpdateEvent(static_cast<QTouchEvent*>(event));
return true;
case QEvent::Gesture:
touchGestureEvent(static_cast<QGestureEvent*>(event));
return true;
case QEvent::Wheel:
wheelEvent(static_cast<QWheelEvent*>(event));
return true;
case QEvent::Drop:
dropEvent(static_cast<QDropEvent*>(event));
return true;
case QEvent::FileOpen:
if (handleFileOpenEvent(static_cast<QFileOpenEvent*>(event))) {
return true;
}
break;
default:
break;
}
return QApplication::event(event);
}
bool Application::eventFilter(QObject* object, QEvent* event) {
auto eventType = event->type();
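// During shutdown, swallow all events except deletion events.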
if (_aboutToQuit && eventType != QEvent::DeferredDelete && eventType != QEvent::Destroy) {
return true;
}
#if defined(Q_OS_MAC)
// On Mac OS, Cmd+LeftClick is treated as a RightClick (more specifically, it seems to
// be Cmd+RightClick without the modifier being dropped). Starting in Qt 5.12, only
// on Mac, the MouseButtonRelease event for these mouse presses is sent to the top
// level QWidgetWindow, but is not propagated further. This means that the Application
// will see a MouseButtonPress, but no MouseButtonRelease, causing the client to get
// stuck in "mouse-look." The cause of the problem is in the way QWidgetWindow processes
// events where QMouseEvent::button() is not equal to QMouseEvent::buttons(). In this case
// QMouseEvent::button() is Qt::RightButton, while QMouseEvent::buttons() is (correctly?)
// Qt::LeftButton.
//
// The change here gets around this problem by capturing these
// pseudo-RightClicks, and re-emitting them as "pure" RightClicks, where
// QMouseEvent::button() == QMouseEvent::buttons() == Qt::RightButton.
//
if (eventType == QEvent::MouseButtonPress) {
auto mouseEvent = static_cast<QMouseEvent*>(event);
if (mouseEvent->button() == Qt::RightButton
&& mouseEvent->buttons() == Qt::LeftButton
&& mouseEvent->modifiers() == Qt::MetaModifier) {
QMouseEvent* newEvent = new QMouseEvent(
QEvent::MouseButtonPress, mouseEvent->localPos(), mouseEvent->windowPos(),
mouseEvent->screenPos(), Qt::RightButton, Qt::MouseButtons(Qt::RightButton),
mouseEvent->modifiers());
QApplication::postEvent(object, newEvent);
return true;
}
}
#endif
if (eventType == QEvent::KeyPress || eventType == QEvent::KeyRelease || eventType == QEvent::MouseMove) {
getRefreshRateManager().resetInactiveTimer();
}
if (event->type() == QEvent::Leave) {
getApplicationCompositor().handleLeaveEvent();
}
if (event->type() == QEvent::ShortcutOverride) {
#if !defined(DISABLE_QML)
if (getOffscreenUI()->shouldSwallowShortcut(event)) {
event->accept();
return true;
}
#endif
// Filter out captured keys before they're used for shortcut actions.
if (_controllerScriptingInterface->isKeyCaptured(static_cast<QKeyEvent*>(event))) {
event->accept();
return true;
}
}
if (event->type() == QEvent::WindowStateChange) {
if (getWindow()->windowState() & Qt::WindowMinimized) {
getRefreshRateManager().setRefreshRateRegime(RefreshRateManager::RefreshRateRegime::MINIMIZED);
} else {
auto* windowStateChangeEvent = static_cast<QWindowStateChangeEvent*>(event);
if (windowStateChangeEvent->oldState() & Qt::WindowMinimized) {
getRefreshRateManager().setRefreshRateRegime(RefreshRateManager::RefreshRateRegime::FOCUS_ACTIVE);
getRefreshRateManager().resetInactiveTimer();
}
}
}
return false;
}
void Application::postLambdaEvent(const std::function<void()>& f) {
if (this->thread() == QThread::currentThread()) {
f();
} else {
QCoreApplication::postEvent(this, new LambdaEvent(f));
}
}
void Application::sendLambdaEvent(const std::function<void()>& f) {
if (this->thread() == QThread::currentThread()) {
f();
} else {
LambdaEvent event(f);
QCoreApplication::sendEvent(this, &event);
}
}
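// Illustrative usage (hypothetical call site): run work on the application thread from elsewhere:
//     qApp->postLambdaEvent([] { qDebug() << "runs on the application thread"; });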
void Application::pushPostUpdateLambda(void* key, const std::function<void()>& func) {
std::unique_lock<std::mutex> guard(_postUpdateLambdasLock);
_postUpdateLambdas[key] = func;
}
// thread-safe
void Application::onPresent(quint32 frameCount) {
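// Coalesce Idle events: only post a new one if the previously posted Idle hasn't been handled yet.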
bool expected = false;
if (_pendingIdleEvent.compare_exchange_strong(expected, true)) {
postEvent(this, new QEvent((QEvent::Type)ApplicationEvent::Idle), Qt::HighEventPriority);
}
expected = false;
if (_graphicsEngine->checkPendingRenderEvent() && !isAboutToQuit()) {
postEvent(_graphicsEngine->_renderEventHandler, new QEvent((QEvent::Type)ApplicationEvent::Render));
}
}
void Application::activeChanged(Qt::ApplicationState state) {
switch (state) {
case Qt::ApplicationActive:
_isForeground = true;
if (!_aboutToQuit && _startUpFinished) {
getRefreshRateManager().setRefreshRateRegime(RefreshRateManager::RefreshRateRegime::FOCUS_ACTIVE);
}
break;
case Qt::ApplicationSuspended:
break;
case Qt::ApplicationHidden:
break;
case Qt::ApplicationInactive:
if (!_aboutToQuit && _startUpFinished) {
getRefreshRateManager().setRefreshRateRegime(RefreshRateManager::RefreshRateRegime::UNFOCUS);
}
break;
default:
_isForeground = false;
break;
}
}
void Application::windowMinimizedChanged(bool minimized) {
// initialize the _minimizedWindowTimer
static std::once_flag once;
std::call_once(once, [&] {
connect(&_minimizedWindowTimer, &QTimer::timeout, this, [] {
QCoreApplication::postEvent(QCoreApplication::instance(), new QEvent(static_cast<QEvent::Type>(Idle)), Qt::HighEventPriority);
});
});
// avoid rendering to the display plugin but continue posting Idle events,
// so that physics continues to simulate and the deadlock watchdog knows we're alive
if (!minimized && !getActiveDisplayPlugin()->isActive()) {
_minimizedWindowTimer.stop();
getActiveDisplayPlugin()->activate();
} else if (minimized && getActiveDisplayPlugin()->isActive()) {
getActiveDisplayPlugin()->deactivate();
_minimizedWindowTimer.start(THROTTLED_SIM_FRAME_PERIOD_MS);
}
}
void Application::keyPressEvent(QKeyEvent* event) {
if (!event->isAutoRepeat()) {
_keysPressed.insert(event->key(), *event);
}
_controllerScriptingInterface->emitKeyPressEvent(event); // send events to any registered scripts
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isKeyCaptured(event) || isInterstitialMode()) {
return;
}
if (hasFocus() && getLoginDialogPoppedUp()) {
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->keyReleaseEvent(event);
}
bool isControlOrCommand = event->modifiers().testFlag(Qt::ControlModifier);
bool isOption = event->modifiers().testFlag(Qt::AltModifier);
switch (event->key()) {
case Qt::Key_4:
case Qt::Key_5:
case Qt::Key_6:
case Qt::Key_7:
if (isControlOrCommand || isOption) {
unsigned int index = static_cast<unsigned int>(event->key() - Qt::Key_1);
const auto& displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
if (index < displayPlugins.size()) {
auto targetPlugin = displayPlugins.at(index);
QString targetName = targetPlugin->getName();
auto menu = Menu::getInstance();
QAction* action = menu->getActionForOption(targetName);
if (action && !action->isChecked()) {
action->trigger();
}
}
}
break;
}
} else if (hasFocus()) {
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->keyPressEvent(event);
}
bool isShifted = event->modifiers().testFlag(Qt::ShiftModifier);
bool isControlOrCommand = event->modifiers().testFlag(Qt::ControlModifier);
bool isMetaOrMacControl = event->modifiers().testFlag(Qt::MetaModifier);
bool isOption = event->modifiers().testFlag(Qt::AltModifier);
switch (event->key()) {
case Qt::Key_Enter:
case Qt::Key_Return:
if (isOption) {
if (_window->isFullScreen()) {
unsetFullscreen();
} else {
setFullscreen(nullptr);
}
}
break;
case Qt::Key_1: {
Menu* menu = Menu::getInstance();
menu->triggerOption(MenuOption::FirstPersonLookAt);
break;
}
case Qt::Key_2: {
Menu* menu = Menu::getInstance();
menu->triggerOption(MenuOption::SelfieCamera);
break;
}
case Qt::Key_3: {
Menu* menu = Menu::getInstance();
menu->triggerOption(MenuOption::LookAtCamera);
break;
}
case Qt::Key_4:
case Qt::Key_5:
case Qt::Key_6:
case Qt::Key_7:
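// Ctrl/Cmd or Alt + 4..7 selects the corresponding display plugin by triggering its Output menu action.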
if (isControlOrCommand || isOption) {
unsigned int index = static_cast<unsigned int>(event->key() - Qt::Key_1);
const auto& displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
if (index < displayPlugins.size()) {
auto targetPlugin = displayPlugins.at(index);
QString targetName = targetPlugin->getName();
auto menu = Menu::getInstance();
QAction* action = menu->getActionForOption(targetName);
if (action && !action->isChecked()) {
action->trigger();
}
}
}
break;
case Qt::Key_G:
if (isShifted && isControlOrCommand && isOption && isMetaOrMacControl) {
static const QString HIFI_FRAMES_FOLDER_VAR = "HIFI_FRAMES_FOLDER";
static const QString GPU_FRAME_FOLDER = QProcessEnvironment::systemEnvironment().contains(HIFI_FRAMES_FOLDER_VAR)
? QProcessEnvironment::systemEnvironment().value(HIFI_FRAMES_FOLDER_VAR)
: "hifiFrames";
static QString GPU_FRAME_TEMPLATE = GPU_FRAME_FOLDER + "/{DATE}_{TIME}";
QString fullPath = FileUtils::computeDocumentPath(FileUtils::replaceDateTimeTokens(GPU_FRAME_TEMPLATE));
if (FileUtils::canCreateFile(fullPath)) {
getActiveDisplayPlugin()->captureFrame(fullPath.toStdString());
}
}
break;
case Qt::Key_X:
if (isShifted && isControlOrCommand) {
auto offscreenUi = getOffscreenUI();
offscreenUi->togglePinned();
//offscreenUi->getSurfaceContext()->engine()->clearComponentCache();
//OffscreenUi::information("Debugging", "Component cache cleared");
// placeholder for dialogs being converted to QML.
}
break;
case Qt::Key_Y:
if (isShifted && isControlOrCommand) {
getActiveDisplayPlugin()->cycleDebugOutput();
}
break;
case Qt::Key_B:
if (isOption) {
controller::InputRecorder* inputRecorder = controller::InputRecorder::getInstance();
inputRecorder->stopPlayback();
}
break;
case Qt::Key_L:
if (isShifted && isControlOrCommand) {
Menu::getInstance()->triggerOption(MenuOption::Log);
} else if (isControlOrCommand) {
auto dialogsManager = DependencyManager::get<DialogsManager>();
dialogsManager->toggleAddressBar();
}
break;
case Qt::Key_R:
if (isControlOrCommand && !event->isAutoRepeat()) {
DependencyManager::get<ScriptEngines>()->reloadAllScripts();
getOffscreenUI()->clearCache();
}
break;
case Qt::Key_Asterisk:
Menu::getInstance()->triggerOption(MenuOption::DefaultSkybox);
break;
case Qt::Key_M:
if (isControlOrCommand) {
auto audioClient = DependencyManager::get<AudioClient>();
audioClient->setMuted(!audioClient->isMuted());
QSharedPointer<scripting::Audio> audioScriptingInterface = qSharedPointerDynamicCast<scripting::Audio>(DependencyManager::get<AudioScriptingInterface>());
if (audioScriptingInterface && audioScriptingInterface->getPTT()) {
audioScriptingInterface->setPushingToTalk(!audioClient->isMuted());
}
}
break;
case Qt::Key_S:
if (isShifted && isControlOrCommand && !isOption) {
Menu::getInstance()->triggerOption(MenuOption::SuppressShortTimings);
}
break;
case Qt::Key_Apostrophe: {
if (isControlOrCommand) {
auto cursor = Cursor::Manager::instance().getCursor();
auto curIcon = cursor->getIcon();
if (curIcon == Cursor::Icon::DEFAULT) {
showCursor(Cursor::Icon::RETICLE);
} else if (curIcon == Cursor::Icon::RETICLE) {
showCursor(Cursor::Icon::SYSTEM);
} else if (curIcon == Cursor::Icon::SYSTEM) {
showCursor(Cursor::Icon::LINK);
} else {
showCursor(Cursor::Icon::DEFAULT);
}
} else if (!event->isAutoRepeat()) {
resetSensors(true);
}
break;
}
case Qt::Key_Backslash:
Menu::getInstance()->triggerOption(MenuOption::Chat);
break;
case Qt::Key_Slash:
Menu::getInstance()->triggerOption(MenuOption::Stats);
break;
case Qt::Key_Plus: {
if (isControlOrCommand && event->modifiers().testFlag(Qt::KeypadModifier)) {
auto& cursorManager = Cursor::Manager::instance();
cursorManager.setScale(cursorManager.getScale() * 1.1f);
} else {
getMyAvatar()->increaseSize();
}
break;
}
case Qt::Key_Minus: {
if (isControlOrCommand && event->modifiers().testFlag(Qt::KeypadModifier)) {
auto& cursorManager = Cursor::Manager::instance();
cursorManager.setScale(cursorManager.getScale() / 1.1f);
} else {
getMyAvatar()->decreaseSize();
}
break;
}
case Qt::Key_Equal:
getMyAvatar()->resetSize();
break;
case Qt::Key_Escape: {
getActiveDisplayPlugin()->abandonCalibration();
break;
}
default:
event->ignore();
break;
}
}
}
void Application::keyReleaseEvent(QKeyEvent* event) {
if (!event->isAutoRepeat()) {
_keysPressed.remove(event->key());
}
#if defined(Q_OS_ANDROID)
if (event->key() == Qt::Key_Back) {
event->accept();
AndroidHelper::instance().requestActivity("Home", false);
}
#endif
_controllerScriptingInterface->emitKeyReleaseEvent(event); // send events to any registered scripts
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isKeyCaptured(event)) {
return;
}
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->keyReleaseEvent(event);
}
}
void Application::focusOutEvent(QFocusEvent* event) {
const auto& inputPlugins = PluginManager::getInstance()->getInputPlugins();
for(const auto& inputPlugin : inputPlugins) {
if (inputPlugin->isActive()) {
inputPlugin->pluginFocusOutEvent();
}
}
// FIXME spacemouse code still needs cleanup
#if 0
//SpacemouseDevice::getInstance().focusOutEvent();
//SpacemouseManager::getInstance().getDevice()->focusOutEvent();
SpacemouseManager::getInstance().ManagerFocusOutEvent();
#endif
synthesizeKeyReleasEvents();
}
void Application::synthesizeKeyReleasEvents() {
// synthesize events for keys currently pressed, since we may not get their release events
// Because our key event handlers may manipulate _keysPressed, let's swap the keys pressed into a local copy,
// clearing the existing list.
QHash<int, QKeyEvent> keysPressed;
std::swap(keysPressed, _keysPressed);
for (auto& ev : keysPressed) {
QKeyEvent synthesizedEvent { QKeyEvent::KeyRelease, ev.key(), Qt::NoModifier, ev.text() };
keyReleaseEvent(&synthesizedEvent);
}
}
void Application::mouseMoveEvent(QMouseEvent* event) {
PROFILE_RANGE(app_input_mouse, __FUNCTION__);
if (_ignoreMouseMove) {
_ignoreMouseMove = false;
return;
}
maybeToggleMenuVisible(event);
auto& compositor = getApplicationCompositor();
// if this is a real mouse event, and we're in HMD mode, then we should use it to move the
// compositor reticle
// handleRealMouseMoveEvent() returns true if we shouldn't process the event further
if (!compositor.fakeEventActive() && compositor.handleRealMouseMoveEvent()) {
return; // bail
}
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto eventPosition = compositor.getMouseEventPosition(event);
QPointF transformedPos = offscreenUi ? offscreenUi->mapToVirtualScreen(eventPosition) : QPointF();
#else
QPointF transformedPos;
#endif
auto button = event->button();
auto buttons = event->buttons();
// If the ReticleClick action is pressed, treat the event as if the LeftMouseButton were included
if (_reticleClickPressed) {
if (button == Qt::NoButton) {
button = Qt::LeftButton;
}
buttons |= Qt::LeftButton;
}
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), button,
buttons, event->modifiers());
if (compositor.getReticleVisible() || !isHMDMode() || !compositor.getReticleOverDesktop() ||
getOverlays().getOverlayAtPoint(glm::vec2(transformedPos.x(), transformedPos.y())) != UNKNOWN_ENTITY_ID) {
getEntities()->mouseMoveEvent(&mappedEvent);
}
_controllerScriptingInterface->emitMouseMoveEvent(&mappedEvent); // send events to any registered scripts
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isMouseCaptured()) {
return;
}
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->mouseMoveEvent(event, _captureMouse, _mouseCaptureTarget);
}
}
void Application::mousePressEvent(QMouseEvent* event) {
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
// If we get a mouse press event it means it wasn't consumed by the offscreen UI,
// hence, we should defocus all of the offscreen UI windows so that keyboard
// shortcuts are not swallowed by them. In particular, WebEngineViews
// will consume all keyboard events.
offscreenUi->unfocusWindows();
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
#else
QPointF transformedPos;
#endif
QMouseEvent mappedEvent(event->type(), transformedPos, event->screenPos(), event->button(), event->buttons(), event->modifiers());
QUuid result = getEntities()->mousePressEvent(&mappedEvent);
setKeyboardFocusEntity(getEntities()->wantsKeyboardFocus(result) ? result : UNKNOWN_ENTITY_ID);
_controllerScriptingInterface->emitMousePressEvent(&mappedEvent); // send events to any registered scripts
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isMouseCaptured()) {
return;
}
#if defined(Q_OS_MAC)
// Fix for macOS right-click dragging on the window when coming from a native window
bool isFocussed = hasFocus();
if (!isFocussed && event->button() == Qt::MouseButton::RightButton) {
setFocus();
isFocussed = true;
}
if (isFocussed) {
#else
if (hasFocus()) {
#endif
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->mousePressEvent(event);
}
}
}
void Application::mouseDoublePressEvent(QMouseEvent* event) {
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
#else
QPointF transformedPos;
#endif
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
event->buttons(), event->modifiers());
getEntities()->mouseDoublePressEvent(&mappedEvent);
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isMouseCaptured()) {
return;
}
_controllerScriptingInterface->emitMouseDoublePressEvent(event);
}
void Application::mouseReleaseEvent(QMouseEvent* event) {
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
#else
QPointF transformedPos;
#endif
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
event->buttons(), event->modifiers());
getEntities()->mouseReleaseEvent(&mappedEvent);
_controllerScriptingInterface->emitMouseReleaseEvent(&mappedEvent); // send events to any registered scripts
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isMouseCaptured()) {
return;
}
if (hasFocus()) {
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->mouseReleaseEvent(event);
}
}
}
void Application::touchBeginEvent(QTouchEvent* event) {
TouchEvent thisEvent(*event); // on touch begin, we don't compare to last event
_controllerScriptingInterface->emitTouchBeginEvent(thisEvent); // send events to any registered scripts
_lastTouchEvent = thisEvent; // and we reset our last event to this event before we call our update
touchUpdateEvent(event);
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isTouchCaptured()) {
return;
}
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchBeginEvent(event);
}
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchBeginEvent(event);
}
if (_touchscreenVirtualPadDevice && _touchscreenVirtualPadDevice->isActive()) {
_touchscreenVirtualPadDevice->touchBeginEvent(event);
}
}
void Application::touchEndEvent(QTouchEvent* event) {
TouchEvent thisEvent(*event, _lastTouchEvent);
_controllerScriptingInterface->emitTouchEndEvent(thisEvent); // send events to any registered scripts
_lastTouchEvent = thisEvent;
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isTouchCaptured()) {
return;
}
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchEndEvent(event);
}
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchEndEvent(event);
}
if (_touchscreenVirtualPadDevice && _touchscreenVirtualPadDevice->isActive()) {
_touchscreenVirtualPadDevice->touchEndEvent(event);
}
// put any application-specific touch behavior below here...
}
void Application::touchUpdateEvent(QTouchEvent* event) {
if (event->type() == QEvent::TouchUpdate) {
TouchEvent thisEvent(*event, _lastTouchEvent);
_controllerScriptingInterface->emitTouchUpdateEvent(thisEvent); // send events to any registered scripts
_lastTouchEvent = thisEvent;
}
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isTouchCaptured()) {
return;
}
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchUpdateEvent(event);
}
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchUpdateEvent(event);
}
if (_touchscreenVirtualPadDevice && _touchscreenVirtualPadDevice->isActive()) {
_touchscreenVirtualPadDevice->touchUpdateEvent(event);
}
}
void Application::touchGestureEvent(QGestureEvent* event) {
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchGestureEvent(event);
}
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchGestureEvent(event);
}
if (_touchscreenVirtualPadDevice && _touchscreenVirtualPadDevice->isActive()) {
_touchscreenVirtualPadDevice->touchGestureEvent(event);
}
}
void Application::wheelEvent(QWheelEvent* event) const {
_controllerScriptingInterface->emitWheelEvent(event); // send events to any registered scripts
// if one of our scripts has asked to capture this event, then stop processing it
if (_controllerScriptingInterface->isWheelCaptured() || getLoginDialogPoppedUp()) {
return;
}
if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->wheelEvent(event);
}
}
void Application::dropEvent(QDropEvent *event) {
const QMimeData* mimeData = event->mimeData();
for (auto& url : mimeData->urls()) {
QString urlString = url.toString();
if (acceptURL(urlString, true)) {
event->acceptProposedAction();
}
}
}
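// Forward input-method events to the event handler of the entity that currently has keyboard focus;
// if the handler accepts the event, record the time and stop further processing.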
bool Application::handleInputMethodEventForFocusedEntity(QEvent* event) {
if (_keyboardFocusedEntity.get() != UNKNOWN_ENTITY_ID) {
switch (event->type()) {
case QEvent::InputMethod:
case QEvent::InputMethodQuery:
{
auto eventHandler = getEntities()->getEventHandler(_keyboardFocusedEntity.get());
if (eventHandler) {
event->setAccepted(false);
QCoreApplication::sendEvent(eventHandler, event);
if (event->isAccepted()) {
_lastAcceptedKeyPress = usecTimestampNow();
return true;
}
}
break;
}
default:
break;
}
}
return false;
}
bool Application::handleKeyEventForFocusedEntity(QEvent* event) {
if (_keyboardFocusedEntity.get() != UNKNOWN_ENTITY_ID) {
switch (event->type()) {
case QEvent::KeyPress:
case QEvent::KeyRelease:
{
auto eventHandler = getEntities()->getEventHandler(_keyboardFocusedEntity.get());
if (eventHandler) {
event->setAccepted(false);
QCoreApplication::sendEvent(eventHandler, event);
if (event->isAccepted()) {
_lastAcceptedKeyPress = usecTimestampNow();
return true;
}
}
break;
}
default:
break;
}
}
return false;
}
bool Application::handleFileOpenEvent(QFileOpenEvent* fileEvent) {
QUrl url = fileEvent->url();
if (!url.isEmpty()) {
QString urlString = url.toString();
if (canAcceptURL(urlString)) {
return acceptURL(urlString);
}
}
return false;
}

View file

@ -0,0 +1,595 @@
//
// Application_Graphics.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "Application.h"
#include <memory>
#include <QtQml/QQmlContext>
#include <AudioScriptingInterface.h>
#include <display-plugins/CompositorHelper.h>
#include <ErrorDialog.h>
#include <FramebufferCache.h>
#include <gl/GLHelpers.h>
#include <input-plugins/KeyboardMouseDevice.h>
#include <input-plugins/TouchscreenDevice.h>
#include <input-plugins/TouchscreenVirtualPadDevice.h>
#include <MainWindow.h>
#include <plugins/PluginManager.h>
#include <Preferences.h>
#include <RenderableWebEntityItem.h>
#include <scripting/AccountServicesScriptingInterface.h>
#include <scripting/HMDScriptingInterface.h>
#include <scripting/PlatformInfoScriptingInterface.h>
#include <scripting/RenderScriptingInterface.h>
#include <scripting/TTSScriptingInterface.h>
#include <Tooltip.h>
#include <ui/AddressBarDialog.h>
#include <ui/Keyboard.h>
#include <ui/LoginDialog.h>
#include <ui/OffscreenQmlSurfaceCache.h>
#include <ui/PreferencesDialog.h>
#include <ui/ResourceImageItem.h>
#include <ui/TabletScriptingInterface.h>
#include <ui/types/ContextAwareProfile.h>
#include <ui/UpdateDialog.h>
#include "DeadlockWatchdog.h"
#include "GLCanvas.h"
#include "InterfaceLogging.h"
#include "LODManager.h"
#include "Menu.h"
#include "webbrowser/WebBrowserSuggestionsEngine.h"
#if defined(Q_OS_ANDROID)
#include "AndroidHelper.h"
#endif
Q_GUI_EXPORT void qt_gl_set_global_share_context(QOpenGLContext *context);
Q_GUI_EXPORT QOpenGLContext *qt_gl_global_share_context();
static const QString SYSTEM_TABLET = "com.highfidelity.interface.tablet.system";
void Application::initializeGL() {
qCDebug(interfaceapp) << "Created Display Window.";
#ifdef DISABLE_QML
setAttribute(Qt::AA_DontCheckOpenGLContextThreadAffinity);
#endif
// initialize glut for shape drawing; Qt apparently initializes it on OS X
if (_isGLInitialized) {
return;
} else {
_isGLInitialized = true;
}
_glWidget->windowHandle()->setFormat(getDefaultOpenGLSurfaceFormat());
// When loading QtWebEngineWidgets, it creates a global share context on startup.
// We have to account for this possibility by checking here for an existing
// global share context
auto globalShareContext = qt_gl_global_share_context();
#if !defined(DISABLE_QML)
// Build a shared canvas / context for the Chromium processes
if (!globalShareContext) {
// Chromium rendering uses some GL functions that prevent nSight from capturing
// frames, so we only create the shared context if nsight is NOT active.
if (!nsightActive()) {
// FIXME hack access to the internal share context for the Chromium helper
// Normally we'd want to use QWebEngine::initialize(), but we can't because
// our primary context is a QGLWidget, which can't easily be initialized to share
// from a QOpenGLContext.
//
// So instead we create a new offscreen context to share with the QGLWidget,
// and manually set THAT to be the shared context for the Chromium helper
OffscreenGLCanvas* chromiumShareContext = new OffscreenGLCanvas();
chromiumShareContext->setObjectName("ChromiumShareContext");
auto format = QSurfaceFormat::defaultFormat();
#ifdef Q_OS_MAC
// On macOS, the primary shared OpenGL context must be a 3.2 core context,
// or Chromium flips out and spews error spam (but renders fine)
format.setMajorVersion(3);
format.setMinorVersion(2);
#endif
chromiumShareContext->setFormat(format);
chromiumShareContext->create();
if (!chromiumShareContext->makeCurrent()) {
qCWarning(interfaceapp, "Unable to make chromium shared context current");
}
globalShareContext = chromiumShareContext->getContext();
qt_gl_set_global_share_context(globalShareContext);
chromiumShareContext->doneCurrent();
}
}
#endif
_glWidget->createContext(globalShareContext);
if (!_glWidget->makeCurrent()) {
qCWarning(interfaceapp, "Unable to make window context current");
}
// Populate the global OpenGL context based on the information for the primary window GL context
gl::ContextInfo::get(true);
#if !defined(DISABLE_QML)
QStringList chromiumFlags;
// HACK: re-expose mic and camera to prevent crash on domain-change in chromium's media::FakeAudioInputStream::ReadAudioFromSource()
// Bug 21993: disable microphone and camera input
//chromiumFlags << "--use-fake-device-for-media-stream";
// Disable signed distance field font rendering on ATI/AMD GPUs, due to
// https://highfidelity.manuscript.com/f/cases/13677/Text-showing-up-white-on-Marketplace-app
std::string vendor{ (const char*)glGetString(GL_VENDOR) };
if ((vendor.find("AMD") != std::string::npos) || (vendor.find("ATI") != std::string::npos)) {
chromiumFlags << "--disable-distance-field-text";
}
// Ensure all Qt webengine processes launched from us have the appropriate command line flags
if (!chromiumFlags.empty()) {
qputenv("QTWEBENGINE_CHROMIUM_FLAGS", chromiumFlags.join(' ').toLocal8Bit());
}
#endif
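// If no global share context exists yet (e.g. QML is disabled or nSight is active), use the window's own GL context.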
if (!globalShareContext) {
globalShareContext = _glWidget->qglContext();
qt_gl_set_global_share_context(globalShareContext);
}
// Build a shared canvas / context for the QML rendering
#if !defined(DISABLE_QML)
{
OffscreenGLCanvas* qmlShareContext = new OffscreenGLCanvas();
qmlShareContext->setObjectName("QmlShareContext");
qmlShareContext->create(globalShareContext);
if (!qmlShareContext->makeCurrent()) {
qCWarning(interfaceapp, "Unable to make QML shared context current");
}
OffscreenQmlSurface::setSharedContext(qmlShareContext->getContext());
qmlShareContext->doneCurrent();
if (!_glWidget->makeCurrent()) {
qCWarning(interfaceapp, "Unable to make window context current");
}
}
#endif
// Build an offscreen GL context for the main thread.
_glWidget->makeCurrent();
glClearColor(0.2f, 0.2f, 0.2f, 1);
glClear(GL_COLOR_BUFFER_BIT);
_glWidget->swapBuffers();
_graphicsEngine->initializeGPU(_glWidget);
}
void Application::initializeRenderEngine() {
// FIXME: on low end systems the shaders take up to 1 minute to compile, so we pause the deadlock watchdog thread.
DeadlockWatchdogThread::withPause([&] {
_graphicsEngine->initializeRender();
DependencyManager::get<Keyboard>()->registerKeyboardHighlighting();
});
}
#if !defined(DISABLE_QML)
static const char* EXCLUSION_GROUP_KEY = "exclusionGroup";
static void addDisplayPluginToMenu(const DisplayPluginPointer& displayPlugin, int index, bool active) {
auto menu = Menu::getInstance();
QString name = displayPlugin->getName();
auto grouping = displayPlugin->getGrouping();
QString groupingMenu { "" };
Q_ASSERT(!menu->menuItemExists(MenuOption::OutputMenu, name));
// assign the menu grouping based on the plugin grouping
switch (grouping) {
case Plugin::ADVANCED:
groupingMenu = "Advanced";
break;
case Plugin::DEVELOPER:
groupingMenu = "Developer";
break;
default:
groupingMenu = "Standard";
break;
}
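// All display plugin actions share a single exclusive QActionGroup so only one output plugin can be checked at a time.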
static QActionGroup* displayPluginGroup = nullptr;
if (!displayPluginGroup) {
displayPluginGroup = new QActionGroup(menu);
displayPluginGroup->setExclusive(true);
}
auto parent = menu->getMenu(MenuOption::OutputMenu);
auto action = menu->addActionToQMenuAndActionHash(parent,
name, QKeySequence(Qt::CTRL + (Qt::Key_0 + index)), qApp,
SLOT(updateDisplayMode()),
QAction::NoRole, Menu::UNSPECIFIED_POSITION, groupingMenu);
action->setCheckable(true);
action->setChecked(active);
displayPluginGroup->addAction(action);
action->setProperty(EXCLUSION_GROUP_KEY, QVariant::fromValue(displayPluginGroup));
Q_ASSERT(menu->menuItemExists(MenuOption::OutputMenu, name));
}
#endif
void Application::initializeUi() {
// Allow remote QML content from trusted sources ONLY
{
auto defaultUrlValidator = OffscreenQmlSurface::getUrlValidator();
auto newValidator = [=](const QUrl& url) -> bool {
QString allowlistPrefix = "[ALLOWLIST ENTITY SCRIPTS]";
QList<QString> safeURLS = { "" };
safeURLS += qEnvironmentVariable("EXTRA_ALLOWLIST").trimmed().split(QRegExp("\\s*,\\s*"), Qt::SkipEmptyParts);
// PULL SAFEURLS FROM INTERFACE.JSON Settings
QVariant raw = Setting::Handle<QVariant>("private/settingsSafeURLS").get();
QStringList settingsSafeURLS = raw.toString().trimmed().split(QRegExp("\\s*[,\r\n]+\\s*"), Qt::SkipEmptyParts);
safeURLS += settingsSafeURLS;
// END PULL SAFEURLS FROM INTERFACE.JSON Settings
if (QUrl(NetworkingConstants::OVERTE_COMMUNITY_APPLICATIONS).isParentOf(url)) {
return true;
} else {
for (const auto& str : safeURLS) {
if (!str.isEmpty() && url.toString().endsWith(".qml") &&
url.toString().startsWith(str)) {
qCDebug(interfaceapp) << "Found matching url!" << url.host();
return true;
}
}
}
qCDebug(interfaceapp) << "No matching url" << url.host();
return defaultUrlValidator(url);
};
OffscreenQmlSurface::setUrlValidator(newValidator);
}
AddressBarDialog::registerType();
ErrorDialog::registerType();
LoginDialog::registerType();
Tooltip::registerType();
UpdateDialog::registerType();
QmlContextCallback platformInfoCallback = [](QQmlContext* context) {
context->setContextProperty("PlatformInfo", new PlatformInfoScriptingInterface());
};
OffscreenQmlSurface::addAllowlistContextHandler({
QUrl{ "hifi/tablet/TabletAddressDialog.qml" },
QUrl{ "hifi/Card.qml" },
QUrl{ "hifi/Pal.qml" },
QUrl{ "hifi/NameCard.qml" },
}, platformInfoCallback);
QmlContextCallback ttsCallback = [](QQmlContext* context) {
context->setContextProperty("TextToSpeech", DependencyManager::get<TTSScriptingInterface>().data());
};
OffscreenQmlSurface::addAllowlistContextHandler({
QUrl{ "hifi/tts/TTS.qml" }
}, ttsCallback);
qmlRegisterType<ResourceImageItem>("Hifi", 1, 0, "ResourceImageItem");
qmlRegisterType<Preference>("Hifi", 1, 0, "Preference");
qmlRegisterType<WebBrowserSuggestionsEngine>("HifiWeb", 1, 0, "WebBrowserSuggestionsEngine");
{
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
tabletScriptingInterface->getTablet(SYSTEM_TABLET);
}
auto offscreenUi = getOffscreenUI();
connect(offscreenUi.data(), &hifi::qml::OffscreenSurface::rootContextCreated,
this, &Application::onDesktopRootContextCreated);
connect(offscreenUi.data(), &hifi::qml::OffscreenSurface::rootItemCreated,
this, &Application::onDesktopRootItemCreated);
#if !defined(DISABLE_QML)
offscreenUi->setProxyWindow(_window->windowHandle());
// OffscreenUi is a subclass of OffscreenQmlSurface specifically designed to
// support the window management and scripting proxies for VR use
DeadlockWatchdogThread::withPause([&] {
offscreenUi->createDesktop(PathUtils::qmlUrl("hifi/Desktop.qml"));
});
// FIXME either expose so that dialogs can set this themselves or
// do better detection in the offscreen UI of what has focus
offscreenUi->setNavigationFocused(false);
#else
_window->setMenuBar(new Menu());
#endif
setupPreferences();
#if !defined(DISABLE_QML)
_glWidget->installEventFilter(offscreenUi.data());
offscreenUi->setMouseTranslator([=](const QPointF& pt) {
QPointF result = pt;
auto displayPlugin = getActiveDisplayPlugin();
if (displayPlugin->isHmd()) {
getApplicationCompositor().handleRealMouseMoveEvent(false);
auto resultVec = getApplicationCompositor().getReticlePosition();
result = QPointF(resultVec.x, resultVec.y);
}
return result.toPoint();
});
// BUGZ-1365 - the root context should explicitly default to being unable to load local HTML content
ContextAwareProfile::restrictContext(offscreenUi->getSurfaceContext(), true);
offscreenUi->resume();
#endif
connect(_window, &MainWindow::windowGeometryChanged, [this](const QRect& r){
resizeGL();
if (_touchscreenVirtualPadDevice) {
_touchscreenVirtualPadDevice->resize();
}
});
// This will set up the input plugins UI
for(const auto& inputPlugin : PluginManager::getInstance()->getInputPlugins()) {
if (KeyboardMouseDevice::NAME == inputPlugin->getName()) {
_keyboardMouseDevice = std::dynamic_pointer_cast<KeyboardMouseDevice>(inputPlugin);
}
if (TouchscreenDevice::NAME == inputPlugin->getName()) {
_touchscreenDevice = std::dynamic_pointer_cast<TouchscreenDevice>(inputPlugin);
}
if (TouchscreenVirtualPadDevice::NAME == inputPlugin->getName()) {
_touchscreenVirtualPadDevice = std::dynamic_pointer_cast<TouchscreenVirtualPadDevice>(inputPlugin);
#if defined(ANDROID_APP_INTERFACE)
auto& virtualPadManager = VirtualPad::Manager::instance();
connect(&virtualPadManager, &VirtualPad::Manager::hapticFeedbackRequested,
this, [](int duration) {
AndroidHelper::instance().performHapticFeedback(duration);
});
#endif
}
}
auto compositorHelper = DependencyManager::get<CompositorHelper>();
connect(compositorHelper.data(), &CompositorHelper::allowMouseCaptureChanged, this, [=] {
if (isHMDMode()) {
auto compositorHelper = DependencyManager::get<CompositorHelper>(); // don't capture outer smartpointer
showCursor(compositorHelper->getAllowMouseCapture() ?
Cursor::Manager::lookupIcon(_preferredCursor.get()) :
Cursor::Icon::SYSTEM);
}
});
#if !defined(DISABLE_QML)
// Pre-create a couple of offscreen surfaces to speed up tablet UI
auto offscreenSurfaceCache = DependencyManager::get<OffscreenQmlSurfaceCache>();
offscreenSurfaceCache->setOnRootContextCreated([&](const QString& rootObject, QQmlContext* surfaceContext) {
if (rootObject == TabletScriptingInterface::QML) {
// in Qt 5.10.0 there is already an "Audio" object in the QML context,
// though I failed to find it (from QtMultimedia?), so let ours be "AudioScriptingInterface"
surfaceContext->setContextProperty("AudioScriptingInterface", DependencyManager::get<AudioScriptingInterface>().data());
surfaceContext->setContextProperty("Account", AccountServicesScriptingInterface::getInstance()); // DEPRECATED - TO BE REMOVED
}
});
offscreenSurfaceCache->reserve(TabletScriptingInterface::QML, 1);
offscreenSurfaceCache->reserve(render::entities::WebEntityRenderer::QML, 2);
#endif
flushMenuUpdates();
#if !defined(DISABLE_QML)
// Now that the menu is instantiated, ensure the display plugin menu is properly updated
{
DisplayPluginList displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
// first sort the plugins into groupings: standard, advanced, developer
std::stable_sort(displayPlugins.begin(), displayPlugins.end(),
[](const DisplayPluginPointer& a, const DisplayPluginPointer& b) -> bool { return a->getGrouping() < b->getGrouping(); });
int dpIndex = 1;
// concatenate the groupings into a single list in the order: standard, advanced, developer
for(const auto& displayPlugin : displayPlugins) {
addDisplayPluginToMenu(displayPlugin, dpIndex, _displayPlugin == displayPlugin);
dpIndex++;
}
// after all plugins have been added to the menu, add a separator to the menu
auto parent = getPrimaryMenu()->getMenu(MenuOption::OutputMenu);
parent->addSeparator();
}
#endif
// The display plugins are created before the menu now, so we need to do this here to hide the menu bar
// now that it exists
if (_window && _window->isFullScreen()) {
setFullscreen(nullptr, true);
}
setIsInterstitialMode(true);
#if defined(DISABLE_QML) && defined(Q_OS_LINUX)
resumeAfterLoginDialogActionTaken();
#endif
}
void Application::resizeGL() {
PROFILE_RANGE(render, __FUNCTION__);
if (nullptr == _displayPlugin) {
return;
}
auto displayPlugin = getActiveDisplayPlugin();
// Set the desired FBO texture size. If it hasn't changed, this does nothing.
// Otherwise, it must rebuild the FBOs
uvec2 framebufferSize = displayPlugin->getRecommendedRenderSize();
uvec2 renderSize = uvec2(framebufferSize);
if (_renderResolution != renderSize) {
_renderResolution = renderSize;
DependencyManager::get<FramebufferCache>()->setFrameBufferSize(fromGlm(renderSize));
}
// FIXME the aspect ratio for stereo displays is incorrect based on this.
float aspectRatio = displayPlugin->getRecommendedAspectRatio();
_myCamera.setProjection(glm::perspective(glm::radians(_fieldOfView.get()), aspectRatio,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
// Possible change in aspect ratio
{
QMutexLocker viewLocker(&_viewMutex);
_myCamera.loadViewFrustum(_viewFrustum);
}
#if !defined(DISABLE_QML)
getOffscreenUI()->resize(fromGlm(displayPlugin->getRecommendedUiSize()));
#endif
}
glm::uvec2 Application::getCanvasSize() const {
return glm::uvec2(_glWidget->width(), _glWidget->height());
}
float Application::getRenderResolutionScale() const {
return RenderScriptingInterface::getInstance()->getViewportResolutionScale();
}
void Application::updateRenderArgs(float deltaTime) {
_graphicsEngine->editRenderArgs([this, deltaTime](AppRenderArgs& appRenderArgs) {
PerformanceTimer perfTimer("editRenderArgs");
appRenderArgs._headPose = getHMDSensorPose();
auto myAvatar = getMyAvatar();
// update the avatar with a fresh HMD pose
{
PROFILE_RANGE(render, "/updateAvatar");
myAvatar->updateFromHMDSensorMatrix(appRenderArgs._headPose);
}
auto lodManager = DependencyManager::get<LODManager>();
float sensorToWorldScale = getMyAvatar()->getSensorToWorldScale();
appRenderArgs._sensorToWorldScale = sensorToWorldScale;
appRenderArgs._sensorToWorld = getMyAvatar()->getSensorToWorldMatrix();
{
PROFILE_RANGE(render, "/buildFrustrumAndArgs");
{
QMutexLocker viewLocker(&_viewMutex);
// adjust near clip plane to account for sensor scaling.
auto adjustedProjection = glm::perspective(glm::radians(_fieldOfView.get()),
getActiveDisplayPlugin()->getRecommendedAspectRatio(),
DEFAULT_NEAR_CLIP * sensorToWorldScale,
DEFAULT_FAR_CLIP);
_viewFrustum.setProjection(adjustedProjection);
_viewFrustum.calculate();
}
appRenderArgs._renderArgs = RenderArgs(_graphicsEngine->getGPUContext(), lodManager->getVisibilityDistance(),
lodManager->getBoundaryLevelAdjust(), lodManager->getLODFarHalfAngleTan(), lodManager->getLODNearHalfAngleTan(),
lodManager->getLODFarDistance(), lodManager->getLODNearDistance(), RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::MONO, RenderArgs::DEFERRED, RenderArgs::RENDER_DEBUG_NONE);
appRenderArgs._renderArgs._scene = getMain3DScene();
{
QMutexLocker viewLocker(&_viewMutex);
appRenderArgs._renderArgs.setViewFrustum(_viewFrustum);
}
}
{
PROFILE_RANGE(render, "/resizeGL");
bool showWarnings = false;
bool suppressShortTimings = false;
auto menu = Menu::getInstance();
if (menu) {
suppressShortTimings = menu->isOptionChecked(MenuOption::SuppressShortTimings);
showWarnings = menu->isOptionChecked(MenuOption::PipelineWarnings);
}
PerformanceWarning::setSuppressShortTimings(suppressShortTimings);
PerformanceWarning warn(showWarnings, "Application::paintGL()");
resizeGL();
}
this->updateCamera(appRenderArgs._renderArgs, deltaTime);
appRenderArgs._eyeToWorld = _myCamera.getTransform();
appRenderArgs._isStereo = false;
{
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
float ipdScale = hmdInterface->getIPDScale();
// scale IPD by sensorToWorldScale, to make the world seem larger or smaller accordingly.
ipdScale *= sensorToWorldScale;
auto baseProjection = appRenderArgs._renderArgs.getViewFrustum().getProjection();
if (getActiveDisplayPlugin()->isStereo()) {
// Stereo modes will typically have a larger projection matrix overall,
// so we ask for the 'mono' projection matrix, which for stereo and HMD
// plugins will imply the combined projection for both eyes.
//
// This is properly implemented for the Oculus plugins, but for OpenVR
// and Stereo displays I'm not sure how to get / calculate it, so we're
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(getActiveDisplayPlugin()->getCullingProjection(baseProjection));
appRenderArgs._isStereo = true;
auto& eyeOffsets = appRenderArgs._eyeOffsets;
auto& eyeProjections = appRenderArgs._eyeProjections;
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can).
for_each_eye([&](Eye eye) {
// For providing the stereo eye views, the HMD head pose has already been
// applied to the avatar, so we need to get the difference between the head
// pose applied to the avatar and the per eye pose, and use THAT as
// the per-eye stereo matrix adjustment.
mat4 eyeToHead = getActiveDisplayPlugin()->getEyeToHeadTransform(eye);
// Grab the translation
vec3 eyeOffset = glm::vec3(eyeToHead[3]);
// Apply IPD scaling
mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * ipdScale);
eyeOffsets[eye] = eyeOffsetTransform;
eyeProjections[eye] = getActiveDisplayPlugin()->getEyeProjection(eye, baseProjection);
});
// Configure the type of display / stereo
appRenderArgs._renderArgs._displayMode = (isHMDMode() ? RenderArgs::STEREO_HMD : RenderArgs::STEREO_MONITOR);
}
}
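// Configure stencil masking from the active display plugin; mesh-based masks also need a mesh operator.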
appRenderArgs._renderArgs._stencilMaskMode = getActiveDisplayPlugin()->getStencilMaskMode();
if (appRenderArgs._renderArgs._stencilMaskMode == StencilMaskMode::MESH) {
appRenderArgs._renderArgs._stencilMaskOperator = getActiveDisplayPlugin()->getStencilMaskMeshOperator();
}
{
QMutexLocker viewLocker(&_viewMutex);
_myCamera.loadViewFrustum(_displayViewFrustum);
appRenderArgs._view = glm::inverse(_displayViewFrustum.getView());
}
{
QMutexLocker viewLocker(&_viewMutex);
appRenderArgs._renderArgs.setViewFrustum(_displayViewFrustum);
}
// HACK
// load the view frustum
// FIXME: This preDisplaySide call is temporary until we create a separate render::scene for the mirror rendering.
// Then we can move this logic into the Avatar::simulate call.
myAvatar->preDisplaySide(&appRenderArgs._renderArgs);
});
}

View file

@ -0,0 +1,418 @@
//
// Application_Plugins.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "Application.h"
#include <QtCore/QCommandLineParser>
#include <input-plugins/InputPlugin.h>
#include <display-plugins/DisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <OffscreenUi.h>
#include <plugins/PluginManager.h>
#include <plugins/PluginUtils.h>
#include <scripting/HMDScriptingInterface.h>
#include <UserActivityLogger.h>
#include "AudioClient.h"
#include "InterfaceLogging.h"
#include "Menu.h"
static const int INTERVAL_TO_CHECK_HMD_WORN_STATUS = 500; // milliseconds
static const QString ACTIVE_DISPLAY_PLUGIN_SETTING_NAME = "activeDisplayPlugin";
static const QString DESKTOP_DISPLAY_PLUGIN_NAME = "Desktop";
// Statically provided display and input plugins
extern DisplayPluginList getDisplayPlugins();
extern InputPluginList getInputPlugins();
extern void saveInputPluginSettings(const InputPluginList& plugins);
void Application::initializePluginManager(const QCommandLineParser& parser) {
DependencyManager::set<PluginManager>();
auto pluginManager = PluginManager::getInstance();
// To avoid any confusion: the getInputPlugins and getDisplayPlugins are not the ones
// from PluginManager, but functions exported by input-plugins/InputPlugin.cpp and
// display-plugins/DisplayPlugin.cpp.
//
// These functions provide the plugin manager with static default plugins.
pluginManager->setInputPluginProvider([] { return getInputPlugins(); });
pluginManager->setDisplayPluginProvider([] { return getDisplayPlugins(); });
pluginManager->setInputPluginSettingsPersister([](const InputPluginList& plugins) { saveInputPluginSettings(plugins); });
// This must be a member function -- PluginManager must exist, and for that
// QApplication must exist, or it can't find the plugin path, as QCoreApplication::applicationDirPath
// won't work yet.
if (parser.isSet("display")) {
auto preferredDisplays = parser.value("display").split(',', Qt::SkipEmptyParts);
qInfo() << "Setting prefered display plugins:" << preferredDisplays;
PluginManager::getInstance()->setPreferredDisplayPlugins(preferredDisplays);
}
if (parser.isSet("disableDisplayPlugins")) {
auto disabledDisplays = parser.value("disableDisplayPlugins").split(',', Qt::SkipEmptyParts);
qInfo() << "Disabling following display plugins:" << disabledDisplays;
PluginManager::getInstance()->disableDisplays(disabledDisplays);
}
if (parser.isSet("disableInputPlugins")) {
auto disabledInputs = parser.value("disableInputPlugins").split(',', Qt::SkipEmptyParts);
qInfo() << "Disabling following input plugins:" << disabledInputs;
PluginManager::getInstance()->disableInputs(disabledInputs);
}
}
void Application::shutdownPlugins() {}
void Application::initializeDisplayPlugins() {
const auto& displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
Setting::Handle<QString> activeDisplayPluginSetting { ACTIVE_DISPLAY_PLUGIN_SETTING_NAME, displayPlugins.at(0)->getName() };
auto lastActiveDisplayPluginName = activeDisplayPluginSetting.get();
auto defaultDisplayPlugin = displayPlugins.at(0);
// One-time initialization code
DisplayPluginPointer targetDisplayPlugin;
for(const auto& displayPlugin : displayPlugins) {
displayPlugin->setContext(_graphicsEngine->getGPUContext());
if (displayPlugin->getName() == lastActiveDisplayPluginName) {
targetDisplayPlugin = displayPlugin;
}
if (!_autoSwitchDisplayModeSupportedHMDPlugin) {
if (displayPlugin->isHmd() && displayPlugin->getSupportsAutoSwitch()) {
_autoSwitchDisplayModeSupportedHMDPlugin = displayPlugin;
_autoSwitchDisplayModeSupportedHMDPluginName = _autoSwitchDisplayModeSupportedHMDPlugin->getName();
_previousHMDWornStatus = _autoSwitchDisplayModeSupportedHMDPlugin->isDisplayVisible() && _autoSwitchDisplayModeSupportedHMDPlugin->isActive();
}
}
QObject::connect(displayPlugin.get(), &DisplayPlugin::recommendedFramebufferSizeChanged,
[this](const QSize& size) { resizeGL(); });
QObject::connect(displayPlugin.get(), &DisplayPlugin::resetSensorsRequested, this, &Application::requestReset);
if (displayPlugin->isHmd()) {
auto hmdDisplayPlugin = dynamic_cast<HmdDisplayPlugin*>(displayPlugin.get());
QObject::connect(hmdDisplayPlugin, &HmdDisplayPlugin::hmdMountedChanged,
DependencyManager::get<HMDScriptingInterface>().data(), &HMDScriptingInterface::mountedChanged);
QObject::connect(hmdDisplayPlugin, &HmdDisplayPlugin::hmdVisibleChanged, this, &Application::hmdVisibleChanged);
}
}
// The default display plugin needs to be activated first, otherwise the display plugin thread
// may be launched by an external plugin, which is bad
setDisplayPlugin(defaultDisplayPlugin);
// Now set the desired plugin if it's not the same as the default plugin
if (targetDisplayPlugin && (targetDisplayPlugin != defaultDisplayPlugin)) {
setDisplayPlugin(targetDisplayPlugin);
}
if (_autoSwitchDisplayModeSupportedHMDPlugin) {
if (getActiveDisplayPlugin() != _autoSwitchDisplayModeSupportedHMDPlugin && !_autoSwitchDisplayModeSupportedHMDPlugin->isSessionActive()) {
startHMDStandBySession();
}
// Poll periodically to check whether the user is wearing the HMD, and switch the display mode accordingly:
// if the user puts on the HMD, switch to VR mode; if the user removes it, switch to Desktop mode.
QTimer* autoSwitchDisplayModeTimer = new QTimer(this);
connect(autoSwitchDisplayModeTimer, SIGNAL(timeout()), this, SLOT(switchDisplayMode()));
autoSwitchDisplayModeTimer->start(INTERVAL_TO_CHECK_HMD_WORN_STATUS);
}
// Submit a default frame to render until the engine starts up
updateRenderArgs(0.0f);
}
DisplayPluginPointer Application::getActiveDisplayPlugin() const {
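// Off the main thread, return the current plugin under the lock. On the main thread, lazily
// resolve a plugin via updateDisplayMode() if none has been activated yet.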
if (QThread::currentThread() != thread()) {
std::unique_lock<std::mutex> lock(_displayPluginLock);
return _displayPlugin;
}
if (!_aboutToQuit && !_displayPlugin) {
const_cast<Application*>(this)->updateDisplayMode();
Q_ASSERT(_displayPlugin);
}
return _displayPlugin;
}
void Application::setActiveDisplayPlugin(const QString& pluginName) {
DisplayPluginPointer newDisplayPlugin;
for (const DisplayPluginPointer& displayPlugin : PluginManager::getInstance()->getDisplayPlugins()) {
QString name = displayPlugin->getName();
if (pluginName == name) {
newDisplayPlugin = displayPlugin;
break;
}
}
if (newDisplayPlugin) {
setDisplayPlugin(newDisplayPlugin);
}
}
glm::uvec2 Application::getUiSize() const {
static const uint MIN_SIZE = 1;
glm::uvec2 result(MIN_SIZE);
if (_displayPlugin) {
result = getActiveDisplayPlugin()->getRecommendedUiSize();
}
return result;
}
QRect Application::getRecommendedHUDRect() const {
auto uiSize = getUiSize();
QRect result(0, 0, uiSize.x, uiSize.y);
if (_displayPlugin) {
result = getActiveDisplayPlugin()->getRecommendedHUDRect();
}
return result;
}
glm::vec2 Application::getDeviceSize() const {
static const int MIN_SIZE = 1;
glm::vec2 result(MIN_SIZE);
if (_displayPlugin) {
result = getActiveDisplayPlugin()->getRecommendedRenderSize();
}
return result;
}
bool Application::isThrottleRendering() const {
if (_displayPlugin) {
return getActiveDisplayPlugin()->isThrottled();
}
return false;
}
float Application::getTargetRenderFrameRate() const {
return getActiveDisplayPlugin()->getTargetFrameRate();
}
bool Application::hasRiftControllers() {
return PluginUtils::isOculusTouchControllerAvailable();
}
bool Application::hasViveControllers() {
return PluginUtils::isViveControllerAvailable();
}
bool Application::isHMDMode() const {
return getActiveDisplayPlugin()->isHmd();
}
mat4 Application::getHMDSensorPose() const {
if (isHMDMode()) {
return getActiveDisplayPlugin()->getHeadPose();
}
return mat4();
}
mat4 Application::getEyeOffset(int eye) const {
// FIXME invert?
return getActiveDisplayPlugin()->getEyeToHeadTransform((Eye)eye);
}
mat4 Application::getEyeProjection(int eye) const {
QMutexLocker viewLocker(&_viewMutex);
if (isHMDMode()) {
return getActiveDisplayPlugin()->getEyeProjection((Eye)eye, _viewFrustum.getProjection());
}
return _viewFrustum.getProjection();
}
// resetSensors() is a bit of a vestigial feature. It used to be used for the Oculus DK2 to recenter the view around
// the current head orientation. With the introduction of "room scale" tracking we no longer need that particular
// feature. However, we still use this to reset face trackers, eye trackers, audio and to optionally re-load the avatar
// rig and animations from scratch.
void Application::resetSensors(bool andReload) {
_overlayConductor.centerUI();
getActiveDisplayPlugin()->resetSensors();
getMyAvatar()->reset(true, andReload);
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "reset", Qt::QueuedConnection);
}
void Application::updateDisplayMode() {
// Unsafe to call this method from anything but the main thread
if (QThread::currentThread() != thread()) {
qFatal("Attempted to switch display plugins from a non-main thread");
}
// One-time initialization code that depends on the UI being available
const auto& displayPlugins = getDisplayPlugins();
// Default to the first item on the list, in case none of the menu items match
DisplayPluginPointer newDisplayPlugin = displayPlugins.at(0);
auto menu = getPrimaryMenu();
if (menu) {
for (const auto& displayPlugin : PluginManager::getInstance()->getDisplayPlugins()) {
QString name = displayPlugin->getName();
QAction* action = menu->getActionForOption(name);
// The menu action might have been removed if the display plugin was lost
if (!action) {
continue;
}
if (action->isChecked()) {
newDisplayPlugin = displayPlugin;
break;
}
}
}
if (newDisplayPlugin == _displayPlugin) {
return;
}
setDisplayPlugin(newDisplayPlugin);
}
void Application::switchDisplayMode() {
if (!_autoSwitchDisplayModeSupportedHMDPlugin) {
return;
}
bool currentHMDWornStatus = _autoSwitchDisplayModeSupportedHMDPlugin->isDisplayVisible();
if (currentHMDWornStatus != _previousHMDWornStatus) {
// Switch to the respective mode as soon as currentHMDWornStatus changes
if (currentHMDWornStatus) {
qCDebug(interfaceapp) << "Switching from Desktop to HMD mode";
endHMDSession();
setActiveDisplayPlugin(_autoSwitchDisplayModeSupportedHMDPluginName);
} else {
qCDebug(interfaceapp) << "Switching from HMD to desktop mode";
setActiveDisplayPlugin(DESKTOP_DISPLAY_PLUGIN_NAME);
startHMDStandBySession();
}
}
_previousHMDWornStatus = currentHMDWornStatus;
}
void Application::setDisplayPlugin(DisplayPluginPointer newDisplayPlugin) {
if (newDisplayPlugin == _displayPlugin) {
return;
}
// FIXME don't have the application directly set the state of the UI,
// instead emit a signal that the display plugin is changing and let
// the desktop lock itself. Reduces coupling between the UI and display
// plugins
auto offscreenUi = getOffscreenUI();
auto desktop = offscreenUi ? offscreenUi->getDesktop() : nullptr;
auto menu = Menu::getInstance();
// Make the switch atomic from the perspective of other threads
{
std::unique_lock<std::mutex> lock(_displayPluginLock);
bool wasRepositionLocked = false;
if (desktop) {
// Tell the desktop not to reposition (which requires plugin info) until we have set the new plugin, below.
wasRepositionLocked = desktop->property("repositionLocked").toBool();
desktop->setProperty("repositionLocked", true);
}
if (_displayPlugin) {
disconnect(_displayPlugin.get(), &DisplayPlugin::presented, this, &Application::onPresent);
_displayPlugin->deactivate();
}
auto oldDisplayPlugin = _displayPlugin;
bool active = newDisplayPlugin->activate();
if (!active) {
const DisplayPluginList& displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
// If the new plugin fails to activate, fall back to the last display plugin
qWarning() << "Failed to activate display: " << newDisplayPlugin->getName();
newDisplayPlugin = oldDisplayPlugin;
if (newDisplayPlugin) {
qWarning() << "Falling back to last display: " << newDisplayPlugin->getName();
active = newDisplayPlugin->activate();
}
// If there is no last display plugin, or if the last display plugin
// also fails to activate, fall back to the desktop display plugin
if (!active) {
newDisplayPlugin = displayPlugins.at(0);
qWarning() << "Falling back to display: " << newDisplayPlugin->getName();
active = newDisplayPlugin->activate();
}
if (!active) {
qFatal("Failed to activate fallback plugin");
}
}
if (offscreenUi) {
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
}
getApplicationCompositor().setDisplayPlugin(newDisplayPlugin);
_displayPlugin = newDisplayPlugin;
connect(_displayPlugin.get(), &DisplayPlugin::presented, this, &Application::onPresent, Qt::DirectConnection);
if (desktop) {
desktop->setProperty("repositionLocked", wasRepositionLocked);
}
RefreshRateManager& refreshRateManager = getRefreshRateManager();
refreshRateManager.setRefreshRateOperator(OpenGLDisplayPlugin::getRefreshRateOperator());
bool isHmd = newDisplayPlugin->isHmd();
RefreshRateManager::UXMode uxMode = isHmd ? RefreshRateManager::UXMode::VR :
RefreshRateManager::UXMode::DESKTOP;
refreshRateManager.setUXMode(uxMode);
}
bool isHmd = _displayPlugin->isHmd();
qCDebug(interfaceapp) << "Entering into" << (isHmd ? "HMD" : "Desktop") << "Mode";
// Only log/emit after a successful change
UserActivityLogger::getInstance().logAction("changed_display_mode", {
    { "previous_display_mode", previousDisplayPluginName },
    { "display_mode", newDisplayPlugin ? newDisplayPlugin->getName() : "" },
    { "hmd", isHmd }
});
emit activeDisplayPluginChanged();
// reset the avatar, to set head and hand palms back to a reasonable default pose.
getMyAvatar()->reset(false);
// switch to first person if entering hmd and setting is checked
if (menu) {
QAction* action = menu->getActionForOption(newDisplayPlugin->getName());
if (action) {
action->setChecked(true);
}
if (isHmd && menu->isOptionChecked(MenuOption::FirstPersonHMD)) {
menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
cameraMenuChanged();
}
// Hide the selfie camera option in the menu while in HMD mode
auto selfieAction = menu->getActionForOption(MenuOption::SelfieCamera);
if (selfieAction) {
    selfieAction->setVisible(!isHmd);
}
}
Q_ASSERT_X(_displayPlugin, "Application::setDisplayPlugin", "could not find an activated display plugin");
}
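The FIXME near the top of setDisplayPlugin() asks for a signal-based hand-off instead of poking the desktop's property directly. A rough sketch of the unlock side using the activeDisplayPluginChanged signal emitted above; a matching "about to change" signal would still be needed for the lock side, and no such signal exists in this commit.

// Sketch only: the desktop unlocks itself in response to the existing signal,
// instead of Application writing repositionLocked back directly.
QObject::connect(qApp, &Application::activeDisplayPluginChanged, desktop, [desktop] {
    desktop->setProperty("repositionLocked", false);   // unlock once the new plugin is active
});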
void Application::startHMDStandBySession() {
_autoSwitchDisplayModeSupportedHMDPlugin->startStandBySession();
}
void Application::endHMDSession() {
_autoSwitchDisplayModeSupportedHMDPlugin->endSession();
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -26,8 +26,6 @@
#include "Application.h"
#include "Menu.h"
#include <RunningMarker.h>
#include <SettingHandle.h>
#include <SettingHelpers.h>
@ -40,7 +38,6 @@
bool CrashRecoveryHandler::checkForResetSettings(bool wasLikelyCrash, bool suppressPrompt) {
Setting::Handle<bool> crashReportingAsked { "CrashReportingAsked", false };
Settings settings;
settings.beginGroup("Developer");
QVariant displayCrashOptions = settings.value(MenuOption::DisplayCrashOptions);
@ -94,8 +91,6 @@ bool CrashRecoveryHandler::suggestCrashReporting() {
QString explainText;
auto &ch = CrashHandler::getInstance();
switch(BuildInfo::BUILD_TYPE) {
case BuildInfo::BuildType::Dev:
explainText = "You're running a pre-release version. This is an official release, but the code\n"

View file

@ -14,11 +14,6 @@
#include <QString>
class CrashRecoveryHandler {
public:

View file

@ -0,0 +1,22 @@
//
// DeadlockWatchdog.cpp
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#include "DeadlockWatchdog.h"
std::atomic<bool> DeadlockWatchdogThread::_paused;
std::atomic<uint64_t> DeadlockWatchdogThread::_heartbeat;
std::atomic<uint64_t> DeadlockWatchdogThread::_maxElapsed;
std::atomic<int> DeadlockWatchdogThread::_maxElapsedAverage;
ThreadSafeMovingAverage<int, DeadlockWatchdogThread::HEARTBEAT_SAMPLES> DeadlockWatchdogThread::_movingAverage;

View file

@ -0,0 +1,159 @@
//
// DeadlockWatchdog.h
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#ifndef hifi_DeadlockWatchdog_h
#define hifi_DeadlockWatchdog_h
#include <QThread>
#include <NumericalConstants.h>
#include <SharedUtil.h>
#include <crash-handler/CrashHandler.h>
#include "InterfaceLogging.h"
#include "SimpleMovingAverage.h"
class DeadlockWatchdogThread : public QThread {
public:
static const unsigned long HEARTBEAT_UPDATE_INTERVAL_SECS = 1;
static const unsigned long MAX_HEARTBEAT_AGE_USECS = 120 * USECS_PER_SECOND; // 2 minutes with no check-in probably indicates a deadlock
static const int WARNING_ELAPSED_HEARTBEAT = 500 * USECS_PER_MSEC; // warn if elapsed heartbeat average is large
static const int HEARTBEAT_SAMPLES = 100000; // ~5 seconds worth of samples
// Set the heartbeat on launch
DeadlockWatchdogThread() {
setObjectName("Deadlock Watchdog");
// Give the heartbeat an initial value
_heartbeat = usecTimestampNow();
_paused = false;
connect(qApp, &QCoreApplication::aboutToQuit, [this] {
_quit = true;
});
}
void setMainThreadID(Qt::HANDLE threadID) {
_mainThreadID = threadID;
}
static void updateHeartbeat() {
auto now = usecTimestampNow();
auto elapsed = now - _heartbeat;
_movingAverage.addSample(elapsed);
_heartbeat = now;
}
void deadlockDetectionCrash() {
auto &ch = CrashHandler::getInstance();
ch.setAnnotation("_mod_faulting_tid", std::to_string((uint64_t)_mainThreadID));
ch.setAnnotation("deadlock", "1");
uint32_t* crashTrigger = nullptr;
*crashTrigger = 0xDEAD10CC;
}
static void withPause(const std::function<void()>& lambda) {
pause();
lambda();
resume();
}
static void pause() {
_paused = true;
}
static void resume() {
// Update the heartbeat BEFORE resuming the checks
updateHeartbeat();
_paused = false;
}
void run() override {
while (!_quit) {
QThread::sleep(HEARTBEAT_UPDATE_INTERVAL_SECS);
// Skip heartbeat detection while paused (e.g. while profiling under Nsight)
if (_paused) {
continue;
}
uint64_t lastHeartbeat = _heartbeat; // sample atomic _heartbeat, because we could context switch away and have it updated on us
uint64_t now = usecTimestampNow();
auto lastHeartbeatAge = (now > lastHeartbeat) ? now - lastHeartbeat : 0;
auto elapsedMovingAverage = _movingAverage.getAverage();
if (elapsedMovingAverage > _maxElapsedAverage * 1.1f) {
#if !defined(NDEBUG)
qCDebug(interfaceapp_deadlock) << "DEADLOCK WATCHDOG WARNING:"
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "elapsedMovingAverage:" << elapsedMovingAverage
<< "maxElapsed:" << _maxElapsed
<< "PREVIOUS maxElapsedAverage:" << _maxElapsedAverage
<< "NEW maxElapsedAverage:" << elapsedMovingAverage << "** NEW MAX ELAPSED AVERAGE **"
<< "samples:" << _movingAverage.getSamples();
#endif
_maxElapsedAverage = elapsedMovingAverage;
}
if (lastHeartbeatAge > _maxElapsed) {
#if !defined(NDEBUG)
qCDebug(interfaceapp_deadlock) << "DEADLOCK WATCHDOG WARNING:"
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "elapsedMovingAverage:" << elapsedMovingAverage
<< "PREVIOUS maxElapsed:" << _maxElapsed
<< "NEW maxElapsed:" << lastHeartbeatAge << "** NEW MAX ELAPSED **"
<< "maxElapsedAverage:" << _maxElapsedAverage
<< "samples:" << _movingAverage.getSamples();
#endif
_maxElapsed = lastHeartbeatAge;
}
#if !defined(NDEBUG)
if (elapsedMovingAverage > WARNING_ELAPSED_HEARTBEAT) {
qCDebug(interfaceapp_deadlock) << "DEADLOCK WATCHDOG WARNING:"
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "elapsedMovingAverage:" << elapsedMovingAverage << "** OVER EXPECTED VALUE **"
<< "maxElapsed:" << _maxElapsed
<< "maxElapsedAverage:" << _maxElapsedAverage
<< "samples:" << _movingAverage.getSamples();
}
#endif
if (lastHeartbeatAge > MAX_HEARTBEAT_AGE_USECS) {
qCDebug(interfaceapp_deadlock) << "DEADLOCK DETECTED -- "
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "[ lastHeartbeat :" << lastHeartbeat
<< "now:" << now << " ]"
<< "elapsedMovingAverage:" << elapsedMovingAverage
<< "maxElapsed:" << _maxElapsed
<< "maxElapsedAverage:" << _maxElapsedAverage
<< "samples:" << _movingAverage.getSamples();
// Don't actually crash in debug builds, in case this apparent deadlock is simply from
// the developer actively debugging code
#ifdef NDEBUG
deadlockDetectionCrash();
#endif
}
}
}
static std::atomic<bool> _paused;
static std::atomic<uint64_t> _heartbeat;
static std::atomic<uint64_t> _maxElapsed;
static std::atomic<int> _maxElapsedAverage;
static ThreadSafeMovingAverage<int, HEARTBEAT_SAMPLES> _movingAverage;
bool _quit { false };
Qt::HANDLE _mainThreadID = nullptr;
};
#endif // hifi_DeadlockWatchdog_h
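A minimal usage sketch, assuming the host owns the main loop; only the DeadlockWatchdogThread API above comes from this file, the surrounding calls are illustrative.

// Illustrative host-side usage of the watchdog defined above (run from the main thread).
auto watchdog = new DeadlockWatchdogThread();
watchdog->setMainThreadID(QThread::currentThreadId());
watchdog->start();

// Once per main-loop iteration, on the main thread:
DeadlockWatchdogThread::updateHeartbeat();

// Around legitimate long blocking work that should not trip the watchdog:
DeadlockWatchdogThread::withPause([] {
    // slow synchronous operation
});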

View file

@ -21,6 +21,7 @@
#include "Application.h"
#include "ui/DialogsManager.h"
#include "ui/LodToolsDialog.h"
#include "InterfaceLogging.h"
STATIC_SCRIPT_TYPES_INITIALIZER((+[](ScriptManager* manager){

View file

@ -0,0 +1,240 @@
//
// WindowSystemInfo.h
// interface/src
//
// Split from Application.cpp by HifiExperiments on 3/30/24
// Created by Andrzej Kapolka on 5/10/13.
// Copyright 2013 High Fidelity, Inc.
// Copyright 2020 Vircadia contributors.
// Copyright 2022-2023 Overte e.V.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// SPDX-License-Identifier: Apache-2.0
//
#ifndef hifi_WindowSystemInfo_h
#define hifi_WindowSystemInfo_h
#include <qsystemdetection.h>
#ifdef Q_OS_WIN
#include <Windows.h>
#include <TCHAR.h>
#include <pdh.h>
#pragma comment(lib, "pdh.lib")
#pragma comment(lib, "ntdll.lib")
#include <mutex>
#include <string>
#include <vector>
#include <glm/glm.hpp>
#include <QThread>
#include <QTimer>
#include "Profile.h"
#include "ThreadHelpers.h"
extern "C" {
enum SYSTEM_INFORMATION_CLASS {
SystemBasicInformation = 0,
SystemProcessorPerformanceInformation = 8,
};
struct SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION {
LARGE_INTEGER IdleTime;
LARGE_INTEGER KernelTime;
LARGE_INTEGER UserTime;
LARGE_INTEGER DpcTime;
LARGE_INTEGER InterruptTime;
ULONG InterruptCount;
};
struct SYSTEM_BASIC_INFORMATION {
ULONG Reserved;
ULONG TimerResolution;
ULONG PageSize;
ULONG NumberOfPhysicalPages;
ULONG LowestPhysicalPageNumber;
ULONG HighestPhysicalPageNumber;
ULONG AllocationGranularity;
ULONG_PTR MinimumUserModeAddress;
ULONG_PTR MaximumUserModeAddress;
ULONG_PTR ActiveProcessorsAffinityMask;
CCHAR NumberOfProcessors;
};
NTSYSCALLAPI LONG NTAPI NtQuerySystemInformation(
_In_ SYSTEM_INFORMATION_CLASS SystemInformationClass,
_Out_writes_bytes_opt_(SystemInformationLength) PVOID SystemInformation,
_In_ ULONG SystemInformationLength,
_Out_opt_ PULONG ReturnLength
);
}
template <typename T>
LONG NtQuerySystemInformation(SYSTEM_INFORMATION_CLASS SystemInformationClass, T& t) {
return NtQuerySystemInformation(SystemInformationClass, &t, (ULONG)sizeof(T), nullptr);
}
template <typename T>
LONG NtQuerySystemInformation(SYSTEM_INFORMATION_CLASS SystemInformationClass, std::vector<T>& t) {
return NtQuerySystemInformation(SystemInformationClass, t.data(), (ULONG)(sizeof(T) * t.size()), nullptr);
}
template <typename T>
void updateValueAndDelta(std::pair<T, T>& pair, T newValue) {
auto& value = pair.first;
auto& delta = pair.second;
delta = (value != 0) ? newValue - value : 0;
value = newValue;
}
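// Example: given std::pair<LONGLONG, LONGLONG> p { 0, 0 }, updateValueAndDelta(p, 100LL) leaves
// p == { 100, 0 } (the first sample produces no delta), and a later updateValueAndDelta(p, 250LL)
// leaves p == { 250, 150 }.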
struct MyCpuInfo {
using ValueAndDelta = std::pair<LONGLONG, LONGLONG>;
std::string name;
ValueAndDelta kernel { 0, 0 };
ValueAndDelta user { 0, 0 };
ValueAndDelta idle { 0, 0 };
float kernelUsage { 0.0f };
float userUsage { 0.0f };
void update(const SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION& cpuInfo) {
updateValueAndDelta(kernel, cpuInfo.KernelTime.QuadPart);
updateValueAndDelta(user, cpuInfo.UserTime.QuadPart);
updateValueAndDelta(idle, cpuInfo.IdleTime.QuadPart);
auto totalTime = kernel.second + user.second + idle.second;
if (totalTime != 0) {
kernelUsage = (FLOAT)kernel.second / totalTime;
userUsage = (FLOAT)user.second / totalTime;
} else {
kernelUsage = userUsage = 0.0f;
}
}
};
void updateCpuInformation() {
static std::once_flag once;
static SYSTEM_BASIC_INFORMATION systemInfo {};
static SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION cpuTotals;
static std::vector<SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION> cpuInfos;
static std::vector<MyCpuInfo> myCpuInfos;
static MyCpuInfo myCpuTotals;
std::call_once(once, [&] {
NtQuerySystemInformation(SystemBasicInformation, systemInfo);
cpuInfos.resize(systemInfo.NumberOfProcessors);
myCpuInfos.resize(systemInfo.NumberOfProcessors);
for (size_t i = 0; i < systemInfo.NumberOfProcessors; ++i) {
myCpuInfos[i].name = "cpu." + std::to_string(i);
}
myCpuTotals.name = "cpu.total";
});
NtQuerySystemInformation(SystemProcessorPerformanceInformation, cpuInfos);
// Zero the CPU totals.
memset(&cpuTotals, 0, sizeof(SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION));
for (size_t i = 0; i < systemInfo.NumberOfProcessors; ++i) {
auto& cpuInfo = cpuInfos[i];
// KernelTime includes IdleTime.
cpuInfo.KernelTime.QuadPart -= cpuInfo.IdleTime.QuadPart;
// Update totals
cpuTotals.IdleTime.QuadPart += cpuInfo.IdleTime.QuadPart;
cpuTotals.KernelTime.QuadPart += cpuInfo.KernelTime.QuadPart;
cpuTotals.UserTime.QuadPart += cpuInfo.UserTime.QuadPart;
// Update friendly structure
auto& myCpuInfo = myCpuInfos[i];
myCpuInfo.update(cpuInfo);
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
{ "kernel", myCpuInfo.kernelUsage },
{ "user", myCpuInfo.userUsage }
});
}
myCpuTotals.update(cpuTotals);
PROFILE_COUNTER(app, myCpuTotals.name.c_str(), {
{ "kernel", myCpuTotals.kernelUsage },
{ "user", myCpuTotals.userUsage }
});
}
static ULARGE_INTEGER lastCPU, lastSysCPU, lastUserCPU;
static int numProcessors;
static HANDLE self;
static PDH_HQUERY cpuQuery;
static PDH_HCOUNTER cpuTotal;
void initCpuUsage() {
SYSTEM_INFO sysInfo;
FILETIME ftime, fsys, fuser;
GetSystemInfo(&sysInfo);
numProcessors = sysInfo.dwNumberOfProcessors;
GetSystemTimeAsFileTime(&ftime);
memcpy(&lastCPU, &ftime, sizeof(FILETIME));
self = GetCurrentProcess();
GetProcessTimes(self, &ftime, &ftime, &fsys, &fuser);
memcpy(&lastSysCPU, &fsys, sizeof(FILETIME));
memcpy(&lastUserCPU, &fuser, sizeof(FILETIME));
PdhOpenQuery(NULL, NULL, &cpuQuery);
PdhAddCounter(cpuQuery, "\\Processor(_Total)\\% Processor Time", NULL, &cpuTotal);
PdhCollectQueryData(cpuQuery);
}
void getCpuUsage(glm::vec3& systemAndUser) {
FILETIME ftime, fsys, fuser;
ULARGE_INTEGER now, sys, user;
GetSystemTimeAsFileTime(&ftime);
memcpy(&now, &ftime, sizeof(FILETIME));
GetProcessTimes(self, &ftime, &ftime, &fsys, &fuser);
memcpy(&sys, &fsys, sizeof(FILETIME));
memcpy(&user, &fuser, sizeof(FILETIME));
systemAndUser.x = (sys.QuadPart - lastSysCPU.QuadPart);
systemAndUser.y = (user.QuadPart - lastUserCPU.QuadPart);
systemAndUser /= (float)(now.QuadPart - lastCPU.QuadPart);
systemAndUser /= (float)numProcessors;
systemAndUser *= 100.0f;
lastCPU = now;
lastUserCPU = user;
lastSysCPU = sys;
PDH_FMT_COUNTERVALUE counterVal;
PdhCollectQueryData(cpuQuery);
PdhGetFormattedCounterValue(cpuTotal, PDH_FMT_DOUBLE, NULL, &counterVal);
systemAndUser.z = (float)counterVal.doubleValue;
}
void setupCpuMonitorThread() {
initCpuUsage();
auto cpuMonitorThread = QThread::currentThread();
setThreadName("CPU Monitor Thread");
QTimer* timer = new QTimer();
timer->setInterval(50);
QObject::connect(timer, &QTimer::timeout, [] {
updateCpuInformation();
glm::vec3 kernelUserAndSystem;
getCpuUsage(kernelUserAndSystem);
PROFILE_COUNTER(app, "cpuProcess", { { "system", kernelUserAndSystem.x }, { "user", kernelUserAndSystem.y } });
PROFILE_COUNTER(app, "cpuSystem", { { "system", kernelUserAndSystem.z } });
});
QObject::connect(cpuMonitorThread, &QThread::finished, [=] {
timer->deleteLater();
cpuMonitorThread->deleteLater();
});
timer->start();
}
#endif
#endif // hifi_WindowSystemInfo_h
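setupCpuMonitorThread() installs its QTimer on whatever thread calls it, so it needs a thread with a running event loop. A hedged sketch of launching it; how Application actually starts it is not shown here, so treat the surrounding code as an assumption.

#ifdef Q_OS_WIN
// Illustrative launch of the CPU monitor on a dedicated Qt thread.
QThread* cpuMonitorThread = new QThread(qApp);
cpuMonitorThread->setObjectName("CPU Monitor Thread");
// started() is emitted from the new thread, so the 50 ms timer gets the right thread affinity.
QObject::connect(cpuMonitorThread, &QThread::started, [] { setupCpuMonitorThread(); });
QObject::connect(qApp, &QCoreApplication::aboutToQuit, cpuMonitorThread, &QThread::quit);
cpuMonitorThread->start();   // the default run() spins an event loop for the timer
#endif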

View file

@ -33,6 +33,7 @@
#include <AnimInverseKinematics.h>
#include <AudioClient.h>
#include <ClientTraitsHandler.h>
#include <controllers/UserInputMapper.h>
#include <recording/Clip.h>
#include <recording/Deck.h>
#include <display-plugins/DisplayPlugin.h>

View file

@ -15,6 +15,7 @@
#include <recording/Deck.h>
#include <Rig.h>
#include <BlendshapeConstants.h>
#include <controllers/UserInputMapper.h>
#include "Application.h"
#include "MyAvatar.h"

View file

@ -13,6 +13,7 @@
#include <shared/GlobalAppProperties.h>
#include <shared/QtHelpers.h>
#include <crash-handler/CrashHandler.h>
#include "ThreadHelpers.h"
RenderEventHandler::RenderEventHandler(CheckCall checkCall, RenderCall renderCall) :

View file

@ -18,6 +18,7 @@
#include <QLocalServer>
#include <QSharedMemory>
#include <QTranslator>
#include <QStandardPaths>
#include <BuildInfo.h>
#include <SandboxUtils.h>
@ -25,6 +26,10 @@
#include <NetworkAccessManager.h>
#include <gl/GLHelpers.h>
#include <iostream>
#include <plugins/InputPlugin.h>
#include <plugins/PluginManager.h>
#include <plugins/DisplayPlugin.h>
#include <plugins/CodecPlugin.h>
#include "AddressManager.h"
#include "Application.h"
@ -34,9 +39,7 @@
#include "MainWindow.h"
#include "Profile.h"
#include "LogHandler.h"
#include <plugins/PluginManager.h>
#include <plugins/DisplayPlugin.h>
#include <plugins/CodecPlugin.h>
#include "RunningMarker.h"
#ifdef Q_OS_WIN
#include <Windows.h>
@ -558,10 +561,6 @@ int main(int argc, const char* argv[]) {
QCoreApplication::setAttribute(Qt::AA_UseOpenGLES);
#endif
// Instance UserActivityLogger now that the settings are loaded
auto& ual = UserActivityLogger::getInstance();
auto& ch = CrashHandler::getInstance();
@ -725,7 +724,7 @@ int main(int argc, const char* argv[]) {
int exitCode;
{
RunningMarker runningMarker(RUNNING_MARKER_FILENAME);
RunningMarker runningMarker("Interface.running");
bool runningMarkerExisted = runningMarker.fileExists();
runningMarker.writeRunningMarkerFile();

View file

@ -17,6 +17,10 @@
#include "Menu.h"
#include "SceneScriptingInterface.h"
#ifndef Q_OS_ANDROID
#include <shared/FileLogger.h>
#endif
OctreePacketProcessor::OctreePacketProcessor():
_safeLanding(new SafeLanding())
{
@ -55,7 +59,7 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
// immediately following them inside the same packet. So, we process the PacketType_OCTREE_STATS first
// then process any remaining bytes as if it was another packet
if (octreePacketType == PacketType::OctreeStats) {
int statsMessageLength = qApp->processOctreeStats(*message, sendingNode);
int statsMessageLength = processOctreeStats(*message, sendingNode);
wasStatsPacket = true;
int piggybackBytes = message->getSize() - statsMessageLength;
@ -93,7 +97,7 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
}
if (packetType != PacketType::EntityQueryInitialResultsComplete) {
qApp->trackIncomingOctreePacket(*message, sendingNode, wasStatsPacket);
trackIncomingOctreePacket(*message, sendingNode, wasStatsPacket);
}
// seek back to beginning of packet after tracking
@ -175,3 +179,38 @@ bool OctreePacketProcessor::safeLandingIsComplete() const {
}
return false;
}
int OctreePacketProcessor::processOctreeStats(ReceivedMessage& message, SharedNodePointer sendingNode) {
// parse the incoming stats data and stick it in a temporary object for now, while we
// determine which server it belongs to
int statsMessageLength = 0;
const QUuid& nodeUUID = sendingNode->getUUID();
// now that we know the node ID, let's add these stats to the stats for that node...
_octreeServerSceneStats.withWriteLock([&] {
OctreeSceneStats& octreeStats = _octreeServerSceneStats[nodeUUID];
statsMessageLength = octreeStats.unpackFromPacket(message);
if (octreeStats.isFullScene()) {
_fullSceneReceivedCounter++;
}
});
return statsMessageLength;
}
void OctreePacketProcessor::trackIncomingOctreePacket(ReceivedMessage& message, SharedNodePointer sendingNode, bool wasStatsPacket) {
// Attempt to identify the sender from its address.
if (sendingNode) {
const QUuid& nodeUUID = sendingNode->getUUID();
// now that we know the node ID, let's add these stats to the stats for that node...
_octreeServerSceneStats.withWriteLock([&] {
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
OctreeSceneStats& stats = _octreeServerSceneStats[nodeUUID];
stats.trackIncomingOctreePacket(message, wasStatsPacket, sendingNode->getClockSkewUsec());
}
});
}
}
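A hedged sketch of a consumer for the counters these helpers maintain; the two accessors are declared in OctreePacketProcessor.h below, but where and when this would be called is an assumption.

// Illustrative only: report how many full-scene stats packets have arrived so far.
static void logOctreeProgress(OctreePacketProcessor& processor) {
    uint32_t fullScenes = processor.getFullSceneReceivedCounter().load();
    qDebug() << "Full-scene octree stats packets received so far:" << fullScenes;
}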

View file

@ -17,6 +17,7 @@
#include <ReceivedPacketProcessor.h>
#include <ReceivedMessage.h>
#include "OctreeSceneStats.h"
#include "SafeLanding.h"
/// Handles processing of incoming voxel packets for the interface application. As with other ReceivedPacketProcessor classes
@ -27,6 +28,9 @@ public:
OctreePacketProcessor();
~OctreePacketProcessor();
NodeToOctreeSceneStats* getOctreeSceneStats() { return &_octreeServerSceneStats; }
std::atomic<uint32_t>& getFullSceneReceivedCounter() { return _fullSceneReceivedCounter; }
void startSafeLanding();
void updateSafeLanding();
void stopSafeLanding();
@ -46,6 +50,12 @@ private slots:
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
private:
int processOctreeStats(ReceivedMessage& message, SharedNodePointer sendingNode);
void trackIncomingOctreePacket(ReceivedMessage& message, SharedNodePointer sendingNode, bool wasStatsPacket);
NodeToOctreeSceneStats _octreeServerSceneStats;
std::atomic<uint32_t> _fullSceneReceivedCounter { 0 }; // how many times have we received a full-scene octree stats packet
OCTREE_PACKET_SEQUENCE _safeLandingSequenceStart { SafeLanding::INVALID_SEQUENCE };
std::unique_ptr<SafeLanding> _safeLanding;
};

View file

@ -24,6 +24,8 @@
#include "StylusPick.h"
#include <ScriptManager.h>
#include <controllers/UserInputMapper.h>
STATIC_SCRIPT_TYPES_INITIALIZER((+[](ScriptManager* manager){
auto scriptEngine = manager->engine().get();
scriptRegisterMetaType<RayPointerProperties, rayPointerPropertiesToScriptValue, rayPointerPropertiesFromScriptValue>(scriptEngine);

View file

@ -11,6 +11,8 @@
#include "PerformanceScriptingInterface.h"
#include <QtQML>
#include <ScriptEngineCast.h>
#include "../Application.h"

View file

@ -9,11 +9,14 @@
//
#include "RenderScriptingInterface.h"
#include <QScreen>
#include <QtQML>
#include <RenderCommonTask.h>
#include <ScriptEngineCast.h>
#include "LightingModel.h"
#include <QScreen>
#include "Menu.h"
#include "ScreenName.h"
#include <procedural/Procedural.h>

View file

@ -28,6 +28,8 @@
#include "MainWindow.h"
#include "Menu.h"
#include "DialogsManager.h"
#include "LodToolsDialog.h"
#include "OctreeStatsDialog.h"
static const int WIDTH = 350;
static const int HEIGHT = 100;

View file

@ -17,6 +17,7 @@
#include <QLabel>
#include <QScrollBar>
#include <QtConcurrent/QtConcurrentRun>
#include <QStandardPaths>
#include <QStringListModel>
#include <QListView>
#include <QToolTip>

View file

@ -21,6 +21,7 @@
#include <OctreeSceneStats.h>
#include "Application.h"
#include "Menu.h"
#include "../octree/OctreePacketProcessor.h"
OctreeStatsDialog::OctreeStatsDialog(QWidget* parent, NodeToOctreeSceneStats* model) :

View file

@ -12,6 +12,7 @@
#include "OctreeStatsProvider.h"
#include "Application.h"
#include "Menu.h"
#include "octree/OctreePacketProcessor.h"
OctreeStatsProvider::OctreeStatsProvider(QObject* parent, NodeToOctreeSceneStats* model) :

View file

@ -30,9 +30,6 @@ using ConicalViewFrustums = std::vector<ConicalViewFrustum>;
/// Interface provided by Application to other objects that need access to the current view state details
class AbstractViewStateInterface {
public:
/// copies the current view frustum for rendering the view state
virtual void copyCurrentViewFrustum(ViewFrustum& viewOut) const = 0;
virtual const ConicalViewFrustums& getConicalViews() const = 0;
virtual QThread* getMainThread() = 0;
@ -54,8 +51,6 @@ public:
// is called from
virtual void sendLambdaEvent(const std::function<void()>& f) = 0;
virtual qreal getDevicePixelRatio() = 0;
virtual render::ScenePointer getMain3DScene() = 0;
virtual render::EnginePointer getRenderEngine() = 0;