whitespace cleanup

Andrew Meadows 2015-11-23 18:04:32 -08:00
parent d32e500464
commit 5135c9dded
2 changed files with 85 additions and 85 deletions

@@ -679,7 +679,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
}));
userInputMapper->registerDevice(_applicationStateDevice);
// Setup the keyboardMouseDevice and the user input mapper with the default bindings
userInputMapper->registerDevice(_keyboardMouseDevice->getInputDevice());
@@ -749,7 +749,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
_oldHandRightClick[0] = false;
_oldHandLeftClick[1] = false;
_oldHandRightClick[1] = false;
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
connect(applicationUpdater.data(), &AutoUpdater::newVersionIsAvailable, dialogsManager.data(), &DialogsManager::showUpdateDialog);
applicationUpdater->checkForUpdate();
@@ -768,7 +768,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
// If the user clicks an an entity, we will check that it's an unlocked web entity, and if so, set the focus to it
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
connect(entityScriptingInterface.data(), &EntityScriptingInterface::clickDownOnEntity,
[this, entityScriptingInterface](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_keyboardFocusedItem != entityItemID) {
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
@@ -817,7 +817,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
});
// If the user clicks somewhere where there is NO entity at all, we will release focus
connect(getEntities(), &EntityTreeRenderer::mousePressOffEntity,
[=](const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId) {
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
if (_keyboardFocusHighlight) {
@@ -826,17 +826,17 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
});
connect(this, &Application::applicationStateChanged, this, &Application::activeChanged);
qCDebug(interfaceapp, "Startup time: %4.2f seconds.", (double)startupTimer.elapsed() / 1000.0);
}
void Application::aboutToQuit() {
emit beforeAboutToQuit();
getActiveDisplayPlugin()->deactivate();
_aboutToQuit = true;
cleanupBeforeQuit();
}
@@ -860,16 +860,16 @@ void Application::cleanupBeforeQuit() {
_keyboardFocusHighlight = nullptr;
_entities.clear(); // this will allow entity scripts to properly shutdown
auto nodeList = DependencyManager::get<NodeList>();
// send the domain a disconnect packet, force stoppage of domain-server check-ins
nodeList->getDomainHandler().disconnect();
nodeList->setIsShuttingDown(true);
// tell the packet receiver we're shutting down, so it can drop packets
nodeList->getPacketReceiver().setShouldDropPackets(true);
_entities.shutdown(); // tell the entities system we're shutting down, so it will stop running scripts
ScriptEngine::stopAllScripts(this); // stop all currently running global scripts
@@ -947,7 +947,7 @@ Application::~Application() {
DependencyManager::destroy<GeometryCache>();
DependencyManager::destroy<ScriptCache>();
DependencyManager::destroy<SoundCache>();
// cleanup the AssetClient thread
QThread* assetThread = DependencyManager::get<AssetClient>()->thread();
DependencyManager::destroy<AssetClient>();
@@ -955,14 +955,14 @@ Application::~Application() {
assetThread->wait();
QThread* nodeThread = DependencyManager::get<NodeList>()->thread();
// remove the NodeList from the DependencyManager
DependencyManager::destroy<NodeList>();
// ask the node thread to quit and wait until it is done
nodeThread->quit();
nodeThread->wait();
Leapmotion::destroy();
RealSense::destroy();
@@ -1058,7 +1058,7 @@ void Application::initializeUi() {
resizeGL();
}
});
// This will set up the input plugins UI
_activeInputPlugins.clear();
foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
@@ -1100,8 +1100,8 @@ void Application::paintGL() {
return;
}
// Some plugins process message events, potentially leading to
// re-entering a paint event. don't allow further processing if this
// happens
if (_inPaint) {
return;
@@ -1137,17 +1137,17 @@ void Application::paintGL() {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
PerformanceTimer perfTimer("Mirror");
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebufferDepthColor();
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
renderRearViewMirror(&renderArgs, _mirrorViewRect);
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
{
float ratio = ((float)QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale());
// Flip the src and destination rect horizontally to do the mirror
auto mirrorRect = glm::ivec4(0, 0, _mirrorViewRect.width() * ratio, _mirrorViewRect.height() * ratio);
auto mirrorRectDest = glm::ivec4(mirrorRect.z, mirrorRect.y, mirrorRect.x, mirrorRect.w);
auto selfieFbo = DependencyManager::get<FramebufferCache>()->getSelfieFramebuffer();
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
batch.setFramebuffer(selfieFbo);
@@ -1169,9 +1169,9 @@ void Application::paintGL() {
{
PerformanceTimer perfTimer("CameraUpdates");
auto myAvatar = getMyAvatar();
myAvatar->startCapture();
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON || _myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
@@ -1208,26 +1208,26 @@ void Application::paintGL() {
* (myAvatar->getScale() * myAvatar->getBoomLength() * glm::vec3(0.0f, 0.0f, 1.0f)));
} else {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ myAvatar->getOrientation()
* (myAvatar->getScale() * myAvatar->getBoomLength() * glm::vec3(0.0f, 0.0f, 1.0f)));
}
}
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (isHMDMode()) {
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)) * hmdRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getScale(), 0)
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f))) * hmdOffset);
} else {
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getScale(), 0)
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
}
@@ -1246,7 +1246,7 @@ void Application::paintGL() {
}
}
}
// Update camera position
if (!isHMDMode()) {
_myCamera.update(1.0f / _fps);
}
@@ -1264,12 +1264,12 @@ void Application::paintGL() {
if (displayPlugin->isStereo()) {
// Stereo modes will typically have a larger projection matrix overall,
// so we ask for the 'mono' projection matrix, which for stereo and HMD
// plugins will imply the combined projection for both eyes.
//
// This is properly implemented for the Oculus plugins, but for OpenVR
// and Stereo displays I'm not sure how to get / calculate it, so we're
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(displayPlugin->getProjection(Mono, _myCamera.getProjection()));
renderArgs._context->enableStereo(true);
@@ -1279,11 +1279,11 @@ void Application::paintGL() {
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
float IPDScale = hmdInterface->getIPDScale();
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can.
for_each_eye([&](Eye eye) {
// For providing the stereo eye views, the HMD head pose has already been
// applied to the avatar, so we need to get the difference between the head
// pose applied to the avatar and the per eye pose, and use THAT as
// the per-eye stereo matrix adjustment.
mat4 eyeToHead = displayPlugin->getEyeToHeadTransform(eye);
@@ -1293,10 +1293,10 @@ void Application::paintGL() {
mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * IPDScale);
eyeOffsets[eye] = eyeOffsetTransform;
// Tell the plugin what pose we're using to render. In this case we're just using the
// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
// for rotational timewarp. If we move to support positonal timewarp, we need to
// ensure this contains the full pose composed with the eye offsets.
mat4 headPose = displayPlugin->getHeadPose();
displayPlugin->setEyeRenderPose(eye, headPose);
@@ -1343,7 +1343,7 @@ void Application::paintGL() {
PerformanceTimer perfTimer("pluginOutput");
auto primaryFbo = framebufferCache->getPrimaryFramebuffer();
GLuint finalTexture = gpu::GLBackend::getTextureID(primaryFbo->getRenderBuffer(0));
// Ensure the rendering context commands are completed when rendering
GLsync sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// Ensure the sync object is flushed to the driver thread before releasing the context
// CRITICAL for the mac driver apparently.
@@ -1394,7 +1394,7 @@ void Application::audioMuteToggled() {
}
void Application::faceTrackerMuteToggled() {
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteFaceTracking);
Q_CHECK_PTR(muteAction);
bool isMuted = getSelectedFaceTracker()->isMuted();
@@ -1427,7 +1427,7 @@ void Application::resizeGL() {
if (nullptr == _displayPlugin) {
return;
}
auto displayPlugin = getActiveDisplayPlugin();
// Set the desired FBO texture size. If it hasn't changed, this does nothing.
// Otherwise, it must rebuild the FBOs
@@ -1437,14 +1437,14 @@ void Application::resizeGL() {
_renderResolution = renderSize;
DependencyManager::get<FramebufferCache>()->setFrameBufferSize(fromGlm(renderSize));
}
// FIXME the aspect ratio for stereo displays is incorrect based on this.
float aspectRatio = displayPlugin->getRecommendedAspectRatio();
_myCamera.setProjection(glm::perspective(glm::radians(_fieldOfView.get()), aspectRatio,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
// Possible change in aspect ratio
loadViewFrustum(_myCamera, _viewFrustum);
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto uiSize = displayPlugin->getRecommendedUiSize();
// Bit of a hack since there's no device pixel ratio change event I can find.
@@ -1613,7 +1613,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
if (isMeta) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->load("Browser.qml");
}
}
break;
case Qt::Key_X:
@@ -2109,7 +2109,7 @@ void Application::wheelEvent(QWheelEvent* event) {
if (_controllerScriptingInterface->isWheelCaptured()) {
return;
}
if (Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
_keyboardMouseDevice->wheelEvent(event);
}
@@ -2227,7 +2227,7 @@ void Application::idle(uint64_t now) {
_idleLoopStdev.reset();
}
}
_overlayConductor.update(secondsSinceLastUpdate);
// check for any requested background downloads.
@@ -2481,7 +2481,7 @@ void Application::init() {
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
qCDebug(interfaceapp) << "Loaded settings";
Leapmotion::init();
RealSense::init();
@@ -2539,7 +2539,7 @@ void Application::setAvatarUpdateThreading(bool isThreaded) {
if (_avatarUpdate && (_avatarUpdate->isThreaded() == isThreaded)) {
return;
}
auto myAvatar = getMyAvatar();
bool isRigEnabled = myAvatar->getEnableRigAnimations();
bool isGraphEnabled = myAvatar->getEnableAnimGraph();
@@ -2756,7 +2756,7 @@ void Application::updateDialogs(float deltaTime) {
if(audioStatsDialog) {
audioStatsDialog->update();
}
// Update bandwidth dialog, if any
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
if (bandwidthDialog) {
@@ -2892,7 +2892,7 @@ void Application::update(float deltaTime) {
_entities.getTree()->withWriteLock([&] {
_physicsEngine->stepSimulation();
});
if (_physicsEngine->hasOutgoingChanges()) {
_entities.getTree()->withWriteLock([&] {
_entitySimulation.handleOutgoingChanges(_physicsEngine->getOutgoingChanges(), _physicsEngine->getSessionID());
@@ -3026,10 +3026,10 @@ int Application::sendNackPackets() {
foreach(const OCTREE_PACKET_SEQUENCE& missingNumber, missingSequenceNumbers) {
nackPacketList->writePrimitive(missingNumber);
}
if (nackPacketList->getNumPackets()) {
packetsSent += nackPacketList->getNumPackets();
// send the packet list
nodeList->sendPacketList(std::move(nackPacketList), *node);
}
@@ -3635,7 +3635,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
float fov = MIRROR_FIELD_OF_VIEW;
auto myAvatar = getMyAvatar();
// bool eyeRelativeCamera = false;
if (billboard) {
fov = BILLBOARD_FIELD_OF_VIEW; // degees
@@ -3843,7 +3843,7 @@ void Application::nodeKilled(SharedNodePointer node) {
Menu::getInstance()->getActionForOption(MenuOption::UploadAsset)->setEnabled(false);
}
}
void Application::trackIncomingOctreePacket(NLPacket& packet, SharedNodePointer sendingNode, bool wasStatsPacket) {
// Attempt to identify the sender from its address.
@@ -3868,7 +3868,7 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
int statsMessageLength = 0;
const QUuid& nodeUUID = sendingNode->getUUID();
// now that we know the node ID, let's add these stats to the stats for that node...
_octreeServerSceneStats.withWriteLock([&] {
OctreeSceneStats& octreeStats = _octreeServerSceneStats[nodeUUID];
@@ -4069,7 +4069,7 @@ bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
Qt::AutoConnection, Q_ARG(const QString&, urlString));
return true;
}
QUrl url(urlString);
QHashIterator<QString, AcceptURLMethod> i(_acceptedExtensions);
QString lowerPath = url.path().toLower();
@@ -4080,7 +4080,7 @@ bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
return (this->*method)(urlString);
}
}
return defaultUpload && askToUploadAsset(urlString);
}
@@ -4162,10 +4162,10 @@ bool Application::askToUploadAsset(const QString& filename) {
QString("You don't have upload rights on that domain.\n\n"));
return false;
}
QUrl url { filename };
if (auto upload = DependencyManager::get<AssetClient>()->createUpload(url.toLocalFile())) {
QMessageBox messageBox;
messageBox.setWindowTitle("Asset upload");
messageBox.setText("You are about to upload the following file to the asset server:\n" +
@@ -4173,19 +4173,19 @@ bool Application::askToUploadAsset(const QString& filename) {
messageBox.setInformativeText("Do you want to continue?");
messageBox.setStandardButtons(QMessageBox::Ok | QMessageBox::Cancel);
messageBox.setDefaultButton(QMessageBox::Ok);
// Option to drop model in world for models
if (filename.endsWith(FBX_EXTENSION) || filename.endsWith(OBJ_EXTENSION)) {
auto checkBox = new QCheckBox(&messageBox);
checkBox->setText("Add to scene");
messageBox.setCheckBox(checkBox);
}
if (messageBox.exec() != QMessageBox::Ok) {
upload->deleteLater();
return false;
}
// connect to the finished signal so we know when the AssetUpload is done
if (messageBox.checkBox() && (messageBox.checkBox()->checkState() == Qt::Checked)) {
// Custom behavior for models
@@ -4195,12 +4195,12 @@ bool Application::askToUploadAsset(const QString& filename) {
&AssetUploadDialogFactory::getInstance(),
&AssetUploadDialogFactory::handleUploadFinished);
}
// start the upload now
upload->start();
return true;
}
// display a message box with the error
QMessageBox::warning(_window, "Failed Upload", QString("Failed to upload %1.\n\n").arg(filename));
return false;
@@ -4208,20 +4208,20 @@ bool Application::askToUploadAsset(const QString& filename) {
void Application::modelUploadFinished(AssetUpload* upload, const QString& hash) {
auto filename = QFileInfo(upload->getFilename()).fileName();
if ((upload->getError() == AssetUpload::NoError) &&
(filename.endsWith(FBX_EXTENSION) || filename.endsWith(OBJ_EXTENSION))) {
auto entities = DependencyManager::get<EntityScriptingInterface>();
EntityItemProperties properties;
properties.setType(EntityTypes::Model);
properties.setModelURL(QString("%1:%2.%3").arg(URL_SCHEME_ATP).arg(hash).arg(upload->getExtension()));
properties.setPosition(_myCamera.getPosition() + _myCamera.getOrientation() * Vectors::FRONT * 2.0f);
properties.setName(QUrl(upload->getFilename()).fileName());
entities->addEntity(properties);
upload->deleteLater();
} else {
AssetUploadDialogFactory::getInstance().handleUploadFinished(upload, hash);
@@ -4510,7 +4510,7 @@ void Application::takeSnapshot() {
_snapshotShareDialog = new SnapshotShareDialog(fileName, _glWidget);
}
_snapshotShareDialog->show();
}
float Application::getRenderResolutionScale() const {
@@ -4755,8 +4755,8 @@ void Application::updateDisplayMode() {
return;
}
// Some plugins *cough* Oculus *cough* process message events from inside their
// display function, and we don't want to change the display plugin underneath
// the paintGL call, so we need to guard against that
if (_inPaint) {
qDebug() << "Deferring plugin switch until out of painting";
@@ -4790,14 +4790,14 @@ void Application::updateDisplayMode() {
oldDisplayPlugin = _displayPlugin;
_displayPlugin = newDisplayPlugin;
// If the displayPlugin is a screen based HMD, then it will want the HMDTools displayed
// Direct Mode HMDs (like windows Oculus) will be isHmd() but will have a screen of -1
bool newPluginWantsHMDTools = newDisplayPlugin ?
(newDisplayPlugin->isHmd() && (newDisplayPlugin->getHmdScreen() >= 0)) : false;
bool oldPluginWantedHMDTools = oldDisplayPlugin ?
(oldDisplayPlugin->isHmd() && (oldDisplayPlugin->getHmdScreen() >= 0)) : false;
// Only show the hmd tools after the correct plugin has
// been activated so that it's UI is setup correctly
if (newPluginWantsHMDTools) {
@@ -4807,7 +4807,7 @@ void Application::updateDisplayMode() {
if (oldDisplayPlugin) {
oldDisplayPlugin->deactivate();
_offscreenContext->makeCurrent();
// if the old plugin was HMD and the new plugin is not HMD, then hide our hmdtools
if (oldPluginWantedHMDTools && !newPluginWantsHMDTools) {
DependencyManager::get<DialogsManager>()->hmdTools(false);
@@ -4940,7 +4940,7 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de
rawVelocity = glm::vec3(0.0f);
}
palm.setRawVelocity(rawVelocity); // meters/sec
// Angular Velocity of Palm
glm::quat deltaRotation = rotation * glm::inverse(palm.getRawRotation());
glm::vec3 angularVelocity(0.0f);
@@ -5020,7 +5020,7 @@ void Application::emulateMouse(Hand* hand, float click, float shift, HandData::H
pos.setY(canvasSize.y / 2.0f + cursorRange * yAngle);
}
//If we are off screen then we should stop processing, and if a trigger or bumper is pressed,
//we should unpress them.
if (pos.x() == INT_MAX) {

@@ -244,14 +244,14 @@ void PhysicsEngine::stepSimulation() {
float timeStep = btMin(dt, MAX_TIMESTEP);
if (_myAvatarController) {
// ADEBUG TODO: move this stuff outside and in front of stepSimulation, because
// TODO: move this stuff outside and in front of stepSimulation, because
// the updateShapeIfNecessary() call needs info from MyAvatar and should
// be done on the main thread during the pre-simulation stuff
if (_myAvatarController->needsRemoval()) {
_myAvatarController->setDynamicsWorld(nullptr);
// We must remove any existing contacts for the avatar so that any new contacts will have
// valid data. MyAvatar's RigidBody is the ONLY one in the simulation that does not yet
// have a MotionState so we pass nullptr to removeContacts().
removeContacts(nullptr);
}
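
Every paired addition/deletion in this commit is the same source line with its trailing whitespace removed. For context, below is a minimal sketch of the kind of cleanup pass that produces such a commit; it is illustrative only and not part of this change set. The scanned directory ("interface/src") and the extension list are assumptions, not taken from this repository, and LF line endings are assumed.

// Minimal sketch: strip trailing spaces/tabs from C++ sources under a directory.
// Illustrative only; path and extensions are assumptions, not part of this commit.
#include <filesystem>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

namespace fs = std::filesystem;

// Rewrites the file with trailing spaces/tabs stripped from every line.
// Returns true if anything changed. A rewritten file ends with a single '\n'
// per line; files that need no change are left untouched. Assumes LF endings.
static bool stripTrailingWhitespace(const fs::path& path) {
    std::ifstream in(path);
    if (!in) {
        return false;
    }
    std::ostringstream cleaned;
    std::string line;
    bool changed = false;
    while (std::getline(in, line)) {
        const auto last = line.find_last_not_of(" \t");
        const std::string trimmed =
            (last == std::string::npos) ? std::string() : line.substr(0, last + 1);
        changed = changed || (trimmed != line);
        cleaned << trimmed << '\n';
    }
    in.close();
    if (changed) {
        std::ofstream out(path, std::ios::trunc);
        out << cleaned.str();
    }
    return changed;
}

int main() {
    int filesTouched = 0;
    // "interface/src" is an assumed path used only for illustration.
    for (const auto& entry : fs::recursive_directory_iterator("interface/src")) {
        const auto ext = entry.path().extension();
        if (entry.is_regular_file() && (ext == ".cpp" || ext == ".h")) {
            if (stripTrailingWhitespace(entry.path())) {
                ++filesTouched;
            }
        }
    }
    std::cout << filesTouched << " files cleaned" << std::endl;
    return 0;
}

In practice this kind of cleanup is usually handled by editor on-save settings or a one-line stream edit rather than a dedicated program; the sketch just makes the transformation behind the 85 paired additions and deletions explicit.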