pull qApp spaghetti out of Avatar class

some ends of spaghetti get pushed into AvatarManager class
split Camera class into Camera base and FancyCamera derivation
Application::getCamera() returns Camera by reference instead of pointer
Andrew Meadows 2017-04-10 17:28:21 -07:00
parent 7505b82189
commit 1137bd1436
13 changed files with 131 additions and 79 deletions
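
For orientation, here is a small, self-contained sketch of the shape this commit produces. The Qt, glm, and entity types are replaced with stand-ins so the sketch compiles on its own, and the member lists are trimmed, so treat it as a reading aid rather than the real declarations (those are in the hunks below).

#include <memory>
#include <string>

// Stand-in types; the real code uses QUuid, the CameraMode enum, and EntityItemPointer.
using CameraMode = int;
constexpr CameraMode CAMERA_MODE_THIRD_PERSON = 2;
using EntityID = std::string;
struct EntityItem { EntityID getID() const { return "some-entity-id"; } };
using EntityItemPointer = std::shared_ptr<EntityItem>;

class Camera {                      // generic base: mode, position, orientation, frustum
public:
    CameraMode getMode() const { return _mode; }
protected:
    CameraMode _mode { CAMERA_MODE_THIRD_PERSON };
};

class FancyCamera : public Camera { // adds the entity-following camera, which needs application state
public:
    EntityID getCameraEntity() const { return _cameraEntity ? _cameraEntity->getID() : EntityID(); }
    // the real setCameraEntity(QUuid) looks the entity up through qApp's entity tree
    void setCameraEntity(EntityItemPointer entity) { _cameraEntity = entity; }
private:
    EntityItemPointer _cameraEntity;
};

class Application {                 // owns the derived camera but hands out only the base, by reference
public:
    Camera& getCamera() { return _myCamera; }
    const Camera& getCamera() const { return _myCamera; }
private:
    FancyCamera _myCamera;
};

int main() {
    Application app;
    // call sites switch from pointer syntax, getCamera()->getMode(), to reference syntax:
    return app.getCamera().getMode() == CAMERA_MODE_THIRD_PERSON ? 0 : 1;
}

The point of the split is that the part needing qApp (the entity lookup) now lives only in FancyCamera, while the generic Camera base and the code that just reads position, orientation, or mode can stay ignorant of the application global.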

View file

@ -1124,19 +1124,19 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return qApp->isHMDMode() ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_CAMERA_FULL_SCREEN_MIRROR, []() -> float {
return qApp->getCamera()->getMode() == CAMERA_MODE_MIRROR ? 1 : 0;
return qApp->getCamera().getMode() == CAMERA_MODE_MIRROR ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_CAMERA_FIRST_PERSON, []() -> float {
return qApp->getCamera()->getMode() == CAMERA_MODE_FIRST_PERSON ? 1 : 0;
return qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_CAMERA_THIRD_PERSON, []() -> float {
return qApp->getCamera()->getMode() == CAMERA_MODE_THIRD_PERSON ? 1 : 0;
return qApp->getCamera().getMode() == CAMERA_MODE_THIRD_PERSON ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_CAMERA_ENTITY, []() -> float {
return qApp->getCamera()->getMode() == CAMERA_MODE_ENTITY ? 1 : 0;
return qApp->getCamera().getMode() == CAMERA_MODE_ENTITY ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_CAMERA_INDEPENDENT, []() -> float {
return qApp->getCamera()->getMode() == CAMERA_MODE_INDEPENDENT ? 1 : 0;
return qApp->getCamera().getMode() == CAMERA_MODE_INDEPENDENT ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_SNAP_TURN, []() -> float {
return qApp->getMyAvatar()->getSnapTurn() ? 1 : 0;

View file

@ -52,7 +52,7 @@
#include "avatar/MyAvatar.h"
#include "BandwidthRecorder.h"
#include "Camera.h"
#include "FancyCamera.h"
#include "ConnectionMonitor.h"
#include "gpu/Context.h"
#include "Menu.h"
@ -174,8 +174,8 @@ public:
bool isThrottleRendering() const;
Camera* getCamera() { return &_myCamera; }
const Camera* getCamera() const { return &_myCamera; }
Camera& getCamera() { return _myCamera; }
const Camera& getCamera() const { return _myCamera; }
// Represents the current view frustum of the avatar.
void copyViewFrustum(ViewFrustum& viewOut) const;
// Represents the view frustum of the current rendering pass,
@ -558,7 +558,7 @@ private:
SimpleMovingAverage _avatarSimsPerSecond {10};
int _avatarSimsPerSecondReport {0};
quint64 _lastAvatarSimsPerSecondUpdate {0};
Camera _myCamera; // My view onto the world
FancyCamera _myCamera; // My view onto the world
Setting::Handle<QString> _previousScriptLocation;
Setting::Handle<float> _fieldOfView;

View file

@ -98,18 +98,7 @@ void Camera::setMode(CameraMode mode) {
emit modeUpdated(modeToString(mode));
}
QUuid Camera::getCameraEntity() const {
if (_cameraEntity != nullptr) {
return _cameraEntity->getID();
}
return QUuid();
};
void Camera::setCameraEntity(QUuid entityID) {
_cameraEntity = qApp->getEntities()->getTree()->findEntityByID(entityID);
}
void Camera::setProjection(const glm::mat4& projection) {
_projection = projection;
}
@ -119,7 +108,7 @@ PickRay Camera::computePickRay(float x, float y) {
void Camera::setModeString(const QString& mode) {
CameraMode targetMode = stringToMode(mode);
switch (targetMode) {
case CAMERA_MODE_FIRST_PERSON:
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, true);
@ -139,7 +128,7 @@ void Camera::setModeString(const QString& mode) {
default:
break;
}
qApp->cameraMenuChanged();
if (_mode != targetMode) {

View file

@ -43,15 +43,11 @@ class Camera : public QObject {
* @property position {Vec3} The position of the camera.
* @property orientation {Quat} The orientation of the camera.
* @property mode {string} The current camera mode.
* @property cameraEntity {EntityID} The position and rotation properties of
* the entity specified by this ID are then used as the camera's position and
* orientation. Only works when <code>mode</code> is "entity".
* @property frustum {Object} The frustum of the camera.
*/
Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition)
Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation)
Q_PROPERTY(QString mode READ getModeString WRITE setModeString)
Q_PROPERTY(QUuid cameraEntity READ getCameraEntity WRITE setCameraEntity)
Q_PROPERTY(QVariantMap frustum READ getViewFrustum CONSTANT)
public:
@ -67,8 +63,6 @@ public:
void loadViewFrustum(ViewFrustum& frustum) const;
ViewFrustum toViewFrustum() const;
EntityItemPointer getCameraEntityPointer() const { return _cameraEntity; }
const glm::mat4& getTransform() const { return _transform; }
void setTransform(const glm::mat4& transform);
@ -87,9 +81,6 @@ public slots:
glm::quat getOrientation() const { return _orientation; }
void setOrientation(const glm::quat& orientation);
QUuid getCameraEntity() const;
void setCameraEntity(QUuid entityID);
/**jsdoc
* Compute a {PickRay} based on the current camera configuration and the position x,y on the screen.
* @function Camera.computePickRay
@ -143,7 +134,6 @@ private:
glm::quat _orientation;
bool _isKeepLookingAt{ false };
glm::vec3 _lookingAt;
EntityItemPointer _cameraEntity;
};
#endif // hifi_Camera_h

View file

@ -0,0 +1,31 @@
//
// FancyCamera.cpp
// interface/src
//
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//#include <glm/gtx/quaternion.hpp>
//#include <SharedUtil.h>
//#include <EventTypes.h>
#include "Application.h"
#include "FancyCamera.h"
//#include "Menu.h"
//#include "Util.h"
QUuid FancyCamera::getCameraEntity() const {
if (_cameraEntity != nullptr) {
return _cameraEntity->getID();
}
return QUuid();
};
void FancyCamera::setCameraEntity(QUuid entityID) {
_cameraEntity = qApp->getEntities()->getTree()->findEntityByID(entityID);
}

View file

@ -0,0 +1,40 @@
//
// FancyCamera.h
// interface/src
//
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_FancyCamera_h
#define hifi_FancyCamera_h
#include "Camera.h"
class FancyCamera : public Camera {
Q_OBJECT
/**jsdoc
* @namespace Camera
* @property cameraEntity {EntityID} The position and rotation properties of
* the entity specified by this ID are then used as the camera's position and
* orientation. Only works when <code>mode</code> is "entity".
*/
Q_PROPERTY(QUuid cameraEntity READ getCameraEntity WRITE setCameraEntity)
public:
FancyCamera() : Camera() {}
EntityItemPointer getCameraEntityPointer() const { return _cameraEntity; }
public slots:
QUuid getCameraEntity() const;
void setCameraEntity(QUuid entityID);
private:
EntityItemPointer _cameraEntity;
};
#endif // hifi_FancyCamera_h

View file

@ -28,6 +28,7 @@
#include <OctreeUtils.h>
#include <udt/PacketHeaders.h>
#include <PerfStat.h>
#include <Rig.h>
#include <SharedUtil.h>
#include <TextRenderer3D.h>
#include <TextureCache.h>
@ -46,7 +47,6 @@
#include "InterfaceLogging.h"
#include "SceneScriptingInterface.h"
#include "SoftAttachmentModel.h"
#include <Rig.h>
using namespace std;
@ -71,7 +71,7 @@ namespace render {
auto avatarPtr = static_pointer_cast<Avatar>(avatar);
if (avatarPtr->isInitialized() && args) {
PROFILE_RANGE_BATCH(*args->_batch, "renderAvatarPayload");
avatarPtr->render(args, qApp->getCamera()->getPosition());
avatarPtr->render(args, qApp->getMain3DScene(), qApp->getCamera());
}
}
template <> uint32_t metaFetchMetaSubItems(const AvatarSharedPointer& avatar, ItemIDs& subItems) {
@ -85,7 +85,7 @@ namespace render {
}
}
Avatar::Avatar(RigPointer rig) :
Avatar::Avatar(QThread* thread, RigPointer rig) :
AvatarData(),
_skeletonOffset(0.0f),
_bodyYawDelta(0.0f),
@ -108,7 +108,7 @@ Avatar::Avatar(RigPointer rig) :
_voiceSphereID(GeometryCache::UNKNOWN_ID)
{
// we may have been created in the network thread, but we live in the main thread
moveToThread(qApp->thread());
moveToThread(thread);
setScale(glm::vec3(1.0f)); // avatar scale is uniform
@ -128,7 +128,7 @@ Avatar::Avatar(RigPointer rig) :
Avatar::~Avatar() {
assert(isDead()); // mark dead before calling the dtor
auto treeRenderer = qApp->getEntities();
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (entityTree) {
entityTree->withWriteLock([&] {
@ -237,7 +237,7 @@ void Avatar::updateAvatarEntities() {
return; // wait until MyAvatar gets an ID before doing this.
}
auto treeRenderer = qApp->getEntities();
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (!entityTree) {
return;
@ -578,7 +578,7 @@ void Avatar::postUpdate(float deltaTime) {
}
}
void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
void Avatar::render(RenderArgs* renderArgs, render::ScenePointer scene, const Camera& camera) {
auto& batch = *renderArgs->_batch;
PROFILE_RANGE_BATCH(batch, __FUNCTION__);
@ -653,11 +653,11 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
}
}
glm::vec3 toTarget = cameraPosition - getPosition();
glm::vec3 toTarget = camera.getPosition() - getPosition();
float distanceToTarget = glm::length(toTarget);
{
fixupModelsInScene();
fixupModelsInScene(scene);
if (renderArgs->_renderMode != RenderArgs::SHADOW_RENDER_MODE) {
// add local lights
@ -686,7 +686,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
const float DISPLAYNAME_DISTANCE = 20.0f;
setShowDisplayName(distanceToTarget < DISPLAYNAME_DISTANCE);
auto cameraMode = qApp->getCamera()->getMode();
auto cameraMode = camera.getMode();
if (!isMyAvatar() || cameraMode != CAMERA_MODE_FIRST_PERSON) {
auto& frustum = renderArgs->getViewFrustum();
auto textPosition = getDisplayNamePosition();
@ -712,12 +712,11 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
return glm::angleAxis(angle * proportion, axis);
}
void Avatar::fixupModelsInScene() {
void Avatar::fixupModelsInScene(render::ScenePointer scene) {
_attachmentsToDelete.clear();
// check to see if when we added our models to the scene they were ready, if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = qApp->getMain3DScene();
render::Transaction transaction;
if (_skeletonModel->isRenderable() && _skeletonModel->needsFixupInScene()) {
_skeletonModel->removeFromScene(scene, transaction);
@ -1490,8 +1489,7 @@ QList<QVariant> Avatar::getSkeleton() {
return QList<QVariant>();
}
void Avatar::addToScene(AvatarSharedPointer myHandle) {
render::ScenePointer scene = qApp->getMain3DScene();
void Avatar::addToScene(AvatarSharedPointer myHandle, render::ScenePointer scene) {
if (scene) {
render::Transaction transaction;
auto nodelist = DependencyManager::get<NodeList>();
@ -1505,8 +1503,9 @@ void Avatar::addToScene(AvatarSharedPointer myHandle) {
qCWarning(interfaceapp) << "AvatarManager::addAvatar() : Unexpected null scene, possibly during application shutdown";
}
}
void Avatar::ensureInScene(AvatarSharedPointer self) {
void Avatar::ensureInScene(AvatarSharedPointer self, render::ScenePointer scene) {
if (!render::Item::isValidID(_renderItemID)) {
addToScene(self);
addToScene(self, scene);
}
}

View file

@ -22,6 +22,8 @@
#include <render/Scene.h>
#include "Camera.h"
#include "Head.h"
#include "SkeletonModel.h"
#include "world.h"
@ -68,7 +70,7 @@ class Avatar : public AvatarData {
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
public:
explicit Avatar(RigPointer rig = nullptr);
explicit Avatar(QThread* thread, RigPointer rig = nullptr);
~Avatar();
typedef render::Payload<AvatarData> Payload;
@ -79,7 +81,7 @@ public:
void simulate(float deltaTime, bool inView);
virtual void simulateAttachments(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition);
virtual void render(RenderArgs* renderArgs, render::ScenePointer scene, const Camera& camera);
void addToScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene,
render::Transaction& transaction);
@ -305,7 +307,7 @@ protected:
Transform calculateDisplayNameTransform(const ViewFrustum& view, const glm::vec3& textPosition) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& view, const glm::vec3& textPosition) const;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const;
virtual void fixupModelsInScene();
virtual void fixupModelsInScene(render::ScenePointer scene);
virtual void updatePalms();
@ -316,8 +318,8 @@ protected:
ThreadSafeValueCache<glm::vec3> _rightPalmPositionCache { glm::vec3() };
ThreadSafeValueCache<glm::quat> _rightPalmRotationCache { glm::quat() };
void addToScene(AvatarSharedPointer self);
void ensureInScene(AvatarSharedPointer self);
void addToScene(AvatarSharedPointer self, render::ScenePointer scene);
void ensureInScene(AvatarSharedPointer self, render::ScenePointer scene);
bool isInScene() const { return render::Item::isValidID(_renderItemID); }
// Some rate tracking support

View file

@ -69,7 +69,7 @@ void AvatarManager::registerMetaTypes(QScriptEngine* engine) {
AvatarManager::AvatarManager(QObject* parent) :
_avatarsToFade(),
_myAvatar(std::make_shared<MyAvatar>(std::make_shared<Rig>()))
_myAvatar(std::make_shared<MyAvatar>(qApp->thread(), std::make_shared<Rig>()))
{
// register a meta type for the weak pointer we'll use for the owning avatar mixer for each avatar
qRegisterMetaType<QWeakPointer<Node> >("NodeWeakPointer");
@ -198,7 +198,8 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
// for ALL avatars...
if (_shouldRender) {
avatar->ensureInScene(avatar);
render::ScenePointer scene = qApp->getMain3DScene();
avatar->ensureInScene(avatar, scene);
}
if (!avatar->getMotionState()) {
ShapeInfo shapeInfo;
@ -329,7 +330,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
}
AvatarSharedPointer AvatarManager::newSharedAvatar() {
return std::make_shared<Avatar>(std::make_shared<Rig>());
return std::make_shared<Avatar>(qApp->thread(), std::make_shared<Rig>());
}
void AvatarManager::processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
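
The Avatar-side changes above all follow one pattern: instead of the class reaching through the qApp global for its thread, scene, and camera, the owner passes them in. Condensed from the Avatar and AvatarManager hunks (a summary of the call shape, not standalone code):

// before: Avatar found its own dependencies inside the class
//   Avatar(rig)                       the constructor called qApp->thread() itself
//   avatar->ensureInScene(avatar)     looked up qApp->getMain3DScene() internally
//   avatar->render(args, qApp->getCamera()->getPosition())

// after: the owning code (AvatarManager, the render payload) supplies them
auto avatar = std::make_shared<Avatar>(qApp->thread(), std::make_shared<Rig>());
render::ScenePointer scene = qApp->getMain3DScene();
avatar->ensureInScene(avatar, scene);
avatar->render(args, qApp->getMain3DScene(), qApp->getCamera());

The qApp calls do not disappear; they move into code that already sits next to Application (AvatarManager and the render payload), so Avatar itself no longer depends on the global.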

View file

@ -83,8 +83,8 @@ const float MyAvatar::ZOOM_MIN = 0.5f;
const float MyAvatar::ZOOM_MAX = 25.0f;
const float MyAvatar::ZOOM_DEFAULT = 1.5f;
MyAvatar::MyAvatar(RigPointer rig) :
Avatar(rig),
MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
Avatar(thread, rig),
_wasPushing(false),
_isPushing(false),
_isBeingPushed(false),
@ -260,7 +260,7 @@ void MyAvatar::simulateAttachments(float deltaTime) {
}
QByteArray MyAvatar::toByteArrayStateful(AvatarDataDetail dataDetail) {
CameraMode mode = qApp->getCamera()->getMode();
CameraMode mode = qApp->getCamera().getMode();
_globalPosition = getPosition();
_globalBoundingBoxDimensions.x = _characterController.getCapsuleRadius();
_globalBoundingBoxDimensions.y = _characterController.getCapsuleHalfHeight();
@ -752,13 +752,13 @@ controller::Pose MyAvatar::getRightHandTipPose() const {
}
// virtual
void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
void MyAvatar::render(RenderArgs* renderArgs, render::ScenePointer scene, const Camera& camera) {
// don't render if we've been asked to disable local rendering
if (!_shouldRender) {
return; // exit early
}
Avatar::render(renderArgs, cameraPosition);
Avatar::render(renderArgs, scene, camera);
}
void MyAvatar::overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame) {
@ -1079,7 +1079,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
_targetAvatarPosition = glm::vec3(0.0f);
glm::vec3 lookForward = getHead()->getFinalOrientationInWorldFrame() * IDENTITY_FORWARD;
glm::vec3 cameraPosition = qApp->getCamera()->getPosition();
glm::vec3 cameraPosition = qApp->getCamera().getPosition();
float smallestAngleTo = glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES) / 2.0f;
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
@ -1275,7 +1275,7 @@ void MyAvatar::setAttachmentData(const QVector<AttachmentData>& attachmentData)
}
glm::vec3 MyAvatar::getSkeletonPosition() const {
CameraMode mode = qApp->getCamera()->getMode();
CameraMode mode = qApp->getCamera().getMode();
if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_INDEPENDENT) {
// The avatar is rotated PI about the yAxis, so we have to correct for it
// to get the skeleton offset contribution in the world-frame.
@ -1704,13 +1704,13 @@ void MyAvatar::preDisplaySide(RenderArgs* renderArgs) {
const float RENDER_HEAD_CUTOFF_DISTANCE = 0.3f;
bool MyAvatar::cameraInsideHead() const {
const glm::vec3 cameraPosition = qApp->getCamera()->getPosition();
const glm::vec3 cameraPosition = qApp->getCamera().getPosition();
return glm::length(cameraPosition - getHeadPosition()) < (RENDER_HEAD_CUTOFF_DISTANCE * getUniformScale());
}
bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
bool defaultMode = renderArgs->_renderMode == RenderArgs::DEFAULT_RENDER_MODE;
bool firstPerson = qApp->getCamera()->getMode() == CAMERA_MODE_FIRST_PERSON;
bool firstPerson = qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON;
bool insideHead = cameraInsideHead();
return !defaultMode || !firstPerson || !insideHead;
}
@ -2265,7 +2265,7 @@ glm::vec3 MyAvatar::getPositionForAudio() {
case AudioListenerMode::FROM_HEAD:
return getHead()->getPosition();
case AudioListenerMode::FROM_CAMERA:
return qApp->getCamera()->getPosition();
return qApp->getCamera().getPosition();
case AudioListenerMode::CUSTOM:
return _customListenPosition;
}
@ -2277,7 +2277,7 @@ glm::quat MyAvatar::getOrientationForAudio() {
case AudioListenerMode::FROM_HEAD:
return getHead()->getFinalOrientationInWorldFrame();
case AudioListenerMode::FROM_CAMERA:
return qApp->getCamera()->getOrientation();
return qApp->getCamera().getOrientation();
case AudioListenerMode::CUSTOM:
return _customListenOrientation;
}
@ -2367,7 +2367,7 @@ void MyAvatar::FollowHelper::decrementTimeRemaining(float dt) {
}
bool MyAvatar::FollowHelper::shouldActivateRotation(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const {
auto cameraMode = qApp->getCamera()->getMode();
auto cameraMode = qApp->getCamera().getMode();
if (cameraMode == CAMERA_MODE_THIRD_PERSON) {
return false;
} else {
@ -2515,8 +2515,8 @@ void MyAvatar::setAway(bool value) {
glm::mat4 MyAvatar::computeCameraRelativeHandControllerMatrix(const glm::mat4& controllerSensorMatrix) const {
// Fetch the current camera transform.
glm::mat4 cameraWorldMatrix = qApp->getCamera()->getTransform();
if (qApp->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
glm::mat4 cameraWorldMatrix = qApp->getCamera().getTransform();
if (qApp->getCamera().getMode() == CAMERA_MODE_MIRROR) {
cameraWorldMatrix *= createMatFromScaleQuatAndPos(vec3(-1.0f, 1.0f, 1.0f), glm::quat(), glm::vec3());
}
@ -2561,7 +2561,7 @@ glm::quat MyAvatar::getAbsoluteJointRotationInObjectFrame(int index) const {
Transform avatarTransform;
Transform::mult(avatarTransform, getParentTransform(success), getLocalTransform());
glm::mat4 invAvatarMat = avatarTransform.getInverseMatrix();
return glmExtractRotation(invAvatarMat * qApp->getCamera()->getTransform());
return glmExtractRotation(invAvatarMat * qApp->getCamera().getTransform());
}
default: {
return Avatar::getAbsoluteJointRotationInObjectFrame(index);
@ -2598,7 +2598,7 @@ glm::vec3 MyAvatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
Transform avatarTransform;
Transform::mult(avatarTransform, getParentTransform(success), getLocalTransform());
glm::mat4 invAvatarMat = avatarTransform.getInverseMatrix();
return extractTranslation(invAvatarMat * qApp->getCamera()->getTransform());
return extractTranslation(invAvatarMat * qApp->getCamera().getTransform());
}
default: {
return Avatar::getAbsoluteJointTranslationInObjectFrame(index);

View file

@ -146,7 +146,7 @@ public:
};
Q_ENUM(DriveKeys)
explicit MyAvatar(RigPointer rig);
explicit MyAvatar(QThread* thread, RigPointer rig);
~MyAvatar();
void registerMetaTypes(QScriptEngine* engine);
@ -525,7 +525,7 @@ private:
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPositio) override;
virtual void render(RenderArgs* renderArgs, render::ScenePointer scene, const Camera& camera) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; setEnableMeshVisible(shouldRender); }
bool getShouldRenderLocally() const { return _shouldRender; }

View file

@ -105,9 +105,9 @@ QScriptValue HMDScriptingInterface::getHUDLookAtPosition3D(QScriptContext* conte
}
bool HMDScriptingInterface::getHUDLookAtPosition3D(glm::vec3& result) const {
Camera* camera = qApp->getCamera();
glm::vec3 position = camera->getPosition();
glm::quat orientation = camera->getOrientation();
const Camera& camera = qApp->getCamera();
glm::vec3 position = camera.getPosition();
glm::quat orientation = camera.getOrientation();
glm::vec3 direction = orientation * glm::vec3(0.0f, 0.0f, -1.0f);

View file

@ -31,7 +31,7 @@ QVariant Billboardable::getProperty(const QString &property) {
void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
if (isFacingAvatar()) {
glm::vec3 billboardPos = transform.getTranslation();
glm::vec3 cameraPos = qApp->getCamera()->getPosition();
glm::vec3 cameraPos = qApp->getCamera().getPosition();
glm::vec3 look = cameraPos - billboardPos;
float elevation = -asinf(look.y / glm::length(look));
float azimuth = atan2f(look.x, look.z);