Merge branch 'master' of github.com:highfidelity/hifi into kinematic-grab

commit febdb52b26
Seth Alves, 2015-10-06 09:20:13 -07:00
47 changed files with 759 additions and 279 deletions


@@ -26,28 +26,28 @@ public:
~EntityServer();
// Subclasses must implement these methods
- virtual OctreeQueryNode* createOctreeQueryNode();
+ virtual OctreeQueryNode* createOctreeQueryNode() override;
- virtual char getMyNodeType() const { return NodeType::EntityServer; }
+ virtual char getMyNodeType() const override { return NodeType::EntityServer; }
- virtual PacketType getMyQueryMessageType() const { return PacketType::EntityQuery; }
+ virtual PacketType getMyQueryMessageType() const override { return PacketType::EntityQuery; }
- virtual const char* getMyServerName() const { return MODEL_SERVER_NAME; }
+ virtual const char* getMyServerName() const override { return MODEL_SERVER_NAME; }
- virtual const char* getMyLoggingServerTargetName() const { return MODEL_SERVER_LOGGING_TARGET_NAME; }
+ virtual const char* getMyLoggingServerTargetName() const override { return MODEL_SERVER_LOGGING_TARGET_NAME; }
- virtual const char* getMyDefaultPersistFilename() const { return LOCAL_MODELS_PERSIST_FILE; }
+ virtual const char* getMyDefaultPersistFilename() const override { return LOCAL_MODELS_PERSIST_FILE; }
- virtual PacketType getMyEditNackType() const { return PacketType::EntityEditNack; }
+ virtual PacketType getMyEditNackType() const override { return PacketType::EntityEditNack; }
- virtual QString getMyDomainSettingsKey() const { return QString("entity_server_settings"); }
+ virtual QString getMyDomainSettingsKey() const override { return QString("entity_server_settings"); }
// subclass may implement these method
- virtual void beforeRun();
+ virtual void beforeRun() override;
- virtual bool hasSpecialPacketsToSend(const SharedNodePointer& node);
+ virtual bool hasSpecialPacketsToSend(const SharedNodePointer& node) override;
- virtual int sendSpecialPackets(const SharedNodePointer& node, OctreeQueryNode* queryNode, int& packetsSent);
+ virtual int sendSpecialPackets(const SharedNodePointer& node, OctreeQueryNode* queryNode, int& packetsSent) override;
- virtual void entityCreated(const EntityItem& newEntity, const SharedNodePointer& senderNode);
+ virtual void entityCreated(const EntityItem& newEntity, const SharedNodePointer& senderNode) override;
virtual bool readAdditionalConfiguration(const QJsonObject& settingsSectionObject) override;
public slots:
void pruneDeletedEntities();
protected:
- virtual OctreePointer createTree();
+ virtual OctreePointer createTree() override;
private slots:
void handleEntityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);


@@ -133,33 +133,14 @@ bool DomainServer::optionallyReadX509KeyAndCertificate() {
QString keyPath = _settingsManager.getSettingsMap().value(X509_PRIVATE_KEY_OPTION).toString();
if (!certPath.isEmpty() && !keyPath.isEmpty()) {
- // the user wants to use DTLS to encrypt communication with nodes
+ // the user wants to use the following cert and key for HTTPS
+ // this is used for Oauth callbacks when authorizing users against a data server
// let's make sure we can load the key and certificate
- // _x509Credentials = new gnutls_certificate_credentials_t;
- // gnutls_certificate_allocate_credentials(_x509Credentials);
QString keyPassphraseString = QProcessEnvironment::systemEnvironment().value(X509_KEY_PASSPHRASE_ENV);
- qDebug() << "Reading certificate file at" << certPath << "for DTLS.";
- qDebug() << "Reading key file at" << keyPath << "for DTLS.";
+ qDebug() << "Reading certificate file at" << certPath << "for HTTPS.";
+ qDebug() << "Reading key file at" << keyPath << "for HTTPS.";
- // int gnutlsReturn = gnutls_certificate_set_x509_key_file2(*_x509Credentials,
- //     certPath.toLocal8Bit().constData(),
- //     keyPath.toLocal8Bit().constData(),
- //     GNUTLS_X509_FMT_PEM,
- //     keyPassphraseString.toLocal8Bit().constData(),
- //     0);
- //
- // if (gnutlsReturn < 0) {
- //     qDebug() << "Unable to load certificate or key file." << "Error" << gnutlsReturn << "- domain-server will now quit.";
- //     QMetaObject::invokeMethod(this, "quit", Qt::QueuedConnection);
- //     return false;
- // }
- // qDebug() << "Successfully read certificate and private key.";
- // we need to also pass this certificate and private key to the HTTPS manager
- // this is used for Oauth callbacks when authorizing users against a data server
QFile certFile(certPath);
certFile.open(QIODevice::ReadOnly);


@@ -0,0 +1,42 @@
//
// Created by Bradley Austin Davis on 2015/10/04
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
IPDScalingTest = function() {
// Switch between normal IPD and 0 IPD every UPDATE_INTERVAL seconds
this.UPDATE_INTERVAL = 10.0;
this.lastUpdateInterval = 0;
this.scaled = false;
var that = this;
Script.scriptEnding.connect(function() {
that.onCleanup();
});
Script.update.connect(function(deltaTime) {
that.lastUpdateInterval += deltaTime;
if (that.lastUpdateInterval >= that.UPDATE_INTERVAL) {
that.onUpdate(that.lastUpdateInterval);
that.lastUpdateInterval = 0;
}
});
}
IPDScalingTest.prototype.onCleanup = function() {
HMD.setIPDScale(1.0);
}
IPDScalingTest.prototype.onUpdate = function(deltaTime) {
this.scaled = !this.scaled;
if (this.scaled) {
HMD.ipdScale = 0.0;
} else {
HMD.ipdScale = 1.0;
}
}
new IPDScalingTest();


@@ -0,0 +1,68 @@
//
// MonoHMD.js
//
// Created by Chris Collins on 10/5/15
// Copyright 2015 High Fidelity, Inc.
//
// This script allows you to switch between mono and stereo mode within the HMD.
// It adds an additional menu to Tools called "IPD".
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
function setupipdMenu() {
if (!Menu.menuExists("Tools > IPD")) {
Menu.addMenu("Tools > IPD");
}
if (!Menu.menuItemExists("Tools > IPD", "Stereo")) {
Menu.addMenuItem({
menuName: "Tools > IPD",
menuItemName: "Stereo",
isCheckable: true,
isChecked: true
});
}
if (!Menu.menuItemExists("Tools > IPD", "Mono")) {
Menu.addMenuItem({
menuName: "Tools > IPD",
menuItemName: "Mono",
isCheckable: true,
isChecked: false
});
}
}
function menuItemEvent(menuItem) {
if (menuItem == "Stereo") {
Menu.setIsOptionChecked("Mono", false);
HMD.ipdScale = 1.0;
}
if (menuItem == "Mono") {
Menu.setIsOptionChecked("Stereo", false);
HMD.ipdScale = 0.0;
}
}
function scriptEnding() {
Menu.removeMenuItem("Tools > IPD", "Stereo");
Menu.removeMenuItem("Tools > IPD", "Mono");
Menu.removeMenu("Tools > IPD");
//reset the HMD to stereo mode
HMD.setIPDScale(1.0);
}
setupipdMenu();
Menu.menuItemEvent.connect(menuItemEvent);
Script.scriptEnding.connect(scriptEnding);


@@ -300,6 +300,7 @@ bool setupEssentials(int& argc, char** argv) {
auto desktopScriptingInterface = DependencyManager::set<DesktopScriptingInterface>();
auto entityScriptingInterface = DependencyManager::set<EntityScriptingInterface>();
auto windowScriptingInterface = DependencyManager::set<WindowScriptingInterface>();
+ auto hmdScriptingInterface = DependencyManager::set<HMDScriptingInterface>();
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
auto speechRecognizer = DependencyManager::set<SpeechRecognizer>();
#endif
@@ -1203,9 +1204,11 @@ void Application::paintGL() {
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(displayPlugin->getProjection(Mono, _myCamera.getProjection()));
renderArgs._context->enableStereo(true);
- mat4 eyeViews[2];
+ mat4 eyeOffsets[2];
mat4 eyeProjections[2];
auto baseProjection = renderArgs._viewFrustum->getProjection();
+ auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
+ float IPDScale = hmdInterface->getIPDScale();
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can.
@@ -1214,14 +1217,24 @@ void Application::paintGL() {
// applied to the avatar, so we need to get the difference between the head
// pose applied to the avatar and the per eye pose, and use THAT as
// the per-eye stereo matrix adjustment.
- mat4 eyePose = displayPlugin->getEyePose(eye);
+ mat4 eyeToHead = displayPlugin->getEyeToHeadTransform(eye);
+ // Grab the translation
+ vec3 eyeOffset = glm::vec3(eyeToHead[3]);
+ // Apply IPD scaling
+ mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * IPDScale);
+ eyeOffsets[eye] = eyeOffsetTransform;
+ // Tell the plugin what pose we're using to render. In this case we're just using the
+ // unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
+ // for rotational timewarp. If we move to support positional timewarp, we need to
+ // ensure this contains the full pose composed with the eye offsets.
mat4 headPose = displayPlugin->getHeadPose();
- mat4 eyeView = glm::inverse(eyePose) * headPose;
- eyeViews[eye] = eyeView;
+ displayPlugin->setEyeRenderPose(eye, headPose);
eyeProjections[eye] = displayPlugin->getProjection(eye, baseProjection);
});
renderArgs._context->setStereoProjections(eyeProjections);
- renderArgs._context->setStereoViews(eyeViews);
+ renderArgs._context->setStereoViews(eyeOffsets);
}
displaySide(&renderArgs, _myCamera);
renderArgs._context->enableStereo(false);
@@ -4130,7 +4143,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
scriptEngine->registerGlobalObject("Paths", DependencyManager::get<PathUtils>().data());
- scriptEngine->registerGlobalObject("HMD", &HMDScriptingInterface::getInstance());
+ scriptEngine->registerGlobalObject("HMD", DependencyManager::get<HMDScriptingInterface>().data());
scriptEngine->registerFunction("HMD", "getHUDLookAtPosition2D", HMDScriptingInterface::getHUDLookAtPosition2D, 0);
scriptEngine->registerFunction("HMD", "getHUDLookAtPosition3D", HMDScriptingInterface::getHUDLookAtPosition3D, 0);
@@ -4980,19 +4993,25 @@ mat4 Application::getEyeProjection(int eye) const {
mat4 Application::getEyePose(int eye) const {
if (isHMDMode()) {
- return getActiveDisplayPlugin()->getEyePose((Eye)eye);
+ auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
+ float IPDScale = hmdInterface->getIPDScale();
+ auto displayPlugin = getActiveDisplayPlugin();
+ mat4 headPose = displayPlugin->getHeadPose();
+ mat4 eyeToHead = displayPlugin->getEyeToHeadTransform((Eye)eye);
+ {
+ vec3 eyeOffset = glm::vec3(eyeToHead[3]);
+ // Apply IPD scaling
+ mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * IPDScale);
+ eyeToHead[3] = vec4(eyeOffset, 1.0);
+ }
+ return eyeToHead * headPose;
}
return mat4();
}
mat4 Application::getEyeOffset(int eye) const {
- if (isHMDMode()) {
- mat4 identity;
- return getActiveDisplayPlugin()->getView((Eye)eye, identity);
- }
- return mat4();
+ // FIXME invert?
+ return getActiveDisplayPlugin()->getEyeToHeadTransform((Eye)eye);
}
mat4 Application::getHMDSensorPose() const {
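A minimal sketch (not part of the commit) of the eye-offset math introduced above, assuming GLM; scaledEyeOffset is a hypothetical helper name. Only the translation column of the eye-to-head transform is scaled, so an ipdScale of 0 collapses both eyes onto the head position (mono rendering) while 1.0 keeps the plugin-reported IPD:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Hypothetical helper mirroring the paintGL() change: scale the per-eye translation
// by ipdScale and negate it, producing the stereo view offset handed to the renderer.
glm::mat4 scaledEyeOffset(const glm::mat4& eyeToHead, float ipdScale) {
    glm::vec3 eyeOffset = glm::vec3(eyeToHead[3]); // translation column = half-IPD offset
    return glm::translate(glm::mat4(1.0f), eyeOffset * -1.0f * ipdScale);
}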


@@ -16,6 +16,9 @@ PluginContainerProxy::PluginContainerProxy() {
Plugin::setContainer(this);
}
+ PluginContainerProxy::~PluginContainerProxy() {
+ }
bool PluginContainerProxy::isForeground() {
return qApp->_isForeground && !qApp->getWindow()->isMinimized();
}
@@ -139,6 +142,10 @@ void PluginContainerProxy::unsetFullscreen(const QScreen* avoid) {
#endif
}
+ void PluginContainerProxy::requestReset() {
+ // We could signal qApp to sequence this, but it turns out that requestReset is only used from within the main thread anyway.
+ qApp->resetSensors();
+ }
void PluginContainerProxy::showDisplayPluginsTools() {
DependencyManager::get<DialogsManager>()->hmdTools(true);
@@ -147,3 +154,7 @@ void PluginContainerProxy::showDisplayPluginsTools() {
QGLWidget* PluginContainerProxy::getPrimarySurface() {
return qApp->_glWidget;
}
+ const DisplayPlugin* PluginContainerProxy::getActiveDisplayPlugin() const {
+ return qApp->getActiveDisplayPlugin();
+ }


@@ -11,17 +11,21 @@
class PluginContainerProxy : public QObject, PluginContainer {
Q_OBJECT
PluginContainerProxy();
+ virtual ~PluginContainerProxy();
virtual void addMenu(const QString& menuName) override;
virtual void removeMenu(const QString& menuName) override;
virtual QAction* addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable = false, bool checked = false, const QString& groupName = "") override;
virtual void removeMenuItem(const QString& menuName, const QString& menuItem) override;
virtual bool isOptionChecked(const QString& name) override;
- virtual void setIsOptionChecked(const QString& path, bool checked);
+ virtual void setIsOptionChecked(const QString& path, bool checked) override;
virtual void setFullscreen(const QScreen* targetScreen, bool hideMenu = true) override;
virtual void unsetFullscreen(const QScreen* avoidScreen = nullptr) override;
virtual void showDisplayPluginsTools() override;
+ virtual void requestReset() override;
virtual QGLWidget* getPrimarySurface() override;
virtual bool isForeground() override;
+ virtual const DisplayPlugin* getActiveDisplayPlugin() const override;
QRect _savedGeometry{ 10, 120, 800, 600 };
friend class Application;


@@ -1341,11 +1341,13 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
if (qApp->isHMDMode()) {
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
- glm::mat4 leftEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Left);
- glm::vec3 leftEyePosition = glm::vec3(leftEyePose[3]);
- glm::mat4 rightEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Right);
- glm::vec3 rightEyePosition = glm::vec3(rightEyePose[3]);
glm::mat4 headPose = Application::getInstance()->getActiveDisplayPlugin()->getHeadPose();
+ glm::mat4 leftEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Left);
+ leftEyePose = leftEyePose * headPose;
+ glm::vec3 leftEyePosition = glm::vec3(leftEyePose[3]);
+ glm::mat4 rightEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Right);
+ rightEyePose = rightEyePose * headPose;
+ glm::vec3 rightEyePosition = glm::vec3(rightEyePose[3]);
glm::vec3 headPosition = glm::vec3(headPose[3]);
getHead()->renderLookAts(renderArgs,


@@ -139,19 +139,19 @@ public:
void updateLookAtTargetAvatar();
void clearLookAtTargetAvatar();
- virtual void setJointRotations(QVector<glm::quat> jointRotations);
+ virtual void setJointRotations(QVector<glm::quat> jointRotations) override;
- virtual void setJointTranslations(QVector<glm::vec3> jointTranslations);
+ virtual void setJointTranslations(QVector<glm::vec3> jointTranslations) override;
- virtual void setJointData(int index, const glm::quat& rotation, const glm::vec3& translation);
+ virtual void setJointData(int index, const glm::quat& rotation, const glm::vec3& translation) override;
- virtual void setJointRotation(int index, const glm::quat& rotation);
+ virtual void setJointRotation(int index, const glm::quat& rotation) override;
- virtual void setJointTranslation(int index, const glm::vec3& translation);
+ virtual void setJointTranslation(int index, const glm::vec3& translation) override;
- virtual void clearJointData(int index);
+ virtual void clearJointData(int index) override;
- virtual void clearJointsData();
+ virtual void clearJointsData() override;
Q_INVOKABLE void useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelName = QString());
Q_INVOKABLE const QUrl& getFullAvatarURLFromPreferences() const { return _fullAvatarURLFromPreferences; }
Q_INVOKABLE const QString& getFullAvatarModelName() const { return _fullAvatarModelName; }
- virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
+ virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData) override;
DynamicCharacterController* getCharacterController() { return &_characterController; }
@@ -218,7 +218,7 @@ public slots:
void saveRecording(QString filename);
void loadLastRecording();
- virtual void rebuildSkeletonBody();
+ virtual void rebuildSkeletonBody() override;
bool getEnableRigAnimations() const { return _rig->getEnableRig(); }
void setEnableRigAnimations(bool isEnabled);
@@ -243,7 +243,7 @@ private:
glm::vec3 getWorldBodyPosition() const;
glm::quat getWorldBodyOrientation() const;
- QByteArray toByteArray(bool cullSmallChanges, bool sendAll);
+ QByteArray toByteArray(bool cullSmallChanges, bool sendAll) override;
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPositio) override;
@@ -252,9 +252,9 @@ private:
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; }
bool getShouldRenderLocally() const { return _shouldRender; }
bool getDriveKeys(int key) { return _driveKeys[key] != 0.0f; };
- bool isMyAvatar() const { return true; }
+ bool isMyAvatar() const override { return true; }
- virtual int parseDataFromBuffer(const QByteArray& buffer);
+ virtual int parseDataFromBuffer(const QByteArray& buffer) override;
- virtual glm::vec3 getSkeletonPosition() const;
+ virtual glm::vec3 getSkeletonPosition() const override;
glm::vec3 getScriptedMotorVelocity() const { return _scriptedMotorVelocity; }
float getScriptedMotorTimescale() const { return _scriptedMotorTimescale; }
@@ -264,7 +264,7 @@ private:
void setScriptedMotorFrame(QString frame);
virtual void attach(const QString& modelURL, const QString& jointName = QString(),
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(), float scale = 1.0f,
- bool allowDuplicates = false, bool useSaved = true);
+ bool allowDuplicates = false, bool useSaved = true) override;
void renderLaserPointers(gpu::Batch& batch);
const RecorderPointer getRecorder() const { return _recorder; }
@@ -273,8 +273,8 @@ private:
bool cameraInsideHead() const;
// These are made private for MyAvatar so that you will use the "use" methods instead
- virtual void setFaceModelURL(const QUrl& faceModelURL);
+ virtual void setFaceModelURL(const QUrl& faceModelURL) override;
- virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
+ virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
void setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visiblity);


@@ -27,10 +27,10 @@ public:
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
~SkeletonModel();
- virtual void initJointStates(QVector<JointState> states);
+ virtual void initJointStates(QVector<JointState> states) override;
- virtual void simulate(float deltaTime, bool fullUpdate = true);
+ virtual void simulate(float deltaTime, bool fullUpdate = true) override;
- virtual void updateRig(float deltaTime, glm::mat4 parentTransform);
+ virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
void updateAttitude();
void renderIKConstraints(gpu::Batch& batch);


@@ -10,31 +10,20 @@
//
#include "HMDScriptingInterface.h"
+ #include <QtScript/QScriptContext>
#include "display-plugins/DisplayPlugin.h"
#include <avatar/AvatarManager.h>
+ #include "Application.h"
- HMDScriptingInterface& HMDScriptingInterface::getInstance() {
-     static HMDScriptingInterface sharedInstance;
-     return sharedInstance;
- }
- bool HMDScriptingInterface::getHUDLookAtPosition3D(glm::vec3& result) const {
-     Camera* camera = Application::getInstance()->getCamera();
-     glm::vec3 position = camera->getPosition();
-     glm::quat orientation = camera->getOrientation();
-     glm::vec3 direction = orientation * glm::vec3(0.0f, 0.0f, -1.0f);
-     const auto& compositor = Application::getInstance()->getApplicationCompositor();
-     return compositor.calculateRayUICollisionPoint(position, direction, result);
+ HMDScriptingInterface::HMDScriptingInterface() {
}
QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
glm::vec3 hudIntersection;
+ auto instance = DependencyManager::get<HMDScriptingInterface>();
- if ((&HMDScriptingInterface::getInstance())->getHUDLookAtPosition3D(hudIntersection)) {
+ if (instance->getHUDLookAtPosition3D(hudIntersection)) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::vec3 sphereCenter = myAvatar->getDefaultEyePosition();
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * (hudIntersection - sphereCenter);
@@ -48,12 +37,29 @@ QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* conte
QScriptValue HMDScriptingInterface::getHUDLookAtPosition3D(QScriptContext* context, QScriptEngine* engine) {
glm::vec3 result;
- if ((&HMDScriptingInterface::getInstance())->getHUDLookAtPosition3D(result)) {
+ auto instance = DependencyManager::get<HMDScriptingInterface>();
+ if (instance->getHUDLookAtPosition3D(result)) {
return qScriptValueFromValue<glm::vec3>(engine, result);
}
return QScriptValue::NullValue;
}
- float HMDScriptingInterface::getIPD() const {
-     return Application::getInstance()->getActiveDisplayPlugin()->getIPD();
+ void HMDScriptingInterface::toggleMagnifier() {
+     qApp->getApplicationCompositor().toggleMagnifier();
+ }
+ bool HMDScriptingInterface::getMagnifier() const {
+     return Application::getInstance()->getApplicationCompositor().hasMagnifier();
+ }
+ bool HMDScriptingInterface::getHUDLookAtPosition3D(glm::vec3& result) const {
+     Camera* camera = Application::getInstance()->getCamera();
+     glm::vec3 position = camera->getPosition();
+     glm::quat orientation = camera->getOrientation();
+     glm::vec3 direction = orientation * glm::vec3(0.0f, 0.0f, -1.0f);
+     const auto& compositor = Application::getInstance()->getApplicationCompositor();
+     return compositor.calculateRayUICollisionPoint(position, direction, result);
}


@@ -12,32 +12,29 @@
#ifndef hifi_HMDScriptingInterface_h
#define hifi_HMDScriptingInterface_h
+ #include <QtScript/QScriptValue>
+ class QScriptContext;
+ class QScriptEngine;
#include <GLMHelpers.h>
+ #include <DependencyManager.h>
+ #include <display-plugins/AbstractHMDScriptingInterface.h>
- #include "Application.h"
- class HMDScriptingInterface : public QObject {
+ class HMDScriptingInterface : public AbstractHMDScriptingInterface, public Dependency {
Q_OBJECT
Q_PROPERTY(bool magnifier READ getMagnifier)
- Q_PROPERTY(bool active READ isHMDMode)
- Q_PROPERTY(float ipd READ getIPD)
public:
- static HMDScriptingInterface& getInstance();
+ HMDScriptingInterface();
static QScriptValue getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine);
static QScriptValue getHUDLookAtPosition3D(QScriptContext* context, QScriptEngine* engine);
public slots:
- void toggleMagnifier() { Application::getInstance()->getApplicationCompositor().toggleMagnifier(); };
+ void toggleMagnifier();
private:
- HMDScriptingInterface() {};
- bool getMagnifier() const { return Application::getInstance()->getApplicationCompositor().hasMagnifier(); };
- bool isHMDMode() const { return Application::getInstance()->isHMDMode(); }
- float getIPD() const;
+ bool getMagnifier() const;
bool getHUDLookAtPosition3D(glm::vec3& result) const;
};
#endif // hifi_HMDScriptingInterface_h


@@ -159,7 +159,7 @@ void AnimClip::copyFromNetworkAnim() {
// used to adjust translation offsets, so large translation animatons on the reference skeleton
// will be adjusted when played on a skeleton with short limbs.
- float limbLengthScale = fabs(glm::length(fbxZeroTrans)) <= 0.0001f ? 1.0f : (glm::length(relBindPose.trans) / glm::length(fbxZeroTrans));
+ float limbLengthScale = fabsf(glm::length(fbxZeroTrans)) <= 0.0001f ? 1.0f : (glm::length(relBindPose.trans) / glm::length(fbxZeroTrans));
AnimPose& pose = _anim[frame][skeletonJoint];
const FBXAnimationFrame& fbxAnimFrame = geom.animationFrames[frame];


@@ -52,7 +52,7 @@ protected:
void computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses);
void solveWithCyclicCoordinateDescent(const std::vector<IKTarget>& targets);
- virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton);
+ virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) override;
// for AnimDebugDraw rendering
virtual const AnimPoseVec& getPosesInternal() const override { return _relativePoses; }


@@ -0,0 +1,52 @@
//
// Created by Bradley Austin Davis on 2015/10/04
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AbstractHMDScriptingInterface.h"
#include <SettingHandle.h>
#include "DisplayPlugin.h"
#include <plugins/PluginContainer.h>
#include <OVR_CAPI_Keys.h>
static Setting::Handle<float> IPD_SCALE_HANDLE("hmd.ipdScale", 1.0f);
AbstractHMDScriptingInterface::AbstractHMDScriptingInterface() {
_IPDScale = IPD_SCALE_HANDLE.get();
}
float AbstractHMDScriptingInterface::getIPD() const {
return PluginContainer::getInstance().getActiveDisplayPlugin()->getIPD();
}
float AbstractHMDScriptingInterface::getEyeHeight() const {
// FIXME update the display plugin interface to expose per-plugin settings
return OVR_DEFAULT_EYE_HEIGHT;
}
float AbstractHMDScriptingInterface::getPlayerHeight() const {
// FIXME update the display plugin interface to expose per-plugin settings
return OVR_DEFAULT_PLAYER_HEIGHT;
}
float AbstractHMDScriptingInterface::getIPDScale() const {
return _IPDScale;
}
void AbstractHMDScriptingInterface::setIPDScale(float IPDScale) {
IPDScale = glm::clamp(IPDScale, -1.0f, 3.0f);
if (IPDScale != _IPDScale) {
_IPDScale = IPDScale;
IPD_SCALE_HANDLE.set(IPDScale);
emit IPDScaleChanged();
}
}
bool AbstractHMDScriptingInterface::isHMDMode() const {
return PluginContainer::getInstance().getActiveDisplayPlugin()->isHmd();
}
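As a worked example of the scale (illustrative numbers, not from the commit): the offsets the clamped setter above controls scale the eye separation linearly, so with the average human IPD of 0.064 m an ipdScale of 0.5 yields an effective separation of 0.032 m, and a scale of 0 gives mono rendering.

// Illustrative arithmetic only: effective separation = device IPD * ipdScale.
float effectiveIPD(float deviceIPD, float ipdScale) {
    return deviceIPD * ipdScale; // e.g. 0.064f * 0.5f == 0.032f
}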


@@ -0,0 +1,39 @@
//
// Created by Bradley Austin Davis on 2015/10/04
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_AbstractHMDScriptingInterface_h
#define hifi_AbstractHMDScriptingInterface_h
#include <GLMHelpers.h>
class AbstractHMDScriptingInterface : public QObject {
Q_OBJECT
Q_PROPERTY(bool active READ isHMDMode)
Q_PROPERTY(float ipd READ getIPD)
Q_PROPERTY(float eyeHeight READ getEyeHeight)
Q_PROPERTY(float playerHeight READ getPlayerHeight)
Q_PROPERTY(float ipdScale READ getIPDScale WRITE setIPDScale NOTIFY IPDScaleChanged)
public:
AbstractHMDScriptingInterface();
float getIPD() const;
float getEyeHeight() const;
float getPlayerHeight() const;
float getIPDScale() const;
void setIPDScale(float ipdScale);
bool isHMDMode() const;
signals:
void IPDScaleChanged();
private:
float _IPDScale{ 1.0 };
};
#endif // hifi_AbstractHMDScriptingInterface_h
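A hedged sketch of how C++ code could observe script-driven IPD changes through the NOTIFY signal declared above; the connect call and watchIPDScale are illustrative, only IPDScaleChanged, getIPDScale, and DependencyManager::get come from this diff.

#include <QtCore/QObject>
#include <QtCore/QDebug>
// Assumes the HMDScriptingInterface / AbstractHMDScriptingInterface headers shown above.

// Illustrative only: react whenever a script (e.g. MonoHMD.js) writes HMD.ipdScale.
void watchIPDScale() {
    auto hmd = DependencyManager::get<HMDScriptingInterface>();
    QObject::connect(hmd.data(), &AbstractHMDScriptingInterface::IPDScaleChanged, [hmd] {
        qDebug() << "IPD scale is now" << hmd->getIPDScale();
    });
}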


@@ -46,6 +46,8 @@ void for_each_eye(F f, FF ff) {
class QWindow;
+ #define AVERAGE_HUMAN_IPD 0.064f
class DisplayPlugin : public Plugin {
Q_OBJECT
public:
@@ -107,21 +109,22 @@ public:
return baseProjection;
}
- virtual glm::mat4 getView(Eye eye, const glm::mat4& baseView) const {
-     return glm::inverse(getEyePose(eye)) * baseView;
- }
// HMD specific methods
// TODO move these into another class?
- virtual glm::mat4 getEyePose(Eye eye) const {
-     static const glm::mat4 pose; return pose;
+ virtual glm::mat4 getEyeToHeadTransform(Eye eye) const {
+     static const glm::mat4 transform; return transform;
}
virtual glm::mat4 getHeadPose() const {
static const glm::mat4 pose; return pose;
}
- virtual float getIPD() const { return 0.0f; }
+ // Needed for timewarp style features
+ virtual void setEyeRenderPose(Eye eye, const glm::mat4& pose) {
+     // NOOP
+ }
+ virtual float getIPD() const { return AVERAGE_HUMAN_IPD; }
virtual void abandonCalibration() {}
virtual void resetSensors() {}


@@ -19,7 +19,6 @@ void OculusBaseDisplayPlugin::preRender() {
#if (OVR_MAJOR_VERSION >= 6)
ovrFrameTiming ftiming = ovr_GetFrameTiming(_hmd, _frameIndex);
_trackingState = ovr_GetTrackingState(_hmd, ftiming.DisplayMidpointSeconds);
- ovr_CalcEyePoses(_trackingState.HeadPose.ThePose, _eyeOffsets, _eyePoses);
#endif
}
@@ -33,14 +32,19 @@ void OculusBaseDisplayPlugin::resetSensors() {
#endif
}
- glm::mat4 OculusBaseDisplayPlugin::getEyePose(Eye eye) const {
-     return toGlm(_eyePoses[eye]);
+ glm::mat4 OculusBaseDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
+     return glm::translate(mat4(), toGlm(_eyeOffsets[eye]));
}
glm::mat4 OculusBaseDisplayPlugin::getHeadPose() const {
return toGlm(_trackingState.HeadPose.ThePose);
}
+ void OculusBaseDisplayPlugin::setEyeRenderPose(Eye eye, const glm::mat4& pose) {
+     _eyePoses[eye] = ovrPoseFromGlm(pose);
+ }
bool OculusBaseDisplayPlugin::isSupported() const {
#if (OVR_MAJOR_VERSION >= 6)
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
@@ -151,9 +155,9 @@ void OculusBaseDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sce
}
float OculusBaseDisplayPlugin::getIPD() const {
- float result = 0.0f;
+ float result = OVR_DEFAULT_IPD;
#if (OVR_MAJOR_VERSION >= 6)
- result = ovr_GetFloat(_hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
+ result = ovr_GetFloat(_hmd, OVR_KEY_IPD, result);
#endif
return result;
}


@@ -29,8 +29,9 @@ public:
virtual glm::uvec2 getRecommendedRenderSize() const override final;
virtual glm::uvec2 getRecommendedUiSize() const override final { return uvec2(1920, 1080); }
virtual void resetSensors() override final;
- virtual glm::mat4 getEyePose(Eye eye) const override final;
+ virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override final;
virtual glm::mat4 getHeadPose() const override final;
+ virtual void setEyeRenderPose(Eye eye, const glm::mat4& pose) override final;
virtual float getIPD() const override final;
protected:
@@ -39,6 +40,7 @@ protected:
protected:
ovrPosef _eyePoses[2];
+ ovrVector3f _eyeOffsets[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
@@ -50,7 +52,6 @@ protected:
ovrHmd _hmd;
float _ipd{ OVR_DEFAULT_IPD };
ovrEyeRenderDesc _eyeRenderDescs[2];
- ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
ovrHmdDesc _hmdDesc;
ovrLayerEyeFov _sceneLayer;


@@ -79,3 +79,11 @@ inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
+ inline ovrPosef ovrPoseFromGlm(const glm::mat4 & m) {
+     glm::vec3 translation = glm::vec3(m[3]) / m[3].w;
+     glm::quat orientation = glm::quat_cast(m);
+     ovrPosef result;
+     result.Orientation = ovrFromGlm(orientation);
+     result.Position = ovrFromGlm(translation);
+     return result;
+ }
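The new ovrPoseFromGlm helper decomposes a 4x4 transform into the position/orientation pair the Oculus SDK expects. A glm-only sketch of the same decomposition (SimplePose and poseFromMat4 are hypothetical names, not part of the SDK glue), which only holds for rigid transforms, i.e. rotation plus translation with no scale or shear:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct SimplePose {
    glm::quat orientation;
    glm::vec3 position;
};

// Same math as ovrPoseFromGlm above: homogeneous divide for the translation,
// quat_cast for the rotation. quat_cast assumes the upper 3x3 is a pure rotation.
SimplePose poseFromMat4(const glm::mat4& m) {
    SimplePose pose;
    pose.position = glm::vec3(m[3]) / m[3].w;
    pose.orientation = glm::quat_cast(m);
    return pose;
}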


@@ -59,11 +59,11 @@ void OculusLegacyDisplayPlugin::resetSensors() {
#endif
}
- glm::mat4 OculusLegacyDisplayPlugin::getEyePose(Eye eye) const {
+ glm::mat4 OculusLegacyDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
#if (OVR_MAJOR_VERSION == 5)
return toGlm(_eyePoses[eye]);
#else
- return WindowOpenGLDisplayPlugin::getEyePose(eye);
+ return WindowOpenGLDisplayPlugin::getEyeToHeadTransform(eye);
#endif
}


@@ -31,7 +31,7 @@ public:
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
- virtual glm::mat4 getEyePose(Eye eye) const override;
+ virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:


@@ -160,8 +160,8 @@ void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
}
- glm::mat4 OpenVrDisplayPlugin::getEyePose(Eye eye) const {
-     return getHeadPose() * _eyesData[eye]._eyeOffset;
+ glm::mat4 OpenVrDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
+     return _eyesData[eye]._eyeOffset;
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {

@@ -29,7 +29,7 @@ public:
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual void resetSensors() override;
- virtual glm::mat4 getEyePose(Eye eye) const override;
+ virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:


@@ -61,10 +61,6 @@ glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProje
return eyeProjection;
}
- glm::mat4 StereoDisplayPlugin::getEyePose(Eye eye) const {
-     return mat4();
- }
std::vector<QAction*> _screenActions;
void StereoDisplayPlugin::activate() {
auto screens = qApp->screens();


@@ -21,7 +21,14 @@ public:
virtual float getRecommendedAspectRatio() const override;
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
- virtual glm::mat4 getEyePose(Eye eye) const override;
+ // NOTE, because Stereo displays don't include head tracking, and therefore
+ // can't include roll or pitch, the eye separation is embedded into the projection
+ // matrix. However, this eliminates the possibility of easily manipulating
+ // the IPD at the Application level, the way we now allow with HMDs.
+ // If that becomes an issue then we'll need to break up the functionality similar
+ // to the HMD plugins.
+ // virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
protected:
void updateScreen();


@@ -56,7 +56,7 @@ void RenderableBoxEntityItem::render(RenderArgs* args) {
if (_procedural->ready()) {
batch.setModelTransform(getTransformToCenter()); // we want to include the scale as well
- _procedural->prepare(batch, this->getDimensions());
+ _procedural->prepare(batch, getPosition(), getDimensions());
auto color = _procedural->getColor(cubeColor);
batch._glColor4f(color.r, color.g, color.b, color.a);
DependencyManager::get<GeometryCache>()->renderCube(batch);


@@ -25,8 +25,8 @@ public:
void updateRenderItem();
- virtual bool addToScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges);
+ virtual bool addToScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges) override;
- virtual void removeFromScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges);
+ virtual void removeFromScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges) override;
protected:
render::ItemID _renderItemId;


@@ -62,7 +62,7 @@ void RenderableSphereEntityItem::render(RenderArgs* args) {
modelTransform.postScale(SPHERE_ENTITY_SCALE);
if (_procedural->ready()) {
batch.setModelTransform(modelTransform); // use a transform with scale, rotation, registration point and translation
- _procedural->prepare(batch, getDimensions());
+ _procedural->prepare(batch, getPosition(), getDimensions());
auto color = _procedural->getColor(sphereColor);
batch._glColor4f(color.r, color.g, color.b, color.a);
DependencyManager::get<GeometryCache>()->renderSphere(batch);


@@ -246,6 +246,26 @@ public:
void _glUniform4iv(int location, int count, const int* value);
void _glUniformMatrix4fv(int location, int count, unsigned char transpose, const float* value);
+ void _glUniform(int location, int v0) {
+     _glUniform1i(location, v0);
+ }
+ void _glUniform(int location, float v0) {
+     _glUniform1f(location, v0);
+ }
+ void _glUniform(int location, const glm::vec2& v) {
+     _glUniform2f(location, v.x, v.y);
+ }
+ void _glUniform(int location, const glm::vec3& v) {
+     _glUniform3f(location, v.x, v.y, v.z);
+ }
+ void _glUniform(int location, const glm::vec4& v) {
+     _glUniform4f(location, v.x, v.y, v.z, v.w);
+ }
void _glColor4f(float red, float green, float blue, float alpha);
enum Command {
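A hedged usage sketch of the new overload set (setSomeUniforms below is hypothetical): the same _glUniform spelling now works for any supported value type, which is what lets Procedural (further down in this diff) store type-erased per-uniform lambdas.

// Illustrative only; assumes the gpu::Batch declaration above.
void setSomeUniforms(gpu::Batch& batch, int colorSlot, int timeSlot) {
    batch._glUniform(colorSlot, glm::vec4(1.0f, 0.5f, 0.25f, 1.0f)); // resolves to _glUniform4f
    batch._glUniform(timeSlot, 0.016f);                              // resolves to _glUniform1f
}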


@@ -343,9 +343,22 @@ bool Texture::assignStoredMipFace(uint16 level, const Element& format, Size size
}
uint16 Texture::autoGenerateMips(uint16 maxMip) {
+ bool changed = false;
+ if (!_autoGenerateMips) {
+     changed = true;
_autoGenerateMips = true;
- _maxMip = std::min((uint16) (evalNumMips() - 1), maxMip);
+ }
+ auto newMaxMip = std::min((uint16)(evalNumMips() - 1), maxMip);
+ if (newMaxMip != _maxMip) {
+     changed = true;
+     _maxMip = newMaxMip;
+ }
+ if (changed) {
_stamp++;
+ }
return _maxMip;
}
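The reworked autoGenerateMips only bumps _stamp when the mip settings actually change, so downstream caches keyed on the stamp are not invalidated by redundant calls. The same idea in isolation (StampedValue is a hypothetical type, illustrative only):

// Illustrative only: advance a version stamp only on real state changes.
struct StampedValue {
    int value{ 0 };
    int stamp{ 0 };
    void set(int newValue) {
        if (newValue != value) {
            value = newValue;
            ++stamp; // observers re-upload or rebuild only when this moves
        }
    }
};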


@@ -59,7 +59,7 @@ public:
// Plugin functions
virtual bool isSupported() const override { return true; }
virtual bool isJointController() const override { return false; }
- const QString& getName() const { return NAME; }
+ const QString& getName() const override { return NAME; }
virtual void activate() override {};
virtual void deactivate() override {};


@@ -30,7 +30,7 @@ public:
// Plugin functions
virtual bool isSupported() const override;
virtual bool isJointController() const override { return false; }
- const QString& getName() const { return NAME; }
+ const QString& getName() const override { return NAME; }
virtual void init() override;
virtual void deinit() override;


@@ -285,6 +285,7 @@ void SixenseManager::updateCalibration(void* controllersX) {
_avatarRotation = glm::inverse(glm::quat_cast(glm::mat3(xAxis, Vectors::UNIT_Y, zAxis)));
const float Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR = -0.3f;
_avatarPosition.y += Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR;
+ CONTAINER->requestReset();
qCDebug(inputplugins, "succeess: sixense calibration");
}
break;


@@ -63,7 +63,7 @@ public:
// Plugin functions
virtual bool isSupported() const override;
virtual bool isJointController() const override { return true; }
- const QString& getName() const { return NAME; }
+ const QString& getName() const override { return NAME; }
virtual void activate() override;
virtual void deactivate() override;


@@ -53,7 +53,7 @@ public:
// Plugin functions
virtual bool isSupported() const override;
virtual bool isJointController() const override { return true; }
- const QString& getName() const { return NAME; }
+ const QString& getName() const override { return NAME; }
virtual void activate() override;
virtual void deactivate() override;


@@ -7,5 +7,5 @@ add_dependency_external_projects(glm)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})
- link_hifi_libraries(shared networking gpu model)
+ link_hifi_libraries(shared networking gpu model fbx)


@@ -30,9 +30,9 @@ class OctreeHeadlessViewer : public OctreeRenderer {
public:
OctreeHeadlessViewer();
virtual ~OctreeHeadlessViewer() {};
- virtual void renderElement(OctreeElementPointer element, RenderArgs* args) { /* swallow these */ }
+ virtual void renderElement(OctreeElementPointer element, RenderArgs* args) override { /* swallow these */ }
- virtual void init();
+ virtual void init() override;
virtual void render(RenderArgs* renderArgs) override { /* swallow these */ }
void setJurisdictionListener(JurisdictionListener* jurisdictionListener) { _jurisdictionListener = jurisdictionListener; }
@@ -58,7 +58,7 @@ public slots:
// getters for LOD and PPS
float getVoxelSizeScale() const { return _voxelSizeScale; }
- int getBoundaryLevelAdjust() const { return _boundaryLevelAdjust; }
+ int getBoundaryLevelAdjust() const override { return _boundaryLevelAdjust; }
int getMaxPacketsPerSecond() const { return _maxPacketsPerSecond; }
unsigned getOctreeElementsCount() const { return _tree->getOctreeElementsCount(); }


@@ -9,7 +9,17 @@
static PluginContainer* INSTANCE{ nullptr };
+ PluginContainer& PluginContainer::getInstance() {
+     Q_ASSERT(INSTANCE);
+     return *INSTANCE;
+ }
PluginContainer::PluginContainer() {
Q_ASSERT(!INSTANCE);
INSTANCE = this;
};
+ PluginContainer::~PluginContainer() {
+     Q_ASSERT(INSTANCE == this);
+     INSTANCE = nullptr;
+ };


@@ -13,10 +13,13 @@
class QAction;
class QGLWidget;
class QScreen;
+ class DisplayPlugin;
class PluginContainer {
public:
+ static PluginContainer& getInstance();
PluginContainer();
+ virtual ~PluginContainer();
virtual void addMenu(const QString& menuName) = 0;
virtual void removeMenu(const QString& menuName) = 0;
virtual QAction* addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable = false, bool checked = false, const QString& groupName = "") = 0;
@@ -26,6 +29,8 @@ public:
virtual void setFullscreen(const QScreen* targetScreen, bool hideMenu = false) = 0;
virtual void unsetFullscreen(const QScreen* avoidScreen = nullptr) = 0;
virtual void showDisplayPluginsTools() = 0;
+ virtual void requestReset() = 0;
virtual QGLWidget* getPrimarySurface() = 0;
virtual bool isForeground() = 0;
+ virtual const DisplayPlugin* getActiveDisplayPlugin() const = 0;
};


@@ -17,20 +17,30 @@
#include <gpu/Batch.h>
#include <SharedUtil.h>
#include <NumericalConstants.h>
+ #include <GLMHelpers.h>
#include "ProceduralShaders.h"
- static const char* const UNIFORM_TIME_NAME= "iGlobalTime";
- static const char* const UNIFORM_SCALE_NAME = "iWorldScale";
+ // Userdata parsing constants
static const QString PROCEDURAL_USER_DATA_KEY = "ProceduralEntity";
static const QString URL_KEY = "shaderUrl";
static const QString VERSION_KEY = "version";
static const QString UNIFORMS_KEY = "uniforms";
+ static const QString CHANNELS_KEY = "channels";
+ // Shader replace strings
static const std::string PROCEDURAL_BLOCK = "//PROCEDURAL_BLOCK";
static const std::string PROCEDURAL_COMMON_BLOCK = "//PROCEDURAL_COMMON_BLOCK";
static const std::string PROCEDURAL_VERSION = "//PROCEDURAL_VERSION";
+ static const std::string STANDARD_UNIFORM_NAMES[Procedural::NUM_STANDARD_UNIFORMS] = {
+     "iDate",
+     "iGlobalTime",
+     "iFrameCount",
+     "iWorldScale",
+     "iWorldPosition",
+     "iChannelResolution"
+ };
// Example
//{
@@ -100,7 +110,21 @@ void Procedural::parse(const QJsonObject& proceduralData) {
{
auto uniforms = proceduralData[UNIFORMS_KEY];
if (uniforms.isObject()) {
- _uniforms = uniforms.toObject();;
+ _parsedUniforms = uniforms.toObject();
+ }
+ }
+ // Grab any textures
+ {
+     auto channels = proceduralData[CHANNELS_KEY];
+     if (channels.isArray()) {
+         auto textureCache = DependencyManager::get<TextureCache>();
+         _parsedChannels = channels.toArray();
+         size_t channelCount = std::min(MAX_PROCEDURAL_TEXTURE_CHANNELS, (size_t)_parsedChannels.size());
+         for (size_t i = 0; i < channelCount; ++i) {
+             QString url = _parsedChannels.at(i).toString();
+             _channels[i] = textureCache->getTexture(QUrl(url));
+         }
+     }
}
}
_enabled = true;
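For context, a hedged example of the entity userData this parser expects; the exact JSON shape, the version value, and the iSpeed uniform name are inferred from the keys above (shaderUrl, version, uniforms, and the new channels array of up to four texture URLs bound to iChannel0 through iChannel3), so treat it as an assumption rather than documented format. The URLs are placeholders.

// Hypothetical userData payload (assumption, not from the commit):
const char* EXAMPLE_PROCEDURAL_USER_DATA = R"JSON(
{
    "ProceduralEntity": {
        "shaderUrl": "https://example.com/shader.fs",
        "version": 2,
        "uniforms": { "iSpeed": 1.5 },
        "channels": [
            "https://example.com/noise.png",
            "https://example.com/gradient.png"
        ]
    }
}
)JSON";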
@ -111,20 +135,26 @@ bool Procedural::ready() {
return false; return false;
} }
if (!_shaderPath.isEmpty()) { // Do we have a network or local shader
return true; if (_shaderPath.isEmpty() && (!_networkShader || !_networkShader->isLoaded())) {
}
if (_networkShader) {
return _networkShader->isLoaded();
}
return false; return false;
}
// Do we have textures, and if so, are they loaded?
for (size_t i = 0; i < MAX_PROCEDURAL_TEXTURE_CHANNELS; ++i) {
if (_channels[i] && !_channels[i]->isLoaded()) {
return false;
}
}
return true;
} }
void Procedural::prepare(gpu::Batch& batch, const glm::vec3& size) { void Procedural::prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size) {
_entityDimensions = size;
_entityPosition = position;
if (_shaderUrl.isLocalFile()) { if (_shaderUrl.isLocalFile()) {
auto lastModified = (quint64) QFileInfo(_shaderPath).lastModified().toMSecsSinceEpoch(); auto lastModified = (quint64)QFileInfo(_shaderPath).lastModified().toMSecsSinceEpoch();
if (lastModified > _shaderModified) { if (lastModified > _shaderModified) {
QFile file(_shaderPath); QFile file(_shaderPath);
file.open(QIODevice::ReadOnly); file.open(QIODevice::ReadOnly);
@ -164,69 +194,183 @@ void Procedural::prepare(gpu::Batch& batch, const glm::vec3& size) {
//qDebug() << "FragmentShader:\n" << fragmentShaderSource.c_str(); //qDebug() << "FragmentShader:\n" << fragmentShaderSource.c_str();
_fragmentShader = gpu::ShaderPointer(gpu::Shader::createPixel(fragmentShaderSource)); _fragmentShader = gpu::ShaderPointer(gpu::Shader::createPixel(fragmentShaderSource));
_shader = gpu::ShaderPointer(gpu::Shader::createProgram(_vertexShader, _fragmentShader)); _shader = gpu::ShaderPointer(gpu::Shader::createProgram(_vertexShader, _fragmentShader));
gpu::Shader::makeProgram(*_shader);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("iChannel0"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("iChannel1"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("iChannel2"), 2));
slotBindings.insert(gpu::Shader::Binding(std::string("iChannel3"), 3));
gpu::Shader::makeProgram(*_shader, slotBindings);
_pipeline = gpu::PipelinePointer(gpu::Pipeline::create(_shader, _state)); _pipeline = gpu::PipelinePointer(gpu::Pipeline::create(_shader, _state));
_timeSlot = _shader->getUniforms().findLocation(UNIFORM_TIME_NAME); for (size_t i = 0; i < NUM_STANDARD_UNIFORMS; ++i) {
_scaleSlot = _shader->getUniforms().findLocation(UNIFORM_SCALE_NAME); const std::string& name = STANDARD_UNIFORM_NAMES[i];
_standardUniformSlots[i] = _shader->getUniforms().findLocation(name);
}
_start = usecTimestampNow(); _start = usecTimestampNow();
_frameCount = 0;
} }
batch.setPipeline(_pipeline); batch.setPipeline(_pipeline);
if (_pipelineDirty) { if (_pipelineDirty) {
_pipelineDirty = false; _pipelineDirty = false;
setupUniforms();
}
for (auto lambda : _uniforms) {
lambda(batch);
}
static gpu::Sampler sampler;
static std::once_flag once;
std::call_once(once, [&] {
gpu::Sampler::Desc desc;
desc._filter = gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR;
// apply the trilinear filter description to the shared sampler
sampler = gpu::Sampler(desc);
});
for (size_t i = 0; i < MAX_PROCEDURAL_TEXTURE_CHANNELS; ++i) {
if (_channels[i] && _channels[i]->isLoaded()) {
auto gpuTexture = _channels[i]->getGPUTexture();
if (gpuTexture) {
gpuTexture->setSampler(sampler);
gpuTexture->autoGenerateMips(-1);
}
batch.setResourceTexture(i, gpuTexture);
}
}
}
void Procedural::setupUniforms() {
_uniforms.clear();
// Set any userdata specified uniforms // Set any userdata specified uniforms
foreach(QString key, _uniforms.keys()) { foreach(QString key, _parsedUniforms.keys()) {
std::string uniformName = key.toLocal8Bit().data(); std::string uniformName = key.toLocal8Bit().data();
int32_t slot = _shader->getUniforms().findLocation(uniformName); int32_t slot = _shader->getUniforms().findLocation(uniformName);
if (gpu::Shader::INVALID_LOCATION == slot) { if (gpu::Shader::INVALID_LOCATION == slot) {
continue; continue;
} }
QJsonValue value = _uniforms[key]; QJsonValue value = _parsedUniforms[key];
if (value.isDouble()) { if (value.isDouble()) {
batch._glUniform1f(slot, value.toDouble()); float v = value.toDouble();
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform1f(slot, v);
});
} else if (value.isArray()) { } else if (value.isArray()) {
auto valueArray = value.toArray(); auto valueArray = value.toArray();
switch (valueArray.size()) { switch (valueArray.size()) {
case 0: case 0:
break; break;
case 1: case 1: {
batch._glUniform1f(slot, valueArray[0].toDouble()); float v = valueArray[0].toDouble();
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform1f(slot, v);
});
break; break;
case 2: }
batch._glUniform2f(slot,
valueArray[0].toDouble(), case 2: {
valueArray[1].toDouble()); glm::vec2 v{ valueArray[0].toDouble(), valueArray[1].toDouble() };
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform2f(slot, v.x, v.y);
});
break; break;
case 3: }
batch._glUniform3f(slot,
valueArray[0].toDouble(), case 3: {
valueArray[1].toDouble(), glm::vec3 v{
valueArray[2].toDouble());
break;
case 4:
default:
batch._glUniform4f(slot,
valueArray[0].toDouble(), valueArray[0].toDouble(),
valueArray[1].toDouble(), valueArray[1].toDouble(),
valueArray[2].toDouble(), valueArray[2].toDouble(),
valueArray[3].toDouble()); };
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform3f(slot, v.x, v.y, v.z);
});
break;
}
default:
case 4: {
glm::vec4 v{
valueArray[0].toDouble(),
valueArray[1].toDouble(),
valueArray[2].toDouble(),
valueArray[3].toDouble(),
};
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform4f(slot, v.x, v.y, v.z, v.w);
});
break; break;
} }
} }
} }
} }
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[TIME]) {
_uniforms.push_back([=](gpu::Batch& batch) {
// Minimize floating point error by doing an integer division to milliseconds, before the floating point division to seconds // Minimize floating point error by doing an integer division to milliseconds, before the floating point division to seconds
float time = (float)((usecTimestampNow() - _start) / USECS_PER_MSEC) / MSECS_PER_SECOND; float time = (float)((usecTimestampNow() - _start) / USECS_PER_MSEC) / MSECS_PER_SECOND;
batch._glUniform1f(_timeSlot, time); batch._glUniform(_standardUniformSlots[TIME], time);
// FIXME move into the 'set once' section, since this doesn't change over time });
batch._glUniform3f(_scaleSlot, size.x, size.y, size.z); }
}
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[DATE]) {
_uniforms.push_back([=](gpu::Batch& batch) {
QDateTime now = QDateTime::currentDateTimeUtc();
QDate date = now.date();
QTime time = now.time();
vec4 v;
v.x = date.year();
// Shadertoy month is 0 based
v.y = date.month() - 1;
// But not the day... go figure
v.z = date.day();
v.w = (time.hour() * 3600) + (time.minute() * 60) + time.second();
batch._glUniform(_standardUniformSlots[DATE], v);
});
}
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[FRAME_COUNT]) {
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform(_standardUniformSlots[FRAME_COUNT], ++_frameCount);
});
}
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[SCALE]) {
// FIXME move into the 'set once' section, since this doesn't change over time
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform(_standardUniformSlots[SCALE], _entityDimensions);
});
}
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[POSITION]) {
// FIXME move into the 'set once' section, since this doesn't change over time
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform(_standardUniformSlots[POSITION], _entityPosition);
});
}
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[CHANNEL_RESOLUTION]) {
_uniforms.push_back([=](gpu::Batch& batch) {
vec3 channelSizes[MAX_PROCEDURAL_TEXTURE_CHANNELS];
for (size_t i = 0; i < MAX_PROCEDURAL_TEXTURE_CHANNELS; ++i) {
if (_channels[i]) {
channelSizes[i] = vec3(_channels[i]->getWidth(), _channels[i]->getHeight(), 1.0);
}
}
batch._glUniform3fv(_standardUniformSlots[CHANNEL_RESOLUTION], MAX_PROCEDURAL_TEXTURE_CHANNELS, &channelSizes[0].x);
});
}
}
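Two of the standard uniforms above deserve a note. For iGlobalTime, the comment about floating point error matters because a float carries only about 24 bits of mantissa: a raw microsecond count stops being exactly representable after roughly 2^24 microseconds (about 17 seconds), while an integer division down to milliseconds first keeps the count exact for several hours before the single float division to seconds. A standalone sketch of the same computation, where usecTimestampNow and the two constants stand in for the engine's SharedUtil/NumericalConstants helpers:

#include <cstdint>

static const uint64_t USECS_PER_MSEC = 1000;
static const float MSECS_PER_SECOND = 1000.0f;

// Elapsed seconds since 'startUsecs', accurate to the millisecond.
float elapsedSeconds(uint64_t nowUsecs, uint64_t startUsecs) {
    uint64_t elapsedMsecs = (nowUsecs - startUsecs) / USECS_PER_MSEC; // exact integer math
    return (float)elapsedMsecs / MSECS_PER_SECOND;                    // single float division
}

For iDate, the packing follows Shadertoy's convention as noted in the code: x = year, y = zero-based month, z = one-based day, w = seconds elapsed since midnight.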
glm::vec4 Procedural::getColor(const glm::vec4& entityColor) { glm::vec4 Procedural::getColor(const glm::vec4& entityColor) {
if (_version == 1) { if (_version == 1) {

View file

@ -14,11 +14,16 @@
#include <QtCore/QString> #include <QtCore/QString>
#include <QtCore/QUrl> #include <QtCore/QUrl>
#include <QtCore/QJsonObject> #include <QtCore/QJsonObject>
#include <QtCore/QJsonArray>
#include <gpu/Shader.h> #include <gpu/Shader.h>
#include <gpu/Pipeline.h> #include <gpu/Pipeline.h>
#include <gpu/Batch.h> #include <gpu/Batch.h>
#include <model-networking/ShaderCache.h> #include <model-networking/ShaderCache.h>
#include <model-networking/TextureCache.h>
using UniformLambdas = std::list<std::function<void(gpu::Batch& batch)>>;
const size_t MAX_PROCEDURAL_TEXTURE_CHANNELS{ 4 };
// FIXME better encapsulation // FIXME better encapsulation
// FIXME better mechanism for extending to things rendered using shaders other than simple.slv // FIXME better mechanism for extending to things rendered using shaders other than simple.slv
@ -29,7 +34,8 @@ struct Procedural {
void parse(const QString& userDataJson); void parse(const QString& userDataJson);
void parse(const QJsonObject&); void parse(const QJsonObject&);
bool ready(); bool ready();
void prepare(gpu::Batch& batch, const glm::vec3& size); void prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size);
void setupUniforms();
glm::vec4 getColor(const glm::vec4& entityColor); glm::vec4 getColor(const glm::vec4& entityColor);
bool _enabled{ false }; bool _enabled{ false };
@ -43,17 +49,34 @@ struct Procedural {
QUrl _shaderUrl; QUrl _shaderUrl;
quint64 _shaderModified{ 0 }; quint64 _shaderModified{ 0 };
bool _pipelineDirty{ true }; bool _pipelineDirty{ true };
int32_t _timeSlot{ gpu::Shader::INVALID_LOCATION };
int32_t _scaleSlot{ gpu::Shader::INVALID_LOCATION };
uint64_t _start{ 0 };
NetworkShaderPointer _networkShader;
QJsonObject _uniforms;
enum StandardUniforms {
DATE,
TIME,
FRAME_COUNT,
SCALE,
POSITION,
CHANNEL_RESOLUTION,
NUM_STANDARD_UNIFORMS
};
int32_t _standardUniformSlots[NUM_STANDARD_UNIFORMS];
uint64_t _start{ 0 };
int32_t _frameCount{ 0 };
NetworkShaderPointer _networkShader;
QJsonObject _parsedUniforms;
QJsonArray _parsedChannels;
UniformLambdas _uniforms;
NetworkTexturePointer _channels[MAX_PROCEDURAL_TEXTURE_CHANNELS];
gpu::PipelinePointer _pipeline; gpu::PipelinePointer _pipeline;
gpu::ShaderPointer _vertexShader; gpu::ShaderPointer _vertexShader;
gpu::ShaderPointer _fragmentShader; gpu::ShaderPointer _fragmentShader;
gpu::ShaderPointer _shader; gpu::ShaderPointer _shader;
gpu::StatePointer _state; gpu::StatePointer _state;
glm::vec3 _entityDimensions;
glm::vec3 _entityPosition;
}; };
#endif #endif
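Putting the header changes together, the expected calling pattern is: parse the entity userData, wait until ready() reports the shader source and every channel texture as loaded, then call the new three-argument prepare() each frame before drawing. A hedged sketch of a caller; the entity accessors here are illustrative and not lifted from this commit:

// Hypothetical call site; the real ones live in the procedural entity renderers.
Procedural procedural;
procedural.parse(entity->getUserData());           // fills _parsedUniforms / _parsedChannels
if (procedural._enabled && procedural.ready()) {   // shader source available, channel textures loaded
    procedural.prepare(batch, entity->getPosition(), entity->getDimensions());
    // the procedural pipeline and channel textures are now bound on the batch; draw as usual
}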

View file

@ -262,15 +262,39 @@ float snoise(vec2 v) {
return 130.0 * dot(m, g); return 130.0 * dot(m, g);
} }
// TODO add more uniforms // shader playback time (in seconds)
uniform float iGlobalTime; // shader playback time (in seconds) uniform float iGlobalTime;
uniform vec3 iWorldScale; // the dimensions of the object being rendered // the dimensions of the object being rendered
uniform vec3 iWorldScale;
// TODO add support for textures
// TODO document available inputs other than the uniforms
// TODO provide world scale in addition to the untransformed position
#define PROCEDURAL 1 #define PROCEDURAL 1
//PROCEDURAL_VERSION //PROCEDURAL_VERSION
#ifdef PROCEDURAL_V1
#else
// Unimplemented uniforms
// Resolution doesn't make sense in the VR context
const vec3 iResolution = vec3(1.0);
// Mouse functions not enabled currently
const vec4 iMouse = vec4(0.0);
// No support for audio input
const float iSampleRate = 1.0;
// No support for video input
const vec4 iChannelTime = vec4(0.0);
uniform vec4 iDate;
uniform int iFrameCount;
uniform vec3 iWorldPosition;
uniform vec3 iChannelResolution[4];
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
#endif
)SHADER"; )SHADER";

View file

@ -74,7 +74,7 @@ void ProceduralSkybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum,
batch.setResourceTexture(0, skybox.getCubemap()); batch.setResourceTexture(0, skybox.getCubemap());
} }
skybox._procedural->prepare(batch, glm::vec3(1)); skybox._procedural->prepare(batch, glm::vec3(0), glm::vec3(1));
batch.draw(gpu::TRIANGLE_STRIP, 4); batch.draw(gpu::TRIANGLE_STRIP, 4);
} }
} }

View file

@ -120,11 +120,11 @@ QString LogHandler::printMessage(LogMsgType type, const QMessageLogContext& cont
} }
// log prefix is in the following format // log prefix is in the following format
// [DEBUG] [TIMESTAMP] [PID] [TARGET] logged string // [TIMESTAMP] [DEBUG] [PID] [TARGET] logged string
QString prefixString = QString("[%1]").arg(stringForLogType(type)); QString prefixString = QString("[%1]").arg(QDateTime::currentDateTime().toString(DATE_STRING_FORMAT));
prefixString.append(QString(" [%1]").arg(QDateTime::currentDateTime().toString(DATE_STRING_FORMAT))); prefixString.append(QString(" [%1]").arg(stringForLogType(type)));
if (_shouldOutputPID) { if (_shouldOutputPID) {
prefixString.append(QString(" [%1]").arg(QCoreApplication::instance()->applicationPid())); prefixString.append(QString(" [%1]").arg(QCoreApplication::instance()->applicationPid()));

View file

@ -260,7 +260,7 @@ protected:
} }
void keyPressEvent(QKeyEvent* event) { void keyPressEvent(QKeyEvent* event) override {
_altPressed = Qt::Key_Alt == event->key(); _altPressed = Qt::Key_Alt == event->key();
switch (event->key()) { switch (event->key()) {
case Qt::Key_B: case Qt::Key_B:
@ -292,13 +292,13 @@ protected:
QWindow::keyPressEvent(event); QWindow::keyPressEvent(event);
} }
QQmlContext* menuContext{ nullptr }; QQmlContext* menuContext{ nullptr };
void keyReleaseEvent(QKeyEvent *event) { void keyReleaseEvent(QKeyEvent *event) override {
if (_altPressed && Qt::Key_Alt == event->key()) { if (_altPressed && Qt::Key_Alt == event->key()) {
VrMenu::toggle(); VrMenu::toggle();
} }
} }
void moveEvent(QMoveEvent* event) { void moveEvent(QMoveEvent* event) override {
static qreal oldPixelRatio = 0.0; static qreal oldPixelRatio = 0.0;
if (devicePixelRatio() != oldPixelRatio) { if (devicePixelRatio() != oldPixelRatio) {
oldPixelRatio = devicePixelRatio(); oldPixelRatio = devicePixelRatio();

View file

@ -71,13 +71,13 @@ function createAllToys() {
createCombinedArmChair({ createCombinedArmChair({
x: 549.29, x: 549.29,
y: 495.05, y: 494.9,
z: 508.22 z: 508.22
}); });
createPottedPlant({ createPottedPlant({
x: 554.26, x: 554.26,
y: 495.23, y: 495.2,
z: 504.53 z: 504.53
}); });
@ -98,7 +98,7 @@ function createAllToys() {
function deleteAllToys() { function deleteAllToys() {
var entities = Entities.findEntities(MyAvatar.position, 100); var entities = Entities.findEntities(MyAvatar.position, 100);
entities.forEach(function (entity) { entities.forEach(function(entity) {
//params: customKey, id, defaultValue //params: customKey, id, defaultValue
var shouldReset = getEntityCustomData(resetKey, entity, {}).resetMe; var shouldReset = getEntityCustomData(resetKey, entity, {}).resetMe;
if (shouldReset === true) { if (shouldReset === true) {