merge from upstream

Seth Alves 2016-05-11 18:45:32 -07:00
commit fcd2e45d67
39 changed files with 422 additions and 284 deletions

View file

@ -5,15 +5,17 @@
{ "from": "Vive.LX", "when": "Vive.LS", "filters": [{ "type": "deadZone", "min": 0.6 }], "to": "Standard.LX" },
{ "from": "Vive.LT", "to": "Standard.LT" },
{ "from": "Vive.LB", "to": "Standard.LB" },
{ "from": "Vive.LeftGrip", "to": "Standard.LB" },
{ "from": "Vive.LS", "to": "Standard.LS" },
{ "from": "Vive.LSTouch", "to": "Standard.LSTouch" },
{ "from": "Vive.RY", "when": "Vive.RS", "filters": ["invert", { "type": "deadZone", "min": 0.6 }], "to": "Standard.RY" },
{ "from": "Vive.RX", "when": "Vive.RS", "filters": [{ "type": "deadZone", "min": 0.6 }], "to": "Standard.RX" },
{ "from": "Vive.RT", "to": "Standard.RT" },
{ "from": "Vive.RB", "to": "Standard.RB" },
{ "from": "Vive.RightGrip", "to": "Standard.RB" },
{ "from": "Vive.RS", "to": "Standard.RS" },
{ "from": "Vive.RSTouch", "to": "Standard.RSTouch" },
{ "from": "Vive.LeftApplicationMenu", "to": "Standard.Back" },
{ "from": "Vive.RightApplicationMenu", "to": "Standard.Start" },

View file

@ -45,11 +45,13 @@
#include <ResourceScriptingInterface.h>
#include <AccountManager.h>
#include <AddressManager.h>
#include <AnimDebugDraw.h>
#include <BuildInfo.h>
#include <AssetClient.h>
#include <AutoUpdater.h>
#include <AudioInjectorManager.h>
#include <CursorManager.h>
#include <DebugDraw.h>
#include <DeferredLightingEffect.h>
#include <display-plugins/DisplayPlugin.h>
#include <EntityScriptingInterface.h>
@ -101,7 +103,7 @@
#include <Preferences.h>
#include <display-plugins/CompositorHelper.h>
#include "AnimDebugDraw.h"
#include "AudioClient.h"
#include "audio/AudioScope.h"
#include "avatar/AvatarManager.h"
@ -673,9 +675,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
UserActivityLogger::getInstance().launch(applicationVersion(), _previousSessionCrashed, sessionRunTime.get());
// once the event loop has started, check and signal for an access token
QMetaObject::invokeMethod(&accountManager, "checkAndSignalForAccessToken", Qt::QueuedConnection);
auto addressManager = DependencyManager::get<AddressManager>();
// use our MyAvatar position and quat for address manager path
@ -1094,6 +1093,11 @@ void Application::checkChangeCursor() {
_cursorNeedsChanging = false;
}
// After all of the constructor is completed, then set firstRun to false.
Setting::Handle<bool> firstRun{ Settings::firstRun, true };
firstRun.set(false);
}
void Application::showCursor(const QCursor& cursor) {
@ -1303,8 +1307,6 @@ void Application::initializeGL() {
// update before the first render
update(0);
InfoView::show(INFO_HELP_PATH, true);
}
FrameTimingsScriptingInterface _frameTimingsScriptingInterface;
@ -2964,7 +2966,21 @@ void Application::init() {
addressLookupString = arguments().value(urlIndex + 1);
}
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
Setting::Handle<bool> firstRun { Settings::firstRun, true };
if (addressLookupString.isEmpty() && firstRun.get()) {
qDebug() << "First run and no URL passed... attempting to go to Home or Entry...";
DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([](){
qDebug() << "Home sandbox appears to be running, going to Home.";
DependencyManager::get<AddressManager>()->goToLocalSandbox();
},
[](){
qDebug() << "Home sandbox does not appear to be running, going to Entry.";
DependencyManager::get<AddressManager>()->goToEntry();
});
} else {
qDebug() << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
}
qCDebug(interfaceapp) << "Loaded settings";
@ -3029,9 +3045,9 @@ void Application::updateLOD() const {
}
}
void Application::pushPreRenderLambda(void* key, std::function<void()> func) {
std::unique_lock<std::mutex> guard(_preRenderLambdasLock);
_preRenderLambdas[key] = func;
void Application::pushPostUpdateLambda(void* key, std::function<void()> func) {
std::unique_lock<std::mutex> guard(_postUpdateLambdasLock);
_postUpdateLambdas[key] = func;
}
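Callers key their lambda on their own pointer, so re-registering before the next Application::update() overwrites the pending entry instead of queuing a duplicate. A minimal usage sketch, modeled on the Model::updateRenderItems() hunk later in this commit:

// Sketch of a typical caller: key by object address, guard with a weak_ptr so the
// lambda becomes a no-op if the object is destroyed before the post-update pass runs.
void* key = (void*)this;
std::weak_ptr<Model> weakSelf = shared_from_this();
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [weakSelf]() {
    auto self = weakSelf.lock();
    if (!self) {
        return; // model already destroyed, nothing to do
    }
    // ... refresh render items here ...
});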
// Called during Application::update immediately before AvatarManager::updateMyAvatar, updating my data that is then sent to everyone.
@ -3553,15 +3569,19 @@ void Application::update(float deltaTime) {
}
}
avatarManager->postUpdate(deltaTime);
{
PROFILE_RANGE_EX("PreRenderLambdas", 0xffff0000, (uint64_t)0);
std::unique_lock<std::mutex> guard(_preRenderLambdasLock);
for (auto& iter : _preRenderLambdas) {
std::unique_lock<std::mutex> guard(_postUpdateLambdasLock);
for (auto& iter : _postUpdateLambdas) {
iter.second();
}
_preRenderLambdas.clear();
_postUpdateLambdas.clear();
}
AnimDebugDraw::getInstance().update();
}
@ -3983,13 +4003,10 @@ namespace render {
void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool selfAvatarOnly) {
// FIXME: This preRender call is temporary until we create a separate render::scene for the mirror rendering.
// FIXME: This preDisplayRender call is temporary until we create a separate render::scene for the mirror rendering.
// Then we can move this logic into the Avatar::simulate call.
auto myAvatar = getMyAvatar();
myAvatar->preRender(renderArgs);
// Update animation debug draw renderer
AnimDebugDraw::getInstance().update();
myAvatar->preDisplaySide(renderArgs);
activeRenderingThread = QThread::currentThread();
PROFILE_RANGE(__FUNCTION__);
@ -4941,24 +4958,19 @@ qreal Application::getDevicePixelRatio() {
return (_window && _window->windowHandle()) ? _window->windowHandle()->devicePixelRatio() : 1.0;
}
DisplayPlugin* Application::getActiveDisplayPlugin() {
DisplayPlugin* result = nullptr;
if (QThread::currentThread() == thread()) {
if (nullptr == _displayPlugin) {
updateDisplayMode();
Q_ASSERT(_displayPlugin);
}
result = _displayPlugin.get();
} else {
DisplayPluginPointer Application::getActiveDisplayPlugin() const {
if (QThread::currentThread() != thread()) {
std::unique_lock<std::mutex> lock(_displayPluginLock);
result = _displayPlugin.get();
return _displayPlugin;
}
return result;
if (!_displayPlugin) {
const_cast<Application*>(this)->updateDisplayMode();
Q_ASSERT(_displayPlugin);
}
return _displayPlugin;
}
const DisplayPlugin* Application::getActiveDisplayPlugin() const {
return const_cast<Application*>(this)->getActiveDisplayPlugin();
}
static void addDisplayPluginToMenu(DisplayPluginPointer displayPlugin, bool active = false) {
auto menu = Menu::getInstance();

View file

@ -175,8 +175,7 @@ public:
void setActiveDisplayPlugin(const QString& pluginName);
DisplayPlugin* getActiveDisplayPlugin();
const DisplayPlugin* getActiveDisplayPlugin() const;
DisplayPluginPointer getActiveDisplayPlugin() const;
FileLogger* getLogger() const { return _logger; }
@ -210,7 +209,7 @@ public:
render::EnginePointer getRenderEngine() override { return _renderEngine; }
gpu::ContextPointer getGPUContext() const { return _gpuContext; }
virtual void pushPreRenderLambda(void* key, std::function<void()> func) override;
virtual void pushPostUpdateLambda(void* key, std::function<void()> func) override;
const QRect& getMirrorViewRect() const { return _mirrorViewRect; }
@ -383,7 +382,7 @@ private:
OffscreenGLCanvas* _offscreenContext { nullptr };
DisplayPluginPointer _displayPlugin;
std::mutex _displayPluginLock;
mutable std::mutex _displayPluginLock;
InputPluginList _activeInputPlugins;
bool _activatingDisplayPlugin { false };
@ -509,8 +508,8 @@ private:
QThread* _deadlockWatchdogThread;
std::map<void*, std::function<void()>> _preRenderLambdas;
std::mutex _preRenderLambdasLock;
std::map<void*, std::function<void()>> _postUpdateLambdas;
std::mutex _postUpdateLambdasLock;
std::atomic<uint32_t> _fullSceneReceivedCounter { 0 }; // how many times have we received a full-scene octree stats packet
uint32_t _fullSceneCounterAtLastPhysicsCheck { 0 }; // _fullSceneReceivedCounter last time we checked physics ready

View file

@ -480,10 +480,8 @@ Menu::Menu() {
avatarManager.data(), SLOT(setShouldShowReceiveStats(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtTargets, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderMyLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderOtherLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::FixGaze, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawDefaultPose, 0, false,
avatar, SLOT(setEnableDebugDrawDefaultPose(bool)));

View file

@ -147,9 +147,8 @@ namespace MenuOption {
const QString ReloadAllScripts = "Reload All Scripts";
const QString ReloadContent = "Reload Content (Clears all caches)";
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderLookAtTargets = "Show Look-at Targets";
const QString RenderLookAtVectors = "Show Look-at Vectors";
const QString RenderMyLookAtVectors = "Show My Eye Vectors";
const QString RenderOtherLookAtVectors = "Show Other Eye Vectors";
const QString RenderMaxTextureMemory = "Maximum Texture Memory";
const QString RenderMaxTextureAutomatic = "Automatic Texture Memory";
const QString RenderMaxTexture64MB = "64 MB";
@ -174,7 +173,6 @@ namespace MenuOption {
const QString ShowDSConnectTable = "Show Domain Connection Timing";
const QString ShowBordersEntityNodes = "Show Entity Nodes";
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
const QString SimulateEyeTracking = "Simulate";
const QString SMIEyeTracking = "SMI Eye Tracking";

View file

@ -54,7 +54,7 @@ QOpenGLContext* PluginContainerProxy::getPrimaryContext() {
return qApp->_glWidget->context()->contextHandle();
}
const DisplayPlugin* PluginContainerProxy::getActiveDisplayPlugin() const {
const DisplayPluginPointer PluginContainerProxy::getActiveDisplayPlugin() const {
return qApp->getActiveDisplayPlugin();
}

View file

@ -24,7 +24,7 @@ class PluginContainerProxy : public QObject, PluginContainer {
virtual ui::Menu* getPrimaryMenu() override;
virtual QOpenGLContext* getPrimaryContext() override;
virtual bool isForeground() override;
virtual const DisplayPlugin* getActiveDisplayPlugin() const override;
virtual const DisplayPluginPointer getActiveDisplayPlugin() const override;
friend class Application;

View file

@ -32,6 +32,7 @@
#include <TextRenderer3D.h>
#include <TextureCache.h>
#include <VariantMapToScriptValue.h>
#include <DebugDraw.h>
#include "Application.h"
#include "Avatar.h"
@ -67,11 +68,6 @@ namespace render {
}
template <> void payloadRender(const AvatarSharedPointer& avatar, RenderArgs* args) {
auto avatarPtr = static_pointer_cast<Avatar>(avatar);
bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors);
avatarPtr->setDisplayingLookatVectors(renderLookAtVectors);
bool renderLookAtTarget = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtTargets);
avatarPtr->setDisplayingLookatTarget(renderLookAtTarget);
if (avatarPtr->isInitialized() && args) {
PROFILE_RANGE_BATCH(*args->_batch, "renderAvatarPayload");
avatarPtr->render(args, qApp->getCamera()->getPosition());
@ -423,6 +419,39 @@ void Avatar::updateRenderItem(render::PendingChanges& pendingChanges) {
}
}
void Avatar::postUpdate(float deltaTime) {
bool renderLookAtVectors;
if (isMyAvatar()) {
renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderMyLookAtVectors);
} else {
renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderOtherLookAtVectors);
}
if (renderLookAtVectors) {
const float EYE_RAY_LENGTH = 10.0;
const glm::vec4 BLUE(0.0f, 0.0f, 1.0f, 1.0f);
const glm::vec4 RED(1.0f, 0.0f, 0.0f, 1.0f);
int leftEyeJoint = getJointIndex("LeftEye");
glm::vec3 leftEyePosition;
glm::quat leftEyeRotation;
if (_skeletonModel->getJointPositionInWorldFrame(leftEyeJoint, leftEyePosition) &&
_skeletonModel->getJointRotationInWorldFrame(leftEyeJoint, leftEyeRotation)) {
DebugDraw::getInstance().drawRay(leftEyePosition, leftEyePosition + leftEyeRotation * Vectors::UNIT_Z * EYE_RAY_LENGTH, BLUE);
}
int rightEyeJoint = getJointIndex("RightEye");
glm::vec3 rightEyePosition;
glm::quat rightEyeRotation;
if (_skeletonModel->getJointPositionInWorldFrame(rightEyeJoint, rightEyePosition) &&
_skeletonModel->getJointRotationInWorldFrame(rightEyeJoint, rightEyeRotation)) {
DebugDraw::getInstance().drawRay(rightEyePosition, rightEyePosition + rightEyeRotation * Vectors::UNIT_Z * EYE_RAY_LENGTH, RED);
}
}
}
void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
auto& batch = *renderArgs->_batch;
PROFILE_RANGE_BATCH(batch, __FUNCTION__);
@ -502,22 +531,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
float distanceToTarget = glm::length(toTarget);
{
// glow when moving far away
const float GLOW_DISTANCE = 20.0f;
const float GLOW_MAX_LOUDNESS = 2500.0f;
const float MAX_GLOW = 0.5f;
float GLOW_FROM_AVERAGE_LOUDNESS = ((this == DependencyManager::get<AvatarManager>()->getMyAvatar())
? 0.0f
: MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE
? 1.0f
: GLOW_FROM_AVERAGE_LOUDNESS;
// render body
renderBody(renderArgs, glowLevel);
fixupModelsInScene();
if (renderArgs->_renderMode != RenderArgs::SHADOW_RENDER_MODE) {
// add local lights
@ -541,64 +555,6 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
const float BOUNDING_SHAPE_ALPHA = 0.7f;
_skeletonModel->renderBoundingCollisionShapes(*renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
}
// If this is the avatar being looked at, render a little ball above their head
if (_isLookAtTarget && Menu::getInstance()->isOptionChecked(MenuOption::RenderFocusIndicator)) {
static const float INDICATOR_OFFSET = 0.22f;
static const float INDICATOR_RADIUS = 0.03f;
static const glm::vec4 LOOK_AT_INDICATOR_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
glm::vec3 avatarPosition = getPosition();
glm::vec3 position = glm::vec3(avatarPosition.x, getDisplayNamePosition().y + INDICATOR_OFFSET, avatarPosition.z);
PROFILE_RANGE_BATCH(batch, __FUNCTION__":renderFocusIndicator");
Transform transform;
transform.setTranslation(position);
transform.postScale(INDICATOR_RADIUS);
batch.setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderSolidSphereInstance(batch, LOOK_AT_INDICATOR_COLOR);
}
// If the avatar is looking at me, indicate that they are
if (getHead()->isLookingAtMe() && Menu::getInstance()->isOptionChecked(MenuOption::ShowWhosLookingAtMe)) {
PROFILE_RANGE_BATCH(batch, __FUNCTION__":renderLookingAtMe");
const glm::vec3 LOOKING_AT_ME_COLOR = { 1.0f, 1.0f, 1.0f };
const float LOOKING_AT_ME_ALPHA_START = 0.8f;
const float LOOKING_AT_ME_DURATION = 0.5f; // seconds
quint64 now = usecTimestampNow();
float alpha = LOOKING_AT_ME_ALPHA_START
* (1.0f - ((float)(now - getHead()->getLookingAtMeStarted()))
/ (LOOKING_AT_ME_DURATION * (float)USECS_PER_SECOND));
if (alpha > 0.0f) {
if (_skeletonModel->isLoaded()) {
const auto& geometry = _skeletonModel->getFBXGeometry();
const float DEFAULT_EYE_DIAMETER = 0.048f; // Typical human eye
const float RADIUS_INCREMENT = 0.005f;
batch.setModelTransform(Transform());
glm::vec3 position = getHead()->getLeftEyePosition();
Transform transform;
transform.setTranslation(position);
float eyeDiameter = geometry.leftEyeSize;
if (eyeDiameter == 0.0f) {
eyeDiameter = DEFAULT_EYE_DIAMETER;
}
batch.setModelTransform(Transform(transform).postScale(eyeDiameter * getUniformScale() / 2.0f + RADIUS_INCREMENT));
DependencyManager::get<GeometryCache>()->renderSolidSphereInstance(batch,
glm::vec4(LOOKING_AT_ME_COLOR, alpha));
position = getHead()->getRightEyePosition();
transform.setTranslation(position);
eyeDiameter = geometry.rightEyeSize;
if (eyeDiameter == 0.0f) {
eyeDiameter = DEFAULT_EYE_DIAMETER;
}
batch.setModelTransform(Transform(transform).postScale(eyeDiameter * getUniformScale() / 2.0f + RADIUS_INCREMENT));
DependencyManager::get<GeometryCache>()->renderSolidSphereInstance(batch,
glm::vec4(LOOKING_AT_ME_COLOR, alpha));
}
}
}
}
const float DISPLAYNAME_DISTANCE = 20.0f;
@ -656,11 +612,6 @@ void Avatar::fixupModelsInScene() {
scene->enqueuePendingChanges(pendingChanges);
}
void Avatar::renderBody(RenderArgs* renderArgs, float glowLevel) {
fixupModelsInScene();
getHead()->renderLookAts(renderArgs);
}
bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
return true;
}

View file

@ -78,9 +78,9 @@ public:
void updateRenderItem(render::PendingChanges& pendingChanges);
virtual void postUpdate(float deltaTime);
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
void setDisplayingLookatTarget(bool displayingLookatTarget) { getHead()->setRenderLookatTarget(displayingLookatTarget); }
void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
bool getIsLookAtTarget() const { return _isLookAtTarget; }
//getters
@ -233,7 +233,6 @@ protected:
Transform calculateDisplayNameTransform(const ViewFrustum& view, const glm::vec3& textPosition) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& view, const glm::vec3& textPosition) const;
virtual void renderBody(RenderArgs* renderArgs, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const;
virtual void fixupModelsInScene();
@ -252,7 +251,7 @@ private:
bool _initialized;
bool _shouldAnimate { true };
bool _shouldSkipRender { false };
bool _isLookAtTarget;
bool _isLookAtTarget { false };
float getBoundingRadius() const;

View file

@ -156,6 +156,15 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
simulateAvatarFades(deltaTime);
}
void AvatarManager::postUpdate(float deltaTime) {
auto hashCopy = getHashCopy();
AvatarHash::iterator avatarIterator = hashCopy.begin();
for (avatarIterator = hashCopy.begin(); avatarIterator != hashCopy.end(); avatarIterator++) {
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
avatar->postUpdate(deltaTime);
}
}
void AvatarManager::simulateAvatarFades(float deltaTime) {
QVector<AvatarSharedPointer>::iterator fadingIterator = _avatarFades.begin();

View file

@ -44,6 +44,8 @@ public:
void updateMyAvatar(float deltaTime);
void updateOtherAvatars(float deltaTime);
void postUpdate(float deltaTime);
void clearOtherAvatars();
void clearAllAvatars();

View file

@ -46,8 +46,6 @@ Head::Head(Avatar* owningAvatar) :
_mouth3(0.0f),
_mouth4(0.0f),
_mouthTime(0.0f),
_renderLookatVectors(false),
_renderLookatTarget(false),
_saccade(0.0f, 0.0f, 0.0f),
_saccadeTarget(0.0f, 0.0f, 0.0f),
_leftEyeBlinkVelocity(0.0f),
@ -316,19 +314,6 @@ void Head::relaxLean(float deltaTime) {
_deltaLeanForward *= relaxationFactor;
}
void Head::renderLookAts(RenderArgs* renderArgs) {
renderLookAts(renderArgs, _leftEyePosition, _rightEyePosition);
}
void Head::renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition) {
if (_renderLookatVectors) {
renderLookatVectors(renderArgs, leftEyePosition, rightEyePosition, getCorrectedLookAtPosition());
}
if (_renderLookatTarget) {
renderLookatTarget(renderArgs, getCorrectedLookAtPosition());
}
}
void Head::setScale (float scale) {
if (_scale == scale) {
return;
@ -439,31 +424,3 @@ void Head::addLeanDeltas(float sideways, float forward) {
_deltaLeanSideways += sideways;
_deltaLeanForward += forward;
}
void Head::renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition) {
auto& batch = *renderArgs->_batch;
auto transform = Transform{};
batch.setModelTransform(transform);
// FIXME: The line width of 2.0f is not supported anymore, we'll need a workaround
glm::vec4 startColor(0.2f, 0.2f, 0.2f, 1.0f);
glm::vec4 endColor(1.0f, 1.0f, 1.0f, 0.0f);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->bindSimpleProgram(batch);
geometryCache->renderLine(batch, leftEyePosition, lookatPosition, startColor, endColor, _leftEyeLookAtID);
geometryCache->renderLine(batch, rightEyePosition, lookatPosition, startColor, endColor, _rightEyeLookAtID);
}
void Head::renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition) {
auto& batch = *renderArgs->_batch;
auto transform = Transform{};
transform.setTranslation(lookatPosition);
auto geometryCache = DependencyManager::get<GeometryCache>();
const float LOOK_AT_TARGET_RADIUS = 0.075f;
transform.postScale(LOOK_AT_TARGET_RADIUS);
const glm::vec4 LOOK_AT_TARGET_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, LOOK_AT_TARGET_COLOR);
}

View file

@ -36,10 +36,6 @@ public:
void setPosition(glm::vec3 position) { _position = position; }
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
void setRenderLookatTarget(bool onOff) { _renderLookatTarget = onOff; }
void renderLookAts(RenderArgs* renderArgs);
void renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition);
/// \return orientationBase+Delta
glm::quat getFinalOrientationInLocalFrame() const;
@ -49,7 +45,7 @@ public:
/// \return orientationBody * orientationBasePitch
glm::quat getCameraOrientation () const;
void setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition);
glm::vec3 getCorrectedLookAtPosition();
void clearCorrectedLookAtPosition() { _isLookingAtMe = false; }
@ -65,9 +61,9 @@ public:
glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
float getFinalLeanSideways() const { return _leanSideways + _deltaLeanSideways; }
float getFinalLeanForward() const { return _leanForward + _deltaLeanForward; }
glm::quat getEyeRotation(const glm::vec3& eyePosition) const;
const glm::vec3& getRightEyePosition() const { return _rightEyePosition; }
const glm::vec3& getLeftEyePosition() const { return _leftEyePosition; }
glm::vec3 getRightEarPosition() const { return _rightEyePosition + (getRightDirection() * EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
@ -84,10 +80,10 @@ public:
void setDeltaYaw(float yaw) { _deltaYaw = yaw; }
float getDeltaYaw() const { return _deltaYaw; }
void setDeltaRoll(float roll) { _deltaRoll = roll; }
float getDeltaRoll() const { return _deltaRoll; }
virtual void setFinalYaw(float finalYaw);
virtual void setFinalPitch(float finalPitch);
virtual void setFinalRoll(float finalRoll);
@ -99,7 +95,7 @@ public:
void addLeanDeltas(float sideways, float forward);
float getTimeWithoutTalking() const { return _timeWithoutTalking; }
private:
glm::vec3 calculateAverageEyePosition() const { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * 0.5f; }
@ -113,7 +109,7 @@ private:
glm::vec3 _leftEyePosition;
glm::vec3 _rightEyePosition;
glm::vec3 _eyePosition;
float _scale;
float _lastLoudness;
float _longTermAverageLoudness;
@ -124,8 +120,7 @@ private:
float _mouth3;
float _mouth4;
float _mouthTime;
bool _renderLookatVectors;
bool _renderLookatTarget;
glm::vec3 _saccade;
glm::vec3 _saccadeTarget;
float _leftEyeBlinkVelocity;
@ -145,15 +140,13 @@ private:
bool _isLookingAtMe;
quint64 _lookingAtMeStarted;
quint64 _wasLastLookingAtMe;
glm::vec3 _correctedLookAtPosition;
int _leftEyeLookAtID;
int _rightEyeLookAtID;
// private methods
void renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition);
void renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition);
void calculateMouthShapes();
void applyEyelidOffset(glm::quat headOrientation);
};

View file

@ -1298,35 +1298,6 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName,
Avatar::attach(modelURL, jointName, translation, rotation, scale, isSoft, allowDuplicates, useSaved);
}
void MyAvatar::renderBody(RenderArgs* renderArgs, float glowLevel) {
if (!_skeletonModel->isRenderable()) {
return; // wait until all models are loaded
}
fixupModelsInScene();
// This is drawing the lookat vectors from our avatar to wherever we're looking.
if (qApp->isHMDMode()) {
glm::vec3 cameraPosition = qApp->getCamera()->getPosition();
glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose();
glm::mat4 leftEyePose = qApp->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Left);
leftEyePose = leftEyePose * headPose;
glm::vec3 leftEyePosition = extractTranslation(leftEyePose);
glm::mat4 rightEyePose = qApp->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Right);
rightEyePose = rightEyePose * headPose;
glm::vec3 rightEyePosition = extractTranslation(rightEyePose);
glm::vec3 headPosition = extractTranslation(headPose);
getHead()->renderLookAts(renderArgs,
cameraPosition + getOrientation() * (leftEyePosition - headPosition),
cameraPosition + getOrientation() * (rightEyePosition - headPosition));
} else {
getHead()->renderLookAts(renderArgs);
}
}
void MyAvatar::setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visible) {
if (model->isActive() && model->isRenderable()) {
model->setVisibleInScene(visible, scene);
@ -1383,10 +1354,11 @@ void MyAvatar::destroyAnimGraph() {
_rig->destroyAnimGraph();
}
void MyAvatar::preRender(RenderArgs* renderArgs) {
void MyAvatar::postUpdate(float deltaTime) {
Avatar::postUpdate(deltaTime);
render::ScenePointer scene = qApp->getMain3DScene();
if (_skeletonModel->initWhenReady(scene)) {
initHeadBones();
_skeletonModel->setCauterizeBoneSet(_headBoneSet);
@ -1436,7 +1408,12 @@ void MyAvatar::preRender(RenderArgs* renderArgs) {
DebugDraw::getInstance().updateMyAvatarPos(getPosition());
DebugDraw::getInstance().updateMyAvatarRot(getOrientation());
}
void MyAvatar::preDisplaySide(RenderArgs* renderArgs) {
// toggle using the cauterizedBones depending on where the camera is and the rendering pass type.
const bool shouldDrawHead = shouldRenderHead(renderArgs);
if (shouldDrawHead != _prevShouldDrawHead) {
_skeletonModel->setCauterizeBones(!shouldDrawHead);

View file

@ -96,7 +96,8 @@ public:
Q_INVOKABLE void reset(bool andRecenter = false);
void update(float deltaTime);
void preRender(RenderArgs* renderArgs);
virtual void postUpdate(float deltaTime) override;
void preDisplaySide(RenderArgs* renderArgs);
const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
@ -310,7 +311,6 @@ private:
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) override;
virtual void renderBody(RenderArgs* renderArgs, float glowLevel = 0.0f) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; setEnableMeshVisible(shouldRender); }
bool getShouldRenderLocally() const { return _shouldRender; }

View file

@ -13,6 +13,7 @@
#include <QMultiMap>
#include <recording/Deck.h>
#include <DebugDraw.h>
#include "Application.h"
#include "Avatar.h"
@ -92,7 +93,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Head* head = _owningAvatar->getHead();
// make sure lookAt is not too close to face (avoid crosseyes)
glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();

View file

@ -1057,20 +1057,30 @@ void Rig::updateNeckJoint(int index, const HeadParameters& params) {
}
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
// TODO: does not properly handle avatar scale.
if (isIndexValid(index)) {
glm::mat4 rigToWorld = createMatFromQuatAndPos(modelRotation, modelTranslation);
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::vec3 zAxis = glm::normalize(_internalPoseSet._absolutePoses[index].trans - transformPoint(worldToRig, lookAtSpot));
glm::vec3 lookAtVector = glm::normalize(transformPoint(worldToRig, lookAtSpot) - _internalPoseSet._absolutePoses[index].trans);
glm::quat desiredQuat = rotationBetween(IDENTITY_FRONT, zAxis);
glm::quat headQuat;
int headIndex = indexOfJoint("Head");
glm::quat headQuat;
if (headIndex >= 0) {
headQuat = _internalPoseSet._absolutePoses[headIndex].rot;
}
glm::vec3 headUp = headQuat * Vectors::UNIT_Y;
glm::vec3 z, y, x;
generateBasisVectors(lookAtVector, headUp, z, y, x);
glm::mat3 m(glm::cross(y, z), y, z);
glm::quat desiredQuat = glm::normalize(glm::quat_cast(m));
glm::quat deltaQuat = desiredQuat * glm::inverse(headQuat);
// limit rotation
// limit swing rotation of the deltaQuat by a 30 degree cone.
// TODO: use swing twist decomposition constraint instead, for off axis rotation clamping.
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
if (fabsf(glm::angle(deltaQuat)) > MAX_ANGLE) {
deltaQuat = glm::angleAxis(glm::clamp(glm::angle(deltaQuat), -MAX_ANGLE, MAX_ANGLE), glm::axis(deltaQuat));

View file

@ -58,7 +58,7 @@ controller::UserInputMapper::UserInputMapper() {
namespace controller {
UserInputMapper::~UserInputMapper() {
}
@ -80,6 +80,7 @@ void UserInputMapper::registerDevice(InputDevice::Pointer device) {
recordDeviceOfType(device->getName());
qCDebug(controllers) << "Registered input device <" << device->getName() << "> deviceID = " << deviceID;
for (const auto& inputMapping : device->getAvailableInputs()) {
const auto& input = inputMapping.first;
// Ignore aliases
@ -102,6 +103,7 @@ void UserInputMapper::registerDevice(InputDevice::Pointer device) {
}
_registeredDevices[deviceID] = device;
auto mapping = loadMappings(device->getDefaultMappingConfigs());
if (mapping) {
_mappingsByDevice[deviceID] = mapping;
@ -111,15 +113,21 @@ void UserInputMapper::registerDevice(InputDevice::Pointer device) {
emit hardwareChanged();
}
// FIXME remove the associated device mappings
void UserInputMapper::removeDevice(int deviceID) {
Locker locker(_lock);
auto proxyEntry = _registeredDevices.find(deviceID);
if (_registeredDevices.end() == proxyEntry) {
qCWarning(controllers) << "Attempted to remove unknown device " << deviceID;
return;
}
auto proxy = proxyEntry->second;
auto device = proxyEntry->second;
qCDebug(controllers) << "Unregistering input device <" << device->getName() << "> deviceID = " << deviceID;
unloadMappings(device->getDefaultMappingConfigs());
auto mappingsEntry = _mappingsByDevice.find(deviceID);
if (_mappingsByDevice.end() != mappingsEntry) {
disableMapping(mappingsEntry->second);
@ -244,7 +252,7 @@ void UserInputMapper::update(float deltaTime) {
for (auto& channel : _actionStates) {
channel = 0.0f;
}
for (auto& channel : _poseStates) {
channel = Pose();
}
@ -705,11 +713,10 @@ Mapping::Pointer UserInputMapper::loadMapping(const QString& jsonFile, bool enab
return Mapping::Pointer();
}
// Each mapping only needs to be loaded once
static QSet<QString> loaded;
if (loaded.contains(jsonFile)) {
if (_loadedRouteJsonFiles.contains(jsonFile)) {
return Mapping::Pointer();
}
loaded.insert(jsonFile);
_loadedRouteJsonFiles.insert(jsonFile);
QString json;
{
QFile file(jsonFile);
@ -741,6 +748,18 @@ MappingPointer UserInputMapper::loadMappings(const QStringList& jsonFiles) {
return result;
}
void UserInputMapper::unloadMappings(const QStringList& jsonFiles) {
for (const QString& jsonFile : jsonFiles) {
unloadMapping(jsonFile);
}
}
void UserInputMapper::unloadMapping(const QString& jsonFile) {
auto entry = _loadedRouteJsonFiles.find(jsonFile);
if (entry != _loadedRouteJsonFiles.end()) {
_loadedRouteJsonFiles.erase(entry);
}
}
static const QString JSON_NAME = QStringLiteral("name");
static const QString JSON_CHANNELS = QStringLiteral("channels");

View file

@ -111,9 +111,18 @@ namespace controller {
void loadDefaultMapping(uint16 deviceID);
void enableMapping(const QString& mappingName, bool enable = true);
void unloadMappings(const QStringList& jsonFiles);
void unloadMapping(const QString& jsonFile);
float getValue(const Input& input) const;
Pose getPose(const Input& input) const;
// perform an action when the UserInputMapper mutex is acquired.
using Locker = std::unique_lock<std::recursive_mutex>;
template <typename F>
void withLock(F&& f) { Locker locker(_lock); f(); }
signals:
void actionEvent(int action, float state);
void inputEvent(int input, float state);
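The withLock() helper added above gives input plugins a way to run their per-frame device update while holding the mapper's recursive mutex; the plugin hunks later in this commit all follow the same pattern. A brief usage sketch:

// Sketch (mirrors the KeyboardMouseDevice/Neuron/Sixense/Vive changes below): hold the
// UserInputMapper lock while the device mutates its shared input state.
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->withLock([&, this]() {
    _inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
});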
@ -177,7 +186,7 @@ namespace controller {
RouteList _deviceRoutes;
RouteList _standardRoutes;
using Locker = std::unique_lock<std::recursive_mutex>;
QSet<QString> _loadedRouteJsonFiles;
mutable std::recursive_mutex _lock;
};

View file

@ -20,8 +20,12 @@
const QString KeyboardMouseDevice::NAME = "Keyboard/Mouse";
void KeyboardMouseDevice::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) {
_inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
void KeyboardMouseDevice::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->withLock([&, this]() {
_inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
});
// For touch event, we need to check that the last event is not too long ago
// Maybe it's a Qt issue, but the touch event sequence (begin, update, end) is not always called properly

View file

@ -629,3 +629,32 @@ void AddressManager::addCurrentAddressToHistory(LookupTrigger trigger) {
}
}
}
void AddressManager::ifLocalSandboxRunningElse(std::function<void()> localSandboxRunningDoThis,
std::function<void()> localSandboxNotRunningDoThat) {
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest sandboxStatus(SANDBOX_STATUS_URL);
sandboxStatus.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
QNetworkReply* reply = networkAccessManager.get(sandboxStatus);
connect(reply, &QNetworkReply::finished, this, [reply, localSandboxRunningDoThis, localSandboxNotRunningDoThat]() {
auto statusData = reply->readAll();
auto statusJson = QJsonDocument::fromJson(statusData);
if (!statusJson.isEmpty()) {
auto statusObject = statusJson.object();
auto serversValue = statusObject.value("servers");
if (!serversValue.isUndefined() && serversValue.isObject()) {
auto serversObject = serversValue.toObject();
auto serversCount = serversObject.size();
const int MINIMUM_EXPECTED_SERVER_COUNT = 5;
if (serversCount >= MINIMUM_EXPECTED_SERVER_COUNT) {
localSandboxRunningDoThis();
return;
}
}
}
localSandboxNotRunningDoThat();
});
}

View file

@ -24,6 +24,8 @@
const QString HIFI_URL_SCHEME = "hifi";
const QString DEFAULT_HIFI_ADDRESS = "hifi://entry";
const QString SANDBOX_HIFI_ADDRESS = "hifi://localhost";
const QString SANDBOX_STATUS_URL = "http://localhost:60332/status";
const QString INDEX_PATH = "/";
const QString GET_PLACE = "/api/v1/places/%1";
@ -65,6 +67,11 @@ public:
const QStack<QUrl>& getBackStack() const { return _backStack; }
const QStack<QUrl>& getForwardStack() const { return _forwardStack; }
/// determines if the local sandbox is likely running. It does not account for custom setups, and is only
/// intended to detect the standard local sandbox install.
void ifLocalSandboxRunningElse(std::function<void()> localSandboxRunningDoThis,
std::function<void()> localSandboxNotRunningDoThat);
public slots:
void handleLookupString(const QString& lookupString);
@ -74,6 +81,8 @@ public slots:
void goBack();
void goForward();
void goToLocalSandbox(LookupTrigger trigger = LookupTrigger::StartupFromSettings) { handleUrl(SANDBOX_HIFI_ADDRESS, trigger); }
void goToEntry(LookupTrigger trigger = LookupTrigger::StartupFromSettings) { handleUrl(DEFAULT_HIFI_ADDRESS, trigger); }
void goToUser(const QString& username);

View file

@ -64,7 +64,7 @@ public:
virtual MainWindow* getPrimaryWindow() = 0;
virtual QOpenGLContext* getPrimaryContext() = 0;
virtual bool isForeground() = 0;
virtual const DisplayPlugin* getActiveDisplayPlugin() const = 0;
virtual const DisplayPluginPointer getActiveDisplayPlugin() const = 0;
/// settings interface
bool getBoolSetting(const QString& settingName, bool defaultValue);

View file

@ -46,7 +46,7 @@ public:
virtual render::ScenePointer getMain3DScene() = 0;
virtual render::EnginePointer getRenderEngine() = 0;
virtual void pushPreRenderLambda(void* key, std::function<void()> func) = 0;
virtual void pushPostUpdateLambda(void* key, std::function<void()> func) = 0;
// FIXME - we shouldn't assume that there's a single instance of an AbstractViewStateInterface
static AbstractViewStateInterface* instance();

View file

@ -307,6 +307,16 @@ static void addLink(const AnimPose& rootPose, const AnimPose& pose, const AnimPo
}
}
static void addLine(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color, Vertex*& v) {
uint32_t colorInt = toRGBA(color);
v->pos = start;
v->rgba = colorInt;
v++;
v->pos = end;
v->rgba = colorInt;
v++;
}
void AnimDebugDraw::update() {
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
@ -319,6 +329,7 @@ void AnimDebugDraw::update() {
const size_t VERTICES_PER_BONE = (6 + (NUM_CIRCLE_SLICES * 2) * 3);
const size_t VERTICES_PER_LINK = 8 * 2;
const size_t VERTICES_PER_RAY = 2;
const float BONE_RADIUS = 0.01f; // 1 cm
const float POSE_RADIUS = 0.1f; // 10 cm
@ -342,6 +353,7 @@ void AnimDebugDraw::update() {
numVerts += (int)markerMap.size() * VERTICES_PER_BONE;
auto myAvatarMarkerMap = DebugDraw::getInstance().getMyAvatarMarkerMap();
numVerts += (int)myAvatarMarkerMap.size() * VERTICES_PER_BONE;
numVerts += (int)DebugDraw::getInstance().getRays().size() * VERTICES_PER_RAY;
// allocate verts!
data._vertexBuffer->resize(sizeof(Vertex) * numVerts);
@ -390,6 +402,12 @@ void AnimDebugDraw::update() {
addBone(myAvatarPose, AnimPose(glm::vec3(1), rot, pos), radius, v);
}
// draw rays from shared DebugDraw singleton
for (auto& iter : DebugDraw::getInstance().getRays()) {
addLine(std::get<0>(iter), std::get<1>(iter), std::get<2>(iter), v);
}
DebugDraw::getInstance().clearRays();
assert(numVerts == (v - verts));
render::Item::Bound theBound;

View file

@ -132,7 +132,7 @@ void Model::updateRenderItems() {
// the application will ensure only the last lambda is actually invoked.
void* key = (void*)this;
std::weak_ptr<Model> weakSelf = shared_from_this();
AbstractViewStateInterface::instance()->pushPreRenderLambda(key, [weakSelf]() {
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [weakSelf]() {
// do nothing, if the model has already been destroyed.
auto self = weakSelf.lock();

View file

@ -271,12 +271,12 @@ void ScriptEngines::loadOneScript(const QString& scriptFilename) {
void ScriptEngines::loadScripts() {
// check first run...
if (_firstRun.get()) {
Setting::Handle<bool> firstRun { Settings::firstRun, true };
if (firstRun.get()) {
qCDebug(scriptengine) << "This is a first run...";
// clear the scripts, and set out script to our default scripts
clearScripts();
loadDefaultScripts();
_firstRun.set(false);
return;
}

View file

@ -87,8 +87,6 @@ protected:
void onScriptEngineError(const QString& scriptFilename);
void launchScriptEngine(ScriptEngine* engine);
Setting::Handle<bool> _firstRun { "firstRun", true };
QReadWriteLock _scriptEnginesHashLock;
QHash<QUrl, ScriptEngine*> _scriptEnginesHash;
QSet<ScriptEngine*> _allKnownScriptEngines;

View file

@ -23,6 +23,11 @@ DebugDraw::~DebugDraw() {
}
// world space line, drawn only once
void DebugDraw::drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color) {
_rays.push_back(Ray(start, end, color));
}
void DebugDraw::addMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
_markers[key] = MarkerInfo(rotation, position, color);
}

View file

@ -13,6 +13,7 @@
#include <unordered_map>
#include <tuple>
#include <string>
#include <vector>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
@ -23,16 +24,21 @@ public:
DebugDraw();
~DebugDraw();
// world space maker
// world space line, drawn only once
void drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color);
// world space marker, marker drawn every frame until it is removed.
void addMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
void removeMarker(const std::string& key);
// myAvatar relative marker
// myAvatar relative marker, marker is drawn every frame until it is removed.
void addMyAvatarMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
void removeMyAvatarMarker(const std::string& key);
using MarkerInfo = std::tuple<glm::quat, glm::vec3, glm::vec4>;
using MarkerMap = std::unordered_map<std::string, MarkerInfo>;
using Ray = std::tuple<glm::vec3, glm::vec3, glm::vec4>;
using Rays = std::vector<Ray>;
//
// accessors used by renderer
@ -44,12 +50,15 @@ public:
const glm::vec3& getMyAvatarPos() const { return _myAvatarPos; }
void updateMyAvatarRot(const glm::quat& rot) { _myAvatarRot = rot; }
const glm::quat& getMyAvatarRot() const { return _myAvatarRot; }
const Rays getRays() const { return _rays; }
void clearRays() { _rays.clear(); }
protected:
MarkerMap _markers;
MarkerMap _myAvatarMarkers;
glm::quat _myAvatarRot;
glm::vec3 _myAvatarPos;
Rays _rays;
};
#endif // hifi_DebugDraw_h
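Putting the pieces together: producers such as Avatar::postUpdate() queue one-shot rays each frame, and AnimDebugDraw::update() drains them after emitting vertices. A minimal sketch of both sides, using only names from this commit:

#include <tuple>
#include <glm/glm.hpp>
#include "DebugDraw.h"

// Sketch: queue a one-frame debug ray, then drain the queue the way the renderer does.
void debugDrawRayExample() {
    glm::vec3 start(0.0f), end(0.0f, 0.0f, 10.0f);
    DebugDraw::getInstance().drawRay(start, end, glm::vec4(0.0f, 0.0f, 1.0f, 1.0f));

    for (auto& ray : DebugDraw::getInstance().getRays()) {
        glm::vec3 rayStart = std::get<0>(ray);
        glm::vec3 rayEnd = std::get<1>(ray);
        glm::vec4 rayColor = std::get<2>(ray);
        // ... write two vertices per ray, as addLine() does in AnimDebugDraw::update() ...
    }
    DebugDraw::getInstance().clearRays();
}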

View file

@ -431,13 +431,27 @@ glm::vec3 transformVectorFull(const glm::mat4& m, const glm::vec3& v) {
void generateBasisVectors(const glm::vec3& primaryAxis, const glm::vec3& secondaryAxis,
glm::vec3& uAxisOut, glm::vec3& vAxisOut, glm::vec3& wAxisOut) {
// primaryAxis & secondaryAxis must not be zero.
#ifndef NDEBUG
const float MIN_LENGTH_SQUARED = 1.0e-6f;
#endif
assert(glm::length2(primaryAxis) > MIN_LENGTH_SQUARED);
assert(glm::length2(secondaryAxis) > MIN_LENGTH_SQUARED);
uAxisOut = glm::normalize(primaryAxis);
wAxisOut = glm::cross(uAxisOut, secondaryAxis);
if (glm::length(wAxisOut) > 0.0f) {
wAxisOut = glm::normalize(wAxisOut);
} else {
wAxisOut = glm::normalize(glm::cross(uAxisOut, glm::vec3(0, 1, 0)));
glm::vec3 normSecondary = glm::normalize(secondaryAxis);
// if secondaryAxis is parallel with the primaryAxis, pick another axis.
const float EPSILON = 1.0e-4f;
if (fabsf(fabsf(glm::dot(uAxisOut, normSecondary)) - 1.0f) < EPSILON) {
// pick a better secondaryAxis.
normSecondary = glm::vec3(1.0f, 0.0f, 0.0f);
if (fabsf(fabsf(glm::dot(uAxisOut, normSecondary)) - 1.0f) < EPSILON) {
normSecondary = glm::vec3(0.0f, 1.0f, 0.0f);
}
}
wAxisOut = glm::normalize(glm::cross(uAxisOut, normSecondary));
vAxisOut = glm::cross(wAxisOut, uAxisOut);
}
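A quick worked example of the convention above: with primaryAxis = (0, 0, 1) and secondaryAxis = (0, 1, 0) the axes are not parallel, so u = (0, 0, 1), w = u x secondary = (-1, 0, 0), and v = w x u = (0, 1, 0), a right-handed basis since u x v = w. A minimal calling sketch:

#include <glm/glm.hpp>

// Sketch: u follows the primary axis, w = u x secondary, v = w x u.
void basisExample() {
    glm::vec3 u, v, w;
    generateBasisVectors(glm::vec3(0.0f, 0.0f, 1.0f), glm::vec3(0.0f, 1.0f, 0.0f), u, v, w);
    // u == (0, 0, 1), v == (0, 1, 0), w == (-1, 0, 0).
    // Rig::updateEyeJoint above calls this as generateBasisVectors(lookAtVector, headUp, z, y, x).
}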

View file

@ -13,6 +13,7 @@
#include <math.h>
const QString Settings::firstRun { "firstRun" };
void Settings::getFloatValueIfValid(const QString& name, float& floatValue) {
const QVariant badDefaultValue = NAN;

View file

@ -26,6 +26,8 @@
// TODO: remove
class Settings : public QSettings {
public:
static const QString firstRun;
void getFloatValueIfValid(const QString& name, float& floatValue);
void getBoolValue(const QString& name, bool& boolValue);

View file

@ -509,7 +509,12 @@ void NeuronPlugin::pluginUpdate(float deltaTime, const controller::InputCalibrat
std::lock_guard<std::mutex> guard(_jointsMutex);
joints = _joints;
}
_inputDevice->update(deltaTime, inputCalibrationData, joints, _prevJoints);
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->withLock([&, this]() {
_inputDevice->update(deltaTime, inputCalibrationData, joints, _prevJoints);
});
_prevJoints = joints;
}

View file

@ -136,7 +136,12 @@ void SixenseManager::setSixenseFilter(bool filter) {
void SixenseManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) {
BAIL_IF_NOT_LOADED
_inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->withLock([&, this]() {
_inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
});
if (_inputDevice->_requestReset) {
_container->requestReset();
_inputDevice->_requestReset = false;

View file

@ -11,6 +11,8 @@
#include "ViveControllerManager.h"
#include <QtCore/QProcessEnvironment>
#include <PerfStat.h>
#include <PathUtils.h>
#include <GeometryCache.h>
@ -48,9 +50,11 @@ static const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
static const QString RENDER_CONTROLLERS = "Render Hand Controllers";
const QString ViveControllerManager::NAME = "OpenVR";
static const QString DEBUG_FLAG("HIFI_DEBUG_OPENVR");
static bool enableDebugOpenVR = QProcessEnvironment::systemEnvironment().contains(DEBUG_FLAG);
bool ViveControllerManager::isSupported() const {
return !isOculusPresent() && vr::VR_IsHmdPresent();
return (enableDebugOpenVR || !isOculusPresent()) && vr::VR_IsHmdPresent();
}
bool ViveControllerManager::activate() {
@ -211,9 +215,13 @@ void ViveControllerManager::renderHand(const controller::Pose& pose, gpu::Batch&
void ViveControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) {
_inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
// because update mutates the internal state we need to lock
userInputMapper->withLock([&, this]() {
_inputDevice->update(deltaTime, inputCalibrationData, jointsCaptured);
});
if (_inputDevice->_trackedControllers == 0 && _registeredWithInputMapper) {
userInputMapper->removeDevice(_inputDevice->_deviceID);
_registeredWithInputMapper = false;
@ -270,7 +278,8 @@ void ViveControllerManager::InputDevice::handleHandController(float deltaTime, u
for (uint32_t i = 0; i < vr::k_EButton_Max; ++i) {
auto mask = vr::ButtonMaskFromId((vr::EVRButtonId)i);
bool pressed = 0 != (controllerState.ulButtonPressed & mask);
handleButtonEvent(deltaTime, i, pressed, isLeftHand);
bool touched = 0 != (controllerState.ulButtonTouched & mask);
handleButtonEvent(deltaTime, i, pressed, touched, isLeftHand);
}
// process each axis
@ -314,20 +323,26 @@ enum ViveButtonChannel {
// These functions do translation from the Steam IDs to the standard controller IDs
void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool isLeftHand) {
if (!pressed) {
return;
}
void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand) {
using namespace controller;
if (button == vr::k_EButton_ApplicationMenu) {
_buttonPressedMap.insert(isLeftHand ? LEFT_APP_MENU : RIGHT_APP_MENU);
} else if (button == vr::k_EButton_Grip) {
_buttonPressedMap.insert(isLeftHand ? LB : RB);
} else if (button == vr::k_EButton_SteamVR_Trigger) {
_buttonPressedMap.insert(isLeftHand ? LT : RT);
} else if (button == vr::k_EButton_SteamVR_Touchpad) {
_buttonPressedMap.insert(isLeftHand ? LS : RS);
if (pressed) {
if (button == vr::k_EButton_ApplicationMenu) {
_buttonPressedMap.insert(isLeftHand ? LEFT_APP_MENU : RIGHT_APP_MENU);
} else if (button == vr::k_EButton_Grip) {
_buttonPressedMap.insert(isLeftHand ? LEFT_GRIP : RIGHT_GRIP);
} else if (button == vr::k_EButton_SteamVR_Trigger) {
_buttonPressedMap.insert(isLeftHand ? LT : RT);
} else if (button == vr::k_EButton_SteamVR_Touchpad) {
_buttonPressedMap.insert(isLeftHand ? LS : RS);
}
}
if (touched) {
if (button == vr::k_EButton_SteamVR_Touchpad) {
_buttonPressedMap.insert(isLeftHand ? LS_TOUCH : RS_TOUCH);
}
}
}
@ -424,18 +439,28 @@ controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableI
makePair(LY, "LY"),
makePair(RX, "RX"),
makePair(RY, "RY"),
// trigger analogs
// capacitive touch on the touch pad
makePair(LS_TOUCH, "LSTouch"),
makePair(RS_TOUCH, "RSTouch"),
// touch pad press
makePair(LS, "LS"),
makePair(RS, "RS"),
// triggers
makePair(LT, "LT"),
makePair(RT, "RT"),
makePair(LB, "LB"),
makePair(RB, "RB"),
// low profile side grip button.
makePair(LEFT_GRIP, "LeftGrip"),
makePair(RIGHT_GRIP, "RightGrip"),
makePair(LS, "LS"),
makePair(RS, "RS"),
// 3d location of controller
makePair(LEFT_HAND, "LeftHand"),
makePair(RIGHT_HAND, "RightHand"),
// app button above trackpad.
Input::NamedPair(Input(_deviceID, LEFT_APP_MENU, ChannelType::BUTTON), "LeftApplicationMenu"),
Input::NamedPair(Input(_deviceID, RIGHT_APP_MENU, ChannelType::BUTTON), "RightApplicationMenu"),
};

View file

@ -59,7 +59,7 @@ private:
virtual void focusOutEvent() override;
void handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand);
void handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool isLeftHand);
void handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand);
void handleAxisEvent(float deltaTime, uint32_t axis, float x, float y, bool isLeftHand);
void handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
const vec3& linearVelocity, const vec3& angularVelocity, bool isLeftHand);

View file

@ -0,0 +1,79 @@
//
// viveTouchpadTest.js
//
// Anthony J. Thibault
// Copyright 2016 High Fidelity, Inc.
//
// An example of reading touch and move events from the vive controller touch pad.
//
// It will spawn a gray cube in front of you, then as you use the right touch pad,
// the cube should turn green and respond to the motion of your thumb on the pad.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var GRAY = {red: 57, green: 57, blue: 57};
var GREEN = {red: 0, green: 255, blue: 0};
var ZERO = {x: 0, y: 0, z: 0};
var Y_AXIS = {x: 0, y: 1, z: 0};
var ROT_Y_90 = Quat.angleAxis(90.0, Y_AXIS);
var boxEntity;
var boxPosition;
var boxZAxis, boxYAxis;
var prevThumbDown = false;
function init() {
boxPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(Camera.getOrientation())));
var front = Quat.getFront(Camera.getOrientation());
boxZAxis = Vec3.normalize(Vec3.cross(front, Y_AXIS));
boxYAxis = Vec3.normalize(Vec3.cross(boxZAxis, front));
boxEntity = Entities.addEntity({
type: "Box",
position: boxPosition,
dimensions: {x: 0.25, y: 0.25, z: 0.25},
color: GRAY,
gravity: ZERO,
visible: true,
locked: false,
lifetime: 60000
});
}
function shutdown() {
Entities.deleteEntity(boxEntity);
}
Script.scriptEnding.connect(shutdown);
function viveIsConnected() {
return Controller.Hardware.Vive;
}
function update(dt) {
if (viveIsConnected()) {
var thumbDown = Controller.getValue(Controller.Hardware.Vive.RSTouch);
if (thumbDown) {
var x = Controller.getValue(Controller.Hardware.Vive.RX);
var y = Controller.getValue(Controller.Hardware.Vive.RY);
var xOffset = Vec3.multiply(boxZAxis, x);
var yOffset = Vec3.multiply(boxYAxis, y);
var offset = Vec3.sum(xOffset, yOffset);
Entities.editEntity(boxEntity, {position: Vec3.sum(boxPosition, offset)});
}
if (thumbDown && !prevThumbDown) {
Entities.editEntity(boxEntity, {color: GREEN});
}
if (!thumbDown && prevThumbDown) {
Entities.editEntity(boxEntity, {color: GRAY});
}
prevThumbDown = thumbDown;
}
}
Script.update.connect(update);
init();

View file

@ -91,7 +91,7 @@ public:
virtual QOpenGLContext* getPrimaryContext() override { return nullptr; }
virtual ui::Menu* getPrimaryMenu() { return nullptr; }
virtual bool isForeground() override { return true; }
virtual const DisplayPlugin* getActiveDisplayPlugin() const override { return nullptr; }
virtual const DisplayPluginPointer getActiveDisplayPlugin() const override { return DisplayPluginPointer(); }
};
class MyControllerScriptingInterface : public controller::ScriptingInterface {