Merge branch 'vive-ui' into feature/grab-script-preliminaries

Anthony J. Thibault 2016-06-14 15:33:38 -07:00
commit 071584597d
52 changed files with 869 additions and 709 deletions

View file

@ -1,5 +1,6 @@
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtGraphicalEffects 1.0
import "."
@ -44,6 +45,12 @@ Overlay {
}
}
ColorOverlay {
id: color
anchors.fill: image
source: image
}
function updateSubImage(subImage) {
var keys = Object.keys(subImage);
for (var i = 0; i < keys.length; ++i) {
@ -70,6 +77,7 @@ Overlay {
case "alpha": root.opacity = value; break;
case "imageURL": image.source = value; break;
case "subImage": updateSubImage(value); break;
case "color": color.color = Qt.rgba(value.red / 255, value.green / 255, value.blue / 255, root.opacity); break;
default: console.log("OVERLAY Unhandled image property " + key);
}
}

View file

@ -239,11 +239,13 @@ void Avatar::updateAvatarEntities() {
}
AvatarEntityIDs recentlyDettachedAvatarEntities = getAndClearRecentlyDetachedIDs();
foreach (auto entityID, recentlyDettachedAvatarEntities) {
if (!_avatarEntityData.contains(entityID)) {
entityTree->deleteEntity(entityID, true, true);
_avatarEntitiesLock.withReadLock([&] {
foreach (auto entityID, recentlyDettachedAvatarEntities) {
if (!_avatarEntityData.contains(entityID)) {
entityTree->deleteEntity(entityID, true, true);
}
}
}
});
});
if (success) {

View file

@ -709,12 +709,14 @@ void MyAvatar::saveData() {
settings.beginWriteArray("avatarEntityData");
int avatarEntityIndex = 0;
for (auto entityID : _avatarEntityData.keys()) {
settings.setArrayIndex(avatarEntityIndex);
settings.setValue("id", entityID);
settings.setValue("properties", _avatarEntityData.value(entityID));
avatarEntityIndex++;
}
_avatarEntitiesLock.withReadLock([&] {
for (auto entityID : _avatarEntityData.keys()) {
settings.setArrayIndex(avatarEntityIndex);
settings.setValue("id", entityID);
settings.setValue("properties", _avatarEntityData.value(entityID));
avatarEntityIndex++;
}
});
settings.endArray();
settings.setValue("displayName", _displayName);

View file

@ -105,3 +105,13 @@ QString HMDScriptingInterface::preferredAudioInput() const {
QString HMDScriptingInterface::preferredAudioOutput() const {
return qApp->getActiveDisplayPlugin()->getPreferredAudioOutDevice();
}
bool HMDScriptingInterface::setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const {
return qApp->getActiveDisplayPlugin()->setHandLaser(hands,
enabled ? DisplayPlugin::HandLaserMode::Overlay : DisplayPlugin::HandLaserMode::None,
color, direction);
}
void HMDScriptingInterface::disableHandLasers(int hands) const {
qApp->getActiveDisplayPlugin()->setHandLaser(hands, DisplayPlugin::HandLaserMode::None);
}
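The hands argument is a bitmask over the Hand enum this commit adds to DisplayPlugin (LeftHand = 0x01, RightHand = 0x02). A minimal caller sketch, with the interface pointer name assumed:

// Minimal sketch; `hmd` is a hypothetical pointer to the HMDScriptingInterface.
// Enable an opaque white laser along -Z on both hands, then switch both off.
const int BOTH_HANDS = 0x01 | 0x02; // Hand::LeftHand | Hand::RightHand
hmd->setHandLasers(BOTH_HANDS, true, glm::vec4(1.0f), glm::vec3(0.0f, 0.0f, -1.0f));
hmd->disableHandLasers(BOTH_HANDS);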

View file

@ -36,6 +36,8 @@ public:
Q_INVOKABLE glm::vec2 overlayToSpherical(const glm::vec2 & overlayPos) const;
Q_INVOKABLE QString preferredAudioInput() const;
Q_INVOKABLE QString preferredAudioOutput() const;
Q_INVOKABLE bool setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;
Q_INVOKABLE void disableHandLasers(int hands) const;
public:
HMDScriptingInterface();

View file

@ -900,7 +900,11 @@ bool AvatarData::processAvatarIdentity(const Identity& identity) {
hasIdentityChanged = true;
}
if (identity.avatarEntityData != _avatarEntityData) {
bool avatarEntityDataChanged = false;
_avatarEntitiesLock.withReadLock([&] {
avatarEntityDataChanged = (identity.avatarEntityData != _avatarEntityData);
});
if (avatarEntityDataChanged) {
setAvatarEntityData(identity.avatarEntityData);
hasIdentityChanged = true;
}
@ -914,7 +918,9 @@ QByteArray AvatarData::identityByteArray() {
QUrl emptyURL("");
const QUrl& urlToSend = _skeletonModelURL.scheme() == "file" ? emptyURL : _skeletonModelURL;
identityStream << getSessionUUID() << urlToSend << _attachmentData << _displayName << _avatarEntityData;
_avatarEntitiesLock.withReadLock([&] {
identityStream << getSessionUUID() << urlToSend << _attachmentData << _displayName << _avatarEntityData;
});
return identityData;
}
@ -1306,16 +1312,18 @@ QJsonObject AvatarData::toJson() const {
root[JSON_AVATAR_ATTACHEMENTS] = attachmentsJson;
}
if (!_avatarEntityData.empty()) {
QJsonArray avatarEntityJson;
for (auto entityID : _avatarEntityData.keys()) {
QVariantMap entityData;
entityData.insert("id", entityID);
entityData.insert("properties", _avatarEntityData.value(entityID));
avatarEntityJson.push_back(QVariant(entityData).toJsonObject());
_avatarEntitiesLock.withReadLock([&] {
if (!_avatarEntityData.empty()) {
QJsonArray avatarEntityJson;
for (auto entityID : _avatarEntityData.keys()) {
QVariantMap entityData;
entityData.insert("id", entityID);
entityData.insert("properties", _avatarEntityData.value(entityID));
avatarEntityJson.push_back(QVariant(entityData).toJsonObject());
}
root[JSON_AVATAR_ENTITIES] = avatarEntityJson;
}
root[JSON_AVATAR_ENTITIES] = avatarEntityJson;
}
});
auto recordingBasis = getRecordingBasis();
bool success;
@ -1604,8 +1612,10 @@ void AvatarData::updateAvatarEntity(const QUuid& entityID, const QByteArray& ent
QMetaObject::invokeMethod(this, "updateAvatarEntity", Q_ARG(const QUuid&, entityID), Q_ARG(QByteArray, entityData));
return;
}
_avatarEntityData.insert(entityID, entityData);
_avatarEntityDataLocallyEdited = true;
_avatarEntitiesLock.withWriteLock([&] {
_avatarEntityData.insert(entityID, entityData);
_avatarEntityDataLocallyEdited = true;
});
}
void AvatarData::clearAvatarEntity(const QUuid& entityID) {
@ -1613,18 +1623,25 @@ void AvatarData::clearAvatarEntity(const QUuid& entityID) {
QMetaObject::invokeMethod(this, "clearAvatarEntity", Q_ARG(const QUuid&, entityID));
return;
}
_avatarEntityData.remove(entityID);
_avatarEntityDataLocallyEdited = true;
_avatarEntitiesLock.withWriteLock([&] {
_avatarEntityData.remove(entityID);
_avatarEntityDataLocallyEdited = true;
});
}
AvatarEntityMap AvatarData::getAvatarEntityData() const {
AvatarEntityMap result;
if (QThread::currentThread() != thread()) {
AvatarEntityMap result;
QMetaObject::invokeMethod(const_cast<AvatarData*>(this), "getAvatarEntityData", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(AvatarEntityMap, result));
return result;
}
return _avatarEntityData;
_avatarEntitiesLock.withReadLock([&] {
result = _avatarEntityData;
});
return result;
}
void AvatarData::setAvatarEntityData(const AvatarEntityMap& avatarEntityData) {
@ -1632,29 +1649,33 @@ void AvatarData::setAvatarEntityData(const AvatarEntityMap& avatarEntityData) {
QMetaObject::invokeMethod(this, "setAvatarEntityData", Q_ARG(const AvatarEntityMap&, avatarEntityData));
return;
}
if (_avatarEntityData != avatarEntityData) {
// keep track of entities that were attached to this avatar but no longer are
AvatarEntityIDs previousAvatarEntityIDs = QSet<QUuid>::fromList(_avatarEntityData.keys());
_avatarEntitiesLock.withWriteLock([&] {
if (_avatarEntityData != avatarEntityData) {
// keep track of entities that were attached to this avatar but no longer are
AvatarEntityIDs previousAvatarEntityIDs = QSet<QUuid>::fromList(_avatarEntityData.keys());
_avatarEntityData = avatarEntityData;
setAvatarEntityDataChanged(true);
_avatarEntityData = avatarEntityData;
setAvatarEntityDataChanged(true);
foreach (auto entityID, previousAvatarEntityIDs) {
if (!_avatarEntityData.contains(entityID)) {
_avatarEntityDetached.insert(entityID);
foreach (auto entityID, previousAvatarEntityIDs) {
if (!_avatarEntityData.contains(entityID)) {
_avatarEntityDetached.insert(entityID);
}
}
}
}
});
}
AvatarEntityIDs AvatarData::getAndClearRecentlyDetachedIDs() {
AvatarEntityIDs result;
if (QThread::currentThread() != thread()) {
AvatarEntityIDs result;
QMetaObject::invokeMethod(const_cast<AvatarData*>(this), "getAndClearRecentlyDetachedIDs", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(AvatarEntityIDs, result));
return result;
}
AvatarEntityIDs result = _avatarEntityDetached;
_avatarEntityDetached.clear();
_avatarEntitiesLock.withWriteLock([&] {
result = _avatarEntityDetached;
_avatarEntityDetached.clear();
});
return result;
}
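Every accessor above follows the same discipline: reads of _avatarEntityData and _avatarEntityDetached go through withReadLock, mutations through withWriteLock. A minimal sketch of the idiom, assuming ReadWriteLockable runs the lambda under a shared lock for readers and an exclusive lock for writers:

class Guarded {
public:
    int get() const {
        int result = 0;
        _lock.withReadLock([&] { result = _value; }); // concurrent readers are fine
        return result;
    }
    void set(int v) {
        _lock.withWriteLock([&] { _value = v; }); // writers get exclusive access
    }
private:
    mutable ReadWriteLockable _lock; // mutable so const getters can still lock
    int _value { 0 };
};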

View file

@ -418,6 +418,7 @@ protected:
// updates about one avatar to another.
glm::vec3 _globalPosition;
mutable ReadWriteLockable _avatarEntitiesLock;
AvatarEntityIDs _avatarEntityDetached; // recently detached from this avatar
AvatarEntityMap _avatarEntityData;
bool _avatarEntityDataLocallyEdited { false };

View file

@ -32,6 +32,7 @@ class Mapping;
using MappingPointer = std::shared_ptr<Mapping>;
using MappingList = std::list<MappingPointer>;
struct Pose;
}
#endif

View file

@ -213,9 +213,10 @@ OpenGLDisplayPlugin::OpenGLDisplayPlugin() {
}
void OpenGLDisplayPlugin::cleanupForSceneTexture(const gpu::TexturePointer& sceneTexture) {
Lock lock(_mutex);
Q_ASSERT(_sceneTextureToFrameIndexMap.contains(sceneTexture));
_sceneTextureToFrameIndexMap.remove(sceneTexture);
withRenderThreadLock([&] {
Q_ASSERT(_sceneTextureToFrameIndexMap.contains(sceneTexture));
_sceneTextureToFrameIndexMap.remove(sceneTexture);
});
}
@ -394,10 +395,9 @@ void OpenGLDisplayPlugin::submitSceneTexture(uint32_t frameIndex, const gpu::Tex
return;
}
{
Lock lock(_mutex);
withRenderThreadLock([&] {
_sceneTextureToFrameIndexMap[sceneTexture] = frameIndex;
}
});
// Submit it to the presentation thread via escrow
_sceneTextureEscrow.submit(sceneTexture);
@ -431,11 +431,12 @@ void OpenGLDisplayPlugin::updateTextures() {
}
void OpenGLDisplayPlugin::updateFrameData() {
Lock lock(_mutex);
auto previousFrameIndex = _currentPresentFrameIndex;
_currentPresentFrameIndex = _sceneTextureToFrameIndexMap[_currentSceneTexture];
auto skippedCount = (_currentPresentFrameIndex - previousFrameIndex) - 1;
_droppedFrameRate.increment(skippedCount);
withPresentThreadLock([&] {
auto previousFrameIndex = _currentPresentFrameIndex;
_currentPresentFrameIndex = _sceneTextureToFrameIndexMap[_currentSceneTexture];
auto skippedCount = (_currentPresentFrameIndex - previousFrameIndex) - 1;
_droppedFrameRate.increment(skippedCount);
});
}
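The skip count is the gap between consecutive presented frame indices, minus one for the frame actually shown:

// Worked example: previousFrameIndex = 10, _currentPresentFrameIndex = 13
// skippedCount = (13 - 10) - 1 = 2 (frames 11 and 12 were never presented)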
void OpenGLDisplayPlugin::compositeOverlay() {
@ -492,14 +493,14 @@ void OpenGLDisplayPlugin::compositeLayers() {
}
_compositeFramebuffer->Bound(Framebuffer::Target::Draw, [&] {
Context::Viewport(targetRenderSize.x, targetRenderSize.y);
Context::Clear().DepthBuffer();
glBindTexture(GL_TEXTURE_2D, getSceneTextureId());
compositeScene();
auto sceneTextureId = getSceneTextureId();
auto overlayTextureId = getOverlayTextureId();
glBindTexture(GL_TEXTURE_2D, sceneTextureId);
compositeScene();
if (overlayTextureId) {
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, overlayTextureId);
Context::Enable(Capability::Blend);
Context::BlendFunc(BlendFunction::SrcAlpha, BlendFunction::OneMinusSrcAlpha);
compositeOverlay();
auto compositorHelper = DependencyManager::get<CompositorHelper>();
@ -507,11 +508,16 @@ void OpenGLDisplayPlugin::compositeLayers() {
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
glBindTexture(GL_TEXTURE_2D, cursorData.texture);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, overlayTextureId);
compositePointer();
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
}
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_BLEND);
Context::Disable(Capability::Blend);
}
compositeExtra();
});
}
@ -549,7 +555,11 @@ float OpenGLDisplayPlugin::newFramePresentRate() const {
}
float OpenGLDisplayPlugin::droppedFrameRate() const {
return _droppedFrameRate.rate();
float result;
withRenderThreadLock([&] {
result = _droppedFrameRate.rate();
});
return result;
}
float OpenGLDisplayPlugin::presentRate() const {
@ -664,3 +674,11 @@ void OpenGLDisplayPlugin::useProgram(const ProgramPtr& program) {
_activeProgram = program;
}
}
void OpenGLDisplayPlugin::assertIsRenderThread() const {
Q_ASSERT(QThread::currentThread() != _presentThread);
}
void OpenGLDisplayPlugin::assertIsPresentThread() const {
Q_ASSERT(QThread::currentThread() == _presentThread);
}

View file

@ -74,6 +74,7 @@ protected:
virtual void compositeScene();
virtual void compositeOverlay();
virtual void compositePointer();
virtual void compositeExtra() {};
virtual bool hasFocus() const override;
@ -109,7 +110,6 @@ protected:
int32_t _alphaUniform { -1 };
ShapeWrapperPtr _plane;
mutable Mutex _mutex;
RateCounter<> _droppedFrameRate;
RateCounter<> _newFrameRate;
RateCounter<> _presentRate;
@ -135,7 +135,27 @@ protected:
BasicFramebufferWrapperPtr _compositeFramebuffer;
bool _lockCurrentTexture { false };
void assertIsRenderThread() const;
void assertIsPresentThread() const;
template<typename F>
void withPresentThreadLock(F f) const {
assertIsPresentThread();
Lock lock(_presentMutex);
f();
}
template<typename F>
void withRenderThreadLock(F f) const {
assertIsRenderThread();
Lock lock(_presentMutex);
f();
}
private:
// Any resource shared by the main thread and the presentation thread must
// be serialized through this mutex
mutable Mutex _presentMutex;
ProgramPtr _activeProgram;
};
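Both helpers serialize on the same _presentMutex; the asserts only document which side of the render/present boundary the caller is on. A hypothetical accessor in the style of droppedFrameRate() above:

// Hypothetical accessor sketch (exampleFrameRate is illustrative, not part of
// the commit): read shared state from the render side under _presentMutex.
float OpenGLDisplayPlugin::exampleFrameRate() const {
    float result { 0.0f };
    withRenderThreadLock([&] {
        result = _newFrameRate.rate();
    });
    return result;
}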

View file

@ -9,6 +9,7 @@
#include <memory>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtx/intersect.hpp>
#include <QtCore/QLoggingCategory>
#include <QtWidgets/QApplication>
@ -37,7 +38,6 @@ QRect HmdDisplayPlugin::getRecommendedOverlayRect() const {
return CompositorHelper::VIRTUAL_SCREEN_RECOMMENDED_OVERLAY_RECT;
}
bool HmdDisplayPlugin::internalActivate() {
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
@ -197,14 +197,43 @@ static ProgramPtr getReprojectionProgram() {
#endif
static const char * LASER_VS = R"VS(#version 410 core
uniform mat4 mvp = mat4(1);
in vec3 Position;
out vec3 vPosition;
void main() {
gl_Position = mvp * vec4(Position, 1);
vPosition = Position;
}
)VS";
static const char * LASER_FS = R"FS(#version 410 core
uniform vec4 color = vec4(1.0, 1.0, 1.0, 1.0);
in vec3 vPosition;
out vec4 FragColor;
void main() {
FragColor = color;
}
)FS";
void HmdDisplayPlugin::customizeContext() {
Parent::customizeContext();
// Only enable mirroring if we know vsync is disabled
enableVsync(false);
_enablePreview = !isVsyncEnabled();
_sphereSection = loadSphereSection(_program, CompositorHelper::VIRTUAL_UI_TARGET_FOV.y, CompositorHelper::VIRTUAL_UI_ASPECT_RATIO);
compileProgram(_laserProgram, LASER_VS, LASER_FS);
_laserGeometry = loadLaser(_laserProgram);
compileProgram(_reprojectionProgram, REPROJECTION_VS, REPROJECTION_FS);
using namespace oglplus;
REPROJECTION_MATRIX_LOCATION = Uniform<glm::mat3>(*_reprojectionProgram, "reprojection").Location();
INVERSE_PROJECTION_MATRIX_LOCATION = Uniform<glm::mat4>(*_reprojectionProgram, "inverseProjections").Location();
@ -215,6 +244,8 @@ void HmdDisplayPlugin::uncustomizeContext() {
_sphereSection.reset();
_compositeFramebuffer.reset();
_reprojectionProgram.reset();
_laserProgram.reset();
_laserGeometry.reset();
Parent::uncustomizeContext();
}
@ -288,6 +319,7 @@ void HmdDisplayPlugin::compositePointer() {
});
}
void HmdDisplayPlugin::internalPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
@ -344,22 +376,116 @@ void HmdDisplayPlugin::setEyeRenderPose(uint32_t frameIndex, Eye eye, const glm:
void HmdDisplayPlugin::updateFrameData() {
// Check if we have old frame data to discard
{
Lock lock(_mutex);
withPresentThreadLock([&] {
auto itr = _frameInfos.find(_currentPresentFrameIndex);
if (itr != _frameInfos.end()) {
_frameInfos.erase(itr);
}
}
});
Parent::updateFrameData();
{
Lock lock(_mutex);
withPresentThreadLock([&] {
_currentPresentFrameInfo = _frameInfos[_currentPresentFrameIndex];
}
});
}
glm::mat4 HmdDisplayPlugin::getHeadPose() const {
return _currentRenderFrameInfo.renderPose;
}
bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const vec4& color, const vec3& direction) {
HandLaserInfo info;
info.mode = mode;
info.color = color;
info.direction = direction;
withRenderThreadLock([&] {
if (hands & Hand::LeftHand) {
_handLasers[0] = info;
}
if (hands & Hand::RightHand) {
_handLasers[1] = info;
}
});
// FIXME defer to a child class plugin to determine if hand lasers are actually
// available based on the presence or absence of hand controllers
return true;
}
void HmdDisplayPlugin::compositeExtra() {
std::array<HandLaserInfo, 2> handLasers;
std::array<mat4, 2> renderHandPoses;
Transform uiModelTransform;
withPresentThreadLock([&] {
handLasers = _handLasers;
renderHandPoses = _handPoses;
uiModelTransform = _uiModelTransform;
});
// If neither hand laser is activated, exit
if (!handLasers[0].valid() && !handLasers[1].valid()) {
return;
}
static const glm::mat4 identity;
if (renderHandPoses[0] == identity && renderHandPoses[1] == identity) {
return;
}
// Render hand lasers
using namespace oglplus;
useProgram(_laserProgram);
_laserGeometry->Use();
std::array<mat4, 2> handLaserModelMatrices;
for (int i = 0; i < 2; ++i) {
if (renderHandPoses[i] == identity) {
continue;
}
const auto& handLaser = handLasers[i];
if (!handLaser.valid()) {
continue;
}
const auto& laserDirection = handLaser.direction;
auto model = renderHandPoses[i];
auto castDirection = glm::quat_cast(model) * laserDirection;
if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
castDirection = glm::normalize(castDirection);
}
// FIXME fetch the actual UI radius from... somewhere?
float uiRadius = 1.0f;
// Find the intersection of the laser with the UI and use it to scale the model matrix
float distance;
if (!glm::intersectRaySphere(vec3(renderHandPoses[i][3]), castDirection, uiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {
continue;
}
// Make sure we rotate to match the desired laser direction
if (laserDirection != Vectors::UNIT_NEG_Z) {
auto rotation = glm::rotation(Vectors::UNIT_NEG_Z, laserDirection);
model = model * glm::mat4_cast(rotation);
}
model = glm::scale(model, vec3(distance));
handLaserModelMatrices[i] = model;
}
for_each_eye([&](Eye eye) {
eyeViewport(eye);
auto eyePose = _currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye);
auto view = glm::inverse(eyePose);
const auto& projection = _eyeProjections[eye];
for (int i = 0; i < 2; ++i) {
if (handLaserModelMatrices[i] == identity) {
continue;
}
Uniform<glm::mat4>(*_laserProgram, "mvp").Set(projection * view * handLaserModelMatrices[i]);
Uniform<glm::vec4>(*_laserProgram, "color").Set(handLasers[i].color);
_laserGeometry->Draw();
// TODO render some kind of visual indicator at the intersection point with the UI.
}
});
}
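The beam sizing solves a ray/sphere intersection: with hand position p, unit cast direction \hat{d}, and the UI sphere centered at c with radius r (assumed 1 above), glm::intersectRaySphere returns the smallest positive t with

\lVert p + t\,\hat{d} - c \rVert = r

Because the laser geometry runs from the origin to (0, 0, -1) in model space, scaling the model matrix by t makes the rendered beam end exactly at the UI layer.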

View file

@ -10,6 +10,7 @@
#include <ThreadSafeValueCache.h>
#include <QtGlobal>
#include <Transform.h>
#include "../OpenGLDisplayPlugin.h"
@ -30,7 +31,7 @@ public:
virtual glm::mat4 getHeadPose() const override;
bool setHandLaser(uint32_t hands, HandLaserMode mode, const vec4& color, const vec3& direction) override;
protected:
virtual void hmdPresent() = 0;
@ -46,7 +47,22 @@ protected:
void customizeContext() override;
void uncustomizeContext() override;
void updateFrameData() override;
void compositeExtra() override;
struct HandLaserInfo {
HandLaserMode mode { HandLaserMode::None };
vec4 color { 1.0f };
vec3 direction { 0, 0, -1 };
// Is this hand laser info suitable for drawing?
bool valid() const {
return (mode != HandLaserMode::None && color.a > 0.0f && direction != vec3());
}
};
Transform _uiModelTransform;
std::array<HandLaserInfo, 2> _handLasers;
std::array<glm::mat4, 2> _handPoses;
std::array<glm::mat4, 2> _eyeOffsets;
std::array<glm::mat4, 2> _eyeProjections;
std::array<glm::mat4, 2> _eyeInverseProjections;
@ -75,5 +91,7 @@ private:
bool _enableReprojection { true };
ShapeWrapperPtr _sphereSection;
ProgramPtr _reprojectionProgram;
ProgramPtr _laserProgram;
ShapeWrapperPtr _laserGeometry;
};

View file

@ -1,5 +1,5 @@
set(TARGET_NAME entities-renderer)
AUTOSCRIBE_SHADER_LIB(gpu model render render-utils)
AUTOSCRIBE_SHADER_LIB(gpu model procedural render render-utils)
setup_hifi_library(Widgets Network Script)
link_hifi_libraries(shared gpu procedural model model-networking script-engine render render-utils)

View file

@ -1,380 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/09/05
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Shader includes portions of webgl-noise:
// Description : Array and textureless GLSL 2D/3D/4D simplex
// noise functions.
// Author : Ian McEwan, Ashima Arts.
// Maintainer : ijm
// Lastmod : 20110822 (ijm)
// License : Copyright (C) 2011 Ashima Arts. All rights reserved.
// Distributed under the MIT License. See LICENSE file.
// https://github.com/ashima/webgl-noise
//
const QString SHADER_COMMON = R"SHADER(
layout(location = 0) out vec4 _fragColor0;
layout(location = 1) out vec4 _fragColor1;
layout(location = 2) out vec4 _fragColor2;
// the alpha threshold
uniform float alphaThreshold;
vec2 signNotZero(vec2 v) {
return vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
}
vec2 float32x3_to_oct(in vec3 v) {
vec2 p = v.xy * (1.0 / (abs(v.x) + abs(v.y) + abs(v.z)));
return ((v.z <= 0.0) ? ((1.0 - abs(p.yx)) * signNotZero(p)) : p);
}
vec3 oct_to_float32x3(in vec2 e) {
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
if (v.z < 0) {
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
}
return normalize(v);
}
vec3 snorm12x2_to_unorm8x3(vec2 f) {
vec2 u = vec2(round(clamp(f, -1.0, 1.0) * 2047.0 + 2047.0));
float t = floor(u.y / 256.0);
return floor(vec3(
u.x / 16.0,
fract(u.x / 16.0) * 256.0 + t,
u.y - t * 256.0
)) / 255.0;
}
vec2 unorm8x3_to_snorm12x2(vec3 u) {
u *= 255.0;
u.y *= (1.0 / 16.0);
vec2 s = vec2( u.x * 16.0 + floor(u.y),
fract(u.y) * (16.0 * 256.0) + u.z);
return clamp(s * (1.0 / 2047.0) - 1.0, vec2(-1.0), vec2(1.0));
}
float mod289(float x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec2 mod289(vec2 x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec3 mod289(vec3 x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec4 mod289(vec4 x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
float permute(float x) {
return mod289(((x*34.0)+1.0)*x);
}
vec3 permute(vec3 x) {
return mod289(((x*34.0)+1.0)*x);
}
vec4 permute(vec4 x) {
return mod289(((x*34.0)+1.0)*x);
}
float taylorInvSqrt(float r) {
return 1.79284291400159 - 0.85373472095314 * r;
}
vec4 taylorInvSqrt(vec4 r) {
return 1.79284291400159 - 0.85373472095314 * r;
}
vec4 grad4(float j, vec4 ip) {
const vec4 ones = vec4(1.0, 1.0, 1.0, -1.0);
vec4 p, s;
p.xyz = floor(fract(vec3(j) * ip.xyz) * 7.0) * ip.z - 1.0;
p.w = 1.5 - dot(abs(p.xyz), ones.xyz);
s = vec4(lessThan(p, vec4(0.0)));
p.xyz = p.xyz + (s.xyz * 2.0 - 1.0) * s.www;
return p;
}
// (sqrt(5) - 1)/4 = F4, used once below
#define F4 0.309016994374947451
float snoise(vec4 v) {
const vec4 C = vec4(0.138196601125011, // (5 - sqrt(5))/20 G4
0.276393202250021, // 2 * G4
0.414589803375032, // 3 * G4
-0.447213595499958); // -1 + 4 * G4
// First corner
vec4 i = floor(v + dot(v, vec4(F4)));
vec4 x0 = v - i + dot(i, C.xxxx);
// Other corners
// Rank sorting originally contributed by Bill Licea-Kane, AMD (formerly ATI)
vec4 i0;
vec3 isX = step(x0.yzw, x0.xxx);
vec3 isYZ = step(x0.zww, x0.yyz);
i0.x = isX.x + isX.y + isX.z;
i0.yzw = 1.0 - isX;
i0.y += isYZ.x + isYZ.y;
i0.zw += 1.0 - isYZ.xy;
i0.z += isYZ.z;
i0.w += 1.0 - isYZ.z;
// i0 now contains the unique values 0,1,2,3 in each channel
vec4 i3 = clamp(i0, 0.0, 1.0);
vec4 i2 = clamp(i0 - 1.0, 0.0, 1.0);
vec4 i1 = clamp(i0 - 2.0, 0.0, 1.0);
vec4 x1 = x0 - i1 + C.xxxx;
vec4 x2 = x0 - i2 + C.yyyy;
vec4 x3 = x0 - i3 + C.zzzz;
vec4 x4 = x0 + C.wwww;
// Permutations
i = mod289(i);
float j0 = permute(permute(permute(permute(i.w) + i.z) + i.y) + i.x);
vec4 j1 = permute(
permute(
permute(
permute(i.w + vec4(i1.w, i2.w, i3.w, 1.0)) + i.z
+ vec4(i1.z, i2.z, i3.z, 1.0)) + i.y
+ vec4(i1.y, i2.y, i3.y, 1.0)) + i.x
+ vec4(i1.x, i2.x, i3.x, 1.0));
// Gradients: 7x7x6 points over a cube, mapped onto a 4-cross polytope
// 7*7*6 = 294, which is close to the ring size 17*17 = 289.
vec4 ip = vec4(1.0 / 294.0, 1.0 / 49.0, 1.0 / 7.0, 0.0);
vec4 p0 = grad4(j0, ip);
vec4 p1 = grad4(j1.x, ip);
vec4 p2 = grad4(j1.y, ip);
vec4 p3 = grad4(j1.z, ip);
vec4 p4 = grad4(j1.w, ip);
// Normalise gradients
vec4 norm = taylorInvSqrt(
vec4(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3)));
p0 *= norm.x;
p1 *= norm.y;
p2 *= norm.z;
p3 *= norm.w;
p4 *= taylorInvSqrt(dot(p4, p4));
// Mix contributions from the five corners
vec3 m0 = max(0.6 - vec3(dot(x0, x0), dot(x1, x1), dot(x2, x2)), 0.0);
vec2 m1 = max(0.6 - vec2(dot(x3, x3), dot(x4, x4)), 0.0);
m0 = m0 * m0;
m1 = m1 * m1;
return 49.0
* (dot(m0 * m0, vec3(dot(p0, x0), dot(p1, x1), dot(p2, x2)))
+ dot(m1 * m1, vec2(dot(p3, x3), dot(p4, x4))));
}
float snoise(vec3 v) {
const vec2 C = vec2(1.0 / 6.0, 1.0 / 3.0);
const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
// First corner
vec3 i = floor(v + dot(v, C.yyy));
vec3 x0 = v - i + dot(i, C.xxx);
// Other corners
vec3 g = step(x0.yzx, x0.xyz);
vec3 l = 1.0 - g;
vec3 i1 = min(g.xyz, l.zxy);
vec3 i2 = max(g.xyz, l.zxy);
vec3 x1 = x0 - i1 + C.xxx;
vec3 x2 = x0 - i2 + C.yyy; // 2.0*C.x = 1/3 = C.y
vec3 x3 = x0 - D.yyy; // -1.0+3.0*C.x = -0.5 = -D.y
// Permutations
i = mod289(i);
vec4 p = permute(
permute(
permute(i.z + vec4(0.0, i1.z, i2.z, 1.0)) + i.y
+ vec4(0.0, i1.y, i2.y, 1.0)) + i.x
+ vec4(0.0, i1.x, i2.x, 1.0));
// Gradients: 7x7 points over a square, mapped onto an octahedron.
// The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294)
float n_ = 0.142857142857; // 1.0/7.0
vec3 ns = n_ * D.wyz - D.xzx;
vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p,7*7)
vec4 x_ = floor(j * ns.z);
vec4 y_ = floor(j - 7.0 * x_); // mod(j,N)
vec4 x = x_ * ns.x + ns.yyyy;
vec4 y = y_ * ns.x + ns.yyyy;
vec4 h = 1.0 - abs(x) - abs(y);
vec4 b0 = vec4(x.xy, y.xy);
vec4 b1 = vec4(x.zw, y.zw);
//vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0;
//vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0;
vec4 s0 = floor(b0) * 2.0 + 1.0;
vec4 s1 = floor(b1) * 2.0 + 1.0;
vec4 sh = -step(h, vec4(0.0));
vec4 a0 = b0.xzyw + s0.xzyw * sh.xxyy;
vec4 a1 = b1.xzyw + s1.xzyw * sh.zzww;
vec3 p0 = vec3(a0.xy, h.x);
vec3 p1 = vec3(a0.zw, h.y);
vec3 p2 = vec3(a1.xy, h.z);
vec3 p3 = vec3(a1.zw, h.w);
//Normalise gradients
vec4 norm = taylorInvSqrt(
vec4(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3)));
p0 *= norm.x;
p1 *= norm.y;
p2 *= norm.z;
p3 *= norm.w;
// Mix final noise value
vec4 m = max(0.6 - vec4(dot(x0, x0), dot(x1, x1), dot(x2, x2), dot(x3, x3)),
0.0);
m = m * m;
return 42.0
* dot(m * m, vec4(dot(p0, x0), dot(p1, x1), dot(p2, x2), dot(p3, x3)));
}
float snoise(vec2 v) {
const vec4 C = vec4(0.211324865405187, // (3.0-sqrt(3.0))/6.0
0.366025403784439, // 0.5*(sqrt(3.0)-1.0)
-0.577350269189626, // -1.0 + 2.0 * C.x
0.024390243902439); // 1.0 / 41.0
// First corner
vec2 i = floor(v + dot(v, C.yy));
vec2 x0 = v - i + dot(i, C.xx);
// Other corners
vec2 i1;
i1 = (x0.x > x0.y) ? vec2(1.0, 0.0) : vec2(0.0, 1.0);
vec4 x12 = x0.xyxy + C.xxzz;
x12.xy -= i1;
// Permutations
i = mod289(i); // Avoid truncation effects in permutation
vec3 p = permute(
permute(i.y + vec3(0.0, i1.y, 1.0)) + i.x + vec3(0.0, i1.x, 1.0));
vec3 m = max(0.5 - vec3(dot(x0, x0), dot(x12.xy, x12.xy), dot(x12.zw, x12.zw)),
0.0);
m = m * m;
m = m * m;
// Gradients: 41 points uniformly over a line, mapped onto a diamond.
// The ring size 17*17 = 289 is close to a multiple of 41 (41*7 = 287)
vec3 x = 2.0 * fract(p * C.www) - 1.0;
vec3 h = abs(x) - 0.5;
vec3 ox = floor(x + 0.5);
vec3 a0 = x - ox;
// Normalise gradients implicitly by scaling m
// Approximation of: m *= inversesqrt( a0*a0 + h*h );
m *= 1.79284291400159 - 0.85373472095314 * (a0 * a0 + h * h);
// Compute final noise value at P
vec3 g;
g.x = a0.x * x0.x + h.x * x0.y;
g.yz = a0.yz * x12.xz + h.yz * x12.yw;
return 130.0 * dot(m, g);
}
// the interpolated normal
in vec3 _normal;
in vec3 _color;
in vec2 _texCoord0;
in vec4 _position;
// TODO add more uniforms
uniform float iGlobalTime; // shader playback time (in seconds)
uniform vec3 iWorldScale; // the dimensions of the object being rendered
// TODO add support for textures
// TODO document available inputs other than the uniforms
// TODO provide world scale in addition to the untransformed position
const vec3 DEFAULT_SPECULAR = vec3(0.1);
const float DEFAULT_SHININESS = 10;
)SHADER";
// V1 shaders, only support emissive
// vec4 getProceduralColor()
const QString SHADER_TEMPLATE_V1 = SHADER_COMMON + R"SCRIBE(
#line 1001
%1
#line 317
void main(void) {
vec4 emissive = getProceduralColor();
float alpha = emissive.a;
if (alpha != 1.0) {
discard;
}
vec4 diffuse = vec4(_color.rgb, alpha);
vec4 normal = vec4(packNormal(normalize(_normal)), 0.5);
_fragColor0 = diffuse;
_fragColor1 = normal;
_fragColor2 = vec4(emissive.rgb, DEFAULT_SHININESS / 128.0);
}
)SCRIBE";
// void getProceduralDiffuseAndEmissive(out vec4 diffuse, out vec4 emissive)
const QString SHADER_TEMPLATE_V2 = SHADER_COMMON + R"SCRIBE(
// FIXME should we be doing the swizzle here?
vec3 iResolution = iWorldScale.xzy;
// FIXME Mouse X,Y coordinates, and Z,W are for the click position if clicked (not supported in High Fidelity at the moment)
vec4 iMouse = vec4(0);
// FIXME We set the seconds (iDate.w) of iDate to iGlobalTime, which contains the current date in seconds
vec4 iDate = vec4(0, 0, 0, iGlobalTime);
#line 1001
%1
#line 351
void main(void) {
vec3 diffuse = _color.rgb;
vec3 specular = DEFAULT_SPECULAR;
float shininess = DEFAULT_SHININESS;
float emissiveAmount = getProceduralColors(diffuse, specular, shininess);
_fragColor0 = vec4(diffuse.rgb, 1.0);
_fragColor1 = vec4(packNormal(normalize(_normal.xyz)), 1.0 - (emissiveAmount / 2.0));
_fragColor2 = vec4(specular, shininess / 128.0);
}
)SCRIBE";

View file

@ -98,7 +98,7 @@ void RenderableShapeEntityItem::render(RenderArgs* args) {
}
batch.setModelTransform(modelTransform); // use a transform with scale, rotation, registration point and translation
if (_procedural->ready()) {
_procedural->prepare(batch, getPosition(), getDimensions());
_procedural->prepare(batch, getPosition(), getDimensions(), getOrientation());
auto outColor = _procedural->getColor(color);
batch._glColor4f(outColor.r, outColor.g, outColor.b, outColor.a);
DependencyManager::get<GeometryCache>()->renderShape(batch, MAPPING[_shape]);

View file

@ -261,7 +261,14 @@ void EntitySimulation::moveSimpleKinematics(const quint64& now) {
SetOfEntities::iterator itemItr = _simpleKinematicEntities.begin();
while (itemItr != _simpleKinematicEntities.end()) {
EntityItemPointer entity = *itemItr;
if (entity->isMovingRelativeToParent() && !entity->getPhysicsInfo()) {
// The entity-server doesn't know where avatars are, so don't attempt to do simple extrapolation for
// children of avatars. See related code in EntityMotionState::remoteSimulationOutOfSync.
bool ancestryIsKnown;
entity->getMaximumAACube(ancestryIsKnown);
bool hasAvatarAncestor = entity->hasAncestorOfType(NestableType::Avatar);
if (entity->isMovingRelativeToParent() && !entity->getPhysicsInfo() && ancestryIsKnown && !hasAvatarAncestor) {
entity->simulate(now);
_entitiesToSort.insert(entity);
++itemItr;

View file

@ -45,9 +45,11 @@ in vec2 vTexCoord;
out vec4 FragColor;
void main() {
FragColor = texture(sampler, vTexCoord);
FragColor.a *= alpha;
if (FragColor.a <= 0.0) {
discard;
}
}
)FS";
@ -359,6 +361,94 @@ ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov, float aspect, i
);
}
namespace oglplus {
namespace shapes {
class Laser : public DrawingInstructionWriter, public DrawMode {
public:
/// The type of the index container returned by Indices()
using IndexArray = std::vector<GLuint>;
using PosArray = std::vector<float>;
// vertex positions
PosArray _pos_data;
IndexArray _idx_data;
unsigned int _prim_count { 0 };
public:
Laser() {
int vertices = 2;
_pos_data.resize(vertices * 3);
_pos_data[0] = 0;
_pos_data[1] = 0;
_pos_data[2] = 0;
_pos_data[3] = 0;
_pos_data[4] = 0;
_pos_data[5] = -1;
_idx_data.push_back(0);
_idx_data.push_back(1);
_prim_count = 1;
}
/// Returns the winding direction of faces
FaceOrientation FaceWinding(void) const {
return FaceOrientation::CCW;
}
/// Queries the bounding sphere coordinates and dimensions
template <typename T>
void BoundingSphere(Sphere<T>& bounding_sphere) const {
bounding_sphere = Sphere<T>(0, 0, -0.5, 0.5);
}
typedef GLuint(Laser::*VertexAttribFunc)(std::vector<GLfloat>&) const;
/// Makes the vertex positions and returns the number of values per vertex
template <typename T>
GLuint Positions(std::vector<T>& dest) const {
dest.clear();
dest.insert(dest.begin(), _pos_data.begin(), _pos_data.end());
return 3;
}
typedef VertexAttribsInfo<
Laser,
std::tuple<VertexPositionsTag>
> VertexAttribs;
/// Returns element indices that are used with the drawing instructions
const IndexArray & Indices(Default = Default()) const {
return _idx_data;
}
/// Returns the instructions for rendering of faces
DrawingInstructions Instructions(PrimitiveType primitive) const {
DrawingInstructions instr = MakeInstructions();
DrawOperation operation;
operation.method = DrawOperation::Method::DrawElements;
operation.mode = primitive;
operation.first = 0;
operation.count = _prim_count * 2; // two indices per line primitive
operation.restart_index = DrawOperation::NoRestartIndex();
operation.phase = 0;
AddInstruction(instr, operation);
return instr;
}
/// Returns the instructions for rendering of faces
DrawingInstructions Instructions(Default = Default()) const {
return Instructions(PrimitiveType::Lines);
}
};
}
}
ShapeWrapperPtr loadLaser(const ProgramPtr& program) {
return std::make_shared<shapes::ShapeWrapper>(shapes::ShapeWrapper("Position", shapes::Laser(), *program));
}
void TextureRecycler::setSize(const uvec2& size) {
if (size == _size) {
return;

View file

@ -64,8 +64,9 @@ ProgramPtr loadCubemapShader();
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs);
ShapeWrapperPtr loadSkybox(ProgramPtr program);
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect = 1.0f);
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 32, int stacks = 32);
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 128, int stacks = 128);
ShapeWrapperPtr loadLaser(const ProgramPtr& program);
// A basic wrapper for constructing a framebuffer with a renderbuffer
// for the depth attachment and an undefined type for the color attachment

View file

@ -1,5 +1,4 @@
set(TARGET_NAME gpu-gl)
AUTOSCRIBE_SHADER_LIB(gpu)
setup_hifi_library()
link_hifi_libraries(shared gl gpu)
GroupSources("src")

View file

@ -114,6 +114,7 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::gl::GLBackend::do_glUniform3fv),
(&::gpu::gl::GLBackend::do_glUniform4fv),
(&::gpu::gl::GLBackend::do_glUniform4iv),
(&::gpu::gl::GLBackend::do_glUniformMatrix3fv),
(&::gpu::gl::GLBackend::do_glUniformMatrix4fv),
(&::gpu::gl::GLBackend::do_glColor4f),
@ -515,6 +516,22 @@ void GLBackend::do_glUniform4iv(Batch& batch, size_t paramOffset) {
(void)CHECK_GL_ERROR();
}
void GLBackend::do_glUniformMatrix3fv(Batch& batch, size_t paramOffset) {
if (_pipeline._program == 0) {
// We should call updatePipeline() to bind the program but we are not doing that
// because these uniform setters are deprecated and we don't want to create side effects
return;
}
updatePipeline();
glUniformMatrix3fv(
GET_UNIFORM_LOCATION(batch._params[paramOffset + 3]._int),
batch._params[paramOffset + 2]._uint,
batch._params[paramOffset + 1]._uint,
(const GLfloat*)batch.editData(batch._params[paramOffset + 0]._uint));
(void)CHECK_GL_ERROR();
}
void GLBackend::do_glUniformMatrix4fv(Batch& batch, size_t paramOffset) {
if (_pipeline._program == 0) {
// We should call updatePipeline() to bind the program but we are not doing that

View file

@ -136,6 +136,7 @@ public:
virtual void do_glUniform3fv(Batch& batch, size_t paramOffset) final;
virtual void do_glUniform4fv(Batch& batch, size_t paramOffset) final;
virtual void do_glUniform4iv(Batch& batch, size_t paramOffset) final;
virtual void do_glUniformMatrix3fv(Batch& batch, size_t paramOffset) final;
virtual void do_glUniformMatrix4fv(Batch& batch, size_t paramOffset) final;
virtual void do_glColor4f(Batch& batch, size_t paramOffset) final;

View file

@ -567,6 +567,16 @@ void Batch::_glUniform4iv(int32 location, int count, const int32* value) {
_params.push_back(location);
}
void Batch::_glUniformMatrix3fv(int32 location, int count, uint8 transpose, const float* value) {
ADD_COMMAND(glUniformMatrix3fv);
const int MATRIX3_SIZE = 9 * sizeof(float);
_params.push_back(cacheData(count * MATRIX3_SIZE, value));
_params.push_back(transpose);
_params.push_back(count);
_params.push_back(location);
}
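Note the push order: parameters land on _params in reverse of how the backend indexes them, and the matrix floats themselves go through cacheData. The correspondence with GLBackend::do_glUniformMatrix3fv above:

// Param layout consumed by do_glUniformMatrix3fv(batch, paramOffset):
//   _params[paramOffset + 3]._int  -> location (pushed last)
//   _params[paramOffset + 2]._uint -> count
//   _params[paramOffset + 1]._uint -> transpose
//   _params[paramOffset + 0]._uint -> cacheData() offset of the matrix floats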
void Batch::_glUniformMatrix4fv(int32 location, int count, uint8 transpose, const float* value) {
ADD_COMMAND(glUniformMatrix4fv);

View file

@ -14,6 +14,7 @@
#include <vector>
#include <mutex>
#include <functional>
#include <glm/gtc/type_ptr.hpp>
#include <shared/NsightHelpers.h>
@ -269,6 +270,7 @@ public:
void _glUniform3fv(int location, int count, const float* value);
void _glUniform4fv(int location, int count, const float* value);
void _glUniform4iv(int location, int count, const int* value);
void _glUniformMatrix3fv(int location, int count, unsigned char transpose, const float* value);
void _glUniformMatrix4fv(int location, int count, unsigned char transpose, const float* value);
void _glUniform(int location, int v0) {
@ -291,6 +293,10 @@ public:
_glUniform4f(location, v.x, v.y, v.z, v.w);
}
void _glUniform(int location, const glm::mat3& v) {
_glUniformMatrix3fv(location, 1, false, glm::value_ptr(v));
}
void _glColor4f(float red, float green, float blue, float alpha);
enum Command {
@ -348,6 +354,7 @@ public:
COMMAND_glUniform3fv,
COMMAND_glUniform4fv,
COMMAND_glUniform4iv,
COMMAND_glUniformMatrix3fv,
COMMAND_glUniformMatrix4fv,
COMMAND_glColor4f,
@ -446,7 +453,7 @@ public:
Params _params;
Bytes _data;
// SSBO class... layout MUST match the layout in TransformCamera.slh
// SSBO class... layout MUST match the layout in Transform.slh
class TransformObject {
public:
Mat4 _model;

View file

@ -95,7 +95,7 @@ public:
virtual void syncCache() = 0;
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) = 0;
// UBO class... layout MUST match the layout in TransformCamera.slh
// UBO class... layout MUST match the layout in Transform.slh
class TransformCamera {
public:
mutable Mat4 _view;

View file

@ -27,6 +27,10 @@ layout(std140) uniform transformCameraBuffer {
TransformCamera getTransformCamera() {
return _camera;
}
vec3 getEyeWorldPos() {
return _camera._viewInverse[3].xyz;
}
<@endfunc@>
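getEyeWorldPos() reads the translation column of the inverse view matrix: the eye sits at the origin of eye space, so its world-space position p satisfies

V\,p = (0,0,0,1)^{\top} \quad\Rightarrow\quad p = V^{-1}\,(0,0,0,1)^{\top}

which is exactly the fourth column, _camera._viewInverse[3].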

View file

@ -173,6 +173,7 @@ void AccountManager::setAuthURL(const QUrl& authURL) {
<< "from previous settings file";
}
}
settings.endGroup();
if (_accountInfo.getAccessToken().token.isEmpty()) {
qCWarning(networking) << "Unable to load account file. No existing account settings will be loaded.";

View file

@ -85,10 +85,10 @@ void EntityMotionState::updateServerPhysicsVariables() {
return;
}
_serverPosition = _entity->getPosition();
_serverRotation = _entity->getRotation();
_serverVelocity = _entity->getVelocity();
_serverAngularVelocity = _entity->getAngularVelocity();
Transform localTransform;
_entity->getLocalTransformAndVelocities(localTransform, _serverVelocity, _serverAngularVelocity);
_serverPosition = localTransform.getTranslation();
_serverRotation = localTransform.getRotation();
_serverAcceleration = _entity->getAcceleration();
_serverActionData = _entity->getActionData();
}
@ -271,14 +271,25 @@ bool EntityMotionState::isCandidateForOwnership() const {
bool EntityMotionState::remoteSimulationOutOfSync(uint32_t simulationStep) {
// NOTE: we only get here if we think we own the simulation
assert(_body);
bool parentTransformSuccess;
Transform localToWorld = _entity->getParentTransform(parentTransformSuccess);
Transform worldToLocal;
Transform worldVelocityToLocal;
if (parentTransformSuccess) {
localToWorld.evalInverse(worldToLocal);
worldVelocityToLocal = worldToLocal;
worldVelocityToLocal.setTranslation(glm::vec3(0.0f));
}
// if we've never checked before, our _lastStep will be 0, and we need to initialize our state
if (_lastStep == 0) {
btTransform xform = _body->getWorldTransform();
_serverPosition = bulletToGLM(xform.getOrigin());
_serverRotation = bulletToGLM(xform.getRotation());
_serverVelocity = getBodyLinearVelocityGTSigma();
_serverPosition = worldToLocal.transform(bulletToGLM(xform.getOrigin()));
_serverRotation = worldToLocal.getRotation() * bulletToGLM(xform.getRotation());
_serverVelocity = worldVelocityToLocal.transform(getBodyLinearVelocityGTSigma());
_serverAcceleration = Vectors::ZERO;
_serverAngularVelocity = bulletToGLM(_body->getAngularVelocity());
_serverAngularVelocity = worldVelocityToLocal.transform(bulletToGLM(_body->getAngularVelocity()));
_lastStep = simulationStep;
_serverActionData = _entity->getActionData();
_numInactiveUpdates = 1;
@ -315,11 +326,21 @@ bool EntityMotionState::remoteSimulationOutOfSync(uint32_t simulationStep) {
_lastStep = simulationStep;
if (glm::length2(_serverVelocity) > 0.0f) {
_serverVelocity += _serverAcceleration * dt;
_serverVelocity *= powf(1.0f - _body->getLinearDamping(), dt);
// NOTE: we ignore the second-order acceleration term when integrating
// the position forward because Bullet also does this.
_serverPosition += dt * _serverVelocity;
// the entity-server doesn't know where avatars are, so it doesn't do simple extrapolation for children of
// avatars. We are trying to guess what values the entity server has, so we don't do it here, either. See
// related code in EntitySimulation::moveSimpleKinematics.
bool ancestryIsKnown;
_entity->getMaximumAACube(ancestryIsKnown);
bool hasAvatarAncestor = _entity->hasAncestorOfType(NestableType::Avatar);
if (ancestryIsKnown && !hasAvatarAncestor) {
_serverVelocity += _serverAcceleration * dt;
_serverVelocity *= powf(1.0f - _body->getLinearDamping(), dt);
// NOTE: we ignore the second-order acceleration term when integrating
// the position forward because Bullet also does this.
_serverPosition += dt * _serverVelocity;
}
}
if (_entity->actionDataNeedsTransmit()) {
@ -341,7 +362,7 @@ bool EntityMotionState::remoteSimulationOutOfSync(uint32_t simulationStep) {
// compute position error
btTransform worldTrans = _body->getWorldTransform();
glm::vec3 position = bulletToGLM(worldTrans.getOrigin());
glm::vec3 position = worldToLocal.transform(bulletToGLM(worldTrans.getOrigin()));
float dx2 = glm::distance2(position, _serverPosition);
const float MAX_POSITION_ERROR_SQUARED = 0.000004f; // corresponds to 2mm
@ -376,7 +397,7 @@ bool EntityMotionState::remoteSimulationOutOfSync(uint32_t simulationStep) {
}
}
const float MIN_ROTATION_DOT = 0.99999f; // This corresponds to about 0.5 degrees of rotation
glm::quat actualRotation = bulletToGLM(worldTrans.getRotation());
glm::quat actualRotation = worldToLocal.getRotation() * bulletToGLM(worldTrans.getRotation());
#ifdef WANT_DEBUG
if ((fabsf(glm::dot(actualRotation, _serverRotation)) < MIN_ROTATION_DOT)) {
@ -481,11 +502,11 @@ void EntityMotionState::sendUpdate(OctreeEditPacketSender* packetSender, uint32_
}
// remember properties for local server prediction
_serverPosition = _entity->getPosition();
_serverRotation = _entity->getRotation();
_serverVelocity = _entity->getVelocity();
Transform localTransform;
_entity->getLocalTransformAndVelocities(localTransform, _serverVelocity, _serverAngularVelocity);
_serverPosition = localTransform.getTranslation();
_serverRotation = localTransform.getRotation();
_serverAcceleration = _entity->getAcceleration();
_serverAngularVelocity = _entity->getAngularVelocity();
_serverActionData = _entity->getActionData();
EntityItemProperties properties;
@ -590,7 +611,7 @@ uint32_t EntityMotionState::getIncomingDirtyFlags() {
if (_body && _entity) {
dirtyFlags = _entity->getDirtyFlags();
if (dirtyFlags | Simulation::DIRTY_SIMULATOR_ID) {
if (dirtyFlags & Simulation::DIRTY_SIMULATOR_ID) {
// when SIMULATOR_ID changes we must check for reinterpretation of asymmetric collision mask
// bits for the avatar groups (e.g. MY_AVATAR vs OTHER_AVATAR)
uint8_t entityCollisionMask = _entity->getCollisionless() ? 0 : _entity->getCollisionMask();
@ -603,8 +624,12 @@ uint32_t EntityMotionState::getIncomingDirtyFlags() {
// we add DIRTY_MOTION_TYPE if the body's motion type disagrees with entity velocity settings
int bodyFlags = _body->getCollisionFlags();
bool isMoving = _entity->isMovingRelativeToParent();
if (((bodyFlags & btCollisionObject::CF_STATIC_OBJECT) && isMoving) ||
(bodyFlags & btCollisionObject::CF_KINEMATIC_OBJECT && !isMoving)) {
if (((bodyFlags & btCollisionObject::CF_STATIC_OBJECT) && isMoving) // ||
// TODO -- there is opportunity for an optimization here, but this currently causes
// excessive re-insertion of the rigid body.
// (bodyFlags & btCollisionObject::CF_KINEMATIC_OBJECT && !isMoving)
) {
dirtyFlags |= Simulation::DIRTY_MOTION_TYPE;
}
}
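The velocity update in remoteSimulationOutOfSync above is Bullet-style semi-implicit (symplectic) Euler: velocity is advanced and damped first, then position is advanced with the new velocity, which is why the second-order \tfrac{1}{2}a\,\Delta t^{2} term mentioned in the comment never appears:

v_{t+\Delta t} = (v_t + a\,\Delta t)\,(1 - d)^{\Delta t}, \qquad x_{t+\Delta t} = x_t + v_{t+\Delta t}\,\Delta t

where d is the body's linear damping.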

View file

@ -168,6 +168,26 @@ public:
static const QString& MENU_PATH();
enum Hand {
LeftHand = 0x01,
RightHand = 0x02,
};
enum class HandLaserMode {
None, // Render no hand lasers
Overlay, // Render hand lasers only if they intersect with the UI layer, and stop at the UI layer
};
virtual bool setHandLaser(
uint32_t hands, // Bits from the Hand enum
HandLaserMode mode, // Mode in which to render
const vec4& color = vec4(1), // The color of the rendered laser
const vec3& direction = vec3(0, 0, -1) // The direction in which to render the hand lasers
) {
return false;
}
signals:
void recommendedFramebufferSizeChanged(const QSize & size);
// Indicates that this display plugin is no longer valid for use.

View file

@ -19,7 +19,7 @@
#include <NumericalConstants.h>
#include <GLMHelpers.h>
#include "ProceduralShaders.h"
#include "ProceduralCommon_frag.h"
// Userdata parsing constants
static const QString PROCEDURAL_USER_DATA_KEY = "ProceduralEntity";
@ -39,6 +39,7 @@ static const std::string STANDARD_UNIFORM_NAMES[Procedural::NUM_STANDARD_UNIFORM
"iFrameCount",
"iWorldScale",
"iWorldPosition",
"iWorldOrientation",
"iChannelResolution"
};
@ -202,9 +203,10 @@ bool Procedural::ready() {
return true;
}
void Procedural::prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size) {
void Procedural::prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size, const glm::quat& orientation) {
_entityDimensions = size;
_entityPosition = position;
_entityOrientation = glm::mat3_cast(orientation);
if (_shaderUrl.isLocalFile()) {
auto lastModified = (quint64)QFileInfo(_shaderPath).lastModified().toMSecsSinceEpoch();
if (lastModified > _shaderModified) {
@ -227,7 +229,7 @@ void Procedural::prepare(gpu::Batch& batch, const glm::vec3& position, const glm
std::string fragmentShaderSource = _fragmentSource;
size_t replaceIndex = fragmentShaderSource.find(PROCEDURAL_COMMON_BLOCK);
if (replaceIndex != std::string::npos) {
fragmentShaderSource.replace(replaceIndex, PROCEDURAL_COMMON_BLOCK.size(), SHADER_COMMON);
fragmentShaderSource.replace(replaceIndex, PROCEDURAL_COMMON_BLOCK.size(), ProceduralCommon_frag);
}
replaceIndex = fragmentShaderSource.find(PROCEDURAL_VERSION);
@ -404,10 +406,10 @@ void Procedural::setupUniforms() {
});
}
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[SCALE]) {
if (gpu::Shader::INVALID_LOCATION != _standardUniformSlots[ORIENTATION]) {
// FIXME move into the 'set once' section, since this doesn't change over time
_uniforms.push_back([=](gpu::Batch& batch) {
batch._glUniform(_standardUniformSlots[SCALE], _entityDimensions);
batch._glUniform(_standardUniformSlots[ORIENTATION], _entityOrientation);
});
}

View file

@ -38,7 +38,7 @@ public:
void parse(const QString& userDataJson);
bool ready();
void prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size);
void prepare(gpu::Batch& batch, const glm::vec3& position, const glm::vec3& size, const glm::quat& orientation);
const gpu::ShaderPointer& getShader() const { return _shader; }
glm::vec4 getColor(const glm::vec4& entityColor);
@ -56,6 +56,7 @@ public:
FRAME_COUNT,
SCALE,
POSITION,
ORIENTATION,
CHANNEL_RESOLUTION,
NUM_STANDARD_UNIFORMS
};
@ -93,6 +94,7 @@ protected:
// Entity metadata
glm::vec3 _entityDimensions;
glm::vec3 _entityPosition;
glm::mat3 _entityOrientation;
private:
// This should only be called from the render thread, as it shares data with Procedural::prepare

View file

@ -1,3 +1,5 @@
<@include gpu/Config.slh@>
// Generated on <$_SCRIBE_DATE$>
//
// Created by Bradley Austin Davis on 2015/09/05
// Copyright 2013-2015 High Fidelity, Inc.
@ -17,11 +19,11 @@
// https://github.com/ashima/webgl-noise
//
const std::string SHADER_COMMON = R"SHADER(
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
float mod289(float x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec2 mod289(vec2 x) {
@ -262,11 +264,6 @@ float snoise(vec2 v) {
return 130.0 * dot(m, g);
}
// shader playback time (in seconds)
uniform float iGlobalTime;
// the dimensions of the object being rendered
uniform vec3 iWorldScale;
#define PROCEDURAL 1
//PROCEDURAL_VERSION
@ -286,15 +283,16 @@ const float iSampleRate = 1.0;
const vec4 iChannelTime = vec4(0.0);
uniform float iGlobalTime; // shader playback time (in seconds)
uniform vec4 iDate;
uniform int iFrameCount;
uniform vec3 iWorldPosition;
uniform vec3 iWorldPosition; // the position of the object being rendered
uniform vec3 iWorldScale; // the dimensions of the object being rendered
uniform mat3 iWorldOrientation; // the orientation of the object being rendered
uniform vec3 iChannelResolution[4];
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
#endif
)SHADER";
#endif

View file

@ -52,7 +52,7 @@ void ProceduralSkybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum,
batch.setModelTransform(Transform()); // only for Mac
auto& procedural = skybox._procedural;
procedural.prepare(batch, glm::vec3(0), glm::vec3(1));
procedural.prepare(batch, glm::vec3(0), glm::vec3(1), glm::quat());
auto textureSlot = procedural.getShader()->getTextures().findLocation("cubeMap");
auto bufferSlot = procedural.getShader()->getBuffers().findLocation("skyboxBuffer");
skybox.prepare(batch, textureSlot, bufferSlot);

View file

@ -1,5 +1,5 @@
set(TARGET_NAME render-utils)
AUTOSCRIBE_SHADER_LIB(gpu model render)
AUTOSCRIBE_SHADER_LIB(gpu model render procedural)
# pull in the resources.qrc file
qt5_add_resources(QT_RESOURCES_FILE "${CMAKE_CURRENT_SOURCE_DIR}/res/fonts/fonts.qrc")
setup_hifi_library(Widgets OpenGL Network Qml Quick Script)

View file

@ -1,5 +1,5 @@
set(TARGET_NAME render)
AUTOSCRIBE_SHADER_LIB(gpu model)
AUTOSCRIBE_SHADER_LIB(gpu model procedural)
setup_hifi_library()
link_hifi_libraries(shared gpu model)

View file

@ -334,6 +334,7 @@ void ScriptEngines::clearScripts() {
Settings settings;
settings.beginWriteArray(SETTINGS_KEY);
settings.remove("");
settings.endArray();
}
void ScriptEngines::saveScripts() {

View file

@ -18,6 +18,7 @@
const QString Settings::firstRun { "firstRun" };
Settings::Settings() :
_manager(DependencyManager::get<Setting::Manager>()),
_locker(&(_manager->getLock()))
@ -25,6 +26,9 @@ Settings::Settings() :
}
Settings::~Settings() {
if (_prefixes.size() != 0) {
qFatal("Unstable Settings Prefixes: You must call endGroup for every beginGroup and endArray for every begin*Array call");
}
}
void Settings::remove(const QString& key) {
@ -50,14 +54,17 @@ bool Settings::contains(const QString& key) const {
}
int Settings::beginReadArray(const QString & prefix) {
_prefixes.push(prefix);
return _manager->beginReadArray(prefix);
}
void Settings::beginWriteArray(const QString& prefix, int size) {
_prefixes.push(prefix);
_manager->beginWriteArray(prefix, size);
}
void Settings::endArray() {
_prefixes.pop();
_manager->endArray();
}
@ -66,10 +73,12 @@ void Settings::setArrayIndex(int i) {
}
void Settings::beginGroup(const QString& prefix) {
_prefixes.push(prefix);
_manager->beginGroup(prefix);
}
void Settings::endGroup() {
_prefixes.pop();
_manager->endGroup();
}
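With the prefix stack in place, any unbalanced beginGroup/begin*Array now trips the qFatal in ~Settings(). A hypothetical balanced-usage sketch:

// Hypothetical usage: every beginGroup/beginWriteArray must be closed before
// the Settings object goes out of scope, or the destructor calls qFatal.
{
    Settings settings;
    settings.beginGroup("exampleGroup"); // pushes onto _prefixes
    settings.setValue("exampleKey", 1);
    settings.endGroup();                 // pops; _prefixes is empty again
} // destructor sees _prefixes.size() == 0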

View file

@ -58,8 +58,10 @@ public:
void setQuatValue(const QString& name, const glm::quat& quatValue);
void getQuatValueIfValid(const QString& name, glm::quat& quatValue);
private:
QSharedPointer<Setting::Manager> _manager;
QWriteLocker _locker;
QStack<QString> _prefixes;
};
namespace Setting {

View file

@ -98,6 +98,8 @@ namespace Setting {
// Register Handle
manager->registerHandle(this);
_isInitialized = true;
} else {
qWarning() << "Settings interface used after manager destroyed";
}
// Load value from disk

View file

@ -144,6 +144,15 @@ public:
bool hasAncestorOfType(NestableType nestableType);
void getLocalTransformAndVelocities(Transform& localTransform,
glm::vec3& localVelocity,
glm::vec3& localAngularVelocity) const;
void setLocalTransformAndVelocities(
const Transform& localTransform,
const glm::vec3& localVelocity,
const glm::vec3& localAngularVelocity);
protected:
const NestableType _nestableType; // EntityItem or an AvatarData
QUuid _id;
@ -151,13 +160,6 @@ protected:
quint16 _parentJointIndex { 0 }; // which joint of the parent is this relative to?
SpatiallyNestablePointer getParentPointer(bool& success) const;
void getLocalTransformAndVelocities(Transform& localTransform, glm::vec3& localVelocity, glm::vec3& localAngularVelocity) const;
void setLocalTransformAndVelocities(
const Transform& localTransform,
const glm::vec3& localVelocity,
const glm::vec3& localAngularVelocity);
mutable SpatiallyNestableWeakPointer _parent;
virtual void beParentOfChild(SpatiallyNestablePointer newChild) const;

View file

@ -8,6 +8,8 @@
#include "OculusBaseDisplayPlugin.h"
#include <ViewFrustum.h>
#include <controllers/Pose.h>
#include <display-plugins/CompositorHelper.h>
#include "OculusHelpers.h"
@ -24,8 +26,25 @@ bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
auto trackingState = ovr_GetTrackingState(_session, _currentRenderFrameInfo.predictedDisplayTime, ovrTrue);
_currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
_currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;
Lock lock(_mutex);
_frameInfos[frameIndex] = _currentRenderFrameInfo;
std::array<glm::mat4, 2> handPoses;
// Make controller poses available to the presentation thread
ovr_for_each_hand([&](ovrHandType hand) {
static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked | ovrStatus_PositionTracked; // require both bits
if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
return;
}
auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
});
withRenderThreadLock([&] {
_uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
_handPoses = handPoses;
_frameInfos[frameIndex] = _currentRenderFrameInfo;
});
return true;
}

View file

@ -243,91 +243,13 @@ void OculusControllerManager::TouchDevice::focusOutEvent() {
void OculusControllerManager::TouchDevice::handlePose(float deltaTime,
const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand,
const ovrPoseStatef& handPose) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
// user
// forward
// -z
// |
// y| user
// y o----x right
// o-----x user
// | up
// |
// z
//
// Rift
// From ABOVE the hand canonical axes look like this:
//
// | | | | y | | | |
// | | | | | | | | |
// | | | | |
// |left | / x---- + \ |right|
// | _/ z \_ |
// | | | |
// | | | |
//
// So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
// the rotation to align the Touch axes with those of the hands is:
//
// touchToHand = halfTurnAboutY * quarterTurnAboutX
// Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
// You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
// the combination (measurement * offset) is identity at this orientation.
//
// Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
//
// An approximate offset for the Touch can be obtained by inspection:
//
// Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
//
// So the full equation is:
//
// Q = combinedMeasurement * touchToHand
//
// Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
//
// Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
auto poseId = hand == ovrHand_Left ? controller::LEFT_HAND : controller::RIGHT_HAND;
auto& pose = _poseStateMap[poseId];
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
static const glm::quat touchToHand = yFlip * quarterX;
static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET * 2.0f);
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
auto translationOffset = (hand == ovrHand_Left ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (hand == ovrHand_Left ? leftRotationOffset : rightRotationOffset);
glm::quat rotation = toGlm(handPose.ThePose.Orientation);
pose.translation = toGlm(handPose.ThePose.Position);
pose.translation += rotation * translationOffset;
pose.rotation = rotation * rotationOffset;
pose.angularVelocity = toGlm(handPose.AngularVelocity);
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
pose = ovrControllerPoseToHandPose(hand, handPose);
// transform into avatar frame
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
pose = pose.transform(controllerToAvatar);
}
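// Illustrative sketch (hypothetical helper, not part of this commit): the
// "transform into avatar frame" step above chains two known transforms. A
// pose sampled in the HMD sensor frame is re-expressed in the avatar frame
// by going sensor -> world -> avatar:
//   M(avatar<-sensor) = M(avatar<-world) * M(world<-sensor)
//                     = glm::inverse(avatarMat) * sensorToWorldMat
#include <glm/glm.hpp>

glm::mat4 sensorToAvatarMatrix(const glm::mat4& avatarMat, const glm::mat4& sensorToWorldMat) {
    return glm::inverse(avatarMat) * sensorToWorldMat;
}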
bool OculusControllerManager::TouchDevice::triggerHapticPulse(float strength, float duration, controller::Hand hand) {

View file

@@ -15,6 +15,9 @@
#include <QtCore/QFile>
#include <QtCore/QDir>
#include <controllers/Input.h>
#include <controllers/Pose.h>
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
@@ -191,3 +194,88 @@ void SwapFramebufferWrapper::onBind(oglplus::Framebuffer::Target target) {
void SwapFramebufferWrapper::onUnbind(oglplus::Framebuffer::Target target) {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}
controller::Pose ovrControllerPoseToHandPose(
ovrHandType hand,
const ovrPoseStatef& handPose) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
//                       user
//                      forward
//                        -z
//                         |
//                        y|      user
//      y                  o----x right
//       o-----x          user
//       |                 up
//       |
//      z
//
//     Rift
//
// From ABOVE the hand canonical axes look like this:
//
//     | | | |          y         | | | |
//     | | | |          |         | | | |
//     |   |            |           |   |
//     |left |     / x----+ \     |right|
//     |      _/        z      \_      |
//     |   |                      |   |
//     |   |                      |   |
//
// So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
// the rotation to align the Touch axes with those of the hands is:
//
// touchToHand = halfTurnAboutY * quarterTurnAboutX
// Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
// You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
// the combination (measurement * offset) is identity at this orientation.
//
// Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
//
// An approximate offset for the Touch can be obtained by inspection:
//
// Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
//
// So the full equation is:
//
// Q = combinedMeasurement * touchToHand
//
// Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
//
// Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
static const glm::quat touchToHand = yFlip * quarterX;
static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET * 2.0f);
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
auto translationOffset = (hand == ovrHand_Left ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (hand == ovrHand_Left ? leftRotationOffset : rightRotationOffset);
glm::quat rotation = toGlm(handPose.ThePose.Orientation);
controller::Pose pose;
pose.translation = toGlm(handPose.ThePose.Position);
pose.translation += rotation * translationOffset;
pose.rotation = rotation * rotationOffset;
pose.angularVelocity = toGlm(handPose.AngularVelocity);
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
return pose;
}
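// Illustrative sketch (hypothetical helper, not part of this commit): both
// offsets above are expressed in the controller's local frame, so the
// translation offset must be rotated by the measured orientation before it
// is added, and the rotation offset is post-multiplied.
#include <controllers/Pose.h>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

controller::Pose applyLocalHandOffsets(const glm::vec3& rawPosition, const glm::quat& rawRotation,
                                       const glm::vec3& translationOffset, const glm::quat& rotationOffset) {
    controller::Pose pose;
    pose.translation = rawPosition + rawRotation * translationOffset; // offset rotated into world space
    pose.rotation = rawRotation * rotationOffset;                     // offset relative to the measurement
    pose.valid = true;
    return pose;
}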

View file

@@ -13,6 +13,7 @@
#include <glm/gtc/matrix_transform.hpp>
#include <gl/OglplusHelpers.h>
#include <controllers/Forward.h>
void logWarning(const char* what);
void logFatal(const char* what);
@@ -128,3 +129,7 @@ protected:
private:
ovrSession _session;
};
controller::Pose ovrControllerPoseToHandPose(
ovrHandType hand,
const ovrPoseStatef& handPose);

View file

@@ -45,8 +45,9 @@ bool OculusLegacyDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
_currentRenderFrameInfo.predictedDisplayTime = _currentRenderFrameInfo.sensorSampleTime = ovr_GetTimeInSeconds();
_trackingState = ovrHmd_GetTrackingState(_hmd, _currentRenderFrameInfo.predictedDisplayTime);
_currentRenderFrameInfo.rawRenderPose = _currentRenderFrameInfo.renderPose = toGlm(_trackingState.HeadPose.ThePose);
Lock lock(_mutex);
_frameInfos[frameIndex] = _currentRenderFrameInfo;
withRenderThreadLock([&]{
_frameInfos[frameIndex] = _currentRenderFrameInfo;
});
return true;
}

View file

@@ -18,9 +18,11 @@
#include <GLMHelpers.h>
#include <gl/GlWindow.h>
#include <controllers/Pose.h>
#include <PerfStat.h>
#include <plugins/PluginContainer.h>
#include <ViewFrustum.h>
#include <display-plugins/CompositorHelper.h>
#include <shared/NsightHelpers.h>
#include "OpenVrHelpers.h"
@@ -29,11 +31,13 @@ Q_DECLARE_LOGGING_CATEGORY(displayplugins)
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
static vr::IVRCompositor* _compositor{ nullptr };
static vr::IVRCompositor* _compositor { nullptr };
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
static mat4 _sensorResetMat;
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
bool _openVrDisplayActive { false };
@@ -59,16 +63,14 @@ bool OpenVrDisplayPlugin::internalActivate() {
// left + right eyes
_renderTargetSize.x *= 2;
{
Lock lock(_poseMutex);
withRenderThreadLock([&] {
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
});
// FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
_cullingProjection = _eyeProjections[0];
}
});
_compositor = vr::VRCompositor();
Q_ASSERT(_compositor);
@@ -113,7 +115,7 @@ void OpenVrDisplayPlugin::internalDeactivate() {
void OpenVrDisplayPlugin::customizeContext() {
// Display plugins in DLLs must initialize glew locally
static std::once_flag once;
std::call_once(once, []{
std::call_once(once, [] {
glewExperimental = true;
GLenum err = glewInit();
glGetError(); // clear the potential error from glewExperimental
@@ -123,9 +125,10 @@ void OpenVrDisplayPlugin::customizeContext() {
}
void OpenVrDisplayPlugin::resetSensors() {
Lock lock(_poseMutex);
glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
withRenderThreadLock([&] {
glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
});
}
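// Illustrative sketch (assumed semantics of cancelOutRollAndPitch, whose
// definition is not shown in this diff): keep only the heading (yaw) of the
// current HMD pose, so that inverting it re-centers the view without
// tilting the tracking space.
#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

glm::mat4 yawOnly(const glm::mat4& m) {
    const glm::vec3 forward = glm::vec3(m[2]);          // local +Z axis in world space
    const float yaw = std::atan2(forward.x, forward.z); // heading about world +Y
    glm::mat4 result = glm::rotate(glm::mat4(1.0f), yaw, glm::vec3(0.0f, 1.0f, 0.0f));
    result[3] = m[3];                                   // preserve the translation
    return result;
}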
@@ -150,6 +153,24 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, _currentRenderFrameInfo.predictedDisplayTime, _trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
vr::TrackedDeviceIndex_t handIndices[2] { vr::k_unTrackedDeviceIndexInvalid, vr::k_unTrackedDeviceIndexInvalid };
{
vr::TrackedDeviceIndex_t controllerIndices[2];
auto trackedCount = _system->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_Controller, controllerIndices, 2);
// Find the left and right hand controllers, if they exist
for (uint32_t i = 0; i < std::min<uint32_t>(trackedCount, 2); ++i) {
if (_trackedDevicePose[controllerIndices[i]].bPoseIsValid) {
auto role = _system->GetControllerRoleForTrackedDeviceIndex(controllerIndices[i]);
if (vr::TrackedControllerRole_LeftHand == role) {
handIndices[0] = controllerIndices[i];
} else if (vr::TrackedControllerRole_RightHand == role) {
handIndices[1] = controllerIndices[i];
}
}
}
}
// copy and process predictedTrackedDevicePoses
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
@@ -159,18 +180,39 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
_currentRenderFrameInfo.rawRenderPose = toGlm(_trackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking);
_currentRenderFrameInfo.renderPose = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd];
Lock lock(_mutex);
_frameInfos[frameIndex] = _currentRenderFrameInfo;
bool keyboardVisible = isOpenVrKeyboardShown();
std::array<mat4, 2> handPoses;
if (!keyboardVisible) {
for (int i = 0; i < 2; ++i) {
if (handIndices[i] == vr::k_unTrackedDeviceIndexInvalid) {
continue;
}
auto deviceIndex = handIndices[i];
const mat4& mat = _trackedDevicePoseMat4[deviceIndex];
const vec3& linearVelocity = _trackedDeviceLinearVelocities[deviceIndex];
const vec3& angularVelocity = _trackedDeviceAngularVelocities[deviceIndex];
auto correctedPose = openVrControllerPoseToHandPose(i == 0, mat, linearVelocity, angularVelocity);
static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
handPoses[i] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
}
}
withRenderThreadLock([&] {
_uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
// Make controller poses available to the presentation thread
_handPoses = handPoses;
_frameInfos[frameIndex] = _currentRenderFrameInfo;
});
return true;
}
void OpenVrDisplayPlugin::hmdPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentPresentFrameIndex)
// Flip y-axis since GL UV coords are backwards.
static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };
static vr::VRTextureBounds_t leftBounds { 0, 0, 0.5f, 1 };
static vr::VRTextureBounds_t rightBounds { 0.5f, 0, 1, 1 };
vr::Texture_t texture { (void*)oglplus::GetName(_compositeFramebuffer->color), vr::API_OpenGL, vr::ColorSpace_Auto };
@@ -191,6 +233,10 @@ bool OpenVrDisplayPlugin::isHmdMounted() const {
}
void OpenVrDisplayPlugin::updatePresentPose() {
mat4 sensorResetMat;
withPresentThreadLock([&] {
sensorResetMat = _sensorResetMat;
});
{
float fSecondsSinceLastVsync;
_system->GetTimeSinceLastVsync(&fSecondsSinceLastVsync, nullptr);
@@ -202,9 +248,8 @@ void OpenVrDisplayPlugin::updatePresentPose() {
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, fPredictedSecondsFromNow, &pose, 1);
_currentPresentFrameInfo.rawPresentPose = toGlm(pose.mDeviceToAbsoluteTracking);
}
_currentPresentFrameInfo.presentPose = _sensorResetMat * _currentPresentFrameInfo.rawPresentPose;
_currentPresentFrameInfo.presentPose = sensorResetMat * _currentPresentFrameInfo.rawPresentPose;
mat3 renderRotation(_currentPresentFrameInfo.rawRenderPose);
mat3 presentRotation(_currentPresentFrameInfo.rawPresentPose);
_currentPresentFrameInfo.presentReprojection = glm::mat3(glm::inverse(renderRotation) * presentRotation);
}
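// Illustrative sketch (hypothetical helper, not part of this commit): the
// reprojection term computed above is the rotation-only delta between the
// head pose the frame was rendered with and the fresher pose sampled at
// present time; the compositor can rotate the finished image by this delta
// to hide late head motion.
#include <glm/glm.hpp>

glm::mat3 rotationDelta(const glm::mat4& rawRenderPose, const glm::mat4& rawPresentPose) {
    const glm::mat3 renderRotation(rawRenderPose);
    const glm::mat3 presentRotation(rawPresentPose);
    return glm::inverse(renderRotation) * presentRotation;
}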

View file

@@ -43,5 +43,4 @@ private:
vr::IVRSystem* _system { nullptr };
std::atomic<vr::EDeviceActivityLevel> _hmdActivityLevel { vr::k_EDeviceActivityLevel_Unknown };
static const QString NAME;
mutable Mutex _poseMutex;
};

View file

@@ -18,8 +18,9 @@
#include <QtQuick/QQuickWindow>
#include <Windows.h>
#include <OffscreenUi.h>
#include <controllers/Pose.h>
#include <NumericalConstants.h>
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
@@ -208,6 +209,10 @@ void disableOpenVrKeyboard() {
QObject::disconnect(_focusConnection);
}
bool isOpenVrKeyboardShown() {
return _keyboardShown;
}
void handleOpenVrEvents() {
if (!activeHmd) {
@@ -238,3 +243,86 @@ void handleOpenVrEvents() {
}
controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
//                       user
//                      forward
//                        -z
//                         |
//                        y|      user
//      y                  o----x right
//       o-----x          user
//       |                 up
//       |
//      z
//
//     Rift
//
// From ABOVE the hand canonical axes look like this:
//
//     | | | |          y         | | | |
//     | | | |          |         | | | |
//     |   |            |           |   |
//     |left |     / x----+ \     |right|
//     |      _/        z      \_      |
//     |   |                      |   |
//     |   |                      |   |
//
// So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
// the rotation to align the Touch axes with those of the hands is:
//
// touchToHand = halfTurnAboutY * quarterTurnAboutX
// Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
// You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
// the combination (measurement * offset) is identity at this orientation.
//
// Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
//
// An approximate offset for the Touch can be obtained by inspection:
//
// Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
//
// So the full equation is:
//
// Q = combinedMeasurement * touchToHand
//
// Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
//
// Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
static const glm::quat touchToHand = yFlip * quarterX;
static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET * 2.0f);
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
auto translationOffset = (isLeftHand ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (isLeftHand ? leftRotationOffset : rightRotationOffset);
glm::vec3 position = extractTranslation(mat);
glm::quat rotation = glm::normalize(glm::quat_cast(mat));
position += rotation * translationOffset;
rotation = rotation * rotationOffset;
// transform into avatar frame
auto result = controller::Pose(position, rotation);
// handle change in velocity due to translationOffset
result.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
result.angularVelocity = angularVelocity;
return result;
}
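// Illustrative sketch (hypothetical helper, not part of this commit): the
// velocity correction above is the rigid-body rule for an offset point. If
// the controller origin moves with linear velocity v and angular velocity
// w, a point p rigidly attached to it moves with
//   v_p = v + w x (p - origin)
// where p is the offset hand position and origin is the raw translation
// extracted from `mat`.
#include <glm/glm.hpp>

glm::vec3 offsetPointVelocity(const glm::vec3& linearVelocity, const glm::vec3& angularVelocity,
                              const glm::vec3& offsetPoint, const glm::vec3& origin) {
    return linearVelocity + glm::cross(angularVelocity, offsetPoint - origin);
}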

View file

@@ -12,6 +12,8 @@
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <controllers/Forward.h>
bool openVrSupported();
vr::IVRSystem* acquireOpenVrSystem();
@@ -20,6 +22,7 @@ void handleOpenVrEvents();
bool openVrQuitRequested();
void enableOpenVrKeyboard();
void disableOpenVrKeyboard();
bool isOpenVrKeyboardShown();
template<typename F>
@@ -54,3 +57,5 @@ inline vr::HmdMatrix34_t toOpenVr(const mat4& m) {
}
return result;
}
controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity);

View file

@@ -37,10 +37,6 @@ vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET / 2.0f,
CONTROLLER_LENGTH_OFFSET * 2.0f);
static const char* CONTROLLER_MODEL_STRING = "vr_controller_05_wireless_b";
static const QString MENU_PARENT = "Avatar";
@@ -386,86 +382,11 @@ void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint
void ViveControllerManager::InputDevice::handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
const mat4& mat, const vec3& linearVelocity,
const vec3& angularVelocity, bool isLeftHand) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
//                       user
//                      forward
//                        -z
//                         |
//                        y|      user
//      y                  o----x right
//       o-----x          user
//       |                 up
//       |
//      z
//
//     Vive
//
// From ABOVE the hand canonical axes look like this:
//
//     | | | |          y         | | | |
//     | | | |          |         | | | |
//     |   |            |           |   |
//     |left |     / x----+ \     |right|
//     |      _/        z      \_      |
//     |   |                      |   |
//     |   |                      |   |
//
// So when the user is standing in Vive space facing the -zAxis with hands outstretched and palms down
// the rotation to align the Vive axes with those of the hands is:
//
// QviveToHand = halfTurnAboutY * quarterTurnAboutX
// Due to how the Vive controllers fit into the palm there is an offset that is different for each hand.
// You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
// the combination (measurement * offset) is identity at this orientation.
//
// Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
//
// An approximate offset for the Vive can be obtained by inspection:
//
// Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
//
// So the full equation is:
//
// Q = combinedMeasurement * viveToHand
//
// Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
//
// Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
static const glm::quat viveToHand = yFlip * quarterX;
static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * viveToHand;
static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * viveToHand;
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
auto translationOffset = (isLeftHand ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (isLeftHand ? leftRotationOffset : rightRotationOffset);
glm::vec3 position = extractTranslation(mat);
glm::quat rotation = glm::normalize(glm::quat_cast(mat));
position += rotation * translationOffset;
rotation = rotation * rotationOffset;
auto pose = openVrControllerPoseToHandPose(isLeftHand, mat, linearVelocity, angularVelocity);
// transform into avatar frame
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
auto avatarPose = controller::Pose(position, rotation);
// handle change in velocity due to translationOffset
avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
avatarPose.angularVelocity = angularVelocity;
_poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar);
_poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] = pose.transform(controllerToAvatar);
}
bool ViveControllerManager::InputDevice::triggerHapticPulse(float strength, float duration, controller::Hand hand) {

View file

@@ -248,7 +248,10 @@ function propsArePhysical(props) {
// and we should not be showing lasers when someone else is using the Reticle to indicate a 2D minor mode.
var EXTERNALLY_MANAGED_2D_MINOR_MODE = true;
function isIn2DMode() {
return EXTERNALLY_MANAGED_2D_MINOR_MODE && Reticle.visible;
// In this version, we make our own determination of whether we're aimed at a HUD element,
// because other scripts (such as handControllerPointer) might be using some other visualization
// instead of setting Reticle.visible.
return EXTERNALLY_MANAGED_2D_MINOR_MODE && (Reticle.pointingAtSystemOverlay || Overlays.getOverlayAtPoint(Reticle.position));
}
function restore2DMode() {
if (!EXTERNALLY_MANAGED_2D_MINOR_MODE) {

View file

@@ -305,14 +305,21 @@ var leftTrigger = new Trigger();
var rightTrigger = new Trigger();
var activeTrigger = rightTrigger;
var activeHand = Controller.Standard.RightHand;
var LEFT_HUD_LASER = 1;
var RIGHT_HUD_LASER = 2;
var BOTH_HUD_LASERS = LEFT_HUD_LASER + RIGHT_HUD_LASER;
var activeHudLaser = RIGHT_HUD_LASER;
function toggleHand() { // unequivocally switch which hand controls mouse position
if (activeHand === Controller.Standard.RightHand) {
activeHand = Controller.Standard.LeftHand;
activeTrigger = leftTrigger;
activeHudLaser = LEFT_HUD_LASER;
} else {
activeHand = Controller.Standard.RightHand;
activeTrigger = rightTrigger;
activeHudLaser = RIGHT_HUD_LASER;
}
clearSystemLaser();
}
function makeToggleAction(hand) { // return a function(0|1) that makes the specified hand control mouse when 1
return function (on) {
@@ -329,8 +336,8 @@ Script.scriptEnding.connect(clickMapping.disable);
clickMapping.from(Controller.Standard.RT).peek().to(rightTrigger.triggerPress);
clickMapping.from(Controller.Standard.LT).peek().to(leftTrigger.triggerPress);
// Full smoothed trigger is a click.
clickMapping.from(rightTrigger.full).to(Controller.Actions.ReticleClick);
clickMapping.from(leftTrigger.full).to(Controller.Actions.ReticleClick);
clickMapping.from(rightTrigger.full).when(isPointingAtOverlay).to(Controller.Actions.ReticleClick);
clickMapping.from(leftTrigger.full).when(isPointingAtOverlay).to(Controller.Actions.ReticleClick);
clickMapping.from(Controller.Standard.RightSecondaryThumb).peek().to(Controller.Actions.ContextMenu);
clickMapping.from(Controller.Standard.LeftSecondaryThumb).peek().to(Controller.Actions.ContextMenu);
// Partial smoothed trigger is activation.
@@ -342,6 +349,7 @@ clickMapping.enable();
// Same properties as handControllerGrab search sphere
var BALL_SIZE = 0.011;
var BALL_ALPHA = 0.5;
var LASER_COLOR_XYZW = {x: 10 / 255, y: 10 / 255, z: 255 / 255, w: BALL_ALPHA};
var fakeProjectionBall = Overlays.addOverlay("sphere", {
size: 5 * BALL_SIZE,
color: {red: 255, green: 10, blue: 10},
@@ -356,9 +364,23 @@ Script.scriptEnding.connect(function () {
overlays.forEach(Overlays.deleteOverlay);
});
var visualizationIsShowing = false; // Not whether it is desired, but simply whether it is. Just an optimization.
var SYSTEM_LASER_DIRECTION = {x: 0, y: 0, z: -1};
var systemLaserOn = false;
function clearSystemLaser() {
if (!systemLaserOn) {
return;
}
HMD.disableHandLasers(BOTH_HUD_LASERS);
systemLaserOn = false;
}
function turnOffVisualization(optionalEnableClicks) { // because we're showing cursor on HUD
if (!optionalEnableClicks) {
expireMouseCursor();
clearSystemLaser();
} else if (!systemLaserOn) {
// If the active plugin doesn't implement hand lasers, show the mouse reticle instead.
systemLaserOn = HMD.setHandLasers(activeHudLaser, true, LASER_COLOR_XYZW, SYSTEM_LASER_DIRECTION);
Reticle.visible = !systemLaserOn;
}
if (!visualizationIsShowing) {
return;
@@ -371,6 +393,7 @@ function turnOffVisualization(optionalEnableClicks) { // because we're showing c
var MAX_RAY_SCALE = 32000; // Anything large. It's a scale, not a distance.
function updateVisualization(controllerPosition, controllerDirection, hudPosition3d, hudPosition2d) {
ignore(controllerPosition, controllerDirection, hudPosition2d);
clearSystemLaser();
// Show an indication of where the cursor will appear when crossing a HUD element,
// and where in-world clicking will occur.
//
@@ -392,9 +415,11 @@ function updateVisualization(controllerPosition, controllerDirection, hudPositio
// For now, though, we present a false projection of the cursor onto whatever is below it. This is
// different from the hand beam termination because the false projection is from the camera, while
// the hand beam termination is from the hand.
/* // FIXME: We can tighten this up later, once we know what will and won't be included.
var eye = Camera.getPosition();
var falseProjection = intersection3d(eye, Vec3.subtract(hudPosition3d, eye));
Overlays.editOverlay(fakeProjectionBall, {visible: true, position: falseProjection});
*/
Reticle.visible = false;
return visualizationIsShowing; // In case we change caller to act conditionally.
@@ -442,7 +467,6 @@ function update() {
if (HMD.active) { // Doesn't hurt anything without the guard, but consider it documentation.
Reticle.depth = SPHERICAL_HUD_DISTANCE; // NOT CORRECT IF WE SWITCH TO OFFSET SPHERE!
}
Reticle.visible = true;
return turnOffVisualization(true);
}
// We are not pointing at a HUD element (but it could be a 3d overlay).

View file

@@ -546,17 +546,10 @@
disableProperties();
} else {
var activeElement = document.activeElement;
try {
var selected = (activeElement
&& activeElement.selectionStart == 0
&& activeElement.selectionEnd == activeElement.value.length);
} catch (e) {
var selected = false;
}
properties = data.selections[0].properties;
elID.innerHTML = properties.id;
elType.innerHTML = properties.type;
@@ -571,7 +564,6 @@
} else {
enableProperties();
}
elName.value = properties.name;
@@ -811,11 +803,10 @@
elYTextureURL.value = properties.yTextureURL;
elZTextureURL.value = properties.zTextureURL;
}
if (selected) {
activeElement.focus();
activeElement.select();
}
var activeElement = document.activeElement;
activeElement.select();
}
}
});
@@ -1178,7 +1169,7 @@
for (var i = 0; i < els.length; i++) {
var clicked = false;
var originalText;
els[i].onfocus = function() {
els[i].onfocus = function(e) {
originalText = this.value;
this.select();
clicked = false;