Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-08-04 21:53:12 +02:00)

Commit a359626316: Merge branch 'master' into 21070

41 changed files with 258 additions and 183 deletions
@@ -1,28 +1,49 @@
{
"name": "XBox to Standard",
"channels": [
{ "from": "GamePad.LY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.LY" },
{ "from": "GamePad.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.LX" },
{ "from": "GamePad.LY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateZ" },
{ "from": "GamePad.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateX" },
{ "from": "GamePad.LT", "to": "Standard.LT" },
{ "from": "GamePad.LB", "to": "Standard.LB" },
{ "from": "GamePad.LS", "to": "Standard.LS" },

{ "from": "GamePad.RY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.RY" },
{ "from": "GamePad.RX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.RX" },

{ "from": "GamePad.RX",
"when": [ "Application.InHMD", "Application.SnapTurn" ],
"to": "Actions.StepYaw",
"filters":
[
{ "type": "deadZone", "min": 0.15 },
"constrainToInteger",
{ "type": "pulse", "interval": 0.25 },
{ "type": "scale", "scale": 22.5 }
]
},

{ "from": "GamePad.RX", "to": "Actions.Yaw" },

{ "from": "GamePad.RY",
"to": "Actions.VERTICAL_UP",
"filters":
[
{ "type": "deadZone", "min": 0.95 },
"invert"
]
},

{ "from": "GamePad.RT", "to": "Standard.RT" },
{ "from": "GamePad.RB", "to": "Standard.RB" },
{ "from": "GamePad.RS", "to": "Standard.RS" },

{ "from": "GamePad.Back", "to": "Standard.Back" },
{ "from": "GamePad.Start", "to": "Standard.Start" },
{ "from": "GamePad.Start", "to": "Actions.CycleCamera" },
{ "from": "GamePad.Back", "to": "Actions.ContextMenu" },

{ "from": [ "GamePad.DU", "GamePad.DL", "GamePad.DR", "GamePad.DD" ], "to": "Standard.LeftPrimaryThumb", "peek": true },
{ "from": "GamePad.DU", "to": "Standard.DU" },
{ "from": "GamePad.DD", "to": "Standard.DD" },
{ "from": "GamePad.DL", "to": "Standard.DL" },
{ "from": "GamePad.DR", "to": "Standard.DR" },

{ "from": [ "GamePad.A", "GamePad.B", "GamePad.X", "GamePad.Y" ], "to": "Standard.RightPrimaryThumb", "peek": true },
{ "from": [ "GamePad.Y" ], "to": "Standard.RightPrimaryThumb", "peek": true },
{ "from": "GamePad.A", "to": "Standard.A" },
{ "from": "GamePad.B", "to": "Standard.B" },
{ "from": "GamePad.X", "to": "Standard.X" },
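Note: the "Actions.StepYaw" route above chains four filters (deadZone, constrainToInteger, pulse, scale) so that holding the right stick past the dead zone produces discrete 22.5-degree snap turns at most once every 0.25 s rather than continuous yaw. A minimal C++ sketch of that chain follows; the helper names are illustrative stand-ins, not the engine's actual filter classes.

    #include <chrono>
    #include <cmath>

    // Illustrative stand-ins for the mapping filters used by the snap-turn route above.
    static float deadZone(float v, float minimum) {
        return std::fabs(v) < minimum ? 0.0f : v;
    }

    static float constrainToInteger(float v) {
        // collapse the analog value to -1, 0 or +1
        return (v > 0.0f) ? 1.0f : ((v < 0.0f) ? -1.0f : 0.0f);
    }

    struct Pulse {
        // let a non-zero value through at most once per interval
        std::chrono::steady_clock::time_point _last {};
        float apply(float v, float intervalSeconds) {
            auto now = std::chrono::steady_clock::now();
            if (v != 0.0f && (now - _last) >= std::chrono::duration<float>(intervalSeconds)) {
                _last = now;
                return v;
            }
            return 0.0f;
        }
    };

    // deadZone(0.15) -> constrainToInteger -> pulse(0.25) -> scale(22.5)
    float snapTurnStepDegrees(float rightStickX, Pulse& pulse) {
        float v = deadZone(rightStickX, 0.15f);
        v = constrainToInteger(v);
        v = pulse.apply(v, 0.25f);
        return v * 22.5f; // degrees of yaw per discrete step
    }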
@@ -184,7 +184,7 @@ ScrollingWindow {
prompt.selected.connect(function (jsonResult) {
if (jsonResult) {
var result = JSON.parse(jsonResult);
var url = result.textInput;
var url = result.textInput.trim();
var shapeType;
switch (result.comboBox) {
case SHAPE_TYPE_SIMPLE_HULL:

@@ -32,6 +32,8 @@ FocusScope {

readonly property ComboBox control: comboBox

signal accepted();

implicitHeight: comboBox.height;
focus: true

@@ -134,6 +136,7 @@ FocusScope {
function hideList() {
popup.visible = false;
scrollView.hoverEnabled = false;
root.accepted();
}

FocusScope {

@@ -189,6 +189,7 @@ ModalWindow {
left: parent.left;
bottom: parent.bottom;
leftMargin: 6; // Magic number to align with warning icon
bottomMargin: 6;
}
}

@@ -202,7 +203,10 @@ ModalWindow {
bottom: parent.bottom;
}
model: root.comboBox ? root.comboBox.items : [];
onCurrentTextChanged: updateCheckbox();
onAccepted: {
updateCheckbox();
focus = true;
}
}
}

@@ -315,6 +319,7 @@ ModalWindow {
Component.onCompleted: {
keyboardEnabled = HMD.active;
updateIcon();
updateCheckbox();
d.resize();
textField.forceActiveFocus();
}
@@ -867,6 +867,10 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {

void AudioClient::handleAudioInput() {

if (!_inputDevice) {
return;
}

// input samples required to produce exactly NETWORK_FRAME_SAMPLES of output
const int inputSamplesRequired = (_inputToNetworkResampler ?
_inputToNetworkResampler->getMinInput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) :

@@ -95,6 +95,11 @@ namespace controller {
return getValue(Input(device, source, ChannelType::BUTTON).getID());
}

float ScriptingInterface::getAxisValue(int source) const {
auto userInputMapper = DependencyManager::get<UserInputMapper>();
return userInputMapper->getValue(Input((uint32_t)source));
}

float ScriptingInterface::getAxisValue(StandardAxisChannel source, uint16_t device) const {
return getValue(Input(device, source, ChannelType::AXIS).getID());
}

@@ -81,6 +81,7 @@ namespace controller {
Q_INVOKABLE float getValue(const int& source) const;
Q_INVOKABLE float getButtonValue(StandardButtonChannel source, uint16_t device = 0) const;
Q_INVOKABLE float getAxisValue(StandardAxisChannel source, uint16_t device = 0) const;
Q_INVOKABLE float getAxisValue(int source) const;
Q_INVOKABLE Pose getPoseValue(const int& source) const;
Q_INVOKABLE Pose getPoseValue(StandardPoseChannel source, uint16_t device = 0) const;
@@ -27,19 +27,27 @@ AnyEndpoint::AnyEndpoint(Endpoint::List children) : Endpoint(Input::INVALID_INPU
}
}

// The value of an any-point is considered to be the maximum absolute value,
// this handles any's of multiple axis values as well as single values
float AnyEndpoint::peek() const {
float result = 0;
float result = 0.0f;
for (auto& child : _children) {
result = std::max(result, child->peek());
auto childValue = child->peek();
if (std::abs(childValue) > std::abs(result)) {
result = childValue;
}
}
return result;
}

// Fetching the value must trigger any necessary side effects of value() on ALL the children.
float AnyEndpoint::value() {
float result = 0;
float result = 0.0f;
for (auto& child : _children) {
result = std::max(result, child->value());
auto childValue = child->value();
if (std::abs(childValue) > std::abs(result)) {
result = childValue;
}
}
return result;
}
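Note: the AnyEndpoint change above swaps std::max for a comparison of absolute values, so a child axis pushed to -1.0 now wins over an idle child at 0.0 while its sign is preserved, and value() is still read from every child so their side effects run. A standalone C++ sketch of the selection rule, assuming plain float inputs:

    #include <cmath>
    #include <vector>

    // Return the element with the largest magnitude, keeping its sign.
    float maxByMagnitude(const std::vector<float>& childValues) {
        float result = 0.0f;
        for (float v : childValues) {
            if (std::abs(v) > std::abs(result)) {
                result = v;
            }
        }
        return result;
    }

    // maxByMagnitude({0.0f, -0.8f, 0.3f}) returns -0.8f;
    // std::max over the same values would have returned 0.3f and hidden the deflection.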
@@ -374,7 +374,8 @@ void HmdDisplayPlugin::updateFrameData() {
}

// this offset needs to match GRAB_POINT_SPHERE_OFFSET in scripts/system/libraries/controllers.js
static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.1f, 0.04f, -0.32f);
//static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.1f, 0.04f, -0.32f);
static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.0f, 0.0f, -0.175f);
vec3 grabPointOffset = GRAB_POINT_SPHERE_OFFSET;
if (i == 0) {
grabPointOffset.x *= -1.0f; // this changes between left and right hands

@@ -666,7 +666,6 @@ void GLBackend::recycle() const {
for (auto pair : externalTexturesTrash) {
auto fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
pair.second(pair.first, fence);
decrementTextureGPUCount();
}
}
@@ -192,6 +192,11 @@ void GLBackend::resetResourceStage() {

void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
if (slot >= (GLuint) MAX_NUM_RESOURCE_TEXTURES) {
// "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" + slot + " which doesn't exist. MaxNumResourceTextures = " + getMaxNumResourceTextures());
return;
}

TexturePointer resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);

if (!resourceTexture) {

@@ -20,9 +20,20 @@ std::shared_ptr<GLTextureTransferHelper> GLTexture::_textureTransferHelper;

// FIXME placeholder for texture memory over-use
#define DEFAULT_MAX_MEMORY_MB 256
#define MIN_FREE_GPU_MEMORY_PERCENTAGE 0.25f
#define OVER_MEMORY_PRESSURE 2.0f

// FIXME other apps show things like Oculus home consuming large amounts of GPU memory
// which causes us to blur textures needlessly (since other app GPU memory usage will likely
// be swapped out and not cause any actual impact
//#define CHECK_MIN_FREE_GPU_MEMORY
#ifdef CHECK_MIN_FREE_GPU_MEMORY
#define MIN_FREE_GPU_MEMORY_PERCENTAGE 0.25f
#endif

// Allow 65% of all available GPU memory to be consumed by textures
// FIXME overly conservative?
#define MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE 0.65f

const GLenum GLTexture::CUBE_FACE_LAYOUT[6] = {
GL_TEXTURE_CUBE_MAP_POSITIVE_X, GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,

@@ -107,6 +118,7 @@ float GLTexture::getMemoryPressure() {
// If we can't query the dedicated memory just use a fallback fixed value of 256 MB
totalGpuMemory = MB_TO_BYTES(DEFAULT_MAX_MEMORY_MB);
} else {
#ifdef CHECK_MIN_FREE_GPU_MEMORY
// Check the global free GPU memory
auto freeGpuMemory = getFreeDedicatedMemory();
if (freeGpuMemory) {

@@ -115,21 +127,26 @@ float GLTexture::getMemoryPressure() {
if (freeGpuMemory != lastFreeGpuMemory) {
lastFreeGpuMemory = freeGpuMemory;
if (freePercentage < MIN_FREE_GPU_MEMORY_PERCENTAGE) {
qDebug() << "Exceeded max GPU memory";
qCDebug(gpugllogging) << "Exceeded min free GPU memory " << freePercentage;
return OVER_MEMORY_PRESSURE;
}
}
}
#endif
}

// Allow 50% of all available GPU memory to be consumed by textures
// FIXME overly conservative?
availableTextureMemory = (totalGpuMemory >> 1);
availableTextureMemory = static_cast<gpu::Size>(totalGpuMemory * MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE);
}

// Return the consumed texture memory divided by the available texture memory.
auto consumedGpuMemory = Context::getTextureGPUMemoryUsage();
return (float)consumedGpuMemory / (float)availableTextureMemory;
float memoryPressure = (float)consumedGpuMemory / (float)availableTextureMemory;
static Context::Size lastConsumedGpuMemory = 0;
if (memoryPressure > 1.0f && lastConsumedGpuMemory != consumedGpuMemory) {
lastConsumedGpuMemory = consumedGpuMemory;
qCDebug(gpugllogging) << "Exceeded max allowed texture memory: " << consumedGpuMemory << " / " << availableTextureMemory;
}
return memoryPressure;
}
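Note: with the constants above, getMemoryPressure() now budgets 65% of total GPU memory for textures (instead of the earlier 50% right-shift) and returns consumed divided by budget, so a value above 1.0 means the cap is exceeded. A rough standalone sketch of that arithmetic, assuming the byte counts are already known:

    #include <cstdint>

    // Mirrors the pressure calculation above: a result > 1.0 means textures exceed their budget.
    float texturePressure(uint64_t totalGpuMemoryBytes, uint64_t consumedTextureBytes,
                          float maxConsumedFraction = 0.65f) {
        uint64_t availableTextureMemory =
            static_cast<uint64_t>(totalGpuMemoryBytes * maxConsumedFraction);
        return static_cast<float>(consumedTextureBytes) / static_cast<float>(availableTextureMemory);
    }

    // Example: 4 GiB card with 3 GiB of textures -> 3 / (4 * 0.65) ≈ 1.15, i.e. over budget.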
@@ -23,7 +23,7 @@ vec2 float32x3_to_oct(in vec3 v) {

vec3 oct_to_float32x3(in vec2 e) {
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
if (v.z < 0) {
if (v.z < 0.0) {
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
}
return normalize(v);

@@ -108,7 +108,7 @@ float evalLightAttenuation(Light l, float d) {

// "Fade" the edges of light sources to make things look a bit more attractive.
// Note: this tends to look a bit odd at lower exponents.
attenuation *= min(1, max(0, -(d - cutoff)));
attenuation *= min(1.0, max(0.0, -(d - cutoff)));

return attenuation;
}

@@ -118,7 +118,7 @@ SphericalHarmonics getLightAmbientSphere(Light l) {
}

bool getLightHasAmbientMap(Light l) {
return l._control.x > 0;
return l._control.x > 0.0;
}

float getLightAmbientMapNumMips(Light l) {
@@ -141,7 +141,11 @@ bool haveAssetServer() {
}

GetMappingRequest* AssetClient::createGetMappingRequest(const AssetPath& path) {
return new GetMappingRequest(path);
auto request = new GetMappingRequest(path);

request->moveToThread(thread());

return request;
}

GetAllMappingsRequest* AssetClient::createGetAllMappingsRequest() {

@@ -305,7 +309,7 @@ void AssetClient::handleAssetGetInfoReply(QSharedPointer<ReceivedMessage> messag
void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
Q_ASSERT(QThread::currentThread() == thread());

auto assetHash = message->read(SHA256_HASH_LENGTH);
auto assetHash = message->readHead(SHA256_HASH_LENGTH);
qCDebug(asset_client) << "Got reply for asset: " << assetHash.toHex();

MessageID messageID;

@@ -349,8 +353,8 @@ void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, S
} else {
auto weakNode = senderNode.toWeakRef();

connect(message.data(), &ReceivedMessage::progress, this, [this, weakNode, messageID, length]() {
handleProgressCallback(weakNode, messageID, length);
connect(message.data(), &ReceivedMessage::progress, this, [this, weakNode, messageID, length](qint64 size) {
handleProgressCallback(weakNode, messageID, size, length);
});
connect(message.data(), &ReceivedMessage::completed, this, [this, weakNode, messageID]() {
handleCompleteCallback(weakNode, messageID);

@@ -358,7 +362,8 @@ void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, S
}
}

void AssetClient::handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID, DataOffset length) {
void AssetClient::handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID,
qint64 size, DataOffset length) {
auto senderNode = node.toStrongRef();

if (!senderNode) {

@@ -381,13 +386,7 @@ void AssetClient::handleProgressCallback(const QWeakPointer<Node>& node, Message
}

auto& callbacks = requestIt->second;
auto& message = callbacks.message;

if (!message) {
return;
}

callbacks.progressCallback(message->getSize(), length);
callbacks.progressCallback(size, length);
}

void AssetClient::handleCompleteCallback(const QWeakPointer<Node>& node, MessageID messageID) {

@@ -93,7 +93,7 @@ private:
bool cancelGetAssetRequest(MessageID id);
bool cancelUploadAssetRequest(MessageID id);

void handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID, DataOffset length);
void handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID, qint64 size, DataOffset length);
void handleCompleteCallback(const QWeakPointer<Node>& node, MessageID messageID);

struct GetAssetRequestData {
@@ -11,10 +11,20 @@

#include "AssetResourceRequest.h"

#include <QtCore/QLoggingCategory>

#include "AssetClient.h"
#include "AssetUtils.h"
#include "MappingRequest.h"
#include <QtCore/qloggingcategory.h>
#include "NetworkLogging.h"

static const int DOWNLOAD_PROGRESS_LOG_INTERVAL_SECONDS = 5;

AssetResourceRequest::AssetResourceRequest(const QUrl& url) :
ResourceRequest(url)
{
_lastProgressDebug = p_high_resolution_clock::now() - std::chrono::seconds(DOWNLOAD_PROGRESS_LOG_INTERVAL_SECONDS);
}

AssetResourceRequest::~AssetResourceRequest() {
if (_assetMappingRequest) {

@@ -24,10 +34,6 @@ AssetResourceRequest::~AssetResourceRequest() {
if (_assetRequest) {
_assetRequest->deleteLater();
}

if (_sendTimer) {
cleanupTimer();
}
}

bool AssetResourceRequest::urlIsAssetHash() const {

@@ -37,24 +43,6 @@ bool AssetResourceRequest::urlIsAssetHash() const {
return hashRegex.exactMatch(_url.toString());
}

void AssetResourceRequest::setupTimer() {
Q_ASSERT(!_sendTimer);
static const int TIMEOUT_MS = 2000;

_sendTimer = new QTimer(this);
connect(_sendTimer, &QTimer::timeout, this, &AssetResourceRequest::onTimeout);

_sendTimer->setSingleShot(true);
_sendTimer->start(TIMEOUT_MS);
}

void AssetResourceRequest::cleanupTimer() {
Q_ASSERT(_sendTimer);
disconnect(_sendTimer, 0, this, 0);
_sendTimer->deleteLater();
_sendTimer = nullptr;
}

void AssetResourceRequest::doSend() {
// We'll either have a hash or an ATP path to a file (that maps to a hash)
if (urlIsAssetHash()) {

@@ -81,8 +69,6 @@ void AssetResourceRequest::requestMappingForPath(const AssetPath& path) {
Q_ASSERT(_state == InProgress);
Q_ASSERT(request == _assetMappingRequest);

cleanupTimer();

switch (request->getError()) {
case MappingRequest::NoError:
// we have no error, we should have a resulting hash - use that to send off a request for that asset

@@ -118,7 +104,6 @@ void AssetResourceRequest::requestMappingForPath(const AssetPath& path) {
_assetMappingRequest = nullptr;
});

setupTimer();
_assetMappingRequest->start();
}

@@ -133,8 +118,6 @@ void AssetResourceRequest::requestHash(const AssetHash& hash) {
Q_ASSERT(_state == InProgress);
Q_ASSERT(req == _assetRequest);
Q_ASSERT(req->getState() == AssetRequest::Finished);

cleanupTimer();

switch (req->getError()) {
case AssetRequest::Error::NoError:

@@ -162,35 +145,29 @@ void AssetResourceRequest::requestHash(const AssetHash& hash) {
_assetRequest = nullptr;
});

setupTimer();
_assetRequest->start();
}

void AssetResourceRequest::onDownloadProgress(qint64 bytesReceived, qint64 bytesTotal) {
Q_ASSERT(_state == InProgress);

// We've received data, so reset the timer
_sendTimer->start();

emit progress(bytesReceived, bytesTotal);

auto now = p_high_resolution_clock::now();

// if we haven't received the full asset check if it is time to output progress to log
// we do so every X seconds to assist with ATP download tracking

if (bytesReceived != bytesTotal
&& now - _lastProgressDebug > std::chrono::seconds(DOWNLOAD_PROGRESS_LOG_INTERVAL_SECONDS)) {

int percentage = roundf((float) bytesReceived / (float) bytesTotal * 100.0f);

qCDebug(networking).nospace() << "Progress for " << _url.path() << " - "
<< bytesReceived << " of " << bytesTotal << " bytes - " << percentage << "%";

_lastProgressDebug = now;
}

}

void AssetResourceRequest::onTimeout() {
if (_state == InProgress) {
qWarning() << "Asset request timed out: " << _url;
if (_assetRequest) {
disconnect(_assetRequest, 0, this, 0);
_assetRequest->deleteLater();
_assetRequest = nullptr;
}
if (_assetMappingRequest) {
disconnect(_assetMappingRequest, 0, this, 0);
_assetMappingRequest->deleteLater();
_assetMappingRequest = nullptr;
}
_result = Timeout;
_state = Finished;
emit finished();
}
cleanupTimer();
}
@@ -14,13 +14,15 @@

#include <QUrl>

#include <PortableHighResolutionClock.h>

#include "AssetRequest.h"
#include "ResourceRequest.h"

class AssetResourceRequest : public ResourceRequest {
Q_OBJECT
public:
AssetResourceRequest(const QUrl& url) : ResourceRequest(url) { }
AssetResourceRequest(const QUrl& url);
virtual ~AssetResourceRequest() override;

protected:

@@ -28,21 +30,17 @@ protected:

private slots:
void onDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
void onTimeout();

private:
void setupTimer();
void cleanupTimer();

bool urlIsAssetHash() const;

void requestMappingForPath(const AssetPath& path);
void requestHash(const AssetHash& hash);

QTimer* _sendTimer { nullptr };

GetMappingRequest* _assetMappingRequest { nullptr };
AssetRequest* _assetRequest { nullptr };

p_high_resolution_clock::time_point _lastProgressDebug;
};

#endif
@@ -54,14 +54,14 @@ void ReceivedMessage::appendPacket(NLPacket& packet) {
"We should not be appending to a complete message");

// Limit progress signal to every X packets
const int EMIT_PROGRESS_EVERY_X_PACKETS = 100;
const int EMIT_PROGRESS_EVERY_X_PACKETS = 50;

++_numPackets;

_data.append(packet.getPayload(), packet.getPayloadSize());

if (_numPackets % EMIT_PROGRESS_EVERY_X_PACKETS == 0) {
emit progress();
emit progress(getSize());
}

if (packet.getPacketPosition() == NLPacket::PacketPosition::LAST) {

@@ -78,7 +78,7 @@ public:
template<typename T> qint64 readHeadPrimitive(T* data);

signals:
void progress();
void progress(qint64 size);
void completed();

private slots:
@@ -51,7 +51,7 @@ DeferredFragment unpackDeferredFragmentNoPosition(vec2 texcoord) {
vec4 specularVal;

DeferredFragment frag;
frag.depthVal = -1;
frag.depthVal = -1.0;
normalVal = texture(normalMap, texcoord);
diffuseVal = texture(albedoMap, texcoord);
specularVal = texture(specularMap, texcoord);

@@ -138,8 +138,8 @@ void unpackMidLowNormalCurvature(vec2 texcoord, out vec4 midNormalCurvature, out
lowNormalCurvature = fetchDiffusedCurvature(texcoord);
midNormalCurvature.xyz = normalize((midNormalCurvature.xyz - 0.5f) * 2.0f);
lowNormalCurvature.xyz = normalize((lowNormalCurvature.xyz - 0.5f) * 2.0f);
midNormalCurvature.w = (midNormalCurvature.w * 2 - 1);
lowNormalCurvature.w = (lowNormalCurvature.w * 2 - 1);
midNormalCurvature.w = (midNormalCurvature.w * 2.0 - 1.0);
lowNormalCurvature.w = (lowNormalCurvature.w * 2.0 - 1.0);
}
<@endfunc@>

@@ -27,8 +27,8 @@ float evalOpaqueFinalAlpha(float alpha, float mapAlpha) {
}

const float DEFAULT_ROUGHNESS = 0.9;
const float DEFAULT_SHININESS = 10;
const float DEFAULT_METALLIC = 0;
const float DEFAULT_SHININESS = 10.0;
const float DEFAULT_METALLIC = 0.0;
const vec3 DEFAULT_SPECULAR = vec3(0.1);
const vec3 DEFAULT_EMISSIVE = vec3(0.0);
const float DEFAULT_OCCLUSION = 1.0;

@@ -23,7 +23,7 @@
// prepareGlobalLight
// Transform directions to worldspace
vec3 fragNormal = vec3((normal));
vec3 fragEyeVector = vec3(invViewMat * vec4(-position, 0.0));
vec3 fragEyeVector = vec3(invViewMat * vec4(-1.0*position, 0.0));
vec3 fragEyeDir = normalize(fragEyeVector);

// Get light

@@ -143,13 +143,13 @@ vec3 evalLightmappedColor(mat4 invViewMat, float shadowAttenuation, float obscur
float facingLight = step(PERPENDICULAR_THRESHOLD, diffuseDot);

// Reevaluate the shadow attenuation for light facing fragments
float lightAttenuation = (1 - facingLight) + facingLight * shadowAttenuation;
float lightAttenuation = (1.0 - facingLight) + facingLight * shadowAttenuation;

// Diffuse light is the lightmap dimmed by shadow
vec3 diffuseLight = lightAttenuation * lightmap;

// Ambient light is the lightmap when in shadow
vec3 ambientLight = (1 - lightAttenuation) * lightmap * getLightAmbientIntensity(light);
vec3 ambientLight = (1.0 - lightAttenuation) * lightmap * getLightAmbientIntensity(light);

return isLightmapEnabled() * obscurance * albedo * (diffuseLight + ambientLight);
}
@@ -478,6 +478,8 @@ void RenderDeferredSetup::run(const render::SceneContextPointer& sceneContext, c
// Setup the global directional pass pipeline
{
if (deferredLightingEffect->_shadowMapEnabled) {
// If the keylight has an ambient Map then use the Skybox version of the pass
// otherwise use the ambient sphere version
if (keyLight->getAmbientMap()) {
program = deferredLightingEffect->_directionalSkyboxLightShadow;
locations = deferredLightingEffect->_directionalSkyboxLightShadowLocations;

@@ -486,11 +488,11 @@ void RenderDeferredSetup::run(const render::SceneContextPointer& sceneContext, c
locations = deferredLightingEffect->_directionalAmbientSphereLightShadowLocations;
}
} else {
// If the keylight has an ambient Map then use the Skybox version of the pass
// otherwise use the ambient sphere version
if (keyLight->getAmbientMap()) {
program = deferredLightingEffect->_directionalAmbientSphereLight;
locations = deferredLightingEffect->_directionalAmbientSphereLightLocations;
//program = deferredLightingEffect->_directionalSkyboxLight;
//locations = deferredLightingEffect->_directionalSkyboxLightLocations;
program = deferredLightingEffect->_directionalSkyboxLight;
locations = deferredLightingEffect->_directionalSkyboxLightLocations;
} else {
program = deferredLightingEffect->_directionalAmbientSphereLight;
locations = deferredLightingEffect->_directionalAmbientSphereLightLocations;
@@ -23,7 +23,7 @@ vec4 evalSkyboxLight(vec3 direction, float lod) {
<@func declareEvalAmbientSpecularIrradiance(supportAmbientSphere, supportAmbientMap, supportIfAmbientMapElseAmbientSphere)@>

vec3 fresnelSchlickAmbient(vec3 fresnelColor, vec3 lightDir, vec3 halfDir, float gloss) {
return fresnelColor + (max(vec3(gloss), fresnelColor) - fresnelColor) * pow(1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0), 5);
return fresnelColor + (max(vec3(gloss), fresnelColor) - fresnelColor) * pow(1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0), 5.0);
}

<@if supportAmbientMap@>

@@ -32,7 +32,7 @@ vec3 fresnelSchlickAmbient(vec3 fresnelColor, vec3 lightDir, vec3 halfDir, float

vec3 evalAmbientSpecularIrradiance(Light light, vec3 fragEyeDir, vec3 fragNormal, float roughness, vec3 fresnel) {
vec3 direction = -reflect(fragEyeDir, fragNormal);
vec3 ambientFresnel = fresnelSchlickAmbient(fresnel, fragEyeDir, fragNormal, 1 - roughness);
vec3 ambientFresnel = fresnelSchlickAmbient(fresnel, fragEyeDir, fragNormal, 1.0 - roughness);
vec3 specularLight;
<@if supportIfAmbientMapElseAmbientSphere@>
if (getLightHasAmbientMap(light))

@@ -76,7 +76,7 @@ void evalLightingAmbient(out vec3 diffuse, out vec3 specular, Light light, vec3

// Diffuse from ambient
diffuse = (1 - metallic) * evalSphericalLight(getLightAmbientSphere(light), normal).xyz;
diffuse = (1.0 - metallic) * evalSphericalLight(getLightAmbientSphere(light), normal).xyz;

// Specular highlight from ambient
specular = evalAmbientSpecularIrradiance(light, eyeDir, normal, roughness, fresnel) * obscurance * getLightAmbientIntensity(light);

@@ -40,7 +40,7 @@ void evalLightingPoint(out vec3 diffuse, out vec3 specular, Light light,
if (isShowLightContour() > 0.0) {
// Show edge
float edge = abs(2.0 * ((getLightRadius(light) - fragLightDistance) / (0.1)) - 1.0);
if (edge < 1) {
if (edge < 1.0) {
float edgeCoord = exp2(-8.0*edge*edge);
diffuse = vec3(edgeCoord * edgeCoord * getLightShowContour(light) * getLightColor(light));
}

@@ -44,7 +44,7 @@ void evalLightingSpot(out vec3 diffuse, out vec3 specular, Light light,
float edgeDistS = dot(fragLightDistance * vec2(cosSpotAngle, sqrt(1.0 - cosSpotAngle * cosSpotAngle)), -getLightSpotOutsideNormal2(light));
float edgeDist = min(edgeDistR, edgeDistS);
float edge = abs(2.0 * (edgeDist / (0.1)) - 1.0);
if (edge < 1) {
if (edge < 1.0) {
float edgeCoord = exp2(-8.0*edge*edge);
diffuse = vec3(edgeCoord * edgeCoord * getLightColor(light));
}
@@ -118,8 +118,8 @@ vec3 fresnelSchlickColor(vec3 fresnelColor, vec3 lightDir, vec3 halfDir) {

float specularDistribution(float roughness, vec3 normal, vec3 halfDir) {
float ndoth = clamp(dot(halfDir, normal), 0.0, 1.0);
float gloss2 = pow(0.001 + roughness, 4);
float denom = (ndoth * ndoth*(gloss2 - 1) + 1);
float gloss2 = pow(0.001 + roughness, 4.0);
float denom = (ndoth * ndoth*(gloss2 - 1.0) + 1.0);
float power = gloss2 / (3.14159 * denom * denom);
return power;
}

@@ -142,7 +142,7 @@ vec4 evalPBRShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, float m
float power = specularDistribution(roughness, fragNormal, halfDir);
vec3 specular = power * fresnelColor * diffuse;

return vec4(specular, (1.0 - metallic) * diffuse * (1 - fresnelColor.x));
return vec4(specular, (1.0 - metallic) * diffuse * (1.0 - fresnelColor.x));
}
<@endfunc@>

@@ -137,7 +137,7 @@ vec3 integrate(float cosTheta, float skinRadius) {
uniform sampler2D scatteringLUT;

vec3 fetchBRDF(float LdotN, float curvature) {
return texture(scatteringLUT, vec2( clamp(LdotN * 0.5 + 0.5, 0.0, 1.0), clamp(2 * curvature, 0.0, 1.0))).xyz;
return texture(scatteringLUT, vec2( clamp(LdotN * 0.5 + 0.5, 0.0, 1.0), clamp(2.0 * curvature, 0.0, 1.0))).xyz;
}

vec3 fetchBRDFSpectrum(vec3 LdotNSpectrum, float curvature) {

@@ -183,7 +183,7 @@ float tuneCurvatureUnsigned(float curvature) {
}

float unpackCurvature(float packedCurvature) {
return (packedCurvature * 2 - 1);
return (packedCurvature * 2.0 - 1.0);
}

vec3 evalScatteringBentNdotL(vec3 normal, vec3 midNormal, vec3 lowNormal, vec3 lightDir) {

@@ -210,7 +210,7 @@ vec3 evalSkinBRDF(vec3 lightDir, vec3 normal, vec3 midNormal, vec3 lowNormal, fl
return lowNormal * 0.5 + vec3(0.5);
}
if (showCurvature()) {
return (curvature > 0 ? vec3(curvature, 0.0, 0.0) : vec3(0.0, 0.0, -curvature));
return (curvature > 0.0 ? vec3(curvature, 0.0, 0.0) : vec3(0.0, 0.0, -curvature));
}

vec3 bentNdotL = evalScatteringBentNdotL(normal, midNormal, lowNormal, lightDir);
@@ -26,7 +26,7 @@ void main(void) {
);
vec4 pos = UNIT_QUAD[gl_VertexID];

_texCoord0 = (pos.xy + 1) * 0.5;
_texCoord0 = (pos.xy + 1.0) * 0.5;

_texCoord0 *= texcoordFrameTransform.zw;
_texCoord0 += texcoordFrameTransform.xy;

@@ -47,7 +47,7 @@ void main(void) {
);
vec4 pos = UNIT_QUAD[gl_VertexID];

_texCoord0 = vec4((pos.xy + 1) * 0.5, 0.0, 1.0);
_texCoord0 = vec4((pos.xy + 1.0) * 0.5, 0.0, 1.0);

if (cam_isStereo()) {
_texCoord0.x = 0.5 * (_texCoord0.x + cam_getStereoSide());

@@ -60,7 +60,7 @@ void main(void) {
);
vec4 pos = UNIT_QUAD[gl_VertexID];

_texCoord0 = vec4((pos.xy + 1) * 0.5, 0.0, 1.0);
_texCoord0 = vec4((pos.xy + 1.0) * 0.5, 0.0, 1.0);
if (cam_isStereo()) {
_texCoord0.x = 0.5 * (_texCoord0.x + cam_getStereoSide());
}
@@ -2,7 +2,7 @@
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// directional_light.frag
// directional_ambient_light.frag
// fragment shader
//
// Created by Andrzej Kapolka on 9/3/14.

@@ -2,7 +2,7 @@
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// directional_light.frag
// directional_skybox_light.frag
// fragment shader
//
// Created by Sam Gateau on 5/8/2015.
@@ -63,7 +63,6 @@ void OculusBaseDisplayPlugin::customizeContext() {

void OculusBaseDisplayPlugin::uncustomizeContext() {
Parent::uncustomizeContext();
internalPresent();
}

bool OculusBaseDisplayPlugin::internalActivate() {

@@ -635,7 +635,11 @@ void OpenVrDisplayPlugin::postPreview() {
_nextSimPoseData = nextSim;
});
_nextRenderPoseData = nextRender;

// FIXME - this looks wrong!
_hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
#else
_hmdActivityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
#endif
}
@@ -16,6 +16,7 @@

(function() { // BEGIN LOCAL_SCOPE

var BASIC_TIMER_INTERVAL = 50; // 50ms = 20hz
var OVERLAY_WIDTH = 1920;
var OVERLAY_HEIGHT = 1080;
var OVERLAY_DATA = {

@@ -49,6 +50,21 @@ var AWAY_INTRO = {
endFrame: 83.0
};

// MAIN CONTROL
var isEnabled = true;
var wasMuted; // unknown?
var isAway = false; // we start in the un-away state
var wasOverlaysVisible = Menu.isOptionChecked("Overlays");
var eventMappingName = "io.highfidelity.away"; // goActive on hand controller button events, too.
var eventMapping = Controller.newMapping(eventMappingName);
var avatarPosition = MyAvatar.position;
var wasHmdMounted = HMD.mounted;

// some intervals we may create/delete
var avatarMovedInterval;

// prefetch the kneel animation and hold a ref so it's always resident in memory when we need it.
var _animation = AnimationCache.prefetch(AWAY_INTRO.url);
@@ -125,41 +141,28 @@ function maybeMoveOverlay() {
var halfWayBetweenOldAndLookAt = Vec3.multiply(lookAtChange, EASE_BY_RATIO);
var newOverlayPosition = Vec3.sum(lastOverlayPosition, halfWayBetweenOldAndLookAt);
lastOverlayPosition = newOverlayPosition;

var actualOverlayPositon = moveCloserToCamera(lastOverlayPosition);
Overlays.editOverlay(overlayHMD, { visible: true, position: actualOverlayPositon });

// make sure desktop version is hidden
Overlays.editOverlay(overlay, { visible: false });

// also remember avatar position
avatarPosition = MyAvatar.position;

}
}
}

function ifAvatarMovedGoActive() {
if (Vec3.distance(MyAvatar.position, avatarPosition) > AVATAR_MOVE_FOR_ACTIVE_DISTANCE) {
var newAvatarPosition = MyAvatar.position;
if (Vec3.distance(newAvatarPosition, avatarPosition) > AVATAR_MOVE_FOR_ACTIVE_DISTANCE) {
goActive();
}
avatarPosition = newAvatarPosition;
}

// MAIN CONTROL
var isEnabled = true;
var wasMuted, isAway;
var wasOverlaysVisible = Menu.isOptionChecked("Overlays");
var eventMappingName = "io.highfidelity.away"; // goActive on hand controller button events, too.
var eventMapping = Controller.newMapping(eventMappingName);
var avatarPosition = MyAvatar.position;

// backward compatible version of getting HMD.mounted, so it works in old clients
function safeGetHMDMounted() {
if (HMD.mounted === undefined) {
return true;
}
return HMD.mounted;
}

var wasHmdMounted = safeGetHMDMounted();

function goAway() {
function goAway(fromStartup) {
if (!isEnabled || isAway) {
return;
}

@@ -167,7 +170,6 @@ function goAway() {
UserActivityLogger.toggledAway(true);

isAway = true;
print('going "away"');
wasMuted = AudioDevice.getMuted();
if (!wasMuted) {
AudioDevice.toggleMute();
@@ -189,10 +191,21 @@ function goAway() {
// For HMD, the hmd preview will show the system mouse because of allowMouseCapture,
// but we want to turn off our Reticle so that we don't get two in preview and a stuck one in headset.
Reticle.visible = !HMD.active;
wasHmdMounted = safeGetHMDMounted(); // always remember the correct state
wasHmdMounted = HMD.mounted; // always remember the correct state

avatarPosition = MyAvatar.position;
Script.update.connect(ifAvatarMovedGoActive);

// If we're entering away mode from some other state than startup, then we create our move timer immediately.
// However if we're just starting up, we need to delay this process so that we don't think the initial teleport
// is actually a move.
if (fromStartup === undefined || fromStartup === false) {
avatarMovedInterval = Script.setInterval(ifAvatarMovedGoActive, BASIC_TIMER_INTERVAL);
} else {
var WAIT_FOR_MOVE_ON_STARTUP = 3000; // 3 seconds
Script.setTimeout(function() {
avatarMovedInterval = Script.setInterval(ifAvatarMovedGoActive, BASIC_TIMER_INTERVAL);
}, WAIT_FOR_MOVE_ON_STARTUP);
}
}

function goActive() {

@@ -203,7 +216,6 @@ function goActive() {
UserActivityLogger.toggledAway(false);

isAway = false;
print('going "active"');
if (!wasMuted) {
AudioDevice.toggleMute();
}

@@ -230,9 +242,9 @@ function goActive() {
if (HMD.active) {
Reticle.position = HMD.getHUDLookAtPosition2D();
}
wasHmdMounted = safeGetHMDMounted(); // always remember the correct state
wasHmdMounted = HMD.mounted; // always remember the correct state

Script.update.disconnect(ifAvatarMovedGoActive);
Script.clearInterval(avatarMovedInterval);
}

function maybeGoActive(event) {

@@ -250,10 +262,12 @@ var wasHmdActive = HMD.active;
var wasMouseCaptured = Reticle.mouseCaptured;

function maybeGoAway() {
// If our active state changed (went to or from HMD mode), and we are now in the HMD, go into away
if (HMD.active !== wasHmdActive) {
wasHmdActive = !wasHmdActive;
if (wasHmdActive) {
goAway();
return;
}
}
@@ -264,19 +278,30 @@ function maybeGoAway() {
wasMouseCaptured = !wasMouseCaptured;
if (!wasMouseCaptured) {
goAway();
return;
}
}

// If you've removed your HMD from your head, and we can detect it, we will also go away...
var hmdMounted = safeGetHMDMounted();
if (HMD.active && !hmdMounted && wasHmdMounted) {
wasHmdMounted = hmdMounted;
goAway();
if (HMD.mounted != wasHmdMounted) {
wasHmdMounted = HMD.mounted;
print("HMD mounted changed...");

// We're putting the HMD on... switch to those devices
if (HMD.mounted) {
print("NOW mounted...");
} else {
print("HMD NOW un-mounted...");

if (HMD.active) {
goAway();
return;
}
}
}
}

function setEnabled(value) {
print("setting away enabled: ", value);
if (!value) {
goActive();
}

@@ -293,9 +318,12 @@ var handleMessage = function(channel, message, sender) {
Messages.subscribe(CHANNEL_AWAY_ENABLE);
Messages.messageReceived.connect(handleMessage);

Script.update.connect(maybeMoveOverlay);
var maybeIntervalTimer = Script.setInterval(function(){
maybeMoveOverlay();
maybeGoAway();
}, BASIC_TIMER_INTERVAL);

Script.update.connect(maybeGoAway);
Controller.mousePressEvent.connect(goActive);
Controller.keyPressEvent.connect(maybeGoActive);
// Note peek() so as to not interfere with other mappings.

@@ -316,11 +344,17 @@ eventMapping.from(Controller.Standard.Start).peek().to(goActive);
Controller.enableMapping(eventMappingName);

Script.scriptEnding.connect(function () {
Script.update.disconnect(maybeGoAway);
Script.clearInterval(maybeIntervalTimer);
goActive();
Controller.disableMapping(eventMappingName);
Controller.mousePressEvent.disconnect(goActive);
Controller.keyPressEvent.disconnect(maybeGoActive);
});

if (HMD.active && !HMD.mounted) {
print("Starting script, while HMD is active and not mounted...");
goAway(true);
}

}()); // END LOCAL_SCOPE
@@ -26,7 +26,7 @@ var WANT_DEBUG = false;
var WANT_DEBUG_STATE = false;
var WANT_DEBUG_SEARCH_NAME = null;

var FORCE_IGNORE_IK = true;
var FORCE_IGNORE_IK = false;
var SHOW_GRAB_POINT_SPHERE = true;

//

@@ -112,7 +112,7 @@ var CHECK_TOO_FAR_UNEQUIP_TIME = 0.3; // seconds, duration between checks

var GRAB_POINT_SPHERE_RADIUS = NEAR_GRAB_RADIUS;
var GRAB_POINT_SPHERE_COLOR = { red: 20, green: 90, blue: 238 };
var GRAB_POINT_SPHERE_COLOR = { red: 240, green: 240, blue: 240 };
var GRAB_POINT_SPHERE_ALPHA = 0.85;

@@ -1075,12 +1075,6 @@ function MyController(hand) {
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var worldHandPosition = controllerLocation.position;

if (controllerLocation.valid) {
this.grabPointSphereOn();
} else {
this.grabPointSphereOff();
}

var candidateEntities = Entities.findEntities(worldHandPosition, MAX_EQUIP_HOTSPOT_RADIUS);
entityPropertiesCache.addEntities(candidateEntities);
var potentialEquipHotspot = this.chooseBestEquipHotspot(candidateEntities);

@@ -1103,9 +1097,11 @@ function MyController(hand) {
if (!this.grabPointIntersectsEntity) {
Controller.triggerHapticPulse(1, 20, this.hand);
this.grabPointIntersectsEntity = true;
this.grabPointSphereOn();
}
} else {
this.grabPointIntersectsEntity = false;
this.grabPointSphereOff();
}
};

@@ -1386,10 +1382,11 @@ function MyController(hand) {
this.chooseBestEquipHotspot = function(candidateEntities) {
var DISTANCE = 0;
var equippableHotspots = this.chooseNearEquipHotspots(candidateEntities, DISTANCE);
var _this = this;
if (equippableHotspots.length > 0) {
// sort by distance
equippableHotspots.sort(function(a, b) {
var handControllerLocation = getControllerWorldLocation(this.handToController(), true);
var handControllerLocation = getControllerWorldLocation(_this.handToController(), true);
var aDistance = Vec3.distance(a.worldPosition, handControllerLocation.position);
var bDistance = Vec3.distance(b.worldPosition, handControllerLocation.position);
return aDistance - bDistance;

@@ -1427,12 +1424,6 @@ function MyController(hand) {
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var handPosition = controllerLocation.position;

if (controllerLocation.valid) {
this.grabPointSphereOn();
} else {
this.grabPointSphereOff();
}

var rayPickInfo = this.calcRayPickInfo(this.hand);

if (rayPickInfo.entityID) {

@@ -1906,7 +1897,7 @@ function MyController(hand) {
if (FORCE_IGNORE_IK) {
this.ignoreIK = true;
} else {
this.ignoreIK = grabbableData.ignoreIK ? grabbableData.ignoreIK : false;
this.ignoreIK = (grabbableData.ignoreIK !== undefined) ? grabbableData.ignoreIK : true;
}

var handRotation;
@@ -830,7 +830,7 @@ function loaded() {

elGrabbable.checked = properties.dynamic;
elWantsTrigger.checked = false;
elIgnoreIK.checked = false;
elIgnoreIK.checked = true;
var parsedUserData = {}
try {
parsedUserData = JSON.parse(properties.userData);

@@ -1143,7 +1143,7 @@ function loaded() {
userDataChanger("grabbableKey", "wantsTrigger", elWantsTrigger, elUserData, false);
});
elIgnoreIK.addEventListener('change', function() {
userDataChanger("grabbableKey", "ignoreIK", elIgnoreIK, elUserData, false);
userDataChanger("grabbableKey", "ignoreIK", elIgnoreIK, elUserData, true);
});

elCollisionSoundURL.addEventListener('change', createEmitTextPropertyUpdateFunction('collisionSoundURL'));

@@ -1596,4 +1596,4 @@ function loaded() {
document.addEventListener("contextmenu", function(event) {
event.preventDefault();
}, false);
}
}
@@ -10,9 +10,10 @@

// var GRAB_POINT_SPHERE_OFFSET = { x: 0, y: 0.2, z: 0 };
// var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.175, z: 0.04 };
// var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.32, z: 0.04 };

// this offset needs to match the one in libraries/display-plugins/src/display-plugins/hmd/HmdDisplayPlugin.cpp
var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.32, z: 0.04 };
var GRAB_POINT_SPHERE_OFFSET = { x: 0.0, y: 0.175, z: 0.0 };

getGrabPointSphereOffset = function(handController) {
if (handController === Controller.Standard.RightHand) {
@@ -1133,6 +1133,7 @@ var usersWindow = (function () {
if (VISIBILITY_VALUES.indexOf(myVisibility) === -1) {
myVisibility = VISIBILITY_FRIENDS;
}
GlobalServices.findableBy = myVisibility;

visibilityControl = new PopUpMenu({
prompt: VISIBILITY_PROMPT,