Merge branch 'master' of https://github.com/highfidelity/hifi into crowd-animation

howard-stearns 2016-10-12 14:38:07 -07:00
commit 055e2ffcb0
46 changed files with 522 additions and 268 deletions

View file

@ -15,6 +15,7 @@
#include <QtNetwork/QNetworkDiskCache>
#include <QtNetwork/QNetworkRequest>
#include <QtNetwork/QNetworkReply>
#include <QThread>
#include <AssetClient.h>
#include <AvatarHashMap.h>
@ -33,6 +34,9 @@
#include <recording/Recorder.h>
#include <recording/Frame.h>
#include <plugins/CodecPlugin.h>
#include <plugins/PluginManager.h>
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
@ -42,6 +46,7 @@
#include "AbstractAudioInterface.h"
#include "Agent.h"
#include "AvatarAudioTimer.h"
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
@ -72,6 +77,17 @@ Agent::Agent(ReceivedMessage& message) :
{ PacketType::OctreeStats, PacketType::EntityData, PacketType::EntityErase },
this, "handleOctreePacket");
packetReceiver.registerListener(PacketType::Jurisdiction, this, "handleJurisdictionPacket");
packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");
}
void Agent::playAvatarSound(SharedSoundPointer sound) {
// this must happen on Agent's main thread
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "playAvatarSound", Q_ARG(SharedSoundPointer, sound));
return;
} else {
setAvatarSound(sound);
}
}
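The guard above is the standard Qt idiom for forcing a slot to run on its owner's thread: if called from elsewhere, re-dispatch via a queued QMetaObject::invokeMethod and return. A minimal sketch of the same pattern outside the hifi codebase (class and slot names here are illustrative; the class needs moc processing, and the argument type must be registered with Qt's metatype system, as SharedSoundPointer presumably is):

```cpp
#include <QtCore/QObject>
#include <QtCore/QThread>
#include <QtCore/QMetaObject>
#include <QtCore/QDebug>

class Worker : public QObject {
    Q_OBJECT
public slots:
    // Safe to call from any thread; the body always executes on Worker's thread.
    void doWork(int value) {
        if (QThread::currentThread() != thread()) {
            // queue the call onto our own thread and bail out here
            QMetaObject::invokeMethod(this, "doWork", Q_ARG(int, value));
            return;
        }
        qDebug() << "doWork running on" << QThread::currentThread() << "value" << value;
    }
};
```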
void Agent::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
@ -118,7 +134,6 @@ void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
_receivedAudioStream.parseData(*message);
_lastReceivedAudioLoudness = _receivedAudioStream.getNextOutputFrameLoudness();
_receivedAudioStream.clearBuffer();
}
@ -214,6 +229,59 @@ void Agent::nodeActivated(SharedNodePointer activatedNode) {
_pendingScriptRequest = nullptr;
}
if (activatedNode->getType() == NodeType::AudioMixer) {
negotiateAudioFormat();
}
}
void Agent::negotiateAudioFormat() {
auto nodeList = DependencyManager::get<NodeList>();
auto negotiateFormatPacket = NLPacket::create(PacketType::NegotiateAudioFormat);
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
quint8 numberOfCodecs = (quint8)codecPlugins.size();
negotiateFormatPacket->writePrimitive(numberOfCodecs);
for (auto& plugin : codecPlugins) {
auto codecName = plugin->getName();
negotiateFormatPacket->writeString(codecName);
}
// grab our audio mixer from the NodeList, if it exists
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (audioMixer) {
// send off this negotiate audio format packet
nodeList->sendPacket(std::move(negotiateFormatPacket), *audioMixer);
}
}
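The body of the negotiate packet written above is simply a codec count followed by codec name strings. A rough sketch of an equivalent payload built with plain QDataStream, assuming writeString produces a length-prefixed UTF-8 encoding (that encoding detail is an assumption, not something this diff confirms):

```cpp
#include <QtCore/QByteArray>
#include <QtCore/QDataStream>
#include <QtCore/QIODevice>
#include <QtCore/QStringList>

// Illustrative only: mirrors the writePrimitive/writeString calls above.
QByteArray buildNegotiatePayload(const QStringList& codecNames) {
    QByteArray payload;
    QDataStream stream(&payload, QIODevice::WriteOnly);
    stream << (quint8)codecNames.size();    // number of codecs
    for (const QString& name : codecNames) {
        QByteArray utf8 = name.toUtf8();
        stream << (quint32)utf8.size();     // assumed length prefix
        stream.writeRawData(utf8.constData(), utf8.size());
    }
    return payload;
}
```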
void Agent::handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message) {
QString selectedCodecName = message->readString();
selectAudioFormat(selectedCodecName);
}
void Agent::selectAudioFormat(const QString& selectedCodecName) {
_selectedCodecName = selectedCodecName;
qDebug() << "Selected Codec:" << _selectedCodecName;
// release any old codec encoder/decoder first...
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
_encoder = nullptr;
_codec = nullptr;
}
_receivedAudioStream.cleanupCodec();
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
for (auto& plugin : codecPlugins) {
if (_selectedCodecName == plugin->getName()) {
_codec = plugin;
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
qDebug() << "Selected Codec Plugin:" << _codec.get();
break;
}
}
}
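Between this function and processAgentAvatarAudio below, the codec surface the Agent relies on is small: getName, createEncoder, releaseEncoder, and Encoder::encode. A hypothetical pass-through codec sketched against that surface (the real declarations live in plugins/CodecPlugin.h and may differ):

```cpp
#include <QtCore/QByteArray>

// Hypothetical stand-ins for the interface this file exercises.
class Encoder {
public:
    virtual ~Encoder() = default;
    virtual void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) = 0;
};

// A null codec: "encoding" is a straight copy, matching the fallback branch
// in processAgentAvatarAudio when no encoder has been negotiated.
class PassthroughEncoder : public Encoder {
public:
    void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) override {
        encodedBuffer = decodedBuffer;
    }
};
```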
void Agent::scriptRequestFinished() {
@ -314,10 +382,18 @@ void Agent::executeScript() {
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
// wire up our additional agent related processing to the update signal
QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatarAndAudio);
// 100Hz timer for audio
AvatarAudioTimer* audioTimerWorker = new AvatarAudioTimer();
audioTimerWorker->moveToThread(&_avatarAudioTimerThread);
connect(audioTimerWorker, &AvatarAudioTimer::avatarTick, this, &Agent::processAgentAvatarAudio);
connect(this, &Agent::startAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::start);
connect(this, &Agent::stopAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::stop);
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
_avatarAudioTimerThread.start();
// 60Hz timer for avatar
QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatar);
_scriptEngine->run();
Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
@ -343,6 +419,10 @@ void Agent::setIsAvatar(bool isAvatar) {
// start the timers
_avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS);
// tell the avatarAudioTimer to start ticking
emit startAvatarAudioTimer();
}
if (!_isAvatar) {
@ -367,6 +447,7 @@ void Agent::setIsAvatar(bool isAvatar) {
nodeList->sendPacketList(std::move(packetList), *node);
});
}
emit stopAvatarAudioTimer();
}
}
@ -377,11 +458,9 @@ void Agent::sendAvatarIdentityPacket() {
}
}
void Agent::processAgentAvatarAndAudio(float deltaTime) {
void Agent::processAgentAvatar() {
if (!_scriptEngine->isFinished() && _isAvatar) {
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
const int SCRIPT_AUDIO_BUFFER_SAMPLES = AudioConstants::SAMPLE_RATE / SCRIPT_FPS + 0.5;
const int SCRIPT_AUDIO_BUFFER_BYTES = SCRIPT_AUDIO_BUFFER_SAMPLES * sizeof(int16_t);
QByteArray avatarByteArray = scriptedAvatar->toByteArray(true, randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
scriptedAvatar->doneEncoding(true);
@ -395,95 +474,106 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
}
}
if (_isListeningToAudioStream || _avatarSound) {
// if we have an avatar audio stream then send it out to our audio-mixer
bool silentFrame = true;
void Agent::processAgentAvatarAudio() {
if (_isAvatar && (_isListeningToAudioStream || _avatarSound)) {
// if we have an avatar audio stream then send it out to our audio-mixer
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
bool silentFrame = true;
int16_t numAvailableSamples = SCRIPT_AUDIO_BUFFER_SAMPLES;
const int16_t* nextSoundOutput = NULL;
int16_t numAvailableSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
const int16_t* nextSoundOutput = NULL;
if (_avatarSound) {
const QByteArray& soundByteArray = _avatarSound->getByteArray();
nextSoundOutput = reinterpret_cast<const int16_t*>(soundByteArray.data()
if (_avatarSound) {
const QByteArray& soundByteArray = _avatarSound->getByteArray();
nextSoundOutput = reinterpret_cast<const int16_t*>(soundByteArray.data()
+ _numAvatarSoundSentBytes);
int numAvailableBytes = (soundByteArray.size() - _numAvatarSoundSentBytes) > SCRIPT_AUDIO_BUFFER_BYTES
? SCRIPT_AUDIO_BUFFER_BYTES
: soundByteArray.size() - _numAvatarSoundSentBytes;
numAvailableSamples = (int16_t)numAvailableBytes / sizeof(int16_t);
int numAvailableBytes = (soundByteArray.size() - _numAvatarSoundSentBytes) > AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
? AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
: soundByteArray.size() - _numAvatarSoundSentBytes;
numAvailableSamples = (int16_t)numAvailableBytes / sizeof(int16_t);
// check if all of the samples to be sent are silence
for (int i = 0; i < numAvailableSamples; ++i) {
if (nextSoundOutput[i] != 0) {
silentFrame = false;
break;
}
}
_numAvatarSoundSentBytes += numAvailableBytes;
if (_numAvatarSoundSentBytes == soundByteArray.size()) {
// we're done with this sound object - so set our pointer back to NULL
// and our sent bytes back to zero
_avatarSound.clear();
_numAvatarSoundSentBytes = 0;
// check if all of the samples to be sent are silence
for (int i = 0; i < numAvailableSamples; ++i) {
if (nextSoundOutput[i] != 0) {
silentFrame = false;
break;
}
}
auto audioPacket = NLPacket::create(silentFrame
_numAvatarSoundSentBytes += numAvailableBytes;
if (_numAvatarSoundSentBytes == soundByteArray.size()) {
// we're done with this sound object - so set our pointer back to NULL
// and our sent bytes back to zero
_avatarSound.clear();
_numAvatarSoundSentBytes = 0;
}
}
auto audioPacket = NLPacket::create(silentFrame
? PacketType::SilentAudioFrame
: PacketType::MicrophoneAudioNoEcho);
// seek past the sequence number, will be packed when destination node is known
audioPacket->seek(sizeof(quint16));
// seek past the sequence number, will be packed when destination node is known
audioPacket->seek(sizeof(quint16));
if (silentFrame) {
if (!_isListeningToAudioStream) {
// if we have a silent frame and we're not listening then just send nothing and break out of here
return;
}
// write the number of silent samples so the audio-mixer can uphold timing
audioPacket->writePrimitive(SCRIPT_AUDIO_BUFFER_SAMPLES);
// use the orientation and position of this avatar for the source of this audio
audioPacket->writePrimitive(scriptedAvatar->getPosition());
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
audioPacket->writePrimitive(headOrientation);
} else if (nextSoundOutput) {
// write the codec
QString codecName;
audioPacket->writeString(codecName);
// assume scripted avatar audio is mono and set channel flag to zero
audioPacket->writePrimitive((quint8)0);
// use the orientation and position of this avatar for the source of this audio
audioPacket->writePrimitive(scriptedAvatar->getPosition());
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
audioPacket->writePrimitive(headOrientation);
// write the raw audio data
audioPacket->write(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples * sizeof(int16_t));
if (silentFrame) {
if (!_isListeningToAudioStream) {
// if we have a silent frame and we're not listening then just send nothing and break out of here
return;
}
// write audio packet to AudioMixer nodes
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node){
// only send to nodes of type AudioMixer
if (node->getType() == NodeType::AudioMixer) {
// pack sequence number
quint16 sequence = _outgoingScriptAudioSequenceNumbers[node->getUUID()]++;
audioPacket->seek(0);
audioPacket->writePrimitive(sequence);
// write the number of silent samples so the audio-mixer can uphold timing
audioPacket->writePrimitive(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// use the orientation and position of this avatar for the source of this audio
audioPacket->writePrimitive(scriptedAvatar->getPosition());
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
audioPacket->writePrimitive(headOrientation);
} else if (nextSoundOutput) {
// write the codec
audioPacket->writeString(_selectedCodecName);
// assume scripted avatar audio is mono and set channel flag to zero
audioPacket->writePrimitive((quint8)0);
// use the orientation and position of this avatar for the source of this audio
audioPacket->writePrimitive(scriptedAvatar->getPosition());
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
audioPacket->writePrimitive(headOrientation);
// encode it
if(_encoder) {
QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
QByteArray encodedBuffer;
_encoder->encode(decodedBuffer, encodedBuffer);
audioPacket->write(encodedBuffer.data(), encodedBuffer.size());
} else {
audioPacket->write(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
}
// send audio packet
nodeList->sendUnreliablePacket(*audioPacket, *node);
}
});
}
// write audio packet to AudioMixer nodes
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node) {
// only send to nodes of type AudioMixer
if (node->getType() == NodeType::AudioMixer) {
// pack sequence number
quint16 sequence = _outgoingScriptAudioSequenceNumbers[node->getUUID()]++;
audioPacket->seek(0);
audioPacket->writePrimitive(sequence);
// send audio packet
nodeList->sendUnreliablePacket(*audioPacket, *node);
}
});
}
}
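Pulling the writes above together, the packet body after the 2-byte sequence number (seeked past first, then packed per destination node) lays out as follows; field order comes straight from the writePrimitive/writeString calls, while the exact primitive widths are assumptions:

```cpp
// Sketch of the outgoing audio packet layouts, as written above.
//
// PacketType::SilentAudioFrame:
//   quint16  sequence          // packed last, per AudioMixer node
//   int      numSilentSamples  // NETWORK_FRAME_SAMPLES_PER_CHANNEL, keeps mixer timing
//   vec3     position          // scripted avatar position
//   quat     headOrientation
//
// PacketType::MicrophoneAudioNoEcho:
//   quint16  sequence
//   string   codecName         // _selectedCodecName
//   quint8   channelFlag       // 0 == mono
//   vec3     position
//   quat     headOrientation
//   bytes    audio             // encoded if _encoder is set, else raw int16 PCM
```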
@ -501,4 +591,13 @@ void Agent::aboutToFinish() {
// cleanup the AudioInjectorManager (and any still running injectors)
DependencyManager::destroy<AudioInjectorManager>();
emit stopAvatarAudioTimer();
_avatarAudioTimerThread.quit();
// cleanup codec & encoder
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
_encoder = nullptr;
}
}

View file

@ -18,6 +18,7 @@
#include <QtScript/QScriptEngine>
#include <QtCore/QObject>
#include <QtCore/QUrl>
#include <QtCore/QTimer>
#include <QUuid>
#include <EntityEditPacketSender.h>
@ -26,8 +27,9 @@
#include <ScriptEngine.h>
#include <ThreadedAssignment.h>
#include "MixedAudioStream.h"
#include <plugins/CodecPlugin.h>
#include "MixedAudioStream.h"
class Agent : public ThreadedAssignment {
Q_OBJECT
@ -56,7 +58,7 @@ public:
public slots:
void run() override;
void playAvatarSound(SharedSoundPointer avatarSound) { setAvatarSound(avatarSound); }
void playAvatarSound(SharedSoundPointer avatarSound);
private slots:
void requestScript();
@ -66,12 +68,20 @@ private slots:
void handleAudioPacket(QSharedPointer<ReceivedMessage> message);
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void processAgentAvatarAndAudio(float deltaTime);
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
void nodeActivated(SharedNodePointer activatedNode);
void processAgentAvatar();
void processAgentAvatarAudio();
signals:
void startAvatarAudioTimer();
void stopAvatarAudioTimer();
private:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);
std::unique_ptr<ScriptEngine> _scriptEngine;
EntityEditPacketSender _entityEditSender;
EntityTreeHeadlessViewer _entityViewer;
@ -92,7 +102,11 @@ private:
bool _isAvatar = false;
QTimer* _avatarIdentityTimer = nullptr;
QHash<QUuid, quint16> _outgoingScriptAudioSequenceNumbers;
CodecPluginPointer _codec;
QString _selectedCodecName;
Encoder* _encoder { nullptr };
QThread _avatarAudioTimerThread;
};
#endif // hifi_Agent_h

View file

@ -0,0 +1,33 @@
//
// AvatarAudioTimer.cpp
// assignment-client/src
//
// Created by David Kelly on 10/12/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDebug>
#include <SharedUtil.h>
#include "AvatarAudioTimer.h"
// this should send a signal every 10ms, with pretty good precision. Hardcoding
// to 10ms since that's what you'd want for audio.
void AvatarAudioTimer::start() {
qDebug() << "AvatarAudioTimer::start called";
auto startTime = usecTimestampNow();
quint64 frameCounter = 0;
const int TARGET_INTERVAL_USEC = 10000; // 10ms
while (!_quit) {
frameCounter++;
// simplest possible timer
quint64 targetTime = startTime + frameCounter * TARGET_INTERVAL_USEC;
quint64 now = usecTimestampNow();
quint64 interval = (targetTime > now) ? targetTime - now : 0; // guard against unsigned underflow when behind schedule
usleep(interval);
emit avatarTick();
}
qDebug() << "AvatarAudioTimer is finished";
}
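The loop schedules each tick against an absolute deadline (startTime + frameCounter * interval) instead of sleeping a fixed 10ms per pass, so timing error from a late wakeup does not accumulate across frames. The same technique in portable C++, as a minimal sketch:

```cpp
#include <chrono>
#include <cstdint>
#include <iostream>
#include <thread>

int main() {
    using clock = std::chrono::steady_clock;
    const auto interval = std::chrono::milliseconds(10);
    const auto start = clock::now();
    for (uint64_t frame = 1; frame <= 100; ++frame) {
        // absolute deadline: a slow iteration is corrected on the next tick
        std::this_thread::sleep_until(start + frame * interval);
        std::cout << "tick " << frame << "\n";
    }
    return 0;
}
```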

View file

@ -0,0 +1,31 @@
//
// AvatarAudioTimer.h
// assignment-client/src
//
// Created by David Kelly on 10/12/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarAudioTimer_h
#define hifi_AvatarAudioTimer_h
#include <QtCore/QObject>
class AvatarAudioTimer : public QObject {
Q_OBJECT
signals:
void avatarTick();
public slots:
void start();
void stop() { _quit = true; }
private:
bool _quit { false };
};
#endif //hifi_AvatarAudioTimer_h
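Since start() blocks in a loop, it must be invoked through a queued connection on a worker thread; calling it directly would hang the caller. A minimal usage sketch, mirroring the wiring Agent::executeScript sets up (helper name is illustrative):

```cpp
#include <QtCore/QObject>
#include <QtCore/QThread>
#include "AvatarAudioTimer.h"

void startAudioTicker() {
    auto* thread = new QThread();
    auto* timer = new AvatarAudioTimer();
    timer->moveToThread(thread);
    // queued: start() runs (and blocks) on the worker thread, not here
    QObject::connect(thread, &QThread::started, timer, &AvatarAudioTimer::start);
    QObject::connect(timer, &AvatarAudioTimer::avatarTick,
                     [] { /* ~100Hz audio work */ });
    QObject::connect(thread, &QThread::finished, timer, &QObject::deleteLater);
    thread->start();
}
```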

View file

@ -1,28 +1,49 @@
{
"name": "XBox to Standard",
"channels": [
{ "from": "GamePad.LY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.LY" },
{ "from": "GamePad.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.LX" },
{ "from": "GamePad.LY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateZ" },
{ "from": "GamePad.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateX" },
{ "from": "GamePad.LT", "to": "Standard.LT" },
{ "from": "GamePad.LB", "to": "Standard.LB" },
{ "from": "GamePad.LS", "to": "Standard.LS" },
{ "from": "GamePad.RY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.RY" },
{ "from": "GamePad.RX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.RX" },
{ "from": "GamePad.RX",
"when": [ "Application.InHMD", "Application.SnapTurn" ],
"to": "Actions.StepYaw",
"filters":
[
{ "type": "deadZone", "min": 0.15 },
"constrainToInteger",
{ "type": "pulse", "interval": 0.25 },
{ "type": "scale", "scale": 22.5 }
]
},
{ "from": "GamePad.RX", "to": "Actions.Yaw" },
{ "from": "GamePad.RY",
"to": "Actions.VERTICAL_UP",
"filters":
[
{ "type": "deadZone", "min": 0.95 },
"invert"
]
},
{ "from": "GamePad.RT", "to": "Standard.RT" },
{ "from": "GamePad.RB", "to": "Standard.RB" },
{ "from": "GamePad.RS", "to": "Standard.RS" },
{ "from": "GamePad.Back", "to": "Standard.Back" },
{ "from": "GamePad.Start", "to": "Standard.Start" },
{ "from": "GamePad.Start", "to": "Actions.CycleCamera" },
{ "from": "GamePad.Back", "to": "Actions.ContextMenu" },
{ "from": [ "GamePad.DU", "GamePad.DL", "GamePad.DR", "GamePad.DD" ], "to": "Standard.LeftPrimaryThumb", "peek": true },
{ "from": "GamePad.DU", "to": "Standard.DU" },
{ "from": "GamePad.DD", "to": "Standard.DD" },
{ "from": "GamePad.DL", "to": "Standard.DL" },
{ "from": "GamePad.DR", "to": "Standard.DR" },
{ "from": [ "GamePad.A", "GamePad.B", "GamePad.X", "GamePad.Y" ], "to": "Standard.RightPrimaryThumb", "peek": true },
{ "from": [ "GamePad.Y" ], "to": "Standard.RightPrimaryThumb", "peek": true },
{ "from": "GamePad.A", "to": "Standard.A" },
{ "from": "GamePad.B", "to": "Standard.B" },
{ "from": "GamePad.X", "to": "Standard.X" },

View file

@ -184,7 +184,7 @@ ScrollingWindow {
prompt.selected.connect(function (jsonResult) {
if (jsonResult) {
var result = JSON.parse(jsonResult);
var url = result.textInput;
var url = result.textInput.trim();
var shapeType;
switch (result.comboBox) {
case SHAPE_TYPE_SIMPLE_HULL:

View file

@ -32,6 +32,8 @@ FocusScope {
readonly property ComboBox control: comboBox
signal accepted();
implicitHeight: comboBox.height;
focus: true
@ -134,6 +136,7 @@ FocusScope {
function hideList() {
popup.visible = false;
scrollView.hoverEnabled = false;
root.accepted();
}
FocusScope {

View file

@ -211,6 +211,7 @@ ModalWindow {
left: parent.left;
bottom: parent.bottom;
leftMargin: 6; // Magic number to align with warning icon
bottomMargin: 6;
}
}
@ -224,7 +225,10 @@ ModalWindow {
bottom: parent.bottom;
}
model: root.comboBox ? root.comboBox.items : [];
onCurrentTextChanged: updateCheckbox();
onAccepted: {
updateCheckbox();
focus = true;
}
}
}
@ -336,6 +340,7 @@ ModalWindow {
Component.onCompleted: {
updateIcon();
updateCheckbox();
d.resize();
textField.forceActiveFocus();
}

View file

@ -867,6 +867,10 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
void AudioClient::handleAudioInput() {
if (!_inputDevice) {
return;
}
// input samples required to produce exactly NETWORK_FRAME_SAMPLES of output
const int inputSamplesRequired = (_inputToNetworkResampler ?
_inputToNetworkResampler->getMinInput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) :

View file

@ -95,6 +95,11 @@ namespace controller {
return getValue(Input(device, source, ChannelType::BUTTON).getID());
}
float ScriptingInterface::getAxisValue(int source) const {
auto userInputMapper = DependencyManager::get<UserInputMapper>();
return userInputMapper->getValue(Input((uint32_t)source));
}
float ScriptingInterface::getAxisValue(StandardAxisChannel source, uint16_t device) const {
return getValue(Input(device, source, ChannelType::AXIS).getID());
}

View file

@ -81,6 +81,7 @@ namespace controller {
Q_INVOKABLE float getValue(const int& source) const;
Q_INVOKABLE float getButtonValue(StandardButtonChannel source, uint16_t device = 0) const;
Q_INVOKABLE float getAxisValue(StandardAxisChannel source, uint16_t device = 0) const;
Q_INVOKABLE float getAxisValue(int source) const;
Q_INVOKABLE Pose getPoseValue(const int& source) const;
Q_INVOKABLE Pose getPoseValue(StandardPoseChannel source, uint16_t device = 0) const;

View file

@ -27,19 +27,27 @@ AnyEndpoint::AnyEndpoint(Endpoint::List children) : Endpoint(Input::INVALID_INPU
}
}
// The value of an AnyEndpoint is considered to be the maximum absolute value
// among its children; this handles anys of multiple axis values as well as single values.
float AnyEndpoint::peek() const {
float result = 0;
float result = 0.0f;
for (auto& child : _children) {
result = std::max(result, child->peek());
auto childValue = child->peek();
if (std::abs(childValue) > std::abs(result)) {
result = childValue;
}
}
return result;
}
// Fetching the value must trigger any necessary side effects of value() on ALL the children.
float AnyEndpoint::value() {
float result = 0;
float result = 0.0f;
for (auto& child : _children) {
result = std::max(result, child->value());
auto childValue = child->value();
if (std::abs(childValue) > std::abs(result)) {
result = childValue;
}
}
return result;
}
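In other words, the endpoint now keeps whichever child value has the greatest magnitude while preserving its sign, so a stick pushed to -0.8 wins over a button at +0.3; the old std::max code silently discarded negative deflections. The reduction in isolation, as a tiny sketch:

```cpp
#include <cmath>
#include <vector>

// Pick the value with the greatest absolute magnitude, keeping its sign.
float maxByMagnitude(const std::vector<float>& values) {
    float result = 0.0f;
    for (float v : values) {
        if (std::abs(v) > std::abs(result)) {
            result = v;
        }
    }
    return result;
}

// maxByMagnitude({ 0.3f, -0.8f }) == -0.8f; std::max would have returned 0.3f.
```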

View file

@ -374,7 +374,8 @@ void HmdDisplayPlugin::updateFrameData() {
}
// this offset needs to match GRAB_POINT_SPHERE_OFFSET in scripts/system/libraries/controllers.js
static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.1f, 0.04f, -0.32f);
//static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.1f, 0.04f, -0.32f);
static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.0f, 0.0f, -0.175f);
vec3 grabPointOffset = GRAB_POINT_SPHERE_OFFSET;
if (i == 0) {
grabPointOffset.x *= -1.0f; // this changes between left and right hands

View file

@ -666,7 +666,6 @@ void GLBackend::recycle() const {
for (auto pair : externalTexturesTrash) {
auto fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
pair.second(pair.first, fence);
decrementTextureGPUCount();
}
}

View file

@ -192,6 +192,11 @@ void GLBackend::resetResourceStage() {
void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
if (slot >= (GLuint) MAX_NUM_RESOURCE_TEXTURES) {
// "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" + slot + " which doesn't exist. MaxNumResourceTextures = " + getMaxNumResourceTextures());
return;
}
TexturePointer resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
if (!resourceTexture) {

View file

@ -20,9 +20,20 @@ std::shared_ptr<GLTextureTransferHelper> GLTexture::_textureTransferHelper;
// FIXME placeholder for texture memory over-use
#define DEFAULT_MAX_MEMORY_MB 256
#define MIN_FREE_GPU_MEMORY_PERCENTAGE 0.25f
#define OVER_MEMORY_PRESSURE 2.0f
// FIXME other apps show things like Oculus home consuming large amounts of GPU memory
// which causes us to blur textures needlessly (since other app GPU memory usage will likely
// be swapped out and not cause any actual impact)
//#define CHECK_MIN_FREE_GPU_MEMORY
#ifdef CHECK_MIN_FREE_GPU_MEMORY
#define MIN_FREE_GPU_MEMORY_PERCENTAGE 0.25f
#endif
// Allow 65% of all available GPU memory to be consumed by textures
// FIXME overly conservative?
#define MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE 0.65f
const GLenum GLTexture::CUBE_FACE_LAYOUT[6] = {
GL_TEXTURE_CUBE_MAP_POSITIVE_X, GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
@ -107,6 +118,7 @@ float GLTexture::getMemoryPressure() {
// If we can't query the dedicated memory just use a fallback fixed value of 256 MB
totalGpuMemory = MB_TO_BYTES(DEFAULT_MAX_MEMORY_MB);
} else {
#ifdef CHECK_MIN_FREE_GPU_MEMORY
// Check the global free GPU memory
auto freeGpuMemory = getFreeDedicatedMemory();
if (freeGpuMemory) {
@ -115,21 +127,26 @@ float GLTexture::getMemoryPressure() {
if (freeGpuMemory != lastFreeGpuMemory) {
lastFreeGpuMemory = freeGpuMemory;
if (freePercentage < MIN_FREE_GPU_MEMORY_PERCENTAGE) {
qDebug() << "Exceeded max GPU memory";
qCDebug(gpugllogging) << "Exceeded min free GPU memory " << freePercentage;
return OVER_MEMORY_PRESSURE;
}
}
}
#endif
}
// Allow 50% of all available GPU memory to be consumed by textures
// FIXME overly conservative?
availableTextureMemory = (totalGpuMemory >> 1);
availableTextureMemory = static_cast<gpu::Size>(totalGpuMemory * MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE);
}
// Return the consumed texture memory divided by the available texture memory.
auto consumedGpuMemory = Context::getTextureGPUMemoryUsage();
return (float)consumedGpuMemory / (float)availableTextureMemory;
float memoryPressure = (float)consumedGpuMemory / (float)availableTextureMemory;
static Context::Size lastConsumedGpuMemory = 0;
if (memoryPressure > 1.0f && lastConsumedGpuMemory != consumedGpuMemory) {
lastConsumedGpuMemory = consumedGpuMemory;
qCDebug(gpugllogging) << "Exceeded max allowed texture memory: " << consumedGpuMemory << " / " << availableTextureMemory;
}
return memoryPressure;
}
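As a worked example of the new budget (illustrative numbers, not from the repo): with 4096 MB of dedicated GPU memory the texture budget is 0.65 * 4096 ≈ 2662 MB, versus 2048 MB under the old 50% shift, and pressure is simply consumed divided by budget:

```cpp
#include <cstdint>
#include <iostream>

int main() {
    const double MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE = 0.65;
    const uint64_t MB = 1024ull * 1024ull;
    uint64_t totalGpuMemory = 4096 * MB;
    auto available = (uint64_t)(totalGpuMemory * MAX_CONSUMED_TEXTURE_MEMORY_PERCENTAGE);
    uint64_t consumed = 3000 * MB;  // hypothetical texture usage
    double pressure = (double)consumed / (double)available;
    std::cout << "pressure = " << pressure << "\n";  // > 1.0 triggers the log above
    return 0;
}
```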

View file

@ -23,7 +23,7 @@ vec2 float32x3_to_oct(in vec3 v) {
vec3 oct_to_float32x3(in vec2 e) {
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
if (v.z < 0) {
if (v.z < 0.0) {
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
}
return normalize(v);

View file

@ -108,7 +108,7 @@ float evalLightAttenuation(Light l, float d) {
// "Fade" the edges of light sources to make things look a bit more attractive.
// Note: this tends to look a bit odd at lower exponents.
attenuation *= min(1, max(0, -(d - cutoff)));
attenuation *= min(1.0, max(0.0, -(d - cutoff)));
return attenuation;
}
@ -118,7 +118,7 @@ SphericalHarmonics getLightAmbientSphere(Light l) {
}
bool getLightHasAmbientMap(Light l) {
return l._control.x > 0;
return l._control.x > 0.0;
}
float getLightAmbientMapNumMips(Light l) {

View file

@ -141,7 +141,11 @@ bool haveAssetServer() {
}
GetMappingRequest* AssetClient::createGetMappingRequest(const AssetPath& path) {
return new GetMappingRequest(path);
auto request = new GetMappingRequest(path);
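// move the request onto the AssetClient's thread so its queued signals fire there
// regardless of the calling thread (rationale inferred from this change, not stated in the diff)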
request->moveToThread(thread());
return request;
}
GetAllMappingsRequest* AssetClient::createGetAllMappingsRequest() {
@ -305,7 +309,7 @@ void AssetClient::handleAssetGetInfoReply(QSharedPointer<ReceivedMessage> messag
void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
Q_ASSERT(QThread::currentThread() == thread());
auto assetHash = message->read(SHA256_HASH_LENGTH);
auto assetHash = message->readHead(SHA256_HASH_LENGTH);
qCDebug(asset_client) << "Got reply for asset: " << assetHash.toHex();
MessageID messageID;
@ -349,8 +353,8 @@ void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, S
} else {
auto weakNode = senderNode.toWeakRef();
connect(message.data(), &ReceivedMessage::progress, this, [this, weakNode, messageID, length]() {
handleProgressCallback(weakNode, messageID, length);
connect(message.data(), &ReceivedMessage::progress, this, [this, weakNode, messageID, length](qint64 size) {
handleProgressCallback(weakNode, messageID, size, length);
});
connect(message.data(), &ReceivedMessage::completed, this, [this, weakNode, messageID]() {
handleCompleteCallback(weakNode, messageID);
@ -358,7 +362,8 @@ void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, S
}
}
void AssetClient::handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID, DataOffset length) {
void AssetClient::handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID,
qint64 size, DataOffset length) {
auto senderNode = node.toStrongRef();
if (!senderNode) {
@ -381,13 +386,7 @@ void AssetClient::handleProgressCallback(const QWeakPointer<Node>& node, Message
}
auto& callbacks = requestIt->second;
auto& message = callbacks.message;
if (!message) {
return;
}
callbacks.progressCallback(message->getSize(), length);
callbacks.progressCallback(size, length);
}
void AssetClient::handleCompleteCallback(const QWeakPointer<Node>& node, MessageID messageID) {

View file

@ -93,7 +93,7 @@ private:
bool cancelGetAssetRequest(MessageID id);
bool cancelUploadAssetRequest(MessageID id);
void handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID, DataOffset length);
void handleProgressCallback(const QWeakPointer<Node>& node, MessageID messageID, qint64 size, DataOffset length);
void handleCompleteCallback(const QWeakPointer<Node>& node, MessageID messageID);
struct GetAssetRequestData {

View file

@ -11,10 +11,20 @@
#include "AssetResourceRequest.h"
#include <QtCore/QLoggingCategory>
#include "AssetClient.h"
#include "AssetUtils.h"
#include "MappingRequest.h"
#include <QtCore/qloggingcategory.h>
#include "NetworkLogging.h"
static const int DOWNLOAD_PROGRESS_LOG_INTERVAL_SECONDS = 5;
AssetResourceRequest::AssetResourceRequest(const QUrl& url) :
ResourceRequest(url)
{
_lastProgressDebug = p_high_resolution_clock::now() - std::chrono::seconds(DOWNLOAD_PROGRESS_LOG_INTERVAL_SECONDS);
}
AssetResourceRequest::~AssetResourceRequest() {
if (_assetMappingRequest) {
@ -24,10 +34,6 @@ AssetResourceRequest::~AssetResourceRequest() {
if (_assetRequest) {
_assetRequest->deleteLater();
}
if (_sendTimer) {
cleanupTimer();
}
}
bool AssetResourceRequest::urlIsAssetHash() const {
@ -37,24 +43,6 @@ bool AssetResourceRequest::urlIsAssetHash() const {
return hashRegex.exactMatch(_url.toString());
}
void AssetResourceRequest::setupTimer() {
Q_ASSERT(!_sendTimer);
static const int TIMEOUT_MS = 2000;
_sendTimer = new QTimer(this);
connect(_sendTimer, &QTimer::timeout, this, &AssetResourceRequest::onTimeout);
_sendTimer->setSingleShot(true);
_sendTimer->start(TIMEOUT_MS);
}
void AssetResourceRequest::cleanupTimer() {
Q_ASSERT(_sendTimer);
disconnect(_sendTimer, 0, this, 0);
_sendTimer->deleteLater();
_sendTimer = nullptr;
}
void AssetResourceRequest::doSend() {
// We'll either have a hash or an ATP path to a file (that maps to a hash)
if (urlIsAssetHash()) {
@ -81,8 +69,6 @@ void AssetResourceRequest::requestMappingForPath(const AssetPath& path) {
Q_ASSERT(_state == InProgress);
Q_ASSERT(request == _assetMappingRequest);
cleanupTimer();
switch (request->getError()) {
case MappingRequest::NoError:
// we have no error, we should have a resulting hash - use that to send of a request for that asset
@ -118,7 +104,6 @@ void AssetResourceRequest::requestMappingForPath(const AssetPath& path) {
_assetMappingRequest = nullptr;
});
setupTimer();
_assetMappingRequest->start();
}
@ -133,8 +118,6 @@ void AssetResourceRequest::requestHash(const AssetHash& hash) {
Q_ASSERT(_state == InProgress);
Q_ASSERT(req == _assetRequest);
Q_ASSERT(req->getState() == AssetRequest::Finished);
cleanupTimer();
switch (req->getError()) {
case AssetRequest::Error::NoError:
@ -162,35 +145,29 @@ void AssetResourceRequest::requestHash(const AssetHash& hash) {
_assetRequest = nullptr;
});
setupTimer();
_assetRequest->start();
}
void AssetResourceRequest::onDownloadProgress(qint64 bytesReceived, qint64 bytesTotal) {
Q_ASSERT(_state == InProgress);
// We've received data, so reset the timer
_sendTimer->start();
emit progress(bytesReceived, bytesTotal);
auto now = p_high_resolution_clock::now();
// if we haven't received the full asset, check if it is time to output progress to the log;
// we do so every X seconds to assist with ATP download tracking
if (bytesReceived != bytesTotal
&& now - _lastProgressDebug > std::chrono::seconds(DOWNLOAD_PROGRESS_LOG_INTERVAL_SECONDS)) {
int percentage = roundf((float) bytesReceived / (float) bytesTotal * 100.0f);
qCDebug(networking).nospace() << "Progress for " << _url.path() << " - "
<< bytesReceived << " of " << bytesTotal << " bytes - " << percentage << "%";
_lastProgressDebug = now;
}
}
void AssetResourceRequest::onTimeout() {
if (_state == InProgress) {
qWarning() << "Asset request timed out: " << _url;
if (_assetRequest) {
disconnect(_assetRequest, 0, this, 0);
_assetRequest->deleteLater();
_assetRequest = nullptr;
}
if (_assetMappingRequest) {
disconnect(_assetMappingRequest, 0, this, 0);
_assetMappingRequest->deleteLater();
_assetMappingRequest = nullptr;
}
_result = Timeout;
_state = Finished;
emit finished();
}
cleanupTimer();
}

View file

@ -14,13 +14,15 @@
#include <QUrl>
#include <PortableHighResolutionClock.h>
#include "AssetRequest.h"
#include "ResourceRequest.h"
class AssetResourceRequest : public ResourceRequest {
Q_OBJECT
public:
AssetResourceRequest(const QUrl& url) : ResourceRequest(url) { }
AssetResourceRequest(const QUrl& url);
virtual ~AssetResourceRequest() override;
protected:
@ -28,21 +30,17 @@ protected:
private slots:
void onDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
void onTimeout();
private:
void setupTimer();
void cleanupTimer();
bool urlIsAssetHash() const;
void requestMappingForPath(const AssetPath& path);
void requestHash(const AssetHash& hash);
QTimer* _sendTimer { nullptr };
GetMappingRequest* _assetMappingRequest { nullptr };
AssetRequest* _assetRequest { nullptr };
p_high_resolution_clock::time_point _lastProgressDebug;
};
#endif

View file

@ -54,14 +54,14 @@ void ReceivedMessage::appendPacket(NLPacket& packet) {
"We should not be appending to a complete message");
// Limit progress signal to every X packets
const int EMIT_PROGRESS_EVERY_X_PACKETS = 100;
const int EMIT_PROGRESS_EVERY_X_PACKETS = 50;
++_numPackets;
_data.append(packet.getPayload(), packet.getPayloadSize());
if (_numPackets % EMIT_PROGRESS_EVERY_X_PACKETS == 0) {
emit progress();
emit progress(getSize());
}
if (packet.getPacketPosition() == NLPacket::PacketPosition::LAST) {

View file

@ -78,7 +78,7 @@ public:
template<typename T> qint64 readHeadPrimitive(T* data);
signals:
void progress();
void progress(qint64 size);
void completed();
private slots:

View file

@ -51,7 +51,7 @@ DeferredFragment unpackDeferredFragmentNoPosition(vec2 texcoord) {
vec4 specularVal;
DeferredFragment frag;
frag.depthVal = -1;
frag.depthVal = -1.0;
normalVal = texture(normalMap, texcoord);
diffuseVal = texture(albedoMap, texcoord);
specularVal = texture(specularMap, texcoord);
@ -138,8 +138,8 @@ void unpackMidLowNormalCurvature(vec2 texcoord, out vec4 midNormalCurvature, out
lowNormalCurvature = fetchDiffusedCurvature(texcoord);
midNormalCurvature.xyz = normalize((midNormalCurvature.xyz - 0.5f) * 2.0f);
lowNormalCurvature.xyz = normalize((lowNormalCurvature.xyz - 0.5f) * 2.0f);
midNormalCurvature.w = (midNormalCurvature.w * 2 - 1);
lowNormalCurvature.w = (lowNormalCurvature.w * 2 - 1);
midNormalCurvature.w = (midNormalCurvature.w * 2.0 - 1.0);
lowNormalCurvature.w = (lowNormalCurvature.w * 2.0 - 1.0);
}
<@endfunc@>

View file

@ -27,8 +27,8 @@ float evalOpaqueFinalAlpha(float alpha, float mapAlpha) {
}
const float DEFAULT_ROUGHNESS = 0.9;
const float DEFAULT_SHININESS = 10;
const float DEFAULT_METALLIC = 0;
const float DEFAULT_SHININESS = 10.0;
const float DEFAULT_METALLIC = 0.0;
const vec3 DEFAULT_SPECULAR = vec3(0.1);
const vec3 DEFAULT_EMISSIVE = vec3(0.0);
const float DEFAULT_OCCLUSION = 1.0;

View file

@ -23,7 +23,7 @@
// prepareGlobalLight
// Transform directions to worldspace
vec3 fragNormal = vec3((normal));
vec3 fragEyeVector = vec3(invViewMat * vec4(-position, 0.0));
vec3 fragEyeVector = vec3(invViewMat * vec4(-1.0*position, 0.0));
vec3 fragEyeDir = normalize(fragEyeVector);
// Get light
@ -143,13 +143,13 @@ vec3 evalLightmappedColor(mat4 invViewMat, float shadowAttenuation, float obscur
float facingLight = step(PERPENDICULAR_THRESHOLD, diffuseDot);
// Reevaluate the shadow attenuation for light facing fragments
float lightAttenuation = (1 - facingLight) + facingLight * shadowAttenuation;
float lightAttenuation = (1.0 - facingLight) + facingLight * shadowAttenuation;
// Diffuse light is the lightmap dimmed by shadow
vec3 diffuseLight = lightAttenuation * lightmap;
// Ambient light is the lightmap when in shadow
vec3 ambientLight = (1 - lightAttenuation) * lightmap * getLightAmbientIntensity(light);
vec3 ambientLight = (1.0 - lightAttenuation) * lightmap * getLightAmbientIntensity(light);
return isLightmapEnabled() * obscurance * albedo * (diffuseLight + ambientLight);
}

View file

@ -478,6 +478,8 @@ void RenderDeferredSetup::run(const render::SceneContextPointer& sceneContext, c
// Setup the global directional pass pipeline
{
if (deferredLightingEffect->_shadowMapEnabled) {
// If the keylight has an ambient Map then use the Skybox version of the pass
// otherwise use the ambient sphere version
if (keyLight->getAmbientMap()) {
program = deferredLightingEffect->_directionalSkyboxLightShadow;
locations = deferredLightingEffect->_directionalSkyboxLightShadowLocations;
@ -486,11 +488,11 @@ void RenderDeferredSetup::run(const render::SceneContextPointer& sceneContext, c
locations = deferredLightingEffect->_directionalAmbientSphereLightShadowLocations;
}
} else {
// If the keylight has an ambient Map then use the Skybox version of the pass
// otherwise use the ambient sphere version
if (keyLight->getAmbientMap()) {
program = deferredLightingEffect->_directionalAmbientSphereLight;
locations = deferredLightingEffect->_directionalAmbientSphereLightLocations;
//program = deferredLightingEffect->_directionalSkyboxLight;
//locations = deferredLightingEffect->_directionalSkyboxLightLocations;
program = deferredLightingEffect->_directionalSkyboxLight;
locations = deferredLightingEffect->_directionalSkyboxLightLocations;
} else {
program = deferredLightingEffect->_directionalAmbientSphereLight;
locations = deferredLightingEffect->_directionalAmbientSphereLightLocations;

View file

@ -23,7 +23,7 @@ vec4 evalSkyboxLight(vec3 direction, float lod) {
<@func declareEvalAmbientSpecularIrradiance(supportAmbientSphere, supportAmbientMap, supportIfAmbientMapElseAmbientSphere)@>
vec3 fresnelSchlickAmbient(vec3 fresnelColor, vec3 lightDir, vec3 halfDir, float gloss) {
return fresnelColor + (max(vec3(gloss), fresnelColor) - fresnelColor) * pow(1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0), 5);
return fresnelColor + (max(vec3(gloss), fresnelColor) - fresnelColor) * pow(1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0), 5.0);
}
<@if supportAmbientMap@>
@ -32,7 +32,7 @@ vec3 fresnelSchlickAmbient(vec3 fresnelColor, vec3 lightDir, vec3 halfDir, float
vec3 evalAmbientSpecularIrradiance(Light light, vec3 fragEyeDir, vec3 fragNormal, float roughness, vec3 fresnel) {
vec3 direction = -reflect(fragEyeDir, fragNormal);
vec3 ambientFresnel = fresnelSchlickAmbient(fresnel, fragEyeDir, fragNormal, 1 - roughness);
vec3 ambientFresnel = fresnelSchlickAmbient(fresnel, fragEyeDir, fragNormal, 1.0 - roughness);
vec3 specularLight;
<@if supportIfAmbientMapElseAmbientSphere@>
if (getLightHasAmbientMap(light))
@ -76,7 +76,7 @@ void evalLightingAmbient(out vec3 diffuse, out vec3 specular, Light light, vec3
// Diffuse from ambient
diffuse = (1 - metallic) * evalSphericalLight(getLightAmbientSphere(light), normal).xyz;
diffuse = (1.0 - metallic) * evalSphericalLight(getLightAmbientSphere(light), normal).xyz;
// Specular highlight from ambient
specular = evalAmbientSpecularIrradiance(light, eyeDir, normal, roughness, fresnel) * obscurance * getLightAmbientIntensity(light);

View file

@ -40,7 +40,7 @@ void evalLightingPoint(out vec3 diffuse, out vec3 specular, Light light,
if (isShowLightContour() > 0.0) {
// Show edge
float edge = abs(2.0 * ((getLightRadius(light) - fragLightDistance) / (0.1)) - 1.0);
if (edge < 1) {
if (edge < 1.0) {
float edgeCoord = exp2(-8.0*edge*edge);
diffuse = vec3(edgeCoord * edgeCoord * getLightShowContour(light) * getLightColor(light));
}

View file

@ -44,7 +44,7 @@ void evalLightingSpot(out vec3 diffuse, out vec3 specular, Light light,
float edgeDistS = dot(fragLightDistance * vec2(cosSpotAngle, sqrt(1.0 - cosSpotAngle * cosSpotAngle)), -getLightSpotOutsideNormal2(light));
float edgeDist = min(edgeDistR, edgeDistS);
float edge = abs(2.0 * (edgeDist / (0.1)) - 1.0);
if (edge < 1) {
if (edge < 1.0) {
float edgeCoord = exp2(-8.0*edge*edge);
diffuse = vec3(edgeCoord * edgeCoord * getLightColor(light));
}

View file

@ -118,8 +118,8 @@ vec3 fresnelSchlickColor(vec3 fresnelColor, vec3 lightDir, vec3 halfDir) {
float specularDistribution(float roughness, vec3 normal, vec3 halfDir) {
float ndoth = clamp(dot(halfDir, normal), 0.0, 1.0);
float gloss2 = pow(0.001 + roughness, 4);
float denom = (ndoth * ndoth*(gloss2 - 1) + 1);
float gloss2 = pow(0.001 + roughness, 4.0);
float denom = (ndoth * ndoth*(gloss2 - 1.0) + 1.0);
float power = gloss2 / (3.14159 * denom * denom);
return power;
}
@ -142,7 +142,7 @@ vec4 evalPBRShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, float m
float power = specularDistribution(roughness, fragNormal, halfDir);
vec3 specular = power * fresnelColor * diffuse;
return vec4(specular, (1.0 - metallic) * diffuse * (1 - fresnelColor.x));
return vec4(specular, (1.0 - metallic) * diffuse * (1.0 - fresnelColor.x));
}
<@endfunc@>

View file

@ -137,7 +137,7 @@ vec3 integrate(float cosTheta, float skinRadius) {
uniform sampler2D scatteringLUT;
vec3 fetchBRDF(float LdotN, float curvature) {
return texture(scatteringLUT, vec2( clamp(LdotN * 0.5 + 0.5, 0.0, 1.0), clamp(2 * curvature, 0.0, 1.0))).xyz;
return texture(scatteringLUT, vec2( clamp(LdotN * 0.5 + 0.5, 0.0, 1.0), clamp(2.0 * curvature, 0.0, 1.0))).xyz;
}
vec3 fetchBRDFSpectrum(vec3 LdotNSpectrum, float curvature) {
@ -183,7 +183,7 @@ float tuneCurvatureUnsigned(float curvature) {
}
float unpackCurvature(float packedCurvature) {
return (packedCurvature * 2 - 1);
return (packedCurvature * 2.0 - 1.0);
}
vec3 evalScatteringBentNdotL(vec3 normal, vec3 midNormal, vec3 lowNormal, vec3 lightDir) {
@ -210,7 +210,7 @@ vec3 evalSkinBRDF(vec3 lightDir, vec3 normal, vec3 midNormal, vec3 lowNormal, fl
return lowNormal * 0.5 + vec3(0.5);
}
if (showCurvature()) {
return (curvature > 0 ? vec3(curvature, 0.0, 0.0) : vec3(0.0, 0.0, -curvature));
return (curvature > 0.0 ? vec3(curvature, 0.0, 0.0) : vec3(0.0, 0.0, -curvature));
}
vec3 bentNdotL = evalScatteringBentNdotL(normal, midNormal, lowNormal, lightDir);

View file

@ -26,7 +26,7 @@ void main(void) {
);
vec4 pos = UNIT_QUAD[gl_VertexID];
_texCoord0 = (pos.xy + 1) * 0.5;
_texCoord0 = (pos.xy + 1.0) * 0.5;
_texCoord0 *= texcoordFrameTransform.zw;
_texCoord0 += texcoordFrameTransform.xy;

View file

@ -47,7 +47,7 @@ void main(void) {
);
vec4 pos = UNIT_QUAD[gl_VertexID];
_texCoord0 = vec4((pos.xy + 1) * 0.5, 0.0, 1.0);
_texCoord0 = vec4((pos.xy + 1.0) * 0.5, 0.0, 1.0);
if (cam_isStereo()) {
_texCoord0.x = 0.5 * (_texCoord0.x + cam_getStereoSide());

View file

@ -60,7 +60,7 @@ void main(void) {
);
vec4 pos = UNIT_QUAD[gl_VertexID];
_texCoord0 = vec4((pos.xy + 1) * 0.5, 0.0, 1.0);
_texCoord0 = vec4((pos.xy + 1.0) * 0.5, 0.0, 1.0);
if (cam_isStereo()) {
_texCoord0.x = 0.5 * (_texCoord0.x + cam_getStereoSide());
}

View file

@ -2,7 +2,7 @@
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// directional_light.frag
// directional_ambient_light.frag
// fragment shader
//
// Created by Andrzej Kapolka on 9/3/14.

View file

@ -2,7 +2,7 @@
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// directional_light.frag
// directional_skybox_light.frag
// fragment shader
//
// Created by Sam Gateau on 5/8/2015.

View file

@ -63,7 +63,6 @@ void OculusBaseDisplayPlugin::customizeContext() {
void OculusBaseDisplayPlugin::uncustomizeContext() {
Parent::uncustomizeContext();
internalPresent();
}
bool OculusBaseDisplayPlugin::internalActivate() {

View file

@ -445,8 +445,9 @@ void OpenVrDisplayPlugin::internalDeactivate() {
_openVrDisplayActive = false;
_container->setIsOptionChecked(StandingHMDSensorMode, false);
if (_system) {
// Invalidate poses. It's fine if someone else sets these shared values, but we're about to stop updating them, and
// TODO: Invalidate poses. It's fine if someone else sets these shared values, but we're about to stop updating them, and
// we don't want ViveControllerManager to consider old values to be valid.
_container->makeRenderingContextCurrent();
releaseOpenVrSystem();
_system = nullptr;
}
@ -635,7 +636,11 @@ void OpenVrDisplayPlugin::postPreview() {
_nextSimPoseData = nextSim;
});
_nextRenderPoseData = nextRender;
// FIXME - this looks wrong!
_hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
#else
_hmdActivityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
#endif
}

View file

@ -132,6 +132,7 @@ void ViveControllerManager::deactivate() {
_container->removeMenu(MENU_PATH);
if (_system) {
_container->makeRenderingContextCurrent();
releaseOpenVrSystem();
_system = nullptr;
}

View file

@ -16,6 +16,7 @@
(function() { // BEGIN LOCAL_SCOPE
var BASIC_TIMER_INTERVAL = 50; // 50ms = 20hz
var OVERLAY_WIDTH = 1920;
var OVERLAY_HEIGHT = 1080;
var OVERLAY_DATA = {
@ -49,6 +50,21 @@ var AWAY_INTRO = {
endFrame: 83.0
};
// MAIN CONTROL
var isEnabled = true;
var wasMuted; // unknown?
var isAway = false; // we start in the un-away state
var wasOverlaysVisible = Menu.isOptionChecked("Overlays");
var eventMappingName = "io.highfidelity.away"; // goActive on hand controller button events, too.
var eventMapping = Controller.newMapping(eventMappingName);
var avatarPosition = MyAvatar.position;
var wasHmdMounted = HMD.mounted;
// some intervals we may create/delete
var avatarMovedInterval;
// prefetch the kneel animation and hold a ref so it's always resident in memory when we need it.
var _animation = AnimationCache.prefetch(AWAY_INTRO.url);
@ -125,41 +141,28 @@ function maybeMoveOverlay() {
var halfWayBetweenOldAndLookAt = Vec3.multiply(lookAtChange, EASE_BY_RATIO);
var newOverlayPosition = Vec3.sum(lastOverlayPosition, halfWayBetweenOldAndLookAt);
lastOverlayPosition = newOverlayPosition;
var actualOverlayPositon = moveCloserToCamera(lastOverlayPosition);
Overlays.editOverlay(overlayHMD, { visible: true, position: actualOverlayPositon });
// make sure desktop version is hidden
Overlays.editOverlay(overlay, { visible: false });
// also remember avatar position
avatarPosition = MyAvatar.position;
}
}
}
function ifAvatarMovedGoActive() {
if (Vec3.distance(MyAvatar.position, avatarPosition) > AVATAR_MOVE_FOR_ACTIVE_DISTANCE) {
var newAvatarPosition = MyAvatar.position;
if (Vec3.distance(newAvatarPosition, avatarPosition) > AVATAR_MOVE_FOR_ACTIVE_DISTANCE) {
goActive();
}
avatarPosition = newAvatarPosition;
}
// MAIN CONTROL
var isEnabled = true;
var wasMuted, isAway;
var wasOverlaysVisible = Menu.isOptionChecked("Overlays");
var eventMappingName = "io.highfidelity.away"; // goActive on hand controller button events, too.
var eventMapping = Controller.newMapping(eventMappingName);
var avatarPosition = MyAvatar.position;
// backward compatible version of getting HMD.mounted, so it works in old clients
function safeGetHMDMounted() {
if (HMD.mounted === undefined) {
return true;
}
return HMD.mounted;
}
var wasHmdMounted = safeGetHMDMounted();
function goAway() {
function goAway(fromStartup) {
if (!isEnabled || isAway) {
return;
}
@ -167,7 +170,6 @@ function goAway() {
UserActivityLogger.toggledAway(true);
isAway = true;
print('going "away"');
wasMuted = AudioDevice.getMuted();
if (!wasMuted) {
AudioDevice.toggleMute();
@ -189,10 +191,21 @@ function goAway() {
// For HMD, the hmd preview will show the system mouse because of allowMouseCapture,
// but we want to turn off our Reticle so that we don't get two in preview and a stuck one in headset.
Reticle.visible = !HMD.active;
wasHmdMounted = safeGetHMDMounted(); // always remember the correct state
wasHmdMounted = HMD.mounted; // always remember the correct state
avatarPosition = MyAvatar.position;
Script.update.connect(ifAvatarMovedGoActive);
// If we're entering away mode from some other state than startup, then we create our move timer immediately.
// However if we're just starting up, we need to delay this process so that we don't think the initial teleport
// is actually a move.
if (fromStartup === undefined || fromStartup === false) {
avatarMovedInterval = Script.setInterval(ifAvatarMovedGoActive, BASIC_TIMER_INTERVAL);
} else {
var WAIT_FOR_MOVE_ON_STARTUP = 3000; // 3 seconds
Script.setTimeout(function() {
avatarMovedInterval = Script.setInterval(ifAvatarMovedGoActive, BASIC_TIMER_INTERVAL);
}, WAIT_FOR_MOVE_ON_STARTUP);
}
}
function goActive() {
@ -203,7 +216,6 @@ function goActive() {
UserActivityLogger.toggledAway(false);
isAway = false;
print('going "active"');
if (!wasMuted) {
AudioDevice.toggleMute();
}
@ -230,9 +242,9 @@ function goActive() {
if (HMD.active) {
Reticle.position = HMD.getHUDLookAtPosition2D();
}
wasHmdMounted = safeGetHMDMounted(); // always remember the correct state
wasHmdMounted = HMD.mounted; // always remember the correct state
Script.update.disconnect(ifAvatarMovedGoActive);
Script.clearInterval(avatarMovedInterval);
}
function maybeGoActive(event) {
@ -250,10 +262,12 @@ var wasHmdActive = HMD.active;
var wasMouseCaptured = Reticle.mouseCaptured;
function maybeGoAway() {
// If our active state change (went to or from HMD mode), and we are now in the HMD, go into away
if (HMD.active !== wasHmdActive) {
wasHmdActive = !wasHmdActive;
if (wasHmdActive) {
goAway();
return;
}
}
@ -264,19 +278,30 @@ function maybeGoAway() {
wasMouseCaptured = !wasMouseCaptured;
if (!wasMouseCaptured) {
goAway();
return;
}
}
// If you've removed your HMD from your head, and we can detect it, we will also go away...
var hmdMounted = safeGetHMDMounted();
if (HMD.active && !hmdMounted && wasHmdMounted) {
wasHmdMounted = hmdMounted;
goAway();
if (HMD.mounted != wasHmdMounted) {
wasHmdMounted = HMD.mounted;
print("HMD mounted changed...");
// We're putting the HMD on... switch to those devices
if (HMD.mounted) {
print("NOW mounted...");
} else {
print("HMD NOW un-mounted...");
if (HMD.active) {
goAway();
return;
}
}
}
}
function setEnabled(value) {
print("setting away enabled: ", value);
if (!value) {
goActive();
}
@ -293,9 +318,12 @@ var handleMessage = function(channel, message, sender) {
Messages.subscribe(CHANNEL_AWAY_ENABLE);
Messages.messageReceived.connect(handleMessage);
Script.update.connect(maybeMoveOverlay);
var maybeIntervalTimer = Script.setInterval(function(){
maybeMoveOverlay();
maybeGoAway();
}, BASIC_TIMER_INTERVAL);
Script.update.connect(maybeGoAway);
Controller.mousePressEvent.connect(goActive);
Controller.keyPressEvent.connect(maybeGoActive);
// Note peek() so as to not interfere with other mappings.
@ -316,11 +344,17 @@ eventMapping.from(Controller.Standard.Start).peek().to(goActive);
Controller.enableMapping(eventMappingName);
Script.scriptEnding.connect(function () {
Script.update.disconnect(maybeGoAway);
Script.clearInterval(maybeIntervalTimer);
goActive();
Controller.disableMapping(eventMappingName);
Controller.mousePressEvent.disconnect(goActive);
Controller.keyPressEvent.disconnect(maybeGoActive);
});
if (HMD.active && !HMD.mounted) {
print("Starting script, while HMD is active and not mounted...");
goAway(true);
}
}()); // END LOCAL_SCOPE

View file

@ -26,7 +26,7 @@ var WANT_DEBUG = false;
var WANT_DEBUG_STATE = false;
var WANT_DEBUG_SEARCH_NAME = null;
var FORCE_IGNORE_IK = true;
var FORCE_IGNORE_IK = false;
var SHOW_GRAB_POINT_SPHERE = true;
//
@ -112,7 +112,7 @@ var CHECK_TOO_FAR_UNEQUIP_TIME = 0.3; // seconds, duration between checks
var GRAB_POINT_SPHERE_RADIUS = NEAR_GRAB_RADIUS;
var GRAB_POINT_SPHERE_COLOR = { red: 20, green: 90, blue: 238 };
var GRAB_POINT_SPHERE_COLOR = { red: 240, green: 240, blue: 240 };
var GRAB_POINT_SPHERE_ALPHA = 0.85;
@ -1075,12 +1075,6 @@ function MyController(hand) {
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var worldHandPosition = controllerLocation.position;
if (controllerLocation.valid) {
this.grabPointSphereOn();
} else {
this.grabPointSphereOff();
}
var candidateEntities = Entities.findEntities(worldHandPosition, MAX_EQUIP_HOTSPOT_RADIUS);
entityPropertiesCache.addEntities(candidateEntities);
var potentialEquipHotspot = this.chooseBestEquipHotspot(candidateEntities);
@ -1103,9 +1097,11 @@ function MyController(hand) {
if (!this.grabPointIntersectsEntity) {
Controller.triggerHapticPulse(1, 20, this.hand);
this.grabPointIntersectsEntity = true;
this.grabPointSphereOn();
}
} else {
this.grabPointIntersectsEntity = false;
this.grabPointSphereOff();
}
};
@ -1386,10 +1382,11 @@ function MyController(hand) {
this.chooseBestEquipHotspot = function(candidateEntities) {
var DISTANCE = 0;
var equippableHotspots = this.chooseNearEquipHotspots(candidateEntities, DISTANCE);
var _this = this;
if (equippableHotspots.length > 0) {
// sort by distance
equippableHotspots.sort(function(a, b) {
var handControllerLocation = getControllerWorldLocation(this.handToController(), true);
var handControllerLocation = getControllerWorldLocation(_this.handToController(), true);
var aDistance = Vec3.distance(a.worldPosition, handControllerLocation.position);
var bDistance = Vec3.distance(b.worldPosition, handControllerLocation.position);
return aDistance - bDistance;
@ -1427,12 +1424,6 @@ function MyController(hand) {
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var handPosition = controllerLocation.position;
if (controllerLocation.valid) {
this.grabPointSphereOn();
} else {
this.grabPointSphereOff();
}
var rayPickInfo = this.calcRayPickInfo(this.hand);
if (rayPickInfo.entityID) {
@ -1906,7 +1897,7 @@ function MyController(hand) {
if (FORCE_IGNORE_IK) {
this.ignoreIK = true;
} else {
this.ignoreIK = grabbableData.ignoreIK ? grabbableData.ignoreIK : false;
this.ignoreIK = (grabbableData.ignoreIK !== undefined) ? grabbableData.ignoreIK : true;
}
var handRotation;

View file

@ -830,7 +830,7 @@ function loaded() {
elGrabbable.checked = properties.dynamic;
elWantsTrigger.checked = false;
elIgnoreIK.checked = false;
elIgnoreIK.checked = true;
var parsedUserData = {}
try {
parsedUserData = JSON.parse(properties.userData);
@ -1143,7 +1143,7 @@ function loaded() {
userDataChanger("grabbableKey", "wantsTrigger", elWantsTrigger, elUserData, false);
});
elIgnoreIK.addEventListener('change', function() {
userDataChanger("grabbableKey", "ignoreIK", elIgnoreIK, elUserData, false);
userDataChanger("grabbableKey", "ignoreIK", elIgnoreIK, elUserData, true);
});
elCollisionSoundURL.addEventListener('change', createEmitTextPropertyUpdateFunction('collisionSoundURL'));
@ -1596,4 +1596,4 @@ function loaded() {
document.addEventListener("contextmenu", function(event) {
event.preventDefault();
}, false);
}
}

View file

@ -10,9 +10,10 @@
// var GRAB_POINT_SPHERE_OFFSET = { x: 0, y: 0.2, z: 0 };
// var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.175, z: 0.04 };
// var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.32, z: 0.04 };
// this offset needs to match the one in libraries/display-plugins/src/display-plugins/hmd/HmdDisplayPlugin.cpp
var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.32, z: 0.04 };
var GRAB_POINT_SPHERE_OFFSET = { x: 0.0, y: 0.175, z: 0.0 };
getGrabPointSphereOffset = function(handController) {
if (handController === Controller.Standard.RightHand) {

View file

@ -1133,6 +1133,7 @@ var usersWindow = (function () {
if (VISIBILITY_VALUES.indexOf(myVisibility) === -1) {
myVisibility = VISIBILITY_FRIENDS;
}
GlobalServices.findableBy = myVisibility;
visibilityControl = new PopUpMenu({
prompt: VISIBILITY_PROMPT,