Merge branch 'smarter_textures' of github.com:highfidelity/hifi into 2-27

Commit: 170df4a1f4
86 changed files with 2,180 additions and 608 deletions
Changed files:

BUILD.md, BUILD_OSX.md, BUILD_WIN.md, README.md
assignment-client/src
interface/resources
interface/src
libraries/animation/src
libraries/avatars/src
libraries/entities-renderer/src
libraries/gpu-gl/src/gpu (gl, gl41, gl45)
libraries/gpu/src/gpu
libraries/ktx/src/ktx
libraries/model-networking/src/model-networking
libraries/model/src/model
libraries/networking/src
libraries/render-utils/src (DeferredBuffer.slh, GeometryCache.cpp, MeshPartPayload.cpp, Model.cpp, RenderDeferredTask.cpp, RenderPipelines.cpp, overlay3D_model.slf, overlay3D_model_translucent.slf, overlay3D_model_translucent_unlit.slf, overlay3D_model_unlit.slf)
libraries/render/src/render
libraries/script-engine/src
libraries/shared/src
scripts/developer/utilities/render
scripts/system
scripts/tutorials
tests
tools/jsdoc/plugins
BUILD.md (13 changes)

@@ -1,7 +1,7 @@
###Dependencies
* [cmake](http://www.cmake.org/cmake/resources/software.html) ~> 3.3.2
* [Qt](http://www.qt.io/download-open-source) ~> 5.6.1
* [cmake](https://cmake.org/download/) ~> 3.3.2
* [Qt](https://www.qt.io/download-open-source) ~> 5.6.1
* [OpenSSL](https://www.openssl.org/community/binaries.html)
* IMPORTANT: Use the latest available version of OpenSSL to avoid security vulnerabilities.
* [VHACD](https://github.com/virneo/v-hacd)(clone this repository)(Optional)

@@ -9,18 +9,17 @@
####CMake External Project Dependencies
* [boostconfig](https://github.com/boostorg/config) ~> 1.58
* [Bullet Physics Engine](https://code.google.com/p/bullet/downloads/list) ~> 2.82
* [Faceshift](http://www.faceshift.com/) ~> 4.3
* [Bullet Physics Engine](https://github.com/bulletphysics/bullet3/releases) ~> 2.83
* [GLEW](http://glew.sourceforge.net/)
* [glm](http://glm.g-truc.net/0.9.5/index.html) ~> 0.9.5.4
* [glm](https://glm.g-truc.net/0.9.5/index.html) ~> 0.9.5.4
* [gverb](https://github.com/highfidelity/gverb)
* [Oculus SDK](https://developer.oculus.com/downloads/) ~> 0.6 (Win32) / 0.5 (Mac / Linux)
* [oglplus](http://oglplus.org/) ~> 0.63
* [OpenVR](https://github.com/ValveSoftware/openvr) ~> 0.91 (Win32 only)
* [Polyvox](http://www.volumesoffun.com/) ~> 0.2.1
* [QuaZip](http://sourceforge.net/projects/quazip/files/quazip/) ~> 0.7.1
* [QuaZip](https://sourceforge.net/projects/quazip/files/quazip/) ~> 0.7.1
* [SDL2](https://www.libsdl.org/download-2.0.php) ~> 2.0.3
* [soxr](http://soxr.sourceforge.net) ~> 0.1.1
* [soxr](https://sourceforge.net/p/soxr/wiki/Home/) ~> 0.1.1
* [Intel Threading Building Blocks](https://www.threadingbuildingblocks.org/) ~> 4.3
* [Sixense](http://sixense.com/) ~> 071615
* [zlib](http://www.zlib.net/) ~> 1.28 (Win32 only)
BUILD_OSX.md

@@ -1,7 +1,7 @@
Please read the [general build guide](BUILD.md) for information on dependencies required for all platforms. Only OS X specific instructions are found in this file.
###Homebrew
[Homebrew](http://brew.sh/) is an excellent package manager for OS X. It makes install of some High Fidelity dependencies very simple.
[Homebrew](https://brew.sh/) is an excellent package manager for OS X. It makes install of some High Fidelity dependencies very simple.
    brew tap homebrew/versions
    brew install cmake openssl

@@ -18,11 +18,11 @@ Note that this uses the version from the homebrew formula at the time of this wr
###Qt
You can use the online installer or the offline installer.
* [Download the online installer](http://www.qt.io/download-open-source/#section-2)
* [Download the online installer](https://www.qt.io/download-open-source/#section-2)
  * When it asks you to select components, select the following:
    * Qt > Qt 5.6
* [Download the offline installer](http://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-mac-x64-clang-5.6.1-1.dmg)
* [Download the offline installer](https://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-mac-x64-clang-5.6.1-1.dmg)
Once Qt is installed, you need to manually configure the following:
* Set the QT_CMAKE_PREFIX_PATH environment variable to your `Qt5.6.1/5.6/clang_64/lib/cmake/` directory.
BUILD_WIN.md

@@ -33,8 +33,8 @@ You can use the online installer or the offline installer. If you use the offlin
    * Qt > Qt 5.6.1 > **msvc2013 64-bit**
* Download the offline installer, 32- or 64-bit to match your build preference:
  * [32-bit](http://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-windows-x86-msvc2013-5.6.1-1.exe)
  * [64-bit](http://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-windows-x86-msvc2013_64-5.6.1-1.exe)
  * [32-bit](https://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-windows-x86-msvc2013-5.6.1-1.exe)
  * [64-bit](https://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-windows-x86-msvc2013_64-5.6.1-1.exe)
Once Qt is installed, you need to manually configure the following:
* Set the QT_CMAKE_PREFIX_PATH environment variable to your `Qt\5.6.1\msvc2013\lib\cmake` or `Qt\5.6.1\msvc2013_64\lib\cmake` directory.

@@ -72,7 +72,7 @@ Your system may already have several versions of the OpenSSL DLL's (ssleay32.dll
    QSslSocket: cannot resolve SSL_CTX_set_next_proto_select_cb
    QSslSocket: cannot resolve SSL_get0_next_proto_negotiated
To prevent these problems, install OpenSSL yourself. Download one of the following binary packages [from this website](http://slproweb.com/products/Win32OpenSSL.html):
To prevent these problems, install OpenSSL yourself. Download one of the following binary packages [from this website](https://slproweb.com/products/Win32OpenSSL.html):
* Win32 OpenSSL v1.0.1q
* Win64 OpenSSL v1.0.1q
README.md

@@ -11,11 +11,11 @@ We're hiring! We're looking for skilled developers;
send your resume to hiring@highfidelity.com
##### Chat with us
Come chat with us in [our Gitter](http://gitter.im/highfidelity/hifi) if you have any questions or just want to say hi!
Come chat with us in [our Gitter](https://gitter.im/highfidelity/hifi) if you have any questions or just want to say hi!
Documentation
=========
Documentation is available at [docs.highfidelity.com](http://docs.highfidelity.com), if something is missing, please suggest it via a new job on Worklist (add to the hifi-docs project).
Documentation is available at [docs.highfidelity.com](https://docs.highfidelity.com), if something is missing, please suggest it via a new job on Worklist (add to the hifi-docs project).
Build Instructions
=========
Agent.cpp

@@ -43,7 +43,6 @@
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
#include "avatars/ScriptableAvatar.h"
#include "entities/AssignmentParentFinder.h"
#include "RecordingScriptingInterface.h"
#include "AbstractAudioInterface.h"

@@ -88,9 +87,9 @@ void Agent::playAvatarSound(SharedSoundPointer sound) {
QMetaObject::invokeMethod(this, "playAvatarSound", Q_ARG(SharedSoundPointer, sound));
return;
} else {
// TODO: seems to add occasional artifact in tests. I believe it is
// TODO: seems to add occasional artifact in tests. I believe it is
// correct to do this, but need to figure out for sure, so commenting this
// out until I verify.
// out until I verify.
// _numAvatarSoundSentBytes = 0;
setAvatarSound(sound);
}

@@ -105,7 +104,7 @@ void Agent::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNo
if (message->getSize() > statsMessageLength) {
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
int piggyBackedSizeWithHeader = message->getSize() - statsMessageLength;
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
memcpy(buffer.get(), message->getRawMessage() + statsMessageLength, piggyBackedSizeWithHeader);

@@ -284,7 +283,7 @@ void Agent::selectAudioFormat(const QString& selectedCodecName) {
for (auto& plugin : codecPlugins) {
if (_selectedCodecName == plugin->getName()) {
_codec = plugin;
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
qDebug() << "Selected Codec Plugin:" << _codec.get();
break;

@@ -380,6 +379,8 @@ void Agent::executeScript() {
audioTransform.setTranslation(scriptedAvatar->getPosition());
audioTransform.setRotation(headOrientation);
computeLoudness(&audio, scriptedAvatar);
QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);

@@ -424,16 +425,16 @@ void Agent::executeScript() {
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
// 100Hz timer for audio
AvatarAudioTimer* audioTimerWorker = new AvatarAudioTimer();
audioTimerWorker->moveToThread(&_avatarAudioTimerThread);
connect(audioTimerWorker, &AvatarAudioTimer::avatarTick, this, &Agent::processAgentAvatarAudio);
connect(this, &Agent::startAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::start);
connect(this, &Agent::stopAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::stop);
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
_avatarAudioTimerThread.start();
// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;

@@ -456,14 +457,14 @@ QUuid Agent::getSessionUUID() const {
return DependencyManager::get<NodeList>()->getSessionUUID();
}
void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
// this must happen on Agent's main thread
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setIsListeningToAudioStream", Q_ARG(bool, isListeningToAudioStream));
return;
}
if (_isListeningToAudioStream) {
// have to tell just the audio mixer to KillAvatar.
// have to tell just the audio mixer to KillAvatar.
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode(

@@ -479,7 +480,7 @@ void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
});
}
_isListeningToAudioStream = isListeningToAudioStream;
_isListeningToAudioStream = isListeningToAudioStream;
}
void Agent::setIsAvatar(bool isAvatar) {

@@ -560,6 +561,7 @@ void Agent::processAgentAvatar() {
nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
}
}
void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
_flushEncoder = false;
static const QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);

@@ -570,6 +572,22 @@ void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
}
}
void Agent::computeLoudness(const QByteArray* decodedBuffer, QSharedPointer<ScriptableAvatar> scriptableAvatar) {
float loudness = 0.0f;
if (decodedBuffer) {
auto soundData = reinterpret_cast<const int16_t*>(decodedBuffer->constData());
int numFrames = decodedBuffer->size() / sizeof(int16_t);
// now iterate and come up with average
if (numFrames > 0) {
for(int i = 0; i < numFrames; i++) {
loudness += (float) std::abs(soundData[i]);
}
loudness /= numFrames;
}
}
scriptableAvatar->setAudioLoudness(loudness);
}
void Agent::processAgentAvatarAudio() {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool isPlayingRecording = recordingInterface->isPlaying();

@@ -619,6 +637,7 @@ void Agent::processAgentAvatarAudio() {
audioPacket->seek(sizeof(quint16));
if (silentFrame) {
if (!_isListeningToAudioStream) {
// if we have a silent frame and we're not listening then just send nothing and break out of here
return;

@@ -626,7 +645,7 @@ void Agent::processAgentAvatarAudio() {
// write the codec
audioPacket->writeString(_selectedCodecName);
// write the number of silent samples so the audio-mixer can uphold timing
audioPacket->writePrimitive(numAvailableSamples);

@@ -636,8 +655,11 @@ void Agent::processAgentAvatarAudio() {
audioPacket->writePrimitive(headOrientation);
audioPacket->writePrimitive(scriptedAvatar->getPosition());
audioPacket->writePrimitive(glm::vec3(0));
// no matter what, the loudness should be set to 0
computeLoudness(nullptr, scriptedAvatar);
} else if (nextSoundOutput) {
// write the codec
audioPacket->writeString(_selectedCodecName);

@@ -654,6 +676,8 @@ void Agent::processAgentAvatarAudio() {
QByteArray encodedBuffer;
if (_flushEncoder) {
encodeFrameOfZeros(encodedBuffer);
// loudness is 0
computeLoudness(nullptr, scriptedAvatar);
} else {
QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
if (_encoder) {

@@ -662,10 +686,15 @@ void Agent::processAgentAvatarAudio() {
} else {
encodedBuffer = decodedBuffer;
}
computeLoudness(&decodedBuffer, scriptedAvatar);
}
audioPacket->write(encodedBuffer.constData(), encodedBuffer.size());
}
// we should never have both nextSoundOutput being null and silentFrame being false, but lets
// assert on it in case things above change in a bad way
assert(nextSoundOutput || silentFrame);
// write audio packet to AudioMixer nodes
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node) {
Agent.h

@@ -30,6 +30,7 @@
#include <plugins/CodecPlugin.h>
#include "MixedAudioStream.h"
#include "avatars/ScriptableAvatar.h"
class Agent : public ThreadedAssignment {
Q_OBJECT

@@ -68,10 +69,10 @@ private slots:
void handleAudioPacket(QSharedPointer<ReceivedMessage> message);
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
void nodeActivated(SharedNodePointer activatedNode);
void processAgentAvatar();
void processAgentAvatarAudio();

@@ -82,6 +83,7 @@ private:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);
void encodeFrameOfZeros(QByteArray& encodedZeros);
void computeLoudness(const QByteArray* decodedBuffer, QSharedPointer<ScriptableAvatar>);
std::unique_ptr<ScriptEngine> _scriptEngine;
EntityEditPacketSender _entityEditSender;

@@ -103,10 +105,10 @@ private:
bool _isAvatar = false;
QTimer* _avatarIdentityTimer = nullptr;
QHash<QUuid, quint16> _outgoingScriptAudioSequenceNumbers;
CodecPluginPointer _codec;
QString _selectedCodecName;
Encoder* _encoder { nullptr };
Encoder* _encoder { nullptr };
QThread _avatarAudioTimerThread;
bool _flushEncoder { false };
};
AvatarMixer.cpp

@@ -365,6 +365,28 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointer<ReceivedMess
message->readPrimitive(&isRequesting);
nodeData->setRequestsDomainListData(isRequesting);
qCDebug(avatars) << "node" << nodeData->getNodeID() << "requestsDomainListData" << isRequesting;
// If we just opened the PAL...
if (isRequesting) {
// For each node in the NodeList...
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode(
// Discover the valid nodes we're ignoring...
[&](const SharedNodePointer& node)->bool {
if (node->getUUID() != senderNode->getUUID() &&
(nodeData->isRadiusIgnoring(node->getUUID()) ||
senderNode->isIgnoringNodeWithID(node->getUUID()))) {
return true;
}
return false;
},
// ...For those nodes, reset the lastBroadcastTime to 0
// so that the AvatarMixer will send Identity data to us
[&](const SharedNodePointer& node) {
nodeData->setLastBroadcastTime(node->getUUID(), 0);
}
);
}
}
}
auto end = usecTimestampNow();

@@ -409,7 +431,31 @@ void AvatarMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message
void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto start = usecTimestampNow();
senderNode->parseIgnoreRequestMessage(message);
auto nodeList = DependencyManager::get<NodeList>();
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
bool addToIgnore;
message->readPrimitive(&addToIgnore);
while (message->getBytesLeftToRead()) {
// parse out the UUID being ignored from the packet
QUuid ignoredUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
// Reset the lastBroadcastTime for the ignored avatar to 0
// so the AvatarMixer knows it'll have to send identity data about the ignored avatar
// to the ignorer if the ignorer unignores.
nodeData->setLastBroadcastTime(ignoredUUID, 0);
// Reset the lastBroadcastTime for the ignorer (FROM THE PERSPECTIVE OF THE IGNORED) to 0
// so the AvatarMixer knows it'll have to send identity data about the ignorer
// to the ignored if the ignorer unignores.
auto ignoredNode = nodeList->nodeWithUUID(ignoredUUID);
AvatarMixerClientData* ignoredNodeData = reinterpret_cast<AvatarMixerClientData*>(ignoredNode->getLinkedData());
ignoredNodeData->setLastBroadcastTime(senderNode->getUUID(), 0);
if (addToIgnore) {
senderNode->addIgnoredNode(ignoredUUID);
} else {
senderNode->removeIgnoredNode(ignoredUUID);
}
}
auto end = usecTimestampNow();
_handleNodeIgnoreRequestPacketElapsedTime += (end - start);
}
AvatarMixerClientData.cpp

@@ -65,15 +65,6 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message) {
// compute the offset to the data payload
return _avatar->parseDataFromBuffer(message.readWithoutCopy(message.getBytesLeftToRead()));
}
bool AvatarMixerClientData::checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid) {
if (_hasReceivedFirstPacketsFrom.find(uuid) == _hasReceivedFirstPacketsFrom.end()) {
_hasReceivedFirstPacketsFrom.insert(uuid);
return false;
}
return true;
}
uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
// return the matching PacketSequenceNumber, or the default if we don't have it
auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);

@@ -102,8 +93,8 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
} else {
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
}
setLastBroadcastTime(other->getUUID(), 0);
DependencyManager::get<NodeList>()->sendUnreliablePacket(*killPacket, *self);
_hasReceivedFirstPacketsFrom.erase(other->getUUID());
}
}
AvatarMixerClientData.h

@@ -45,8 +45,6 @@ public:
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
bool checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid);
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }

@@ -63,8 +61,8 @@ public:
uint16_t getLastReceivedSequenceNumber() const { return _lastReceivedSequenceNumber; }
HRCTime getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = p_high_resolution_clock::now(); }
uint64_t getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = usecTimestampNow(); }
bool getAvatarSessionDisplayNameMustChange() const { return _avatarSessionDisplayNameMustChange; }
void setAvatarSessionDisplayNameMustChange(bool set = true) { _avatarSessionDisplayNameMustChange = set; }

@@ -139,7 +137,6 @@ private:
uint16_t _lastReceivedSequenceNumber { 0 };
std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
std::unordered_set<QUuid> _hasReceivedFirstPacketsFrom;
std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
// this is a map of the last time we encoded an "other" avatar for

@@ -147,7 +144,7 @@ private:
std::unordered_map<QUuid, quint64> _lastOtherAvatarEncodeTime;
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
HRCTime _identityChangeTimestamp;
uint64_t _identityChangeTimestamp;
bool _avatarSessionDisplayNameMustChange{ false };
int _numAvatarsSentLastFrame = 0;
AvatarMixerSlave.cpp

@@ -80,16 +80,6 @@ int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData,
static const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
// FIXME - There is some old logic (unchanged as of 2/17/17) that randomly decides to send an identity
// packet. That logic had the following comment about the constants it uses...
//
// An 80% chance of sending a identity packet within a 5 second interval.
// assuming 60 htz update rate.
//
// Assuming the calculation of the constant is in fact correct for 80% and 60hz and 5 seconds (an assumption
// that I have not verified) then the constant is definitely wrong now, since we send at 45hz.
const float IDENTITY_SEND_PROBABILITY = 1.0f / 187.0f;
void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
quint64 start = usecTimestampNow();
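The FIXME above questions where 1/187 comes from. A minimal sketch of that arithmetic (illustrative only, not part of the commit; the function and variable names are made up): at 60 Hz a 5-second window is 300 frames, and solving 1 - (1 - p)^300 = 0.8 for the per-frame probability p gives roughly 1/187, which matches the constant. At the mixer's actual 45 Hz broadcast rate the same 80%-in-5-seconds target would need about 1/140.

#include <cmath>
#include <cstdio>

// Solve 1 - (1 - p)^frames = targetChance for the per-frame probability p.
static float perFrameProbability(float targetChance, float updateHz, float intervalSecs) {
    float frames = updateHz * intervalSecs;
    return 1.0f - std::pow(1.0f - targetChance, 1.0f / frames);
}

int main() {
    // 80% chance within 5 seconds at 60 Hz -> ~1/187, matching IDENTITY_SEND_PROBABILITY.
    std::printf("60 Hz: 1/%.0f\n", 1.0f / perFrameProbability(0.8f, 60.0f, 5.0f));
    // At the mixer's current 45 Hz rate the same target would be ~1/140.
    std::printf("45 Hz: 1/%.0f\n", 1.0f / perFrameProbability(0.8f, 45.0f, 5.0f));
    return 0;
}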
@@ -137,14 +127,18 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// keep track of the number of other avatar frames skipped
int numAvatarsWithSkippedFrames = 0;
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that are not in the view frustrum
bool getsOutOfView = nodeData->getRequestsDomainListData();
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that they've ignored
bool getsIgnoredByMe = getsOutOfView;
// When this is true, the AvatarMixer will send Avatar data to a client
// about avatars they've ignored or that are out of view
bool PALIsOpen = nodeData->getRequestsDomainListData();
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
bool getsAnyIgnored = getsIgnoredByMe && node->getCanKick();
bool getsAnyIgnored = PALIsOpen && node->getCanKick();
if (PALIsOpen) {
// Increase minimumBytesPerAvatar if the PAL is open
minimumBytesPerAvatar += sizeof(AvatarDataPacket::AvatarGlobalPosition) +
sizeof(AvatarDataPacket::AudioLoudness);
}
// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);

@@ -222,13 +216,14 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// or that has ignored the viewing node
if (!avatarNode->getLinkedData()
|| avatarNode->getUUID() == node->getUUID()
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !getsIgnoredByMe)
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
shouldIgnore = true;
} else {
// Check to see if the space bubble is enabled
if (node->isIgnoreRadiusEnabled() || avatarNode->isIgnoreRadiusEnabled()) {
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Define the scale of the box for the current other node
glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f;

@@ -306,16 +301,9 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
// make sure we send out identity packets to and from new arrivals.
bool forceSend = !nodeData->checkAndSetHasReceivedFirstPacketsFrom(otherNode->getUUID());
// FIXME - this clause seems suspicious "... || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp ..."
if (!overBudget
&& otherNodeData->getIdentityChangeTimestamp().time_since_epoch().count() > 0
&& (forceSend
|| otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp
|| distribution(generator) < IDENTITY_SEND_PROBABILITY)) {
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
if (nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(otherNodeData, node);
}

@@ -335,9 +323,9 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
if (overBudget) {
overBudgetAvatars++;
_stats.overBudgetAvatars++;
detail = AvatarData::NoData;
} else if (!isInView && !getsOutOfView) {
detail = AvatarData::NoData;
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
} else if (!isInView) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
nodeData->incrementAvatarOutOfView();
} else {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
interface/resources/fonts/hifi-glyphs.ttf (BIN, normal file changed to executable file; binary file not shown)
NameCard.qml

@@ -31,6 +31,7 @@ Item {
property real displayNameTextPixelSize: 18
property int usernameTextHeight: 12
property real audioLevel: 0.0
property real avgAudioLevel: 0.0
property bool isMyCard: false
property bool selected: false
property bool isAdmin: false

@@ -55,7 +56,7 @@ Item {
id: textContainer
// Size
width: parent.width - /*avatarImage.width - parent.spacing - */parent.anchors.leftMargin - parent.anchors.rightMargin
height: childrenRect.height
height: selected || isMyCard ? childrenRect.height : childrenRect.height - 15
anchors.verticalCenter: parent.verticalCenter
// DisplayName field for my card

@@ -273,6 +274,7 @@ Item {
// Style
radius: 4
color: "#c5c5c5"
visible: isMyCard || selected
// Rectangle for the zero-gain point on the VU meter
Rectangle {
id: vuMeterZeroGain

@@ -303,6 +305,7 @@ Item {
id: vuMeterBase
// Anchors
anchors.fill: parent
visible: isMyCard || selected
// Style
color: parent.color
radius: parent.radius

@@ -310,6 +313,7 @@ Item {
// Rectangle for the VU meter audio level
Rectangle {
id: vuMeterLevel
visible: isMyCard || selected
// Size
width: (thisNameCard.audioLevel) * parent.width
// Style
Pal.qml

@@ -13,6 +13,7 @@
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtGraphicalEffects 1.0
import Qt.labs.settings 1.0
import "../styles-uit"
import "../controls-uit" as HifiControls

@@ -33,7 +34,7 @@ Rectangle {
property int actionButtonAllowance: actionButtonWidth * 2
property int minNameCardWidth: palContainer.width - (actionButtonAllowance * 2) - 4 - hifi.dimensions.scrollbarBackgroundWidth
property int nameCardWidth: minNameCardWidth + (iAmAdmin ? 0 : actionButtonAllowance)
property var myData: ({displayName: "", userName: "", audioLevel: 0.0, admin: true}) // valid dummy until set
property var myData: ({displayName: "", userName: "", audioLevel: 0.0, avgAudioLevel: 0.0, admin: true}) // valid dummy until set
property var ignored: ({}); // Keep a local list of ignored avatars & their data. Necessary because HashMap is slow to respond after ignoring.
property var userModelData: [] // This simple list is essentially a mirror of the userModel listModel without all the extra complexities.
property bool iAmAdmin: false

@@ -57,6 +58,8 @@ Rectangle {
category: "pal"
property bool filtered: false
property int nearDistance: 30
property int sortIndicatorColumn: 1
property int sortIndicatorOrder: Qt.AscendingOrder
}
function refreshWithFilter() {
// We should just be able to set settings.filtered to filter.checked, but see #3249, so send to .js for saving.

@@ -96,6 +99,7 @@ Rectangle {
displayName: myData.displayName
userName: myData.userName
audioLevel: myData.audioLevel
avgAudioLevel: myData.avgAudioLevel
isMyCard: true
// Size
width: minNameCardWidth

@@ -190,8 +194,24 @@ Rectangle {
centerHeaderText: true
sortIndicatorVisible: true
headerVisible: true
onSortIndicatorColumnChanged: sortModel()
onSortIndicatorOrderChanged: sortModel()
sortIndicatorColumn: settings.sortIndicatorColumn
sortIndicatorOrder: settings.sortIndicatorOrder
onSortIndicatorColumnChanged: {
settings.sortIndicatorColumn = sortIndicatorColumn
sortModel()
}
onSortIndicatorOrderChanged: {
settings.sortIndicatorOrder = sortIndicatorOrder
sortModel()
}
TableViewColumn {
role: "avgAudioLevel"
title: "LOUD"
width: actionButtonWidth
movable: false
resizable: false
}
TableViewColumn {
id: displayNameHeader

@@ -201,13 +221,6 @@ Rectangle {
movable: false
resizable: false
}
TableViewColumn {
role: "personalMute"
title: "MUTE"
width: actionButtonWidth
movable: false
resizable: false
}
TableViewColumn {
role: "ignore"
title: "IGNORE"

@@ -238,7 +251,7 @@ Rectangle {
// This Rectangle refers to each Row in the table.
rowDelegate: Rectangle { // The only way I know to specify a row height.
// Size
height: rowHeight
height: styleData.selected ? rowHeight : rowHeight - 15
color: styleData.selected
? hifi.colors.orangeHighlight
: styleData.alternate ? hifi.colors.tableRowLightEven : hifi.colors.tableRowLightOdd

@@ -249,6 +262,8 @@ Rectangle {
id: itemCell
property bool isCheckBox: styleData.role === "personalMute" || styleData.role === "ignore"
property bool isButton: styleData.role === "mute" || styleData.role === "kick"
property bool isAvgAudio: styleData.role === "avgAudioLevel"
// This NameCard refers to the cell that contains an avatar's
// DisplayName and UserName
NameCard {

@@ -257,7 +272,8 @@ Rectangle {
displayName: styleData.value
userName: model ? model.userName : ""
audioLevel: model ? model.audioLevel : 0.0
visible: !isCheckBox && !isButton
avgAudioLevel: model ? model.avgAudioLevel : 0.0
visible: !isCheckBox && !isButton && !isAvgAudio
uuid: model ? model.sessionId : ""
selected: styleData.selected
isAdmin: model && model.admin

@@ -267,6 +283,33 @@ Rectangle {
// Anchors
anchors.left: parent.left
}
HifiControls.GlyphButton {
function getGlyph() {
var fileName = "vol_";
if (model["personalMute"]) {
fileName += "x_";
}
fileName += (4.0*(model ? model.avgAudioLevel : 0.0)).toFixed(0);
return hifi.glyphs[fileName];
}
id: avgAudioVolume
visible: isAvgAudio
glyph: getGlyph()
width: 32
size: height
anchors.verticalCenter: parent.verticalCenter
anchors.horizontalCenter: parent.horizontalCenter
onClicked: {
// cannot change mute status when ignoring
if (!model["ignore"]) {
var newValue = !model["personalMute"];
userModel.setProperty(model.userIndex, "personalMute", newValue)
userModelData[model.userIndex]["personalMute"] = newValue // Defensive programming
Users["personalMute"](model.sessionId, newValue)
UserActivityLogger["palAction"](newValue ? "personalMute" : "un-personalMute", model.sessionId)
}
}
}
// This CheckBox belongs in the columns that contain the stateful action buttons ("Mute" & "Ignore" for now)
// KNOWN BUG with the Checkboxes: When clicking in the center of the sorting header, the checkbox

@@ -296,6 +339,7 @@ Rectangle {
} else {
delete ignored[model.sessionId]
}
avgAudioVolume.glyph = avgAudioVolume.getGlyph()
}
// http://doc.qt.io/qt-5/qtqml-syntax-propertybinding.html#creating-property-bindings-from-javascript
// I'm using an explicit binding here because clicking a checkbox breaks the implicit binding as set by

@@ -311,7 +355,7 @@ Rectangle {
visible: isButton
anchors.centerIn: parent
width: 32
height: 24
height: 32
onClicked: {
Users[styleData.role](model.sessionId)
UserActivityLogger["palAction"](styleData.role, model.sessionId)

@@ -363,7 +407,7 @@ Rectangle {
anchors.left: table.left
anchors.top: table.top
anchors.topMargin: 1
anchors.leftMargin: nameCardWidth/2 + displayNameHeaderMetrics.width/2 + 6
anchors.leftMargin: actionButtonWidth + nameCardWidth/2 + displayNameHeaderMetrics.width/2 + 6
RalewayRegular {
id: helpText
text: "[?]"

@@ -537,16 +581,21 @@ Rectangle {
break;
case 'updateAudioLevel':
for (var userId in message.params) {
var audioLevel = message.params[userId];
var audioLevel = message.params[userId][0];
var avgAudioLevel = message.params[userId][1];
// If the userId is 0, we're updating "myData".
if (userId == 0) {
myData.audioLevel = audioLevel;
myCard.audioLevel = audioLevel; // Defensive programming
myData.avgAudioLevel = avgAudioLevel;
myCard.avgAudioLevel = avgAudioLevel;
} else {
var userIndex = findSessionIndex(userId);
if (userIndex != -1) {
userModel.setProperty(userIndex, "audioLevel", audioLevel);
userModelData[userIndex].audioLevel = audioLevel; // Defensive programming
userModel.setProperty(userIndex, "avgAudioLevel", avgAudioLevel);
userModelData[userIndex].avgAudioLevel = avgAudioLevel;
}
}
}
Glyph constants (QML)

@@ -318,5 +318,15 @@ Item {
readonly property string deg: "\\"
readonly property string px: "|"
readonly property string editPencil: "\ue00d"
readonly property string vol_0: "\ue00e"
readonly property string vol_1: "\ue00f"
readonly property string vol_2: "\ue010"
readonly property string vol_3: "\ue011"
readonly property string vol_4: "\ue012"
readonly property string vol_x_0: "\ue013"
readonly property string vol_x_1: "\ue014"
readonly property string vol_x_2: "\ue015"
readonly property string vol_x_3: "\ue016"
readonly property string vol_x_4: "\ue017"
}
}
Application.cpp

@@ -549,6 +549,7 @@ const float DEFAULT_DESKTOP_TABLET_SCALE_PERCENT = 75.0f;
const bool DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR = true;
const bool DEFAULT_HMD_TABLET_BECOMES_TOOLBAR = false;
const bool DEFAULT_TABLET_VISIBLE_TO_OTHERS = false;
const bool DEFAULT_PREFER_AVATAR_FINGER_OVER_STYLUS = false;
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bool runServer, QString runServerPathOption) :
QApplication(argc, argv),

@@ -572,6 +573,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_desktopTabletBecomesToolbarSetting("desktopTabletBecomesToolbar", DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR),
_hmdTabletBecomesToolbarSetting("hmdTabletBecomesToolbar", DEFAULT_HMD_TABLET_BECOMES_TOOLBAR),
_tabletVisibleToOthersSetting("tabletVisibleToOthers", DEFAULT_TABLET_VISIBLE_TO_OTHERS),
_preferAvatarFingerOverStylusSetting("preferAvatarFingerOverStylus", DEFAULT_PREFER_AVATAR_FINGER_OVER_STYLUS),
_constrainToolbarPosition("toolbar/constrainToolbarToCenterX", true),
_scaleMirror(1.0f),
_rotateMirror(0.0f),

@@ -2362,6 +2364,10 @@ void Application::setTabletVisibleToOthersSetting(bool value) {
updateSystemTabletMode();
}
void Application::setPreferAvatarFingerOverStylus(bool value) {
_preferAvatarFingerOverStylusSetting.set(value);
}
void Application::setSettingConstrainToolbarPosition(bool setting) {
_constrainToolbarPosition.set(setting);
DependencyManager::get<OffscreenUi>()->setConstrainToolbarToCenterX(setting);

@@ -2918,10 +2924,12 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;
case Qt::Key_P: {
bool isFirstPersonChecked = Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson);
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, !isFirstPersonChecked);
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, isFirstPersonChecked);
cameraMenuChanged();
if (!(isShifted || isMeta || isOption)) {
bool isFirstPersonChecked = Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson);
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, !isFirstPersonChecked);
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, isFirstPersonChecked);
cameraMenuChanged();
}
break;
}

@@ -5184,6 +5192,7 @@ void Application::updateWindowTitle() const {
#endif
_window->setWindowTitle(title);
}
void Application::clearDomainOctreeDetails() {
// if we're about to quit, we really don't need to do any of these things...

@@ -5213,6 +5222,12 @@ void Application::clearDomainOctreeDetails() {
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DEFAULT);
_recentlyClearedDomain = true;
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
DependencyManager::get<AnimationCache>()->clearUnusedResources();
DependencyManager::get<ModelCache>()->clearUnusedResources();
DependencyManager::get<SoundCache>()->clearUnusedResources();
DependencyManager::get<TextureCache>()->clearUnusedResources();
}
void Application::domainChanged(const QString& domainHostname) {
Application.h

@@ -220,6 +220,8 @@ public:
void setHmdTabletBecomesToolbarSetting(bool value);
bool getTabletVisibleToOthersSetting() { return _tabletVisibleToOthersSetting.get(); }
void setTabletVisibleToOthersSetting(bool value);
bool getPreferAvatarFingerOverStylus() { return _preferAvatarFingerOverStylusSetting.get(); }
void setPreferAvatarFingerOverStylus(bool value);
float getSettingConstrainToolbarPosition() { return _constrainToolbarPosition.get(); }
void setSettingConstrainToolbarPosition(bool setting);

@@ -565,6 +567,7 @@ private:
Setting::Handle<bool> _desktopTabletBecomesToolbarSetting;
Setting::Handle<bool> _hmdTabletBecomesToolbarSetting;
Setting::Handle<bool> _tabletVisibleToOthersSetting;
Setting::Handle<bool> _preferAvatarFingerOverStylusSetting;
Setting::Handle<bool> _constrainToolbarPosition;
float _scaleMirror;
Menu.cpp

@@ -577,7 +577,7 @@ Menu::Menu() {
nodeList.data(), SLOT(toggleSendNewerDSConnectVersion(bool)));
#endif
// Developer >> Tests >>>
MenuWrapper* testMenu = developerMenu->addMenu("Tests");
addActionToQMenuAndActionHash(testMenu, MenuOption::RunClientScriptTests, 0, dialogsManager.data(), SLOT(showTestingResults()));

@@ -628,9 +628,9 @@ Menu::Menu() {
auto scope = DependencyManager::get<AudioScope>();
MenuWrapper* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope");
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScope, Qt::CTRL | Qt::Key_P, false,
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScope, Qt::CTRL | Qt::Key_F2, false,
scope.data(), SLOT(toggle()));
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopePause, Qt::CTRL | Qt::SHIFT | Qt::Key_P, false,
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopePause, Qt::CTRL | Qt::SHIFT | Qt::Key_F2, false,
scope.data(), SLOT(togglePause()));
addDisabledActionAndSeparator(audioScopeMenu, "Display Frames");
Avatar.cpp

@@ -348,6 +348,8 @@ void Avatar::simulate(float deltaTime, bool inView) {
PROFILE_RANGE(simulation, "updateJoints");
if (inView && _hasNewJointData) {
_skeletonModel->getRig()->copyJointsFromJointData(_jointData);
glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
_skeletonModel->getRig()->computeExternalPoses(rootTransform);
_jointDataSimulationRate.increment();
_skeletonModel->simulate(deltaTime, true);
AvatarManager.cpp

@@ -329,7 +329,7 @@ void AvatarManager::removeAvatar(const QUuid& sessionUUID, KillAvatarReason remo
}
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar, KillAvatarReason removalReason) {
AvatarHashMap::handleRemovedAvatar(removedAvatar);
AvatarHashMap::handleRemovedAvatar(removedAvatar, removalReason);
// removedAvatar is a shared pointer to an AvatarData but we need to get to the derived Avatar
// class in this context so we can call methods that don't exist at the base class.
CauterizedModel.cpp

@@ -110,13 +110,7 @@ void CauterizedModel::updateClusterMatrices() {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
// Once computed the cluster matrices, update the buffer(s)

@@ -149,13 +143,7 @@ void CauterizedModel::updateClusterMatrices() {
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
jointMatrix = cauterizeMatrix;
}
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
if (!_cauterizeBoneSet.empty() && (state.clusterMatrices.size() > 1)) {
SoftAttachmentModel.cpp

@@ -60,13 +60,7 @@ void SoftAttachmentModel::updateClusterMatrices() {
} else {
jointMatrix = _rig->getJointTransform(cluster.jointIndex);
}
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
// Once computed the cluster matrices, update the buffer(s)
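The CauterizedModel and SoftAttachmentModel hunks above collapse the repeated per-call-site SSE2 #ifdef into a single glm_mat4u_mul() call. The commit introduces that helper elsewhere and its definition is not shown here; a hedged sketch of what such a helper could look like, assuming it simply centralizes the old conditional (the "u" presumably standing for unaligned inputs), is:

#include <glm/glm.hpp>

// Hypothetical sketch only; the actual helper added by this commit may differ.
inline void glm_mat4u_mul(const glm::mat4& a, const glm::mat4& b, glm::mat4& result) {
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
    // SIMD path: glm_mat4_mul wants aligned data, so copy the possibly
    // unaligned inputs into local temporaries first (as the removed code did).
    glm::mat4 alignedA = a, alignedB = b, out;
    glm_mat4_mul((glm_vec4*)&alignedA, (glm_vec4*)&alignedB, (glm_vec4*)&out);
    result = out;
#else
    // Portable fallback: plain glm matrix multiply.
    result = a * b;
#endif
}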
setupPreferences()

@@ -107,6 +107,12 @@ void setupPreferences() {
auto setter = [](bool value) { qApp->setTabletVisibleToOthersSetting(value); };
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Tablet Is Visible To Others", getter, setter));
}
{
auto getter = []()->bool { return qApp->getPreferAvatarFingerOverStylus(); };
auto setter = [](bool value) { qApp->setPreferAvatarFingerOverStylus(value); };
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Prefer Avatar Finger Over Stylus", getter, setter));
}
// Snapshots
static const QString SNAPSHOTS { "Snapshots" };
{
Line3DOverlay.cpp

@@ -23,10 +23,15 @@ Line3DOverlay::Line3DOverlay() :
Line3DOverlay::Line3DOverlay(const Line3DOverlay* line3DOverlay) :
Base3DOverlay(line3DOverlay),
_start(line3DOverlay->_start),
_end(line3DOverlay->_end),
_geometryCacheID(DependencyManager::get<GeometryCache>()->allocateID())
{
setParentID(line3DOverlay->getParentID());
setParentJointIndex(line3DOverlay->getParentJointIndex());
setLocalTransform(line3DOverlay->getLocalTransform());
_direction = line3DOverlay->getDirection();
_length = line3DOverlay->getLength();
_endParentID = line3DOverlay->getEndParentID();
_endParentJointIndex = line3DOverlay->getEndJointIndex();
}
Line3DOverlay::~Line3DOverlay() {

@@ -37,17 +42,23 @@ Line3DOverlay::~Line3DOverlay() {
}
glm::vec3 Line3DOverlay::getStart() const {
bool success;
glm::vec3 worldStart = localToWorld(_start, getParentID(), getParentJointIndex(), success);
if (!success) {
qDebug() << "Line3DOverlay::getStart failed";
}
return worldStart;
return getPosition();
}
glm::vec3 Line3DOverlay::getEnd() const {
bool success;
glm::vec3 worldEnd = localToWorld(_end, getParentID(), getParentJointIndex(), success);
glm::vec3 localEnd;
glm::vec3 worldEnd;
if (_endParentID != QUuid()) {
glm::vec3 localOffset = _direction * _length;
bool success;
worldEnd = localToWorld(localOffset, _endParentID, _endParentJointIndex, success);
return worldEnd;
}
localEnd = getLocalEnd();
worldEnd = localToWorld(localEnd, getParentID(), getParentJointIndex(), success);
if (!success) {
qDebug() << "Line3DOverlay::getEnd failed";
}

@@ -55,27 +66,55 @@ glm::vec3 Line3DOverlay::getEnd() const {
}
void Line3DOverlay::setStart(const glm::vec3& start) {
bool success;
_start = worldToLocal(start, getParentID(), getParentJointIndex(), success);
if (!success) {
qDebug() << "Line3DOverlay::setStart failed";
}
setPosition(start);
}
void Line3DOverlay::setEnd(const glm::vec3& end) {
bool success;
_end = worldToLocal(end, getParentID(), getParentJointIndex(), success);
glm::vec3 localStart;
glm::vec3 localEnd;
glm::vec3 offset;
if (_endParentID != QUuid()) {
offset = worldToLocal(end, _endParentID, _endParentJointIndex, success);
} else {
localStart = getLocalStart();
localEnd = worldToLocal(end, getParentID(), getParentJointIndex(), success);
offset = localEnd - localStart;
}
if (!success) {
qDebug() << "Line3DOverlay::setEnd failed";
return;
}
_length = glm::length(offset);
if (_length > 0.0f) {
_direction = glm::normalize(offset);
} else {
_direction = glm::vec3(0.0f);
}
}
void Line3DOverlay::setLocalEnd(const glm::vec3& localEnd) {
glm::vec3 offset;
if (_endParentID != QUuid()) {
offset = localEnd;
} else {
glm::vec3 localStart = getLocalStart();
offset = localEnd - localStart;
}
_length = glm::length(offset);
if (_length > 0.0f) {
_direction = glm::normalize(offset);
} else {
_direction = glm::vec3(0.0f);
}
}
AABox Line3DOverlay::getBounds() const {
auto extents = Extents{};
extents.addPoint(_start);
extents.addPoint(_end);
extents.transform(getTransform());
extents.addPoint(getStart());
extents.addPoint(getEnd());
return AABox(extents);
}

@@ -90,18 +129,20 @@ void Line3DOverlay::render(RenderArgs* args) {
glm::vec4 colorv4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
auto batch = args->_batch;
if (batch) {
batch->setModelTransform(getTransform());
batch->setModelTransform(Transform());
glm::vec3 start = getStart();
glm::vec3 end = getEnd();
auto geometryCache = DependencyManager::get<GeometryCache>();
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
geometryCache->bindSimpleProgram(*batch, false, false, false, true, true);
geometryCache->renderDashedLine(*batch, _start, _end, colorv4, _geometryCacheID);
geometryCache->renderDashedLine(*batch, start, end, colorv4, _geometryCacheID);
} else if (_glow > 0.0f) {
geometryCache->renderGlowLine(*batch, _start, _end, colorv4, _glow, _glowWidth, _geometryCacheID);
geometryCache->renderGlowLine(*batch, start, end, colorv4, _glow, _glowWidth, _geometryCacheID);
} else {
geometryCache->bindSimpleProgram(*batch, false, false, false, true, true);
geometryCache->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
geometryCache->renderLine(*batch, start, end, colorv4, _geometryCacheID);
}
}
}

@@ -116,6 +157,10 @@ const render::ShapeKey Line3DOverlay::getShapeKey() {
void Line3DOverlay::setProperties(const QVariantMap& originalProperties) {
QVariantMap properties = originalProperties;
glm::vec3 newStart(0.0f);
bool newStartSet { false };
glm::vec3 newEnd(0.0f);
bool newEndSet { false };
auto start = properties["start"];
// if "start" property was not there, check to see if they included aliases: startPoint

@@ -123,30 +168,57 @@ void Line3DOverlay::setProperties(const QVariantMap& originalProperties) {
start = properties["startPoint"];
}
if (start.isValid()) {
setStart(vec3FromVariant(start));
newStart = vec3FromVariant(start);
newStartSet = true;
}
properties.remove("start"); // so that Base3DOverlay doesn't respond to it
auto localStart = properties["localStart"];
if (localStart.isValid()) {
_start = vec3FromVariant(localStart);
}
properties.remove("localStart"); // so that Base3DOverlay doesn't respond to it
auto end = properties["end"];
// if "end" property was not there, check to see if they included aliases: endPoint
if (!end.isValid()) {
end = properties["endPoint"];
}
if (end.isValid()) {
setEnd(vec3FromVariant(end));
newEnd = vec3FromVariant(end);
newEndSet = true;
}
properties.remove("end"); // so that Base3DOverlay doesn't respond to it
auto length = properties["length"];
if (length.isValid()) {
_length = length.toFloat();
}
Base3DOverlay::setProperties(properties);
auto endParentIDProp = properties["endParentID"];
if (endParentIDProp.isValid()) {
_endParentID = QUuid(endParentIDProp.toString());
}
auto endParentJointIndexProp = properties["endParentJointIndex"];
if (endParentJointIndexProp.isValid()) {
_endParentJointIndex = endParentJointIndexProp.toInt();
}
auto localStart = properties["localStart"];
if (localStart.isValid()) {
glm::vec3 tmpLocalEnd = getLocalEnd();
setLocalStart(vec3FromVariant(localStart));
setLocalEnd(tmpLocalEnd);
}
auto localEnd = properties["localEnd"];
if (localEnd.isValid()) {
_end = vec3FromVariant(localEnd);
setLocalEnd(vec3FromVariant(localEnd));
}
// these are saved until after Base3DOverlay::setProperties so parenting infomation can be set, first
if (newStartSet) {
setStart(newStart);
}
if (newEndSet) {
setEnd(newEnd);
}
properties.remove("localEnd"); // so that Base3DOverlay doesn't respond to it
auto glow = properties["glow"];
if (glow.isValid()) {

@@ -161,7 +233,6 @@ void Line3DOverlay::setProperties(const QVariantMap& originalProperties) {
setGlow(glowWidth.toFloat());
}
Base3DOverlay::setProperties(properties);
}
QVariant Line3DOverlay::getProperty(const QString& property) {

@@ -171,6 +242,15 @@ QVariant Line3DOverlay::getProperty(const QString& property) {
if (property == "end" || property == "endPoint" || property == "p2") {
return vec3toVariant(getEnd());
}
if (property == "localStart") {
return vec3toVariant(getLocalStart());
}
if (property == "localEnd") {
return vec3toVariant(getLocalEnd());
}
if (property == "length") {
return QVariant(getLength());
}
return Base3DOverlay::getProperty(property);
}
@ -15,7 +15,7 @@
|
|||
|
||||
class Line3DOverlay : public Base3DOverlay {
|
||||
Q_OBJECT
|
||||
|
||||
|
||||
public:
|
||||
static QString const TYPE;
|
||||
virtual QString getType() const override { return TYPE; }
|
||||
|
@ -37,6 +37,9 @@ public:
|
|||
void setStart(const glm::vec3& start);
|
||||
void setEnd(const glm::vec3& end);
|
||||
|
||||
void setLocalStart(const glm::vec3& localStart) { setLocalPosition(localStart); }
|
||||
void setLocalEnd(const glm::vec3& localEnd);
|
||||
|
||||
void setGlow(const float& glow) { _glow = glow; }
|
||||
void setGlowWidth(const float& glowWidth) { _glowWidth = glowWidth; }
|
||||
|
||||
|
@ -47,13 +50,26 @@ public:
|
|||
|
||||
virtual void locationChanged(bool tellPhysics = true) override;
|
||||
|
||||
protected:
|
||||
glm::vec3 _start;
|
||||
glm::vec3 _end;
|
||||
glm::vec3 getDirection() const { return _direction; }
|
||||
float getLength() const { return _length; }
|
||||
glm::vec3 getLocalStart() const { return getLocalPosition(); }
|
||||
glm::vec3 getLocalEnd() const { return getLocalStart() + _direction * _length; }
|
||||
QUuid getEndParentID() const { return _endParentID; }
|
||||
quint16 getEndJointIndex() const { return _endParentJointIndex; }
|
||||
|
||||
private:
|
||||
QUuid _endParentID;
|
||||
quint16 _endParentJointIndex { INVALID_JOINT_INDEX };
|
||||
|
||||
// _direction and _length are in the parent's frame. If _endParentID is set, they are
|
||||
// relative to that. Otherwise, they are relative to the local-start-position (which is the
|
||||
// same as localPosition)
|
||||
glm::vec3 _direction; // in parent frame
|
||||
float _length { 1.0 }; // in parent frame
|
||||
|
||||
float _glow { 0.0 };
|
||||
float _glowWidth { 0.0 };
|
||||
int _geometryCacheID;
|
||||
};
|
||||
|
||||
|
||||
#endif // hifi_Line3DOverlay_h
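Given the representation documented in the header (a direction and a length in the parent frame), the local end point is always derived rather than stored. A one-line sketch of that derivation, as a hypothetical free function:

#include <glm/vec3.hpp>

glm::vec3 localEndFrom(const glm::vec3& localStart, const glm::vec3& direction, float length) {
    return localStart + direction * length;   // mirrors getLocalEnd() in the header above
}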
|
||||
|
|
|
@ -62,7 +62,11 @@ namespace render {
|
|||
if (overlay->is3D()) {
|
||||
auto overlay3D = std::dynamic_pointer_cast<Base3DOverlay>(overlay);
|
||||
if (overlay3D->isAA())
|
||||
return (overlay3D->getDrawInFront() ? LAYER_3D_FRONT : LAYER_3D);
|
||||
if (overlay3D->getDrawInFront()) {
|
||||
return LAYER_3D_FRONT;
|
||||
} else {
|
||||
return LAYER_3D;
|
||||
}
|
||||
else
|
||||
return LAYER_NO_AA;
|
||||
} else {
|
||||
|
|
|
@ -198,18 +198,27 @@ void Web3DOverlay::render(RenderArgs* args) {
|
|||
_webSurface->getRootItem()->setProperty("scriptURL", _scriptURL);
|
||||
currentContext->makeCurrent(currentSurface);
|
||||
|
||||
auto selfOverlayID = getOverlayID();
|
||||
std::weak_ptr<Web3DOverlay> weakSelf = std::dynamic_pointer_cast<Web3DOverlay>(qApp->getOverlays().getOverlay(selfOverlayID));
|
||||
auto forwardPointerEvent = [=](OverlayID overlayID, const PointerEvent& event) {
|
||||
if (overlayID == getOverlayID()) {
|
||||
handlePointerEvent(event);
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
if (overlayID == selfOverlayID) {
|
||||
self->handlePointerEvent(event);
|
||||
}
|
||||
};
|
||||
|
||||
_mousePressConnection = connect(&(qApp->getOverlays()), &Overlays::mousePressOnOverlay, forwardPointerEvent);
|
||||
_mouseReleaseConnection = connect(&(qApp->getOverlays()), &Overlays::mouseReleaseOnOverlay, forwardPointerEvent);
|
||||
_mouseMoveConnection = connect(&(qApp->getOverlays()), &Overlays::mouseMoveOnOverlay, forwardPointerEvent);
|
||||
_hoverLeaveConnection = connect(&(qApp->getOverlays()), &Overlays::hoverLeaveOverlay,
|
||||
[=](OverlayID overlayID, const PointerEvent& event) {
|
||||
if (this->_pressed && this->getOverlayID() == overlayID) {
|
||||
_mousePressConnection = connect(&(qApp->getOverlays()), &Overlays::mousePressOnOverlay, this, forwardPointerEvent, Qt::DirectConnection);
|
||||
_mouseReleaseConnection = connect(&(qApp->getOverlays()), &Overlays::mouseReleaseOnOverlay, this, forwardPointerEvent, Qt::DirectConnection);
|
||||
_mouseMoveConnection = connect(&(qApp->getOverlays()), &Overlays::mouseMoveOnOverlay, this, forwardPointerEvent, Qt::DirectConnection);
|
||||
_hoverLeaveConnection = connect(&(qApp->getOverlays()), &Overlays::hoverLeaveOverlay, this, [=](OverlayID overlayID, const PointerEvent& event) {
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
if (self->_pressed && overlayID == selfOverlayID) {
|
||||
// If the user mouses off the overlay while the button is down, simulate a touch end.
|
||||
QTouchEvent::TouchPoint point;
|
||||
point.setId(event.getID());
|
||||
|
@ -222,12 +231,12 @@ void Web3DOverlay::render(RenderArgs* args) {
|
|||
touchPoints.push_back(point);
|
||||
QTouchEvent* touchEvent = new QTouchEvent(QEvent::TouchEnd, nullptr, Qt::NoModifier, Qt::TouchPointReleased,
|
||||
touchPoints);
|
||||
touchEvent->setWindow(_webSurface->getWindow());
|
||||
touchEvent->setWindow(self->_webSurface->getWindow());
|
||||
touchEvent->setDevice(&_touchDevice);
|
||||
touchEvent->setTarget(_webSurface->getRootItem());
|
||||
QCoreApplication::postEvent(_webSurface->getWindow(), touchEvent);
|
||||
touchEvent->setTarget(self->_webSurface->getRootItem());
|
||||
QCoreApplication::postEvent(self->_webSurface->getWindow(), touchEvent);
|
||||
}
|
||||
});
|
||||
}, Qt::DirectConnection);
|
||||
|
||||
_emitScriptEventConnection = connect(this, &Web3DOverlay::scriptEventReceived, _webSurface.data(), &OffscreenQmlSurface::emitScriptEvent);
|
||||
_webEventReceivedConnection = connect(_webSurface.data(), &OffscreenQmlSurface::webEventReceived, this, &Web3DOverlay::webEventReceived);
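The connection changes above replace captures of `this` with a std::weak_ptr that is locked on every callback, so a destroyed overlay can no longer be dereferenced by a late signal. A standalone sketch of the same pattern, using a plain std::function in place of the Qt connection:

#include <functional>
#include <iostream>
#include <memory>

struct OverlaySketch {
    void handlePointerEvent(int id) { std::cout << "event " << id << "\n"; }
};

int main() {
    auto overlay = std::make_shared<OverlaySketch>();
    std::weak_ptr<OverlaySketch> weakOverlay = overlay;

    // the callback owns only a weak reference; it re-checks liveness on every call
    std::function<void(int)> forward = [weakOverlay](int id) {
        if (auto self = weakOverlay.lock()) {
            self->handlePointerEvent(id);
        }
        // otherwise: the overlay is gone, silently drop the event
    };

    forward(1);        // dispatched
    overlay.reset();   // overlay destroyed elsewhere (e.g. removed from the scene)
    forward(2);        // safely ignored instead of touching a dangling pointer
    return 0;
}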
|
||||
|
|
|
@ -50,15 +50,9 @@ glm::vec3 AnimPose::xformVector(const glm::vec3& rhs) const {
|
|||
}
|
||||
|
||||
AnimPose AnimPose::operator*(const AnimPose& rhs) const {
|
||||
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
|
||||
glm::mat4 result;
|
||||
glm::mat4 lhsMat = *this;
|
||||
glm::mat4 rhsMat = rhs;
|
||||
glm_mat4_mul((glm_vec4*)&lhsMat, (glm_vec4*)&rhsMat, (glm_vec4*)&result);
|
||||
glm_mat4u_mul(*this, rhs, result);
|
||||
return AnimPose(result);
|
||||
#else
|
||||
return AnimPose(static_cast<glm::mat4>(*this) * static_cast<glm::mat4>(rhs));
|
||||
#endif
|
||||
}
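The new glm_mat4u_mul call is an engine helper for an unaligned SIMD 4x4 multiply; its exact signature is not shown in this hunk, so the sketch below is a generic SSE2 version built only from standard intrinsics and glm's column-major layout, not the engine's helper itself:

#include <emmintrin.h>
#include <glm/mat4x4.hpp>
#include <glm/gtc/type_ptr.hpp>

inline void mat4MulUnaligned(const glm::mat4& lhs, const glm::mat4& rhs, glm::mat4& out) {
    const float* a = glm::value_ptr(lhs);   // columns of lhs, 4 floats each
    const float* b = glm::value_ptr(rhs);
    float* r = glm::value_ptr(out);

    __m128 col0 = _mm_loadu_ps(a + 0);
    __m128 col1 = _mm_loadu_ps(a + 4);
    __m128 col2 = _mm_loadu_ps(a + 8);
    __m128 col3 = _mm_loadu_ps(a + 12);

    for (int j = 0; j < 4; ++j) {           // out column j = lhs * (rhs column j)
        __m128 bj0 = _mm_set1_ps(b[4 * j + 0]);
        __m128 bj1 = _mm_set1_ps(b[4 * j + 1]);
        __m128 bj2 = _mm_set1_ps(b[4 * j + 2]);
        __m128 bj3 = _mm_set1_ps(b[4 * j + 3]);
        __m128 col = _mm_add_ps(_mm_add_ps(_mm_mul_ps(col0, bj0), _mm_mul_ps(col1, bj1)),
                                _mm_add_ps(_mm_mul_ps(col2, bj2), _mm_mul_ps(col3, bj3)));
        _mm_storeu_ps(r + 4 * j, col);
    }
}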
|
||||
|
||||
AnimPose AnimPose::inverse() const {
|
||||
|
|
|
@ -1346,8 +1346,13 @@ void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
|
|||
_internalPoseSet._relativePoses[i].trans() = relativeDefaultPoses[i].trans();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Rig::computeExternalPoses(const glm::mat4& modelOffsetMat) {
|
||||
_modelOffset = AnimPose(modelOffsetMat);
|
||||
_geometryToRigTransform = _modelOffset * _geometryOffset;
|
||||
_rigToGeometryTransform = glm::inverse(_geometryToRigTransform);
|
||||
|
||||
// build absolute poses and copy to externalPoseSet
|
||||
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
|
||||
QWriteLocker writeLock(&_externalPoseSetLock);
|
||||
_externalPoseSet = _internalPoseSet;
|
||||
|
|
|
@ -210,6 +210,7 @@ public:
|
|||
|
||||
void copyJointsIntoJointData(QVector<JointData>& jointDataVec) const;
|
||||
void copyJointsFromJointData(const QVector<JointData>& jointDataVec);
|
||||
void computeExternalPoses(const glm::mat4& modelOffsetMat);
|
||||
|
||||
void computeAvatarBoundingCapsule(const FBXGeometry& geometry, float& radiusOut, float& heightOut, glm::vec3& offsetOut) const;
|
||||
|
||||
|
|
|
@ -186,6 +186,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
bool cullSmallChanges = (dataDetail == CullSmallData);
|
||||
bool sendAll = (dataDetail == SendAllData);
|
||||
bool sendMinimum = (dataDetail == MinimumData);
|
||||
bool sendPALMinimum = (dataDetail == PALMinimum);
|
||||
|
||||
lazyInitHeadData();
|
||||
|
||||
|
@ -222,24 +223,41 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
auto parentID = getParentID();
|
||||
|
||||
bool hasAvatarGlobalPosition = true; // always include global position
|
||||
bool hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
|
||||
bool hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
|
||||
bool hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
|
||||
bool hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
|
||||
bool hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
|
||||
bool hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
|
||||
bool hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
|
||||
bool hasAvatarOrientation = false;
|
||||
bool hasAvatarBoundingBox = false;
|
||||
bool hasAvatarScale = false;
|
||||
bool hasLookAtPosition = false;
|
||||
bool hasAudioLoudness = false;
|
||||
bool hasSensorToWorldMatrix = false;
|
||||
bool hasAdditionalFlags = false;
|
||||
|
||||
// local position, and parent info only apply to avatars that are parented. The local position
|
||||
// and the parent info can change independently though, so we track their "changed since"
|
||||
// separately
|
||||
bool hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
|
||||
bool hasAvatarLocalPosition = hasParent() && (sendAll ||
|
||||
tranlationChangedSince(lastSentTime) ||
|
||||
parentInfoChangedSince(lastSentTime));
|
||||
bool hasParentInfo = false;
|
||||
bool hasAvatarLocalPosition = false;
|
||||
|
||||
bool hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
|
||||
bool hasJointData = sendAll || !sendMinimum;
|
||||
bool hasFaceTrackerInfo = false;
|
||||
bool hasJointData = false;
|
||||
|
||||
if (sendPALMinimum) {
|
||||
hasAudioLoudness = true;
|
||||
} else {
|
||||
hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
|
||||
hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
|
||||
hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
|
||||
hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
|
||||
hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
|
||||
hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
|
||||
hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
|
||||
hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
|
||||
hasAvatarLocalPosition = hasParent() && (sendAll ||
|
||||
tranlationChangedSince(lastSentTime) ||
|
||||
parentInfoChangedSince(lastSentTime));
|
||||
|
||||
hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
|
||||
hasJointData = sendAll || !sendMinimum;
|
||||
}
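The restructuring above makes the "what goes in the packet" decision explicit: every has-flag starts false, the PAL-minimum path enables only audio loudness, and the normal path re-derives each flag from sendAll or the corresponding changed-since check. A compact sketch of that gating (illustrative flag constants, not the real AvatarDataPacket layout):

#include <cstdint>

using HasFlags = uint16_t;
constexpr HasFlags HAS_GLOBAL_POSITION = 1 << 0;
constexpr HasFlags HAS_ORIENTATION     = 1 << 1;
constexpr HasFlags HAS_AUDIO_LOUDNESS  = 1 << 2;

HasFlags buildFlags(bool palMinimum, bool sendAll, bool rotationChanged, bool loudnessChanged) {
    HasFlags flags = HAS_GLOBAL_POSITION;            // always included
    if (palMinimum) {
        flags |= HAS_AUDIO_LOUDNESS;                 // PAL only needs loudness
    } else {
        if (sendAll || rotationChanged) { flags |= HAS_ORIENTATION; }
        if (sendAll || loudnessChanged) { flags |= HAS_AUDIO_LOUDNESS; }
    }
    return flags;                                    // leading flags written before the payload
}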
|
||||
|
||||
// Leading flags, to indicate how much data is actually included in the packet...
|
||||
AvatarDataPacket::HasFlags packetStateFlags =
|
||||
|
|
|
@ -376,6 +376,7 @@ public:
|
|||
|
||||
typedef enum {
|
||||
NoData,
|
||||
PALMinimum,
|
||||
MinimumData,
|
||||
CullSmallData,
|
||||
IncludeSmallData,
|
||||
|
|
|
@ -37,6 +37,8 @@ static uint64_t MAX_NO_RENDER_INTERVAL = 30 * USECS_PER_SECOND;
|
|||
|
||||
static int MAX_WINDOW_SIZE = 4096;
|
||||
static float OPAQUE_ALPHA_THRESHOLD = 0.99f;
|
||||
static int DEFAULT_MAX_FPS = 10;
|
||||
static int YOUTUBE_MAX_FPS = 30;
|
||||
|
||||
EntityItemPointer RenderableWebEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
|
||||
EntityItemPointer entity{ new RenderableWebEntityItem(entityID) };
|
||||
|
@ -113,7 +115,7 @@ bool RenderableWebEntityItem::buildWebSurface(QSharedPointer<EntityTreeRenderer>
|
|||
|
||||
// FIXME, the max FPS could be better managed by being dynamic (based on the number of current surfaces
|
||||
// and the current rendering load)
|
||||
_webSurface->setMaxFps(10);
|
||||
_webSurface->setMaxFps(DEFAULT_MAX_FPS);
|
||||
|
||||
// The lifetime of the QML surface MUST be managed by the main thread
|
||||
// Additionally, we MUST use local variables copied by value, rather than
|
||||
|
@ -256,9 +258,18 @@ void RenderableWebEntityItem::loadSourceURL() {
|
|||
_sourceUrl.toLower().endsWith(".htm") || _sourceUrl.toLower().endsWith(".html")) {
|
||||
_contentType = htmlContent;
|
||||
_webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "qml/controls/"));
|
||||
|
||||
// We special case YouTube URLs since we know they are videos that we should play with at least 30 FPS.
|
||||
if (sourceUrl.host().endsWith("youtube.com", Qt::CaseInsensitive)) {
|
||||
_webSurface->setMaxFps(YOUTUBE_MAX_FPS);
|
||||
} else {
|
||||
_webSurface->setMaxFps(DEFAULT_MAX_FPS);
|
||||
}
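As the comment notes, YouTube hosts are assumed to be video content and get a higher frame budget than ordinary web surfaces. A minimal sketch of that selection, with illustrative constants mirroring DEFAULT_MAX_FPS and YOUTUBE_MAX_FPS:

#include <QString>
#include <QUrl>

int chooseMaxFps(const QUrl& sourceUrl) {
    static const int kDefaultMaxFps = 10;   // enough for mostly-static web content
    static const int kVideoMaxFps = 30;     // video hosts get a higher budget
    if (sourceUrl.host().endsWith("youtube.com", Qt::CaseInsensitive)) {
        return kVideoMaxFps;
    }
    return kDefaultMaxFps;
}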
|
||||
|
||||
_webSurface->load("WebView.qml", [&](QQmlContext* context, QObject* obj) {
|
||||
context->setContextProperty("eventBridgeJavaScriptToInject", QVariant(_javaScriptToInject));
|
||||
});
|
||||
|
||||
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
|
||||
_webSurface->getRootContext()->setContextProperty("desktop", QVariant());
|
||||
|
||||
|
|
|
@ -207,7 +207,7 @@ void GLBackend::renderPassTransfer(const Batch& batch) {
|
|||
}
|
||||
}
|
||||
|
||||
{ // Sync all the buffers
|
||||
{ // Sync all the transform states
|
||||
PROFILE_RANGE(render_gpu_gl_detail, "syncCPUTransform");
|
||||
_transform._cameras.clear();
|
||||
_transform._cameraOffsets.clear();
|
||||
|
@ -275,7 +275,7 @@ void GLBackend::renderPassDraw(const Batch& batch) {
|
|||
updateInput();
|
||||
updateTransform(batch);
|
||||
updatePipeline();
|
||||
|
||||
|
||||
CommandCall call = _commandCalls[(*command)];
|
||||
(this->*(call))(batch, *offset);
|
||||
break;
|
||||
|
|
|
@ -72,7 +72,6 @@ GL41Texture::GL41Texture(const std::weak_ptr<GLBackend>& backend, const Texture&
|
|||
incrementTextureGPUCount();
|
||||
withPreservedTexture([&] {
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(_gpuObject.getTexelFormat(), _gpuObject.getStoredMipFormat());
|
||||
const Sampler& sampler = _gpuObject.getSampler();
|
||||
auto numMips = _gpuObject.evalNumMips();
|
||||
for (uint16_t mipLevel = 0; mipLevel < numMips; ++mipLevel) {
|
||||
// Get the mip level dimensions, accounting for the downgrade level
|
||||
|
|
|
@ -18,6 +18,12 @@ Q_LOGGING_CATEGORY(gpugl45logging, "hifi.gpu.gl45")
|
|||
using namespace gpu;
|
||||
using namespace gpu::gl45;
|
||||
|
||||
void GL45Backend::recycle() const {
|
||||
Parent::recycle();
|
||||
GL45VariableAllocationTexture::manageMemory();
|
||||
GL45VariableAllocationTexture::_frameTexturesCreated = 0;
|
||||
}
|
||||
|
||||
void GL45Backend::do_draw(const Batch& batch, size_t paramOffset) {
|
||||
Primitive primitiveType = (Primitive)batch._params[paramOffset + 2]._uint;
|
||||
GLenum mode = gl::PRIMITIVE_TO_GL[primitiveType];
|
||||
|
|
|
@ -147,6 +147,7 @@ public:
|
|||
using TransferQueue = std::queue<std::unique_ptr<TransferJob>>;
|
||||
static MemoryPressureState _memoryPressureState;
|
||||
protected:
|
||||
static size_t _frameTexturesCreated;
|
||||
static std::atomic<bool> _memoryPressureStateStale;
|
||||
static std::list<TextureWeakPointer> _memoryManagedTextures;
|
||||
static WorkQueue _transferQueue;
|
||||
|
|
|
@ -28,6 +28,7 @@ using namespace gpu::gl;
|
|||
using namespace gpu::gl45;
|
||||
|
||||
#define SPARSE_PAGE_SIZE_OVERHEAD_ESTIMATE 1.3f
|
||||
#define MAX_RESOURCE_TEXTURES_PER_FRAME 2
|
||||
|
||||
GLTexture* GL45Backend::syncGPUObject(const TexturePointer& texturePointer) {
|
||||
if (!texturePointer) {
|
||||
|
@ -57,19 +58,23 @@ GLTexture* GL45Backend::syncGPUObject(const TexturePointer& texturePointer) {
|
|||
break;
|
||||
|
||||
case TextureUsageType::RESOURCE: {
|
||||
|
||||
GL45VariableAllocationTexture* varObject { nullptr };
|
||||
if (GL45VariableAllocationTexture::_frameTexturesCreated < MAX_RESOURCE_TEXTURES_PER_FRAME) {
|
||||
#if 0
|
||||
if (isTextureManagementSparseEnabled() && GL45Texture::isSparseEligible(texture)) {
|
||||
varObject = new GL45SparseResourceTexture(shared_from_this(), texture);
|
||||
} else {
|
||||
varObject = new GL45ResourceTexture(shared_from_this(), texture);
|
||||
}
|
||||
if (isTextureManagementSparseEnabled() && GL45Texture::isSparseEligible(texture)) {
|
||||
object = new GL45SparseResourceTexture(shared_from_this(), texture);
|
||||
} else {
|
||||
object = new GL45ResourceTexture(shared_from_this(), texture);
|
||||
}
|
||||
#else
|
||||
varObject = new GL45ResourceTexture(shared_from_this(), texture);
|
||||
object = new GL45ResourceTexture(shared_from_this(), texture);
|
||||
#endif
|
||||
GL45VariableAllocationTexture::addMemoryManagedTexture(texturePointer);
|
||||
object = varObject;
|
||||
GL45VariableAllocationTexture::addMemoryManagedTexture(texturePointer);
|
||||
} else {
|
||||
auto fallback = texturePointer->getFallbackTexture();
|
||||
if (fallback) {
|
||||
object = static_cast<GL45Texture*>(syncGPUObject(fallback));
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
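The branch above enforces a per-frame budget on expensive resource-texture creation (MAX_RESOURCE_TEXTURES_PER_FRAME); anything over budget is drawn with its fallback until a later frame, and the counter is reset in recycle(). A hypothetical, engine-agnostic sketch of that budget:

#include <cstddef>

struct TextureBudgetSketch {
    static constexpr std::size_t kMaxCreationsPerFrame = 2;
    std::size_t createdThisFrame = 0;

    // returns true if the caller may allocate a new GPU texture this frame
    bool tryAcquireSlot() {
        if (createdThisFrame >= kMaxCreationsPerFrame) {
            return false;            // caller should render with its fallback texture instead
        }
        ++createdThisFrame;
        return true;
    }

    void onFrameRecycled() { createdThisFrame = 0; }   // reset once per frame, as in recycle()
};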
|
||||
|
||||
|
@ -81,11 +86,6 @@ GLTexture* GL45Backend::syncGPUObject(const TexturePointer& texturePointer) {
|
|||
return object;
|
||||
}
|
||||
|
||||
void GL45Backend::recycle() const {
|
||||
Parent::recycle();
|
||||
GL45VariableAllocationTexture::manageMemory();
|
||||
}
|
||||
|
||||
void GL45Backend::initTextureManagementStage() {
|
||||
// enable the Sparse Texture on gl45
|
||||
_textureManagement._sparseCapable = true;
|
||||
|
|
|
@ -405,7 +405,10 @@ void GL45VariableAllocationTexture::manageMemory() {
|
|||
processWorkQueues();
|
||||
}
|
||||
|
||||
size_t GL45VariableAllocationTexture::_frameTexturesCreated { 0 };
|
||||
|
||||
GL45VariableAllocationTexture::GL45VariableAllocationTexture(const std::weak_ptr<GLBackend>& backend, const Texture& texture) : GL45Texture(backend, texture) {
|
||||
++_frameTexturesCreated;
|
||||
}
|
||||
|
||||
GL45VariableAllocationTexture::~GL45VariableAllocationTexture() {
|
||||
|
|
|
@ -412,7 +412,6 @@ void Texture::assignStoredMip(uint16 level, storage::StoragePointer& storage) {
|
|||
// Then check that the mip storage passed in makes sense for its format
|
||||
Size expectedSize = evalStoredMipSize(level, getStoredMipFormat());
|
||||
auto size = storage->size();
|
||||
auto bytes = storage->data();
|
||||
if (storage->size() == expectedSize) {
|
||||
_storage->assignMipData(level, storage);
|
||||
_maxMip = std::max(_maxMip, level);
|
||||
|
@ -442,7 +441,6 @@ void Texture::assignStoredMipFace(uint16 level, uint8 face, storage::StoragePoin
|
|||
// Then check that the mip storage passed in makes sense for its format
|
||||
Size expectedSize = evalStoredMipFaceSize(level, getStoredMipFormat());
|
||||
auto size = storage->size();
|
||||
auto bytes = storage->data();
|
||||
if (size == expectedSize) {
|
||||
_storage->assignMipFaceData(level, face, storage);
|
||||
_maxMip = std::max(_maxMip, level);
|
||||
|
|
|
@ -503,6 +503,9 @@ public:
|
|||
const Sampler& getSampler() const { return _sampler; }
|
||||
Stamp getSamplerStamp() const { return _samplerStamp; }
|
||||
|
||||
void setFallbackTexture(const TexturePointer& fallback) { _fallback = fallback; }
|
||||
TexturePointer getFallbackTexture() const { return _fallback.lock(); }
|
||||
|
||||
void setExternalTexture(uint32 externalId, void* externalFence);
|
||||
void setExternalRecycler(const ExternalRecycler& recycler);
|
||||
ExternalRecycler getExternalRecycler() const;
|
||||
|
@ -526,6 +529,7 @@ protected:
|
|||
ExternalRecycler _externalRecycler;
|
||||
|
||||
|
||||
std::weak_ptr<Texture> _fallback;
|
||||
// Not strictly necessary, but incredibly useful for debugging
|
||||
std::string _source;
|
||||
std::unique_ptr< Storage > _storage;
|
||||
|
|
|
@ -117,6 +117,7 @@ ktx::KTXUniquePointer Texture::serialize(const Texture& texture) {
|
|||
}
|
||||
|
||||
auto ktxBuffer = ktx::KTX::create(header, images);
|
||||
#if 0
|
||||
auto expectedMipCount = texture.evalNumMips();
|
||||
assert(expectedMipCount == ktxBuffer->_images.size());
|
||||
assert(expectedMipCount == header.numberOfMipmapLevels);
|
||||
|
@ -141,6 +142,7 @@ ktx::KTXUniquePointer Texture::serialize(const Texture& texture) {
|
|||
assert(0 == memcmp(expectedFace, actualFace, expected._faceSize));
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return ktxBuffer;
|
||||
}
|
||||
|
||||
|
|
|
@ -77,7 +77,7 @@ KTX::KTX() {
|
|||
KTX::~KTX() {
|
||||
}
|
||||
|
||||
void KTX::resetStorage(StoragePointer& storage) {
|
||||
void KTX::resetStorage(const StoragePointer& storage) {
|
||||
_storage = storage;
|
||||
}
|
||||
|
||||
|
|
|
@ -402,15 +402,15 @@ namespace ktx {
|
|||
Image(uint32_t imageSize, uint32_t padding, const Byte* bytes) :
|
||||
_numFaces(1),
|
||||
_imageSize(imageSize),
|
||||
_padding(padding),
|
||||
_faceSize(imageSize),
|
||||
_padding(padding),
|
||||
_faceBytes(1, bytes) {}
|
||||
|
||||
Image(uint32_t pageSize, uint32_t padding, const FaceBytes& cubeFaceBytes) :
|
||||
_numFaces(NUM_CUBEMAPFACES),
|
||||
_imageSize(pageSize * NUM_CUBEMAPFACES),
|
||||
_padding(padding),
|
||||
_faceSize(pageSize)
|
||||
_faceSize(pageSize),
|
||||
_padding(padding)
|
||||
{
|
||||
if (cubeFaceBytes.size() == NUM_CUBEMAPFACES) {
|
||||
_faceBytes = cubeFaceBytes;
|
||||
|
@ -420,7 +420,7 @@ namespace ktx {
|
|||
using Images = std::vector<Image>;
|
||||
|
||||
class KTX {
|
||||
void resetStorage(StoragePointer& src);
|
||||
void resetStorage(const StoragePointer& src);
|
||||
|
||||
KTX();
|
||||
public:
|
||||
|
@ -448,7 +448,7 @@ namespace ktx {
|
|||
static Images writeImages(Byte* destBytes, size_t destByteSize, const Images& images);
|
||||
|
||||
// Parse a block of memory and create a KTX object from it
|
||||
static std::unique_ptr<KTX> create(StoragePointer& src);
|
||||
static std::unique_ptr<KTX> create(const StoragePointer& src);
|
||||
|
||||
static bool checkHeaderFromStorage(size_t srcSize, const Byte* srcBytes);
|
||||
static Images parseImages(const Header& header, size_t srcSize, const Byte* srcBytes);
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
|
||||
#include <list>
|
||||
#include <QtGlobal>
|
||||
#include <QtCore/QDebug>
|
||||
|
||||
#ifndef _MSC_VER
|
||||
#define NOEXCEPT noexcept
|
||||
|
@ -68,7 +69,7 @@ namespace ktx {
|
|||
}
|
||||
|
||||
// find the first null character \0
|
||||
int keyLength = 0;
|
||||
uint32_t keyLength = 0;
|
||||
while (reinterpret_cast<const char*>(src[++keyLength]) != '\0') {
|
||||
if (keyLength == keyValueByteSize) {
|
||||
// key must be null-terminated, and there must be space for the value
|
||||
|
@ -119,8 +120,8 @@ namespace ktx {
|
|||
|
||||
return true;
|
||||
}
|
||||
catch (ReaderException& e) {
|
||||
qWarning(e.what());
|
||||
catch (const ReaderException& e) {
|
||||
qWarning() << e.what();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -128,7 +129,6 @@ namespace ktx {
|
|||
Images KTX::parseImages(const Header& header, size_t srcSize, const Byte* srcBytes) {
|
||||
Images images;
|
||||
auto currentPtr = srcBytes;
|
||||
auto numMips = header.getNumberOfLevels();
|
||||
auto numFaces = header.numberOfFaces;
|
||||
|
||||
// Keep identifying new mips as long as we can at least query the next imageSize
|
||||
|
@ -163,7 +163,7 @@ namespace ktx {
|
|||
return images;
|
||||
}
|
||||
|
||||
std::unique_ptr<KTX> KTX::create(StoragePointer& src) {
|
||||
std::unique_ptr<KTX> KTX::create(const StoragePointer& src) {
|
||||
if (!src) {
|
||||
return nullptr;
|
||||
}
|
||||
|
|
|
@ -105,14 +105,14 @@ namespace ktx {
|
|||
|
||||
// Single face vs cubes
|
||||
if (srcImages[l]._numFaces == 1) {
|
||||
auto copied = memcpy(currentPtr, srcImages[l]._faceBytes[0], imageSize);
|
||||
memcpy(currentPtr, srcImages[l]._faceBytes[0], imageSize);
|
||||
destImages.emplace_back(Image((uint32_t) imageSize, padding, currentPtr));
|
||||
currentPtr += imageSize;
|
||||
} else {
|
||||
Image::FaceBytes faceBytes(6);
|
||||
auto faceSize = srcImages[l]._faceSize;
|
||||
for (int face = 0; face < 6; face++) {
|
||||
auto copied = memcpy(currentPtr, srcImages[l]._faceBytes[face], faceSize);
|
||||
memcpy(currentPtr, srcImages[l]._faceBytes[face], faceSize);
|
||||
faceBytes[face] = currentPtr;
|
||||
currentPtr += faceSize;
|
||||
}
|
||||
|
|
|
@ -171,14 +171,6 @@ const gpu::TexturePointer& TextureCache::getBlackTexture() {
|
|||
return _blackTexture;
|
||||
}
|
||||
|
||||
|
||||
const gpu::TexturePointer& TextureCache::getNormalFittingTexture() {
|
||||
if (!_normalFittingTexture) {
|
||||
_normalFittingTexture = getImageTexture(PathUtils::resourcesPath() + "images/normalFittingScale.dds", NetworkTexture::STRICT_TEXTURE);
|
||||
}
|
||||
return _normalFittingTexture;
|
||||
}
|
||||
|
||||
/// Extra data for creating textures.
|
||||
class TextureExtra {
|
||||
public:
|
||||
|
@ -198,6 +190,39 @@ NetworkTexturePointer TextureCache::getTexture(const QUrl& url, Type type, const
|
|||
return ResourceCache::getResource(url, QUrl(), &extra).staticCast<NetworkTexture>();
|
||||
}
|
||||
|
||||
gpu::TexturePointer getFallbackTextureForType(NetworkTexture::Type type) {
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
|
||||
gpu::TexturePointer result;
|
||||
switch (type) {
|
||||
case NetworkTexture::DEFAULT_TEXTURE:
|
||||
case NetworkTexture::ALBEDO_TEXTURE:
|
||||
case NetworkTexture::ROUGHNESS_TEXTURE:
|
||||
case NetworkTexture::OCCLUSION_TEXTURE:
|
||||
result = textureCache->getWhiteTexture();
|
||||
break;
|
||||
|
||||
case NetworkTexture::NORMAL_TEXTURE:
|
||||
result = textureCache->getBlueTexture();
|
||||
break;
|
||||
|
||||
case NetworkTexture::EMISSIVE_TEXTURE:
|
||||
case NetworkTexture::LIGHTMAP_TEXTURE:
|
||||
result = textureCache->getBlackTexture();
|
||||
break;
|
||||
|
||||
case NetworkTexture::BUMP_TEXTURE:
|
||||
case NetworkTexture::SPECULAR_TEXTURE:
|
||||
case NetworkTexture::GLOSS_TEXTURE:
|
||||
case NetworkTexture::CUBE_TEXTURE:
|
||||
case NetworkTexture::CUSTOM_TEXTURE:
|
||||
case NetworkTexture::STRICT_TEXTURE:
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return result;
|
||||
}
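The mapping above picks a neutral stand-in per texture semantic: white for albedo/roughness/occlusion, black for emissive and lightmaps, and the "blue" texture for normals. The blue choice follows from how tangent-space normals are encoded, as this tiny sketch shows:

#include <glm/vec3.hpp>

glm::vec3 encodeTangentNormal(const glm::vec3& n) {
    return n * 0.5f + glm::vec3(0.5f);     // remap [-1, 1] to [0, 1]
}
// encodeTangentNormal({0, 0, 1}) == {0.5, 0.5, 1.0}, the light blue of a flat normal map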
|
||||
|
||||
|
||||
NetworkTexture::TextureLoaderFunc getTextureLoaderForType(NetworkTexture::Type type,
|
||||
const QVariantMap& options = QVariantMap()) {
|
||||
|
@ -353,6 +378,13 @@ void NetworkTexture::setImage(gpu::TexturePointer texture, int originalWidth,
|
|||
emit networkTextureCreated(qWeakPointerCast<NetworkTexture, Resource> (_self));
|
||||
}
|
||||
|
||||
gpu::TexturePointer NetworkTexture::getFallbackTexture() const {
|
||||
if (_type == CUSTOM_TEXTURE) {
|
||||
return gpu::TexturePointer();
|
||||
}
|
||||
return getFallbackTextureForType(_type);
|
||||
}
|
||||
|
||||
class Reader : public QRunnable {
|
||||
public:
|
||||
Reader(const QWeakPointer<Resource>& resource, const QUrl& url) : _resource(resource), _url(url) {
|
||||
|
@ -526,6 +558,9 @@ void ImageReader::read() {
|
|||
PROFILE_RANGE_EX(resource_parse_image, __FUNCTION__, 0xffff0000, 0);
|
||||
texture.reset(resource.staticCast<NetworkTexture>()->getTextureLoader()(image, url));
|
||||
texture->setSource(url);
|
||||
if (texture) {
|
||||
texture->setFallbackTexture(networkTexture->getFallbackTexture());
|
||||
}
|
||||
}
|
||||
|
||||
// Hash the source image to use as a filename for on-disk caching
|
||||
|
|
|
@ -78,6 +78,7 @@ public:
|
|||
Type getTextureType() const { return _type; }
|
||||
|
||||
TextureLoaderFunc getTextureLoader() const;
|
||||
gpu::TexturePointer getFallbackTexture() const;
|
||||
|
||||
signals:
|
||||
void networkTextureCreated(const QWeakPointer<NetworkTexture>& self);
|
||||
|
@ -132,9 +133,6 @@ public:
|
|||
/// Returns the a black texture (useful for a default).
|
||||
const gpu::TexturePointer& getBlackTexture();
|
||||
|
||||
// Returns a map used to compress the normals through a fitting scale algorithm
|
||||
const gpu::TexturePointer& getNormalFittingTexture();
|
||||
|
||||
/// Returns a texture version of an image file
|
||||
static gpu::TexturePointer getImageTexture(const QString& path, Type type = Type::DEFAULT_TEXTURE, QVariantMap options = QVariantMap());
|
||||
|
||||
|
@ -165,7 +163,6 @@ private:
|
|||
gpu::TexturePointer _grayTexture;
|
||||
gpu::TexturePointer _blueTexture;
|
||||
gpu::TexturePointer _blackTexture;
|
||||
gpu::TexturePointer _normalFittingTexture;
|
||||
};
|
||||
|
||||
#endif // hifi_TextureCache_h
|
||||
|
|
|
@ -154,7 +154,7 @@ const QImage TextureUsage::process2DImageColor(const QImage& srcImage, bool& val
|
|||
return image;
|
||||
}
|
||||
|
||||
void TextureUsage::defineColorTexelFormats(gpu::Element& formatGPU, gpu::Element& formatMip,
|
||||
void TextureUsage::defineColorTexelFormats(gpu::Element& formatGPU, gpu::Element& formatMip,
|
||||
const QImage& image, bool isLinear, bool doCompress) {
|
||||
|
||||
#ifdef COMPRESS_TEXTURES
|
||||
|
@ -308,7 +308,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& src
|
|||
if (image.format() != QImage::Format_ARGB32) {
|
||||
image = image.convertToFormat(QImage::Format_ARGB32);
|
||||
}
|
||||
|
||||
|
||||
|
||||
gpu::Texture* theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
|
@ -354,15 +354,15 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
int width = image.width();
|
||||
int height = image.height();
|
||||
QImage result(width, height, QImage::Format_RGB888);
|
||||
|
||||
|
||||
for (int i = 0; i < width; i++) {
|
||||
const int iNextClamped = clampPixelCoordinate(i + 1, width - 1);
|
||||
const int iPrevClamped = clampPixelCoordinate(i - 1, width - 1);
|
||||
|
||||
|
||||
for (int j = 0; j < height; j++) {
|
||||
const int jNextClamped = clampPixelCoordinate(j + 1, height - 1);
|
||||
const int jPrevClamped = clampPixelCoordinate(j - 1, height - 1);
|
||||
|
||||
|
||||
// surrounding pixels
|
||||
const QRgb topLeft = image.pixel(iPrevClamped, jPrevClamped);
|
||||
const QRgb top = image.pixel(iPrevClamped, j);
|
||||
|
@ -372,7 +372,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
const QRgb bottom = image.pixel(iNextClamped, j);
|
||||
const QRgb bottomLeft = image.pixel(iNextClamped, jPrevClamped);
|
||||
const QRgb left = image.pixel(i, jPrevClamped);
|
||||
|
||||
|
||||
// take their gray intensities
|
||||
// since it's a grayscale image, the value of each component RGB is the same
|
||||
const double tl = qRed(topLeft);
|
||||
|
@ -383,15 +383,15 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
const double b = qRed(bottom);
|
||||
const double bl = qRed(bottomLeft);
|
||||
const double l = qRed(left);
|
||||
|
||||
|
||||
// apply the sobel filter
|
||||
const double dX = (tr + pStrength * r + br) - (tl + pStrength * l + bl);
|
||||
const double dY = (bl + pStrength * b + br) - (tl + pStrength * t + tr);
|
||||
const double dZ = RGBA_MAX / pStrength;
|
||||
|
||||
|
||||
glm::vec3 v(dX, dY, dZ);
|
||||
glm::normalize(v);
|
||||
|
||||
|
||||
// convert to rgb from the value obtained computing the filter
|
||||
QRgb qRgbValue = qRgba(mapComponent(v.x), mapComponent(v.y), mapComponent(v.z), 1.0);
|
||||
result.setPixel(i, j, qRgbValue);
|
||||
|
@ -444,7 +444,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromImage(const QImage& srcIma
|
|||
theTexture->setStoredMipFormat(formatMip);
|
||||
theTexture->assignStoredMip(0, image.byteCount(), image.constBits());
|
||||
generateMips(theTexture, image, true);
|
||||
|
||||
|
||||
theTexture->setSource(srcImageName);
|
||||
}
|
||||
|
||||
|
@ -466,12 +466,12 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
|
|||
|
||||
// Gloss turned into Rough
|
||||
image.invertPixels(QImage::InvertRgba);
|
||||
|
||||
|
||||
image = image.convertToFormat(QImage::Format_Grayscale8);
|
||||
|
||||
|
||||
gpu::Texture* theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
|
||||
|
||||
#ifdef COMPRESS_TEXTURES
|
||||
gpu::Element formatGPU = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::COMPRESSED_R);
|
||||
#else
|
||||
|
@ -487,7 +487,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
|
|||
|
||||
theTexture->setSource(srcImageName);
|
||||
}
|
||||
|
||||
|
||||
return theTexture;
|
||||
}
|
||||
|
||||
|
@ -545,18 +545,18 @@ public:
|
|||
int _y = 0;
|
||||
bool _horizontalMirror = false;
|
||||
bool _verticalMirror = false;
|
||||
|
||||
|
||||
Face() {}
|
||||
Face(int x, int y, bool horizontalMirror, bool verticalMirror) : _x(x), _y(y), _horizontalMirror(horizontalMirror), _verticalMirror(verticalMirror) {}
|
||||
};
|
||||
|
||||
|
||||
Face _faceXPos;
|
||||
Face _faceXNeg;
|
||||
Face _faceYPos;
|
||||
Face _faceYNeg;
|
||||
Face _faceZPos;
|
||||
Face _faceZNeg;
|
||||
|
||||
|
||||
CubeLayout(int wr, int hr, Face fXP, Face fXN, Face fYP, Face fYN, Face fZP, Face fZN) :
|
||||
_type(FLAT),
|
||||
_widthRatio(wr),
|
||||
|
@ -799,7 +799,7 @@ gpu::Texture* TextureUsage::processCubeTextureColorFromImage(const QImage& srcIm
|
|||
defineColorTexelFormats(formatGPU, formatMip, image, isLinear, doCompress);
|
||||
|
||||
// Find the layout of the cubemap in the 2D image
|
||||
// Use the original image size since processSourceImage may have altered the size / aspect ratio
|
||||
// Use the original image size since processSourceImage may have altered the size / aspect ratio
|
||||
int foundLayout = CubeLayout::findLayout(srcImage.width(), srcImage.height());
|
||||
|
||||
std::vector<QImage> faces;
|
||||
|
|
|
@ -221,7 +221,7 @@ ResourceCache::ResourceCache(QObject* parent) : QObject(parent) {
|
|||
}
|
||||
|
||||
ResourceCache::~ResourceCache() {
|
||||
clearUnusedResource();
|
||||
clearUnusedResources();
|
||||
}
|
||||
|
||||
void ResourceCache::clearATPAssets() {
|
||||
|
@ -265,7 +265,7 @@ void ResourceCache::clearATPAssets() {
|
|||
|
||||
void ResourceCache::refreshAll() {
|
||||
// Clear all unused resources so we don't have to reload them
|
||||
clearUnusedResource();
|
||||
clearUnusedResources();
|
||||
resetResourceCounters();
|
||||
|
||||
QHash<QUrl, QWeakPointer<Resource>> resources;
|
||||
|
@ -418,7 +418,7 @@ void ResourceCache::reserveUnusedResource(qint64 resourceSize) {
|
|||
}
|
||||
}
|
||||
|
||||
void ResourceCache::clearUnusedResource() {
|
||||
void ResourceCache::clearUnusedResources() {
|
||||
// the unused resources may themselves reference resources that will be added to the unused
|
||||
// list on destruction, so keep clearing until there are no references left
|
||||
QWriteLocker locker(&_unusedResourcesLock);
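The comment above describes why clearing is a loop rather than a single pass: destroying an unused resource can release references that push more resources onto the unused list. A stripped-down sketch of that drain-until-stable loop (hypothetical container, no locking or size accounting):

#include <memory>
#include <vector>

struct ResourceSketch;
using UnusedList = std::vector<std::shared_ptr<ResourceSketch>>;

struct ResourceSketch {
    UnusedList children;   // destroying this may in turn orphan more resources
};

void clearUnusedSketch(UnusedList& unused) {
    // keep draining: destructors that run when a batch is dropped may append to `unused` again
    while (!unused.empty()) {
        UnusedList doomed;
        doomed.swap(unused);   // take the current batch and let it destruct
    }
}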
|
||||
|
|
|
@ -249,6 +249,7 @@ public:
|
|||
|
||||
void refreshAll();
|
||||
void refresh(const QUrl& url);
|
||||
void clearUnusedResources();
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
|
@ -298,7 +299,7 @@ protected:
|
|||
|
||||
void addUnusedResource(const QSharedPointer<Resource>& resource);
|
||||
void removeUnusedResource(const QSharedPointer<Resource>& resource);
|
||||
|
||||
|
||||
/// Attempt to load a resource if requests are below the limit, otherwise queue the resource for loading
|
||||
/// \return true if the resource began loading, otherwise false if the resource is in the pending queue
|
||||
static bool attemptRequest(QSharedPointer<Resource> resource);
|
||||
|
@ -309,7 +310,6 @@ private:
|
|||
friend class Resource;
|
||||
|
||||
void reserveUnusedResource(qint64 resourceSize);
|
||||
void clearUnusedResource();
|
||||
void resetResourceCounters();
|
||||
void removeResource(const QUrl& url, qint64 size = 0);
|
||||
|
||||
|
|
|
@ -65,25 +65,4 @@ float packUnlit() {
|
|||
return FRAG_PACK_UNLIT;
|
||||
}
|
||||
|
||||
<!
|
||||
uniform sampler2D normalFittingMap;
|
||||
|
||||
vec3 bestFitNormal(vec3 normal) {
|
||||
vec3 absNorm = abs(normal);
|
||||
float maxNAbs = max(absNorm.z, max(absNorm.x, absNorm.y));
|
||||
|
||||
vec2 texcoord = (absNorm.z < maxNAbs ?
|
||||
(absNorm.y < maxNAbs ? absNorm.yz : absNorm.xz) :
|
||||
absNorm.xy);
|
||||
texcoord = (texcoord.x < texcoord.y ? texcoord.yx : texcoord.xy);
|
||||
texcoord.y /= texcoord.x;
|
||||
vec3 cN = normal / maxNAbs;
|
||||
|
||||
float fittingScale = texture(normalFittingMap, texcoord).a;
|
||||
cN *= fittingScale;
|
||||
|
||||
return (cN * 0.5 + 0.5);
|
||||
}
|
||||
!>
|
||||
|
||||
<@endif@>
|
||||
|
|
|
@ -414,8 +414,6 @@ _nextID(0) {
|
|||
// Set the defaults needed for a simple program
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
|
||||
DependencyManager::get<TextureCache>()->getWhiteTexture());
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
|
||||
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
|
||||
}
|
||||
);
|
||||
GeometryCache::_simpleTransparentPipeline =
|
||||
|
@ -424,8 +422,6 @@ _nextID(0) {
|
|||
// Set the defaults needed for a simple program
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
|
||||
DependencyManager::get<TextureCache>()->getWhiteTexture());
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
|
||||
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
|
||||
}
|
||||
);
|
||||
GeometryCache::_simpleWirePipeline =
|
||||
|
@ -1770,7 +1766,6 @@ static void buildWebShader(const std::string& vertShaderText, const std::string&
|
|||
shaderPointerOut = gpu::Shader::createProgram(VS, PS);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), render::ShapePipeline::Slot::MAP::NORMAL_FITTING));
|
||||
gpu::Shader::makeProgram(*shaderPointerOut, slotBindings);
|
||||
auto state = std::make_shared<gpu::State>();
|
||||
state->setCullMode(gpu::State::CULL_NONE);
|
||||
|
@ -1784,9 +1779,6 @@ static void buildWebShader(const std::string& vertShaderText, const std::string&
|
|||
|
||||
void GeometryCache::bindOpaqueWebBrowserProgram(gpu::Batch& batch, bool isAA) {
|
||||
batch.setPipeline(getOpaqueWebBrowserProgram(isAA));
|
||||
// Set a default normal map
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
|
||||
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
|
||||
}
|
||||
|
||||
gpu::PipelinePointer GeometryCache::getOpaqueWebBrowserProgram(bool isAA) {
|
||||
|
@ -1802,9 +1794,6 @@ gpu::PipelinePointer GeometryCache::getOpaqueWebBrowserProgram(bool isAA) {
|
|||
|
||||
void GeometryCache::bindTransparentWebBrowserProgram(gpu::Batch& batch, bool isAA) {
|
||||
batch.setPipeline(getTransparentWebBrowserProgram(isAA));
|
||||
// Set a default normal map
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
|
||||
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
|
||||
}
|
||||
|
||||
gpu::PipelinePointer GeometryCache::getTransparentWebBrowserProgram(bool isAA) {
|
||||
|
@ -1827,9 +1816,6 @@ void GeometryCache::bindSimpleProgram(gpu::Batch& batch, bool textured, bool tra
|
|||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
|
||||
DependencyManager::get<TextureCache>()->getWhiteTexture());
|
||||
}
|
||||
// Set a default normal map
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
|
||||
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
|
||||
}
|
||||
|
||||
gpu::PipelinePointer GeometryCache::getSimplePipeline(bool textured, bool transparent, bool culled, bool unlit, bool depthBiased) {
|
||||
|
@ -1846,7 +1832,6 @@ gpu::PipelinePointer GeometryCache::getSimplePipeline(bool textured, bool transp
|
|||
_unlitShader = gpu::Shader::createProgram(VS, PSUnlit);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), render::ShapePipeline::Slot::MAP::NORMAL_FITTING));
|
||||
gpu::Shader::makeProgram(*_simpleShader, slotBindings);
|
||||
gpu::Shader::makeProgram(*_unlitShader, slotBindings);
|
||||
});
|
||||
|
|
|
@ -97,6 +97,8 @@ ShapeKey MeshPartPayload::getShapeKey() const {
|
|||
}
|
||||
|
||||
ShapeKey::Builder builder;
|
||||
builder.withMaterial();
|
||||
|
||||
if (drawMaterialKey.isTranslucent()) {
|
||||
builder.withTranslucent();
|
||||
}
|
||||
|
@ -478,6 +480,8 @@ ShapeKey ModelMeshPartPayload::getShapeKey() const {
|
|||
}
|
||||
|
||||
ShapeKey::Builder builder;
|
||||
builder.withMaterial();
|
||||
|
||||
if (isTranslucent || _fadeState != FADE_COMPLETE) {
|
||||
builder.withTranslucent();
|
||||
}
|
||||
|
|
|
@ -1178,13 +1178,7 @@ void Model::updateClusterMatrices() {
|
|||
for (int j = 0; j < mesh.clusters.size(); j++) {
|
||||
const FBXCluster& cluster = mesh.clusters.at(j);
|
||||
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
|
||||
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
|
||||
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
|
||||
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
|
||||
state.clusterMatrices[j] = out;
|
||||
#else
|
||||
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
|
||||
#endif
|
||||
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
|
||||
}
|
||||
|
||||
// Once computed the cluster matrices, update the buffer(s)
|
||||
|
|
|
@ -75,7 +75,6 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
|
|||
// GPU jobs: Start preparing the primary, deferred and lighting buffer
|
||||
const auto primaryFramebuffer = addJob<PreparePrimaryFramebuffer>("PreparePrimaryBuffer");
|
||||
|
||||
// const auto fullFrameRangeTimer = addJob<BeginGPURangeTimer>("BeginRangeTimer");
|
||||
const auto opaqueRangeTimer = addJob<BeginGPURangeTimer>("BeginOpaqueRangeTimer", "DrawOpaques");
|
||||
|
||||
const auto prepareDeferredInputs = PrepareDeferred::Inputs(primaryFramebuffer, lightingModel).hasVarying();
|
||||
|
@ -154,20 +153,25 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
|
|||
const auto toneMappingInputs = render::Varying(ToneMappingDeferred::Inputs(lightingFramebuffer, primaryFramebuffer));
|
||||
addJob<ToneMappingDeferred>("ToneMapping", toneMappingInputs);
|
||||
|
||||
{ // Debug the bounds of the rendered items, still look at the zbuffer
|
||||
addJob<DrawBounds>("DrawMetaBounds", metas);
|
||||
addJob<DrawBounds>("DrawOpaqueBounds", opaques);
|
||||
addJob<DrawBounds>("DrawTransparentBounds", transparents);
|
||||
}
|
||||
|
||||
// Overlays
|
||||
const auto overlayOpaquesInputs = DrawOverlay3D::Inputs(overlayOpaques, lightingModel).hasVarying();
|
||||
const auto overlayTransparentsInputs = DrawOverlay3D::Inputs(overlayTransparents, lightingModel).hasVarying();
|
||||
addJob<DrawOverlay3D>("DrawOverlay3DOpaque", overlayOpaquesInputs, true);
|
||||
addJob<DrawOverlay3D>("DrawOverlay3DTransparent", overlayTransparentsInputs, false);
|
||||
|
||||
{ // Debug the bounds of the rendered OVERLAY items, still look at the zbuffer
|
||||
addJob<DrawBounds>("DrawOverlayOpaqueBounds", overlayOpaques);
|
||||
addJob<DrawBounds>("DrawOverlayTransparentBounds", overlayTransparents);
|
||||
}
|
||||
|
||||
// Debugging stages
|
||||
// Debugging stages
|
||||
{
|
||||
|
||||
|
||||
// Bounds do not draw on stencil buffer, so they must come last
|
||||
addJob<DrawBounds>("DrawMetaBounds", metas);
|
||||
|
||||
// Debugging Deferred buffer job
|
||||
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
|
||||
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);
|
||||
|
@ -207,9 +211,6 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
|
|||
|
||||
// Blit!
|
||||
addJob<Blit>("Blit", primaryFramebuffer);
|
||||
|
||||
// addJob<EndGPURangeTimer>("RangeTimer", fullFrameRangeTimer);
|
||||
|
||||
}
|
||||
|
||||
void BeginGPURangeTimer::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, gpu::RangeTimerPointer& timer) {
|
||||
|
|
|
@ -50,9 +50,13 @@
|
|||
|
||||
#include "overlay3D_vert.h"
|
||||
#include "overlay3D_frag.h"
|
||||
#include "overlay3D_model_frag.h"
|
||||
#include "overlay3D_model_translucent_frag.h"
|
||||
#include "overlay3D_translucent_frag.h"
|
||||
#include "overlay3D_unlit_frag.h"
|
||||
#include "overlay3D_translucent_unlit_frag.h"
|
||||
#include "overlay3D_model_unlit_frag.h"
|
||||
#include "overlay3D_model_translucent_unlit_frag.h"
|
||||
|
||||
|
||||
using namespace render;
|
||||
|
@ -70,15 +74,24 @@ void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
|
|||
|
||||
void initOverlay3DPipelines(ShapePlumber& plumber) {
|
||||
auto vertex = gpu::Shader::createVertex(std::string(overlay3D_vert));
|
||||
auto vertexModel = gpu::Shader::createVertex(std::string(model_vert));
|
||||
auto pixel = gpu::Shader::createPixel(std::string(overlay3D_frag));
|
||||
auto pixelTranslucent = gpu::Shader::createPixel(std::string(overlay3D_translucent_frag));
|
||||
auto pixelUnlit = gpu::Shader::createPixel(std::string(overlay3D_unlit_frag));
|
||||
auto pixelTranslucentUnlit = gpu::Shader::createPixel(std::string(overlay3D_translucent_unlit_frag));
|
||||
auto pixelModel = gpu::Shader::createPixel(std::string(overlay3D_model_frag));
|
||||
auto pixelModelTranslucent = gpu::Shader::createPixel(std::string(overlay3D_model_translucent_frag));
|
||||
auto pixelModelUnlit = gpu::Shader::createPixel(std::string(overlay3D_model_unlit_frag));
|
||||
auto pixelModelTranslucentUnlit = gpu::Shader::createPixel(std::string(overlay3D_model_translucent_unlit_frag));
|
||||
|
||||
auto opaqueProgram = gpu::Shader::createProgram(vertex, pixel);
|
||||
auto translucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucent);
|
||||
auto unlitOpaqueProgram = gpu::Shader::createProgram(vertex, pixelUnlit);
|
||||
auto unlitTranslucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucentUnlit);
|
||||
auto materialOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
|
||||
auto materialTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
|
||||
auto materialUnlitOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
|
||||
auto materialUnlitTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
|
||||
|
||||
for (int i = 0; i < 8; i++) {
|
||||
bool isCulled = (i & 1);
|
||||
|
@ -103,14 +116,20 @@ void initOverlay3DPipelines(ShapePlumber& plumber) {
|
|||
}
|
||||
|
||||
ShapeKey::Filter::Builder builder;
|
||||
|
||||
isCulled ? builder.withCullFace() : builder.withoutCullFace();
|
||||
isBiased ? builder.withDepthBias() : builder.withoutDepthBias();
|
||||
isOpaque ? builder.withOpaque() : builder.withTranslucent();
|
||||
|
||||
auto simpleProgram = isOpaque ? opaqueProgram : translucentProgram;
|
||||
auto unlitProgram = isOpaque ? unlitOpaqueProgram : unlitTranslucentProgram;
|
||||
plumber.addPipeline(builder.withoutUnlit().build(), simpleProgram, state, &lightBatchSetter);
|
||||
plumber.addPipeline(builder.withUnlit().build(), unlitProgram, state, &batchSetter);
|
||||
auto materialProgram = isOpaque ? materialOpaqueProgram : materialTranslucentProgram;
|
||||
auto materialUnlitProgram = isOpaque ? materialUnlitOpaqueProgram : materialUnlitTranslucentProgram;
|
||||
|
||||
plumber.addPipeline(builder.withMaterial().build().key(), materialProgram, state, &lightBatchSetter);
|
||||
plumber.addPipeline(builder.withMaterial().withUnlit().build().key(), materialUnlitProgram, state, &batchSetter);
|
||||
plumber.addPipeline(builder.withoutUnlit().withoutMaterial().build().key(), simpleProgram, state, &lightBatchSetter);
|
||||
plumber.addPipeline(builder.withUnlit().withoutMaterial().build().key(), unlitProgram, state, &batchSetter);
|
||||
}
|
||||
}
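The loop above enumerates every pipeline variant by treating the loop index as a bit field; the hunk shows bit 0 unpacked into isCulled, and the builder calls suggest the remaining bits drive depth bias and opacity (an assumption here, since those lines fall outside the hunk). A standalone sketch of the enumeration:

#include <cstdio>

int main() {
    for (int i = 0; i < 8; i++) {
        bool isCulled = (i & 1) != 0;
        bool isBiased = (i & 2) != 0;
        bool isOpaque = (i & 4) != 0;
        std::printf("variant %d: culled=%d biased=%d opaque=%d\n",
                    i, isCulled, isBiased, isOpaque);
    }
    return 0;
}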
|
||||
|
||||
|
@ -144,78 +163,87 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
|
|||
// TODO: Refactor this to use a filter
|
||||
// Opaques
|
||||
addPipeline(
|
||||
Key::Builder(),
|
||||
Key::Builder().withMaterial(),
|
||||
modelVertex, modelPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withMaterial().withUnlit(),
|
||||
modelVertex, modelUnlitPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withUnlit(),
|
||||
modelVertex, modelUnlitPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTangents(),
|
||||
Key::Builder().withMaterial().withTangents(),
|
||||
modelNormalMapVertex, modelNormalMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSpecular(),
|
||||
Key::Builder().withMaterial().withSpecular(),
|
||||
modelVertex, modelSpecularMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withTangents().withSpecular(),
|
||||
modelNormalMapVertex, modelNormalSpecularMapPixel);
|
||||
// Translucents
|
||||
addPipeline(
|
||||
Key::Builder().withMaterial().withTranslucent(),
|
||||
modelVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTranslucent(),
|
||||
modelVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withMaterial().withTranslucent().withUnlit(),
|
||||
modelVertex, modelTranslucentUnlitPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTranslucent().withUnlit(),
|
||||
modelVertex, modelTranslucentUnlitPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTranslucent().withTangents(),
|
||||
Key::Builder().withMaterial().withTranslucent().withTangents(),
|
||||
modelNormalMapVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTranslucent().withSpecular(),
|
||||
Key::Builder().withMaterial().withTranslucent().withSpecular(),
|
||||
modelVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTranslucent().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withTranslucent().withTangents().withSpecular(),
|
||||
modelNormalMapVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
// FIXME: Ignore lightmap for translucents meshpart
|
||||
Key::Builder().withTranslucent().withLightmap(),
|
||||
Key::Builder().withMaterial().withTranslucent().withLightmap(),
|
||||
modelVertex, modelTranslucentPixel);
|
||||
// Lightmapped
|
||||
addPipeline(
|
||||
Key::Builder().withLightmap(),
|
||||
Key::Builder().withMaterial().withLightmap(),
|
||||
modelLightmapVertex, modelLightmapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withLightmap().withTangents(),
|
||||
Key::Builder().withMaterial().withLightmap().withTangents(),
|
||||
modelLightmapNormalMapVertex, modelLightmapNormalMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withLightmap().withSpecular(),
|
||||
Key::Builder().withMaterial().withLightmap().withSpecular(),
|
||||
modelLightmapVertex, modelLightmapSpecularMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withLightmap().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withLightmap().withTangents().withSpecular(),
|
||||
modelLightmapNormalMapVertex, modelLightmapNormalSpecularMapPixel);
|
||||
// Skinned
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned(),
|
||||
Key::Builder().withMaterial().withSkinned(),
|
||||
skinModelVertex, modelPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTangents(),
|
||||
Key::Builder().withMaterial().withSkinned().withTangents(),
|
||||
skinModelNormalMapVertex, modelNormalMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withSpecular(),
|
||||
Key::Builder().withMaterial().withSkinned().withSpecular(),
|
||||
skinModelVertex, modelSpecularMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withSkinned().withTangents().withSpecular(),
|
||||
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
|
||||
// Skinned and Translucent
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTranslucent(),
|
||||
Key::Builder().withMaterial().withSkinned().withTranslucent(),
|
||||
skinModelVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTranslucent().withTangents(),
|
||||
Key::Builder().withMaterial().withSkinned().withTranslucent().withTangents(),
|
||||
skinModelNormalMapVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTranslucent().withSpecular(),
|
||||
Key::Builder().withMaterial().withSkinned().withTranslucent().withSpecular(),
|
||||
skinModelVertex, modelTranslucentPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTranslucent().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withSkinned().withTranslucent().withTangents().withSpecular(),
|
||||
skinModelNormalMapVertex, modelTranslucentPixel);
|
||||
// Depth-only
|
||||
addPipeline(
|
||||
|
@ -244,32 +272,32 @@ void initForwardPipelines(render::ShapePlumber& plumber) {
|
|||
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
|
||||
// Opaques
|
||||
addPipeline(
|
||||
Key::Builder(),
|
||||
Key::Builder().withMaterial(),
|
||||
modelVertex, modelPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withUnlit(),
|
||||
Key::Builder().withMaterial().withUnlit(),
|
||||
modelVertex, modelUnlitPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTangents(),
|
||||
Key::Builder().withMaterial().withTangents(),
|
||||
modelNormalMapVertex, modelNormalMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSpecular(),
|
||||
Key::Builder().withMaterial().withSpecular(),
|
||||
modelVertex, modelSpecularMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withTangents().withSpecular(),
|
||||
modelNormalMapVertex, modelNormalSpecularMapPixel);
|
||||
// Skinned
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned(),
|
||||
Key::Builder().withMaterial().withSkinned(),
|
||||
skinModelVertex, modelPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTangents(),
|
||||
Key::Builder().withMaterial().withSkinned().withTangents(),
|
||||
skinModelNormalMapVertex, modelNormalMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withSpecular(),
|
||||
Key::Builder().withMaterial().withSkinned().withSpecular(),
|
||||
skinModelVertex, modelSpecularMapPixel);
|
||||
addPipeline(
|
||||
Key::Builder().withSkinned().withTangents().withSpecular(),
|
||||
Key::Builder().withMaterial().withSkinned().withTangents().withSpecular(),
|
||||
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
|
||||
}
|
||||
|
||||
|
@ -319,9 +347,6 @@ void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
|
|||
// Set a default albedo map
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
|
||||
DependencyManager::get<TextureCache>()->getWhiteTexture());
|
||||
// Set a default normal map
|
||||
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
|
||||
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
|
||||
|
||||
// Set a default material
|
||||
if (pipeline.locations->materialBufferUnit >= 0) {
|
||||
|
|
88
libraries/render-utils/src/overlay3D_model.slf
Normal file
|
@ -0,0 +1,88 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
// overlay3D_model.slf
|
||||
// fragment shader
|
||||
//
|
||||
// Created by Sam Gateau on 6/16/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include DeferredGlobalLight.slh@>
|
||||
<$declareEvalSkyboxGlobalColor()$>
|
||||
|
||||
<@include model/Material.slh@>
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
<$declareStandardCameraTransform()$>
|
||||
|
||||
<@include MaterialTextures.slh@>
|
||||
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
|
||||
|
||||
in vec2 _texCoord0;
|
||||
in vec2 _texCoord1;
|
||||
in vec4 _position;
|
||||
in vec3 _normal;
|
||||
in vec3 _color;
|
||||
in float _alpha;
|
||||
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
Material mat = getMaterial();
|
||||
int matKey = getMaterialKey(mat);
|
||||
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
|
||||
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
|
||||
|
||||
float opacity = 1.0;
|
||||
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
|
||||
<$discardTransparent(opacity)$>;
|
||||
|
||||
vec3 albedo = getMaterialAlbedo(mat);
|
||||
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
|
||||
albedo *= _color;
|
||||
|
||||
float metallic = getMaterialMetallic(mat);
|
||||
vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
|
||||
if (metallic <= 0.5) {
|
||||
metallic = 0.0;
|
||||
} else {
|
||||
fresnel = albedo;
|
||||
metallic = 1.0;
|
||||
}
|
||||
|
||||
float roughness = getMaterialRoughness(mat);
|
||||
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
|
||||
|
||||
vec3 emissive = getMaterialEmissive(mat);
|
||||
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
|
||||
|
||||
|
||||
vec3 fragPosition = _position.xyz;
|
||||
//vec3 fragNormal = normalize(_normal);
|
||||
|
||||
TransformCamera cam = getTransformCamera();
|
||||
vec3 fragNormal;
|
||||
<$transformEyeToWorldDir(cam, _normal, fragNormal)$>;
|
||||
|
||||
vec4 color = vec4(evalSkyboxGlobalColor(
|
||||
cam._viewInverse,
|
||||
1.0,
|
||||
occlusionTex,
|
||||
fragPosition,
|
||||
fragNormal,
|
||||
albedo,
|
||||
fresnel,
|
||||
metallic,
|
||||
roughness),
|
||||
opacity);
|
||||
|
||||
// And emissive
|
||||
color.rgb += emissive * isEmissiveEnabled();
|
||||
|
||||
// Apply standard tone mapping
|
||||
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
|
||||
}
|
83  libraries/render-utils/src/overlay3D_model_translucent.slf  Normal file
@@ -0,0 +1,83 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D_model_transparent.slf
//
// Created by Sam Gateau on 2/27/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlended()$>

<@include model/Material.slh@>

<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>

<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>

in vec2 _texCoord0;
in vec2 _texCoord1;
in vec4 _position;
in vec3 _normal;
in vec3 _color;
in float _alpha;

out vec4 _fragColor;

void main(void) {
    Material mat = getMaterial();
    int matKey = getMaterialKey(mat);
    <$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
    <$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>

    float opacity = 1.0;
    <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;

    vec3 albedo = getMaterialAlbedo(mat);
    <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
    albedo *= _color;

    float metallic = getMaterialMetallic(mat);
    vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
    if (metallic <= 0.5) {
        metallic = 0.0;
    } else {
        fresnel = albedo;
        metallic = 1.0;
    }

    float roughness = getMaterialRoughness(mat);
    <$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;

    vec3 emissive = getMaterialEmissive(mat);
    <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;


    vec3 fragPosition = _position.xyz;

    TransformCamera cam = getTransformCamera();
    vec3 fragNormal;
    <$transformEyeToWorldDir(cam, _normal, fragNormal)$>

    vec4 color = vec4(evalGlobalLightingAlphaBlended(
        cam._viewInverse,
        1.0,
        occlusionTex,
        fragPosition,
        fragNormal,
        albedo,
        fresnel,
        metallic,
        emissive,
        roughness, opacity),
        opacity);

    // Apply standard tone mapping
    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
libraries/render-utils/src/overlay3D_model_translucent_unlit.slf
@@ -0,0 +1,43 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_transparent_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include LightingModel.slh@>
<@include model/Material.slh@>

<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>

in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;

out vec4 _fragColor;

void main(void) {

    Material mat = getMaterial();
    int matKey = getMaterialKey(mat);
    <$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>

    float opacity = 1.0;
    <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;

    vec3 albedo = getMaterialAlbedo(mat);
    <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
    albedo *= _color;

    vec4 color = vec4(albedo * isUnlitEnabled(), opacity);

    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
44  libraries/render-utils/src/overlay3D_model_unlit.slf  Normal file
@@ -0,0 +1,44 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include LightingModel.slh@>
<@include model/Material.slh@>

<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>

in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;

out vec4 _fragColor;

void main(void) {

    Material mat = getMaterial();
    int matKey = getMaterialKey(mat);
    <$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>

    float opacity = 1.0;
    <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
    <$discardTransparent(opacity)$>;

    vec3 albedo = getMaterialAlbedo(mat);
    <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
    albedo *= _color;

    vec4 color = vec4(albedo * isUnlitEnabled(), opacity);

    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
@@ -20,8 +20,6 @@ void renderItems(const SceneContextPointer& sceneContext, const RenderContextPoi
void renderShapes(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ShapePlumberPointer& shapeContext, const ItemBounds& inItems, int maxDrawnItems = -1);
void renderStateSortShapes(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ShapePlumberPointer& shapeContext, const ItemBounds& inItems, int maxDrawnItems = -1);


class DrawLightConfig : public Job::Config {
    Q_OBJECT
    Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY numDrawnChanged)

@@ -39,6 +39,10 @@ void ShapePlumber::addPipelineHelper(const Filter& filter, ShapeKey key, int bit
        }
    } else {
        // Add the brand new pipeline and cache its location in the lib
        auto precedent = _pipelineMap.find(key);
        if (precedent != _pipelineMap.end()) {
            qCDebug(renderlogging) << "Key already assigned: " << key;
        }
        _pipelineMap.insert(PipelineMap::value_type(key, pipeline));
    }
}

@@ -65,16 +69,11 @@ void ShapePlumber::addPipeline(const Filter& filter, const gpu::ShaderPointer& p
    slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), Slot::BUFFER::LIGHT));
    slotBindings.insert(gpu::Shader::Binding(std::string("lightAmbientBuffer"), Slot::BUFFER::LIGHT_AMBIENT_BUFFER));
    slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), Slot::MAP::LIGHT_AMBIENT));
    slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), Slot::NORMAL_FITTING));

    gpu::Shader::makeProgram(*program, slotBindings);

    auto locations = std::make_shared<Locations>();
    locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
    if (program->getTextures().findLocation("normalFittingMap") > -1) {
        locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");

    }
    locations->albedoTextureUnit = program->getTextures().findLocation("albedoMap");
    locations->roughnessTextureUnit = program->getTextures().findLocation("roughnessMap");
    locations->normalTextureUnit = program->getTextures().findLocation("normalMap");

@@ -22,13 +22,13 @@ namespace render {
class ShapeKey {
public:
    enum FlagBit {
        TRANSLUCENT = 0,
        MATERIAL = 0,
        TRANSLUCENT,
        LIGHTMAP,
        TANGENTS,
        SPECULAR,
        UNLIT,
        SKINNED,
        STEREO,
        DEPTH_ONLY,
        DEPTH_BIAS,
        WIREFRAME,

@@ -53,13 +53,13 @@ public:

        ShapeKey build() const { return ShapeKey{_flags}; }

        Builder& withMaterial() { _flags.set(MATERIAL); return (*this); }
        Builder& withTranslucent() { _flags.set(TRANSLUCENT); return (*this); }
        Builder& withLightmap() { _flags.set(LIGHTMAP); return (*this); }
        Builder& withTangents() { _flags.set(TANGENTS); return (*this); }
        Builder& withSpecular() { _flags.set(SPECULAR); return (*this); }
        Builder& withUnlit() { _flags.set(UNLIT); return (*this); }
        Builder& withSkinned() { _flags.set(SKINNED); return (*this); }
        Builder& withStereo() { _flags.set(STEREO); return (*this); }
        Builder& withDepthOnly() { _flags.set(DEPTH_ONLY); return (*this); }
        Builder& withDepthBias() { _flags.set(DEPTH_BIAS); return (*this); }
        Builder& withWireframe() { _flags.set(WIREFRAME); return (*this); }

@@ -89,6 +89,9 @@ public:

        Filter build() const { return Filter(_flags, _mask); }

        Builder& withMaterial() { _flags.set(MATERIAL); _mask.set(MATERIAL); return (*this); }
        Builder& withoutMaterial() { _flags.reset(MATERIAL); _mask.set(MATERIAL); return (*this); }

        Builder& withTranslucent() { _flags.set(TRANSLUCENT); _mask.set(TRANSLUCENT); return (*this); }
        Builder& withOpaque() { _flags.reset(TRANSLUCENT); _mask.set(TRANSLUCENT); return (*this); }

@@ -107,9 +110,6 @@ public:
        Builder& withSkinned() { _flags.set(SKINNED); _mask.set(SKINNED); return (*this); }
        Builder& withoutSkinned() { _flags.reset(SKINNED); _mask.set(SKINNED); return (*this); }

        Builder& withStereo() { _flags.set(STEREO); _mask.set(STEREO); return (*this); }
        Builder& withoutStereo() { _flags.reset(STEREO); _mask.set(STEREO); return (*this); }

        Builder& withDepthOnly() { _flags.set(DEPTH_ONLY); _mask.set(DEPTH_ONLY); return (*this); }
        Builder& withoutDepthOnly() { _flags.reset(DEPTH_ONLY); _mask.set(DEPTH_ONLY); return (*this); }

@@ -128,19 +128,20 @@ public:
            Flags _mask{0};
        };
        Filter(const Filter::Builder& builder) : Filter(builder._flags, builder._mask) {}
        ShapeKey key() const { return ShapeKey(_flags); }
    protected:
        friend class ShapePlumber;
        Flags _flags{0};
        Flags _mask{0};
    };

    bool useMaterial() const { return _flags[MATERIAL]; }
    bool hasLightmap() const { return _flags[LIGHTMAP]; }
    bool hasTangents() const { return _flags[TANGENTS]; }
    bool hasSpecular() const { return _flags[SPECULAR]; }
    bool isUnlit() const { return _flags[UNLIT]; }
    bool isTranslucent() const { return _flags[TRANSLUCENT]; }
    bool isSkinned() const { return _flags[SKINNED]; }
    bool isStereo() const { return _flags[STEREO]; }
    bool isDepthOnly() const { return _flags[DEPTH_ONLY]; }
    bool isDepthBiased() const { return _flags[DEPTH_BIAS]; }
    bool isWireFrame() const { return _flags[WIREFRAME]; }

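The hunks above add a MATERIAL bit to ShapeKey and a matching withMaterial()/withoutMaterial() pair to the Filter builder. A minimal sketch (not part of the commit) of how these builders compose, using only the methods visible in this diff; the include path and the ShapeKey::Filter nesting are assumed from context:

```cpp
// Sketch only: exercises the Builder methods shown in the hunks above.
#include <render/ShapePipeline.h> // assumed location of ShapeKey / Filter

void shapeKeySketch() {
    // A key describing a material-driven, skinned, normal-mapped shape.
    auto key = render::ShapeKey::Builder().withMaterial().withSkinned().withTangents().build();
    bool matches = key.useMaterial() && key.isSkinned() && key.hasTangents(); // all true

    // A filter selecting opaque, material-driven shapes, ignoring the other bits.
    auto filter = render::ShapeKey::Filter::Builder().withMaterial().withOpaque().build();
    (void)matches;
    (void)filter;
}
```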
@@ -170,13 +171,13 @@ inline QDebug operator<<(QDebug debug, const ShapeKey& key) {
        debug << "[ShapeKey: OWN_PIPELINE]";
    } else {
        debug << "[ShapeKey:"
            << "useMaterial:" << key.useMaterial()
            << "hasLightmap:" << key.hasLightmap()
            << "hasTangents:" << key.hasTangents()
            << "hasSpecular:" << key.hasSpecular()
            << "isUnlit:" << key.isUnlit()
            << "isTranslucent:" << key.isTranslucent()
            << "isSkinned:" << key.isSkinned()
            << "isStereo:" << key.isStereo()
            << "isDepthOnly:" << key.isDepthOnly()
            << "isDepthBiased:" << key.isDepthBiased()
            << "isWireFrame:" << key.isWireFrame()

@@ -213,8 +214,6 @@ public:
            OCCLUSION,
            SCATTERING,
            LIGHT_AMBIENT,

            NORMAL_FITTING = 10,
        };
    };

@@ -226,7 +225,6 @@ public:
        int metallicTextureUnit;
        int emissiveTextureUnit;
        int occlusionTextureUnit;
        int normalFittingMapUnit;
        int lightingModelBufferUnit;
        int skinClusterBufferUnit;
        int materialBufferUnit;

@@ -19,6 +19,16 @@
#include <QObject>
#include <QString>

/**jsdoc
 * A Quaternion
 *
 * @typedef Quat
 * @property {float} x imaginary component i.
 * @property {float} y imaginary component j.
 * @property {float} z imaginary component k.
 * @property {float} w real component.
 */

/// Scriptable interface a Quaternion helper class object. Used exclusively in the JavaScript API
class Quat : public QObject {
    Q_OBJECT

@@ -34,6 +34,7 @@
#include <AudioConstants.h>
#include <AudioEffectOptions.h>
#include <AvatarData.h>
#include <DebugDraw.h>
#include <EntityScriptingInterface.h>
#include <MessagesClient.h>
#include <NetworkAccessManager.h>

@@ -630,6 +631,8 @@ void ScriptEngine::init() {
    registerGlobalObject("Tablet", DependencyManager::get<TabletScriptingInterface>().data());
    registerGlobalObject("Assets", &_assetScriptingInterface);
    registerGlobalObject("Resources", DependencyManager::get<ResourceScriptingInterface>().data());

    registerGlobalObject("DebugDraw", &DebugDraw::getInstance());
}

void ScriptEngine::registerValue(const QString& valueName, QScriptValue value) {

@@ -37,6 +37,15 @@
 * @property {float} z Z-coordinate of the vector.
 */

/**jsdoc
 * A 4-dimensional vector.
 *
 * @typedef Vec4
 * @property {float} x X-coordinate of the vector.
 * @property {float} y Y-coordinate of the vector.
 * @property {float} z Z-coordinate of the vector.
 * @property {float} w W-coordinate of the vector.
 */

/// Scriptable interface a Vec3ernion helper class object. Used exclusively in the JavaScript API
class Vec3 : public QObject {

@@ -10,6 +10,8 @@
#include "DebugDraw.h"
#include "SharedUtil.h"

using Lock = std::unique_lock<std::mutex>;

DebugDraw& DebugDraw::getInstance() {
    static DebugDraw* instance = globalInstance<DebugDraw>("com.highfidelity.DebugDraw");
    return *instance;

@@ -25,22 +27,50 @@ DebugDraw::~DebugDraw() {

// world space line, drawn only once
void DebugDraw::drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color) {
    Lock lock(_mapMutex);
    _rays.push_back(Ray(start, end, color));
}

void DebugDraw::addMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
void DebugDraw::addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
    Lock lock(_mapMutex);
    _markers[key] = MarkerInfo(rotation, position, color);
}

void DebugDraw::removeMarker(const std::string& key) {
void DebugDraw::removeMarker(const QString& key) {
    Lock lock(_mapMutex);
    _markers.erase(key);
}

void DebugDraw::addMyAvatarMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
void DebugDraw::addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
    Lock lock(_mapMutex);
    _myAvatarMarkers[key] = MarkerInfo(rotation, position, color);
}

void DebugDraw::removeMyAvatarMarker(const std::string& key) {
void DebugDraw::removeMyAvatarMarker(const QString& key) {
    Lock lock(_mapMutex);
    _myAvatarMarkers.erase(key);
}

//
// accessors used by renderer
//

DebugDraw::MarkerMap DebugDraw::getMarkerMap() const {
    Lock lock(_mapMutex);
    return _markers;
}

DebugDraw::MarkerMap DebugDraw::getMyAvatarMarkerMap() const {
    Lock lock(_mapMutex);
    return _myAvatarMarkers;
}

DebugDraw::Rays DebugDraw::getRays() const {
    Lock lock(_mapMutex);
    return _rays;
}

void DebugDraw::clearRays() {
    Lock lock(_mapMutex);
    _rays.clear();
}

@@ -10,6 +10,7 @@
#ifndef hifi_DebugDraw_h
#define hifi_DebugDraw_h

#include <mutex>
#include <unordered_map>
#include <tuple>
#include <string>

@@ -17,26 +18,69 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

class DebugDraw {
#include <QObject>
#include <QString>

/**jsdoc
 * Helper functions to render ephemeral debug markers and lines.
 * DebugDraw markers and lines are only visible locally; they are not visible to other users.
 * @namespace DebugDraw
 */
class DebugDraw : public QObject {
    Q_OBJECT
public:
    static DebugDraw& getInstance();

    DebugDraw();
    ~DebugDraw();

    // world space line, drawn only once
    void drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color);
    /**jsdoc
     * Draws a line in world space, but it will only be visible for a single frame.
     * @function DebugDraw.drawRay
     * @param {Vec3} start - start position of line in world space.
     * @param {Vec3} end - end position of line in world space.
     * @param {Vec4} color - color of line, each component should be in the zero to one range. x = red, y = green, z = blue, w = alpha.
     */
    Q_INVOKABLE void drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color);

    // world space marker, drawn every frame until it is removed.
    void addMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
    void removeMarker(const std::string& key);
    /**jsdoc
     * Adds a debug marker to the world. This marker will be drawn every frame until it is removed with DebugDraw.removeMarker.
     * This can be called repeatedly to change the position of the marker.
     * @function DebugDraw.addMarker
     * @param {string} key - name to uniquely identify this marker, later used for DebugDraw.removeMarker.
     * @param {Quat} rotation - rotation of the marker in world space.
     * @param {Vec3} position - position of the marker in world space.
     * @param {Vec4} color - color of the marker.
     */
    Q_INVOKABLE void addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);

    // myAvatar relative marker, drawn every frame until it is removed.
    void addMyAvatarMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
    void removeMyAvatarMarker(const std::string& key);
    /**jsdoc
     * Removes a debug marker from the world. Once a marker is removed, it will no longer be visible.
     * @function DebugDraw.removeMarker
     * @param {string} key - name of marker to remove.
     */
    Q_INVOKABLE void removeMarker(const QString& key);

    /**jsdoc
     * Adds a debug marker relative to MyAvatar. This marker will be drawn every frame until it is removed with DebugDraw.removeMyAvatarMarker.
     * This can be called repeatedly to change the position of the marker.
     * @function DebugDraw.addMyAvatarMarker
     * @param {string} key - name to uniquely identify this marker, later used for DebugDraw.removeMyAvatarMarker.
     * @param {Quat} rotation - rotation of the marker in avatar space.
     * @param {Vec3} position - position of the marker in avatar space.
     * @param {Vec4} color - color of the marker.
     */
    Q_INVOKABLE void addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);

    /**jsdoc
     * Removes a debug marker from the world. Once a marker is removed, it will no longer be visible.
     * @function DebugDraw.removeMyAvatarMarker
     * @param {string} key - name of marker to remove.
     */
    Q_INVOKABLE void removeMyAvatarMarker(const QString& key);

    using MarkerInfo = std::tuple<glm::quat, glm::vec3, glm::vec4>;
    using MarkerMap = std::unordered_map<std::string, MarkerInfo>;
    using MarkerMap = std::map<QString, MarkerInfo>;
    using Ray = std::tuple<glm::vec3, glm::vec3, glm::vec4>;
    using Rays = std::vector<Ray>;

@@ -44,16 +88,17 @@ public:
    // accessors used by renderer
    //

    const MarkerMap& getMarkerMap() const { return _markers; }
    const MarkerMap& getMyAvatarMarkerMap() const { return _myAvatarMarkers; }
    MarkerMap getMarkerMap() const;
    MarkerMap getMyAvatarMarkerMap() const;
    void updateMyAvatarPos(const glm::vec3& pos) { _myAvatarPos = pos; }
    const glm::vec3& getMyAvatarPos() const { return _myAvatarPos; }
    void updateMyAvatarRot(const glm::quat& rot) { _myAvatarRot = rot; }
    const glm::quat& getMyAvatarRot() const { return _myAvatarRot; }
    const Rays getRays() const { return _rays; }
    void clearRays() { _rays.clear(); }
    Rays getRays() const;
    void clearRays();

protected:
    mutable std::mutex _mapMutex;
    MarkerMap _markers;
    MarkerMap _myAvatarMarkers;
    glm::quat _myAvatarRot;

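A minimal usage sketch (not part of the commit) of the DebugDraw singleton as declared above; the key name and values are purely illustrative, and the color convention is RGBA in the 0..1 range per the jsdoc:

```cpp
// Sketch only: calls the DebugDraw methods whose signatures appear in this diff.
#include <QString>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include "DebugDraw.h"

void debugDrawSketch() {
    auto& debugDraw = DebugDraw::getInstance();

    // One-frame red line from the origin to one meter up.
    debugDraw.drawRay(glm::vec3(0.0f), glm::vec3(0.0f, 1.0f, 0.0f),
                      glm::vec4(1.0f, 0.0f, 0.0f, 1.0f));

    // Persistent green world-space marker, redrawn every frame until removed by key.
    debugDraw.addMarker(QString("exampleMarker"),
                        glm::quat(1.0f, 0.0f, 0.0f, 0.0f),      // identity rotation
                        glm::vec3(0.0f, 1.0f, 0.0f),
                        glm::vec4(0.0f, 1.0f, 0.0f, 1.0f));
    debugDraw.removeMarker(QString("exampleMarker"));
}
```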
@@ -245,4 +245,53 @@ inline bool isNaN(const glm::quat& value) { return isNaN(value.w) || isNaN(value

glm::mat4 orthoInverse(const glm::mat4& m);

//
// Safe replacement of glm_mat4_mul() for unaligned arguments instead of __m128
//
inline void glm_mat4u_mul(const glm::mat4& m1, const glm::mat4& m2, glm::mat4& r) {

#if GLM_ARCH & GLM_ARCH_SSE2_BIT
    __m128 u0 = _mm_loadu_ps((float*)&m1[0][0]);
    __m128 u1 = _mm_loadu_ps((float*)&m1[1][0]);
    __m128 u2 = _mm_loadu_ps((float*)&m1[2][0]);
    __m128 u3 = _mm_loadu_ps((float*)&m1[3][0]);

    __m128 v0 = _mm_loadu_ps((float*)&m2[0][0]);
    __m128 v1 = _mm_loadu_ps((float*)&m2[1][0]);
    __m128 v2 = _mm_loadu_ps((float*)&m2[2][0]);
    __m128 v3 = _mm_loadu_ps((float*)&m2[3][0]);

    __m128 t0 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(0,0,0,0)), u0);
    __m128 t1 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(1,1,1,1)), u1);
    __m128 t2 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(2,2,2,2)), u2);
    __m128 t3 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(3,3,3,3)), u3);
    v0 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));

    t0 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(0,0,0,0)), u0);
    t1 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(1,1,1,1)), u1);
    t2 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(2,2,2,2)), u2);
    t3 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(3,3,3,3)), u3);
    v1 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));

    t0 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(0,0,0,0)), u0);
    t1 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(1,1,1,1)), u1);
    t2 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(2,2,2,2)), u2);
    t3 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(3,3,3,3)), u3);
    v2 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));

    t0 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(0,0,0,0)), u0);
    t1 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(1,1,1,1)), u1);
    t2 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(2,2,2,2)), u2);
    t3 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(3,3,3,3)), u3);
    v3 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));

    _mm_storeu_ps((float*)&r[0][0], v0);
    _mm_storeu_ps((float*)&r[1][0], v1);
    _mm_storeu_ps((float*)&r[2][0], v2);
    _mm_storeu_ps((float*)&r[3][0], v3);
#else
    r = m1 * m2;
#endif
}

#endif // hifi_GLMHelpers_h

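For reference, a small sketch (not part of the commit) of calling the helper above; the matrices are arbitrary, and with these simple values both the SSE2 path and the scalar fallback produce exactly the plain glm product `m1 * m2`:

```cpp
// Sketch only: exercises glm_mat4u_mul() as declared above.
#include <cassert>
#include <glm/glm.hpp>
#include "GLMHelpers.h"

void glmMat4uMulSketch() {
    glm::mat4 m1(2.0f);                         // uniform scale by 2
    glm::mat4 m2(1.0f);                         // identity...
    m2[3] = glm::vec4(1.0f, 2.0f, 3.0f, 1.0f);  // ...plus a translation column

    glm::mat4 result;
    glm_mat4u_mul(m1, m2, result);              // safe even for unaligned matrices

    assert(result == m1 * m2);                  // matches the scalar product
}
```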
@@ -159,13 +159,33 @@ Column {
        }
    }

    Row {
        Column {
            id: metas
            CheckBox {
                text: "Draw Meta Bounds"
                text: "Metas"
                checked: Render.getConfig("DrawMetaBounds")["enabled"]
                onCheckedChanged: { Render.getConfig("DrawMetaBounds")["enabled"] = checked }
            }
            CheckBox {
                text: "Opaques"
                checked: Render.getConfig("DrawOpaqueBounds")["enabled"]
                onCheckedChanged: { Render.getConfig("DrawOpaqueBounds")["enabled"] = checked }
            }
            CheckBox {
                text: "Transparents"
                checked: Render.getConfig("DrawTransparentBounds")["enabled"]
                onCheckedChanged: { Render.getConfig("DrawTransparentBounds")["enabled"] = checked }
            }
            CheckBox {
                text: "Overlay Opaques"
                checked: Render.getConfig("DrawOverlayOpaqueBounds")["enabled"]
                onCheckedChanged: { Render.getConfig("DrawOverlayOpaqueBounds")["enabled"] = checked }
            }
            CheckBox {
                text: "Overlay Transparents"
                checked: Render.getConfig("DrawOverlayTransparentBounds")["enabled"]
                onCheckedChanged: { Render.getConfig("DrawOverlayTransparentBounds")["enabled"] = checked }
            }
        }
    }

BIN  scripts/system/assets/sounds/entitySnap.wav  Normal file (binary file not shown)
@ -74,6 +74,10 @@ var WEB_TOUCH_Y_OFFSET = 0.05; // how far forward (or back with a negative numbe
|
|||
var WEB_TOUCH_TOO_CLOSE = 0.03; // if the stylus is pushed too far through the web surface, don't consider it touching
|
||||
var WEB_TOUCH_Y_TOUCH_DEADZONE_SIZE = 0.01;
|
||||
|
||||
var FINGER_TOUCH_Y_OFFSET = -0.02;
|
||||
var FINGER_TOUCH_MIN = -0.01 - FINGER_TOUCH_Y_OFFSET;
|
||||
var FINGER_TOUCH_MAX = 0.01 - FINGER_TOUCH_Y_OFFSET;
|
||||
|
||||
//
|
||||
// distant manipulation
|
||||
//
|
||||
|
@ -205,14 +209,15 @@ var HARDWARE_MOUSE_ID = 0; // Value reserved for hardware mouse.
|
|||
var STATE_OFF = 0;
|
||||
var STATE_SEARCHING = 1;
|
||||
var STATE_DISTANCE_HOLDING = 2;
|
||||
var STATE_NEAR_GRABBING = 3;
|
||||
var STATE_NEAR_TRIGGER = 4;
|
||||
var STATE_FAR_TRIGGER = 5;
|
||||
var STATE_HOLD = 6;
|
||||
var STATE_ENTITY_STYLUS_TOUCHING = 7;
|
||||
var STATE_ENTITY_LASER_TOUCHING = 8;
|
||||
var STATE_OVERLAY_STYLUS_TOUCHING = 9;
|
||||
var STATE_OVERLAY_LASER_TOUCHING = 10;
|
||||
var STATE_DISTANCE_ROTATING = 3;
|
||||
var STATE_NEAR_GRABBING = 4;
|
||||
var STATE_NEAR_TRIGGER = 5;
|
||||
var STATE_FAR_TRIGGER = 6;
|
||||
var STATE_HOLD = 7;
|
||||
var STATE_ENTITY_STYLUS_TOUCHING = 8;
|
||||
var STATE_ENTITY_LASER_TOUCHING = 9;
|
||||
var STATE_OVERLAY_STYLUS_TOUCHING = 10;
|
||||
var STATE_OVERLAY_LASER_TOUCHING = 11;
|
||||
|
||||
var CONTROLLER_STATE_MACHINE = {};
|
||||
|
||||
|
@ -231,6 +236,11 @@ CONTROLLER_STATE_MACHINE[STATE_DISTANCE_HOLDING] = {
|
|||
enterMethod: "distanceHoldingEnter",
|
||||
updateMethod: "distanceHolding"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_DISTANCE_ROTATING] = {
|
||||
name: "distance_rotating",
|
||||
enterMethod: "distanceRotatingEnter",
|
||||
updateMethod: "distanceRotating"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_NEAR_GRABBING] = {
|
||||
name: "near_grabbing",
|
||||
enterMethod: "nearGrabbingEnter",
|
||||
|
@ -252,20 +262,73 @@ CONTROLLER_STATE_MACHINE[STATE_FAR_TRIGGER] = {
|
|||
updateMethod: "farTrigger"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_ENTITY_STYLUS_TOUCHING] = {
|
||||
name: "entityTouching",
|
||||
name: "entityStylusTouching",
|
||||
enterMethod: "entityTouchingEnter",
|
||||
exitMethod: "entityTouchingExit",
|
||||
updateMethod: "entityTouching"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_ENTITY_LASER_TOUCHING] = {
|
||||
name: "entityLaserTouching",
|
||||
enterMethod: "entityTouchingEnter",
|
||||
exitMethod: "entityTouchingExit",
|
||||
updateMethod: "entityTouching"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_ENTITY_LASER_TOUCHING] = CONTROLLER_STATE_MACHINE[STATE_ENTITY_STYLUS_TOUCHING];
|
||||
CONTROLLER_STATE_MACHINE[STATE_OVERLAY_STYLUS_TOUCHING] = {
|
||||
name: "overlayTouching",
|
||||
name: "overlayStylusTouching",
|
||||
enterMethod: "overlayTouchingEnter",
|
||||
exitMethod: "overlayTouchingExit",
|
||||
updateMethod: "overlayTouching"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_OVERLAY_LASER_TOUCHING] = {
|
||||
name: "overlayLaserTouching",
|
||||
enterMethod: "overlayTouchingEnter",
|
||||
exitMethod: "overlayTouchingExit",
|
||||
updateMethod: "overlayTouching"
|
||||
};
|
||||
CONTROLLER_STATE_MACHINE[STATE_OVERLAY_LASER_TOUCHING] = CONTROLLER_STATE_MACHINE[STATE_OVERLAY_STYLUS_TOUCHING];
|
||||
|
||||
function getFingerWorldLocation(hand) {
|
||||
var fingerJointName = (hand === RIGHT_HAND) ? "RightHandIndex4" : "LeftHandIndex4";
|
||||
|
||||
var fingerJointIndex = MyAvatar.getJointIndex(fingerJointName);
|
||||
var fingerPosition = MyAvatar.getAbsoluteJointTranslationInObjectFrame(fingerJointIndex);
|
||||
var fingerRotation = MyAvatar.getAbsoluteJointRotationInObjectFrame(fingerJointIndex);
|
||||
var worldFingerRotation = Quat.multiply(MyAvatar.orientation, fingerRotation);
|
||||
var worldFingerPosition = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, fingerPosition));
|
||||
|
||||
// local y offset.
|
||||
var localYOffset = Vec3.multiplyQbyV(worldFingerRotation, {x: 0, y: FINGER_TOUCH_Y_OFFSET, z: 0});
|
||||
|
||||
var offsetWorldFingerPosition = Vec3.sum(worldFingerPosition, localYOffset);
|
||||
|
||||
return {
|
||||
position: offsetWorldFingerPosition,
|
||||
orientation: worldFingerRotation,
|
||||
rotation: worldFingerRotation,
|
||||
valid: true
|
||||
};
|
||||
}
|
||||
|
||||
// Object assign polyfill
|
||||
if (typeof Object.assign != 'function') {
|
||||
Object.assign = function(target, varArgs) {
|
||||
'use strict';
|
||||
if (target == null) {
|
||||
throw new TypeError('Cannot convert undefined or null to object');
|
||||
}
|
||||
var to = Object(target);
|
||||
for (var index = 1; index < arguments.length; index++) {
|
||||
var nextSource = arguments[index];
|
||||
if (nextSource != null) {
|
||||
for (var nextKey in nextSource) {
|
||||
if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) {
|
||||
to[nextKey] = nextSource[nextKey];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return to;
|
||||
};
|
||||
}
|
||||
|
||||
function distanceBetweenPointAndEntityBoundingBox(point, entityProps) {
|
||||
var entityXform = new Xform(entityProps.rotation, entityProps.position);
|
||||
|
@ -347,6 +410,7 @@ function handLaserIntersectItem(position, rotation, start) {
|
|||
direction: rayDirection,
|
||||
length: PICK_MAX_DISTANCE
|
||||
};
|
||||
|
||||
return intersectionInfo;
|
||||
} else {
|
||||
// entity has been destroyed? or is no longer in cache
|
||||
|
@ -413,16 +477,18 @@ function entityIsGrabbedByOther(entityID) {
|
|||
var actionID = actionIDs[actionIndex];
|
||||
var actionArguments = Entities.getActionArguments(entityID, actionID);
|
||||
var tag = actionArguments.tag;
|
||||
if (tag == getTag()) {
|
||||
if (tag === getTag()) {
|
||||
// we see a grab-*uuid* shaped tag, but it's our tag, so that's okay.
|
||||
continue;
|
||||
}
|
||||
if (tag.slice(0, 5) == "grab-") {
|
||||
var GRAB_PREFIX_LENGTH = 5;
|
||||
var UUID_LENGTH = 38;
|
||||
if (tag && tag.slice(0, GRAB_PREFIX_LENGTH) == "grab-") {
|
||||
// we see a grab-*uuid* shaped tag and it's not ours, so someone else is grabbing it.
|
||||
return true;
|
||||
return tag.slice(GRAB_PREFIX_LENGTH, GRAB_PREFIX_LENGTH + UUID_LENGTH - 1);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
return null;
|
||||
}
|
||||
|
||||
function propsArePhysical(props) {
|
||||
|
@ -740,6 +806,10 @@ function MyController(hand) {
|
|||
this.stylus = null;
|
||||
this.homeButtonTouched = false;
|
||||
|
||||
this.controllerJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND");
|
||||
|
||||
// Until there is some reliable way to keep track of a "stack" of parentIDs, we'll have problems
|
||||
// when more than one avatar does parenting grabs on things. This script tries to work
|
||||
// around this with two associative arrays: previousParentID and previousParentJointIndex. If
|
||||
|
@ -791,10 +861,10 @@ function MyController(hand) {
|
|||
|
||||
// for visualizations
|
||||
this.overlayLine = null;
|
||||
|
||||
// for lights
|
||||
this.overlayLine = null;
|
||||
this.searchSphere = null;
|
||||
this.otherGrabbingLine = null;
|
||||
|
||||
this.otherGrabbingUUID = null;
|
||||
|
||||
this.waitForTriggerRelease = false;
|
||||
|
||||
|
@ -816,6 +886,8 @@ function MyController(hand) {
|
|||
this.tabletStabbedPos2D = null;
|
||||
this.tabletStabbedPos3D = null;
|
||||
|
||||
this.useFingerInsteadOfStylus = false;
|
||||
|
||||
var _this = this;
|
||||
|
||||
var suppressedIn2D = [STATE_OFF, STATE_SEARCHING];
|
||||
|
@ -829,10 +901,22 @@ function MyController(hand) {
|
|||
this.updateSmoothedTrigger();
|
||||
this.maybeScaleMyAvatar();
|
||||
|
||||
var DEFAULT_USE_FINGER_AS_STYLUS = false;
|
||||
var USE_FINGER_AS_STYLUS = Settings.getValue("preferAvatarFingerOverStylus");
|
||||
if (USE_FINGER_AS_STYLUS === "") {
|
||||
USE_FINGER_AS_STYLUS = DEFAULT_USE_FINGER_AS_STYLUS;
|
||||
}
|
||||
if (USE_FINGER_AS_STYLUS && MyAvatar.getJointIndex("LeftHandIndex4") !== -1) {
|
||||
this.useFingerInsteadOfStylus = true;
|
||||
} else {
|
||||
this.useFingerInsteadOfStylus = false;
|
||||
}
|
||||
|
||||
if (this.ignoreInput()) {
|
||||
|
||||
// Most hand input is disabled, because we are interacting with the 2d hud.
|
||||
// However, we still should check for collisions of the stylus with the web overlay.
|
||||
|
||||
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
this.processStylus(controllerLocation.position);
|
||||
|
||||
|
@ -869,7 +953,8 @@ function MyController(hand) {
|
|||
newState !== STATE_OVERLAY_LASER_TOUCHING)) {
|
||||
return;
|
||||
}
|
||||
setGrabCommunications((newState === STATE_DISTANCE_HOLDING) || (newState === STATE_NEAR_GRABBING));
|
||||
setGrabCommunications((newState === STATE_DISTANCE_HOLDING) || (newState === STATE_DISTANCE_ROTATING)
|
||||
|| (newState === STATE_NEAR_GRABBING));
|
||||
if (WANT_DEBUG || WANT_DEBUG_STATE) {
|
||||
var oldStateName = stateToName(this.state);
|
||||
var newStateName = stateToName(newState);
|
||||
|
@ -920,9 +1005,7 @@ function MyController(hand) {
|
|||
ignoreRayIntersection: true,
|
||||
drawInFront: false,
|
||||
parentID: AVATAR_SELF_ID,
|
||||
parentJointIndex: MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND")
|
||||
parentJointIndex: this.controllerJointIndex
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@ -1007,32 +1090,38 @@ function MyController(hand) {
|
|||
}
|
||||
};
|
||||
|
||||
this.overlayLineOn = function(closePoint, farPoint, color) {
|
||||
this.overlayLineOn = function(closePoint, farPoint, color, farParentID) {
|
||||
if (this.overlayLine === null) {
|
||||
var lineProperties = {
|
||||
name: "line",
|
||||
glow: 1.0,
|
||||
start: closePoint,
|
||||
end: farPoint,
|
||||
color: color,
|
||||
ignoreRayIntersection: true, // always ignore this
|
||||
drawInFront: true, // Even when buried inside of something, show it.
|
||||
visible: true,
|
||||
alpha: 1
|
||||
};
|
||||
this.overlayLine = Overlays.addOverlay("line3d", lineProperties);
|
||||
|
||||
} else {
|
||||
Overlays.editOverlay(this.overlayLine, {
|
||||
lineWidth: 5,
|
||||
start: closePoint,
|
||||
end: farPoint,
|
||||
color: color,
|
||||
visible: true,
|
||||
ignoreRayIntersection: true, // always ignore this
|
||||
drawInFront: true, // Even when buried inside of something, show it.
|
||||
alpha: 1
|
||||
});
|
||||
visible: true,
|
||||
alpha: 1,
|
||||
parentID: AVATAR_SELF_ID,
|
||||
parentJointIndex: this.controllerJointIndex,
|
||||
endParentID: farParentID
|
||||
};
|
||||
this.overlayLine = Overlays.addOverlay("line3d", lineProperties);
|
||||
|
||||
} else {
|
||||
if (farParentID && farParentID != NULL_UUID) {
|
||||
Overlays.editOverlay(this.overlayLine, {
|
||||
color: color,
|
||||
endParentID: farParentID
|
||||
});
|
||||
} else {
|
||||
Overlays.editOverlay(this.overlayLine, {
|
||||
length: Vec3.distance(farPoint, closePoint),
|
||||
color: color,
|
||||
endParentID: farParentID
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1061,6 +1150,29 @@ function MyController(hand) {
|
|||
}
|
||||
};
|
||||
|
||||
this.otherGrabbingLineOn = function(avatarPosition, entityPosition, color) {
|
||||
if (this.otherGrabbingLine === null) {
|
||||
var lineProperties = {
|
||||
lineWidth: 5,
|
||||
start: avatarPosition,
|
||||
end: entityPosition,
|
||||
color: color,
|
||||
glow: 1.0,
|
||||
ignoreRayIntersection: true,
|
||||
drawInFront: true,
|
||||
visible: true,
|
||||
alpha: 1
|
||||
};
|
||||
this.otherGrabbingLine = Overlays.addOverlay("line3d", lineProperties);
|
||||
} else {
|
||||
Overlays.editOverlay(this.otherGrabbingLine, {
|
||||
start: avatarPosition,
|
||||
end: entityPosition,
|
||||
color: color
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
this.evalLightWorldTransform = function(modelPos, modelRot) {
|
||||
|
||||
var MODEL_LIGHT_POSITION = {
|
||||
|
@ -1104,14 +1216,20 @@ function MyController(hand) {
|
|||
}
|
||||
};
|
||||
|
||||
this.turnOffVisualizations = function() {
|
||||
this.otherGrabbingLineOff = function() {
|
||||
if (this.otherGrabbingLine !== null) {
|
||||
Overlays.deleteOverlay(this.otherGrabbingLine);
|
||||
}
|
||||
this.otherGrabbingLine = null;
|
||||
};
|
||||
|
||||
this.turnOffVisualizations = function() {
|
||||
this.overlayLineOff();
|
||||
this.grabPointSphereOff();
|
||||
this.lineOff();
|
||||
this.searchSphereOff();
|
||||
this.otherGrabbingLineOff();
|
||||
restore2DMode();
|
||||
|
||||
};
|
||||
|
||||
this.triggerPress = function(value) {
|
||||
|
@ -1174,30 +1292,54 @@ function MyController(hand) {
|
|||
};
|
||||
|
||||
this.processStylus = function(worldHandPosition) {
|
||||
// see if the hand is near a tablet or web-entity
|
||||
var candidateEntities = Entities.findEntities(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
|
||||
entityPropertiesCache.addEntities(candidateEntities);
|
||||
var nearWeb = false;
|
||||
for (var i = 0; i < candidateEntities.length; i++) {
|
||||
var props = entityPropertiesCache.getProps(candidateEntities[i]);
|
||||
if (props && (props.type == "Web" || this.isTablet(candidateEntities[i]))) {
|
||||
nearWeb = true;
|
||||
break;
|
||||
|
||||
var performRayTest = false;
|
||||
if (this.useFingerInsteadOfStylus) {
|
||||
this.hideStylus();
|
||||
performRayTest = true;
|
||||
} else {
|
||||
var i;
|
||||
|
||||
// see if the hand is near a tablet or web-entity
|
||||
var candidateEntities = Entities.findEntities(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
|
||||
entityPropertiesCache.addEntities(candidateEntities);
|
||||
for (i = 0; i < candidateEntities.length; i++) {
|
||||
var props = entityPropertiesCache.getProps(candidateEntities[i]);
|
||||
if (props && (props.type == "Web" || this.isTablet(candidateEntities[i]))) {
|
||||
performRayTest = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!performRayTest) {
|
||||
var candidateOverlays = Overlays.findOverlays(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
|
||||
for (i = 0; i < candidateOverlays.length; i++) {
|
||||
if (this.isTablet(candidateOverlays[i])) {
|
||||
performRayTest = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (performRayTest) {
|
||||
this.showStylus();
|
||||
} else {
|
||||
this.hideStylus();
|
||||
}
|
||||
}
|
||||
|
||||
var candidateOverlays = Overlays.findOverlays(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
|
||||
for (var j = 0; j < candidateOverlays.length; j++) {
|
||||
if (this.isTablet(candidateOverlays[j])) {
|
||||
nearWeb = true;
|
||||
if (performRayTest) {
|
||||
var rayPickInfo = this.calcRayPickInfo(this.hand, this.useFingerInsteadOfStylus);
|
||||
var max, min;
|
||||
if (this.useFingerInsteadOfStylus) {
|
||||
max = FINGER_TOUCH_MAX;
|
||||
min = FINGER_TOUCH_MIN;
|
||||
} else {
|
||||
max = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET;
|
||||
min = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE;
|
||||
}
|
||||
}
|
||||
|
||||
if (nearWeb) {
|
||||
this.showStylus();
|
||||
var rayPickInfo = this.calcRayPickInfo(this.hand);
|
||||
if (rayPickInfo.distance < WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET &&
|
||||
rayPickInfo.distance > WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE) {
|
||||
if (rayPickInfo.distance < max && rayPickInfo.distance > min) {
|
||||
this.handleStylusOnHomeButton(rayPickInfo);
|
||||
if (this.handleStylusOnWebEntity(rayPickInfo)) {
|
||||
return;
|
||||
|
@ -1206,10 +1348,8 @@ function MyController(hand) {
|
|||
return;
|
||||
}
|
||||
} else {
|
||||
this.homeButtonTouched = false;
|
||||
}
|
||||
} else {
|
||||
this.hideStylus();
|
||||
this.homeButtonTouched = false;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1324,10 +1464,17 @@ function MyController(hand) {
|
|||
|
||||
// Performs ray pick test from the hand controller into the world
|
||||
// @param {number} which hand to use, RIGHT_HAND or LEFT_HAND
|
||||
// @param {bool} if true use the world position/orientation of the index finger to cast the ray from.
|
||||
// @returns {object} returns object with two keys entityID and distance
|
||||
//
|
||||
this.calcRayPickInfo = function(hand) {
|
||||
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
this.calcRayPickInfo = function(hand, useFingerInsteadOfController) {
|
||||
|
||||
var controllerLocation;
|
||||
if (useFingerInsteadOfController) {
|
||||
controllerLocation = getFingerWorldLocation(hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var worldHandPosition = controllerLocation.position;
|
||||
var worldHandRotation = controllerLocation.orientation;
|
||||
|
||||
|
@ -1439,9 +1586,10 @@ function MyController(hand) {
|
|||
var props = entityPropertiesCache.getProps(hotspot.entityID);
|
||||
var debug = (WANT_DEBUG_SEARCH_NAME && props.name === WANT_DEBUG_SEARCH_NAME);
|
||||
|
||||
var okToEquipFromOtherHand = ((this.getOtherHandController().state == STATE_NEAR_GRABBING ||
|
||||
this.getOtherHandController().state == STATE_DISTANCE_HOLDING) &&
|
||||
this.getOtherHandController().grabbedThingID == hotspot.entityID);
|
||||
var otherHandControllerState = this.getOtherHandController().state;
|
||||
var okToEquipFromOtherHand = ((otherHandControllerState === STATE_NEAR_GRABBING
|
||||
|| otherHandControllerState === STATE_DISTANCE_HOLDING || otherHandControllerState === STATE_DISTANCE_ROTATING)
|
||||
&& this.getOtherHandController().grabbedThingID === hotspot.entityID);
|
||||
var hasParent = true;
|
||||
if (props.parentID === NULL_UUID) {
|
||||
hasParent = false;
|
||||
|
@ -1455,7 +1603,18 @@ function MyController(hand) {
|
|||
|
||||
return true;
|
||||
};
|
||||
this.entityIsCloneable = function(entityID) {
|
||||
var entityProps = entityPropertiesCache.getGrabbableProps(entityID);
|
||||
var props = entityPropertiesCache.getProps(entityID);
|
||||
if (!props) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (entityProps.hasOwnProperty("cloneable")) {
|
||||
return entityProps.cloneable;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
this.entityIsGrabbable = function(entityID) {
|
||||
var grabbableProps = entityPropertiesCache.getGrabbableProps(entityID);
|
||||
var props = entityPropertiesCache.getProps(entityID);
|
||||
|
@ -1522,7 +1681,8 @@ function MyController(hand) {
|
|||
return false;
|
||||
}
|
||||
|
||||
if (entityIsGrabbedByOther(entityID)) {
|
||||
this.otherGrabbingUUID = entityIsGrabbedByOther(entityID);
|
||||
if (this.otherGrabbingUUID !== null) {
|
||||
// don't distance grab something that is already grabbed.
|
||||
if (debug) {
|
||||
print("distance grab is skipping '" + props.name + "': already grabbed by another.");
|
||||
|
@ -1535,7 +1695,7 @@ function MyController(hand) {
|
|||
|
||||
this.entityIsNearGrabbable = function(entityID, handPosition, maxDistance) {
|
||||
|
||||
if (!this.entityIsGrabbable(entityID)) {
|
||||
if (!this.entityIsCloneable(entityID) && !this.entityIsGrabbable(entityID)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1726,17 +1886,40 @@ function MyController(hand) {
|
|||
} else {
|
||||
// potentialFarTriggerEntity = entity;
|
||||
}
|
||||
this.otherGrabbingLineOff();
|
||||
} else if (this.entityIsDistanceGrabbable(rayPickInfo.entityID, handPosition)) {
|
||||
if (this.triggerSmoothedGrab() && !isEditing() && farGrabEnabled && farSearching) {
|
||||
this.grabbedThingID = entity;
|
||||
this.grabbedIsOverlay = false;
|
||||
this.grabbedDistance = rayPickInfo.distance;
|
||||
if (this.getOtherHandController().state === STATE_DISTANCE_HOLDING) {
|
||||
this.setState(STATE_DISTANCE_ROTATING, "distance rotate '" + name + "'");
|
||||
} else {
|
||||
this.setState(STATE_DISTANCE_HOLDING, "distance hold '" + name + "'");
|
||||
}
|
||||
return;
|
||||
} else {
|
||||
// potentialFarGrabEntity = entity;
|
||||
}
|
||||
this.otherGrabbingLineOff();
|
||||
} else if (this.otherGrabbingUUID !== null) {
|
||||
if (this.triggerSmoothedGrab() && !isEditing() && farGrabEnabled && farSearching) {
|
||||
var avatar = AvatarList.getAvatar(this.otherGrabbingUUID);
|
||||
var IN_FRONT_OF_AVATAR = { x: 0, y: 0.2, z: 0.4 }; // Up from hips and in front of avatar.
|
||||
var startPosition = Vec3.sum(avatar.position, Vec3.multiplyQbyV(avatar.rotation, IN_FRONT_OF_AVATAR));
|
||||
var finishPosition = Vec3.sum(rayPickInfo.properties.position, // Entity's centroid.
    Vec3.multiplyQbyV(rayPickInfo.properties.rotation,
        Vec3.multiplyVbyV(rayPickInfo.properties.dimensions,
            Vec3.subtract(DEFAULT_REGISTRATION_POINT, rayPickInfo.properties.registrationPoint))));
this.otherGrabbingLineOn(startPosition, finishPosition, COLORS_GRAB_DISTANCE_HOLD);
|
||||
} else {
|
||||
this.otherGrabbingLineOff();
|
||||
}
|
||||
} else {
|
||||
this.otherGrabbingLineOff();
|
||||
}
|
||||
} else {
|
||||
this.otherGrabbingLineOff();
|
||||
}
|
||||
|
||||
this.updateEquipHaptics(potentialEquipHotspot, handPosition);
|
||||
|
@ -2036,6 +2219,19 @@ function MyController(hand) {
|
|||
return (dimensions.x * dimensions.y * dimensions.z) * density;
|
||||
};
|
||||
|
||||
this.ensureDynamic = function () {
|
||||
// if we distance hold something and keep it very still before releasing it, it ends up
|
||||
// non-dynamic in bullet. If it's too still, give it a little bounce so it will fall.
|
||||
var props = Entities.getEntityProperties(this.grabbedThingID, ["velocity", "dynamic", "parentID"]);
|
||||
if (props.dynamic && props.parentID == NULL_UUID) {
|
||||
var velocity = props.velocity;
|
||||
if (Vec3.length(velocity) < 0.05) { // see EntityMotionState.cpp DYNAMIC_LINEAR_VELOCITY_THRESHOLD
|
||||
velocity = { x: 0.0, y: 0.2, z: 0.0 };
|
||||
Entities.editEntity(this.grabbedThingID, { velocity: velocity });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
this.distanceHoldingEnter = function() {
|
||||
this.clearEquipHaptics();
|
||||
this.grabPointSphereOff();
|
||||
|
@ -2102,25 +2298,20 @@ function MyController(hand) {
|
|||
this.previousRoomControllerPosition = roomControllerPosition;
|
||||
};
|
||||
|
||||
this.ensureDynamic = function() {
|
||||
// if we distance hold something and keep it very still before releasing it, it ends up
|
||||
// non-dynamic in bullet. If it's too still, give it a little bounce so it will fall.
|
||||
var props = Entities.getEntityProperties(this.grabbedThingID, ["velocity", "dynamic", "parentID"]);
|
||||
if (props.dynamic && props.parentID == NULL_UUID) {
|
||||
var velocity = props.velocity;
|
||||
if (Vec3.length(velocity) < 0.05) { // see EntityMotionState.cpp DYNAMIC_LINEAR_VELOCITY_THRESHOLD
|
||||
velocity = { x: 0.0, y: 0.2, z:0.0 };
|
||||
Entities.editEntity(this.grabbedThingID, { velocity: velocity });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
this.distanceHolding = function(deltaTime, timestamp) {
|
||||
|
||||
if (!this.triggerClicked) {
|
||||
this.callEntityMethodOnGrabbed("releaseGrab");
|
||||
this.ensureDynamic();
|
||||
this.setState(STATE_OFF, "trigger released");
|
||||
if (this.getOtherHandController().state === STATE_DISTANCE_ROTATING) {
|
||||
this.getOtherHandController().setState(STATE_SEARCHING, "trigger released on holding controller");
|
||||
// Can't set state of other controller to STATE_DISTANCE_HOLDING because then either:
|
||||
// (a) The entity would jump to line up with the formerly rotating controller's orientation, or
|
||||
// (b) The grab beam would need an orientation offset to the controller's true orientation.
|
||||
// Neither of these options is good, so instead set STATE_SEARCHING and subsequently let the formerly distance
|
||||
// rotating controller start distance holding the entity if it happens to be pointing at the entity.
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -2209,11 +2400,13 @@ function MyController(hand) {
|
|||
}
|
||||
|
||||
this.maybeScale(grabbedProperties);
|
||||
|
||||
// visualizations
|
||||
|
||||
var rayPickInfo = this.calcRayPickInfo(this.hand);
|
||||
|
||||
this.overlayLineOn(rayPickInfo.searchRay.origin, Vec3.subtract(grabbedProperties.position, this.offsetPosition), COLORS_GRAB_DISTANCE_HOLD);
|
||||
this.overlayLineOn(rayPickInfo.searchRay.origin,
|
||||
Vec3.subtract(grabbedProperties.position, this.offsetPosition),
|
||||
COLORS_GRAB_DISTANCE_HOLD,
|
||||
this.grabbedThingID);
|
||||
|
||||
var distanceToObject = Vec3.length(Vec3.subtract(MyAvatar.position, this.currentObjectPosition));
|
||||
var success = Entities.updateAction(this.grabbedThingID, this.actionID, {
|
||||
|
@ -2232,6 +2425,64 @@ function MyController(hand) {
|
|||
this.previousRoomControllerPosition = roomControllerPosition;
|
||||
};
|
||||
|
||||
this.distanceRotatingEnter = function() {
|
||||
this.clearEquipHaptics();
|
||||
this.grabPointSphereOff();
|
||||
|
||||
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
var worldControllerPosition = controllerLocation.position;
|
||||
var worldControllerRotation = controllerLocation.orientation;
|
||||
|
||||
var grabbedProperties = Entities.getEntityProperties(this.grabbedThingID, GRABBABLE_PROPERTIES);
|
||||
this.currentObjectPosition = grabbedProperties.position;
|
||||
this.grabRadius = this.grabbedDistance;
|
||||
|
||||
// Offset between controller vector at the grab radius and the entity position.
|
||||
var targetPosition = Vec3.multiply(this.grabRadius, Quat.getUp(worldControllerRotation));
|
||||
targetPosition = Vec3.sum(targetPosition, worldControllerPosition);
|
||||
this.offsetPosition = Vec3.subtract(this.currentObjectPosition, targetPosition);
|
||||
|
||||
// Initial controller rotation.
|
||||
this.previousWorldControllerRotation = worldControllerRotation;
|
||||
|
||||
Controller.triggerHapticPulse(HAPTIC_PULSE_STRENGTH, HAPTIC_PULSE_DURATION, this.hand);
|
||||
this.turnOffVisualizations();
|
||||
};
|
||||
|
||||
this.distanceRotating = function(deltaTime, timestamp) {
|
||||
|
||||
if (!this.triggerClicked) {
|
||||
this.callEntityMethodOnGrabbed("releaseGrab");
|
||||
this.ensureDynamic();
|
||||
this.setState(STATE_OFF, "trigger released");
|
||||
return;
|
||||
}
|
||||
|
||||
var grabbedProperties = Entities.getEntityProperties(this.grabbedThingID, GRABBABLE_PROPERTIES);
|
||||
|
||||
// Delta rotation of grabbing controller since last update.
|
||||
var worldControllerRotation = getControllerWorldLocation(this.handToController(), true).orientation;
|
||||
var controllerRotationDelta = Quat.multiply(worldControllerRotation, Quat.inverse(this.previousWorldControllerRotation));
|
||||
|
||||
// Rotate entity by twice the delta rotation.
|
||||
controllerRotationDelta = Quat.multiply(controllerRotationDelta, controllerRotationDelta);
|
||||
|
||||
// Perform the rotation in the translation controller's action update.
|
||||
this.getOtherHandController().currentObjectRotation = Quat.multiply(controllerRotationDelta,
|
||||
this.getOtherHandController().currentObjectRotation);
|
||||
|
||||
// Rotate about the translation controller's target position.
|
||||
this.offsetPosition = Vec3.multiplyQbyV(controllerRotationDelta, this.offsetPosition);
|
||||
this.getOtherHandController().offsetPosition = Vec3.multiplyQbyV(controllerRotationDelta,
|
||||
this.getOtherHandController().offsetPosition);
|
||||
|
||||
var rayPickInfo = this.calcRayPickInfo(this.hand);
|
||||
this.overlayLineOn(rayPickInfo.searchRay.origin, Vec3.subtract(grabbedProperties.position, this.offsetPosition),
|
||||
COLORS_GRAB_DISTANCE_HOLD, this.grabbedThingID);
|
||||
|
||||
this.previousWorldControllerRotation = worldControllerRotation;
|
||||
}
|
||||
|
||||
this.setupHoldAction = function() {
|
||||
this.actionID = Entities.addAction("hold", this.grabbedThingID, {
|
||||
hand: this.hand === RIGHT_HAND ? "right" : "left",
|
||||
|
@ -2314,6 +2565,7 @@ function MyController(hand) {
|
|||
this.lineOff();
|
||||
this.overlayLineOff();
|
||||
this.searchSphereOff();
|
||||
this.otherGrabbingLineOff();
|
||||
|
||||
this.dropGestureReset();
|
||||
this.clearEquipHaptics();
|
||||
|
@ -2385,6 +2637,9 @@ function MyController(hand) {
|
|||
this.offsetPosition = Vec3.multiplyQbyV(Quat.inverse(Quat.multiply(handRotation, this.offsetRotation)), offset);
|
||||
}
|
||||
|
||||
// This boolean is used to check whether the object that is grabbed has just been cloned.
// It is only set to true if the grabbed object creates a new clone.
|
||||
var isClone = false;
|
||||
var isPhysical = propsArePhysical(grabbedProperties) ||
|
||||
(!this.grabbedIsOverlay && entityHasActions(this.grabbedThingID));
|
||||
if (isPhysical && this.state == STATE_NEAR_GRABBING && grabbedProperties.parentID === NULL_UUID) {
|
||||
|
@ -2402,9 +2657,7 @@ function MyController(hand) {
|
|||
this.actionID = null;
|
||||
var handJointIndex;
|
||||
if (this.ignoreIK) {
|
||||
handJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND");
|
||||
handJointIndex = this.controllerJointIndex;
|
||||
} else {
|
||||
handJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ? "RightHand" : "LeftHand");
|
||||
}
|
||||
|
@ -2423,6 +2676,54 @@ function MyController(hand) {
|
|||
if (this.grabbedIsOverlay) {
|
||||
Overlays.editOverlay(this.grabbedThingID, reparentProps);
|
||||
} else {
|
||||
if (grabbedProperties.userData.length > 0) {
|
||||
try{
|
||||
var userData = JSON.parse(grabbedProperties.userData);
|
||||
var grabInfo = userData.grabbableKey;
|
||||
if (grabInfo && grabInfo.cloneable) {
|
||||
// Check if
|
||||
var worldEntities = Entities.findEntitiesInBox(Vec3.subtract(MyAvatar.position, {x:25,y:25, z:25}), {x:50, y: 50, z: 50})
|
||||
var count = 0;
|
||||
worldEntities.forEach(function(item) {
|
||||
var item = Entities.getEntityProperties(item, ["name"]);
|
||||
if (item.name === grabbedProperties.name) {
|
||||
count++;
|
||||
}
|
||||
})
|
||||
var cloneableProps = Entities.getEntityProperties(grabbedProperties.id);
|
||||
var lifetime = grabInfo.cloneLifetime ? grabInfo.cloneLifetime : 300;
|
||||
var limit = grabInfo.cloneLimit ? grabInfo.cloneLimit : 10;
|
||||
var dynamic = grabInfo.cloneDynamic ? grabInfo.cloneDynamic : false;
|
||||
var cUserData = Object.assign({}, userData);
|
||||
var cProperties = Object.assign({}, cloneableProps);
|
||||
isClone = true;
|
||||
|
||||
if (count > limit) {
|
||||
delete cloneableProps;
|
||||
delete lifetime;
|
||||
delete cUserData;
|
||||
delete cProperties;
|
||||
return;
|
||||
}
|
||||
|
||||
delete cUserData.grabbableKey.cloneLifetime;
|
||||
delete cUserData.grabbableKey.cloneable;
|
||||
delete cUserData.grabbableKey.cloneDynamic;
|
||||
delete cUserData.grabbableKey.cloneLimit;
|
||||
delete cProperties.id
|
||||
|
||||
cProperties.dynamic = dynamic;
|
||||
cProperties.locked = false;
|
||||
cUserData.grabbableKey.triggerable = true;
|
||||
cUserData.grabbableKey.grabbable = true;
|
||||
cProperties.lifetime = lifetime;
|
||||
cProperties.userData = JSON.stringify(cUserData);
|
||||
var cloneID = Entities.addEntity(cProperties);
|
||||
this.grabbedThingID = cloneID;
|
||||
grabbedProperties = Entities.getEntityProperties(cloneID);
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
Entities.editEntity(this.grabbedThingID, reparentProps);
|
||||
}
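// A minimal sketch (not part of this commit) of the userData an entity author would set so the
// cloning branch above is taken; the grabbableKey field names and defaults match the ones this
// script reads, while the entity ID in the commented call is only a placeholder.
var exampleCloneableUserData = {
    grabbableKey: {
        grabbable: true,
        cloneable: true,       // grabbing spawns a copy instead of moving the original
        cloneLifetime: 300,    // seconds before a clone expires (same default as above)
        cloneLimit: 10,        // maximum number of live clones (same default as above)
        cloneDynamic: false    // whether clones are created as dynamic entities
    }
};
// Entities.editEntity(somePlaceholderEntityID, { userData: JSON.stringify(exampleCloneableUserData) });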
|
||||
|
||||
|
@ -2434,7 +2735,6 @@ function MyController(hand) {
|
|||
this.previousParentID[this.grabbedThingID] = grabbedProperties.parentID;
|
||||
this.previousParentJointIndex[this.grabbedThingID] = grabbedProperties.parentJointIndex;
|
||||
}
|
||||
|
||||
Messages.sendMessage('Hifi-Object-Manipulation', JSON.stringify({
|
||||
action: 'equip',
|
||||
grabbedEntity: this.grabbedThingID,
|
||||
|
@ -2450,22 +2750,37 @@ function MyController(hand) {
|
|||
});
|
||||
}
|
||||
|
||||
if (this.state == STATE_NEAR_GRABBING) {
|
||||
this.callEntityMethodOnGrabbed("startNearGrab");
|
||||
} else { // this.state == STATE_HOLD
|
||||
this.callEntityMethodOnGrabbed("startEquip");
|
||||
var _this = this;
|
||||
/*
|
||||
* Setting the context for a function that is either called via a timer or directly, depending on
|
||||
* whether the object in question is a clone. If it is a clone, we need to make sure that the initial equip event
|
||||
* is called correctly, as the freshly created entity may not have completely initialized yet.
|
||||
*/
|
||||
var grabEquipCheck = function () {
|
||||
if (_this.state == STATE_NEAR_GRABBING) {
|
||||
_this.callEntityMethodOnGrabbed("startNearGrab");
|
||||
} else { // this.state == STATE_HOLD
|
||||
_this.callEntityMethodOnGrabbed("startEquip");
|
||||
}
|
||||
|
||||
_this.currentHandControllerTipPosition =
|
||||
(_this.hand === RIGHT_HAND) ? MyAvatar.rightHandTipPosition : MyAvatar.leftHandTipPosition;
|
||||
_this.currentObjectTime = Date.now();
|
||||
|
||||
_this.currentObjectPosition = grabbedProperties.position;
|
||||
_this.currentObjectRotation = grabbedProperties.rotation;
|
||||
_this.currentVelocity = ZERO_VEC;
|
||||
_this.currentAngularVelocity = ZERO_VEC;
|
||||
|
||||
_this.prevDropDetected = false;
|
||||
}
|
||||
|
||||
this.currentHandControllerTipPosition =
|
||||
(this.hand === RIGHT_HAND) ? MyAvatar.rightHandTipPosition : MyAvatar.leftHandTipPosition;
|
||||
this.currentObjectTime = Date.now();
|
||||
|
||||
this.currentObjectPosition = grabbedProperties.position;
|
||||
this.currentObjectRotation = grabbedProperties.rotation;
|
||||
this.currentVelocity = ZERO_VEC;
|
||||
this.currentAngularVelocity = ZERO_VEC;
|
||||
|
||||
this.prevDropDetected = false;
|
||||
if (isClone) {
|
||||
// 100 ms seems to be sufficient time to ensure the check occurs after the object has been initialized.
|
||||
Script.setTimeout(grabEquipCheck, 100);
|
||||
} else {
|
||||
grabEquipCheck();
|
||||
}
|
||||
};
|
||||
|
||||
this.nearGrabbing = function(deltaTime, timestamp) {
|
||||
|
@ -2783,8 +3098,13 @@ function MyController(hand) {
|
|||
|
||||
this.entityTouchingEnter = function() {
|
||||
// test for intersection between controller laser and web entity plane.
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID,
|
||||
getControllerWorldLocation(this.handToController(), true));
|
||||
var controllerLocation;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
|
||||
controllerLocation = getFingerWorldLocation(this.hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID, controllerLocation);
|
||||
if (intersectInfo) {
|
||||
var pointerEvent = {
|
||||
type: "Press",
|
||||
|
@ -2820,8 +3140,13 @@ function MyController(hand) {
|
|||
|
||||
this.entityTouchingExit = function() {
|
||||
// test for intersection between controller laser and web entity plane.
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID,
|
||||
getControllerWorldLocation(this.handToController(), true));
|
||||
var controllerLocation;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
|
||||
controllerLocation = getFingerWorldLocation(this.hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID, controllerLocation);
|
||||
if (intersectInfo) {
|
||||
var pointerEvent;
|
||||
if (this.deadspotExpired) {
|
||||
|
@ -2861,12 +3186,24 @@ function MyController(hand) {
|
|||
}
|
||||
|
||||
// test for intersection between controller laser and web entity plane.
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID,
|
||||
getControllerWorldLocation(this.handToController(), true));
|
||||
var controllerLocation;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
|
||||
controllerLocation = getFingerWorldLocation(this.hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID, controllerLocation);
|
||||
if (intersectInfo) {
|
||||
|
||||
var max;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
|
||||
max = FINGER_TOUCH_MAX;
|
||||
} else {
|
||||
max = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET;
|
||||
}
|
||||
|
||||
if (this.state == STATE_ENTITY_STYLUS_TOUCHING &&
|
||||
intersectInfo.distance > WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET) {
|
||||
intersectInfo.distance > max) {
|
||||
this.setState(STATE_OFF, "pulled away from web entity");
|
||||
return;
|
||||
}
|
||||
|
@ -2909,8 +3246,13 @@ function MyController(hand) {
|
|||
|
||||
this.overlayTouchingEnter = function () {
|
||||
// Test for intersection between controller laser and Web overlay plane.
|
||||
var intersectInfo =
|
||||
handLaserIntersectOverlay(this.grabbedOverlay, getControllerWorldLocation(this.handToController(), true));
|
||||
var controllerLocation;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
|
||||
controllerLocation = getFingerWorldLocation(this.hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var intersectInfo = handLaserIntersectOverlay(this.grabbedOverlay, controllerLocation);
|
||||
if (intersectInfo) {
|
||||
var pointerEvent = {
|
||||
type: "Press",
|
||||
|
@ -2945,8 +3287,13 @@ function MyController(hand) {
|
|||
|
||||
this.overlayTouchingExit = function () {
|
||||
// Test for intersection between controller laser and Web overlay plane.
|
||||
var intersectInfo =
|
||||
handLaserIntersectOverlay(this.grabbedOverlay, getControllerWorldLocation(this.handToController(), true));
|
||||
var controllerLocation;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
|
||||
controllerLocation = getFingerWorldLocation(this.hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var intersectInfo = handLaserIntersectOverlay(this.grabbedOverlay, controllerLocation);
|
||||
if (intersectInfo) {
|
||||
var pointerEvent;
|
||||
|
||||
|
@ -3003,12 +3350,25 @@ function MyController(hand) {
|
|||
}
|
||||
|
||||
// Test for intersection between controller laser and Web overlay plane.
|
||||
var intersectInfo =
|
||||
handLaserIntersectOverlay(this.grabbedOverlay, getControllerWorldLocation(this.handToController(), true));
|
||||
var controllerLocation;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
|
||||
controllerLocation = getFingerWorldLocation(this.hand);
|
||||
} else {
|
||||
controllerLocation = getControllerWorldLocation(this.handToController(), true);
|
||||
}
|
||||
var intersectInfo = handLaserIntersectOverlay(this.grabbedOverlay, controllerLocation);
|
||||
if (intersectInfo) {
|
||||
|
||||
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING &&
|
||||
intersectInfo.distance > WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET + WEB_TOUCH_Y_TOUCH_DEADZONE_SIZE) {
|
||||
var max, min;
|
||||
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
|
||||
max = FINGER_TOUCH_MAX;
|
||||
min = FINGER_TOUCH_MIN;
|
||||
} else {
|
||||
max = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET + WEB_TOUCH_Y_TOUCH_DEADZONE_SIZE;
|
||||
min = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE;
|
||||
}
|
||||
|
||||
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING && intersectInfo.distance > max) {
|
||||
this.grabbedThingID = null;
|
||||
this.setState(STATE_OFF, "pulled away from overlay");
|
||||
return;
|
||||
|
@ -3019,7 +3379,7 @@ function MyController(hand) {
|
|||
|
||||
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING &&
|
||||
!this.tabletStabbed &&
|
||||
intersectInfo.distance < WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE) {
|
||||
intersectInfo.distance < min) {
|
||||
// they've stabbed the tablet, don't send events until they pull back
|
||||
this.tabletStabbed = true;
|
||||
this.tabletStabbedPos2D = pos2D;
|
||||
|
@ -3149,9 +3509,7 @@ function MyController(hand) {
|
|||
return true;
|
||||
}
|
||||
|
||||
var controllerJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND");
|
||||
var controllerJointIndex = this.controllerJointIndex;
|
||||
if (props.parentJointIndex == controllerJointIndex) {
|
||||
return true;
|
||||
}
|
||||
|
@ -3177,9 +3535,7 @@ function MyController(hand) {
|
|||
children = children.concat(Entities.getChildrenIDsOfJoint(AVATAR_SELF_ID, handJointIndex));
|
||||
|
||||
// find children of faux controller joint
|
||||
var controllerJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND");
|
||||
var controllerJointIndex = this.controllerJointIndex;
|
||||
children = children.concat(Entities.getChildrenIDsOfJoint(MyAvatar.sessionUUID, controllerJointIndex));
|
||||
children = children.concat(Entities.getChildrenIDsOfJoint(AVATAR_SELF_ID, controllerJointIndex));
|
||||
|
||||
|
@ -3191,11 +3547,11 @@ function MyController(hand) {
|
|||
children = children.concat(Entities.getChildrenIDsOfJoint(AVATAR_SELF_ID, controllerCRJointIndex));
|
||||
|
||||
children.forEach(function(childID) {
|
||||
if (childID !== _this.stylus) {
|
||||
if (childID !== _this.stylus &&
|
||||
childID !== _this.overlayLine) {
|
||||
// we appear to be holding something and this script isn't in a state that would be holding something.
|
||||
// unhook it. if we previously took note of this entity's parent, put it back where it was. This
|
||||
// works around some problems that happen when more than one hand or avatar is passing something around.
|
||||
print("disconnecting stray child of hand: (" + _this.hand + ") " + childID);
|
||||
if (_this.previousParentID[childID]) {
|
||||
var previousParentID = _this.previousParentID[childID];
|
||||
var previousParentJointIndex = _this.previousParentJointIndex[childID];
|
||||
|
@ -3213,13 +3569,21 @@ function MyController(hand) {
|
|||
}
|
||||
_this.previouslyUnhooked[childID] = now;
|
||||
|
||||
// we don't know if it's an entity or an overlay
|
||||
if (Overlays.getProperty(childID, "grabbable")) {
|
||||
// only auto-unhook overlays that were flagged as grabbable. this avoids unhooking overlays
|
||||
// used in tutorial.
|
||||
Overlays.editOverlay(childID, {
|
||||
parentID: previousParentID,
|
||||
parentJointIndex: previousParentJointIndex
|
||||
});
|
||||
}
|
||||
Entities.editEntity(childID, { parentID: previousParentID, parentJointIndex: previousParentJointIndex });
|
||||
Overlays.editOverlay(childID, { parentID: previousParentID, parentJointIndex: previousParentJointIndex });
|
||||
|
||||
} else {
|
||||
Entities.editEntity(childID, { parentID: NULL_UUID });
|
||||
Overlays.editOverlay(childID, { parentID: NULL_UUID });
|
||||
if (Overlays.getProperty(childID, "grabbable")) {
|
||||
Overlays.editOverlay(childID, { parentID: NULL_UUID });
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -3287,6 +3651,7 @@ Messages.subscribe('Hifi-Hand-Disabler');
|
|||
Messages.subscribe('Hifi-Hand-Grab');
|
||||
Messages.subscribe('Hifi-Hand-RayPick-Blacklist');
|
||||
Messages.subscribe('Hifi-Object-Manipulation');
|
||||
Messages.subscribe('Hifi-Hand-Drop');
|
||||
|
||||
var handleHandMessages = function(channel, message, sender) {
|
||||
var data;
|
||||
|
@ -3372,6 +3737,15 @@ var handleHandMessages = function(channel, message, sender) {
|
|||
} catch (e) {
|
||||
print("WARNING: handControllerGrab.js -- error parsing Hifi-Hand-RayPick-Blacklist message: " + message);
|
||||
}
|
||||
} else if (channel === 'Hifi-Hand-Drop') {
|
||||
if (message === 'left') {
|
||||
leftController.release();
|
||||
} else if (message === 'right') {
|
||||
rightController.release();
|
||||
} else if (message === 'both') {
|
||||
leftController.release();
|
||||
rightController.release();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
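// Usage sketch (not part of this commit): any other script can force a drop through the new
// 'Hifi-Hand-Drop' channel handled above; accepted payloads are 'left', 'right', or 'both'.
// Messages.sendMessage('Hifi-Hand-Drop', 'both');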
|
||||
|
|
|
@ -56,6 +56,7 @@ selectionManager.addEventListener(function () {
|
|||
lightOverlayManager.updatePositions();
|
||||
});
|
||||
|
||||
const KEY_P = 80; // Key code for letter p, used for the parenting hotkey.
|
||||
var DEGREES_TO_RADIANS = Math.PI / 180.0;
|
||||
var RADIANS_TO_DEGREES = 180.0 / Math.PI;
|
||||
var epsilon = 0.001;
|
||||
|
@ -843,7 +844,6 @@ function setupModelMenus() {
|
|||
});
|
||||
modelMenuAddedDelete = true;
|
||||
}
|
||||
|
||||
Menu.addMenuItem({
|
||||
menuName: "Edit",
|
||||
menuItemName: "Entity List...",
|
||||
|
@ -851,11 +851,25 @@ function setupModelMenus() {
|
|||
afterItem: "Entities",
|
||||
grouping: "Advanced"
|
||||
});
|
||||
|
||||
Menu.addMenuItem({
|
||||
menuName: "Edit",
|
||||
menuItemName: "Parent Entity to Last",
|
||||
afterItem: "Entity List...",
|
||||
grouping: "Advanced"
|
||||
});
|
||||
|
||||
Menu.addMenuItem({
|
||||
menuName: "Edit",
|
||||
menuItemName: "Unparent Entity",
|
||||
afterItem: "Parent Entity to Last",
|
||||
grouping: "Advanced"
|
||||
});
|
||||
Menu.addMenuItem({
|
||||
menuName: "Edit",
|
||||
menuItemName: "Allow Selecting of Large Models",
|
||||
shortcutKey: "CTRL+META+L",
|
||||
afterItem: "Entity List...",
|
||||
afterItem: "Unparent Entity",
|
||||
isCheckable: true,
|
||||
isChecked: true,
|
||||
grouping: "Advanced"
|
||||
|
@ -958,6 +972,8 @@ function cleanupModelMenus() {
|
|||
Menu.removeMenuItem("Edit", "Delete");
|
||||
}
|
||||
|
||||
Menu.removeMenuItem("Edit", "Parent Entity to Last");
|
||||
Menu.removeMenuItem("Edit", "Unparent Entity");
|
||||
Menu.removeMenuItem("Edit", "Entity List...");
|
||||
Menu.removeMenuItem("Edit", "Allow Selecting of Large Models");
|
||||
Menu.removeMenuItem("Edit", "Allow Selecting of Small Models");
|
||||
|
@ -990,6 +1006,9 @@ Script.scriptEnding.connect(function () {
|
|||
|
||||
Overlays.deleteOverlay(importingSVOImageOverlay);
|
||||
Overlays.deleteOverlay(importingSVOTextOverlay);
|
||||
|
||||
Controller.keyReleaseEvent.disconnect(keyReleaseEvent);
|
||||
Controller.keyPressEvent.disconnect(keyPressEvent);
|
||||
});
|
||||
|
||||
var lastOrientation = null;
|
||||
|
@ -1101,7 +1120,68 @@ function recursiveDelete(entities, childrenList) {
|
|||
Entities.deleteEntity(entityID);
|
||||
}
|
||||
}
|
||||
function unparentSelectedEntities() {
|
||||
if (SelectionManager.hasSelection()) {
|
||||
var selectedEntities = selectionManager.selections;
|
||||
var parentCheck = false;
|
||||
|
||||
if (selectedEntities.length < 1) {
|
||||
Window.notifyEditError("You must have an entity selected inorder to unparent it.");
|
||||
return;
|
||||
}
|
||||
selectedEntities.forEach(function (id, index) {
|
||||
var parentId = Entities.getEntityProperties(id, ["parentID"]).parentID;
|
||||
if (parentId !== null && parentId.length > 0 && parentId !== "{00000000-0000-0000-0000-000000000000}") {
|
||||
parentCheck = true;
|
||||
}
|
||||
Entities.editEntity(id, {parentID: null})
|
||||
return true;
|
||||
});
|
||||
if (parentCheck) {
|
||||
if (selectedEntities.length > 1) {
|
||||
Window.notify("Entities unparented");
|
||||
} else {
|
||||
Window.notify("Entity unparented");
|
||||
}
|
||||
} else {
|
||||
if (selectedEntities.length > 1) {
|
||||
Window.notify("Selected Entities have no parents");
|
||||
} else {
|
||||
Window.notify("Selected Entity does not have a parent");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Window.notifyEditError("You have nothing selected to unparent");
|
||||
}
|
||||
}
|
||||
function parentSelectedEntities() {
|
||||
if (SelectionManager.hasSelection()) {
|
||||
var selectedEntities = selectionManager.selections;
|
||||
if (selectedEntities.length <= 1) {
|
||||
Window.notifyEditError("You must have multiple entities selected in order to parent them");
|
||||
return;
|
||||
}
|
||||
var parentCheck = false;
|
||||
var lastEntityId = selectedEntities[selectedEntities.length-1];
|
||||
selectedEntities.forEach(function (id, index) {
|
||||
if (lastEntityId !== id) {
|
||||
var parentId = Entities.getEntityProperties(id, ["parentID"]).parentID;
|
||||
if (parentId !== lastEntityId) {
|
||||
parentCheck = true;
|
||||
}
|
||||
Entities.editEntity(id, {parentID: lastEntityId})
|
||||
}
|
||||
});
|
||||
|
||||
if (parentCheck) {
|
||||
Window.notify("Entities parented");
|
||||
} else {
|
||||
Window.notify("Entities are already parented to last");
|
||||
}
|
||||
} else {
|
||||
Window.notifyEditError("You have nothing selected to parent");
|
||||
}
|
||||
}
|
||||
function deleteSelectedEntities() {
|
||||
if (SelectionManager.hasSelection()) {
|
||||
selectedParticleEntity = 0;
|
||||
|
@ -1164,6 +1244,10 @@ function handeMenuEvent(menuItem) {
|
|||
Entities.setLightsArePickable(Menu.isOptionChecked("Allow Selecting of Lights"));
|
||||
} else if (menuItem === "Delete") {
|
||||
deleteSelectedEntities();
|
||||
} else if (menuItem === "Parent Entity to Last") {
|
||||
parentSelectedEntities();
|
||||
} else if (menuItem === "Unparent Entity") {
|
||||
unparentSelectedEntities();
|
||||
} else if (menuItem === "Export Entities") {
|
||||
if (!selectionManager.hasSelection()) {
|
||||
Window.notifyEditError("No entities have been selected.");
|
||||
|
@ -1289,13 +1373,12 @@ Window.svoImportRequested.connect(importSVO);
|
|||
|
||||
Menu.menuItemEvent.connect(handeMenuEvent);
|
||||
|
||||
Controller.keyPressEvent.connect(function (event) {
|
||||
var keyPressEvent = function (event) {
|
||||
if (isActive) {
|
||||
cameraManager.keyPressEvent(event);
|
||||
}
|
||||
});
|
||||
|
||||
Controller.keyReleaseEvent.connect(function (event) {
|
||||
};
|
||||
var keyReleaseEvent = function (event) {
|
||||
if (isActive) {
|
||||
cameraManager.keyReleaseEvent(event);
|
||||
}
|
||||
|
@ -1329,8 +1412,16 @@ Controller.keyReleaseEvent.connect(function (event) {
|
|||
});
|
||||
grid.setPosition(newPosition);
|
||||
}
|
||||
} else if (event.key === KEY_P && event.isControl && !event.isAutoRepeat ) {
|
||||
if (event.isShifted) {
|
||||
unparentSelectedEntities();
|
||||
} else {
|
||||
parentSelectedEntities();
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
Controller.keyReleaseEvent.connect(keyReleaseEvent);
|
||||
Controller.keyPressEvent.connect(keyPressEvent);
|
||||
|
||||
function recursiveAdd(newParentID, parentData) {
|
||||
var children = parentData.children;
|
||||
|
@ -1580,6 +1671,10 @@ var PropertiesTool = function (opts) {
|
|||
}
|
||||
pushCommandForSelections();
|
||||
selectionManager._update();
|
||||
} else if(data.type === 'parent') {
|
||||
parentSelectedEntities();
|
||||
} else if(data.type === 'unparent') {
|
||||
unparentSelectedEntities();
|
||||
} else if(data.type === 'saveUserData'){
|
||||
//the event bridge and json parsing handle our avatar id string differently.
|
||||
var actualID = data.id.split('"')[1];
|
||||
|
@ -1837,6 +1932,9 @@ var PopupMenu = function () {
|
|||
for (var i = 0; i < overlays.length; i++) {
|
||||
Overlays.deleteOverlay(overlays[i]);
|
||||
}
|
||||
Controller.mousePressEvent.disconnect(self.mousePressEvent);
|
||||
Controller.mouseMoveEvent.disconnect(self.mouseMoveEvent);
|
||||
Controller.mouseReleaseEvent.disconnect(self.mouseReleaseEvent);
|
||||
}
|
||||
|
||||
Controller.mousePressEvent.connect(self.mousePressEvent);
|
||||
|
@ -1864,7 +1962,11 @@ var particleExplorerTool = new ParticleExplorerTool();
|
|||
var selectedParticleEntity = 0;
|
||||
entityListTool.webView.webEventReceived.connect(function (data) {
|
||||
data = JSON.parse(data);
|
||||
if (data.type === "selectionUpdate") {
|
||||
if(data.type === 'parent') {
|
||||
parentSelectedEntities();
|
||||
} else if(data.type === 'unparent') {
|
||||
unparentSelectedEntities();
|
||||
} else if (data.type === "selectionUpdate") {
|
||||
var ids = data.entityIds;
|
||||
if (ids.length === 1) {
|
||||
if (Entities.getEntityProperties(ids[0], "type").type === "ParticleEffect") {
|
||||
|
|
|
@ -89,6 +89,7 @@
|
|||
</tr>
|
||||
</tfoot>
|
||||
</table>
|
||||
|
||||
<div id="no-entities">
|
||||
No entities found <span id="no-entities-in-view">in view</span> within a <span id="no-entities-radius">100</span> meter radius. Try moving to a different location and refreshing.
|
||||
</div>
|
||||
|
|
|
@ -61,7 +61,7 @@
|
|||
<label for="property-description">Description</label>
|
||||
<input type="text" id="property-description">
|
||||
</div>
|
||||
|
||||
|
||||
<div class="property textarea">
|
||||
<label for="property-user-data">User data</label>
|
||||
<br>
|
||||
|
@ -295,12 +295,29 @@
|
|||
<input type="checkbox" id="property-wants-trigger">
|
||||
<label for="property-wants-trigger">Triggerable</label>
|
||||
</div>
|
||||
<div class="property checkbox">
|
||||
<input type="checkbox" id="property-cloneable">
|
||||
<label for="property-cloneable">Cloneable</label>
|
||||
</div>
|
||||
<div class="property checkbox">
|
||||
<input type="checkbox" id="property-ignore-ik">
|
||||
<label for="property-ignore-ik">Ignore inverse kinematics</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="column" id="group-cloneable-group" style="display:none;">
|
||||
<div class="sub-section-header">
|
||||
<span>Cloneable Settings</span>
|
||||
</div>
|
||||
<div class="cloneable-group property gen">
|
||||
<div><label>Clone Lifetime</label><input type="number" data-user-data-type="cloneLifetime" id="property-cloneable-lifetime"></div>
|
||||
<div><label>Clone Limit</label><input type="number" data-user-data-type="cloneLimit" id="property-cloneable-limit"></div>
|
||||
<div class="property checkbox">
|
||||
<input type="checkbox" id="property-cloneable-dynamic">
|
||||
<label for="property-cloneable-dynamic">Clone Dynamic</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<hr class="behavior-group" />
|
||||
<div class="behavior-group property url ">
|
||||
|
|
|
@ -19,6 +19,7 @@ const VISIBLE_GLYPH = "";
|
|||
const TRANSPARENCY_GLYPH = "";
|
||||
const SCRIPT_GLYPH = "k";
|
||||
const DELETE = 46; // Key code for the delete key.
|
||||
const KEY_P = 80; // Key code for letter p used for Parenting hotkey.
|
||||
const MAX_ITEMS = Number.MAX_VALUE; // Used to set the max length of the list of discovered entities.
|
||||
|
||||
debugPrint = function (message) {
|
||||
|
@ -26,7 +27,7 @@ debugPrint = function (message) {
|
|||
};
|
||||
|
||||
function loaded() {
|
||||
openEventBridge(function() {
|
||||
openEventBridge(function() {
|
||||
entityList = new List('entity-list', { valueNames: ['name', 'type', 'url', 'locked', 'visible'], page: MAX_ITEMS});
|
||||
entityList.clear();
|
||||
elEntityTable = document.getElementById("entity-table");
|
||||
|
@ -48,7 +49,7 @@ function loaded() {
|
|||
elNoEntitiesInView = document.getElementById("no-entities-in-view");
|
||||
elNoEntitiesRadius = document.getElementById("no-entities-radius");
|
||||
elEntityTableScroll = document.getElementById("entity-table-scroll");
|
||||
|
||||
|
||||
document.getElementById("entity-name").onclick = function() {
|
||||
setSortColumn('name');
|
||||
};
|
||||
|
@ -90,7 +91,7 @@ function loaded() {
|
|||
selection = selection.concat(selectedEntities);
|
||||
} else if (clickEvent.shiftKey && selectedEntities.length > 0) {
|
||||
var previousItemFound = -1;
|
||||
var clickedItemFound = -1;
|
||||
var clickedItemFound = -1;
|
||||
for (var entity in entityList.visibleItems) {
|
||||
if (clickedItemFound === -1 && this.dataset.entityId == entityList.visibleItems[entity].values().id) {
|
||||
clickedItemFound = entity;
|
||||
|
@ -113,11 +114,11 @@ function loaded() {
|
|||
selection = selection.concat(betweenItems, selectedEntities);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
selectedEntities = selection;
|
||||
|
||||
|
||||
this.className = 'selected';
|
||||
|
||||
|
||||
EventBridge.emitWebEvent(JSON.stringify({
|
||||
type: "selectionUpdate",
|
||||
focus: false,
|
||||
|
@ -126,7 +127,7 @@ function loaded() {
|
|||
|
||||
refreshFooter();
|
||||
}
|
||||
|
||||
|
||||
function onRowDoubleClicked() {
|
||||
EventBridge.emitWebEvent(JSON.stringify({
|
||||
type: "selectionUpdate",
|
||||
|
@ -134,7 +135,7 @@ function loaded() {
|
|||
entityIds: [this.dataset.entityId],
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
const BYTES_PER_MEGABYTE = 1024 * 1024;
|
||||
|
||||
function decimalMegabytes(number) {
|
||||
|
@ -173,7 +174,7 @@ function loaded() {
|
|||
currentElement.onclick = onRowClicked;
|
||||
currentElement.ondblclick = onRowDoubleClicked;
|
||||
});
|
||||
|
||||
|
||||
if (refreshEntityListTimer) {
|
||||
clearTimeout(refreshEntityListTimer);
|
||||
}
|
||||
|
@ -183,13 +184,13 @@ function loaded() {
|
|||
item.values({ name: name, url: filename, locked: locked, visible: visible });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function clearEntities() {
|
||||
entities = {};
|
||||
entityList.clear();
|
||||
refreshFooter();
|
||||
}
|
||||
|
||||
|
||||
var elSortOrder = {
|
||||
name: document.querySelector('#entity-name .sort-order'),
|
||||
type: document.querySelector('#entity-type .sort-order'),
|
||||
|
@ -215,12 +216,12 @@ function loaded() {
|
|||
entityList.sort(currentSortColumn, { order: currentSortOrder });
|
||||
}
|
||||
setSortColumn('type');
|
||||
|
||||
|
||||
function refreshEntities() {
|
||||
clearEntities();
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'refresh' }));
|
||||
}
|
||||
|
||||
|
||||
function refreshFooter() {
|
||||
if (selectedEntities.length > 1) {
|
||||
elFooter.firstChild.nodeValue = selectedEntities.length + " entities selected";
|
||||
|
@ -239,7 +240,7 @@ function loaded() {
|
|||
entityList.search(elFilter.value);
|
||||
refreshFooter();
|
||||
}
|
||||
|
||||
|
||||
function updateSelectedEntities(selectedIDs) {
|
||||
var notFound = false;
|
||||
for (var id in entities) {
|
||||
|
@ -262,7 +263,7 @@ function loaded() {
|
|||
|
||||
return notFound;
|
||||
}
|
||||
|
||||
|
||||
elRefresh.onclick = function() {
|
||||
refreshEntities();
|
||||
}
|
||||
|
@ -282,7 +283,7 @@ function loaded() {
|
|||
EventBridge.emitWebEvent(JSON.stringify({ type: 'delete' }));
|
||||
refreshEntities();
|
||||
}
|
||||
|
||||
|
||||
document.addEventListener("keydown", function (keyDownEvent) {
|
||||
if (keyDownEvent.target.nodeName === "INPUT") {
|
||||
return;
|
||||
|
@ -292,8 +293,15 @@ function loaded() {
|
|||
EventBridge.emitWebEvent(JSON.stringify({ type: 'delete' }));
|
||||
refreshEntities();
|
||||
}
|
||||
if (keyDownEvent.keyCode === KEY_P && keyDownEvent.ctrlKey) {
|
||||
if (keyDownEvent.shiftKey) {
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'unparent' }));
|
||||
} else {
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'parent' }));
|
||||
}
|
||||
}
|
||||
}, false);
|
||||
|
||||
|
||||
var isFilterInView = false;
|
||||
var FILTER_IN_VIEW_ATTRIBUTE = "pressed";
|
||||
elNoEntitiesInView.style.display = "none";
|
||||
|
@ -320,7 +328,7 @@ function loaded() {
|
|||
if (window.EventBridge !== undefined) {
|
||||
EventBridge.scriptEventReceived.connect(function(data) {
|
||||
data = JSON.parse(data);
|
||||
|
||||
|
||||
if (data.type === "clearEntityList") {
|
||||
clearEntities();
|
||||
} else if (data.type == "selectionUpdate") {
|
||||
|
@ -426,4 +434,3 @@ function loaded() {
|
|||
event.preventDefault();
|
||||
}, false);
|
||||
}
|
||||
|
||||
|
|
|
@ -24,9 +24,10 @@ var ICON_FOR_TYPE = {
|
|||
}
|
||||
|
||||
var EDITOR_TIMEOUT_DURATION = 1500;
|
||||
|
||||
const KEY_P = 80; // Key code for letter p, used for the parenting hotkey.
|
||||
var colorPickers = [];
|
||||
var lastEntityID = null;
|
||||
|
||||
debugPrint = function(message) {
|
||||
EventBridge.emitWebEvent(
|
||||
JSON.stringify({
|
||||
|
@ -273,7 +274,7 @@ function updateCheckedSubProperty(propertyName, propertyValue, subPropertyElemen
|
|||
propertyValue += subPropertyString + ',';
|
||||
}
|
||||
} else {
|
||||
// We've unchecked, so remove
|
||||
// We've unchecked, so remove
|
||||
propertyValue = propertyValue.replace(subPropertyString + ",", "");
|
||||
}
|
||||
|
||||
|
@ -323,13 +324,9 @@ function setUserDataFromEditor(noUpdate) {
|
|||
})
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
function userDataChanger(groupName, keyName, checkBoxElement, userDataElement, defaultValue) {
|
||||
function multiDataUpdater(groupName, updateKeyPair, userDataElement, defaults) {
|
||||
var properties = {};
|
||||
var parsedData = {};
|
||||
try {
|
||||
|
@ -339,17 +336,31 @@ function userDataChanger(groupName, keyName, checkBoxElement, userDataElement, d
|
|||
} else {
|
||||
parsedData = JSON.parse(userDataElement.value);
|
||||
}
|
||||
|
||||
} catch (e) {}
|
||||
|
||||
if (!(groupName in parsedData)) {
|
||||
parsedData[groupName] = {}
|
||||
}
|
||||
delete parsedData[groupName][keyName];
|
||||
if (checkBoxElement.checked !== defaultValue) {
|
||||
parsedData[groupName][keyName] = checkBoxElement.checked;
|
||||
}
|
||||
|
||||
var keys = Object.keys(updateKeyPair);
|
||||
keys.forEach(function (key) {
|
||||
delete parsedData[groupName][key];
|
||||
if (updateKeyPair[key] !== null && updateKeyPair[key] !== "null") {
|
||||
if (updateKeyPair[key] instanceof Element) {
|
||||
if(updateKeyPair[key].type === "checkbox") {
|
||||
if (updateKeyPair[key].checked !== defaults[key]) {
|
||||
parsedData[groupName][key] = updateKeyPair[key].checked;
|
||||
}
|
||||
} else {
|
||||
var val = isNaN(updateKeyPair[key].value) ? updateKeyPair[key].value : parseInt(updateKeyPair[key].value);
|
||||
if (val !== defaults[key]) {
|
||||
parsedData[groupName][key] = val;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parsedData[groupName][key] = updateKeyPair[key];
|
||||
}
|
||||
}
|
||||
});
|
||||
if (Object.keys(parsedData[groupName]).length == 0) {
|
||||
delete parsedData[groupName];
|
||||
}
|
||||
|
@ -368,6 +379,12 @@ function userDataChanger(groupName, keyName, checkBoxElement, userDataElement, d
|
|||
properties: properties,
|
||||
})
|
||||
);
|
||||
}
|
||||
function userDataChanger(groupName, keyName, values, userDataElement, defaultValue) {
|
||||
var val = {}, def = {};
|
||||
val[keyName] = values;
|
||||
def[keyName] = defaultValue;
|
||||
multiDataUpdater(groupName, val, userDataElement, def);
|
||||
};
|
||||
|
||||
function setTextareaScrolling(element) {
|
||||
|
@ -521,6 +538,7 @@ function unbindAllInputs() {
|
|||
|
||||
function loaded() {
|
||||
openEventBridge(function() {
|
||||
|
||||
var allSections = [];
|
||||
var elID = document.getElementById("property-id");
|
||||
var elType = document.getElementById("property-type");
|
||||
|
@ -584,6 +602,13 @@ function loaded() {
|
|||
var elCollisionSoundURL = document.getElementById("property-collision-sound-url");
|
||||
|
||||
var elGrabbable = document.getElementById("property-grabbable");
|
||||
|
||||
var elCloneable = document.getElementById("property-cloneable");
|
||||
var elCloneableDynamic = document.getElementById("property-cloneable-dynamic");
|
||||
var elCloneableGroup = document.getElementById("group-cloneable-group");
|
||||
var elCloneableLifetime = document.getElementById("property-cloneable-lifetime");
|
||||
var elCloneableLimit = document.getElementById("property-cloneable-limit");
|
||||
|
||||
var elWantsTrigger = document.getElementById("property-wants-trigger");
|
||||
var elIgnoreIK = document.getElementById("property-ignore-ik");
|
||||
|
||||
|
@ -780,7 +805,7 @@ function loaded() {
|
|||
if (lastEntityID !== '"' + properties.id + '"' && lastEntityID !== null && editor !== null) {
|
||||
saveJSONUserData(true);
|
||||
}
|
||||
//the event bridge and json parsing handle our avatar id string differently.
|
||||
//the event bridge and json parsing handle our avatar id string differently.
|
||||
|
||||
lastEntityID = '"' + properties.id + '"';
|
||||
elID.innerHTML = properties.id;
|
||||
|
@ -847,8 +872,16 @@ function loaded() {
|
|||
elCollideOtherAvatar.checked = properties.collidesWith.indexOf("otherAvatar") > -1;
|
||||
|
||||
elGrabbable.checked = properties.dynamic;
|
||||
|
||||
elWantsTrigger.checked = false;
|
||||
elIgnoreIK.checked = true;
|
||||
|
||||
elCloneable.checked = false;
|
||||
elCloneableDynamic.checked = false;
|
||||
elCloneableGroup.style.display = elCloneable.checked ? "block": "none";
|
||||
elCloneableLimit.value = 10;
|
||||
elCloneableLifetime.value = 300;
|
||||
|
||||
var parsedUserData = {}
|
||||
try {
|
||||
parsedUserData = JSON.parse(properties.userData);
|
||||
|
@ -863,8 +896,25 @@ function loaded() {
|
|||
if ("ignoreIK" in parsedUserData["grabbableKey"]) {
|
||||
elIgnoreIK.checked = parsedUserData["grabbableKey"].ignoreIK;
|
||||
}
|
||||
if ("cloneable" in parsedUserData["grabbableKey"]) {
|
||||
elCloneable.checked = parsedUserData["grabbableKey"].cloneable;
|
||||
elCloneableGroup.style.display = elCloneable.checked ? "block": "none";
|
||||
elCloneableLimit.value = elCloneable.checked ? 10: 0;
|
||||
elCloneableLifetime.value = elCloneable.checked ? 300: 0;
|
||||
elCloneableDynamic.checked = parsedUserData["grabbableKey"].cloneDynamic ? parsedUserData["grabbableKey"].cloneDynamic : properties.dynamic;
|
||||
elDynamic.checked = elCloneable.checked ? false: properties.dynamic;
|
||||
if (elCloneable.checked) {
|
||||
if ("cloneLifetime" in parsedUserData["grabbableKey"]) {
|
||||
elCloneableLifetime.value = parsedUserData["grabbableKey"].cloneLifetime ? parsedUserData["grabbableKey"].cloneLifetime : 300;
|
||||
}
|
||||
if ("cloneLimit" in parsedUserData["grabbableKey"]) {
|
||||
elCloneableLimit.value = parsedUserData["grabbableKey"].cloneLimit ? parsedUserData["grabbableKey"].cloneLimit : 10;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {}
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
elCollisionSoundURL.value = properties.collisionSoundURL;
|
||||
elLifetime.value = properties.lifetime;
|
||||
|
@ -1154,8 +1204,38 @@ function loaded() {
|
|||
});
|
||||
|
||||
elGrabbable.addEventListener('change', function() {
|
||||
if(elCloneable.checked) {
|
||||
elGrabbable.checked = false;
|
||||
}
|
||||
userDataChanger("grabbableKey", "grabbable", elGrabbable, elUserData, properties.dynamic);
|
||||
});
|
||||
elCloneableDynamic.addEventListener('change', function (event){
|
||||
userDataChanger("grabbableKey", "cloneDynamic", event.target, elUserData, -1);
|
||||
});
|
||||
elCloneable.addEventListener('change', function (event) {
|
||||
var checked = event.target.checked;
|
||||
if (checked) {
|
||||
multiDataUpdater("grabbableKey",
|
||||
{cloneLifetime: elCloneableLifetime, cloneLimit: elCloneableLimit, cloneDynamic: elCloneableDynamic, cloneable: event.target},
|
||||
elUserData, {});
|
||||
elCloneableGroup.style.display = "block";
|
||||
EventBridge.emitWebEvent(
|
||||
'{"id":' + lastEntityID + ', "type":"update", "properties":{"dynamic":false, "grabbable": false}}'
|
||||
);
|
||||
} else {
|
||||
multiDataUpdater("grabbableKey",
|
||||
{cloneLifetime: null, cloneLimit: null, cloneDynamic: null, cloneable: false},
|
||||
elUserData, {});
|
||||
elCloneableGroup.style.display = "none";
|
||||
}
|
||||
});
|
||||
|
||||
var numberListener = function (event) {
|
||||
userDataChanger("grabbableKey", event.target.getAttribute("data-user-data-type"), parseInt(event.target.value), elUserData, false);
|
||||
};
|
||||
elCloneableLifetime.addEventListener('change', numberListener);
|
||||
elCloneableLimit.addEventListener('change', numberListener);
|
||||
|
||||
elWantsTrigger.addEventListener('change', function() {
|
||||
userDataChanger("grabbableKey", "wantsTrigger", elWantsTrigger, elUserData, false);
|
||||
});
|
||||
|
@ -1390,7 +1470,7 @@ function loaded() {
|
|||
elZoneFlyingAllowed.addEventListener('change', createEmitCheckedPropertyUpdateFunction('flyingAllowed'));
|
||||
elZoneGhostingAllowed.addEventListener('change', createEmitCheckedPropertyUpdateFunction('ghostingAllowed'));
|
||||
elZoneFilterURL.addEventListener('change', createEmitTextPropertyUpdateFunction('filterURL'));
|
||||
|
||||
|
||||
var voxelVolumeSizeChangeFunction = createEmitVec3PropertyUpdateFunction(
|
||||
'voxelVolumeSize', elVoxelVolumeSizeX, elVoxelVolumeSizeY, elVoxelVolumeSizeZ);
|
||||
elVoxelVolumeSizeX.addEventListener('change', voxelVolumeSizeChangeFunction);
|
||||
|
@ -1441,7 +1521,15 @@ function loaded() {
|
|||
}));
|
||||
});
|
||||
|
||||
|
||||
document.addEventListener("keydown", function (keyDown) {
|
||||
if (keyDown.keyCode === KEY_P && keyDown.ctrlKey) {
|
||||
if (keyDown.shiftKey) {
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'unparent' }));
|
||||
} else {
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'parent' }));
|
||||
}
|
||||
}
|
||||
});
|
||||
window.onblur = function() {
|
||||
// Fake a change event
|
||||
var ev = document.createEvent("HTMLEvents");
|
||||
|
|
|
@ -6,6 +6,8 @@
|
|||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
const KEY_P = 80; // Key code for letter p, used for the parenting hotkey.
|
||||
|
||||
function loaded() {
|
||||
openEventBridge(function() {
|
||||
elPosY = document.getElementById("horiz-y");
|
||||
|
@ -131,10 +133,17 @@ function loaded() {
|
|||
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'init' }));
|
||||
});
|
||||
|
||||
document.addEventListener("keydown", function (keyDown) {
|
||||
if (keyDown.keyCode === KEY_P && keyDown.ctrlKey) {
|
||||
if (keyDown.shiftKey) {
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'unparent' }));
|
||||
} else {
|
||||
EventBridge.emitWebEvent(JSON.stringify({ type: 'parent' }));
|
||||
}
|
||||
}
|
||||
})
|
||||
// Disable right-click context menu which is not visible in the HMD and makes it seem like the app has locked
|
||||
document.addEventListener("contextmenu", function (event) {
|
||||
event.preventDefault();
|
||||
}, false);
|
||||
}
|
||||
|
||||
|
|
|
@ -1170,14 +1170,14 @@ SelectionDisplay = (function() {
|
|||
// determine which bottom corner we are closest to
|
||||
/*------------------------------
|
||||
example:
|
||||
|
||||
|
||||
BRF +--------+ BLF
|
||||
| |
|
||||
| |
|
||||
BRN +--------+ BLN
|
||||
|
||||
|
||||
*
|
||||
|
||||
|
||||
------------------------------*/
|
||||
|
||||
var cameraPosition = Camera.getPosition();
|
||||
|
@ -2189,8 +2189,12 @@ SelectionDisplay = (function() {
|
|||
offset = Vec3.multiplyQbyV(properties.rotation, offset);
|
||||
var boxPosition = Vec3.sum(properties.position, offset);
|
||||
|
||||
var color = {red: 255, green: 128, blue: 0};
|
||||
if (i >= selectionManager.selections.length - 1) color = {red: 255, green: 255, blue: 64};
|
||||
|
||||
Overlays.editOverlay(selectionBoxes[i], {
|
||||
position: boxPosition,
|
||||
color: color,
|
||||
rotation: properties.rotation,
|
||||
dimensions: properties.dimensions,
|
||||
visible: true,
|
||||
|
@ -2395,7 +2399,7 @@ SelectionDisplay = (function() {
|
|||
if (wantDebug) {
|
||||
print("Start Elevation: " + translateXZTool.startingElevation + ", elevation: " + elevation);
|
||||
}
|
||||
if ((translateXZTool.startingElevation > 0.0 && elevation < MIN_ELEVATION) ||
|
||||
if ((translateXZTool.startingElevation > 0.0 && elevation < MIN_ELEVATION) ||
|
||||
(translateXZTool.startingElevation < 0.0 && elevation > -MIN_ELEVATION)) {
|
||||
if (wantDebug) {
|
||||
print("too close to horizon!");
|
||||
|
@ -3857,7 +3861,7 @@ SelectionDisplay = (function() {
|
|||
};
|
||||
|
||||
that.mousePressEvent = function(event) {
|
||||
var wantDebug = false;
|
||||
var wantDebug = false;
|
||||
if (!event.isLeftButton && !that.triggered) {
|
||||
// if another mouse button than left is pressed ignore it
|
||||
return false;
|
||||
|
@ -3889,7 +3893,7 @@ SelectionDisplay = (function() {
|
|||
|
||||
if (result.intersects) {
|
||||
|
||||
|
||||
|
||||
if (wantDebug) {
|
||||
print("something intersects... ");
|
||||
print(" result.overlayID:" + result.overlayID + "[" + overlayNames[result.overlayID] + "]");
|
||||
|
@ -3989,7 +3993,7 @@ SelectionDisplay = (function() {
|
|||
if (wantDebug) {
|
||||
print("rotate handle case...");
|
||||
}
|
||||
|
||||
|
||||
|
||||
// After testing our stretch handles, then check out rotate handles
|
||||
Overlays.editOverlay(yawHandle, {
|
||||
|
@ -4211,7 +4215,7 @@ SelectionDisplay = (function() {
|
|||
case selectionBox:
|
||||
activeTool = translateXZTool;
|
||||
translateXZTool.pickPlanePosition = result.intersection;
|
||||
translateXZTool.greatestDimension = Math.max(Math.max(SelectionManager.worldDimensions.x, SelectionManager.worldDimensions.y),
|
||||
translateXZTool.greatestDimension = Math.max(Math.max(SelectionManager.worldDimensions.x, SelectionManager.worldDimensions.y),
|
||||
SelectionManager.worldDimensions.z);
|
||||
if (wantDebug) {
|
||||
print("longest dimension: " + translateXZTool.greatestDimension);
|
||||
|
@ -4220,7 +4224,7 @@ SelectionDisplay = (function() {
|
|||
translateXZTool.startingElevation = translateXZTool.elevation(pickRay.origin, translateXZTool.pickPlanePosition);
|
||||
print(" starting elevation: " + translateXZTool.startingElevation);
|
||||
}
|
||||
|
||||
|
||||
mode = translateXZTool.mode;
|
||||
activeTool.onBegin(event);
|
||||
somethingClicked = 'selectionBox';
|
||||
|
|
|
@ -521,6 +521,9 @@ function onEditError(msg) {
|
|||
createNotification(wordWrap(msg), NotificationType.EDIT_ERROR);
|
||||
}
|
||||
|
||||
function onNotify(msg) {
|
||||
createNotification(wordWrap(msg), NotificationType.UNKNOWN); // Needs a generic notification system for user feedback, thus using this
|
||||
}
|
||||
|
||||
function onSnapshotTaken(pathStillSnapshot, pathAnimatedSnapshot, notify) {
|
||||
if (notify) {
|
||||
|
@ -637,6 +640,7 @@ Window.domainConnectionRefused.connect(onDomainConnectionRefused);
|
|||
Window.snapshotTaken.connect(onSnapshotTaken);
|
||||
Window.processingGif.connect(processingGif);
|
||||
Window.notifyEditError = onEditError;
|
||||
Window.notify = onNotify;
|
||||
|
||||
setup();
|
||||
|
||||
|
|
|
@ -206,6 +206,17 @@ HighlightedEntity.updateOverlays = function updateHighlightedEntities() {
|
|||
});
|
||||
};
|
||||
|
||||
/* this contains current gain for a given node (by session id). More efficient than
|
||||
* querying it, plus there isn't a getGain function so why write one */
|
||||
var sessionGains = {};
|
||||
function convertDbToLinear(decibels) {
|
||||
// +20db = 10x, 0dB = 1x, -10dB = 0.1x, etc...
|
||||
// but, your perception is that something 2x as loud is +10db
|
||||
// so we go from -60 to +20 or 1/64x to 4x. For now, we can
|
||||
// maybe scale the signal this way??
|
||||
return Math.pow(2, decibels/10.0);
|
||||
}
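// Worked examples of the mapping above (added for clarity, not in the original source):
// convertDbToLinear(20) === Math.pow(2, 2) === 4, convertDbToLinear(0) === 1, and
// convertDbToLinear(-60) === Math.pow(2, -6) === 0.015625, so the -60..+20 dB range spans 1/64x to 4x gain.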
|
||||
|
||||
function fromQml(message) { // messages are {method, params}, like json-rpc. See also sendToQml.
|
||||
var data;
|
||||
switch (message.method) {
|
||||
|
@ -237,12 +248,16 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
|
|||
}
|
||||
break;
|
||||
case 'refresh':
|
||||
data = {};
|
||||
ExtendedOverlay.some(function (overlay) { // capture the audio data
|
||||
data[overlay.key] = overlay;
|
||||
});
|
||||
removeOverlays();
|
||||
// If filter is specified from .qml instead of through settings, update the settings.
|
||||
if (message.params.filter !== undefined) {
|
||||
Settings.setValue('pal/filtered', !!message.params.filter);
|
||||
}
|
||||
populateUserList(message.params.selected);
|
||||
populateUserList(message.params.selected, data);
|
||||
UserActivityLogger.palAction("refresh", "");
|
||||
break;
|
||||
case 'displayNameUpdate':
|
||||
|
@ -274,7 +289,7 @@ function addAvatarNode(id) {
|
|||
}
|
||||
// Each open/refresh will capture a stable set of avatarsOfInterest, within the specified filter.
|
||||
var avatarsOfInterest = {};
|
||||
function populateUserList(selectData) {
|
||||
function populateUserList(selectData, oldAudioData) {
|
||||
var filter = Settings.getValue('pal/filtered') && {distance: Settings.getValue('pal/nearDistance')};
|
||||
var data = [], avatars = AvatarList.getAvatarIdentifiers();
|
||||
avatarsOfInterest = {};
|
||||
|
@ -306,11 +321,13 @@ function populateUserList(selectData) {
|
|||
if (id && filter && ((Math.abs(horizontal) > horizontalHalfAngle) || (Math.abs(vertical) > verticalHalfAngle))) {
|
||||
return;
|
||||
}
|
||||
var oldAudio = oldAudioData && oldAudioData[id];
|
||||
var avatarPalDatum = {
|
||||
displayName: name,
|
||||
userName: '',
|
||||
sessionId: id || '',
|
||||
audioLevel: 0.0,
|
||||
audioLevel: (oldAudio && oldAudio.audioLevel) || 0.0,
|
||||
avgAudioLevel: (oldAudio && oldAudio.avgAudioLevel) || 0.0,
|
||||
admin: false,
|
||||
personalMute: !!id && Users.getPersonalMuteStatus(id), // expects proper boolean, not null
|
||||
ignore: !!id && Users.getIgnoreStatus(id) // ditto
|
||||
|
@ -604,41 +621,54 @@ function receiveMessage(channel, messageString, senderID) {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
var AVERAGING_RATIO = 0.05;
|
||||
var LOUDNESS_FLOOR = 11.0;
|
||||
var LOUDNESS_SCALE = 2.8 / 5.0;
|
||||
var LOG2 = Math.log(2.0);
|
||||
var AUDIO_PEAK_DECAY = 0.02;
|
||||
var myData = {}; // we're not included in ExtendedOverlay.get.
|
||||
|
||||
function scaleAudio(val) {
|
||||
var audioLevel = 0.0;
|
||||
if (val <= LOUDNESS_FLOOR) {
|
||||
audioLevel = val / LOUDNESS_FLOOR * LOUDNESS_SCALE;
|
||||
} else {
|
||||
audioLevel = (val -(LOUDNESS_FLOOR -1 )) * LOUDNESS_SCALE;
|
||||
}
|
||||
if (audioLevel > 1.0) {
|
||||
audioLevel = 1;
|
||||
}
|
||||
return audioLevel;
|
||||
}
|
||||
|
||||
function getAudioLevel(id) {
|
||||
// the VU meter should work similarly to the one in AvatarInputs: log scale, exponentially averaged
|
||||
// But of course it gets the data at a different rate, so we tweak the averaging ratio and frequency
|
||||
// of updating (the latter for efficiency too).
|
||||
var avatar = AvatarList.getAvatar(id);
|
||||
var audioLevel = 0.0;
|
||||
var avgAudioLevel = 0.0;
|
||||
var data = id ? ExtendedOverlay.get(id) : myData;
|
||||
if (!data) {
|
||||
return audioLevel;
|
||||
}
|
||||
if (data) {
|
||||
|
||||
// we will do an exponential moving average by blending the previous accumulated level with the latest loudness
|
||||
data.accumulatedLevel = AVERAGING_RATIO * (data.accumulatedLevel || 0) + (1 - AVERAGING_RATIO) * (avatar.audioLoudness);
|
||||
// we will do an exponential moving average by blending the previous accumulated level with the latest loudness
|
||||
data.accumulatedLevel = AVERAGING_RATIO * (data.accumulatedLevel || 0) + (1 - AVERAGING_RATIO) * (avatar.audioLoudness);
|
||||
|
||||
// add 1 to ensure we don't take log(0) and hit -infinity. Math.log is
|
||||
// natural log, so to get log base 2, just divide by ln(2).
|
||||
var logLevel = Math.log(data.accumulatedLevel + 1) / LOG2;
|
||||
// add 1 to ensure we don't take log(0) and hit -infinity. Math.log is
|
||||
// natural log, so to get log base 2, just divide by ln(2).
|
||||
audioLevel = scaleAudio(Math.log(data.accumulatedLevel + 1) / LOG2);
|
||||
|
||||
if (logLevel <= LOUDNESS_FLOOR) {
|
||||
audioLevel = logLevel / LOUDNESS_FLOOR * LOUDNESS_SCALE;
|
||||
} else {
|
||||
audioLevel = (logLevel - (LOUDNESS_FLOOR - 1.0)) * LOUDNESS_SCALE;
|
||||
// decay avgAudioLevel
|
||||
avgAudioLevel = Math.max((1-AUDIO_PEAK_DECAY) * (data.avgAudioLevel || 0), audioLevel);
|
||||
|
||||
data.avgAudioLevel = avgAudioLevel;
|
||||
data.audioLevel = audioLevel;
|
||||
|
||||
// now scale for the gain. Also, asked to boost the low end, so one simple way is
|
||||
// to take sqrt of the value. Lets try that, see how it feels.
|
||||
avgAudioLevel = Math.min(1.0, Math.sqrt(avgAudioLevel *(sessionGains[id] || 0.75)));
|
||||
}
|
||||
if (audioLevel > 1.0) {
|
||||
audioLevel = 1;
|
||||
}
|
||||
data.audioLevel = audioLevel;
|
||||
return audioLevel;
|
||||
return [audioLevel, avgAudioLevel];
|
||||
}
|
||||
|
||||
function createAudioInterval(interval) {
|
||||
|
|
151
scripts/tutorials/entity_scripts/magneticBlock.js
Normal file
|
@ -0,0 +1,151 @@
|
|||
//
|
||||
// magneticBlock.js
|
||||
//
|
||||
// Created by Matti Lahtinen 4/3/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
// Makes the entity the script is bound to connect to nearby, similarly sized entities, like a magnet.
|
||||
|
||||
(function() {
|
||||
var SNAPSOUND_SOURCE = SoundCache.getSound(Script.resolvePath("../../system/assets/sounds/entitySnap.wav?xrs"));
|
||||
var RANGE_MULTIPLER = 1.5;
|
||||
var MAX_SCALE = 2;
|
||||
var MIN_SCALE = 0.5;
|
||||
|
||||
// Helper for finding entities near entityProperties, with the search radius derived from the object's dimensions.
|
||||
function findEntitiesInRange(entityProperties) {
|
||||
var dimensions = entityProperties.dimensions;
|
||||
// Average of the dimensions instead of full value.
|
||||
return Entities.findEntities(entityProperties.position,
|
||||
((dimensions.x + dimensions.y + dimensions.z) / 3) * RANGE_MULTIPLER);
|
||||
}
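// For example (not in the original source), with the 0.3 m blocks created by makeBlocks.js the
// search radius is ((0.3 + 0.3 + 0.3) / 3) * 1.5 = 0.45 m around the released block.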
|
||||
|
||||
function getNearestValidEntityProperties(releasedProperties) {
|
||||
var entities = findEntitiesInRange(releasedProperties);
|
||||
var nearestEntity = null;
|
||||
var nearest = Number.MAX_VALUE - 1;
|
||||
var releaseSize = Vec3.length(releasedProperties.dimensions);
|
||||
entities.forEach(function(entityId) {
|
||||
if (entityId !== releasedProperties.id) {
|
||||
var entity = Entities.getEntityProperties(entityId, ['position', 'rotation', 'dimensions']);
|
||||
var distance = Vec3.distance(releasedProperties.position, entity.position);
|
||||
var scale = releaseSize / Vec3.length(entity.dimensions);
|
||||
|
||||
if (distance < nearest && (scale >= MIN_SCALE && scale <= MAX_SCALE)) {
|
||||
nearestEntity = entity;
|
||||
nearest = distance;
|
||||
}
|
||||
}
|
||||
});
|
||||
return nearestEntity;
|
||||
}
|
||||
// Create the 'class'
|
||||
function MagneticBlock() {}
|
||||
// Bind pre-emptive events
|
||||
MagneticBlock.prototype = {
|
||||
/*
|
||||
When script is bound to an entity, preload is the first callback called with the entityID.
|
||||
It will behave as the constructor
|
||||
*/
|
||||
preload: function(id) {
|
||||
/*
|
||||
We will now override any existing userData with the grabbable property.
|
||||
Only the userData property is retrieved here.
|
||||
*/
|
||||
var entityProperties = Entities.getEntityProperties(id, ['userData']);
|
||||
var userData = {
|
||||
grabbableKey: {}
|
||||
};
|
||||
// Check if existing userData field exists.
|
||||
if (entityProperties.userData && entityProperties.userData.length > 0) {
|
||||
try {
|
||||
userData = JSON.parse(entityProperties.userData);
|
||||
if (!userData.grabbableKey) {
|
||||
userData.grabbableKey = {}; // In case there is no grabbableKey in the userData.
|
||||
}
|
||||
} catch (e) {
|
||||
// if user data is not valid json, we will simply overwrite it.
|
||||
}
|
||||
}
|
||||
// Object must be triggerable in order to bind the releaseGrab event
|
||||
userData.grabbableKey.grabbable = true;
|
||||
|
||||
// Apply the new properties to entity of id
|
||||
Entities.editEntity(id, {
|
||||
userData: JSON.stringify(userData)
|
||||
});
|
||||
Script.scriptEnding.connect(function() {
|
||||
Script.removeEventHandler(id, "releaseGrab", this.releaseGrab);
|
||||
});
|
||||
},
|
||||
releaseGrab: function(entityId) {
|
||||
// releaseGrab is called with the entityId of the released entity.
|
||||
var released = Entities.getEntityProperties(entityId, ["position", "rotation", "dimensions"]);
|
||||
var target = getNearestValidEntityProperties(released);
|
||||
if (target !== null) {
|
||||
// We found nearest, now lets do the snap calculations
|
||||
// Plays the snap sound between the two objects.
|
||||
Audio.playSound(SNAPSOUND_SOURCE, {
|
||||
volume: 1,
|
||||
position: Vec3.mix(target.position, released.position, 0.5)
|
||||
});
|
||||
// Check Nearest Axis
|
||||
var difference = Vec3.subtract(released.position, target.position);
|
||||
var relativeDifference = Vec3.multiplyQbyV(Quat.inverse(target.rotation), difference);
|
||||
|
||||
var abs = {
|
||||
x: Math.abs(relativeDifference.x),
|
||||
y: Math.abs(relativeDifference.y),
|
||||
z: Math.abs(relativeDifference.z)
|
||||
};
|
||||
// Check what value is greater. and lock down to that axis.
|
||||
var newRelative = {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
};
|
||||
if (abs.x >= abs.y && abs.x >= abs.z) {
|
||||
newRelative.x = target.dimensions.x / 2 + released.dimensions.x / 2;
|
||||
if (relativeDifference.x < 0) {
|
||||
newRelative.x = -newRelative.x;
|
||||
}
|
||||
} else if (abs.y >= abs.x && abs.y >= abs.z) {
|
||||
newRelative.y = target.dimensions.y / 2 + released.dimensions.y / 2;
|
||||
if (relativeDifference.y < 0) {
|
||||
newRelative.y = -newRelative.y;
|
||||
}
|
||||
} else if (abs.z >= abs.x && abs.z >= abs.y) {
|
||||
newRelative.z = target.dimensions.z / 2 + released.dimensions.z / 2;
|
||||
if (relativeDifference.z < 0) {
|
||||
newRelative.z = -newRelative.z;
|
||||
}
|
||||
}
|
||||
// Can be expanded upon to work in nearest 90 degree rotation as well, but was not in spec.
|
||||
var newPosition = Vec3.multiplyQbyV(target.rotation, newRelative);
|
||||
Entities.editEntity(entityId, {
|
||||
// Script relies on the registrationPoint being at the very center of the object. Thus override.
|
||||
registrationPoint: {
|
||||
x: 0.5,
|
||||
y: 0.5,
|
||||
z: 0.5
|
||||
},
|
||||
rotation: target.rotation,
|
||||
position: Vec3.sum(target.position, newPosition)
|
||||
});
|
||||
// Script relies on the registrationPoint being at the very center of the object. Thus override.
|
||||
Entities.editEntity(target.id, {
|
||||
registrationPoint: {
|
||||
x: 0.5,
|
||||
y: 0.5,
|
||||
z: 0.5
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
return new MagneticBlock();
|
||||
});
|
72
scripts/tutorials/makeBlocks.js
Normal file
|
@ -0,0 +1,72 @@
|
|||
//
|
||||
// makeBlocks.js
|
||||
//
|
||||
// Created by Matti Lahtinen 4/3/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
// Creates multiple "magnetic" blocks with random colors that users can clone and snap together.
|
||||
|
||||
|
||||
(function() {
|
||||
var MAX_RGB_COMPONENT_VALUE = 256 / 2; // Limit the values to half the maximum.
|
||||
var MIN_COLOR_VALUE = 127;
|
||||
var SIZE = 0.3;
|
||||
var LIFETIME = 600;
|
||||
var VERTICAL_OFFSET = -0.25;
|
||||
var ROWS = 3;
|
||||
var COLUMNS = 3;
|
||||
// Random Pastel Generator based on Piper's script
|
||||
function newColor() {
|
||||
return {
|
||||
red: randomPastelRGBComponent(),
|
||||
green: randomPastelRGBComponent(),
|
||||
blue: randomPastelRGBComponent()
|
||||
};
|
||||
}
|
||||
// Helper functions.
|
||||
function randomPastelRGBComponent() {
|
||||
return Math.floor(Math.random() * MAX_RGB_COMPONENT_VALUE) + MIN_COLOR_VALUE;
|
||||
}
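// With MAX_RGB_COMPONENT_VALUE = 256 / 2 and MIN_COLOR_VALUE = 127, each component lands in
// [127, 254], which keeps every generated color in the lighter, pastel half of the RGB range.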
|
||||
|
||||
var SCRIPT_URL = Script.resolvePath("./entity_scripts/magneticBlock.js");
|
||||
|
||||
var frontVector = Quat.getFront(MyAvatar.orientation);
|
||||
frontVector.y += VERTICAL_OFFSET;
|
||||
for (var x = 0; x < COLUMNS; x++) {
|
||||
for (var y = 0; y < ROWS; y++) {
|
||||
|
||||
var frontOffset = {
|
||||
x: 0,
|
||||
y: SIZE * y + SIZE,
|
||||
z: SIZE * x + SIZE
|
||||
};
|
||||
|
||||
Entities.addEntity({
|
||||
type: "Box",
|
||||
name: "MagneticBlock-" + y + '-' + x,
|
||||
dimensions: {
|
||||
x: SIZE,
|
||||
y: SIZE,
|
||||
z: SIZE
|
||||
},
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
cloneable: true,
|
||||
grabbable: true,
|
||||
cloneLifetime: LIFETIME,
|
||||
cloneLimit: 9999
|
||||
}
|
||||
}),
|
||||
position: Vec3.sum(MyAvatar.position, Vec3.sum(frontOffset, frontVector)),
|
||||
color: newColor(),
|
||||
script: SCRIPT_URL
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Script.stop();
|
||||
})();
|
|
@ -38,7 +38,7 @@
|
|||
#include <StatTracker.h>
|
||||
#include <LogHandler.h>
|
||||
|
||||
#include <Windows.h>
|
||||
|
||||
#include <gpu/Texture.h>
|
||||
#include <gl/Config.h>
|
||||
#include <model/TextureMap.h>
|
||||
|
@ -99,12 +99,10 @@ int main(int argc, char** argv) {
|
|||
auto ktxMemory = gpu::Texture::serialize(*testTexture);
|
||||
{
|
||||
const auto& ktxStorage = ktxMemory->getStorage();
|
||||
auto header = ktxMemory->getHeader();
|
||||
QFile outFile(TEST_IMAGE_KTX);
|
||||
if (!outFile.open(QFile::Truncate | QFile::ReadWrite)) {
|
||||
throw std::runtime_error("Unable to open file");
|
||||
}
|
||||
//auto ktxSize = sizeof(ktx::Header); // ktxStorage->size()
|
||||
auto ktxSize = ktxStorage->size();
|
||||
outFile.resize(ktxSize);
|
||||
auto dest = outFile.map(0, ktxSize);
|
||||
|
|
|
@ -115,8 +115,8 @@ void GLMHelpersTests::testSimd() {
|
|||
|
||||
a1 = a * b;
|
||||
b1 = b * a;
|
||||
glm_mat4_mul((glm_vec4*)&a, (glm_vec4*)&b, (glm_vec4*)&a2);
|
||||
glm_mat4_mul((glm_vec4*)&b, (glm_vec4*)&a, (glm_vec4*)&b2);
|
||||
glm_mat4u_mul(a, b, a2);
|
||||
glm_mat4u_mul(b, a, b2);
|
||||
|
||||
|
||||
{
|
||||
|
@ -133,8 +133,8 @@ void GLMHelpersTests::testSimd() {
|
|||
QElapsedTimer timer;
|
||||
timer.start();
|
||||
for (size_t i = 0; i < LOOPS; ++i) {
|
||||
glm_mat4_mul((glm_vec4*)&a, (glm_vec4*)&b, (glm_vec4*)&a2);
|
||||
glm_mat4_mul((glm_vec4*)&b, (glm_vec4*)&a, (glm_vec4*)&b2);
|
||||
glm_mat4u_mul(a, b, a2);
|
||||
glm_mat4u_mul(b, a, b2);
|
||||
}
|
||||
qDebug() << "SIMD " << timer.elapsed();
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ exports.handlers = {
|
|||
'../../libraries/networking/src',
|
||||
'../../libraries/animation/src',
|
||||
'../../libraries/entities/src',
|
||||
'../../libraries/shared/src'
|
||||
];
|
||||
var exts = ['.h', '.cpp'];
|
||||
|
||||
|
|