Merge branch 'master' into smarter_textures

Conflicts:
	libraries/model-networking/src/model-networking/TextureCache.cpp
Brad Davis 2017-03-06 15:29:41 -08:00
commit 8782fe1d93
48 changed files with 1220 additions and 327 deletions


@ -43,7 +43,6 @@
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
#include "avatars/ScriptableAvatar.h"
#include "entities/AssignmentParentFinder.h"
#include "RecordingScriptingInterface.h"
#include "AbstractAudioInterface.h"
@ -88,9 +87,9 @@ void Agent::playAvatarSound(SharedSoundPointer sound) {
QMetaObject::invokeMethod(this, "playAvatarSound", Q_ARG(SharedSoundPointer, sound));
return;
} else {
// TODO: seems to add occasional artifact in tests. I believe it is
// correct to do this, but need to figure out for sure, so commenting this
// out until I verify.
// _numAvatarSoundSentBytes = 0;
setAvatarSound(sound);
}
@ -105,7 +104,7 @@ void Agent::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNo
if (message->getSize() > statsMessageLength) {
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
int piggyBackedSizeWithHeader = message->getSize() - statsMessageLength;
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
memcpy(buffer.get(), message->getRawMessage() + statsMessageLength, piggyBackedSizeWithHeader);
@ -284,7 +283,7 @@ void Agent::selectAudioFormat(const QString& selectedCodecName) {
for (auto& plugin : codecPlugins) {
if (_selectedCodecName == plugin->getName()) {
_codec = plugin;
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
qDebug() << "Selected Codec Plugin:" << _codec.get();
break;
@ -380,6 +379,8 @@ void Agent::executeScript() {
audioTransform.setTranslation(scriptedAvatar->getPosition());
audioTransform.setRotation(headOrientation);
computeLoudness(&audio, scriptedAvatar);
QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
@ -424,16 +425,16 @@ void Agent::executeScript() {
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
// 100Hz timer for audio
AvatarAudioTimer* audioTimerWorker = new AvatarAudioTimer();
audioTimerWorker->moveToThread(&_avatarAudioTimerThread);
connect(audioTimerWorker, &AvatarAudioTimer::avatarTick, this, &Agent::processAgentAvatarAudio);
connect(this, &Agent::startAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::start);
connect(this, &Agent::stopAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::stop);
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
_avatarAudioTimerThread.start();
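The timer setup above follows the standard Qt worker-object pattern: the AvatarAudioTimer is created without a parent, moved onto its own thread, driven purely through signal/slot connections, and deleted when that thread finishes. A minimal self-contained sketch of the same pattern, using hypothetical Worker/tick names rather than the actual AvatarAudioTimer API:

#include <QCoreApplication>
#include <QObject>
#include <QThread>
#include <QTimer>

class Worker : public QObject {
    Q_OBJECT
public slots:
    void start() {
        // Create the QTimer lazily so it lives on the worker thread.
        auto* timer = new QTimer(this);
        connect(timer, &QTimer::timeout, this, &Worker::tick);
        timer->start(10);   // roughly 100 Hz, like the audio timer above
    }
signals:
    void tick();            // consumers connect to this, as Agent connects to avatarTick
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    QThread workerThread;
    auto* worker = new Worker();   // no parent: lifetime is tied to the thread below
    worker->moveToThread(&workerThread);
    QObject::connect(&workerThread, &QThread::started, worker, &Worker::start);
    QObject::connect(&workerThread, &QThread::finished, worker, &QObject::deleteLater);
    workerThread.start();
    int rc = app.exec();
    workerThread.quit();
    workerThread.wait();
    return rc;
}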
// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
@ -456,14 +457,14 @@ QUuid Agent::getSessionUUID() const {
return DependencyManager::get<NodeList>()->getSessionUUID();
}
void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
// this must happen on Agent's main thread
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setIsListeningToAudioStream", Q_ARG(bool, isListeningToAudioStream));
return;
}
if (_isListeningToAudioStream) {
// have to tell just the audio mixer to KillAvatar.
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode(
@ -479,7 +480,7 @@ void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
});
}
_isListeningToAudioStream = isListeningToAudioStream;
}
void Agent::setIsAvatar(bool isAvatar) {
@ -560,6 +561,7 @@ void Agent::processAgentAvatar() {
nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
}
}
void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
_flushEncoder = false;
static const QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);
@ -570,6 +572,22 @@ void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
}
}
void Agent::computeLoudness(const QByteArray* decodedBuffer, QSharedPointer<ScriptableAvatar> scriptableAvatar) {
float loudness = 0.0f;
if (decodedBuffer) {
auto soundData = reinterpret_cast<const int16_t*>(decodedBuffer->constData());
int numFrames = decodedBuffer->size() / sizeof(int16_t);
// now iterate and come up with average
if (numFrames > 0) {
for(int i = 0; i < numFrames; i++) {
loudness += (float) std::abs(soundData[i]);
}
loudness /= numFrames;
}
}
scriptableAvatar->setAudioLoudness(loudness);
}
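In other words, the loudness reported to the ScriptableAvatar is the mean absolute amplitude of the decoded 16-bit samples: loudness = (1/N) * sum(|sample[i]|). As a quick worked example, a decoded buffer containing the samples {-3, 3, 0, 6} yields (3 + 3 + 0 + 6) / 4 = 3.0, while a null buffer (the silent-frame and flush paths below) reports 0.0.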
void Agent::processAgentAvatarAudio() {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool isPlayingRecording = recordingInterface->isPlaying();
@ -619,6 +637,7 @@ void Agent::processAgentAvatarAudio() {
audioPacket->seek(sizeof(quint16));
if (silentFrame) {
if (!_isListeningToAudioStream) {
// if we have a silent frame and we're not listening then just send nothing and break out of here
return;
@ -626,7 +645,7 @@ void Agent::processAgentAvatarAudio() {
// write the codec
audioPacket->writeString(_selectedCodecName);
// write the number of silent samples so the audio-mixer can uphold timing
audioPacket->writePrimitive(numAvailableSamples);
@ -636,8 +655,11 @@ void Agent::processAgentAvatarAudio() {
audioPacket->writePrimitive(headOrientation);
audioPacket->writePrimitive(scriptedAvatar->getPosition());
audioPacket->writePrimitive(glm::vec3(0));
// no matter what, the loudness should be set to 0
computeLoudness(nullptr, scriptedAvatar);
} else if (nextSoundOutput) {
// write the codec
audioPacket->writeString(_selectedCodecName);
@ -654,6 +676,8 @@ void Agent::processAgentAvatarAudio() {
QByteArray encodedBuffer;
if (_flushEncoder) {
encodeFrameOfZeros(encodedBuffer);
// loudness is 0
computeLoudness(nullptr, scriptedAvatar);
} else {
QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
if (_encoder) {
@ -662,10 +686,15 @@ void Agent::processAgentAvatarAudio() {
} else {
encodedBuffer = decodedBuffer;
}
computeLoudness(&decodedBuffer, scriptedAvatar);
}
audioPacket->write(encodedBuffer.constData(), encodedBuffer.size());
}
// we should never have both nextSoundOutput being null and silentFrame being false, but let's
// assert on it in case things above change in a bad way
assert(nextSoundOutput || silentFrame);
// write audio packet to AudioMixer nodes
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node) {


@ -30,6 +30,7 @@
#include <plugins/CodecPlugin.h>
#include "MixedAudioStream.h"
#include "avatars/ScriptableAvatar.h"
class Agent : public ThreadedAssignment {
Q_OBJECT
@ -68,10 +69,10 @@ private slots:
void handleAudioPacket(QSharedPointer<ReceivedMessage> message);
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
void nodeActivated(SharedNodePointer activatedNode);
void processAgentAvatar();
void processAgentAvatarAudio();
@ -82,6 +83,7 @@ private:
void negotiateAudioFormat();
void selectAudioFormat(const QString& selectedCodecName);
void encodeFrameOfZeros(QByteArray& encodedZeros);
void computeLoudness(const QByteArray* decodedBuffer, QSharedPointer<ScriptableAvatar>);
std::unique_ptr<ScriptEngine> _scriptEngine;
EntityEditPacketSender _entityEditSender;
@ -103,10 +105,10 @@ private:
bool _isAvatar = false;
QTimer* _avatarIdentityTimer = nullptr;
QHash<QUuid, quint16> _outgoingScriptAudioSequenceNumbers;
CodecPluginPointer _codec;
QString _selectedCodecName;
Encoder* _encoder { nullptr };
QThread _avatarAudioTimerThread;
bool _flushEncoder { false };
};


@ -365,6 +365,28 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointer<ReceivedMess
message->readPrimitive(&isRequesting);
nodeData->setRequestsDomainListData(isRequesting);
qCDebug(avatars) << "node" << nodeData->getNodeID() << "requestsDomainListData" << isRequesting;
// If we just opened the PAL...
if (isRequesting) {
// For each node in the NodeList...
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode(
// Discover the valid nodes we're ignoring...
[&](const SharedNodePointer& node)->bool {
if (node->getUUID() != senderNode->getUUID() &&
(nodeData->isRadiusIgnoring(node->getUUID()) ||
senderNode->isIgnoringNodeWithID(node->getUUID()))) {
return true;
}
return false;
},
// ...For those nodes, reset the lastBroadcastTime to 0
// so that the AvatarMixer will send Identity data to us
[&](const SharedNodePointer& node) {
nodeData->setLastBroadcastTime(node->getUUID(), 0);
}
);
}
}
}
auto end = usecTimestampNow();
@ -409,7 +431,31 @@ void AvatarMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message
void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto start = usecTimestampNow();
senderNode->parseIgnoreRequestMessage(message);
auto nodeList = DependencyManager::get<NodeList>();
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
bool addToIgnore;
message->readPrimitive(&addToIgnore);
while (message->getBytesLeftToRead()) {
// parse out the UUID being ignored from the packet
QUuid ignoredUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
// Reset the lastBroadcastTime for the ignored avatar to 0
// so the AvatarMixer knows it'll have to send identity data about the ignored avatar
// to the ignorer if the ignorer unignores.
nodeData->setLastBroadcastTime(ignoredUUID, 0);
// Reset the lastBroadcastTime for the ignorer (FROM THE PERSPECTIVE OF THE IGNORED) to 0
// so the AvatarMixer knows it'll have to send identity data about the ignorer
// to the ignored if the ignorer unignores.
auto ignoredNode = nodeList->nodeWithUUID(ignoredUUID);
AvatarMixerClientData* ignoredNodeData = reinterpret_cast<AvatarMixerClientData*>(ignoredNode->getLinkedData());
ignoredNodeData->setLastBroadcastTime(senderNode->getUUID(), 0);
if (addToIgnore) {
senderNode->addIgnoredNode(ignoredUUID);
} else {
senderNode->removeIgnoredNode(ignoredUUID);
}
}
auto end = usecTimestampNow();
_handleNodeIgnoreRequestPacketElapsedTime += (end - start);
}


@ -65,15 +65,6 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message) {
// compute the offset to the data payload
return _avatar->parseDataFromBuffer(message.readWithoutCopy(message.getBytesLeftToRead()));
}
bool AvatarMixerClientData::checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid) {
if (_hasReceivedFirstPacketsFrom.find(uuid) == _hasReceivedFirstPacketsFrom.end()) {
_hasReceivedFirstPacketsFrom.insert(uuid);
return false;
}
return true;
}
uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
// return the matching PacketSequenceNumber, or the default if we don't have it
auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);
@ -102,8 +93,8 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
} else {
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
}
setLastBroadcastTime(other->getUUID(), 0);
DependencyManager::get<NodeList>()->sendUnreliablePacket(*killPacket, *self);
_hasReceivedFirstPacketsFrom.erase(other->getUUID());
}
}


@ -45,8 +45,6 @@ public:
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
bool checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid);
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
@ -63,8 +61,8 @@ public:
uint16_t getLastReceivedSequenceNumber() const { return _lastReceivedSequenceNumber; }
HRCTime getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = p_high_resolution_clock::now(); }
uint64_t getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = usecTimestampNow(); }
bool getAvatarSessionDisplayNameMustChange() const { return _avatarSessionDisplayNameMustChange; }
void setAvatarSessionDisplayNameMustChange(bool set = true) { _avatarSessionDisplayNameMustChange = set; }
@ -139,7 +137,6 @@ private:
uint16_t _lastReceivedSequenceNumber { 0 };
std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
std::unordered_set<QUuid> _hasReceivedFirstPacketsFrom;
std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
// this is a map of the last time we encoded an "other" avatar for
@ -147,7 +144,7 @@ private:
std::unordered_map<QUuid, quint64> _lastOtherAvatarEncodeTime;
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
HRCTime _identityChangeTimestamp;
uint64_t _identityChangeTimestamp;
bool _avatarSessionDisplayNameMustChange{ false };
int _numAvatarsSentLastFrame = 0;


@ -80,16 +80,6 @@ int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData,
static const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
// FIXME - There is some old logic (unchanged as of 2/17/17) that randomly decides to send an identity
// packet. That logic had the following comment about the constants it uses...
//
// An 80% chance of sending a identity packet within a 5 second interval.
// assuming 60 htz update rate.
//
// Assuming the calculation of the constant is in fact correct for 80% and 60hz and 5 seconds (an assumption
// that I have not verified) then the constant is definitely wrong now, since we send at 45hz.
const float IDENTITY_SEND_PROBABILITY = 1.0f / 187.0f;
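For reference, the removed constant does match the quoted assumptions: 60 Hz over 5 seconds is 300 frames, and an 80% chance of at least one send means 1 - (1 - p)^300 = 0.8, i.e. p = 1 - 0.2^(1/300) ≈ 0.00535 ≈ 1/187. At the current 45 Hz broadcast rate (225 frames per 5 seconds) the equivalent value would be roughly 1/140. Rather than retune the constant, this commit drops the probabilistic path and, further down, compares each node's lastBroadcastTime against the other avatar's identity-change timestamp to decide when identity data must be re-sent.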
void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
quint64 start = usecTimestampNow();
@ -137,14 +127,18 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// keep track of the number of other avatar frames skipped
int numAvatarsWithSkippedFrames = 0;
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that are not in the view frustum
bool getsOutOfView = nodeData->getRequestsDomainListData();
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that they've ignored
bool getsIgnoredByMe = getsOutOfView;
// When this is true, the AvatarMixer will send Avatar data to a client
// about avatars they've ignored or that are out of view
bool PALIsOpen = nodeData->getRequestsDomainListData();
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
bool getsAnyIgnored = getsIgnoredByMe && node->getCanKick();
bool getsAnyIgnored = PALIsOpen && node->getCanKick();
if (PALIsOpen) {
// Increase minimumBytesPerAvatar if the PAL is open
minimumBytesPerAvatar += sizeof(AvatarDataPacket::AvatarGlobalPosition) +
sizeof(AvatarDataPacket::AudioLoudness);
}
// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
@ -222,13 +216,14 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// or that has ignored the viewing node
if (!avatarNode->getLinkedData()
|| avatarNode->getUUID() == node->getUUID()
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !getsIgnoredByMe)
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
shouldIgnore = true;
} else {
// Check to see if the space bubble is enabled
if (node->isIgnoreRadiusEnabled() || avatarNode->isIgnoreRadiusEnabled()) {
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Define the scale of the box for the current other node
glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
@ -306,16 +301,9 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
// make sure we send out identity packets to and from new arrivals.
bool forceSend = !nodeData->checkAndSetHasReceivedFirstPacketsFrom(otherNode->getUUID());
// FIXME - this clause seems suspicious "... || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp ..."
if (!overBudget
&& otherNodeData->getIdentityChangeTimestamp().time_since_epoch().count() > 0
&& (forceSend
|| otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp
|| distribution(generator) < IDENTITY_SEND_PROBABILITY)) {
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
if (nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(otherNodeData, node);
}
@ -335,9 +323,9 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
if (overBudget) {
overBudgetAvatars++;
_stats.overBudgetAvatars++;
detail = AvatarData::NoData;
} else if (!isInView && !getsOutOfView) {
detail = AvatarData::NoData;
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
} else if (!isInView) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
nodeData->incrementAvatarOutOfView();
} else {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO


@ -549,6 +549,7 @@ const float DEFAULT_DESKTOP_TABLET_SCALE_PERCENT = 75.0f;
const bool DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR = true;
const bool DEFAULT_HMD_TABLET_BECOMES_TOOLBAR = false;
const bool DEFAULT_TABLET_VISIBLE_TO_OTHERS = false;
const bool DEFAULT_PREFER_AVATAR_FINGER_OVER_STYLUS = false;
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bool runServer, QString runServerPathOption) :
QApplication(argc, argv),
@ -572,6 +573,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_desktopTabletBecomesToolbarSetting("desktopTabletBecomesToolbar", DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR),
_hmdTabletBecomesToolbarSetting("hmdTabletBecomesToolbar", DEFAULT_HMD_TABLET_BECOMES_TOOLBAR),
_tabletVisibleToOthersSetting("tabletVisibleToOthers", DEFAULT_TABLET_VISIBLE_TO_OTHERS),
_preferAvatarFingerOverStylusSetting("preferAvatarFingerOverStylus", DEFAULT_PREFER_AVATAR_FINGER_OVER_STYLUS),
_constrainToolbarPosition("toolbar/constrainToolbarToCenterX", true),
_scaleMirror(1.0f),
_rotateMirror(0.0f),
@ -2362,6 +2364,10 @@ void Application::setTabletVisibleToOthersSetting(bool value) {
updateSystemTabletMode();
}
void Application::setPreferAvatarFingerOverStylus(bool value) {
_preferAvatarFingerOverStylusSetting.set(value);
}
void Application::setSettingConstrainToolbarPosition(bool setting) {
_constrainToolbarPosition.set(setting);
DependencyManager::get<OffscreenUi>()->setConstrainToolbarToCenterX(setting);
@ -5186,6 +5192,7 @@ void Application::updateWindowTitle() const {
#endif
_window->setWindowTitle(title);
}
void Application::clearDomainOctreeDetails() {
// if we're about to quit, we really don't need to do any of these things...
@ -5215,6 +5222,12 @@ void Application::clearDomainOctreeDetails() {
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DEFAULT);
_recentlyClearedDomain = true;
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
DependencyManager::get<AnimationCache>()->clearUnusedResources();
DependencyManager::get<ModelCache>()->clearUnusedResources();
DependencyManager::get<SoundCache>()->clearUnusedResources();
DependencyManager::get<TextureCache>()->clearUnusedResources();
}
void Application::domainChanged(const QString& domainHostname) {


@ -220,6 +220,8 @@ public:
void setHmdTabletBecomesToolbarSetting(bool value);
bool getTabletVisibleToOthersSetting() { return _tabletVisibleToOthersSetting.get(); }
void setTabletVisibleToOthersSetting(bool value);
bool getPreferAvatarFingerOverStylus() { return _preferAvatarFingerOverStylusSetting.get(); }
void setPreferAvatarFingerOverStylus(bool value);
float getSettingConstrainToolbarPosition() { return _constrainToolbarPosition.get(); }
void setSettingConstrainToolbarPosition(bool setting);
@ -565,6 +567,7 @@ private:
Setting::Handle<bool> _desktopTabletBecomesToolbarSetting;
Setting::Handle<bool> _hmdTabletBecomesToolbarSetting;
Setting::Handle<bool> _tabletVisibleToOthersSetting;
Setting::Handle<bool> _preferAvatarFingerOverStylusSetting;
Setting::Handle<bool> _constrainToolbarPosition;
float _scaleMirror;


@ -329,7 +329,7 @@ void AvatarManager::removeAvatar(const QUuid& sessionUUID, KillAvatarReason remo
}
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar, KillAvatarReason removalReason) {
AvatarHashMap::handleRemovedAvatar(removedAvatar);
AvatarHashMap::handleRemovedAvatar(removedAvatar, removalReason);
// removedAvatar is a shared pointer to an AvatarData but we need to get to the derived Avatar
// class in this context so we can call methods that don't exist at the base class.


@ -110,13 +110,7 @@ void CauterizedModel::updateClusterMatrices() {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
// Once computed the cluster matrices, update the buffer(s)
@ -149,13 +143,7 @@ void CauterizedModel::updateClusterMatrices() {
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
jointMatrix = cauterizeMatrix;
}
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
if (!_cauterizeBoneSet.empty() && (state.clusterMatrices.size() > 1)) {


@ -60,13 +60,7 @@ void SoftAttachmentModel::updateClusterMatrices() {
} else {
jointMatrix = _rig->getJointTransform(cluster.jointIndex);
}
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
// Once computed the cluster matrices, update the buffer(s)
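Several hunks in this commit (CauterizedModel, SoftAttachmentModel, AnimPose, Model) collapse the per-call-site SSE2 #if blocks into a single call to glm_mat4u_mul(lhs, rhs, result). Judging only from the call sites shown here, it multiplies two 4x4 matrices into an output parameter that does not need to be aligned; a plausible scalar sketch under that assumption (the helper this commit actually introduces lives elsewhere in the tree and may take the SSE path internally):

#include <glm/glm.hpp>

// Hypothetical sketch inferred from the call sites above, not the real helper.
static inline void glm_mat4u_mul(const glm::mat4& lhs, const glm::mat4& rhs, glm::mat4& result) {
    // Multiply into a temporary first so 'result' may alias either input.
    glm::mat4 product = lhs * rhs;
    result = product;
}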


@ -107,6 +107,12 @@ void setupPreferences() {
auto setter = [](bool value) { qApp->setTabletVisibleToOthersSetting(value); };
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Tablet Is Visible To Others", getter, setter));
}
{
auto getter = []()->bool { return qApp->getPreferAvatarFingerOverStylus(); };
auto setter = [](bool value) { qApp->setPreferAvatarFingerOverStylus(value); };
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Prefer Avatar Finger Over Stylus", getter, setter));
}
// Snapshots
static const QString SNAPSHOTS { "Snapshots" };
{


@ -62,7 +62,11 @@ namespace render {
if (overlay->is3D()) {
auto overlay3D = std::dynamic_pointer_cast<Base3DOverlay>(overlay);
if (overlay3D->isAA())
return (overlay3D->getDrawInFront() ? LAYER_3D_FRONT : LAYER_3D);
if (overlay3D->getDrawInFront()) {
return LAYER_3D_FRONT;
} else {
return LAYER_3D;
}
else
return LAYER_NO_AA;
} else {


@ -198,18 +198,27 @@ void Web3DOverlay::render(RenderArgs* args) {
_webSurface->getRootItem()->setProperty("scriptURL", _scriptURL);
currentContext->makeCurrent(currentSurface);
auto selfOverlayID = getOverlayID();
std::weak_ptr<Web3DOverlay> weakSelf = std::dynamic_pointer_cast<Web3DOverlay>(qApp->getOverlays().getOverlay(selfOverlayID));
auto forwardPointerEvent = [=](OverlayID overlayID, const PointerEvent& event) {
if (overlayID == getOverlayID()) {
handlePointerEvent(event);
auto self = weakSelf.lock();
if (!self) {
return;
}
if (overlayID == selfOverlayID) {
self->handlePointerEvent(event);
}
};
_mousePressConnection = connect(&(qApp->getOverlays()), &Overlays::mousePressOnOverlay, forwardPointerEvent);
_mouseReleaseConnection = connect(&(qApp->getOverlays()), &Overlays::mouseReleaseOnOverlay, forwardPointerEvent);
_mouseMoveConnection = connect(&(qApp->getOverlays()), &Overlays::mouseMoveOnOverlay, forwardPointerEvent);
_hoverLeaveConnection = connect(&(qApp->getOverlays()), &Overlays::hoverLeaveOverlay,
[=](OverlayID overlayID, const PointerEvent& event) {
if (this->_pressed && this->getOverlayID() == overlayID) {
_mousePressConnection = connect(&(qApp->getOverlays()), &Overlays::mousePressOnOverlay, this, forwardPointerEvent, Qt::DirectConnection);
_mouseReleaseConnection = connect(&(qApp->getOverlays()), &Overlays::mouseReleaseOnOverlay, this, forwardPointerEvent, Qt::DirectConnection);
_mouseMoveConnection = connect(&(qApp->getOverlays()), &Overlays::mouseMoveOnOverlay, this, forwardPointerEvent, Qt::DirectConnection);
_hoverLeaveConnection = connect(&(qApp->getOverlays()), &Overlays::hoverLeaveOverlay, this, [=](OverlayID overlayID, const PointerEvent& event) {
auto self = weakSelf.lock();
if (!self) {
return;
}
if (self->_pressed && overlayID == selfOverlayID) {
// If the user mouses off the overlay while the button is down, simulate a touch end.
QTouchEvent::TouchPoint point;
point.setId(event.getID());
@ -222,12 +231,12 @@ void Web3DOverlay::render(RenderArgs* args) {
touchPoints.push_back(point);
QTouchEvent* touchEvent = new QTouchEvent(QEvent::TouchEnd, nullptr, Qt::NoModifier, Qt::TouchPointReleased,
touchPoints);
touchEvent->setWindow(_webSurface->getWindow());
touchEvent->setWindow(self->_webSurface->getWindow());
touchEvent->setDevice(&_touchDevice);
touchEvent->setTarget(_webSurface->getRootItem());
QCoreApplication::postEvent(_webSurface->getWindow(), touchEvent);
touchEvent->setTarget(self->_webSurface->getRootItem());
QCoreApplication::postEvent(self->_webSurface->getWindow(), touchEvent);
}
});
}, Qt::DirectConnection);
_emitScriptEventConnection = connect(this, &Web3DOverlay::scriptEventReceived, _webSurface.data(), &OffscreenQmlSurface::emitScriptEvent);
_webEventReceivedConnection = connect(_webSurface.data(), &OffscreenQmlSurface::webEventReceived, this, &Web3DOverlay::webEventReceived);
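The reworked connections above capture a std::weak_ptr to the overlay instead of a bare this pointer, and pass this as the context object so Qt disconnects the lambdas when the overlay is destroyed; locking the weak pointer inside the lambda turns a potential use-after-free into a no-op. A minimal sketch of the same capture pattern with hypothetical types (not the Overlays API):

#include <QObject>
#include <QPushButton>
#include <memory>

class Panel {
public:
    void onClicked() { /* react to the click */ }
};

// Wire a button to a panel without extending the panel's lifetime.
void wire(QPushButton* button, const std::shared_ptr<Panel>& panel) {
    std::weak_ptr<Panel> weakPanel = panel;        // weak: does not keep the panel alive
    QObject::connect(button, &QPushButton::clicked, button, [weakPanel]() {
        auto self = weakPanel.lock();              // null if the panel was already destroyed
        if (!self) {
            return;                                // target is gone; drop the event
        }
        self->onClicked();
    });
}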


@ -50,15 +50,9 @@ glm::vec3 AnimPose::xformVector(const glm::vec3& rhs) const {
}
AnimPose AnimPose::operator*(const AnimPose& rhs) const {
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 result;
glm::mat4 lhsMat = *this;
glm::mat4 rhsMat = rhs;
glm_mat4_mul((glm_vec4*)&lhsMat, (glm_vec4*)&rhsMat, (glm_vec4*)&result);
glm_mat4u_mul(*this, rhs, result);
return AnimPose(result);
#else
return AnimPose(static_cast<glm::mat4>(*this) * static_cast<glm::mat4>(rhs));
#endif
}
AnimPose AnimPose::inverse() const {


@ -186,6 +186,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
bool cullSmallChanges = (dataDetail == CullSmallData);
bool sendAll = (dataDetail == SendAllData);
bool sendMinimum = (dataDetail == MinimumData);
bool sendPALMinimum = (dataDetail == PALMinimum);
lazyInitHeadData();
@ -222,24 +223,41 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
auto parentID = getParentID();
bool hasAvatarGlobalPosition = true; // always include global position
bool hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
bool hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
bool hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
bool hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
bool hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
bool hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
bool hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
bool hasAvatarOrientation = false;
bool hasAvatarBoundingBox = false;
bool hasAvatarScale = false;
bool hasLookAtPosition = false;
bool hasAudioLoudness = false;
bool hasSensorToWorldMatrix = false;
bool hasAdditionalFlags = false;
// local position, and parent info only apply to avatars that are parented. The local position
// and the parent info can change independently though, so we track their "changed since"
// separately
bool hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
bool hasAvatarLocalPosition = hasParent() && (sendAll ||
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));
bool hasParentInfo = false;
bool hasAvatarLocalPosition = false;
bool hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
bool hasJointData = sendAll || !sendMinimum;
bool hasFaceTrackerInfo = false;
bool hasJointData = false;
if (sendPALMinimum) {
hasAudioLoudness = true;
} else {
hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
hasAvatarLocalPosition = hasParent() && (sendAll ||
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));
hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
hasJointData = sendAll || !sendMinimum;
}
// Leading flags, to indicate how much data is actually included in the packet...
AvatarDataPacket::HasFlags packetStateFlags =


@ -376,6 +376,7 @@ public:
typedef enum {
NoData,
PALMinimum,
MinimumData,
CullSmallData,
IncludeSmallData,


@ -159,14 +159,6 @@ const gpu::TexturePointer& TextureCache::getBlackTexture() {
return _blackTexture;
}
const gpu::TexturePointer& TextureCache::getNormalFittingTexture() {
if (!_normalFittingTexture) {
_normalFittingTexture = getImageTexture(PathUtils::resourcesPath() + "images/normalFittingScale.dds", NetworkTexture::STRICT_TEXTURE);
}
return _normalFittingTexture;
}
/// Extra data for creating textures.
class TextureExtra {
public:


@ -126,9 +126,6 @@ public:
/// Returns the a black texture (useful for a default).
const gpu::TexturePointer& getBlackTexture();
// Returns a map used to compress the normals through a fitting scale algorithm
const gpu::TexturePointer& getNormalFittingTexture();
/// Returns a texture version of an image file
static gpu::TexturePointer getImageTexture(const QString& path, Type type = Type::DEFAULT_TEXTURE, QVariantMap options = QVariantMap());
@ -153,7 +150,6 @@ private:
gpu::TexturePointer _grayTexture;
gpu::TexturePointer _blueTexture;
gpu::TexturePointer _blackTexture;
gpu::TexturePointer _normalFittingTexture;
};
#endif // hifi_TextureCache_h


@ -221,7 +221,7 @@ ResourceCache::ResourceCache(QObject* parent) : QObject(parent) {
}
ResourceCache::~ResourceCache() {
clearUnusedResource();
clearUnusedResources();
}
void ResourceCache::clearATPAssets() {
@ -265,7 +265,7 @@ void ResourceCache::clearATPAssets() {
void ResourceCache::refreshAll() {
// Clear all unused resources so we don't have to reload them
clearUnusedResource();
clearUnusedResources();
resetResourceCounters();
QHash<QUrl, QWeakPointer<Resource>> resources;
@ -418,7 +418,7 @@ void ResourceCache::reserveUnusedResource(qint64 resourceSize) {
}
}
void ResourceCache::clearUnusedResource() {
void ResourceCache::clearUnusedResources() {
// the unused resources may themselves reference resources that will be added to the unused
// list on destruction, so keep clearing until there are no references left
QWriteLocker locker(&_unusedResourcesLock);


@ -249,6 +249,7 @@ public:
void refreshAll();
void refresh(const QUrl& url);
void clearUnusedResources();
signals:
void dirty();
@ -298,7 +299,7 @@ protected:
void addUnusedResource(const QSharedPointer<Resource>& resource);
void removeUnusedResource(const QSharedPointer<Resource>& resource);
/// Attempt to load a resource if requests are below the limit, otherwise queue the resource for loading
/// \return true if the resource began loading, otherwise false if the resource is in the pending queue
static bool attemptRequest(QSharedPointer<Resource> resource);
@ -309,7 +310,6 @@ private:
friend class Resource;
void reserveUnusedResource(qint64 resourceSize);
void clearUnusedResource();
void resetResourceCounters();
void removeResource(const QUrl& url, qint64 size = 0);


@ -65,25 +65,4 @@ float packUnlit() {
return FRAG_PACK_UNLIT;
}
<!
uniform sampler2D normalFittingMap;
vec3 bestFitNormal(vec3 normal) {
vec3 absNorm = abs(normal);
float maxNAbs = max(absNorm.z, max(absNorm.x, absNorm.y));
vec2 texcoord = (absNorm.z < maxNAbs ?
(absNorm.y < maxNAbs ? absNorm.yz : absNorm.xz) :
absNorm.xy);
texcoord = (texcoord.x < texcoord.y ? texcoord.yx : texcoord.xy);
texcoord.y /= texcoord.x;
vec3 cN = normal / maxNAbs;
float fittingScale = texture(normalFittingMap, texcoord).a;
cN *= fittingScale;
return (cN * 0.5 + 0.5);
}
!>
<@endif@>


@ -414,8 +414,6 @@ _nextID(0) {
// Set the defaults needed for a simple program
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
);
GeometryCache::_simpleTransparentPipeline =
@ -424,8 +422,6 @@ _nextID(0) {
// Set the defaults needed for a simple program
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
);
GeometryCache::_simpleWirePipeline =
@ -1770,7 +1766,6 @@ static void buildWebShader(const std::string& vertShaderText, const std::string&
shaderPointerOut = gpu::Shader::createProgram(VS, PS);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), render::ShapePipeline::Slot::MAP::NORMAL_FITTING));
gpu::Shader::makeProgram(*shaderPointerOut, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_NONE);
@ -1784,9 +1779,6 @@ static void buildWebShader(const std::string& vertShaderText, const std::string&
void GeometryCache::bindOpaqueWebBrowserProgram(gpu::Batch& batch, bool isAA) {
batch.setPipeline(getOpaqueWebBrowserProgram(isAA));
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
gpu::PipelinePointer GeometryCache::getOpaqueWebBrowserProgram(bool isAA) {
@ -1802,9 +1794,6 @@ gpu::PipelinePointer GeometryCache::getOpaqueWebBrowserProgram(bool isAA) {
void GeometryCache::bindTransparentWebBrowserProgram(gpu::Batch& batch, bool isAA) {
batch.setPipeline(getTransparentWebBrowserProgram(isAA));
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
gpu::PipelinePointer GeometryCache::getTransparentWebBrowserProgram(bool isAA) {
@ -1827,9 +1816,6 @@ void GeometryCache::bindSimpleProgram(gpu::Batch& batch, bool textured, bool tra
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
}
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
gpu::PipelinePointer GeometryCache::getSimplePipeline(bool textured, bool transparent, bool culled, bool unlit, bool depthBiased) {
@ -1846,7 +1832,6 @@ gpu::PipelinePointer GeometryCache::getSimplePipeline(bool textured, bool transp
_unlitShader = gpu::Shader::createProgram(VS, PSUnlit);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), render::ShapePipeline::Slot::MAP::NORMAL_FITTING));
gpu::Shader::makeProgram(*_simpleShader, slotBindings);
gpu::Shader::makeProgram(*_unlitShader, slotBindings);
});


@ -97,6 +97,8 @@ ShapeKey MeshPartPayload::getShapeKey() const {
}
ShapeKey::Builder builder;
builder.withMaterial();
if (drawMaterialKey.isTranslucent()) {
builder.withTranslucent();
}
@ -478,6 +480,8 @@ ShapeKey ModelMeshPartPayload::getShapeKey() const {
}
ShapeKey::Builder builder;
builder.withMaterial();
if (isTranslucent || _fadeState != FADE_COMPLETE) {
builder.withTranslucent();
}


@ -1178,13 +1178,7 @@ void Model::updateClusterMatrices() {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
#if (GLM_ARCH & GLM_ARCH_SSE2) && !(defined Q_OS_MAC)
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
glm_mat4u_mul(jointMatrix, cluster.inverseBindMatrix, state.clusterMatrices[j]);
}
// Once computed the cluster matrices, update the buffer(s)


@ -75,7 +75,6 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
// GPU jobs: Start preparing the primary, deferred and lighting buffer
const auto primaryFramebuffer = addJob<PreparePrimaryFramebuffer>("PreparePrimaryBuffer");
// const auto fullFrameRangeTimer = addJob<BeginGPURangeTimer>("BeginRangeTimer");
const auto opaqueRangeTimer = addJob<BeginGPURangeTimer>("BeginOpaqueRangeTimer", "DrawOpaques");
const auto prepareDeferredInputs = PrepareDeferred::Inputs(primaryFramebuffer, lightingModel).hasVarying();
@ -154,21 +153,25 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
const auto toneMappingInputs = render::Varying(ToneMappingDeferred::Inputs(lightingFramebuffer, primaryFramebuffer));
addJob<ToneMappingDeferred>("ToneMapping", toneMappingInputs);
{ // Debug the bounds of the rendered items, still look at the zbuffer
addJob<DrawBounds>("DrawMetaBounds", metas);
addJob<DrawBounds>("DrawOpaqueBounds", opaques);
addJob<DrawBounds>("DrawTransparentBounds", transparents);
}
// Overlays
const auto overlayOpaquesInputs = DrawOverlay3D::Inputs(overlayOpaques, lightingModel).hasVarying();
const auto overlayTransparentsInputs = DrawOverlay3D::Inputs(overlayTransparents, lightingModel).hasVarying();
addJob<DrawOverlay3D>("DrawOverlay3DOpaque", overlayOpaquesInputs, true);
addJob<DrawOverlay3D>("DrawOverlay3DTransparent", overlayTransparentsInputs, false);
// Debugging stages
{
// Bounds do not draw on stencil buffer, so they must come last
addJob<DrawBounds>("DrawMetaBounds", metas);
{ // Debug the bounds of the rendered OVERLAY items, still look at the zbuffer
addJob<DrawBounds>("DrawOverlayOpaqueBounds", overlayOpaques);
addJob<DrawBounds>("DrawOverlayTransparentBounds", overlayTransparents);
}
// Debugging stages
{
// Debugging Deferred buffer job
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);
@ -208,9 +211,6 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
// Blit!
addJob<Blit>("Blit", primaryFramebuffer);
// addJob<EndGPURangeTimer>("RangeTimer", fullFrameRangeTimer);
}
void BeginGPURangeTimer::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, gpu::RangeTimerPointer& timer) {


@ -50,9 +50,13 @@
#include "overlay3D_vert.h"
#include "overlay3D_frag.h"
#include "overlay3D_model_frag.h"
#include "overlay3D_model_translucent_frag.h"
#include "overlay3D_translucent_frag.h"
#include "overlay3D_unlit_frag.h"
#include "overlay3D_translucent_unlit_frag.h"
#include "overlay3D_model_unlit_frag.h"
#include "overlay3D_model_translucent_unlit_frag.h"
using namespace render;
@ -70,15 +74,24 @@ void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void initOverlay3DPipelines(ShapePlumber& plumber) {
auto vertex = gpu::Shader::createVertex(std::string(overlay3D_vert));
auto vertexModel = gpu::Shader::createVertex(std::string(model_vert));
auto pixel = gpu::Shader::createPixel(std::string(overlay3D_frag));
auto pixelTranslucent = gpu::Shader::createPixel(std::string(overlay3D_translucent_frag));
auto pixelUnlit = gpu::Shader::createPixel(std::string(overlay3D_unlit_frag));
auto pixelTranslucentUnlit = gpu::Shader::createPixel(std::string(overlay3D_translucent_unlit_frag));
auto pixelModel = gpu::Shader::createPixel(std::string(overlay3D_model_frag));
auto pixelModelTranslucent = gpu::Shader::createPixel(std::string(overlay3D_model_translucent_frag));
auto pixelModelUnlit = gpu::Shader::createPixel(std::string(overlay3D_model_unlit_frag));
auto pixelModelTranslucentUnlit = gpu::Shader::createPixel(std::string(overlay3D_model_translucent_unlit_frag));
auto opaqueProgram = gpu::Shader::createProgram(vertex, pixel);
auto translucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucent);
auto unlitOpaqueProgram = gpu::Shader::createProgram(vertex, pixelUnlit);
auto unlitTranslucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucentUnlit);
auto materialOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
auto materialTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
auto materialUnlitOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
auto materialUnlitTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
for (int i = 0; i < 8; i++) {
bool isCulled = (i & 1);
@ -103,14 +116,20 @@ void initOverlay3DPipelines(ShapePlumber& plumber) {
}
ShapeKey::Filter::Builder builder;
isCulled ? builder.withCullFace() : builder.withoutCullFace();
isBiased ? builder.withDepthBias() : builder.withoutDepthBias();
isOpaque ? builder.withOpaque() : builder.withTranslucent();
auto simpleProgram = isOpaque ? opaqueProgram : translucentProgram;
auto unlitProgram = isOpaque ? unlitOpaqueProgram : unlitTranslucentProgram;
plumber.addPipeline(builder.withoutUnlit().build(), simpleProgram, state, &lightBatchSetter);
plumber.addPipeline(builder.withUnlit().build(), unlitProgram, state, &batchSetter);
auto materialProgram = isOpaque ? materialOpaqueProgram : materialTranslucentProgram;
auto materialUnlitProgram = isOpaque ? materialUnlitOpaqueProgram : materialUnlitTranslucentProgram;
plumber.addPipeline(builder.withMaterial().build().key(), materialProgram, state, &lightBatchSetter);
plumber.addPipeline(builder.withMaterial().withUnlit().build().key(), materialUnlitProgram, state, &batchSetter);
plumber.addPipeline(builder.withoutUnlit().withoutMaterial().build().key(), simpleProgram, state, &lightBatchSetter);
plumber.addPipeline(builder.withUnlit().withoutMaterial().build().key(), unlitProgram, state, &batchSetter);
}
}
@ -144,78 +163,87 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
// TODO: Refactor this to use a filter
// Opaques
addPipeline(
Key::Builder(),
Key::Builder().withMaterial(),
modelVertex, modelPixel);
addPipeline(
Key::Builder().withMaterial().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withTangents(),
Key::Builder().withMaterial().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSpecular(),
Key::Builder().withMaterial().withSpecular(),
modelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withTangents().withSpecular(),
Key::Builder().withMaterial().withTangents().withSpecular(),
modelNormalMapVertex, modelNormalSpecularMapPixel);
// Translucents
addPipeline(
Key::Builder().withMaterial().withTranslucent(),
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent(),
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withMaterial().withTranslucent().withUnlit(),
modelVertex, modelTranslucentUnlitPixel);
addPipeline(
Key::Builder().withTranslucent().withUnlit(),
modelVertex, modelTranslucentUnlitPixel);
addPipeline(
Key::Builder().withTranslucent().withTangents(),
Key::Builder().withMaterial().withTranslucent().withTangents(),
modelNormalMapVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent().withSpecular(),
Key::Builder().withMaterial().withTranslucent().withSpecular(),
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent().withTangents().withSpecular(),
Key::Builder().withMaterial().withTranslucent().withTangents().withSpecular(),
modelNormalMapVertex, modelTranslucentPixel);
addPipeline(
// FIXME: Ignore lightmap for translucents meshpart
Key::Builder().withTranslucent().withLightmap(),
Key::Builder().withMaterial().withTranslucent().withLightmap(),
modelVertex, modelTranslucentPixel);
// Lightmapped
addPipeline(
Key::Builder().withLightmap(),
Key::Builder().withMaterial().withLightmap(),
modelLightmapVertex, modelLightmapPixel);
addPipeline(
Key::Builder().withLightmap().withTangents(),
Key::Builder().withMaterial().withLightmap().withTangents(),
modelLightmapNormalMapVertex, modelLightmapNormalMapPixel);
addPipeline(
Key::Builder().withLightmap().withSpecular(),
Key::Builder().withMaterial().withLightmap().withSpecular(),
modelLightmapVertex, modelLightmapSpecularMapPixel);
addPipeline(
Key::Builder().withLightmap().withTangents().withSpecular(),
Key::Builder().withMaterial().withLightmap().withTangents().withSpecular(),
modelLightmapNormalMapVertex, modelLightmapNormalSpecularMapPixel);
// Skinned
addPipeline(
Key::Builder().withSkinned(),
Key::Builder().withMaterial().withSkinned(),
skinModelVertex, modelPixel);
addPipeline(
Key::Builder().withSkinned().withTangents(),
Key::Builder().withMaterial().withSkinned().withTangents(),
skinModelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSkinned().withSpecular(),
Key::Builder().withMaterial().withSkinned().withSpecular(),
skinModelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withSkinned().withTangents().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTangents().withSpecular(),
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
// Skinned and Translucent
addPipeline(
Key::Builder().withSkinned().withTranslucent(),
Key::Builder().withMaterial().withSkinned().withTranslucent(),
skinModelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withSkinned().withTranslucent().withTangents(),
Key::Builder().withMaterial().withSkinned().withTranslucent().withTangents(),
skinModelNormalMapVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withSkinned().withTranslucent().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTranslucent().withSpecular(),
skinModelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withSkinned().withTranslucent().withTangents().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTranslucent().withTangents().withSpecular(),
skinModelNormalMapVertex, modelTranslucentPixel);
// Depth-only
addPipeline(
@ -244,32 +272,32 @@ void initForwardPipelines(render::ShapePlumber& plumber) {
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
// Opaques
addPipeline(
Key::Builder(),
Key::Builder().withMaterial(),
modelVertex, modelPixel);
addPipeline(
Key::Builder().withUnlit(),
Key::Builder().withMaterial().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withTangents(),
Key::Builder().withMaterial().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSpecular(),
Key::Builder().withMaterial().withSpecular(),
modelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withTangents().withSpecular(),
Key::Builder().withMaterial().withTangents().withSpecular(),
modelNormalMapVertex, modelNormalSpecularMapPixel);
// Skinned
addPipeline(
Key::Builder().withSkinned(),
Key::Builder().withMaterial().withSkinned(),
skinModelVertex, modelPixel);
addPipeline(
Key::Builder().withSkinned().withTangents(),
Key::Builder().withMaterial().withSkinned().withTangents(),
skinModelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSkinned().withSpecular(),
Key::Builder().withMaterial().withSkinned().withSpecular(),
skinModelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withSkinned().withTangents().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTangents().withSpecular(),
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
}
@ -319,9 +347,6 @@ void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
// Set a default material
if (pipeline.locations->materialBufferUnit >= 0) {


@ -0,0 +1,88 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D.slf
// fragment shader
//
// Created by Sam Gateau on 6/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredGlobalLight.slh@>
<$declareEvalSkyboxGlobalColor()$>
<@include model/Material.slh@>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec4 _position;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float metallic = getMaterialMetallic(mat);
vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
if (metallic <= 0.5) {
metallic = 0.0;
} else {
fresnel = albedo;
metallic = 1.0;
}
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
vec3 fragPosition = _position.xyz;
//vec3 fragNormal = normalize(_normal);
TransformCamera cam = getTransformCamera();
vec3 fragNormal;
<$transformEyeToWorldDir(cam, _normal, fragNormal)$>;
vec4 color = vec4(evalSkyboxGlobalColor(
cam._viewInverse,
1.0,
occlusionTex,
fragPosition,
fragNormal,
albedo,
fresnel,
metallic,
roughness),
opacity);
// And emissive
color.rgb += emissive * isEmissiveEnabled();
// Apply standard tone mapping
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}


@ -0,0 +1,83 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D_model_transparent.slf
//
// Created by Sam Gateau on 2/27/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlended()$>
<@include model/Material.slh@>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec4 _position;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float metallic = getMaterialMetallic(mat);
vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
if (metallic <= 0.5) {
metallic = 0.0;
} else {
fresnel = albedo;
metallic = 1.0;
}
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
vec3 fragPosition = _position.xyz;
TransformCamera cam = getTransformCamera();
vec3 fragNormal;
<$transformEyeToWorldDir(cam, _normal, fragNormal)$>
vec4 color = vec4(evalGlobalLightingAlphaBlended(
cam._viewInverse,
1.0,
occlusionTex,
fragPosition,
fragNormal,
albedo,
fresnel,
metallic,
emissive,
roughness, opacity),
opacity);
// Apply standard tone mapping
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}


@ -0,0 +1,43 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_transparent_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include LightingModel.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>
in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
vec4 color = vec4(albedo * isUnlitEnabled(), opacity);
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}

View file

@ -0,0 +1,44 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include LightingModel.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>
in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
vec4 color = vec4(albedo * isUnlitEnabled(), opacity);
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}

View file

@ -20,8 +20,6 @@ void renderItems(const SceneContextPointer& sceneContext, const RenderContextPoi
void renderShapes(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ShapePlumberPointer& shapeContext, const ItemBounds& inItems, int maxDrawnItems = -1);
void renderStateSortShapes(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ShapePlumberPointer& shapeContext, const ItemBounds& inItems, int maxDrawnItems = -1);
class DrawLightConfig : public Job::Config {
Q_OBJECT
Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY numDrawnChanged)

View file

@ -39,6 +39,10 @@ void ShapePlumber::addPipelineHelper(const Filter& filter, ShapeKey key, int bit
}
} else {
// Add the brand new pipeline and cache its location in the lib
auto precedent = _pipelineMap.find(key);
if (precedent != _pipelineMap.end()) {
qCDebug(renderlogging) << "Key already assigned: " << key;
}
_pipelineMap.insert(PipelineMap::value_type(key, pipeline));
}
}
@ -65,16 +69,11 @@ void ShapePlumber::addPipeline(const Filter& filter, const gpu::ShaderPointer& p
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), Slot::BUFFER::LIGHT));
slotBindings.insert(gpu::Shader::Binding(std::string("lightAmbientBuffer"), Slot::BUFFER::LIGHT_AMBIENT_BUFFER));
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), Slot::MAP::LIGHT_AMBIENT));
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), Slot::NORMAL_FITTING));
gpu::Shader::makeProgram(*program, slotBindings);
auto locations = std::make_shared<Locations>();
locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
if (program->getTextures().findLocation("normalFittingMap") > -1) {
locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
}
locations->albedoTextureUnit = program->getTextures().findLocation("albedoMap");
locations->roughnessTextureUnit = program->getTextures().findLocation("roughnessMap");
locations->normalTextureUnit = program->getTextures().findLocation("normalMap");

View file

@ -22,13 +22,13 @@ namespace render {
class ShapeKey {
public:
enum FlagBit {
TRANSLUCENT = 0,
MATERIAL = 0,
TRANSLUCENT,
LIGHTMAP,
TANGENTS,
SPECULAR,
UNLIT,
SKINNED,
STEREO,
DEPTH_ONLY,
DEPTH_BIAS,
WIREFRAME,
@ -53,13 +53,13 @@ public:
ShapeKey build() const { return ShapeKey{_flags}; }
Builder& withMaterial() { _flags.set(MATERIAL); return (*this); }
Builder& withTranslucent() { _flags.set(TRANSLUCENT); return (*this); }
Builder& withLightmap() { _flags.set(LIGHTMAP); return (*this); }
Builder& withTangents() { _flags.set(TANGENTS); return (*this); }
Builder& withSpecular() { _flags.set(SPECULAR); return (*this); }
Builder& withUnlit() { _flags.set(UNLIT); return (*this); }
Builder& withSkinned() { _flags.set(SKINNED); return (*this); }
Builder& withStereo() { _flags.set(STEREO); return (*this); }
Builder& withDepthOnly() { _flags.set(DEPTH_ONLY); return (*this); }
Builder& withDepthBias() { _flags.set(DEPTH_BIAS); return (*this); }
Builder& withWireframe() { _flags.set(WIREFRAME); return (*this); }
@ -89,6 +89,9 @@ public:
Filter build() const { return Filter(_flags, _mask); }
Builder& withMaterial() { _flags.set(MATERIAL); _mask.set(MATERIAL); return (*this); }
Builder& withoutMaterial() { _flags.reset(MATERIAL); _mask.set(MATERIAL); return (*this); }
Builder& withTranslucent() { _flags.set(TRANSLUCENT); _mask.set(TRANSLUCENT); return (*this); }
Builder& withOpaque() { _flags.reset(TRANSLUCENT); _mask.set(TRANSLUCENT); return (*this); }
@ -107,9 +110,6 @@ public:
Builder& withSkinned() { _flags.set(SKINNED); _mask.set(SKINNED); return (*this); }
Builder& withoutSkinned() { _flags.reset(SKINNED); _mask.set(SKINNED); return (*this); }
Builder& withStereo() { _flags.set(STEREO); _mask.set(STEREO); return (*this); }
Builder& withoutStereo() { _flags.reset(STEREO); _mask.set(STEREO); return (*this); }
Builder& withDepthOnly() { _flags.set(DEPTH_ONLY); _mask.set(DEPTH_ONLY); return (*this); }
Builder& withoutDepthOnly() { _flags.reset(DEPTH_ONLY); _mask.set(DEPTH_ONLY); return (*this); }
@ -128,19 +128,20 @@ public:
Flags _mask{0};
};
Filter(const Filter::Builder& builder) : Filter(builder._flags, builder._mask) {}
ShapeKey key() const { return ShapeKey(_flags); }
protected:
friend class ShapePlumber;
Flags _flags{0};
Flags _mask{0};
};
bool useMaterial() const { return _flags[MATERIAL]; }
bool hasLightmap() const { return _flags[LIGHTMAP]; }
bool hasTangents() const { return _flags[TANGENTS]; }
bool hasSpecular() const { return _flags[SPECULAR]; }
bool isUnlit() const { return _flags[UNLIT]; }
bool isTranslucent() const { return _flags[TRANSLUCENT]; }
bool isSkinned() const { return _flags[SKINNED]; }
bool isStereo() const { return _flags[STEREO]; }
bool isDepthOnly() const { return _flags[DEPTH_ONLY]; }
bool isDepthBiased() const { return _flags[DEPTH_BIAS]; }
bool isWireFrame() const { return _flags[WIREFRAME]; }
@ -170,13 +171,13 @@ inline QDebug operator<<(QDebug debug, const ShapeKey& key) {
debug << "[ShapeKey: OWN_PIPELINE]";
} else {
debug << "[ShapeKey:"
<< "useMaterial:" << key.useMaterial()
<< "hasLightmap:" << key.hasLightmap()
<< "hasTangents:" << key.hasTangents()
<< "hasSpecular:" << key.hasSpecular()
<< "isUnlit:" << key.isUnlit()
<< "isTranslucent:" << key.isTranslucent()
<< "isSkinned:" << key.isSkinned()
<< "isStereo:" << key.isStereo()
<< "isDepthOnly:" << key.isDepthOnly()
<< "isDepthBiased:" << key.isDepthBiased()
<< "isWireFrame:" << key.isWireFrame()
@ -213,8 +214,6 @@ public:
OCCLUSION,
SCATTERING,
LIGHT_AMBIENT,
NORMAL_FITTING = 10,
};
};
@ -226,7 +225,6 @@ public:
int metallicTextureUnit;
int emissiveTextureUnit;
int occlusionTextureUnit;
int normalFittingMapUnit;
int lightingModelBufferUnit;
int skinClusterBufferUnit;
int materialBufferUnit;

View file

@ -19,6 +19,16 @@
#include <QObject>
#include <QString>
/**jsdoc
* A Quaternion
*
* @typedef Quat
* @property {float} x imaginary component i.
* @property {float} y imaginary component j.
* @property {float} z imaginary component k.
* @property {float} w real component.
*/
/// Scriptable interface for a Quaternion helper class object. Used exclusively in the JavaScript API
class Quat : public QObject {
Q_OBJECT
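As a rough script-side sketch of how a {Quat} value is produced and consumed (Quat.fromPitchYawRollDegrees is assumed here; Quat.multiply and Quat.getFront appear elsewhere in this changeset):
// Illustrative only: compose two rotations and read the components listed in the typedef above.
var yaw90 = Quat.fromPitchYawRollDegrees(0, 90, 0); // quarter turn about the Y axis
var combined = Quat.multiply(MyAvatar.orientation, yaw90);
print("x=" + combined.x + " y=" + combined.y + " z=" + combined.z + " w=" + combined.w);
var forward = Quat.getFront(combined); // forward vector of the combined rotation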

View file

@ -34,6 +34,7 @@
#include <AudioConstants.h>
#include <AudioEffectOptions.h>
#include <AvatarData.h>
#include <DebugDraw.h>
#include <EntityScriptingInterface.h>
#include <MessagesClient.h>
#include <NetworkAccessManager.h>
@ -630,6 +631,8 @@ void ScriptEngine::init() {
registerGlobalObject("Tablet", DependencyManager::get<TabletScriptingInterface>().data());
registerGlobalObject("Assets", &_assetScriptingInterface);
registerGlobalObject("Resources", DependencyManager::get<ResourceScriptingInterface>().data());
registerGlobalObject("DebugDraw", &DebugDraw::getInstance());
}
void ScriptEngine::registerValue(const QString& valueName, QScriptValue value) {

View file

@ -37,6 +37,15 @@
* @property {float} z Z-coordinate of the vector.
*/
/**jsdoc
* A 4-dimensional vector.
*
* @typedef Vec4
* @property {float} x X-coordinate of the vector.
* @property {float} y Y-coordinate of the vector.
* @property {float} z Z-coordinate of the vector.
* @property {float} w W-coordinate of the vector.
*/
/// Scriptable interface for a Vec3 helper class object. Used exclusively in the JavaScript API
class Vec3 : public QObject {
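From a script, a {Vec4} is simply an object carrying these four fields; for example (illustrative only), an opaque red used as an RGBA color:
var opaqueRed = { x: 1.0, y: 0.0, z: 0.0, w: 1.0 }; // x = red, y = green, z = blue, w = alpha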

View file

@ -10,6 +10,8 @@
#include "DebugDraw.h"
#include "SharedUtil.h"
using Lock = std::unique_lock<std::mutex>;
DebugDraw& DebugDraw::getInstance() {
static DebugDraw* instance = globalInstance<DebugDraw>("com.highfidelity.DebugDraw");
return *instance;
@ -25,22 +27,50 @@ DebugDraw::~DebugDraw() {
// world space line, drawn only once
void DebugDraw::drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color) {
Lock lock(_mapMutex);
_rays.push_back(Ray(start, end, color));
}
void DebugDraw::addMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
void DebugDraw::addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
Lock lock(_mapMutex);
_markers[key] = MarkerInfo(rotation, position, color);
}
void DebugDraw::removeMarker(const std::string& key) {
void DebugDraw::removeMarker(const QString& key) {
Lock lock(_mapMutex);
_markers.erase(key);
}
void DebugDraw::addMyAvatarMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
void DebugDraw::addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
Lock lock(_mapMutex);
_myAvatarMarkers[key] = MarkerInfo(rotation, position, color);
}
void DebugDraw::removeMyAvatarMarker(const std::string& key) {
void DebugDraw::removeMyAvatarMarker(const QString& key) {
Lock lock(_mapMutex);
_myAvatarMarkers.erase(key);
}
//
// accessors used by renderer
//
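// These accessors return copies taken under the lock, so the render thread never iterates a container that a script thread may be mutating.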
DebugDraw::MarkerMap DebugDraw::getMarkerMap() const {
Lock lock(_mapMutex);
return _markers;
}
DebugDraw::MarkerMap DebugDraw::getMyAvatarMarkerMap() const {
Lock lock(_mapMutex);
return _myAvatarMarkers;
}
DebugDraw::Rays DebugDraw::getRays() const {
Lock lock(_mapMutex);
return _rays;
}
void DebugDraw::clearRays() {
Lock lock(_mapMutex);
_rays.clear();
}

View file

@ -10,6 +10,7 @@
#ifndef hifi_DebugDraw_h
#define hifi_DebugDraw_h
#include <mutex>
#include <unordered_map>
#include <tuple>
#include <string>
@ -17,26 +18,69 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
class DebugDraw {
#include <QObject>
#include <QString>
/**jsdoc
* Helper functions to render ephemeral debug markers and lines.
* DebugDraw markers and lines are only visible locally; they are not visible to other users.
* @namespace DebugDraw
*/
class DebugDraw : public QObject {
Q_OBJECT
public:
static DebugDraw& getInstance();
DebugDraw();
~DebugDraw();
// world space line, drawn only once
void drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color);
/**jsdoc
* Draws a line in world space, but it will only be visible for a single frame.
* @function DebugDraw.drawRay
* @param {Vec3} start - start position of line in world space.
* @param {Vec3} end - end position of line in world space.
* @param {Vec4} color - color of line, each component should be in the zero to one range. x = red, y = green, z = blue, w = alpha.
*/
Q_INVOKABLE void drawRay(const glm::vec3& start, const glm::vec3& end, const glm::vec4& color);
// world space marker, drawn every frame until it is removed.
void addMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
void removeMarker(const std::string& key);
/**jsdoc
* Adds a debug marker to the world. This marker will be drawn every frame until it is removed with DebugDraw.removeMarker.
* This can be called repeatedly to change the position of the marker.
* @function DebugDraw.addMarker
* @param {string} key - name to uniquely identify this marker, later used for DebugDraw.removeMarker.
* @param {Quat} rotation - rotation of the marker in world space.
* @param {Vec3} position - position of the marker in world space.
* @param {Vec4} color - color of the marker.
*/
Q_INVOKABLE void addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
// myAvatar-relative marker, drawn every frame until it is removed.
void addMyAvatarMarker(const std::string& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
void removeMyAvatarMarker(const std::string& key);
/**jsdoc
* Removes a debug marker from the world. Once a marker is removed, it will no longer be visible.
* @function DebugDraw.removeMarker
* @param {string} key - name of marker to remove.
*/
Q_INVOKABLE void removeMarker(const QString& key);
/**jsdoc
* Adds a debug marker relative to MyAvatar. This marker will be drawn every frame until it is removed with DebugDraw.removeMyAvatarMarker.
* This can be called repeatedly to change the position of the marker.
* @function DebugDraw.addMyAvatarMarker
* @param {string} key - name to uniquely identify this marker, later used for DebugDraw.removeMyAvatarMarker.
* @param {Quat} rotation - rotation of the marker in avatar space.
* @param {Vec3} position - position of the marker in avatar space.
* @param {Vec4} color - color of the marker.
*/
Q_INVOKABLE void addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
/**jsdoc
* Removes an avatar-relative debug marker. Once a marker is removed, it will no longer be visible.
* @function DebugDraw.removeMyAvatarMarker
* @param {string} key - name of marker to remove.
*/
Q_INVOKABLE void removeMyAvatarMarker(const QString& key);
using MarkerInfo = std::tuple<glm::quat, glm::vec3, glm::vec4>;
using MarkerMap = std::unordered_map<std::string, MarkerInfo>;
using MarkerMap = std::map<QString, MarkerInfo>;
using Ray = std::tuple<glm::vec3, glm::vec3, glm::vec4>;
using Rays = std::vector<Ray>;
@ -44,16 +88,17 @@ public:
// accessors used by renderer
//
const MarkerMap& getMarkerMap() const { return _markers; }
const MarkerMap& getMyAvatarMarkerMap() const { return _myAvatarMarkers; }
MarkerMap getMarkerMap() const;
MarkerMap getMyAvatarMarkerMap() const;
void updateMyAvatarPos(const glm::vec3& pos) { _myAvatarPos = pos; }
const glm::vec3& getMyAvatarPos() const { return _myAvatarPos; }
void updateMyAvatarRot(const glm::quat& rot) { _myAvatarRot = rot; }
const glm::quat& getMyAvatarRot() const { return _myAvatarRot; }
const Rays getRays() const { return _rays; }
void clearRays() { _rays.clear(); }
Rays getRays() const;
void clearRays();
protected:
mutable std::mutex _mapMutex;
MarkerMap _markers;
MarkerMap _myAvatarMarkers;
glm::quat _myAvatarRot;
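A minimal script-side sketch of the namespace documented above (the marker key, positions, and the update/cleanup wiring are illustrative, not part of this change):
var GREEN = { x: 0, y: 1, z: 0, w: 1 }; // Vec4 color, alpha = 1
var IDENTITY = { x: 0, y: 0, z: 0, w: 1 }; // identity rotation
// Persistent marker one meter above the avatar's origin, in avatar space.
DebugDraw.addMyAvatarMarker("debugHead", IDENTITY, { x: 0, y: 1, z: 0 }, GREEN);
Script.update.connect(function () {
    // One-frame ray from the avatar along its forward direction.
    var start = MyAvatar.position;
    var end = Vec3.sum(start, Quat.getFront(MyAvatar.orientation));
    DebugDraw.drawRay(start, end, GREEN);
});
Script.scriptEnding.connect(function () {
    DebugDraw.removeMyAvatarMarker("debugHead");
});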

View file

@ -245,4 +245,53 @@ inline bool isNaN(const glm::quat& value) { return isNaN(value.w) || isNaN(value
glm::mat4 orthoInverse(const glm::mat4& m);
//
// Safe replacement of glm_mat4_mul() for unaligned arguments instead of __m128
//
inline void glm_mat4u_mul(const glm::mat4& m1, const glm::mat4& m2, glm::mat4& r) {
#if GLM_ARCH & GLM_ARCH_SSE2_BIT
__m128 u0 = _mm_loadu_ps((float*)&m1[0][0]);
__m128 u1 = _mm_loadu_ps((float*)&m1[1][0]);
__m128 u2 = _mm_loadu_ps((float*)&m1[2][0]);
__m128 u3 = _mm_loadu_ps((float*)&m1[3][0]);
__m128 v0 = _mm_loadu_ps((float*)&m2[0][0]);
__m128 v1 = _mm_loadu_ps((float*)&m2[1][0]);
__m128 v2 = _mm_loadu_ps((float*)&m2[2][0]);
__m128 v3 = _mm_loadu_ps((float*)&m2[3][0]);
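// r = m1 * m2 in column-major layout: each result column is a linear combination of m1's columns,
// weighted by the components of the corresponding column of m2.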
__m128 t0 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(0,0,0,0)), u0);
__m128 t1 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(1,1,1,1)), u1);
__m128 t2 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(2,2,2,2)), u2);
__m128 t3 = _mm_mul_ps(_mm_shuffle_ps(v0, v0, _MM_SHUFFLE(3,3,3,3)), u3);
v0 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));
t0 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(0,0,0,0)), u0);
t1 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(1,1,1,1)), u1);
t2 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(2,2,2,2)), u2);
t3 = _mm_mul_ps(_mm_shuffle_ps(v1, v1, _MM_SHUFFLE(3,3,3,3)), u3);
v1 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));
t0 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(0,0,0,0)), u0);
t1 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(1,1,1,1)), u1);
t2 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(2,2,2,2)), u2);
t3 = _mm_mul_ps(_mm_shuffle_ps(v2, v2, _MM_SHUFFLE(3,3,3,3)), u3);
v2 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));
t0 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(0,0,0,0)), u0);
t1 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(1,1,1,1)), u1);
t2 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(2,2,2,2)), u2);
t3 = _mm_mul_ps(_mm_shuffle_ps(v3, v3, _MM_SHUFFLE(3,3,3,3)), u3);
v3 = _mm_add_ps(_mm_add_ps(t0, t1), _mm_add_ps(t2, t3));
_mm_storeu_ps((float*)&r[0][0], v0);
_mm_storeu_ps((float*)&r[1][0], v1);
_mm_storeu_ps((float*)&r[2][0], v2);
_mm_storeu_ps((float*)&r[3][0], v3);
#else
r = m1 * m2;
#endif
}
#endif // hifi_GLMHelpers_h

View file

@ -159,13 +159,33 @@ Column {
}
}
Row {
Column {
id: metas
CheckBox {
text: "Draw Meta Bounds"
text: "Metas"
checked: Render.getConfig("DrawMetaBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawMetaBounds")["enabled"] = checked }
}
CheckBox {
text: "Opaques"
checked: Render.getConfig("DrawOpaqueBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawOpaqueBounds")["enabled"] = checked }
}
CheckBox {
text: "Transparents"
checked: Render.getConfig("DrawTransparentBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawTransparentBounds")["enabled"] = checked }
}
CheckBox {
text: "Overlay Opaques"
checked: Render.getConfig("DrawOverlayOpaqueBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawOverlayOpaqueBounds")["enabled"] = checked }
}
CheckBox {
text: "Overlay Transparents"
checked: Render.getConfig("DrawOverlayTransparentBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawOverlayTransparentBounds")["enabled"] = checked }
}
}
}

Binary file not shown.

View file

@ -74,6 +74,10 @@ var WEB_TOUCH_Y_OFFSET = 0.05; // how far forward (or back with a negative numbe
var WEB_TOUCH_TOO_CLOSE = 0.03; // if the stylus is pushed far through the web surface, don't consider it touching
var WEB_TOUCH_Y_TOUCH_DEADZONE_SIZE = 0.01;
var FINGER_TOUCH_Y_OFFSET = -0.02;
var FINGER_TOUCH_MIN = -0.01 - FINGER_TOUCH_Y_OFFSET;
var FINGER_TOUCH_MAX = 0.01 - FINGER_TOUCH_Y_OFFSET;
//
// distant manipulation
//
@ -258,19 +262,51 @@ CONTROLLER_STATE_MACHINE[STATE_FAR_TRIGGER] = {
updateMethod: "farTrigger"
};
CONTROLLER_STATE_MACHINE[STATE_ENTITY_STYLUS_TOUCHING] = {
name: "entityTouching",
name: "entityStylusTouching",
enterMethod: "entityTouchingEnter",
exitMethod: "entityTouchingExit",
updateMethod: "entityTouching"
};
CONTROLLER_STATE_MACHINE[STATE_ENTITY_LASER_TOUCHING] = {
name: "entityLaserTouching",
enterMethod: "entityTouchingEnter",
exitMethod: "entityTouchingExit",
updateMethod: "entityTouching"
};
CONTROLLER_STATE_MACHINE[STATE_ENTITY_LASER_TOUCHING] = CONTROLLER_STATE_MACHINE[STATE_ENTITY_STYLUS_TOUCHING];
CONTROLLER_STATE_MACHINE[STATE_OVERLAY_STYLUS_TOUCHING] = {
name: "overlayTouching",
name: "overlayStylusTouching",
enterMethod: "overlayTouchingEnter",
exitMethod: "overlayTouchingExit",
updateMethod: "overlayTouching"
};
CONTROLLER_STATE_MACHINE[STATE_OVERLAY_LASER_TOUCHING] = CONTROLLER_STATE_MACHINE[STATE_OVERLAY_STYLUS_TOUCHING];
CONTROLLER_STATE_MACHINE[STATE_OVERLAY_LASER_TOUCHING] = {
name: "overlayLaserTouching",
enterMethod: "overlayTouchingEnter",
exitMethod: "overlayTouchingExit",
updateMethod: "overlayTouching"
};
function getFingerWorldLocation(hand) {
var fingerJointName = (hand === RIGHT_HAND) ? "RightHandIndex4" : "LeftHandIndex4";
var fingerJointIndex = MyAvatar.getJointIndex(fingerJointName);
var fingerPosition = MyAvatar.getAbsoluteJointTranslationInObjectFrame(fingerJointIndex);
var fingerRotation = MyAvatar.getAbsoluteJointRotationInObjectFrame(fingerJointIndex);
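// Compose the avatar-space joint pose with the avatar's world transform to get a world-space pose for the fingertip.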
var worldFingerRotation = Quat.multiply(MyAvatar.orientation, fingerRotation);
var worldFingerPosition = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, fingerPosition));
// local y offset.
var localYOffset = Vec3.multiplyQbyV(worldFingerRotation, {x: 0, y: FINGER_TOUCH_Y_OFFSET, z: 0});
var offsetWorldFingerPosition = Vec3.sum(worldFingerPosition, localYOffset);
return {
position: offsetWorldFingerPosition,
orientation: worldFingerRotation,
rotation: worldFingerRotation,
valid: true
};
}
// Object assign polyfill
if (typeof Object.assign != 'function') {
@ -374,6 +410,7 @@ function handLaserIntersectItem(position, rotation, start) {
direction: rayDirection,
length: PICK_MAX_DISTANCE
};
return intersectionInfo;
} else {
// entity has been destroyed? or is no longer in cache
@ -440,16 +477,18 @@ function entityIsGrabbedByOther(entityID) {
var actionID = actionIDs[actionIndex];
var actionArguments = Entities.getActionArguments(entityID, actionID);
var tag = actionArguments.tag;
if (tag == getTag()) {
if (tag === getTag()) {
// we see a grab-*uuid* shaped tag, but it's our tag, so that's okay.
continue;
}
if (tag.slice(0, 5) == "grab-") {
var GRAB_PREFIX_LENGTH = 5;
var UUID_LENGTH = 38;
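// The action tag has the form "grab-<session uuid>"; the slice below extracts that session UUID so callers can look up the other avatar.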
if (tag && tag.slice(0, GRAB_PREFIX_LENGTH) == "grab-") {
// we see a grab-*uuid* shaped tag and it's not ours, so someone else is grabbing it.
return true;
return tag.slice(GRAB_PREFIX_LENGTH, GRAB_PREFIX_LENGTH + UUID_LENGTH - 1);
}
}
return false;
return null;
}
function propsArePhysical(props) {
@ -823,6 +862,9 @@ function MyController(hand) {
// for visualizations
this.overlayLine = null;
this.searchSphere = null;
this.otherGrabbingLine = null;
this.otherGrabbingUUID = null;
this.waitForTriggerRelease = false;
@ -844,6 +886,8 @@ function MyController(hand) {
this.tabletStabbedPos2D = null;
this.tabletStabbedPos3D = null;
this.useFingerInsteadOfStylus = false;
var _this = this;
var suppressedIn2D = [STATE_OFF, STATE_SEARCHING];
@ -857,10 +901,22 @@ function MyController(hand) {
this.updateSmoothedTrigger();
this.maybeScaleMyAvatar();
var DEFAULT_USE_FINGER_AS_STYLUS = false;
var USE_FINGER_AS_STYLUS = Settings.getValue("preferAvatarFingerOverStylus");
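// An unset preference comes back as an empty string, so fall back to the default.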
if (USE_FINGER_AS_STYLUS === "") {
USE_FINGER_AS_STYLUS = DEFAULT_USE_FINGER_AS_STYLUS;
}
if (USE_FINGER_AS_STYLUS && MyAvatar.getJointIndex("LeftHandIndex4") !== -1) {
this.useFingerInsteadOfStylus = true;
} else {
this.useFingerInsteadOfStylus = false;
}
if (this.ignoreInput()) {
// Most hand input is disabled, because we are interacting with the 2d hud.
// However, we still should check for collisions of the stylus with the web overlay.
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
this.processStylus(controllerLocation.position);
@ -1094,6 +1150,29 @@ function MyController(hand) {
}
};
this.otherGrabbingLineOn = function(avatarPosition, entityPosition, color) {
if (this.otherGrabbingLine === null) {
var lineProperties = {
lineWidth: 5,
start: avatarPosition,
end: entityPosition,
color: color,
glow: 1.0,
ignoreRayIntersection: true,
drawInFront: true,
visible: true,
alpha: 1
};
this.otherGrabbingLine = Overlays.addOverlay("line3d", lineProperties);
} else {
Overlays.editOverlay(this.otherGrabbingLine, {
start: avatarPosition,
end: entityPosition,
color: color
});
}
};
this.evalLightWorldTransform = function(modelPos, modelRot) {
var MODEL_LIGHT_POSITION = {
@ -1137,14 +1216,20 @@ function MyController(hand) {
}
};
this.turnOffVisualizations = function() {
this.otherGrabbingLineOff = function() {
if (this.otherGrabbingLine !== null) {
Overlays.deleteOverlay(this.otherGrabbingLine);
}
this.otherGrabbingLine = null;
};
this.turnOffVisualizations = function() {
this.overlayLineOff();
this.grabPointSphereOff();
this.lineOff();
this.searchSphereOff();
this.otherGrabbingLineOff();
restore2DMode();
};
this.triggerPress = function(value) {
@ -1207,30 +1292,54 @@ function MyController(hand) {
};
this.processStylus = function(worldHandPosition) {
// see if the hand is near a tablet or web-entity
var candidateEntities = Entities.findEntities(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
entityPropertiesCache.addEntities(candidateEntities);
var nearWeb = false;
for (var i = 0; i < candidateEntities.length; i++) {
var props = entityPropertiesCache.getProps(candidateEntities[i]);
if (props && (props.type == "Web" || this.isTablet(candidateEntities[i]))) {
nearWeb = true;
break;
var performRayTest = false;
if (this.useFingerInsteadOfStylus) {
this.hideStylus();
performRayTest = true;
} else {
var i;
// see if the hand is near a tablet or web-entity
var candidateEntities = Entities.findEntities(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
entityPropertiesCache.addEntities(candidateEntities);
for (i = 0; i < candidateEntities.length; i++) {
var props = entityPropertiesCache.getProps(candidateEntities[i]);
if (props && (props.type == "Web" || this.isTablet(candidateEntities[i]))) {
performRayTest = true;
break;
}
}
if (!performRayTest) {
var candidateOverlays = Overlays.findOverlays(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
for (i = 0; i < candidateOverlays.length; i++) {
if (this.isTablet(candidateOverlays[i])) {
performRayTest = true;
break;
}
}
}
if (performRayTest) {
this.showStylus();
} else {
this.hideStylus();
}
}
var candidateOverlays = Overlays.findOverlays(worldHandPosition, WEB_DISPLAY_STYLUS_DISTANCE);
for (var j = 0; j < candidateOverlays.length; j++) {
if (this.isTablet(candidateOverlays[j])) {
nearWeb = true;
if (performRayTest) {
var rayPickInfo = this.calcRayPickInfo(this.hand, this.useFingerInsteadOfStylus);
var max, min;
if (this.useFingerInsteadOfStylus) {
max = FINGER_TOUCH_MAX;
min = FINGER_TOUCH_MIN;
} else {
max = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET;
min = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE;
}
}
if (nearWeb) {
this.showStylus();
var rayPickInfo = this.calcRayPickInfo(this.hand);
if (rayPickInfo.distance < WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET &&
rayPickInfo.distance > WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE) {
if (rayPickInfo.distance < max && rayPickInfo.distance > min) {
this.handleStylusOnHomeButton(rayPickInfo);
if (this.handleStylusOnWebEntity(rayPickInfo)) {
return;
@ -1239,10 +1348,8 @@ function MyController(hand) {
return;
}
} else {
this.homeButtonTouched = false;
}
} else {
this.hideStylus();
this.homeButtonTouched = false;
}
}
};
@ -1357,10 +1464,17 @@ function MyController(hand) {
// Performs ray pick test from the hand controller into the world
// @param {number} which hand to use, RIGHT_HAND or LEFT_HAND
// @param {bool} if true use the world position/orientation of the index finger to cast the ray from.
// @returns {object} returns object with two keys entityID and distance
//
this.calcRayPickInfo = function(hand) {
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
this.calcRayPickInfo = function(hand, useFingerInsteadOfController) {
var controllerLocation;
if (useFingerInsteadOfController) {
controllerLocation = getFingerWorldLocation(hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var worldHandPosition = controllerLocation.position;
var worldHandRotation = controllerLocation.orientation;
@ -1567,7 +1681,8 @@ function MyController(hand) {
return false;
}
if (entityIsGrabbedByOther(entityID)) {
this.otherGrabbingUUID = entityIsGrabbedByOther(entityID);
if (this.otherGrabbingUUID !== null) {
// don't distance grab something that is already grabbed.
if (debug) {
print("distance grab is skipping '" + props.name + "': already grabbed by another.");
@ -1771,6 +1886,7 @@ function MyController(hand) {
} else {
// potentialFarTriggerEntity = entity;
}
this.otherGrabbingLineOff();
} else if (this.entityIsDistanceGrabbable(rayPickInfo.entityID, handPosition)) {
if (this.triggerSmoothedGrab() && !isEditing() && farGrabEnabled && farSearching) {
this.grabbedThingID = entity;
@ -1785,7 +1901,25 @@ function MyController(hand) {
} else {
// potentialFarGrabEntity = entity;
}
this.otherGrabbingLineOff();
} else if (this.otherGrabbingUUID !== null) {
if (this.triggerSmoothedGrab() && !isEditing() && farGrabEnabled && farSearching) {
var avatar = AvatarList.getAvatar(this.otherGrabbingUUID);
var IN_FRONT_OF_AVATAR = { x: 0, y: 0.2, z: 0.4 }; // Up from hips and in front of avatar.
var startPosition = Vec3.sum(avatar.position, Vec3.multiplyQbyV(avatar.rotation, IN_FRONT_OF_AVATAR));
var finishPosition = Vec3.sum(rayPickInfo.properties.position, // Entity's centroid.
Vec3.multiplyQbyV(rayPickInfo.properties.rotation,
Vec3.multiplyVbyV(rayPickInfo.properties.dimensions,
Vec3.subtract(DEFAULT_REGISTRATION_POINT, rayPickInfo.properties.registrationPoint))));
this.otherGrabbingLineOn(startPosition, finishPosition, COLORS_GRAB_DISTANCE_HOLD);
} else {
this.otherGrabbingLineOff();
}
} else {
this.otherGrabbingLineOff();
}
} else {
this.otherGrabbingLineOff();
}
this.updateEquipHaptics(potentialEquipHotspot, handPosition);
@ -2431,6 +2565,7 @@ function MyController(hand) {
this.lineOff();
this.overlayLineOff();
this.searchSphereOff();
this.otherGrabbingLineOff();
this.dropGestureReset();
this.clearEquipHaptics();
@ -2963,8 +3098,13 @@ function MyController(hand) {
this.entityTouchingEnter = function() {
// test for intersection between controller laser and web entity plane.
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID,
getControllerWorldLocation(this.handToController(), true));
var controllerLocation;
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
controllerLocation = getFingerWorldLocation(this.hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID, controllerLocation);
if (intersectInfo) {
var pointerEvent = {
type: "Press",
@ -3000,8 +3140,13 @@ function MyController(hand) {
this.entityTouchingExit = function() {
// test for intersection between controller laser and web entity plane.
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID,
getControllerWorldLocation(this.handToController(), true));
var controllerLocation;
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
controllerLocation = getFingerWorldLocation(this.hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID, controllerLocation);
if (intersectInfo) {
var pointerEvent;
if (this.deadspotExpired) {
@ -3041,12 +3186,24 @@ function MyController(hand) {
}
// test for intersection between controller laser and web entity plane.
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID,
getControllerWorldLocation(this.handToController(), true));
var controllerLocation;
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
controllerLocation = getFingerWorldLocation(this.hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var intersectInfo = handLaserIntersectEntity(this.grabbedThingID, controllerLocation);
if (intersectInfo) {
var max;
if (this.useFingerInsteadOfStylus && this.state === STATE_ENTITY_STYLUS_TOUCHING) {
max = FINGER_TOUCH_MAX;
} else {
max = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET;
}
if (this.state == STATE_ENTITY_STYLUS_TOUCHING &&
intersectInfo.distance > WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET) {
intersectInfo.distance > max) {
this.setState(STATE_OFF, "pulled away from web entity");
return;
}
@ -3089,8 +3246,13 @@ function MyController(hand) {
this.overlayTouchingEnter = function () {
// Test for intersection between controller laser and Web overlay plane.
var intersectInfo =
handLaserIntersectOverlay(this.grabbedOverlay, getControllerWorldLocation(this.handToController(), true));
var controllerLocation;
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
controllerLocation = getFingerWorldLocation(this.hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var intersectInfo = handLaserIntersectOverlay(this.grabbedOverlay, controllerLocation);
if (intersectInfo) {
var pointerEvent = {
type: "Press",
@ -3125,8 +3287,13 @@ function MyController(hand) {
this.overlayTouchingExit = function () {
// Test for intersection between controller laser and Web overlay plane.
var intersectInfo =
handLaserIntersectOverlay(this.grabbedOverlay, getControllerWorldLocation(this.handToController(), true));
var controllerLocation;
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
controllerLocation = getFingerWorldLocation(this.hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var intersectInfo = handLaserIntersectOverlay(this.grabbedOverlay, controllerLocation);
if (intersectInfo) {
var pointerEvent;
@ -3183,12 +3350,25 @@ function MyController(hand) {
}
// Test for intersection between controller laser and Web overlay plane.
var intersectInfo =
handLaserIntersectOverlay(this.grabbedOverlay, getControllerWorldLocation(this.handToController(), true));
var controllerLocation;
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
controllerLocation = getFingerWorldLocation(this.hand);
} else {
controllerLocation = getControllerWorldLocation(this.handToController(), true);
}
var intersectInfo = handLaserIntersectOverlay(this.grabbedOverlay, controllerLocation);
if (intersectInfo) {
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING &&
intersectInfo.distance > WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET + WEB_TOUCH_Y_TOUCH_DEADZONE_SIZE) {
var max, min;
if (this.useFingerInsteadOfStylus && this.state === STATE_OVERLAY_STYLUS_TOUCHING) {
max = FINGER_TOUCH_MAX;
min = FINGER_TOUCH_MIN;
} else {
max = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_Y_OFFSET + WEB_TOUCH_Y_TOUCH_DEADZONE_SIZE;
min = WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE;
}
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING && intersectInfo.distance > max) {
this.grabbedThingID = null;
this.setState(STATE_OFF, "pulled away from overlay");
return;
@ -3199,7 +3379,7 @@ function MyController(hand) {
if (this.state == STATE_OVERLAY_STYLUS_TOUCHING &&
!this.tabletStabbed &&
intersectInfo.distance < WEB_STYLUS_LENGTH / 2.0 + WEB_TOUCH_TOO_CLOSE) {
intersectInfo.distance < min) {
// they've stabbed the tablet, don't send events until they pull back
this.tabletStabbed = true;
this.tabletStabbedPos2D = pos2D;
@ -3372,7 +3552,6 @@ function MyController(hand) {
// we appear to be holding something and this script isn't in a state that would be holding something.
// unhook it. if we previously took note of this entity's parent, put it back where it was. This
// works around some problems that happen when more than one hand or avatar is passing something around.
print("disconnecting stray child of hand: (" + _this.hand + ") " + childID);
if (_this.previousParentID[childID]) {
var previousParentID = _this.previousParentID[childID];
var previousParentJointIndex = _this.previousParentJointIndex[childID];
@ -3390,13 +3569,21 @@ function MyController(hand) {
}
_this.previouslyUnhooked[childID] = now;
// we don't know if it's an entity or an overlay
if (Overlays.getProperty(childID, "grabbable")) {
// only auto-unhook overlays that were flagged as grabbable. this avoids unhooking overlays
// used in tutorial.
Overlays.editOverlay(childID, {
parentID: previousParentID,
parentJointIndex: previousParentJointIndex
});
}
Entities.editEntity(childID, { parentID: previousParentID, parentJointIndex: previousParentJointIndex });
Overlays.editOverlay(childID, { parentID: previousParentID, parentJointIndex: previousParentJointIndex });
} else {
Entities.editEntity(childID, { parentID: NULL_UUID });
Overlays.editOverlay(childID, { parentID: NULL_UUID });
if (Overlays.getProperty(childID, "grabbable")) {
Overlays.editOverlay(childID, { parentID: NULL_UUID });
}
}
}
});

View file

@ -248,12 +248,16 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
}
break;
case 'refresh':
data = {};
ExtendedOverlay.some(function (overlay) { // capture the audio data
data[overlay.key] = overlay;
});
removeOverlays();
// If filter is specified from .qml instead of through settings, update the settings.
if (message.params.filter !== undefined) {
Settings.setValue('pal/filtered', !!message.params.filter);
}
populateUserList(message.params.selected);
populateUserList(message.params.selected, data);
UserActivityLogger.palAction("refresh", "");
break;
case 'displayNameUpdate':
@ -285,7 +289,7 @@ function addAvatarNode(id) {
}
// Each open/refresh will capture a stable set of avatarsOfInterest, within the specified filter.
var avatarsOfInterest = {};
function populateUserList(selectData) {
function populateUserList(selectData, oldAudioData) {
var filter = Settings.getValue('pal/filtered') && {distance: Settings.getValue('pal/nearDistance')};
var data = [], avatars = AvatarList.getAvatarIdentifiers();
avatarsOfInterest = {};
@ -317,12 +321,13 @@ function populateUserList(selectData) {
if (id && filter && ((Math.abs(horizontal) > horizontalHalfAngle) || (Math.abs(vertical) > verticalHalfAngle))) {
return;
}
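// Carry over the previously captured audio meter values so a list refresh does not visibly reset them.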
var oldAudio = oldAudioData && oldAudioData[id];
var avatarPalDatum = {
displayName: name,
userName: '',
sessionId: id || '',
audioLevel: 0.0,
avgAudioLevel: 0.0,
audioLevel: (oldAudio && oldAudio.audioLevel) || 0.0,
avgAudioLevel: (oldAudio && oldAudio.avgAudioLevel) || 0.0,
admin: false,
personalMute: !!id && Users.getPersonalMuteStatus(id), // expects proper boolean, not null
ignore: !!id && Users.getIgnoreStatus(id) // ditto

View file

@ -0,0 +1,151 @@
//
// magneticBlock.js
//
// Created by Matti Lahtinen 4/3/2017
// Copyright 2017 High Fidelity, Inc.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Makes the entity the script is bound to connect to nearby, similarly sized entities, like a magnet.
(function() {
var SNAPSOUND_SOURCE = SoundCache.getSound(Script.resolvePath("../../system/assets/sounds/entitySnap.wav?xrs"));
var RANGE_MULTIPLIER = 1.5;
var MAX_SCALE = 2;
var MIN_SCALE = 0.5;
// Helper for detecting objects near entityProperties, with the search range derived from the dimensions of the object.
function findEntitiesInRange(entityProperties) {
var dimensions = entityProperties.dimensions;
// Average of the dimensions instead of full value.
return Entities.findEntities(entityProperties.position,
((dimensions.x + dimensions.y + dimensions.z) / 3) * RANGE_MULTIPLIER);
}
function getNearestValidEntityProperties(releasedProperties) {
var entities = findEntitiesInRange(releasedProperties);
var nearestEntity = null;
var nearest = Number.MAX_VALUE - 1;
var releaseSize = Vec3.length(releasedProperties.dimensions);
entities.forEach(function(entityId) {
if (entityId !== releasedProperties.id) {
var entity = Entities.getEntityProperties(entityId, ['position', 'rotation', 'dimensions']);
var distance = Vec3.distance(releasedProperties.position, entity.position);
var scale = releaseSize / Vec3.length(entity.dimensions);
if (distance < nearest && (scale >= MIN_SCALE && scale <= MAX_SCALE)) {
nearestEntity = entity;
nearest = distance;
}
}
});
return nearestEntity;
}
// Create the 'class'
function MagneticBlock() {}
// Bind pre-emptive events
MagneticBlock.prototype = {
/*
When the script is bound to an entity, preload is the first callback called, with the entityID.
It behaves as the constructor.
*/
preload: function(id) {
/*
We will now merge the grabbable property into any existing userData.
Only the userData property is retrieved here.
*/
var entityProperties = Entities.getEntityProperties(id, ['userData']);
var userData = {
grabbableKey: {}
};
// Check if existing userData field exists.
if (entityProperties.userData && entityProperties.userData.length > 0) {
try {
userData = JSON.parse(entityProperties.userData);
if (!userData.grabbableKey) {
userData.grabbableKey = {}; // If by random chance there is no grabbableKey in the userData.
}
} catch (e) {
// if user data is not valid json, we will simply overwrite it.
}
}
// Object must be triggerable in order to bind the releaseGrab event
userData.grabbableKey.grabbable = true;
// Apply the new properties to entity of id
Entities.editEntity(id, {
userData: JSON.stringify(userData)
});
Script.scriptEnding.connect(function() {
Script.removeEventHandler(id, "releaseGrab", this.releaseGrab);
});
},
releaseGrab: function(entityId) {
// Release grab is called with entityId,
var released = Entities.getEntityProperties(entityId, ["position", "rotation", "dimensions"]);
var target = getNearestValidEntityProperties(released);
if (target !== null) {
// We found nearest, now lets do the snap calculations
// Plays the snap sound between the two objects.
Audio.playSound(SNAPSOUND_SOURCE, {
volume: 1,
position: Vec3.mix(target.position, released.position, 0.5)
});
// Check Nearest Axis
var difference = Vec3.subtract(released.position, target.position);
var relativeDifference = Vec3.multiplyQbyV(Quat.inverse(target.rotation), difference);
var abs = {
x: Math.abs(relativeDifference.x),
y: Math.abs(relativeDifference.y),
z: Math.abs(relativeDifference.z)
};
// Check which offset is greatest and lock the snap to that axis.
var newRelative = {
x: 0,
y: 0,
z: 0
};
if (abs.x >= abs.y && abs.x >= abs.z) {
newRelative.x = target.dimensions.x / 2 + released.dimensions.x / 2;
if (relativeDifference.x < 0) {
newRelative.x = -newRelative.x;
}
} else if (abs.y >= abs.x && abs.y >= abs.z) {
newRelative.y = target.dimensions.y / 2 + released.dimensions.y / 2;
if (relativeDifference.y < 0) {
newRelative.y = -newRelative.y;
}
} else if (abs.z >= abs.x && abs.z >= abs.y) {
newRelative.z = target.dimensions.z / 2 + released.dimensions.z / 2;
if (relativeDifference.z < 0) {
newRelative.z = -newRelative.z;
}
}
// Can be expanded upon to work in nearest 90 degree rotation as well, but was not in spec.
var newPosition = Vec3.multiplyQbyV(target.rotation, newRelative);
Entities.editEntity(entityId, {
// Script relies on the registrationPoint being at the very center of the object. Thus override.
registrationPoint: {
x: 0.5,
y: 0.5,
z: 0.5
},
rotation: target.rotation,
position: Vec3.sum(target.position, newPosition)
});
// Script relies on the registrationPoint being at the very center of the object. Thus override.
Entities.editEntity(target.id, {
registrationPoint: {
x: 0.5,
y: 0.5,
z: 0.5
}
});
}
}
};
return new MagneticBlock();
});

View file

@ -0,0 +1,72 @@
//
// makeBlocks.js
//
// Created by Matti Lahtinen 4/3/2017
// Copyright 2017 High Fidelity, Inc.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Creates multiple "magnetic" blocks with random colors that users can clone and snap together.
(function() {
var MAX_RGB_COMPONENT_VALUE = 256 / 2; // Limit the values to half the maximum.
var MIN_COLOR_VALUE = 127;
var SIZE = 0.3;
var LIFETIME = 600;
var VERTICAL_OFFSET = -0.25;
var ROWS = 3;
var COLUMNS = 3;
// Random Pastel Generator based on Piper's script
function newColor() {
return {
red: randomPastelRGBComponent(),
green: randomPastelRGBComponent(),
blue: randomPastelRGBComponent()
};
}
// Helper functions.
function randomPastelRGBComponent() {
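// Yields a value in [MIN_COLOR_VALUE, MIN_COLOR_VALUE + MAX_RGB_COMPONENT_VALUE - 1], i.e. the brighter half of the range.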
return Math.floor(Math.random() * MAX_RGB_COMPONENT_VALUE) + MIN_COLOR_VALUE;
}
var SCRIPT_URL = Script.resolvePath("./entity_scripts/magneticBlock.js");
var frontVector = Quat.getFront(MyAvatar.orientation);
frontVector.y += VERTICAL_OFFSET;
for (var x = 0; x < COLUMNS; x++) {
for (var y = 0; y < ROWS; y++) {
var frontOffset = {
x: 0,
y: SIZE * y + SIZE,
z: SIZE * x + SIZE
};
Entities.addEntity({
type: "Box",
name: "MagneticBlock-" + y + '-' + x,
dimensions: {
x: SIZE,
y: SIZE,
z: SIZE
},
userData: JSON.stringify({
grabbableKey: {
cloneable: true,
grabbable: true,
cloneLifetime: LIFETIME,
cloneLimit: 9999
}
}),
position: Vec3.sum(MyAvatar.position, Vec3.sum(frontOffset, frontVector)),
color: newColor(),
script: SCRIPT_URL
});
}
}
Script.stop();
})();

View file

@ -115,8 +115,8 @@ void GLMHelpersTests::testSimd() {
a1 = a * b;
b1 = b * a;
glm_mat4_mul((glm_vec4*)&a, (glm_vec4*)&b, (glm_vec4*)&a2);
glm_mat4_mul((glm_vec4*)&b, (glm_vec4*)&a, (glm_vec4*)&b2);
glm_mat4u_mul(a, b, a2);
glm_mat4u_mul(b, a, b2);
{
@ -133,8 +133,8 @@ void GLMHelpersTests::testSimd() {
QElapsedTimer timer;
timer.start();
for (size_t i = 0; i < LOOPS; ++i) {
glm_mat4_mul((glm_vec4*)&a, (glm_vec4*)&b, (glm_vec4*)&a2);
glm_mat4_mul((glm_vec4*)&b, (glm_vec4*)&a, (glm_vec4*)&b2);
glm_mat4u_mul(a, b, a2);
glm_mat4u_mul(b, a, b2);
}
qDebug() << "SIMD " << timer.elapsed();
}

View file

@ -21,6 +21,7 @@ exports.handlers = {
'../../libraries/networking/src',
'../../libraries/animation/src',
'../../libraries/entities/src',
'../../libraries/shared/src'
];
var exts = ['.h', '.cpp'];