Merge branch 'master' of https://github.com/highfidelity/hifi into tuneAvatarInfo

commit 29fff2e556
Author: ZappoMan
Date: 2017-01-23 13:09:59 -08:00
87 changed files with 2623 additions and 690 deletions

View file

@ -12,6 +12,8 @@
#include "AssetServer.h"
#include <thread>
#include <QtCore/QCoreApplication>
#include <QtCore/QCryptographicHash>
#include <QtCore/QDateTime>
@ -21,6 +23,7 @@
#include <QtCore/QJsonDocument>
#include <QtCore/QString>
#include <SharedUtil.h>
#include <ServerPathUtils.h>
#include "NetworkLogging.h"
@ -28,8 +31,44 @@
#include "SendAssetTask.h"
#include "UploadAssetTask.h"
static const uint8_t MIN_CORES_FOR_MULTICORE = 4;
static const uint8_t CPU_AFFINITY_COUNT_HIGH = 2;
static const uint8_t CPU_AFFINITY_COUNT_LOW = 1;
static const int INTERFACE_RUNNING_CHECK_FREQUENCY_MS = 1000;
const QString ASSET_SERVER_LOGGING_TARGET_NAME = "asset-server";
bool interfaceRunning() {
bool result = false;
#ifdef Q_OS_WIN
QSharedMemory sharedMemory { getInterfaceSharedMemoryName() };
result = sharedMemory.attach(QSharedMemory::ReadOnly);
if (result) {
sharedMemory.detach();
}
#endif
return result;
}
void updateConsumedCores() {
static bool wasInterfaceRunning = false;
bool isInterfaceRunning = interfaceRunning();
// If state is unchanged, return early
if (isInterfaceRunning == wasInterfaceRunning) {
return;
}
wasInterfaceRunning = isInterfaceRunning;
auto coreCount = std::thread::hardware_concurrency();
if (isInterfaceRunning) {
coreCount = coreCount > MIN_CORES_FOR_MULTICORE ? CPU_AFFINITY_COUNT_HIGH : CPU_AFFINITY_COUNT_LOW;
}
qDebug() << "Setting max consumed cores to " << coreCount;
setMaxCores(coreCount);
}
AssetServer::AssetServer(ReceivedMessage& message) :
ThreadedAssignment(message),
_taskPool(this)
@ -45,6 +84,20 @@ AssetServer::AssetServer(ReceivedMessage& message) :
packetReceiver.registerListener(PacketType::AssetGetInfo, this, "handleAssetGetInfo");
packetReceiver.registerListener(PacketType::AssetUpload, this, "handleAssetUpload");
packetReceiver.registerListener(PacketType::AssetMappingOperation, this, "handleAssetMappingOperation");
#ifdef Q_OS_WIN
updateConsumedCores();
QTimer* timer = new QTimer(this);
auto timerConnection = connect(timer, &QTimer::timeout, [] {
updateConsumedCores();
});
connect(qApp, &QCoreApplication::aboutToQuit, [this, timerConnection] {
disconnect(timerConnection);
});
timer->setInterval(INTERFACE_RUNNING_CHECK_FREQUENCY_MS);
timer->setTimerType(Qt::CoarseTimer);
timer->start();
#endif
}
void AssetServer::run() {

View file

@ -28,6 +28,7 @@
#include <StDev.h>
#include <UUID.h>
#include "AudioHelpers.h"
#include "AudioRingBuffer.h"
#include "AudioMixerClientData.h"
#include "AvatarAudioStream.h"
@ -68,7 +69,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");
packetReceiver.registerListener(PacketType::NodeMuteRequest, this, "handleNodeMuteRequestPacket");
packetReceiver.registerListener(PacketType::RadiusIgnoreRequest, this, "handleRadiusIgnoreRequestPacket");
packetReceiver.registerListener(PacketType::RequestsDomainListData, this, "handleRequestsDomainListDataPacket");
packetReceiver.registerListener(PacketType::PerAvatarGainSet, this, "handlePerAvatarGainSetDataPacket");
connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
}
@ -186,7 +188,8 @@ void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
clientData->removeHRTFsForNode(killedNode->getUUID());
QUuid killedUUID = killedNode->getUUID();
clientData->removeHRTFsForNode(killedUUID);
}
});
}
@ -240,6 +243,20 @@ void AudioMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> p
sendingNode->parseIgnoreRequestMessage(packet);
}
void AudioMixer::handlePerAvatarGainSetDataPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
if (clientData) {
QUuid listeningNodeUUID = sendingNode->getUUID();
// parse the UUID from the packet
QUuid audioSourceUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
uint8_t packedGain;
packet->readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
clientData->hrtfForStream(audioSourceUUID, QUuid()).setGainAdjustment(gain);
qDebug() << "Setting gain adjustment for hrtf[" << listeningNodeUUID << "][" << audioSourceUUID << "] to " << gain;
}
}
void AudioMixer::handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
sendingNode->parseIgnoreRadiusRequestMessage(packet);
}
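The handler above implies a fixed wire format: 16 bytes of RFC 4122 UUID identifying the audio source, followed by one packed gain byte. As a hedged illustration (not part of this commit), a sender-side sketch might look like the following; the function name sendPerAvatarGain, the soloNodeOfType lookup, and the packFloatGainToByte counterpart to unpackFloatGainFromByte are assumptions, while the packet type and field order come from the handler above.

// Hedged sketch: builds a PerAvatarGainSet packet with the layout the mixer expects.
void sendPerAvatarGain(const QUuid& audioSourceUUID, float gainDB) {
    auto nodeList = DependencyManager::get<NodeList>();
    auto audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer); // assumed lookup
    if (!audioMixer) {
        return; // no audio mixer connected yet
    }
    auto packet = NLPacket::create(PacketType::PerAvatarGainSet,
                                   NUM_BYTES_RFC4122_UUID + sizeof(uint8_t));
    packet->write(audioSourceUUID.toRfc4122());           // 16-byte source UUID
    packet->writePrimitive(packFloatGainToByte(gainDB));  // assumed counterpart of unpackFloatGainFromByte
    nodeList->sendPacket(std::move(packet), *audioMixer);
}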

View file

@ -66,6 +66,7 @@ private slots:
void handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handlePerAvatarGainSetDataPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void start();
void removeHRTFsForFinishedInjector(const QUuid& streamID);

View file

@ -131,7 +131,7 @@ void AudioMixerSlavePool::setNumThreads(int numThreads) {
}
void AudioMixerSlavePool::resize(int numThreads) {
assert(_numThreads == _slaves.size());
assert(_numThreads == (int)_slaves.size());
#ifdef AUDIO_SINGLE_THREADED
qDebug("%s: running single threaded", __FUNCTION__, numThreads);
@ -182,6 +182,6 @@ void AudioMixerSlavePool::resize(int numThreads) {
}
_numThreads = _numStarted = _numFinished = numThreads;
assert(_numThreads == _slaves.size());
assert(_numThreads == (int)_slaves.size());
#endif
}

View file

@ -262,8 +262,12 @@ void AvatarMixer::broadcastAvatarData() {
// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
if (avatar.getSessionDisplayName().isEmpty() && // We haven't set it yet...
nodeData->getReceivedIdentity()) { // ... but we have processed identity (with possible displayName).
if (nodeData->getAvatarSessionDisplayNameMustChange()) {
const QString& existingBaseDisplayName = nodeData->getBaseDisplayName();
if (--_sessionDisplayNames[existingBaseDisplayName].second <= 0) {
_sessionDisplayNames.remove(existingBaseDisplayName);
}
QString baseName = avatar.getDisplayName().trimmed();
const QRegularExpression curses{ "fuck|shit|damn|cock|cunt" }; // POC. We may eventually want something much more elaborate (subscription?).
baseName = baseName.replace(curses, "*"); // Replace rather than remove, so that people have a clue that the person's a jerk.
@ -276,11 +280,14 @@ void AvatarMixer::broadcastAvatarData() {
QPair<int, int>& soFar = _sessionDisplayNames[baseName]; // Inserts and answers 0, 0 if not already present, which is what we want.
int& highWater = soFar.first;
nodeData->setBaseDisplayName(baseName);
avatar.setSessionDisplayName((highWater > 0) ? baseName + "_" + QString::number(highWater) : baseName);
QString sessionDisplayName = (highWater > 0) ? baseName + "_" + QString::number(highWater) : baseName;
avatar.setSessionDisplayName(sessionDisplayName);
highWater++;
soFar.second++; // refcount
nodeData->flagIdentityChange();
sendIdentityPacket(nodeData, node); // Tell new node about its sessionUUID. Others will find out below.
nodeData->setAvatarSessionDisplayNameMustChange(false);
sendIdentityPacket(nodeData, node); // Tell node whose name changed about its new session display name. Others will find out below.
qDebug() << "Giving session display name" << sessionDisplayName << "to node with ID" << node->getUUID();
}
// this is an AGENT we have received head data from
@ -589,7 +596,7 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
if (avatar.processAvatarIdentity(identity)) {
QMutexLocker nodeDataLocker(&nodeData->getMutex());
nodeData->flagIdentityChange();
nodeData->setReceivedIdentity();
nodeData->setAvatarSessionDisplayNameMustChange(true);
}
}
}
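For reference, the _sessionDisplayNames bookkeeping above amounts to a per-base-name high-water mark plus a refcount: the first avatar to claim a base name gets it verbatim, later claimants get a numeric suffix, and the entry is dropped once every holder has renamed or left. A minimal standalone sketch of that scheme (not part of this commit, names invented for illustration):

// Hedged sketch: QHash<baseName, QPair<highWater, refCount>>, mirroring the logic above.
QHash<QString, QPair<int, int>> sessionDisplayNames;

QString claimSessionDisplayName(const QString& baseName) {
    QPair<int, int>& soFar = sessionDisplayNames[baseName]; // inserts {0, 0} if absent
    int& highWater = soFar.first;
    QString name = (highWater > 0) ? baseName + "_" + QString::number(highWater) : baseName;
    highWater++;    // next claimant of this base name gets the next suffix
    soFar.second++; // refcount of avatars currently using this base name
    return name;    // "Alice", then "Alice_1", then "Alice_2", ...
}

void releaseSessionDisplayName(const QString& baseName) {
    if (--sessionDisplayNames[baseName].second <= 0) {
        sessionDisplayNames.remove(baseName);
    }
}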

View file

@ -53,8 +53,8 @@ public:
HRCTime getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = p_high_resolution_clock::now(); }
bool getReceivedIdentity() const { return _gotIdentity; }
void setReceivedIdentity() { _gotIdentity = true; }
bool getAvatarSessionDisplayNameMustChange() const { return _avatarSessionDisplayNameMustChange; }
void setAvatarSessionDisplayNameMustChange(bool set = true) { _avatarSessionDisplayNameMustChange = set; }
void setFullRateDistance(float fullRateDistance) { _fullRateDistance = fullRateDistance; }
float getFullRateDistance() const { return _fullRateDistance; }
@ -132,7 +132,7 @@ private:
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
HRCTime _identityChangeTimestamp;
bool _gotIdentity { false };
bool _avatarSessionDisplayNameMustChange{ false };
float _fullRateDistance = FLT_MAX;
float _maxAvatarDistance = FLT_MAX;

View file

@ -0,0 +1,32 @@
include(ExternalProject)
include(SelectLibraryConfigurations)
set(EXTERNAL_NAME LibOVRPlatform)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/OVRPlatformSDK_v1.10.0.zip
URL_MD5 e6c8264af16d904e6506acd5172fa0a9
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${SOURCE_DIR}/Windows/LibOVRPlatform64_1.lib CACHE TYPE INTERNAL)
else()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${SOURCE_DIR}/Windows/LibOVRPlatform32_1.lib CACHE TYPE INTERNAL)
endif()
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/Include CACHE TYPE INTERNAL)
endif ()
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")

View file

@ -38,10 +38,10 @@ set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/lib CACHE FILEPATH "Location
if (APPLE)
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip5.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5d.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
elseif (WIN32)
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/quazip5.lib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/quazip5.lib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/quazip5d.lib CACHE FILEPATH "Location of QuaZip release library")
else ()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip5.so CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5.so CACHE FILEPATH "Location of QuaZip release library")

View file

@ -0,0 +1,44 @@
#
# FindLibOVRPlatform.cmake
#
# Try to find the LibOVRPlatform library to use the Oculus Platform SDK
#
# You must provide a LIBOVRPLATFORM_ROOT_DIR which contains Windows and Include directories
#
# Once done this will define
#
# LIBOVRPLATFORM_FOUND - system found Oculus Platform SDK
# LIBOVRPLATFORM_INCLUDE_DIRS - the Oculus Platform include directory
# LIBOVRPLATFORM_LIBRARIES - Link this to use Oculus Platform
#
# Created on December 16, 2016 by Stephen Birarda
# Copyright 2016 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
if (WIN32)
# setup hints for LIBOVRPLATFORM search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("LibOVRPlatform")
find_path(LIBOVRPLATFORM_INCLUDE_DIRS OVR_Platform.h PATH_SUFFIXES Include HINTS ${LIBOVRPLATFORM_SEARCH_DIRS})
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(_LIB_NAME LibOVRPlatform64_1.lib)
else()
set(_LIB_NAME LibOVRPlatform32_1.lib)
endif()
find_library(LIBOVRPLATFORM_LIBRARY_RELEASE NAMES ${_LIB_NAME} PATH_SUFFIXES Windows HINTS ${LIBOVRPLATFORM_SEARCH_DIRS})
include(SelectLibraryConfigurations)
select_library_configurations(LIBOVRPLATFORM)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LIBOVRPLATFORM DEFAULT_MSG LIBOVRPLATFORM_INCLUDE_DIRS LIBOVRPLATFORM_LIBRARIES)
mark_as_advanced(LIBOVRPLATFORM_INCLUDE_DIRS LIBOVRPLATFORM_LIBRARIES LIBOVRPLATFORM_SEARCH_DIRS)
endif ()

View file

@ -40,7 +40,7 @@
{
"name": "local_port",
"label": "Local UDP Port",
"help": "This is the local port your domain-server binds to for UDP connections.<br/>Depending on your router, this may need to be changed to run multiple full automatic networking domain-servers in the same network.",
"help": "This is the local port your domain-server binds to for UDP connections.<br/>Depending on your router, this may need to be changed to unique values for each domain-server in order to run multiple full automatic networking domain-servers in the same network. You can use the value 0 to have the domain-server select a random port, which will help in preventing port collisions.",
"default": "40102",
"type": "int",
"advanced": true

View file

@ -12,7 +12,7 @@
<div id="setup-sidebar" class="hidden-xs" data-spy="affix" data-offset-top="55" data-clampedwidth="#setup-sidebar-col">
<script id="list-group-template" type="text/template">
<% _.each(descriptions, function(group){ %>
<% panelID = group.name ? group.name : group.label %>
<% panelID = group.name ? group.name : group.html_id %>
<li>
<a href="#<%- panelID %>" class="list-group-item">
<span class="badge"></span>

View file

@ -780,45 +780,52 @@ void DomainServerSettingsManager::processNodeKickRequestPacket(QSharedPointer<Re
// This function processes the "Get Username from ID" request.
void DomainServerSettingsManager::processUsernameFromIDRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// Before we do any processing on this packet, make sure it comes from a node that is allowed to kick (is an admin)
if (sendingNode->getCanKick()) {
// From the packet, pull the UUID we're identifying
QUuid nodeUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
// From the packet, pull the UUID we're identifying
QUuid nodeUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
if (!nodeUUID.isNull()) {
// First, make sure we actually have a node with this UUID
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
auto matchingNode = limitedNodeList->nodeWithUUID(nodeUUID);
if (!nodeUUID.isNull()) {
// First, make sure we actually have a node with this UUID
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
auto matchingNode = limitedNodeList->nodeWithUUID(nodeUUID);
// If we do have a matching node...
if (matchingNode) {
// If we do have a matching node...
if (matchingNode) {
// Setup the packet
auto usernameFromIDReplyPacket = NLPacket::create(PacketType::UsernameFromIDReply);
QString verifiedUsername;
QUuid machineFingerprint;
// Write the UUID to the packet
usernameFromIDReplyPacket->write(nodeUUID.toRfc4122());
// Check if the sending node has permission to kick (is an admin)
// OR if the message is from a node whose UUID matches the one in the packet
if (sendingNode->getCanKick() || nodeUUID == sendingNode->getUUID()) {
// It's time to figure out the username
QString verifiedUsername = matchingNode->getPermissions().getVerifiedUserName();
// Setup the packet
auto usernameFromIDReplyPacket = NLPacket::create(PacketType::UsernameFromIDReply);
usernameFromIDReplyPacket->write(nodeUUID.toRfc4122());
verifiedUsername = matchingNode->getPermissions().getVerifiedUserName();
usernameFromIDReplyPacket->writeString(verifiedUsername);
// now put in the machine fingerprint
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
QUuid machineFingerprint = nodeData ? nodeData->getMachineFingerprint() : QUuid();
machineFingerprint = nodeData ? nodeData->getMachineFingerprint() : QUuid();
usernameFromIDReplyPacket->write(machineFingerprint.toRfc4122());
qDebug() << "Sending username" << verifiedUsername << "and machine fingerprint" << machineFingerprint << "associated with node" << nodeUUID;
// Ship it!
limitedNodeList->sendPacket(std::move(usernameFromIDReplyPacket), *sendingNode);
} else {
qWarning() << "Node username request received for unknown node. Refusing to process.";
usernameFromIDReplyPacket->writeString(verifiedUsername);
usernameFromIDReplyPacket->write(machineFingerprint.toRfc4122());
}
} else {
qWarning() << "Node username request received for invalid node ID. Refusing to process.";
}
// Write whether or not the user is an admin
bool isAdmin = matchingNode->getCanKick();
usernameFromIDReplyPacket->writePrimitive(isAdmin);
qDebug() << "Sending username" << verifiedUsername << "and machine fingerprint" << machineFingerprint << "associated with node" << nodeUUID << ". Node admin status: " << isAdmin;
// Ship it!
limitedNodeList->sendPacket(std::move(usernameFromIDReplyPacket), *sendingNode);
} else {
qWarning() << "Node username request received for unknown node. Refusing to process.";
}
} else {
qWarning() << "Refusing to process a username request packet from node" << uuidStringWithoutCurlyBraces(sendingNode->getUUID())
<< "that does not have kick permissions.";
qWarning() << "Node username request received for invalid node ID. Refusing to process.";
}
}
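The reply built above now carries four fields in order: the node UUID, the verified username, the machine fingerprint, and the new admin flag. A hedged sketch of a matching read side follows (the function name and the readString call are assumptions; only the field order is taken from the write calls above):

// Hedged sketch: reads the fields in the order processUsernameFromIDRequestPacket writes them.
void handleUsernameFromIDReply(QSharedPointer<ReceivedMessage> message) {
    QUuid nodeUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
    QString username = message->readString();   // assumed string read helper
    QUuid machineFingerprint = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
    bool isAdmin = false;
    message->readPrimitive(&isAdmin);           // new admin flag added by this change
    qDebug() << "Username for" << nodeUUID << "is" << username
             << "fingerprint:" << machineFingerprint << "admin:" << isAdmin;
}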

View file

@ -15,7 +15,13 @@ import "../styles-uit"
Item {
property alias text: popupText.text
property alias headerGlyph: headerGlyph.text
property alias headerText: headerText.text
property real popupRadius: hifi.dimensions.borderRadius
property real headerTextPixelSize: 22
property real popupTextPixelSize: 16
FontLoader { id: ralewayRegular; source: "../../fonts/Raleway-Regular.ttf"; }
FontLoader { id: ralewaySemiBold; source: "../../fonts/Raleway-SemiBold.ttf"; }
visible: false
id: letterbox
anchors.fill: parent
@ -27,19 +33,79 @@ Item {
}
Rectangle {
width: Math.max(parent.width * 0.75, 400)
height: popupText.contentHeight*1.5
height: contentContainer.height + 50
anchors.centerIn: parent
radius: popupRadius
color: "white"
FiraSansSemiBold {
id: popupText
size: hifi.fontSizes.textFieldInput
color: hifi.colors.darkGray
horizontalAlignment: Text.AlignHCenter
anchors.fill: parent
anchors.leftMargin: 15
anchors.rightMargin: 15
wrapMode: Text.WordWrap
Item {
id: contentContainer
width: parent.width - 60
height: childrenRect.height
anchors.centerIn: parent
Item {
id: popupHeaderContainer
visible: headerText.text !== "" || headerGlyph.text !== ""
height: 30
// Anchors
anchors.top: parent.top
anchors.left: parent.left
anchors.right: parent.right
// Header Glyph
HiFiGlyphs {
id: headerGlyph
visible: headerGlyph.text !== ""
// Size
height: parent.height
// Anchors
anchors.left: parent.left
anchors.leftMargin: -15
// Text Size
size: headerTextPixelSize*2.5
// Style
horizontalAlignment: Text.AlignHLeft
verticalAlignment: Text.AlignVCenter
color: hifi.colors.darkGray
}
// Header Text
Text {
id: headerText
visible: headerText.text !== ""
// Size
height: parent.height
// Anchors
anchors.left: headerGlyph.right
anchors.leftMargin: -5
// Text Size
font.pixelSize: headerTextPixelSize
// Style
font.family: ralewaySemiBold.name
color: hifi.colors.darkGray
horizontalAlignment: Text.AlignHLeft
verticalAlignment: Text.AlignVCenter
wrapMode: Text.WordWrap
textFormat: Text.StyledText
}
}
// Popup Text
Text {
id: popupText
// Size
width: parent.width
// Anchors
anchors.top: popupHeaderContainer.visible ? popupHeaderContainer.bottom : parent.top
anchors.topMargin: popupHeaderContainer.visible ? 15 : 0
anchors.left: parent.left
anchors.right: parent.right
// Text alignment
verticalAlignment: Text.AlignVCenter
horizontalAlignment: Text.AlignHLeft
// Style
font.pixelSize: popupTextPixelSize
font.family: ralewayRegular.name
color: hifi.colors.darkGray
wrapMode: Text.WordWrap
textFormat: Text.StyledText
}
}
}
MouseArea {

View file

@ -10,35 +10,36 @@
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Controls.Styles 1.4
import QtGraphicalEffects 1.0
import "../styles-uit"
Row {
Item {
id: thisNameCard
// Spacing
spacing: 10
// Anchors
anchors.top: parent.top
anchors {
topMargin: (parent.height - contentHeight)/2
bottomMargin: (parent.height - contentHeight)/2
verticalCenter: parent.verticalCenter
leftMargin: 10
rightMargin: 10
}
// Properties
property int contentHeight: 50
property string uuid: ""
property string displayName: ""
property string userName: ""
property int displayTextHeight: 18
property real displayNameTextPixelSize: 18
property int usernameTextHeight: 12
property real audioLevel: 0.0
property bool isMyCard: false
property bool selected: false
property bool isAdmin: false
/* User image commented out for now - will probably be re-introduced later.
Column {
id: avatarImage
// Size
height: contentHeight
height: parent.height
width: height
Image {
id: userImage
@ -49,26 +50,184 @@ Row {
}
}
*/
Column {
Item {
id: textContainer
// Size
width: parent.width - /*avatarImage.width - */parent.anchors.leftMargin - parent.anchors.rightMargin - parent.spacing
height: contentHeight
width: parent.width - /*avatarImage.width - parent.spacing - */parent.anchors.leftMargin - parent.anchors.rightMargin
height: childrenRect.height
anchors.verticalCenter: parent.verticalCenter
// DisplayName Text
FiraSansSemiBold {
id: displayNameText
// Properties
text: thisNameCard.displayName
elide: Text.ElideRight
// DisplayName field for my card
Rectangle {
id: myDisplayName
visible: isMyCard
// Size
width: parent.width + 70
height: 35
// Anchors
anchors.top: parent.top
anchors.left: parent.left
anchors.leftMargin: -10
// Style
color: hifi.colors.textFieldLightBackground
border.color: hifi.colors.blueHighlight
border.width: 0
TextInput {
id: myDisplayNameText
// Properties
text: thisNameCard.displayName
maximumLength: 256
clip: true
// Size
width: parent.width
height: parent.height
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: 10
anchors.right: parent.right
anchors.rightMargin: editGlyph.width + editGlyph.anchors.rightMargin
// Style
color: hifi.colors.darkGray
FontLoader { id: firaSansSemiBold; source: "../../fonts/FiraSans-SemiBold.ttf"; }
font.family: firaSansSemiBold.name
font.pixelSize: displayNameTextPixelSize
selectionColor: hifi.colors.blueHighlight
selectedTextColor: "black"
// Text Positioning
verticalAlignment: TextInput.AlignVCenter
horizontalAlignment: TextInput.AlignLeft
// Signals
onEditingFinished: {
pal.sendToScript({method: 'displayNameUpdate', params: text})
cursorPosition = 0
focus = false
myDisplayName.border.width = 0
color = hifi.colors.darkGray
}
}
MouseArea {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
hoverEnabled: true
onClicked: {
myDisplayName.border.width = 1
myDisplayNameText.focus ? myDisplayNameText.cursorPosition = myDisplayNameText.positionAt(mouseX, mouseY, TextInput.CursorOnCharacter) : myDisplayNameText.selectAll();
myDisplayNameText.focus = true
myDisplayNameText.color = "black"
}
onDoubleClicked: {
myDisplayNameText.selectAll();
myDisplayNameText.focus = true;
}
onEntered: myDisplayName.color = hifi.colors.lightGrayText
onExited: myDisplayName.color = hifi.colors.textFieldLightBackground
}
// Edit pencil glyph
HiFiGlyphs {
id: editGlyph
text: hifi.glyphs.editPencil
// Text Size
size: displayNameTextPixelSize*1.5
// Anchors
anchors.right: parent.right
anchors.rightMargin: 5
anchors.verticalCenter: parent.verticalCenter
// Style
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
color: hifi.colors.baseGray
}
}
// Spacer for DisplayName for my card
Item {
id: myDisplayNameSpacer
width: 1
height: 4
// Anchors
anchors.top: myDisplayName.bottom
}
// DisplayName container for others' cards
Item {
id: displayNameContainer
visible: !isMyCard
// Size
width: parent.width
// Text Size
size: thisNameCard.displayTextHeight
// Text Positioning
verticalAlignment: Text.AlignVCenter
// Style
color: hifi.colors.darkGray
height: displayNameTextPixelSize + 4
// Anchors
anchors.top: parent.top
anchors.left: parent.left
// DisplayName Text for others' cards
FiraSansSemiBold {
id: displayNameText
// Properties
text: thisNameCard.displayName
elide: Text.ElideRight
// Size
width: isAdmin ? Math.min(displayNameTextMetrics.tightBoundingRect.width + 8, parent.width - adminLabelText.width - adminLabelQuestionMark.width + 8) : parent.width
// Anchors
anchors.top: parent.top
anchors.left: parent.left
// Text Size
size: displayNameTextPixelSize
// Text Positioning
verticalAlignment: Text.AlignVCenter
// Style
color: hifi.colors.darkGray
}
TextMetrics {
id: displayNameTextMetrics
font: displayNameText.font
text: displayNameText.text
}
// "ADMIN" label for other users' cards
RalewaySemiBold {
id: adminLabelText
visible: isAdmin
text: "ADMIN"
// Text size
size: displayNameText.size - 4
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: displayNameText.right
// Style
font.capitalization: Font.AllUppercase
color: hifi.colors.redHighlight
// Alignment
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignTop
}
// This Rectangle refers to the [?] popup button next to "ADMIN"
Item {
id: adminLabelQuestionMark
visible: isAdmin
// Size
width: 20
height: displayNameText.height
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: adminLabelText.right
RalewayRegular {
id: adminLabelQuestionMarkText
text: "[?]"
size: adminLabelText.size
font.capitalization: Font.AllUppercase
color: hifi.colors.redHighlight
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
anchors.fill: parent
}
MouseArea {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
hoverEnabled: true
onClicked: letterbox(hifi.glyphs.question,
"Domain Admin",
"This user is an admin on this domain. Admins can <b>Silence</b> and <b>Ban</b> other users at their discretion - so be extra nice!")
onEntered: adminLabelQuestionMarkText.color = "#94132e"
onExited: adminLabelQuestionMarkText.color = hifi.colors.redHighlight
}
}
}
// UserName Text
@ -80,6 +239,8 @@ Row {
visible: thisNameCard.displayName
// Size
width: parent.width
// Anchors
anchors.top: isMyCard ? myDisplayNameSpacer.bottom : displayNameContainer.bottom
// Text Size
size: thisNameCard.usernameTextHeight
// Text Positioning
@ -90,25 +251,56 @@ Row {
// Spacer
Item {
id: userNameSpacer
height: 4
width: parent.width
// Anchors
anchors.top: userNameText.bottom
}
// VU Meter
Rectangle { // CHANGEME to the appropriate type!
Rectangle {
id: nameCardVUMeter
// Size
width: parent.width
width: isMyCard ? myDisplayName.width - 70 : ((gainSlider.value - gainSlider.minimumValue)/(gainSlider.maximumValue - gainSlider.minimumValue)) * parent.width
height: 8
// Anchors
anchors.top: userNameSpacer.bottom
// Style
radius: 4
color: "#c5c5c5"
// Rectangle for the zero-gain point on the VU meter
Rectangle {
id: vuMeterZeroGain
visible: gainSlider.visible
// Size
width: 4
height: 18
// Style
color: hifi.colors.darkGray
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: (-gainSlider.minimumValue)/(gainSlider.maximumValue - gainSlider.minimumValue) * gainSlider.width - 4
}
// Rectangle for the VU meter line
Rectangle {
id: vuMeterLine
width: gainSlider.width
visible: gainSlider.visible
// Style
color: vuMeterBase.color
radius: nameCardVUMeter.radius
height: nameCardVUMeter.height / 2
anchors.verticalCenter: nameCardVUMeter.verticalCenter
}
// Rectangle for the VU meter base
Rectangle {
id: vuMeterBase
// Anchors
anchors.fill: parent
// Style
color: "#c5c5c5"
color: parent.color
radius: parent.radius
}
// Rectangle for the VU meter audio level
@ -117,7 +309,7 @@ Row {
// Size
width: (thisNameCard.audioLevel) * parent.width
// Style
color: "#c5c5c5"
color: parent.color
radius: parent.radius
// Anchors
anchors.bottom: parent.bottom
@ -138,5 +330,73 @@ Row {
}
}
}
// Per-Avatar Gain Slider
Slider {
id: gainSlider
// Size
width: parent.width
height: 14
// Anchors
anchors.verticalCenter: nameCardVUMeter.verticalCenter
// Properties
visible: !isMyCard && selected
value: pal.gainSliderValueDB[uuid] ? pal.gainSliderValueDB[uuid] : 0.0
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
updateValueWhileDragging: true
onValueChanged: updateGainFromQML(uuid, value, false)
onPressedChanged: {
if (!pressed) {
updateGainFromQML(uuid, value, true)
}
}
MouseArea {
anchors.fill: parent
onWheel: {
// Do nothing.
}
onDoubleClicked: {
gainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
mouse.accepted = false
}
onReleased: {
// the above mouse.accepted seems to make this
// never get called, nonetheless...
mouse.accepted = false
}
}
style: SliderStyle {
groove: Rectangle {
color: "#c5c5c5"
implicitWidth: gainSlider.width
implicitHeight: 4
radius: 2
opacity: 0
}
handle: Rectangle {
anchors.centerIn: parent
color: (control.pressed || control.hovered) ? "#00b4ef" : "#8F8F8F"
implicitWidth: 10
implicitHeight: 16
}
}
}
}
function updateGainFromQML(avatarUuid, sliderValue, isReleased) {
if (isReleased || pal.gainSliderValueDB[avatarUuid] !== sliderValue) {
pal.gainSliderValueDB[avatarUuid] = sliderValue;
var data = {
sessionId: avatarUuid,
gain: sliderValue,
isReleased: isReleased
};
pal.sendToScript({method: 'updateGain', params: data});
}
}
}

View file

@ -24,14 +24,30 @@ Rectangle {
// Style
color: "#E3E3E3"
// Properties
property int myCardHeight: 70
property int myCardHeight: 90
property int rowHeight: 70
property int actionButtonWidth: 75
property int nameCardWidth: palContainer.width - actionButtonWidth*(iAmAdmin ? 4 : 2) - 4 - hifi.dimensions.scrollbarBackgroundWidth
property var myData: ({displayName: "", userName: "", audioLevel: 0.0}) // valid dummy until set
property var myData: ({displayName: "", userName: "", audioLevel: 0.0, admin: true}) // valid dummy until set
property var ignored: ({}); // Keep a local list of ignored avatars & their data. Necessary because HashMap is slow to respond after ignoring.
property var userModelData: [] // This simple list is essentially a mirror of the userModel listModel without all the extra complexities.
property bool iAmAdmin: false
// Keep a local list of per-avatar gainSliderValueDBs. Far faster than fetching this data from the server.
// NOTE: if another script modifies the per-avatar gain, this value won't be accurate!
property var gainSliderValueDB: ({});
// The letterbox used for popup messages
LetterboxMessage {
id: letterboxMessage
z: 999 // Force the popup on top of everything else
}
function letterbox(headerGlyph, headerText, message) {
letterboxMessage.headerGlyph = headerGlyph
letterboxMessage.headerText = headerText
letterboxMessage.text = message
letterboxMessage.visible = true
letterboxMessage.popupRadius = 0
}
// This is the container for the PAL
Rectangle {
@ -51,7 +67,7 @@ Rectangle {
id: myInfo
// Size
width: palContainer.width
height: myCardHeight + 20
height: myCardHeight
// Style
color: pal.color
// Anchors
@ -65,6 +81,7 @@ Rectangle {
displayName: myData.displayName
userName: myData.userName
audioLevel: myData.audioLevel
isMyCard: true
// Size
width: nameCardWidth
height: parent.height
@ -172,8 +189,6 @@ Rectangle {
TableViewColumn {
visible: iAmAdmin
role: "kick"
// The hacky spaces used to center text over the button, since I don't know how to apply a margin
// to column header text.
title: "BAN"
width: actionButtonWidth
movable: false
@ -203,9 +218,12 @@ Rectangle {
id: nameCard
// Properties
displayName: styleData.value
userName: model && model.userName
audioLevel: model && model.audioLevel
userName: model ? model.userName : ""
audioLevel: model ? model.audioLevel : 0.0
visible: !isCheckBox && !isButton
uuid: model ? model.sessionId : ""
selected: styleData.selected
isAdmin: model && model.admin
// Size
width: nameCardWidth
height: parent.height
@ -223,15 +241,16 @@ Rectangle {
id: actionCheckBox
visible: isCheckBox
anchors.centerIn: parent
checked: model[styleData.role]
checked: model ? model[styleData.role] : false
// If this is a "Personal Mute" checkbox, disable the checkbox if the "Ignore" checkbox is checked.
enabled: !(styleData.role === "personalMute" && model["ignore"])
enabled: !(styleData.role === "personalMute" && (model ? model["ignore"] : true))
boxSize: 24
onClicked: {
var newValue = !model[styleData.role]
userModel.setProperty(model.userIndex, styleData.role, newValue)
userModelData[model.userIndex][styleData.role] = newValue // Defensive programming
Users[styleData.role](model.sessionId, newValue)
UserActivityLogger["palAction"](newValue ? styleData.role : "un-" + styleData.role, model.sessionId)
if (styleData.role === "ignore") {
userModel.setProperty(model.userIndex, "personalMute", newValue)
userModelData[model.userIndex]["personalMute"] = newValue // Defensive programming
@ -258,6 +277,7 @@ Rectangle {
height: 24
onClicked: {
Users[styleData.role](model.sessionId)
UserActivityLogger["palAction"](styleData.role, model.sessionId)
if (styleData.role === "kick") {
// Just for now, while we cannot undo "Ban":
userModel.remove(model.userIndex)
@ -331,11 +351,6 @@ Rectangle {
visible: iAmAdmin
color: hifi.colors.lightGrayText
}
function letterbox(message) {
letterboxMessage.text = message;
letterboxMessage.visible = true
}
// This Rectangle refers to the [?] popup button next to "NAMES"
Rectangle {
color: hifi.colors.tableBackgroundLight
@ -359,9 +374,11 @@ Rectangle {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
hoverEnabled: true
onClicked: letterbox("Bold names in the list are Avatar Display Names.\n" +
"If a Display Name isn't set, a unique Session Display Name is assigned." +
"\n\nAdministrators of this domain can also see the Username or Machine ID associated with each avatar present.")
onClicked: letterbox(hifi.glyphs.question,
"Display Names",
"Bold names in the list are <b>avatar display names</b>.<br>" +
"If a display name isn't set, a unique <b>session display name</b> is assigned." +
"<br><br>Administrators of this domain can also see the <b>username</b> or <b>machine ID</b> associated with each avatar present.")
onEntered: helpText.color = hifi.colors.baseGrayHighlight
onExited: helpText.color = hifi.colors.darkGray
}
@ -390,15 +407,30 @@ Rectangle {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
hoverEnabled: true
onClicked: letterbox('Silencing a user mutes their microphone. Silenced users can unmute themselves by clicking the "UNMUTE" button on their HUD.\n\n' +
"Banning a user will remove them from this domain and prevent them from returning. You can un-ban users from your domain's settings page.)")
onClicked: letterbox(hifi.glyphs.question,
"Admin Actions",
"<b>Silence</b> mutes a user's microphone. Silenced users can unmute themselves by clicking &quot;UNMUTE&quot; on their toolbar.<br><br>" +
"<b>Ban</b> removes a user from this domain and prevents them from returning. Admins can un-ban users from the Sandbox Domain Settings page.")
onEntered: adminHelpText.color = "#94132e"
onExited: adminHelpText.color = hifi.colors.redHighlight
}
}
LetterboxMessage {
id: letterboxMessage
}
// Timer used when selecting table rows that aren't yet present in the model
// (i.e. when selecting avatars using edit.js or sphere overlays)
Timer {
property bool selected // Selected or deselected?
property int userIndex // The userIndex of the avatar we want to select
id: selectionTimer
onTriggered: {
if (selected) {
table.selection.clear(); // for now, no multi-select
table.selection.select(userIndex);
table.positionViewAtRow(userIndex, ListView.Beginning);
} else {
table.selection.deselect(userIndex);
}
}
}
function findSessionIndex(sessionId, optionalData) { // no findIndex in .qml
@ -437,19 +469,30 @@ Rectangle {
case 'select':
var sessionIds = message.params[0];
var selected = message.params[1];
var alreadyRefreshed = message.params[2];
var userIndex = findSessionIndex(sessionIds[0]);
if (sessionIds.length > 1) {
letterbox('Only one user can be selected at a time.');
letterbox("", "", 'Only one user can be selected at a time.');
} else if (userIndex < 0) {
letterbox('The last editor is not among this list of users.');
} else {
if (selected) {
table.selection.clear(); // for now, no multi-select
table.selection.select(userIndex);
table.positionViewAtRow(userIndex, ListView.Visible);
// If we've already refreshed the PAL and the avatar still isn't present in the model...
if (alreadyRefreshed === true) {
letterbox('', '', 'The last editor of this object is either you or not among this list of users.');
} else {
table.selection.deselect(userIndex);
pal.sendToScript({method: 'refresh', params: message.params});
}
} else {
// If we've already refreshed the PAL and found the avatar in the model
if (alreadyRefreshed === true) {
// Wait a little bit before trying to actually select the avatar in the table
selectionTimer.interval = 250;
} else {
// If we've found the avatar in the model and didn't need to refresh,
// select the avatar in the table immediately
selectionTimer.interval = 0;
}
selectionTimer.selected = selected;
selectionTimer.userIndex = userIndex;
selectionTimer.start();
}
break;
// Received an "updateUsername()" request from the JS
@ -458,6 +501,7 @@ Rectangle {
var userId = message.params[0];
// The text that goes in the userName field is the second parameter in the message.
var userName = message.params[1];
var admin = message.params[2];
// If the userId is empty, we're updating "myData".
if (!userId) {
myData.userName = userName;
@ -469,8 +513,9 @@ Rectangle {
// Set the userName appropriately
userModel.setProperty(userIndex, "userName", userName);
userModelData[userIndex].userName = userName; // Defensive programming
} else {
console.log("updateUsername() called with unknown UUID: ", userId);
// Set the admin status appropriately
userModel.setProperty(userIndex, "admin", admin);
userModelData[userIndex].admin = admin; // Defensive programming
}
}
break;
@ -486,14 +531,17 @@ Rectangle {
if (userIndex != -1) {
userModel.setProperty(userIndex, "audioLevel", audioLevel);
userModelData[userIndex].audioLevel = audioLevel; // Defensive programming
} else {
console.log("updateUsername() called with unknown UUID: ", userId);
}
}
}
break;
case 'clearIgnored':
case 'clearLocalQMLData':
ignored = {};
gainSliderValueDB = {};
break;
case 'avatarDisconnected':
var sessionID = message.params[0];
delete ignored[sessionID];
break;
default:
console.log('Unrecognized message:', JSON.stringify(message));

View file

@ -312,10 +312,11 @@ Item {
readonly property string error: "="
readonly property string settings: "@"
readonly property string trash: "{"
readonly property string objectGroup: "&#xe000;"
readonly property string objectGroup: "\ue000"
readonly property string cm: "}"
readonly property string msvg79: "~"
readonly property string deg: "\\"
readonly property string px: "|"
readonly property string editPencil: "\ue00d"
}
}

View file

@ -252,7 +252,7 @@ public:
static const unsigned long MAX_HEARTBEAT_AGE_USECS = 30 * USECS_PER_SECOND;
static const int WARNING_ELAPSED_HEARTBEAT = 500 * USECS_PER_MSEC; // warn if elapsed heartbeat average is large
static const int HEARTBEAT_SAMPLES = 100000; // ~5 seconds worth of samples
// Set the heartbeat on launch
DeadlockWatchdogThread() {
setObjectName("Deadlock Watchdog");
@ -616,7 +616,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_window->setWindowTitle("Interface");
Model::setAbstractViewStateInterface(this); // The model class will sometimes need to know view state details from us
auto nodeList = DependencyManager::get<NodeList>();
// Set up a watchdog thread to intentionally crash the application on deadlocks
@ -637,6 +637,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
qCDebug(interfaceapp) << "[VERSION] We will use DEVELOPMENT global services.";
#endif
// set the OCULUS_STORE property so the oculus plugin can know if we ran from the Oculus Store
static const QString OCULUS_STORE_ARG = "--oculus-store";
setProperty(hifi::properties::OCULUS_STORE, arguments().indexOf(OCULUS_STORE_ARG) != -1);
static const QString NO_UPDATER_ARG = "--no-updater";
static const bool noUpdater = arguments().indexOf(NO_UPDATER_ARG) != -1;
@ -697,7 +700,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
audioIO->setPositionGetter([]{
auto avatarManager = DependencyManager::get<AvatarManager>();
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
return myAvatar ? myAvatar->getPositionForAudio() : Vectors::ZERO;
});
audioIO->setOrientationGetter([]{
@ -880,7 +883,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
#ifdef Q_OS_MAC
auto cursorTarget = _window; // OSX doesn't seem to provide for hiding the cursor only on the GL widget
#else
// On windows and linux, hiding the top level cursor also means it's invisible when hovering over the
// window menu, which is a pain, so only hide it for the GL surface
auto cursorTarget = _glWidget;
#endif
@ -1125,7 +1128,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
loadSettings();
// Now that we've loaded the menu and thus switched to the previous display plugin
// we can unlock the desktop repositioning code, since all the positions will be
// relative to the desktop size for this plugin
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->getDesktop()->setProperty("repositionLocked", false);
@ -1235,8 +1238,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// Add periodic checks to send user activity data
static int CHECK_NEARBY_AVATARS_INTERVAL_MS = 10000;
static int SEND_STATS_INTERVAL_MS = 10000;
static int NEARBY_AVATAR_RADIUS_METERS = 10;
// setup the stats interval depending on if the 1s faster heartbeat was requested
static const QString FAST_STATS_ARG = "--fast-heartbeat";
static int SEND_STATS_INTERVAL_MS = arguments().indexOf(FAST_STATS_ARG) != -1 ? 1000 : 10000;
static glm::vec3 lastAvatarPosition = myAvatar->getPosition();
static glm::mat4 lastHMDHeadPose = getHMDSensorPose();
@ -1598,7 +1604,7 @@ void Application::checkChangeCursor() {
#ifdef Q_OS_MAC
auto cursorTarget = _window; // OSX doesn't seem to provide for hiding the cursor only on the GL widget
#else
// On windows and linux, hiding the top level cursor also means it's invisible when hovering over the
// window menu, which is a pain, so only hide it for the GL surface
auto cursorTarget = _glWidget;
#endif
@ -1785,7 +1791,7 @@ Application::~Application() {
#endif
// The window takes ownership of the menu, so this has the side effect of destroying it.
_window->setMenuBar(nullptr);
_window->deleteLater();
// Can't log to file passed this point, FileLogger about to be deleted
@ -1811,10 +1817,10 @@ void Application::initializeGL() {
_glWidget->makeCurrent();
gpu::Context::init<gpu::gl::GLBackend>();
qApp->setProperty(hifi::properties::gl::MAKE_PROGRAM_CALLBACK,
QVariant::fromValue((void*)(&gpu::gl::GLBackend::makeProgram)));
_gpuContext = std::make_shared<gpu::Context>();
// The gpu context can make child contexts for transfers, so
// we need to restore primary rendering context
_glWidget->makeCurrent();
@ -1828,7 +1834,7 @@ void Application::initializeGL() {
assert(items.canCast<RenderFetchCullSortTask::Output>());
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
if (QProcessEnvironment::systemEnvironment().contains(RENDER_FORWARD)) {
_renderEngine->addJob<RenderForwardTask>("RenderForwardTask", items.get<RenderFetchCullSortTask::Output>());
_renderEngine->addJob<RenderForwardTask>("Forward", items.get<RenderFetchCullSortTask::Output>());
} else {
_renderEngine->addJob<RenderDeferredTask>("RenderDeferredTask", items.get<RenderFetchCullSortTask::Output>());
}
@ -1931,6 +1937,8 @@ void Application::initializeUi() {
rootContext->setContextProperty("AvatarList", DependencyManager::get<AvatarManager>().data());
rootContext->setContextProperty("Users", DependencyManager::get<UsersScriptingInterface>().data());
rootContext->setContextProperty("UserActivityLogger", DependencyManager::get<UserActivityLoggerScriptingInterface>().data());
rootContext->setContextProperty("Camera", &_myCamera);
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
@ -2034,7 +2042,7 @@ void Application::paintGL() {
// FIXME not needed anymore?
_offscreenContext->makeCurrent();
// If a display plugin loses it's underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_frameCount)) {
_inPaint = false;
@ -2846,7 +2854,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
if (isMirrorChecked) {
// if we got here without coming in from a non-Full Screen mirror case, then our
// _returnFromFullScreenMirrorTo is unknown. In that case we'll go to the old
// behavior of returning to ThirdPerson
if (_returnFromFullScreenMirrorTo.isEmpty()) {
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
@ -3020,7 +3028,7 @@ void Application::mouseMoveEvent(QMouseEvent* event) {
maybeToggleMenuVisible(event);
auto& compositor = getApplicationCompositor();
// if this is a real mouse event, and we're in HMD mode, then we should use it to move the
// compositor reticle
// handleRealMouseMoveEvent() will return true, if we shouldn't process the event further
if (!compositor.fakeEventActive() && compositor.handleRealMouseMoveEvent()) {
@ -3392,6 +3400,8 @@ void Application::idle(float nsecsElapsed) {
PROFILE_COUNTER(app, "cpuSystem", { { "system", kernelUserAndSystem.z } });
#endif
auto displayPlugin = getActiveDisplayPlugin();
if (displayPlugin) {
PROFILE_COUNTER_IF_CHANGED(app, "present", float, displayPlugin->presentRate());
@ -3401,9 +3411,15 @@ void Application::idle(float nsecsElapsed) {
PROFILE_COUNTER_IF_CHANGED(app, "pendingDownloads", int, ResourceCache::getPendingRequestCount());
PROFILE_COUNTER_IF_CHANGED(app, "currentProcessing", int, DependencyManager::get<StatTracker>()->getStat("Processing").toInt());
PROFILE_COUNTER_IF_CHANGED(app, "pendingProcessing", int, DependencyManager::get<StatTracker>()->getStat("PendingProcessing").toInt());
auto renderConfig = _renderEngine->getConfiguration();
PROFILE_COUNTER_IF_CHANGED(render, "gpuTime", float, (float)_gpuContext->getFrameTimerGPUAverage());
PROFILE_COUNTER(render_detail, "gpuTimes", {
{ "OpaqueRangeTimer", renderConfig->getConfig("OpaqueRangeTimer")->property("gpuRunTime") },
{ "LinearDepth", renderConfig->getConfig("LinearDepth")->property("gpuRunTime") },
{ "SurfaceGeometry", renderConfig->getConfig("SurfaceGeometry")->property("gpuRunTime") },
{ "RenderDeferred", renderConfig->getConfig("RenderDeferred")->property("gpuRunTime") },
{ "ToneAndPostRangeTimer", renderConfig->getConfig("ToneAndPostRangeTimer")->property("gpuRunTime") }
});
PROFILE_RANGE(app, __FUNCTION__);
@ -4109,7 +4125,7 @@ void Application::setKeyboardFocusEntity(EntityItemID entityItemID) {
}
_lastAcceptedKeyPress = usecTimestampNow();
setKeyboardFocusHighlight(entity->getPosition(), entity->getRotation(),
entity->getDimensions() * FOCUS_HIGHLIGHT_EXPANSION_FACTOR);
}
}
@ -4700,7 +4716,7 @@ void Application::queryOctree(NodeType_t serverType, PacketType packetType, Node
_octreeQuery.setMaxQueryPacketsPerSecond(0);
}
// if asked to forceResend, then set the query's position/orientation to be degenerate in a manner
// that will cause our next query to be guarenteed to be different and the server will resend to us
if (forceResend) {
_octreeQuery.setCameraPosition(glm::vec3(-0.1, -0.1, -0.1));
@ -5295,15 +5311,17 @@ bool Application::nearbyEntitiesAreReadyForPhysics() {
if (_nearbyEntitiesStabilityCount >= MINIMUM_NEARBY_ENTITIES_STABILITY_COUNT) {
// We've seen the same number of nearby entities for several stats packets in a row. assume we've got all
// the local entities.
bool result = true;
foreach (EntityItemPointer entity, entities) {
if (entity->shouldBePhysical() && !entity->isReadyToComputeShape()) {
static QString repeatedMessage =
LogHandler::getInstance().addRepeatedMessageRegex("Physics disabled until entity loads: .*");
qCDebug(interfaceapp) << "Physics disabled until entity loads: " << entity->getID() << entity->getName();
return false;
// don't break here because we want all the relevant entities to start their downloads
result = false;
}
}
return true;
return result;
}
return false;
}
@ -5834,7 +5852,7 @@ void Application::addAssetToWorldWithNewMapping(QString filePath, QString mappin
mapping = mapping.insert(mapping.lastIndexOf("."), "-" + QString::number(copy));
addAssetToWorldWithNewMapping(filePath, mapping, copy);
} else {
QString errorInfo = "Too many copies of asset name: "
QString errorInfo = "Too many copies of asset name: "
+ mapping.left(mapping.length() - QString::number(copy).length() - 1);
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
@ -5901,7 +5919,7 @@ void Application::addAssetToWorldAddEntity(QString filePath, QString mapping) {
// Note: Model dimensions are not available here; model is scaled per FBX mesh in RenderableModelEntityItem::update() later
// on. But FBX dimensions may be in cm, so we monitor for the dimension change and rescale again if warranted.
if (entityID == QUuid()) {
QString errorInfo = "Could not add model " + mapping + " to world.";
qWarning(interfaceapp) << "Could not add model to world: " + errorInfo;
@ -6365,7 +6383,7 @@ glm::uvec2 Application::getCanvasSize() const {
}
QRect Application::getRenderingGeometry() const {
auto geometry = _glWidget->geometry();
auto topLeft = geometry.topLeft();
auto topLeftScreen = _glWidget->mapToGlobal(topLeft);
geometry.moveTopLeft(topLeftScreen);
@ -6728,8 +6746,8 @@ bool Application::makeRenderingContextCurrent() {
return _offscreenContext->makeCurrent();
}
bool Application::isForeground() const {
return _isForeground && !_window->isMinimized();
}
void Application::sendMousePressOnEntity(QUuid id, PointerEvent event) {

View file

@ -62,7 +62,7 @@ const glm::vec3 HAND_TO_PALM_OFFSET(0.0f, 0.12f, 0.08f);
namespace render {
template <> const ItemKey payloadGetKey(const AvatarSharedPointer& avatar) {
return ItemKey::Builder::opaqueShape();
return ItemKey::Builder::opaqueShape().withTypeMeta();
}
template <> const Item::Bound payloadGetBound(const AvatarSharedPointer& avatar) {
return static_pointer_cast<Avatar>(avatar)->getBounds();
@ -74,6 +74,15 @@ namespace render {
avatarPtr->render(args, qApp->getCamera()->getPosition());
}
}
template <> uint32_t metaFetchMetaSubItems(const AvatarSharedPointer& avatar, ItemIDs& subItems) {
auto avatarPtr = static_pointer_cast<Avatar>(avatar);
if (avatarPtr->getSkeletonModel()) {
auto metaSubItems = avatarPtr->getSkeletonModel()->fetchRenderItemIDs();
subItems.insert(subItems.end(), metaSubItems.begin(), metaSubItems.end());
return (uint32_t) metaSubItems.size();
}
return 0;
}
}
static uint64_t timeProcessingJoints = 0;
@ -1035,10 +1044,14 @@ void Avatar::setModelURLFinished(bool success) {
// create new model, can return an instance of a SoftAttachmentModel rather then Model
static std::shared_ptr<Model> allocateAttachmentModel(bool isSoft, RigPointer rigOverride) {
static std::shared_ptr<Model> allocateAttachmentModel(bool isSoft, RigPointer rigOverride, bool isCauterized) {
if (isSoft) {
// cast to std::shared_ptr<Model>
return std::dynamic_pointer_cast<Model>(std::make_shared<SoftAttachmentModel>(std::make_shared<Rig>(), nullptr, rigOverride));
std::shared_ptr<SoftAttachmentModel> softModel = std::make_shared<SoftAttachmentModel>(std::make_shared<Rig>(), nullptr, rigOverride);
if (isCauterized) {
softModel->flagAsCauterized();
}
return std::dynamic_pointer_cast<Model>(softModel);
} else {
return std::make_shared<Model>(std::make_shared<Rig>());
}
@ -1064,12 +1077,12 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
for (int i = 0; i < attachmentData.size(); i++) {
if (i == (int)_attachmentModels.size()) {
// if number of attachments has been increased, we need to allocate a new model
_attachmentModels.push_back(allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig()));
_attachmentModels.push_back(allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig(), isMyAvatar()));
}
else if (i < oldAttachmentData.size() && oldAttachmentData[i].isSoft != attachmentData[i].isSoft) {
// if the attachment has changed type, we need to re-allocate a new one.
_attachmentsToRemove.push_back(_attachmentModels[i]);
_attachmentModels[i] = allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig());
_attachmentModels[i] = allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig(), isMyAvatar());
}
_attachmentModels[i]->setURL(attachmentData[i].modelURL);
}
@ -1354,4 +1367,4 @@ void Avatar::ensureInScene(AvatarSharedPointer self) {
if (!_inScene) {
addToScene(self);
}
}

View file

@ -32,6 +32,7 @@ namespace render {
template <> const ItemKey payloadGetKey(const AvatarSharedPointer& avatar);
template <> const Item::Bound payloadGetBound(const AvatarSharedPointer& avatar);
template <> void payloadRender(const AvatarSharedPointer& avatar, RenderArgs* args);
template <> uint32_t metaFetchMetaSubItems(const AvatarSharedPointer& avatar, ItemIDs& subItems);
}
static const float SCALING_RATIO = .05f;

View file

@ -265,6 +265,10 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
}
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble || removalReason == YourAvatarEnteredTheirBubble) {
DependencyManager::get<NodeList>()->radiusIgnoreNodeBySessionID(avatar->getSessionUUID(), true);
} else if (removalReason == KillAvatarReason::AvatarDisconnected) {
// remove from node sets, if present
DependencyManager::get<NodeList>()->removeFromIgnoreMuteSets(avatar->getSessionUUID());
DependencyManager::get<UsersScriptingInterface>()->avatarDisconnected(avatar->getSessionUUID());
}
_avatarFades.push_back(removedAvatar);
}

View file

@ -0,0 +1,74 @@
//
// CauterizedMeshPartPayload.cpp
// interface/src/renderer
//
// Created by Andrew Meadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CauterizedMeshPartPayload.h"
#include <PerfStat.h>
#include "SkeletonModel.h"
using namespace render;
CauterizedMeshPartPayload::CauterizedMeshPartPayload(Model* model, int meshIndex, int partIndex, int shapeIndex, const Transform& transform, const Transform& offsetTransform)
: ModelMeshPartPayload(model, meshIndex, partIndex, shapeIndex, transform, offsetTransform) {}
void CauterizedMeshPartPayload::updateTransformForSkinnedCauterizedMesh(const Transform& transform,
const QVector<glm::mat4>& clusterMatrices,
const QVector<glm::mat4>& cauterizedClusterMatrices) {
_transform = transform;
_cauterizedTransform = transform;
if (clusterMatrices.size() > 0) {
_worldBound = AABox();
for (auto& clusterMatrix : clusterMatrices) {
AABox clusterBound = _localBound;
clusterBound.transform(clusterMatrix);
_worldBound += clusterBound;
}
_worldBound.transform(transform);
if (clusterMatrices.size() == 1) {
_transform = _transform.worldTransform(Transform(clusterMatrices[0]));
if (cauterizedClusterMatrices.size() != 0) {
_cauterizedTransform = _cauterizedTransform.worldTransform(Transform(cauterizedClusterMatrices[0]));
} else {
_cauterizedTransform = _transform;
}
}
} else {
_worldBound = _localBound;
_worldBound.transform(_drawTransform);
}
}
void CauterizedMeshPartPayload::bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const {
// Still relying on the raw data from the model
const Model::MeshState& state = _model->getMeshState(_meshIndex);
SkeletonModel* skeleton = static_cast<SkeletonModel*>(_model);
bool useCauterizedMesh = (renderMode != RenderArgs::RenderMode::SHADOW_RENDER_MODE) && skeleton->getEnableCauterization();
if (state.clusterBuffer) {
if (useCauterizedMesh) {
const Model::MeshState& cState = skeleton->getCauterizeMeshState(_meshIndex);
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, cState.clusterBuffer);
} else {
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, state.clusterBuffer);
}
batch.setModelTransform(_transform);
} else {
if (useCauterizedMesh) {
batch.setModelTransform(_cauterizedTransform);
} else {
batch.setModelTransform(_transform);
}
}
}
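
For context, the bound accumulation in updateTransformForSkinnedCauterizedMesh above can be illustrated in isolation. This is a minimal sketch, not the engine's AABox/Transform classes: SimpleAABB and computeSkinnedBound are hypothetical names, and only glm is assumed. It shows the same idea of taking the union of the local bound transformed by every cluster matrix.

#include <cfloat>
#include <vector>
#include <glm/glm.hpp>

struct SimpleAABB {                       // hypothetical stand-in for AABox
    glm::vec3 minCorner{ FLT_MAX };
    glm::vec3 maxCorner{ -FLT_MAX };
    void include(const glm::vec3& p) {
        minCorner = glm::min(minCorner, p);
        maxCorner = glm::max(maxCorner, p);
    }
};

// Transform every corner of the local bound by each cluster matrix and
// accumulate the result, mirroring the loop over clusterMatrices above.
SimpleAABB computeSkinnedBound(const SimpleAABB& localBound,
                               const std::vector<glm::mat4>& clusterMatrices) {
    SimpleAABB worldBound;
    for (const auto& clusterMatrix : clusterMatrices) {
        for (int i = 0; i < 8; ++i) {
            glm::vec3 corner((i & 1) ? localBound.maxCorner.x : localBound.minCorner.x,
                             (i & 2) ? localBound.maxCorner.y : localBound.minCorner.y,
                             (i & 4) ? localBound.maxCorner.z : localBound.minCorner.z);
            worldBound.include(glm::vec3(clusterMatrix * glm::vec4(corner, 1.0f)));
        }
    }
    return worldBound;
}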

View file

@ -0,0 +1,29 @@
//
// CauterizedMeshPartPayload.h
// interface/src/avatar
//
// Created by Andrew Meadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CauterizedMeshPartPayload_h
#define hifi_CauterizedMeshPartPayload_h
#include <MeshPartPayload.h>
class CauterizedMeshPartPayload : public ModelMeshPartPayload {
public:
CauterizedMeshPartPayload(Model* model, int meshIndex, int partIndex, int shapeIndex, const Transform& transform, const Transform& offsetTransform);
void updateTransformForSkinnedCauterizedMesh(const Transform& transform,
const QVector<glm::mat4>& clusterMatrices,
const QVector<glm::mat4>& cauterizedClusterMatrices);
void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const override;
private:
Transform _cauterizedTransform;
};
#endif // hifi_CauterizedMeshPartPayload_h

View file

@ -0,0 +1,254 @@
//
// CauterizedModel.cpp
// interface/src/avatar
//
// Created by Andrew Meadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CauterizedModel.h"
#include <AbstractViewStateInterface.h>
#include <MeshPartPayload.h>
#include <PerfStat.h>
#include "CauterizedMeshPartPayload.h"
CauterizedModel::CauterizedModel(RigPointer rig, QObject* parent) :
Model(rig, parent) {
}
CauterizedModel::~CauterizedModel() {
}
void CauterizedModel::deleteGeometry() {
Model::deleteGeometry();
_cauterizeMeshStates.clear();
}
bool CauterizedModel::updateGeometry() {
bool needsFullUpdate = Model::updateGeometry();
if (_isCauterized && needsFullUpdate) {
assert(_cauterizeMeshStates.empty());
const FBXGeometry& fbxGeometry = getFBXGeometry();
foreach (const FBXMesh& mesh, fbxGeometry.meshes) {
Model::MeshState state;
state.clusterMatrices.resize(mesh.clusters.size());
_cauterizeMeshStates.append(state);
}
}
return needsFullUpdate;
}
void CauterizedModel::createVisibleRenderItemSet() {
if (_isCauterized) {
assert(isLoaded());
const auto& meshes = _renderGeometry->getMeshes();
// all of our mesh vectors must match in size
if ((int)meshes.size() != _meshStates.size()) {
qCDebug(renderlogging) << "WARNING!!!! Mesh Sizes don't match! We will not segregate mesh groups yet.";
return;
}
// We should not have any existing renderItems if we enter this section of code
Q_ASSERT(_modelMeshRenderItemsSet.isEmpty());
_modelMeshRenderItemsSet.clear();
Transform transform;
transform.setTranslation(_translation);
transform.setRotation(_rotation);
Transform offset;
offset.setScale(_scale);
offset.postTranslate(_offset);
// Run through all of the meshes, and place them into their segregated, but unsorted buckets
int shapeID = 0;
uint32_t numMeshes = (uint32_t)meshes.size();
for (uint32_t i = 0; i < numMeshes; i++) {
const auto& mesh = meshes.at(i);
if (!mesh) {
continue;
}
// Create the render payloads
int numParts = (int)mesh->getNumParts();
for (int partIndex = 0; partIndex < numParts; partIndex++) {
auto ptr = std::make_shared<CauterizedMeshPartPayload>(this, i, partIndex, shapeID, transform, offset);
_modelMeshRenderItemsSet << std::static_pointer_cast<ModelMeshPartPayload>(ptr);
shapeID++;
}
}
} else {
Model::createVisibleRenderItemSet();
}
}
void CauterizedModel::createCollisionRenderItemSet() {
// Temporary HACK: use base class method for now
Model::createCollisionRenderItemSet();
}
// Called within Model::simulate call, below.
void CauterizedModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Model::updateRig(deltaTime, parentTransform);
_needsUpdateClusterMatrices = true;
}
void CauterizedModel::updateClusterMatrices() {
PerformanceTimer perfTimer("CauterizedModel::updateClusterMatrices");
if (!_needsUpdateClusterMatrices || !isLoaded()) {
return;
}
_needsUpdateClusterMatrices = false;
const FBXGeometry& geometry = getFBXGeometry();
for (int i = 0; i < _meshStates.size(); i++) {
Model::MeshState& state = _meshStates[i];
const FBXMesh& mesh = geometry.meshes.at(i);
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
#if GLM_ARCH & GLM_ARCH_SSE2
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
}
// Once the cluster matrices are computed, update the buffer(s)
if (mesh.clusters.size() > 1) {
if (!state.clusterBuffer) {
state.clusterBuffer = std::make_shared<gpu::Buffer>(state.clusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.clusterMatrices.constData());
} else {
state.clusterBuffer->setSubData(0, state.clusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.clusterMatrices.constData());
}
}
}
// As an optimization, don't build cauterizedClusterMatrices if the boneSet is empty.
if (!_cauterizeBoneSet.empty()) {
static const glm::mat4 zeroScale(
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
auto cauterizeMatrix = _rig->getJointTransform(geometry.neckJointIndex) * zeroScale;
for (int i = 0; i < _cauterizeMeshStates.size(); i++) {
Model::MeshState& state = _cauterizeMeshStates[i];
const FBXMesh& mesh = geometry.meshes.at(i);
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
jointMatrix = cauterizeMatrix;
}
#if GLM_ARCH & GLM_ARCH_SSE2
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
}
if (!_cauterizeBoneSet.empty() && (state.clusterMatrices.size() > 1)) {
if (!state.clusterBuffer) {
state.clusterBuffer =
std::make_shared<gpu::Buffer>(state.clusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.clusterMatrices.constData());
} else {
state.clusterBuffer->setSubData(0, state.clusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.clusterMatrices.constData());
}
}
}
}
// post the blender if we're not currently waiting for one to finish
if (geometry.hasBlendedMeshes() && _blendshapeCoefficients != _blendedBlendshapeCoefficients) {
_blendedBlendshapeCoefficients = _blendshapeCoefficients;
DependencyManager::get<ModelBlender>()->noteRequiresBlend(getThisPointer());
}
}
void CauterizedModel::updateRenderItems() {
if (_isCauterized) {
if (!_addedToScene) {
return;
}
glm::vec3 scale = getScale();
if (_collisionGeometry) {
// _collisionGeometry is already scaled
scale = glm::vec3(1.0f);
}
_needsUpdateClusterMatrices = true;
_renderItemsNeedUpdate = false;
// queue up this work for later processing, at the end of update and just before rendering.
// the application will ensure only the last lambda is actually invoked.
void* key = (void*)this;
std::weak_ptr<Model> weakSelf = shared_from_this();
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [weakSelf, scale]() {
// do nothing, if the model has already been destroyed.
auto self = weakSelf.lock();
if (!self) {
return;
}
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
Transform modelTransform;
modelTransform.setTranslation(self->getTranslation());
modelTransform.setRotation(self->getRotation());
Transform scaledModelTransform(modelTransform);
scaledModelTransform.setScale(scale);
uint32_t deleteGeometryCounter = self->getGeometryCounter();
render::PendingChanges pendingChanges;
QList<render::ItemID> keys = self->getRenderItems().keys();
foreach (auto itemID, keys) {
pendingChanges.updateItem<CauterizedMeshPartPayload>(itemID, [modelTransform, deleteGeometryCounter](CauterizedMeshPartPayload& data) {
if (data._model && data._model->isLoaded()) {
// Ensure the model geometry was not reset between frames
if (deleteGeometryCounter == data._model->getGeometryCounter()) {
// lazy update of cluster matrices used for rendering. We need to update them here, so we can correctly update the bounding box.
data._model->updateClusterMatrices();
// update the model transform and bounding box for this render item.
const Model::MeshState& state = data._model->getMeshState(data._meshIndex);
CauterizedModel* cModel = static_cast<CauterizedModel*>(data._model);
assert(data._meshIndex < cModel->_cauterizeMeshStates.size());
const Model::MeshState& cState = cModel->_cauterizeMeshStates.at(data._meshIndex);
data.updateTransformForSkinnedCauterizedMesh(modelTransform, state.clusterMatrices, cState.clusterMatrices);
}
}
});
}
scene->enqueuePendingChanges(pendingChanges);
});
} else {
Model::updateRenderItems();
}
}
const Model::MeshState& CauterizedModel::getCauterizeMeshState(int index) const {
assert(index < _meshStates.size());
return _cauterizeMeshStates.at(index);
}
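
The zero-scale trick in updateClusterMatrices above can be shown in miniature. This is a minimal sketch assuming only glm; buildClusterMatrices, jointTransforms and inverseBindMatrices are hypothetical stand-ins for the rig data. Right-multiplying the neck transform by a matrix whose rotation/scale block is zero collapses every cauterized cluster onto the neck position, which is what makes the head vanish without touching the mesh data.

#include <unordered_set>
#include <vector>
#include <glm/glm.hpp>

// Build per-cluster skinning matrices, collapsing cauterized joints onto the neck.
std::vector<glm::mat4> buildClusterMatrices(const std::vector<glm::mat4>& jointTransforms,
                                            const std::vector<glm::mat4>& inverseBindMatrices,
                                            const std::unordered_set<int>& cauterizeBoneSet,
                                            int neckJointIndex) {
    static const glm::mat4 zeroScale(
        glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
        glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
        glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
        glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
    // Every cauterized joint maps to the neck position with zero extent.
    glm::mat4 cauterizeMatrix = jointTransforms[neckJointIndex] * zeroScale;

    std::vector<glm::mat4> clusterMatrices(jointTransforms.size());
    for (size_t j = 0; j < jointTransforms.size(); ++j) {
        glm::mat4 jointMatrix = cauterizeBoneSet.count((int)j) ? cauterizeMatrix : jointTransforms[j];
        clusterMatrices[j] = jointMatrix * inverseBindMatrices[j];
    }
    return clusterMatrices;
}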

View file

@ -0,0 +1,53 @@
//
// CauterizedModel.h
// interface/src/avatar
//
// Created by Andrew Meadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CauterizedModel_h
#define hifi_CauterizedModel_h
#include <Model.h>
class CauterizedModel : public Model {
Q_OBJECT
public:
CauterizedModel(RigPointer rig, QObject* parent);
virtual ~CauterizedModel();
void flagAsCauterized() { _isCauterized = true; }
bool getIsCauterized() const { return _isCauterized; }
void setEnableCauterization(bool flag) { _enableCauterization = flag; }
bool getEnableCauterization() const { return _enableCauterization; }
const std::unordered_set<int>& getCauterizeBoneSet() const { return _cauterizeBoneSet; }
void setCauterizeBoneSet(const std::unordered_set<int>& boneSet) { _cauterizeBoneSet = boneSet; }
void deleteGeometry() override;
bool updateGeometry() override;
void createVisibleRenderItemSet() override;
void createCollisionRenderItemSet() override;
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
virtual void updateClusterMatrices() override;
void updateRenderItems() override;
const Model::MeshState& getCauterizeMeshState(int index) const;
protected:
std::unordered_set<int> _cauterizeBoneSet;
QVector<Model::MeshState> _cauterizeMeshStates;
bool _isCauterized { false };
bool _enableCauterization { false };
};
#endif // hifi_CauterizedModel_h

View file

@ -116,12 +116,12 @@ MyAvatar::MyAvatar(RigPointer rig) :
_hmdAtRestDetector(glm::vec3(0), glm::quat())
{
using namespace recording;
_skeletonModel->flagAsCauterized();
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
_driveKeys[i] = 0.0f;
}
// Necessary to select the correct slot
using SlotType = void(MyAvatar::*)(const glm::vec3&, bool, const glm::quat&, bool);
@ -1593,7 +1593,7 @@ void MyAvatar::preDisplaySide(RenderArgs* renderArgs) {
// toggle using the cauterizedBones depending on where the camera is and the rendering pass type.
const bool shouldDrawHead = shouldRenderHead(renderArgs);
if (shouldDrawHead != _prevShouldDrawHead) {
_skeletonModel->setCauterizeBones(!shouldDrawHead);
_skeletonModel->setEnableCauterization(!shouldDrawHead);
}
_prevShouldDrawHead = shouldDrawHead;
}

View file

@ -24,7 +24,7 @@
#include "AnimDebugDraw.h"
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) :
Model(rig, parent),
CauterizedModel(rig, parent),
_owningAvatar(owningAvatar),
_boundingCapsuleLocalOffset(0.0f),
_boundingCapsuleRadius(0.0f),
@ -166,7 +166,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
// evaluate AnimGraph animation and update jointStates.
Model::updateRig(deltaTime, parentTransform);
CauterizedModel::updateRig(deltaTime, parentTransform);
Rig::EyeParameters eyeParams;
eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
@ -178,10 +178,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
_rig->updateFromEyeParameters(eyeParams);
} else {
Model::updateRig(deltaTime, parentTransform);
CauterizedModel::updateRig(deltaTime, parentTransform);
// This is a little more work than we really want.
//

View file

@ -12,8 +12,7 @@
#ifndef hifi_SkeletonModel_h
#define hifi_SkeletonModel_h
#include <Model.h>
#include "CauterizedModel.h"
class Avatar;
class MuscleConstraint;
@ -23,7 +22,7 @@ using SkeletonModelPointer = std::shared_ptr<SkeletonModel>;
using SkeletonModelWeakPointer = std::weak_ptr<SkeletonModel>;
/// A skeleton loaded from a model.
class SkeletonModel : public Model {
class SkeletonModel : public CauterizedModel {
Q_OBJECT
public:
@ -31,10 +30,10 @@ public:
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
~SkeletonModel();
virtual void initJointStates() override;
void initJointStates() override;
virtual void simulate(float deltaTime, bool fullUpdate = true) override;
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
void simulate(float deltaTime, bool fullUpdate = true) override;
void updateRig(float deltaTime, glm::mat4 parentTransform) override;
void updateAttitude();
/// Returns the index of the left hand joint, or -1 if not found.
@ -105,7 +104,7 @@ public:
float getHeadClipDistance() const { return _headClipDistance; }
virtual void onInvalidate() override;
void onInvalidate() override;
signals:

View file

@ -13,7 +13,7 @@
#include "InterfaceLogging.h"
SoftAttachmentModel::SoftAttachmentModel(RigPointer rig, QObject* parent, RigPointer rigOverride) :
Model(rig, parent),
CauterizedModel(rig, parent),
_rigOverride(rigOverride) {
assert(_rig);
assert(_rigOverride);

View file

@ -12,7 +12,7 @@
#ifndef hifi_SoftAttachmentModel_h
#define hifi_SoftAttachmentModel_h
#include <Model.h>
#include "CauterizedModel.h"
// A model that allows the creator to specify a secondary rig instance.
// When the cluster matrices are created for rendering, the
@ -22,16 +22,15 @@
// This is used by Avatar instances to wear clothing that follows the same
// animated pose as the SkeletonModel.
class SoftAttachmentModel : public Model {
class SoftAttachmentModel : public CauterizedModel {
Q_OBJECT
public:
SoftAttachmentModel(RigPointer rig, QObject* parent, RigPointer rigOverride);
~SoftAttachmentModel();
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
virtual void updateClusterMatrices() override;
void updateRig(float deltaTime, glm::mat4 parentTransform) override;
void updateClusterMatrices() override;
protected:
int getJointIndexOverride(int i) const;

View file

@ -56,7 +56,7 @@ int main(int argc, const char* argv[]) {
QCoreApplication::setOrganizationDomain(BuildInfo::ORGANIZATION_DOMAIN);
QCoreApplication::setApplicationVersion(BuildInfo::VERSION);
QString applicationName = "High Fidelity Interface - " + qgetenv("USERNAME");
const QString& applicationName = getInterfaceSharedMemoryName();
bool instanceMightBeRunning = true;

View file

@ -45,7 +45,7 @@ public:
void renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
//
// HRTF local gain adjustment
// HRTF local gain adjustment in amplitude (1.0 == unity)
//
void setGainAdjustment(float gain) { _gainAdjust = HRTF_GAIN * gain; };

View file

@ -1395,7 +1395,7 @@ bool AvatarData::processAvatarIdentity(const Identity& identity) {
}
if (identity.displayName != _displayName) {
setDisplayName(identity.displayName);
_displayName = identity.displayName;
hasIdentityChanged = true;
}
maybeUpdateSessionDisplayNameFromTransport(identity.sessionDisplayName);
@ -1442,6 +1442,9 @@ void AvatarData::setSkeletonModelURL(const QUrl& skeletonModelURL) {
void AvatarData::setDisplayName(const QString& displayName) {
_displayName = displayName;
_sessionDisplayName = "";
sendIdentityPacket();
qCDebug(avatars) << "Changing display name for avatar to" << displayName;
}

View file

@ -482,6 +482,7 @@ void OpenGLDisplayPlugin::submitFrame(const gpu::FramePointer& newFrame) {
}
void OpenGLDisplayPlugin::updateFrameData() {
PROFILE_RANGE(render, __FUNCTION__)
if (_lockCurrentTexture) {
return;
}
@ -596,12 +597,16 @@ void OpenGLDisplayPlugin::internalPresent() {
}
void OpenGLDisplayPlugin::present() {
PROFILE_RANGE_EX(render, __FUNCTION__, 0xffffff00, (uint64_t)presentCount())
updateFrameData();
auto frameId = (uint64_t)presentCount();
PROFILE_RANGE_EX(render, __FUNCTION__, 0xffffff00, frameId)
{
PROFILE_RANGE_EX(render, "updateFrameData", 0xff00ff00, frameId)
updateFrameData();
}
incrementPresentCount();
{
PROFILE_RANGE_EX(render, "recycle", 0xff00ff00, (uint64_t)presentCount())
PROFILE_RANGE_EX(render, "recycle", 0xff00ff00, frameId)
_gpuContext->recycle();
}
@ -615,19 +620,19 @@ void OpenGLDisplayPlugin::present() {
_lastFrame = _currentFrame.get();
});
// Execute the frame rendering commands
PROFILE_RANGE_EX(render, "execute", 0xff00ff00, (uint64_t)presentCount())
PROFILE_RANGE_EX(render, "execute", 0xff00ff00, frameId)
_gpuContext->executeFrame(_currentFrame);
}
// Write all layers to a local framebuffer
{
PROFILE_RANGE_EX(render, "composite", 0xff00ffff, (uint64_t)presentCount())
PROFILE_RANGE_EX(render, "composite", 0xff00ffff, frameId)
compositeLayers();
}
// Take the composite framebuffer and send it to the output device
{
PROFILE_RANGE_EX(render, "internalPresent", 0xff00ffff, (uint64_t)presentCount())
PROFILE_RANGE_EX(render, "internalPresent", 0xff00ffff, frameId)
internalPresent();
}

View file

@ -15,21 +15,21 @@
namespace render {
template <> const ItemKey payloadGetKey(const RenderableEntityItemProxy::Pointer& payload) {
if (payload && payload->entity) {
if (payload->entity->getType() == EntityTypes::Light) {
return ItemKey::Builder::light();
if (payload && payload->_entity) {
if (payload->_entity->getType() == EntityTypes::Light) {
return ItemKey::Builder::light().withTypeMeta();
}
if (payload && payload->entity->isTransparent()) {
return ItemKey::Builder::transparentShape();
if (payload && payload->_entity->isTransparent()) {
return ItemKey::Builder::transparentShape().withTypeMeta();
}
}
return ItemKey::Builder::opaqueShape();
return ItemKey::Builder::opaqueShape().withTypeMeta();
}
template <> const Item::Bound payloadGetBound(const RenderableEntityItemProxy::Pointer& payload) {
if (payload && payload->entity) {
if (payload && payload->_entity) {
bool success;
auto result = payload->entity->getAABox(success);
auto result = payload->_entity->getAABox(success);
if (!success) {
return render::Item::Bound();
}
@ -39,11 +39,19 @@ namespace render {
}
template <> void payloadRender(const RenderableEntityItemProxy::Pointer& payload, RenderArgs* args) {
if (args) {
if (payload && payload->entity && payload->entity->getVisible()) {
payload->entity->render(args);
if (payload && payload->_entity && payload->_entity->getVisible()) {
payload->_entity->render(args);
}
}
}
template <> uint32_t metaFetchMetaSubItems(const RenderableEntityItemProxy::Pointer& payload, ItemIDs& subItems) {
auto metaID = payload->_metaID;
if (Item::isValidID(metaID)) {
subItems.emplace_back(metaID);
return 1;
}
return 0;
}
}
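
The withTypeMeta()/metaFetchMetaSubItems pair above lets a meta item enumerate the concrete items it owns. The following is a minimal, engine-independent sketch of the consuming side under stated assumptions: expandMetaItems and the plain ID-to-sub-items map are hypothetical, whereas the real render::Scene asks each payload through its fetchMetaSubItems specialization.

#include <cstdint>
#include <unordered_map>
#include <vector>

using ItemID = uint32_t;
using ItemIDs = std::vector<ItemID>;

// Expand a list of meta items into the drawable items they reference.
// A meta with no entry in the lookup simply contributes nothing.
ItemIDs expandMetaItems(const ItemIDs& metas,
                        const std::unordered_map<ItemID, ItemIDs>& metaSubItems) {
    ItemIDs expanded;
    for (ItemID meta : metas) {
        auto it = metaSubItems.find(meta);
        if (it != metaSubItems.end()) {
            expanded.insert(expanded.end(), it->second.begin(), it->second.end());
        }
    }
    return expanded;
}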
void makeEntityItemStatusGetters(EntityItemPointer entity, render::Item::Status::Getters& statusGetters) {

View file

@ -36,17 +36,19 @@ void makeEntityItemStatusGetters(EntityItemPointer entity, render::Item::Status:
class RenderableEntityItemProxy {
public:
RenderableEntityItemProxy(EntityItemPointer entity) : entity(entity) { }
RenderableEntityItemProxy(EntityItemPointer entity, render::ItemID metaID) : _entity(entity), _metaID(metaID) { }
typedef render::Payload<RenderableEntityItemProxy> Payload;
typedef Payload::DataPointer Pointer;
EntityItemPointer entity;
EntityItemPointer _entity;
render::ItemID _metaID;
};
namespace render {
template <> const ItemKey payloadGetKey(const RenderableEntityItemProxy::Pointer& payload);
template <> const Item::Bound payloadGetBound(const RenderableEntityItemProxy::Pointer& payload);
template <> void payloadRender(const RenderableEntityItemProxy::Pointer& payload, RenderArgs* args);
template <> uint32_t metaFetchMetaSubItems(const RenderableEntityItemProxy::Pointer& payload, ItemIDs& subItems);
}
// Mixin class for implementing basic single item rendering
@ -55,7 +57,7 @@ public:
bool addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
_myItem = scene->allocateID();
auto renderData = std::make_shared<RenderableEntityItemProxy>(self);
auto renderData = std::make_shared<RenderableEntityItemProxy>(self, _myItem);
auto renderPayload = std::make_shared<RenderableEntityItemProxy::Payload>(renderData);
render::Item::Status::Getters statusGetters;

View file

@ -194,7 +194,7 @@ public:
namespace render {
template <> const ItemKey payloadGetKey(const RenderableModelEntityItemMeta::Pointer& payload) {
return ItemKey::Builder::opaqueShape();
return ItemKey::Builder::opaqueShape().withTypeMeta();
}
template <> const Item::Bound payloadGetBound(const RenderableModelEntityItemMeta::Pointer& payload) {
@ -216,6 +216,15 @@ namespace render {
}
}
}
template <> uint32_t metaFetchMetaSubItems(const RenderableModelEntityItemMeta::Pointer& payload, ItemIDs& subItems) {
auto modelEntity = std::static_pointer_cast<RenderableModelEntityItem>(payload->entity);
if (modelEntity->hasModel()) {
auto metaSubItems = modelEntity->getModelNotSafe()->fetchRenderItemIDs();
subItems.insert(subItems.end(), metaSubItems.begin(), metaSubItems.end());
return (uint32_t) metaSubItems.size();
}
return 0;
}
}
bool RenderableModelEntityItem::addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
@ -473,6 +482,10 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
}
ModelPointer RenderableModelEntityItem::getModelNotSafe() {
return _model;
}
ModelPointer RenderableModelEntityItem::getModel(QSharedPointer<EntityTreeRenderer> renderer) {
if (!renderer) {
return nullptr;
@ -646,6 +659,12 @@ bool RenderableModelEntityItem::isReadyToComputeShape() {
// the model is still being downloaded.
return false;
} else if (type >= SHAPE_TYPE_SIMPLE_HULL && type <= SHAPE_TYPE_STATIC_MESH) {
if (!_model) {
EntityTreePointer tree = getTree();
if (tree) {
QMetaObject::invokeMethod(tree.get(), "callLoader", Qt::QueuedConnection, Q_ARG(EntityItemID, getID()));
}
}
return (_model && _model->isLoaded());
}
return true;

View file

@ -52,6 +52,7 @@ public:
BoxFace& face, glm::vec3& surfaceNormal,
void** intersectedObject, bool precisionPicking) const override;
ModelPointer getModel(QSharedPointer<EntityTreeRenderer> renderer);
ModelPointer getModelNotSafe();
virtual bool needsToCallUpdate() const override;
virtual void update(const quint64& now) override;

View file

@ -246,14 +246,14 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
}
void RenderableWebEntityItem::setSourceUrl(const QString& value) {
if (_sourceUrl != value) {
qCDebug(entities) << "Setting web entity source URL to " << value;
_sourceUrl = value;
if (_webSurface) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
});
}
auto valueBeforeSuperclassSet = _sourceUrl;
WebEntityItem::setSourceUrl(value);
if (_sourceUrl != valueBeforeSuperclassSet && _webSurface) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
});
}
}

View file

@ -125,7 +125,14 @@ bool WebEntityItem::findDetailedRayIntersection(const glm::vec3& origin, const g
void WebEntityItem::setSourceUrl(const QString& value) {
if (_sourceUrl != value) {
_sourceUrl = value;
auto newURL = QUrl::fromUserInput(value);
if (newURL.isValid()) {
_sourceUrl = newURL.toDisplayString();
qCDebug(entities) << "Changed web entity source URL to " << _sourceUrl;
} else {
qCDebug(entities) << "Clearing web entity source URL since" << value << "cannot be parsed to a valid URL.";
}
}
}
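
The validation added above relies on QUrl::fromUserInput(), which accepts loosely typed user input and returns a normalized URL (or an invalid one). A minimal sketch of that behavior, assuming only QtCore; the sample input string is illustrative:

#include <QtCore/QDebug>
#include <QtCore/QString>
#include <QtCore/QUrl>

int main() {
    // Loose input is normalized; e.g. a missing scheme is filled in.
    QUrl url = QUrl::fromUserInput(QStringLiteral("highfidelity.com/somepath"));
    if (url.isValid()) {
        qDebug() << "normalized to" << url.toDisplayString();   // e.g. "http://highfidelity.com/somepath"
    } else {
        qDebug() << "input could not be parsed into a URL";
    }
    return 0;
}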

View file

@ -972,7 +972,7 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
static const QVariant EMISSIVE = QByteArray("Emissive");
static const QVariant AMBIENT_FACTOR = QByteArray("AmbientFactor");
static const QVariant SHININESS = QByteArray("Shininess");
static const QVariant OPACITY = QByteArray("Shininess");
static const QVariant OPACITY = QByteArray("Opacity");
static const QVariant MAYA_USE_NORMAL_MAP = QByteArray("Maya|use_normal_map");
static const QVariant MAYA_BASE_COLOR = QByteArray("Maya|base_color");
static const QVariant MAYA_USE_COLOR_MAP = QByteArray("Maya|use_color_map");

View file

@ -26,6 +26,7 @@
#include "AccountManager.h"
#include "AddressManager.h"
#include "Assignment.h"
#include "AudioHelpers.h"
#include "HifiSockAddr.h"
#include "FingerprintUtils.h"
@ -846,6 +847,16 @@ void NodeList::ignoreNodeBySessionID(const QUuid& nodeID, bool ignoreEnabled) {
}
}
void NodeList::removeFromIgnoreMuteSets(const QUuid& nodeID) {
// don't remove yourself, or nobody
if (!nodeID.isNull() && _sessionUUID != nodeID) {
QWriteLocker ignoredSetLocker{ &_ignoredSetLock };
QWriteLocker personalMutedSetLocker{ &_personalMutedSetLock };
_ignoredNodeIDs.unsafe_erase(nodeID);
_personalMutedNodeIDs.unsafe_erase(nodeID);
}
}
bool NodeList::isIgnoringNode(const QUuid& nodeID) const {
QReadLocker ignoredSetLocker{ &_ignoredSetLock };
return _ignoredNodeIDs.find(nodeID) != _ignoredNodeIDs.cend();
@ -951,6 +962,30 @@ void NodeList::maybeSendIgnoreSetToNode(SharedNodePointer newNode) {
}
}
void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {
// cannot set gain of yourself or nobody
if (!nodeID.isNull() && _sessionUUID != nodeID) {
auto audioMixer = soloNodeOfType(NodeType::AudioMixer);
if (audioMixer) {
// setup the packet
auto setAvatarGainPacket = NLPacket::create(PacketType::PerAvatarGainSet, NUM_BYTES_RFC4122_UUID + sizeof(float), true);
// write the node ID to the packet
setAvatarGainPacket->write(nodeID.toRfc4122());
// We need to convert the gain in dB (from the script) to an amplitude before packing it.
setAvatarGainPacket->writePrimitive(packFloatGainToByte(fastExp2f(gain / 6.0206f)));
qCDebug(networking) << "Sending Set Avatar Gain packet UUID: " << uuidStringWithoutCurlyBraces(nodeID) << "Gain:" << gain;
sendPacket(std::move(setAvatarGainPacket), *audioMixer);
} else {
qWarning() << "Couldn't find audio mixer to send set gain request";
}
} else {
qWarning() << "NodeList::setAvatarGain called with an invalid ID or an ID which matches the current session ID:" << nodeID;
}
}
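
The 6.0206 constant above is 20*log10(2): a gain given in dB becomes a linear amplitude via amplitude = 10^(dB/20) = 2^(dB/6.0206), which is what fastExp2f(gain / 6.0206f) computes before the byte packing. A minimal sketch of the conversion, with std::exp2 standing in for the engine's fastExp2f and a hypothetical linear 0-255 packing that only approximates the spirit of packFloatGainToByte:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Convert a gain in dB to a linear amplitude: 10^(dB/20) == 2^(dB/6.0206).
float dbToAmplitude(float gainDb) {
    return std::exp2(gainDb / 6.0206f);
}

// Hypothetical packing: clamp the amplitude into [0, 4] and quantize to one byte.
uint8_t packAmplitudeToByte(float amplitude) {
    const float MAX_AMPLITUDE = 4.0f;
    float clamped = std::min(std::max(amplitude, 0.0f), MAX_AMPLITUDE);
    return (uint8_t)(clamped / MAX_AMPLITUDE * 255.0f + 0.5f);
}

int main() {
    for (float db : { -12.0f, -6.0f, 0.0f, 6.0f }) {
        printf("%+5.1f dB -> amplitude %.3f -> byte %d\n",
               db, dbToAmplitude(db), (int)packAmplitudeToByte(dbToAmplitude(db)));
    }
    return 0;
}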
void NodeList::kickNodeBySessionID(const QUuid& nodeID) {
// send a request to domain-server to kick the node with the given session ID
// the domain-server will handle the persistence of the kick (via username or IP)
@ -1005,25 +1040,20 @@ void NodeList::muteNodeBySessionID(const QUuid& nodeID) {
}
void NodeList::requestUsernameFromSessionID(const QUuid& nodeID) {
// send a request to domain-server to get the username associated with the given session ID
if (getThisNodeCanKick() || nodeID.isNull()) {
// setup the packet
auto usernameFromIDRequestPacket = NLPacket::create(PacketType::UsernameFromIDRequest, NUM_BYTES_RFC4122_UUID, true);
// send a request to domain-server to get the username/machine fingerprint/admin status associated with the given session ID
// If the requesting user isn't an admin, the username and machine fingerprint will return "".
auto usernameFromIDRequestPacket = NLPacket::create(PacketType::UsernameFromIDRequest, NUM_BYTES_RFC4122_UUID, true);
// write the node ID to the packet
if (nodeID.isNull()) {
usernameFromIDRequestPacket->write(getSessionUUID().toRfc4122());
} else {
usernameFromIDRequestPacket->write(nodeID.toRfc4122());
}
qCDebug(networking) << "Sending packet to get username of node" << uuidStringWithoutCurlyBraces(nodeID);
sendPacket(std::move(usernameFromIDRequestPacket), _domainHandler.getSockAddr());
// write the node ID to the packet
if (nodeID.isNull()) {
usernameFromIDRequestPacket->write(getSessionUUID().toRfc4122());
} else {
qWarning() << "You do not have permissions to kick in this domain."
<< "Request to get the username of node" << uuidStringWithoutCurlyBraces(nodeID) << "will not be sent";
usernameFromIDRequestPacket->write(nodeID.toRfc4122());
}
qCDebug(networking) << "Sending packet to get username/fingerprint/admin status of node" << uuidStringWithoutCurlyBraces(nodeID);
sendPacket(std::move(usernameFromIDRequestPacket), _domainHandler.getSockAddr());
}
void NodeList::processUsernameFromIDReply(QSharedPointer<ReceivedMessage> message) {
@ -1033,10 +1063,13 @@ void NodeList::processUsernameFromIDReply(QSharedPointer<ReceivedMessage> messag
QString username = message->readString();
// read the machine fingerprint from the packet
QString machineFingerprintString = (QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID))).toString();
bool isAdmin;
message->readPrimitive(&isAdmin);
qCDebug(networking) << "Got username" << username << "and machine fingerprint" << machineFingerprintString << "for node" << nodeUUIDString;
qCDebug(networking) << "Got username" << username << "and machine fingerprint"
<< machineFingerprintString << "for node" << nodeUUIDString << ". isAdmin:" << isAdmin;
emit usernameFromIDReply(nodeUUIDString, username, machineFingerprintString);
emit usernameFromIDReply(nodeUUIDString, username, machineFingerprintString, isAdmin);
}
void NodeList::setRequestsDomainListData(bool isRequesting) {

View file

@ -82,6 +82,7 @@ public:
bool isIgnoringNode(const QUuid& nodeID) const;
void personalMuteNodeBySessionID(const QUuid& nodeID, bool muteEnabled);
bool isPersonalMutingNode(const QUuid& nodeID) const;
void setAvatarGain(const QUuid& nodeID, float gain);
void kickNodeBySessionID(const QUuid& nodeID);
void muteNodeBySessionID(const QUuid& nodeID);
@ -89,6 +90,8 @@ public:
bool getRequestsDomainListData() { return _requestsDomainListData; }
void setRequestsDomainListData(bool isRequesting);
void removeFromIgnoreMuteSets(const QUuid& nodeID);
public slots:
void reset();
void sendDomainServerCheckIn();
@ -117,7 +120,7 @@ signals:
void receivedDomainServerList();
void ignoredNode(const QUuid& nodeID, bool enabled);
void ignoreRadiusEnabledChanged(bool isIgnored);
void usernameFromIDReply(const QString& nodeID, const QString& username, const QString& machineFingerprint);
void usernameFromIDReply(const QString& nodeID, const QString& username, const QString& machineFingerprint, bool isAdmin);
private slots:
void stopKeepalivePingTimer();

View file

@ -38,6 +38,21 @@ void UserActivityLoggerScriptingInterface::tutorialProgress( QString stepName, i
}
void UserActivityLoggerScriptingInterface::palAction(QString action, QString target) {
QJsonObject payload;
payload["action"] = action;
if (target.length() > 0) {
payload["target"] = target;
}
logAction("pal_activity", payload);
}
void UserActivityLoggerScriptingInterface::palOpened(float secondsOpened) {
logAction("pal_opened", {
{ "seconds_opened", secondsOpened }
});
}
void UserActivityLoggerScriptingInterface::logAction(QString action, QJsonObject details) {
QMetaObject::invokeMethod(&UserActivityLogger::getInstance(), "logAction",
Q_ARG(QString, action),

View file

@ -25,7 +25,8 @@ public:
Q_INVOKABLE void toggledAway(bool isAway);
Q_INVOKABLE void tutorialProgress(QString stepName, int stepNumber, float secondsToComplete,
float tutorialElapsedTime, QString tutorialRunID = "", int tutorialVersion = 0, QString controllerType = "");
Q_INVOKABLE void palAction(QString action, QString target);
Q_INVOKABLE void palOpened(float secondsOpen);
private:
void logAction(QString action, QJsonObject details = {});
};

View file

@ -106,7 +106,8 @@ public:
ViewFrustum,
RequestsDomainListData,
ExitingSpaceBubble,
LAST_PACKET_TYPE = ExitingSpaceBubble
PerAvatarGainSet,
LAST_PACKET_TYPE = PerAvatarGainSet
};
};
@ -211,6 +212,7 @@ enum class AvatarMixerPacketVersion : PacketVersion {
HasKillAvatarReason,
SessionDisplayName,
Unignore,
ImmediateSessionDisplayNameUpdates,
VariableAvatarData
};

View file

@ -251,7 +251,7 @@ void MeshPartPayload::bindMaterial(gpu::Batch& batch, const ShapePipeline::Locat
}
}
void MeshPartPayload::bindTransform(gpu::Batch& batch, const ShapePipeline::LocationsPointer locations, bool canCauterize) const {
void MeshPartPayload::bindTransform(gpu::Batch& batch, const ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const {
batch.setModelTransform(_drawTransform);
}
@ -265,7 +265,7 @@ void MeshPartPayload::render(RenderArgs* args) const {
assert(locations);
// Bind the model transform and the skinClusterMatrices if needed
bindTransform(batch, locations);
bindTransform(batch, locations, args->_renderMode);
//Bind the index buffer and vertex buffer and Blend shapes if needed
bindMesh(batch);
@ -369,11 +369,13 @@ void ModelMeshPartPayload::updateTransformForSkinnedMesh(const Transform& transf
clusterBound.transform(clusterMatrix);
_worldBound += clusterBound;
}
_worldBound.transform(transform);
_worldBound.transform(_transform);
if (clusterMatrices.size() == 1) {
_transform = _transform.worldTransform(Transform(clusterMatrices[0]));
}
} else {
_worldBound = _localBound;
_worldBound.transform(_transform);
}
}
@ -400,7 +402,7 @@ ItemKey ModelMeshPartPayload::getKey() const {
}
}
if (!_hasFinishedFade) {
if (_fadeState != FADE_COMPLETE) {
builder.withTransparent();
}
@ -470,7 +472,7 @@ ShapeKey ModelMeshPartPayload::getShapeKey() const {
}
ShapeKey::Builder builder;
if (isTranslucent || !_hasFinishedFade) {
if (isTranslucent || _fadeState != FADE_COMPLETE) {
builder.withTranslucent();
}
if (hasTangents) {
@ -511,38 +513,39 @@ void ModelMeshPartPayload::bindMesh(gpu::Batch& batch) const {
batch.setInputStream(2, _drawMesh->getVertexStream().makeRangedStream(2));
}
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
if (!_hasColorAttrib || fadeRatio < 1.0f) {
batch._glColor4f(1.0f, 1.0f, 1.0f, fadeRatio);
if (_fadeState != FADE_COMPLETE) {
batch._glColor4f(1.0f, 1.0f, 1.0f, computeFadeAlpha());
} else if (!_hasColorAttrib) {
batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}
}
void ModelMeshPartPayload::bindTransform(gpu::Batch& batch, const ShapePipeline::LocationsPointer locations, bool canCauterize) const {
void ModelMeshPartPayload::bindTransform(gpu::Batch& batch, const ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const {
// Still relying on the raw data from the model
const Model::MeshState& state = _model->_meshStates.at(_meshIndex);
const Model::MeshState& state = _model->getMeshState(_meshIndex);
if (state.clusterBuffer) {
if (canCauterize && _model->getCauterizeBones()) {
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, state.cauterizedClusterBuffer);
} else {
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, state.clusterBuffer);
}
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, state.clusterBuffer);
}
batch.setModelTransform(_transform);
}
void ModelMeshPartPayload::startFade() {
bool shouldFade = EntityItem::getEntitiesShouldFadeFunction()();
if (shouldFade) {
_fadeStartTime = usecTimestampNow();
_hasStartedFade = true;
_hasFinishedFade = false;
} else {
_isFading = true;
_hasStartedFade = true;
_hasFinishedFade = true;
float ModelMeshPartPayload::computeFadeAlpha() const {
if (_fadeState == FADE_WAITING_TO_START) {
return 0.0f;
}
float fadeAlpha = 1.0f;
const float INV_FADE_PERIOD = 1.0f / (float)(1 * USECS_PER_SECOND);
float fraction = (float)(usecTimestampNow() - _fadeStartTime) * INV_FADE_PERIOD;
if (fraction < 1.0f) {
fadeAlpha = Interpolate::simpleNonLinearBlend(fraction);
}
if (fadeAlpha >= 1.0f) {
_fadeState = FADE_COMPLETE;
// when fade-in completes we flag the model for one last "render item update"
_model->setRenderItemsNeedUpdate();
return 1.0f;
}
return Interpolate::simpleNonLinearBlend(fadeAlpha);
}
void ModelMeshPartPayload::render(RenderArgs* args) const {
@ -552,40 +555,34 @@ void ModelMeshPartPayload::render(RenderArgs* args) const {
return; // bail asap
}
// If we didn't start the fade in, check if we are ready to now....
if (!_hasStartedFade && _model->isLoaded() && _model->getGeometry()->areTexturesLoaded()) {
const_cast<ModelMeshPartPayload&>(*this).startFade();
if (_fadeState == FADE_WAITING_TO_START) {
if (_model->isLoaded() && _model->getGeometry()->areTexturesLoaded()) {
if (EntityItem::getEntitiesShouldFadeFunction()()) {
_fadeStartTime = usecTimestampNow();
_fadeState = FADE_IN_PROGRESS;
} else {
_fadeState = FADE_COMPLETE;
}
_model->setRenderItemsNeedUpdate();
} else {
return;
}
}
// If we still didn't start the fade in, bail
if (!_hasStartedFade) {
if (!args) {
return;
}
// When an individual mesh part like this finishes its fade, we will mark the Model as
// having render items that need updating
bool nextIsFading = _isFading ? isStillFading() : false;
bool startFading = !_isFading && !_hasFinishedFade && _hasStartedFade;
bool endFading = _isFading && !nextIsFading;
if (startFading || endFading) {
_isFading = startFading;
_hasFinishedFade = endFading;
_model->setRenderItemsNeedUpdate();
}
gpu::Batch& batch = *(args->_batch);
if (!getShapeKey().isValid()) {
return;
}
gpu::Batch& batch = *(args->_batch);
auto locations = args->_pipeline->locations;
assert(locations);
// Bind the model transform and the skinClusterMatrices if needed
bool canCauterize = args->_renderMode != RenderArgs::SHADOW_RENDER_MODE;
_model->updateClusterMatrices();
bindTransform(batch, locations, canCauterize);
bindTransform(batch, locations, args->_renderMode);
//Bind the index buffer and vertex buffer and Blend shapes if needed
bindMesh(batch);
@ -593,9 +590,7 @@ void ModelMeshPartPayload::render(RenderArgs* args) const {
// apply material properties
bindMaterial(batch, locations);
if (args) {
args->_details._materialSwitches++;
}
args->_details._materialSwitches++;
// Draw!
{
@ -603,9 +598,6 @@ void ModelMeshPartPayload::render(RenderArgs* args) const {
drawCall(batch);
}
if (args) {
const int INDICES_PER_TRIANGLE = 3;
args->_details._trianglesRendered += _drawPart._numIndices / INDICES_PER_TRIANGLE;
}
const int INDICES_PER_TRIANGLE = 3;
args->_details._trianglesRendered += _drawPart._numIndices / INDICES_PER_TRIANGLE;
}
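
The fade-in logic above maps elapsed time since _fadeStartTime to an alpha over a one-second period through a non-linear blend. A minimal standalone sketch of the same idea; smoothBlend is a smoothstep stand-in for Interpolate::simpleNonLinearBlend (whose exact curve may differ), and the state-machine side effects are omitted:

#include <algorithm>
#include <cstdint>
#include <cstdio>

static const uint64_t USECS_PER_SECOND = 1000000ULL;

// Stand-in for Interpolate::simpleNonLinearBlend: ease-in/ease-out over [0, 1].
float smoothBlend(float t) {
    t = std::min(std::max(t, 0.0f), 1.0f);
    return t * t * (3.0f - 2.0f * t);
}

// Alpha for a fade that started at fadeStartTime and lasts one second.
float computeFadeAlpha(uint64_t now, uint64_t fadeStartTime) {
    float fraction = (float)(now - fadeStartTime) / (float)USECS_PER_SECOND;
    return fraction < 1.0f ? smoothBlend(fraction) : 1.0f;
}

int main() {
    uint64_t start = 0;
    for (uint64_t ms = 0; ms <= 1000; ms += 250) {
        printf("t=%4llums alpha=%.3f\n", (unsigned long long)ms, computeFadeAlpha(ms * 1000, start));
    }
    return 0;
}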

View file

@ -21,6 +21,10 @@
#include <model/Geometry.h>
const uint8_t FADE_WAITING_TO_START = 0;
const uint8_t FADE_IN_PROGRESS = 1;
const uint8_t FADE_COMPLETE = 2;
class Model;
class MeshPartPayload {
@ -48,21 +52,20 @@ public:
void drawCall(gpu::Batch& batch) const;
virtual void bindMesh(gpu::Batch& batch) const;
virtual void bindMaterial(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations) const;
virtual void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, bool canCauterize = true) const;
virtual void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const;
// Payload resource cached values
std::shared_ptr<const model::Mesh> _drawMesh;
int _partIndex = 0;
model::Mesh::Part _drawPart;
std::shared_ptr<const model::Material> _drawMaterial;
model::Box _localBound;
Transform _drawTransform;
Transform _transform;
mutable model::Box _worldBound;
int _partIndex = 0;
bool _hasColorAttrib { false };
bool _hasColorAttrib = false;
model::Box _localBound;
mutable model::Box _worldBound;
std::shared_ptr<const model::Mesh> _drawMesh;
std::shared_ptr<const model::Material> _drawMaterial;
model::Mesh::Part _drawPart;
size_t getVerticesCount() const { return _drawMesh ? _drawMesh->getNumVertices() : 0; }
size_t getMaterialTextureSize() { return _drawMaterial ? _drawMaterial->getTextureSize() : 0; }
@ -85,12 +88,10 @@ public:
typedef Payload::DataPointer Pointer;
void notifyLocationChanged() override;
void updateTransformForSkinnedMesh(const Transform& transform, const QVector<glm::mat4>& clusterMatrices);
void updateTransformForSkinnedMesh(const Transform& transform,
const QVector<glm::mat4>& clusterMatrices);
// Entity fade in
void startFade();
bool hasStartedFade() { return _hasStartedFade; }
bool isStillFading() const { return Interpolate::calculateFadeRatio(_fadeStartTime) < 1.0f; }
float computeFadeAlpha() const;
// Render Item interface
render::ItemKey getKey() const override;
@ -100,7 +101,7 @@ public:
// ModelMeshPartPayload functions to perform render
void bindMesh(gpu::Batch& batch) const override;
void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, bool canCauterize = true) const override;
void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const override;
void initCache();
@ -113,10 +114,8 @@ public:
bool _isBlendShaped{ false };
private:
quint64 _fadeStartTime { 0 };
bool _hasStartedFade { false };
mutable bool _hasFinishedFade { false };
mutable bool _isFading { false };
mutable quint64 _fadeStartTime { 0 };
mutable uint8_t _fadeState { FADE_WAITING_TO_START };
};
namespace render {

View file

@ -91,7 +91,6 @@ Model::Model(RigPointer rig, QObject* parent) :
_scaledToFit(false),
_snapModelToRegistrationPoint(false),
_snappedToRegistrationPoint(false),
_cauterizeBones(false),
_url(HTTP_INVALID_COM),
_isVisible(true),
_blendNumber(0),
@ -228,9 +227,6 @@ void Model::updateRenderItems() {
foreach (auto itemID, self->_modelMeshRenderItems.keys()) {
pendingChanges.updateItem<ModelMeshPartPayload>(itemID, [modelTransform, deleteGeometryCounter](ModelMeshPartPayload& data) {
if (data._model && data._model->isLoaded()) {
if (!data.hasStartedFade() && data._model->getGeometry()->areTexturesLoaded()) {
data.startFade();
}
// Ensure the model geometry was not reset between frames
if (deleteGeometryCounter == data._model->_deleteGeometryCounter) {
// lazy update of cluster matrices used for rendering. We need to update them here, so we can correctly update the bounding box.
@ -294,8 +290,6 @@ bool Model::updateGeometry() {
foreach (const FBXMesh& mesh, fbxGeometry.meshes) {
MeshState state;
state.clusterMatrices.resize(mesh.clusters.size());
state.cauterizedClusterMatrices.resize(mesh.clusters.size());
_meshStates.append(state);
// Note: we add empty buffers for meshes that lack blendshapes so we can access the buffers by index
@ -673,6 +667,7 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene,
hasTransparent = hasTransparent || renderItem.get()->getShapeKey().isTranslucent();
verticesCount += renderItem.get()->getVerticesCount();
_modelMeshRenderItems.insert(item, renderPayload);
_modelMeshRenderItemIDs.emplace_back(item);
}
somethingAdded = !_modelMeshRenderItems.empty();
@ -695,6 +690,7 @@ void Model::removeFromScene(std::shared_ptr<render::Scene> scene, render::Pendin
foreach (auto item, _modelMeshRenderItems.keys()) {
pendingChanges.removeItem(item);
}
_modelMeshRenderItemIDs.clear();
_modelMeshRenderItems.clear();
_modelMeshRenderItemsSet.clear();
@ -1157,13 +1153,6 @@ void Model::updateClusterMatrices() {
}
_needsUpdateClusterMatrices = false;
const FBXGeometry& geometry = getFBXGeometry();
static const glm::mat4 zeroScale(
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
auto cauterizeMatrix = _rig->getJointTransform(geometry.neckJointIndex) * zeroScale;
for (int i = 0; i < _meshStates.size(); i++) {
MeshState& state = _meshStates[i];
const FBXMesh& mesh = geometry.meshes.at(i);
@ -1177,20 +1166,6 @@ void Model::updateClusterMatrices() {
#else
state.clusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
// as an optimization, don't build cauterizedClusterMatrices if the boneSet is empty.
if (!_cauterizeBoneSet.empty()) {
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
jointMatrix = cauterizeMatrix;
}
#if GLM_ARCH & GLM_ARCH_SSE2
glm::mat4 out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&jointMatrix, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.cauterizedClusterMatrices[j] = out;
#else
state.cauterizedClusterMatrices[j] = jointMatrix * cluster.inverseBindMatrix;
#endif
}
}
// Once computed the cluster matrices, update the buffer(s)
@ -1202,17 +1177,6 @@ void Model::updateClusterMatrices() {
state.clusterBuffer->setSubData(0, state.clusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.clusterMatrices.constData());
}
if (!_cauterizeBoneSet.empty() && (state.cauterizedClusterMatrices.size() > 1)) {
if (!state.cauterizedClusterBuffer) {
state.cauterizedClusterBuffer =
std::make_shared<gpu::Buffer>(state.cauterizedClusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.cauterizedClusterMatrices.constData());
} else {
state.cauterizedClusterBuffer->setSubData(0, state.cauterizedClusterMatrices.size() * sizeof(glm::mat4),
(const gpu::Byte*) state.cauterizedClusterMatrices.constData());
}
}
}
}
@ -1301,6 +1265,10 @@ AABox Model::getRenderableMeshBound() const {
}
}
const render::ItemIDs& Model::fetchRenderItemIDs() const {
return _modelMeshRenderItemIDs;
}
void Model::createRenderItemSet() {
if (_collisionGeometry) {
if (_collisionRenderItemsSet.empty()) {

View file

@ -101,10 +101,11 @@ public:
bool isLayeredInFront() const { return _isLayeredInFront; }
void updateRenderItems();
virtual void updateRenderItems();
void setRenderItemsNeedUpdate() { _renderItemsNeedUpdate = true; }
bool getRenderItemsNeedUpdate() { return _renderItemsNeedUpdate; }
AABox getRenderableMeshBound() const;
const render::ItemIDs& fetchRenderItemIDs() const;
bool maybeStartBlender();
@ -214,12 +215,6 @@ public:
bool getIsScaledToFit() const { return _scaledToFit; } /// is model scaled to fit
glm::vec3 getScaleToFitDimensions() const; /// the dimensions model is scaled to, including inferred y/z
void setCauterizeBones(bool flag) { _cauterizeBones = flag; }
bool getCauterizeBones() const { return _cauterizeBones; }
const std::unordered_set<int>& getCauterizeBoneSet() const { return _cauterizeBoneSet; }
void setCauterizeBoneSet(const std::unordered_set<int>& boneSet) { _cauterizeBoneSet = boneSet; }
int getBlendshapeCoefficientsNum() const { return _blendshapeCoefficients.size(); }
float getBlendshapeCoefficient(int index) const {
return ((index < 0) && (index >= _blendshapeCoefficients.size())) ? 0.0f : _blendshapeCoefficients.at(index);
@ -230,7 +225,7 @@ public:
const glm::vec3& getRegistrationPoint() const { return _registrationPoint; }
// returns 'true' if needs fullUpdate after geometry change
bool updateGeometry();
virtual bool updateGeometry();
void setCollisionMesh(model::MeshPointer mesh);
void setLoadingPriority(float priority) { _loadingPriority = priority; }
@ -241,6 +236,18 @@ public:
int getRenderInfoDrawCalls() const { return _renderInfoDrawCalls; }
bool getRenderInfoHasTransparent() const { return _renderInfoHasTransparent; }
class MeshState {
public:
QVector<glm::mat4> clusterMatrices;
gpu::BufferPointer clusterBuffer;
};
const MeshState& getMeshState(int index) { return _meshStates.at(index); }
uint32_t getGeometryCounter() const { return _deleteGeometryCounter; }
const QMap<render::ItemID, render::PayloadPointer>& getRenderItems() const { return _modelMeshRenderItems; }
public slots:
void loadURLFinished(bool success);
@ -297,18 +304,7 @@ protected:
bool _snappedToRegistrationPoint; /// are we currently snapped to a registration point
glm::vec3 _registrationPoint = glm::vec3(0.5f); /// the point in model space our center is snapped to
class MeshState {
public:
QVector<glm::mat4> clusterMatrices;
QVector<glm::mat4> cauterizedClusterMatrices;
gpu::BufferPointer clusterBuffer;
gpu::BufferPointer cauterizedClusterBuffer;
};
QVector<MeshState> _meshStates;
std::unordered_set<int> _cauterizeBoneSet;
bool _cauterizeBones;
virtual void initJointStates();
@ -341,7 +337,7 @@ protected:
protected:
void deleteGeometry();
virtual void deleteGeometry();
void initJointTransforms();
QVector<float> _blendshapeCoefficients;
@ -370,12 +366,11 @@ protected:
void recalculateMeshBoxes(bool pickAgainstTriangles = false);
void createRenderItemSet();
void createVisibleRenderItemSet();
void createCollisionRenderItemSet();
virtual void createVisibleRenderItemSet();
virtual void createCollisionRenderItemSet();
bool _isWireframe;
// debug rendering support
void renderDebugMeshBoxes(gpu::Batch& batch);
int _debugMeshBoxesID = GeometryCache::UNKNOWN_ID;
@ -389,6 +384,8 @@ protected:
QSet<std::shared_ptr<ModelMeshPartPayload>> _modelMeshRenderItemsSet;
QMap<render::ItemID, render::PayloadPointer> _modelMeshRenderItems;
render::ItemIDs _modelMeshRenderItemIDs;
bool _addedToScene { false }; // has been added to scene
bool _needsFixupInScene { true }; // needs to be removed/re-added to scene
bool _needsReload { true };

View file

@ -53,14 +53,15 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
ShapePlumberPointer shapePlumber = std::make_shared<ShapePlumber>();
initDeferredPipelines(*shapePlumber);
// Extract opaques / transparents / lights / overlays
const auto opaques = items[0];
const auto transparents = items[1];
const auto lights = items[2];
const auto overlayOpaques = items[3];
const auto overlayTransparents = items[4];
const auto background = items[5];
const auto spatialSelection = items[6];
// Extract opaques / transparents / lights / metas / overlays / background
const auto opaques = items[RenderFetchCullSortTask::OPAQUE_SHAPE];
const auto transparents = items[RenderFetchCullSortTask::TRANSPARENT_SHAPE];
const auto lights = items[RenderFetchCullSortTask::LIGHT];
const auto metas = items[RenderFetchCullSortTask::META];
const auto overlayOpaques = items[RenderFetchCullSortTask::OVERLAY_OPAQUE_SHAPE];
const auto overlayTransparents = items[RenderFetchCullSortTask::OVERLAY_TRANSPARENT_SHAPE];
const auto background = items[RenderFetchCullSortTask::BACKGROUND];
const auto spatialSelection = items[RenderFetchCullSortTask::SPATIAL_SELECTION];
// Prepare deferred, generate the shared Deferred Frame Transform
const auto deferredFrameTransform = addJob<GenerateDeferredFrameTransform>("DeferredFrameTransform");
@ -158,6 +159,11 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
// Debugging stages
{
// Bounds do not draw on stencil buffer, so they must come last
addJob<DrawBounds>("DrawMetaBounds", metas);
// Debugging Deferred buffer job
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);

View file

@ -24,32 +24,41 @@
#include <gpu/StandardShaderLib.h>
#include <render/drawItemBounds_vert.h>
#include <render/drawItemBounds_frag.h>
#include "nop_frag.h"
using namespace render;
extern void initForwardPipelines(ShapePlumber& plumber);
RenderForwardTask::RenderForwardTask(RenderFetchCullSortTask::Output items) {
// Extract opaques / transparents / lights / overlays
const auto opaques = items[0];
const auto transparents = items[1];
const auto lights = items[2];
const auto overlayOpaques = items[3];
const auto overlayTransparents = items[4];
const auto background = items[5];
// Prepare the ShapePipelines
ShapePlumberPointer shapePlumber = std::make_shared<ShapePlumber>();
initForwardPipelines(*shapePlumber);
// Extract opaques / transparents / lights / metas / overlays / background
const auto opaques = items[RenderFetchCullSortTask::OPAQUE_SHAPE];
const auto transparents = items[RenderFetchCullSortTask::TRANSPARENT_SHAPE];
const auto lights = items[RenderFetchCullSortTask::LIGHT];
const auto metas = items[RenderFetchCullSortTask::META];
const auto overlayOpaques = items[RenderFetchCullSortTask::OVERLAY_OPAQUE_SHAPE];
const auto overlayTransparents = items[RenderFetchCullSortTask::OVERLAY_TRANSPARENT_SHAPE];
const auto background = items[RenderFetchCullSortTask::BACKGROUND];
const auto spatialSelection = items[RenderFetchCullSortTask::SPATIAL_SELECTION];
const auto framebuffer = addJob<PrepareFramebuffer>("PrepareFramebuffer");
addJob<Draw>("DrawOpaques", opaques, shapePlumber);
addJob<Stencil>("Stencil");
addJob<DrawBackground>("DrawBackground", background);
// bounds do not draw on stencil buffer, so they must come last
// Bounds do not draw on stencil buffer, so they must come last
addJob<DrawBounds>("DrawBounds", opaques);
// Blit!
addJob<Blit>("Blit", framebuffer);
}
void PrepareFramebuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, gpu::FramebufferPointer& framebuffer) {
void PrepareFramebuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
gpu::FramebufferPointer& framebuffer) {
auto framebufferCache = DependencyManager::get<FramebufferCache>();
auto framebufferSize = framebufferCache->getFrameBufferSize();
glm::uvec2 frameSize(framebufferSize.width(), framebufferSize.height());
@ -89,30 +98,8 @@ void PrepareFramebuffer::run(const SceneContextPointer& sceneContext, const Rend
framebuffer = _framebuffer;
}
const gpu::PipelinePointer DrawBounds::getPipeline() {
if (!_boundsPipeline) {
auto vs = gpu::Shader::createVertex(std::string(drawItemBounds_vert));
auto ps = gpu::Shader::createPixel(std::string(drawItemBounds_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
_cornerLocation = program->getUniforms().findLocation("inBoundPos");
_scaleLocation = program->getUniforms().findLocation("inBoundDim");
auto state = std::make_shared<gpu::State>();
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);
_boundsPipeline = gpu::Pipeline::create(program, state);
}
return _boundsPipeline;
}
void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& items) {
void Draw::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
const Inputs& items) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
@ -127,23 +114,50 @@ void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContex
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
// Bind program
batch.setPipeline(getPipeline());
assert(_cornerLocation >= 0);
assert(_scaleLocation >= 0);
// Render bounds
for (const auto& item : items) {
batch._glUniform3fv(_cornerLocation, 1, (const float*)(&item.bound.getCorner()));
batch._glUniform3fv(_scaleLocation, 1, (const float*)(&item.bound.getScale()));
static const int NUM_VERTICES_PER_CUBE = 24;
batch.draw(gpu::LINES, NUM_VERTICES_PER_CUBE, 0);
}
// Render items
renderStateSortShapes(sceneContext, renderContext, _shapePlumber, items, -1);
});
args->_batch = nullptr;
}
void DrawBackground::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& items) {
const gpu::PipelinePointer Stencil::getPipeline() {
if (!_stencilPipeline) {
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(nop_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::makeProgram(*program);
auto state = std::make_shared<gpu::State>();
state->setDepthTest(true, false, gpu::LESS_EQUAL);
const gpu::int8 STENCIL_OPAQUE = 1;
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(STENCIL_OPAQUE, 0xFF, gpu::ALWAYS,
gpu::State::STENCIL_OP_REPLACE,
gpu::State::STENCIL_OP_REPLACE,
gpu::State::STENCIL_OP_KEEP));
_stencilPipeline = gpu::Pipeline::create(program, state);
}
return _stencilPipeline;
}
void Stencil::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
batch.enableStereo(false);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
batch.setPipeline(getPipeline());
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
args->_batch = nullptr;
}
void DrawBackground::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
const Inputs& background) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
@ -161,7 +175,9 @@ void DrawBackground::run(const SceneContextPointer& sceneContext, const RenderCo
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
renderItems(sceneContext, renderContext, items);
renderItems(sceneContext, renderContext, background);
});
args->_batch = nullptr;
}

View file

@ -25,26 +25,38 @@ public:
class PrepareFramebuffer {
public:
using JobModel = render::Job::ModelO<PrepareFramebuffer, gpu::FramebufferPointer>;
using Inputs = gpu::FramebufferPointer;
using JobModel = render::Job::ModelO<PrepareFramebuffer, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, gpu::FramebufferPointer& framebuffer);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
gpu::FramebufferPointer& framebuffer);
private:
gpu::FramebufferPointer _framebuffer;
};
class DrawBounds {
class Draw {
public:
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBounds, Inputs>;
using JobModel = render::Job::ModelI<Draw, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& items);
Draw(const render::ShapePlumberPointer& shapePlumber) : _shapePlumber(shapePlumber) {}
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
const Inputs& items);
private:
render::ShapePlumberPointer _shapePlumber;
};
class Stencil {
public:
using JobModel = render::Job::Model<Stencil>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
private:
const gpu::PipelinePointer getPipeline();
gpu::PipelinePointer _boundsPipeline;
int _cornerLocation { -1 };
int _scaleLocation { -1 };
gpu::PipelinePointer _stencilPipeline;
};
class DrawBackground {
@ -52,7 +64,8 @@ public:
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBackground, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& background);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
const Inputs& background);
};
#endif // hifi_RenderForwardTask_h

View file

@ -10,6 +10,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <functional>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
@ -32,6 +34,13 @@
#include "model_normal_map_frag.h"
#include "model_normal_specular_map_frag.h"
#include "model_specular_map_frag.h"
#include "forward_model_frag.h"
#include "forward_model_unlit_frag.h"
#include "forward_model_normal_map_frag.h"
#include "forward_model_normal_specular_map_frag.h"
#include "forward_model_specular_map_frag.h"
#include "model_lightmap_frag.h"
#include "model_lightmap_normal_map_frag.h"
#include "model_lightmap_normal_specular_map_frag.h"
@ -47,41 +56,17 @@
using namespace render;
using namespace std::placeholders;
gpu::BufferView getDefaultMaterialBuffer() {
model::Material::Schema schema;
schema._albedo = vec3(1.0f);
schema._opacity = 1.0f;
schema._metallic = 0.1f;
schema._roughness = 0.9f;
return gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(model::Material::Schema), (const gpu::Byte*) &schema));
}
void initOverlay3DPipelines(ShapePlumber& plumber);
void initDeferredPipelines(ShapePlumber& plumber);
void initForwardPipelines(ShapePlumber& plumber);
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
void addPlumberPipeline(ShapePlumber& plumber,
const ShapeKey& key, const gpu::ShaderPointer& vertex, const gpu::ShaderPointer& pixel);
// Set a default material
if (pipeline.locations->materialBufferUnit >= 0) {
static const gpu::BufferView OPAQUE_SCHEMA_BUFFER = getDefaultMaterialBuffer();
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::MATERIAL, OPAQUE_SCHEMA_BUFFER);
}
}
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
batchSetter(pipeline, batch);
// Set the light
if (pipeline.locations->lightBufferUnit >= 0) {
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(batch,
pipeline.locations->lightBufferUnit,
pipeline.locations->lightAmbientBufferUnit,
pipeline.locations->lightAmbientMapUnit);
}
}
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void initOverlay3DPipelines(ShapePlumber& plumber) {
auto vertex = gpu::Shader::createVertex(std::string(overlay3D_vert));
@ -130,50 +115,6 @@ void initOverlay3DPipelines(ShapePlumber& plumber) {
}
void initDeferredPipelines(render::ShapePlumber& plumber) {
using Key = render::ShapeKey;
using ShaderPointer = gpu::ShaderPointer;
auto addPipeline = [&plumber](const Key& key, const ShaderPointer& vertexShader, const ShaderPointer& pixelShader) {
// These keyvalues' pipelines will be added by this lamdba in addition to the key passed
assert(!key.isWireFrame());
assert(!key.isDepthBiased());
assert(key.isCullFace());
ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
for (int i = 0; i < 8; i++) {
bool isCulled = (i & 1);
bool isBiased = (i & 2);
bool isWireframed = (i & 4);
ShapeKey::Builder builder(key);
auto state = std::make_shared<gpu::State>();
// Depth test depends on transparency
state->setDepthTest(true, !key.isTranslucent(), gpu::LESS_EQUAL);
state->setBlendFunction(key.isTranslucent(),
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
if (!isCulled) {
builder.withoutCullFace();
}
state->setCullMode(isCulled ? gpu::State::CULL_BACK : gpu::State::CULL_NONE);
if (isWireframed) {
builder.withWireframe();
state->setFillMode(gpu::State::FILL_LINE);
}
if (isBiased) {
builder.withDepthBias();
state->setDepthBias(1.0f);
state->setDepthBiasSlopeScale(1.0f);
}
plumber.addPipeline(builder.build(), program, state,
key.isTranslucent() ? &lightBatchSetter : &batchSetter);
}
};
// Vertex shaders
auto modelVertex = gpu::Shader::createVertex(std::string(model_vert));
auto modelNormalMapVertex = gpu::Shader::createVertex(std::string(model_normal_map_vert));
@ -198,6 +139,8 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
auto modelLightmapSpecularMapPixel = gpu::Shader::createPixel(std::string(model_lightmap_specular_map_frag));
auto modelLightmapNormalSpecularMapPixel = gpu::Shader::createPixel(std::string(model_lightmap_normal_specular_map_frag));
using Key = render::ShapeKey;
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
// TODO: Refactor this to use a filter
// Opaques
addPipeline(
@ -281,5 +224,132 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
addPipeline(
Key::Builder().withSkinned().withDepthOnly(),
skinModelShadowVertex, modelShadowPixel);
}
void initForwardPipelines(render::ShapePlumber& plumber) {
// Vertex shaders
auto modelVertex = gpu::Shader::createVertex(std::string(model_vert));
auto modelNormalMapVertex = gpu::Shader::createVertex(std::string(model_normal_map_vert));
auto skinModelVertex = gpu::Shader::createVertex(std::string(skin_model_vert));
auto skinModelNormalMapVertex = gpu::Shader::createVertex(std::string(skin_model_normal_map_vert));
// Pixel shaders
auto modelPixel = gpu::Shader::createPixel(std::string(forward_model_frag));
auto modelUnlitPixel = gpu::Shader::createPixel(std::string(forward_model_unlit_frag));
auto modelNormalMapPixel = gpu::Shader::createPixel(std::string(forward_model_normal_map_frag));
auto modelSpecularMapPixel = gpu::Shader::createPixel(std::string(forward_model_specular_map_frag));
auto modelNormalSpecularMapPixel = gpu::Shader::createPixel(std::string(forward_model_normal_specular_map_frag));
using Key = render::ShapeKey;
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
// Opaques
addPipeline(
Key::Builder(),
modelVertex, modelPixel);
addPipeline(
Key::Builder().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSpecular(),
modelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withTangents().withSpecular(),
modelNormalMapVertex, modelNormalSpecularMapPixel);
// Skinned
addPipeline(
Key::Builder().withSkinned(),
skinModelVertex, modelPixel);
addPipeline(
Key::Builder().withSkinned().withTangents(),
skinModelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSkinned().withSpecular(),
skinModelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withSkinned().withTangents().withSpecular(),
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
}
void addPlumberPipeline(ShapePlumber& plumber,
const ShapeKey& key, const gpu::ShaderPointer& vertex, const gpu::ShaderPointer& pixel) {
// These key-values' pipelines are added by this functor in addition to the key passed
assert(!key.isWireFrame());
assert(!key.isDepthBiased());
assert(key.isCullFace());
gpu::ShaderPointer program = gpu::Shader::createProgram(vertex, pixel);
for (int i = 0; i < 8; i++) {
bool isCulled = (i & 1);
bool isBiased = (i & 2);
bool isWireframed = (i & 4);
auto state = std::make_shared<gpu::State>();
// Depth test depends on transparency
state->setDepthTest(true, !key.isTranslucent(), gpu::LESS_EQUAL);
state->setBlendFunction(key.isTranslucent(),
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
ShapeKey::Builder builder(key);
if (!isCulled) {
builder.withoutCullFace();
}
state->setCullMode(isCulled ? gpu::State::CULL_BACK : gpu::State::CULL_NONE);
if (isWireframed) {
builder.withWireframe();
state->setFillMode(gpu::State::FILL_LINE);
}
if (isBiased) {
builder.withDepthBias();
state->setDepthBias(1.0f);
state->setDepthBiasSlopeScale(1.0f);
}
plumber.addPipeline(builder.build(), program, state,
key.isTranslucent() ? &lightBatchSetter : &batchSetter);
}
}
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
// Set a default material
if (pipeline.locations->materialBufferUnit >= 0) {
// Create a default schema
static bool isMaterialSet = false;
static model::Material material;
if (!isMaterialSet) {
material.setAlbedo(vec3(1.0f));
material.setOpacity(1.0f);
material.setMetallic(0.1f);
material.setRoughness(0.9f);
isMaterialSet = true;
}
// Set a default schema
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::MATERIAL, material.getSchemaBuffer());
}
}
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set the batch
batchSetter(pipeline, batch);
// Set the light
if (pipeline.locations->lightBufferUnit >= 0) {
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(batch,
pipeline.locations->lightBufferUnit,
pipeline.locations->lightAmbientBufferUnit,
pipeline.locations->lightAmbientMapUnit);
}
}

View file

@ -0,0 +1,59 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// model.frag
// fragment shader
//
// Created by Andrzej Kapolka on 10/14/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
in vec4 _position;
in vec3 _normal;
in vec3 _color;
in vec2 _texCoord0;
in vec2 _texCoord1;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
float scattering = getMaterialScattering(mat);
packDeferredFragment(
normalize(_normal.xyz),
opacity,
albedo,
roughness,
getMaterialMetallic(mat),
emissive,
occlusionTex,
scattering);
}

View file

@ -0,0 +1,64 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// model_normal_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 10/29/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, NORMAL, _SCRIBE_NULL, EMISSIVE, OCCLUSION, SCATTERING)$>
in vec4 _position;
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec3 _normal;
in vec3 _tangent;
in vec3 _color;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, normalTex, _SCRIBE_NULL, emissiveTex, scatteringTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
vec3 viewNormal;
<$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
float scattering = getMaterialScattering(mat);
<$evalMaterialScattering(scatteringTex, scattering, matKey, scattering)$>;
packDeferredFragment(
viewNormal,
opacity,
albedo,
roughness,
getMaterialMetallic(mat),
emissive,
occlusionTex,
scattering);
}

View file

@ -0,0 +1,66 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// model_normal_specular_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/6/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, NORMAL, METALLIC, EMISSIVE, OCCLUSION)$>
in vec4 _position;
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec3 _normal;
in vec3 _tangent;
in vec3 _color;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, normalTex, metallicTex, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
vec3 viewNormal;
<$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
float metallic = getMaterialMetallic(mat);
<$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
float scattering = getMaterialScattering(mat);
packDeferredFragment(
normalize(viewNormal.xyz),
opacity,
albedo,
roughness,
metallic,
emissive,
occlusionTex,
scattering);
}

View file

@ -0,0 +1,63 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// model_specular_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/6/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, METALLIC, EMISSIVE, OCCLUSION)$>
in vec4 _position;
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec3 _normal;
in vec3 _color;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, metallicTex, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
float metallic = getMaterialMetallic(mat);
<$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
float scattering = getMaterialScattering(mat);
packDeferredFragment(
normalize(_normal),
opacity,
albedo,
roughness,
metallic,
emissive,
occlusionTex,
scattering);
}

View file

@ -0,0 +1,45 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// material_opaque_unlit.frag
// fragment shader
//
// Created by Sam Gateau on 5/5/2016.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include LightingModel.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>
in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
packDeferredFragmentUnlit(
normalize(_normal),
opacity,
albedo * isUnlitEnabled());
}

View file

@ -0,0 +1,16 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// nop.frag
// fragment shader
//
// Created by Zach Pomerantz on 1/3/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
void main(void) {
}

View file

@ -170,7 +170,6 @@ namespace render {
for (size_t i = 0; i < NUM_FILTERS; i++) {
if (_filters[i].test(itemKey)) {
outItems[i].template edit<ItemBounds>().emplace_back(itemBound);
break;
}
}
}

View file

@ -19,6 +19,10 @@
#include <ViewFrustum.h>
#include <gpu/Context.h>
#include <drawItemBounds_vert.h>
#include <drawItemBounds_frag.h>
using namespace render;
void render::renderItems(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ItemBounds& inItems, int maxDrawnItems) {
@ -134,3 +138,59 @@ void DrawLight::run(const SceneContextPointer& sceneContext, const RenderContext
auto config = std::static_pointer_cast<Config>(renderContext->jobConfig);
config->setNumDrawn((int)inLights.size());
}
const gpu::PipelinePointer DrawBounds::getPipeline() {
if (!_boundsPipeline) {
auto vs = gpu::Shader::createVertex(std::string(drawItemBounds_vert));
auto ps = gpu::Shader::createPixel(std::string(drawItemBounds_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
_cornerLocation = program->getUniforms().findLocation("inBoundPos");
_scaleLocation = program->getUniforms().findLocation("inBoundDim");
auto state = std::make_shared<gpu::State>();
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);
_boundsPipeline = gpu::Pipeline::create(program, state);
}
return _boundsPipeline;
}
void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
const Inputs& items) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
// Setup projection
glm::mat4 projMat;
Transform viewMat;
args->getViewFrustum().evalProjectionMatrix(projMat);
args->getViewFrustum().evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
// Bind program
batch.setPipeline(getPipeline());
assert(_cornerLocation >= 0);
assert(_scaleLocation >= 0);
// Render bounds
for (const auto& item : items) {
batch._glUniform3fv(_cornerLocation, 1, (const float*)(&item.bound.getCorner()));
batch._glUniform3fv(_scaleLocation, 1, (const float*)(&item.bound.getScale()));
static const int NUM_VERTICES_PER_CUBE = 24;
batch.draw(gpu::LINES, NUM_VERTICES_PER_CUBE, 0);
}
});
}

View file

@ -50,6 +50,27 @@ protected:
int _maxDrawn; // initialized by Config
};
class DrawBounds {
public:
class Config : public render::JobConfig {
public:
Config() : JobConfig(false) {}
};
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBounds, Inputs, Config>;
void configure(const Config& configuration) {}
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
const Inputs& items);
private:
const gpu::PipelinePointer getPipeline();
gpu::PipelinePointer _boundsPipeline;
int _cornerLocation { -1 };
int _scaleLocation { -1 };
};
}
#endif // hifi_render_DrawTask_h

View file

@ -38,6 +38,7 @@ public:
enum FlagBit {
TYPE_SHAPE = 0, // Item is a Shape
TYPE_LIGHT, // Item is a Light
TYPE_META, // Item is a Meta: meaning it is used to represent a higher-level object, potentially represented by other render items
TRANSLUCENT, // Transparent and not opaque, for some odd reason TRANSPARENCY doesn't work...
VIEW_SPACE, // Transformed in view space, and not in world space
DYNAMIC, // Dynamic and bound will change unlike static item
@ -72,6 +73,7 @@ public:
Builder& withTypeShape() { _flags.set(TYPE_SHAPE); return (*this); }
Builder& withTypeLight() { _flags.set(TYPE_LIGHT); return (*this); }
Builder& withTypeMeta() { _flags.set(TYPE_META); return (*this); }
Builder& withTransparent() { _flags.set(TRANSLUCENT); return (*this); }
Builder& withViewSpace() { _flags.set(VIEW_SPACE); return (*this); }
Builder& withDynamic() { _flags.set(DYNAMIC); return (*this); }
@ -91,6 +93,7 @@ public:
bool isShape() const { return _flags[TYPE_SHAPE]; }
bool isLight() const { return _flags[TYPE_LIGHT]; }
bool isMeta() const { return _flags[TYPE_META]; }
bool isOpaque() const { return !_flags[TRANSLUCENT]; }
bool isTransparent() const { return _flags[TRANSLUCENT]; }
@ -150,6 +153,7 @@ public:
Builder& withTypeShape() { _value.set(ItemKey::TYPE_SHAPE); _mask.set(ItemKey::TYPE_SHAPE); return (*this); }
Builder& withTypeLight() { _value.set(ItemKey::TYPE_LIGHT); _mask.set(ItemKey::TYPE_LIGHT); return (*this); }
Builder& withTypeMeta() { _value.set(ItemKey::TYPE_META); _mask.set(ItemKey::TYPE_META); return (*this); }
Builder& withOpaque() { _value.reset(ItemKey::TRANSLUCENT); _mask.set(ItemKey::TRANSLUCENT); return (*this); }
Builder& withTransparent() { _value.set(ItemKey::TRANSLUCENT); _mask.set(ItemKey::TRANSLUCENT); return (*this); }
@ -179,6 +183,7 @@ public:
static Builder opaqueShape() { return Builder().withTypeShape().withOpaque().withWorldSpace(); }
static Builder transparentShape() { return Builder().withTypeShape().withTransparent().withWorldSpace(); }
static Builder light() { return Builder().withTypeLight(); }
static Builder meta() { return Builder().withTypeMeta(); }
static Builder background() { return Builder().withViewSpace().withLayered(); }
static Builder opaqueShapeLayered() { return Builder().withTypeShape().withOpaque().withWorldSpace().withLayered(); }
static Builder transparentShapeLayered() { return Builder().withTypeShape().withTransparent().withWorldSpace().withLayered(); }
@ -210,6 +215,25 @@ inline QDebug operator<<(QDebug debug, const ItemFilter& me) {
using ItemID = uint32_t;
using ItemCell = int32_t;
// A few typedefs for standard containers of ItemIDs
using ItemIDs = std::vector<ItemID>;
using ItemIDSet = std::set<ItemID>;
// Handy type to just pass the ID and the bound of an item
class ItemBound {
public:
ItemBound(ItemID id) : id(id) { }
ItemBound(ItemID id, const AABox& bound) : id(id), bound(bound) { }
ItemID id;
AABox bound;
};
// many Item Bounds in a vector
using ItemBounds = std::vector<ItemBound>;
// Item is the proxy to a bounded "object" in the scene
// An item is described by its Key
class Item {
public:
typedef std::vector<Item> Vector;
@ -295,6 +319,8 @@ public:
virtual const ShapeKey getShapeKey() const = 0;
virtual uint32_t fetchMetaSubItems(ItemIDs& subItems) const = 0;
~PayloadInterface() {}
// Status interface is local to the base class
@ -313,6 +339,9 @@ public:
Item() {}
~Item() {}
// Item exists if it has a valid payload
bool exist() const { return (bool)(_payload); }
// Main scene / item management interface reset/update/kill
void resetPayload(const PayloadPointer& payload);
void resetCell(ItemCell cell = INVALID_CELL, bool _small = false) { _cell = cell; _key.setSmaller(_small); }
@ -339,6 +368,9 @@ public:
// Shape Type Interface
const ShapeKey getShapeKey() const { return _payload->getShapeKey(); }
// Meta Type Interface
uint32_t fetchMetaSubItems(ItemIDs& subItems) const { return _payload->fetchMetaSubItems(subItems); }
// Access the status
const StatusPointer& getStatus() const { return _payload->getStatus(); }
@ -370,10 +402,7 @@ inline QDebug operator<<(QDebug debug, const Item& item) {
return debug;
}
// THe Payload class is the real Payload to be used
// THis allow anything to be turned into a Payload as long as the required interface functions are available
// When creating a new kind of payload from a new "stuff" class then you need to create specialized version for "stuff"
// of the Payload interface
// Item shared interface supported by the payload
template <class T> const ItemKey payloadGetKey(const std::shared_ptr<T>& payloadData) { return ItemKey(); }
template <class T> const Item::Bound payloadGetBound(const std::shared_ptr<T>& payloadData) { return Item::Bound(); }
template <class T> int payloadGetLayer(const std::shared_ptr<T>& payloadData) { return 0; }
@ -385,6 +414,14 @@ template <class T> void payloadRender(const std::shared_ptr<T>& payloadData, Ren
// implying that the shape will setup its own pipeline without the use of the ShapeKey.
template <class T> const ShapeKey shapeGetShapeKey(const std::shared_ptr<T>& payloadData) { return ShapeKey::Builder::ownPipeline(); }
// Meta Type Interface
// Meta items act as the grouping object for several sub items (typically shapes).
template <class T> uint32_t metaFetchMetaSubItems(const std::shared_ptr<T>& payloadData, ItemIDs& subItems) { return 0; }
// The Payload class is the real Payload to be used
// This allows anything to be turned into a Payload as long as the required interface functions are available
// When creating a new kind of payload from a new "stuff" class, you need to create a specialized version
// of the Payload interface for "stuff"
template <class T> class Payload : public Item::PayloadInterface {
public:
typedef std::shared_ptr<T> DataPointer;
@ -403,6 +440,9 @@ public:
// Shape Type interface
virtual const ShapeKey getShapeKey() const override { return shapeGetShapeKey<T>(_data); }
// Meta Type Interface
virtual uint32_t fetchMetaSubItems(ItemIDs& subItems) const override { return metaFetchMetaSubItems<T>(_data, subItems); }
protected:
DataPointer _data;
@ -450,22 +490,6 @@ template <> const Item::Bound payloadGetBound(const FooPointer& foo) {
typedef Item::PayloadPointer PayloadPointer;
typedef std::vector< PayloadPointer > Payloads;
// A few typedefs for standard containers of ItemIDs
using ItemIDs = std::vector<ItemID>;
using ItemIDSet = std::set<ItemID>;
// Handy type to just pass the ID and the bound of an item
class ItemBound {
public:
ItemBound(ItemID id) : id(id) { }
ItemBound(ItemID id, const AABox& bound) : id(id), bound(bound) { }
ItemID id;
AABox bound;
};
// many Item Bounds in a vector
using ItemBounds = std::vector<ItemBound>;
// A map of items by ShapeKey to optimize rendering pipeline assignments
using ShapeBounds = std::unordered_map<ShapeKey, ItemBounds, ShapeKey::Hash, ShapeKey::KeyEqual>;
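Side note (not part of this commit): the new meta hooks follow the same template-specialization pattern that Item.h already uses for payloadGetKey and payloadGetBound. A minimal sketch, assuming a hypothetical Foo payload class whose _bound and _subItemIDs members are invented here for illustration, could look like this:

    #include <memory>
    #include <render/Item.h>

    // Hypothetical payload class, used only to illustrate the specializations.
    class Foo {
    public:
        render::Item::Bound _bound;
        render::ItemIDs _subItemIDs; // IDs of the render items this meta item groups
    };
    using FooPointer = std::shared_ptr<Foo>;

    namespace render {
        // Key and bound specializations, mirroring the existing Foo example in Item.h
        template <> const ItemKey payloadGetKey(const FooPointer& foo) {
            return ItemKey::Builder().withTypeMeta().build();
        }
        template <> const Item::Bound payloadGetBound(const FooPointer& foo) {
            return foo->_bound;
        }
        // Meta interface: copy out the grouped sub item IDs and report how many were added
        template <> uint32_t metaFetchMetaSubItems(const FooPointer& foo, ItemIDs& subItems) {
            subItems.insert(subItems.end(), foo->_subItemIDs.begin(), foo->_subItemIDs.end());
            return (uint32_t)foo->_subItemIDs.size();
        }
    }

With those in place, a Payload<Foo> item is matched by the new ItemFilter::Builder::meta() filter, and its sub items can be retrieved through Item::fetchMetaSubItems.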

View file

@ -29,33 +29,41 @@ RenderFetchCullSortTask::RenderFetchCullSortTask(CullFunctor cullFunctor) {
const auto nonspatialSelection = addJob<FetchNonspatialItems>("FetchOverlaySelection");
// Multi filter visible items into different buckets
const int NUM_FILTERS = 3;
const int NUM_SPATIAL_FILTERS = 4;
const int NUM_NON_SPATIAL_FILTERS = 3;
const int OPAQUE_SHAPE_BUCKET = 0;
const int TRANSPARENT_SHAPE_BUCKET = 1;
const int LIGHT_BUCKET = 2;
const int META_BUCKET = 3;
const int BACKGROUND_BUCKET = 2;
MultiFilterItem<NUM_FILTERS>::ItemFilterArray spatialFilters = { {
MultiFilterItem<NUM_SPATIAL_FILTERS>::ItemFilterArray spatialFilters = { {
ItemFilter::Builder::opaqueShape(),
ItemFilter::Builder::transparentShape(),
ItemFilter::Builder::light()
ItemFilter::Builder::light(),
ItemFilter::Builder::meta()
} };
MultiFilterItem<NUM_FILTERS>::ItemFilterArray nonspatialFilters = { {
MultiFilterItem<NUM_NON_SPATIAL_FILTERS>::ItemFilterArray nonspatialFilters = { {
ItemFilter::Builder::opaqueShape(),
ItemFilter::Builder::transparentShape(),
ItemFilter::Builder::background()
} };
const auto filteredSpatialBuckets = addJob<MultiFilterItem<NUM_FILTERS>>("FilterSceneSelection", culledSpatialSelection, spatialFilters).get<MultiFilterItem<NUM_FILTERS>::ItemBoundsArray>();
const auto filteredNonspatialBuckets = addJob<MultiFilterItem<NUM_FILTERS>>("FilterOverlaySelection", nonspatialSelection, nonspatialFilters).get<MultiFilterItem<NUM_FILTERS>::ItemBoundsArray>();
const auto filteredSpatialBuckets =
addJob<MultiFilterItem<NUM_SPATIAL_FILTERS>>("FilterSceneSelection", culledSpatialSelection, spatialFilters)
.get<MultiFilterItem<NUM_SPATIAL_FILTERS>::ItemBoundsArray>();
const auto filteredNonspatialBuckets =
addJob<MultiFilterItem<NUM_NON_SPATIAL_FILTERS>>("FilterOverlaySelection", nonspatialSelection, nonspatialFilters)
.get<MultiFilterItem<NUM_NON_SPATIAL_FILTERS>::ItemBoundsArray>();
// Extract opaques / transparents / lights / overlays
const auto opaques = addJob<DepthSortItems>("DepthSortOpaque", filteredSpatialBuckets[OPAQUE_SHAPE_BUCKET]);
const auto transparents = addJob<DepthSortItems>("DepthSortTransparent", filteredSpatialBuckets[TRANSPARENT_SHAPE_BUCKET], DepthSortItems(false));
const auto lights = filteredSpatialBuckets[LIGHT_BUCKET];
const auto metas = filteredSpatialBuckets[META_BUCKET];
const auto overlayOpaques = addJob<DepthSortItems>("DepthSortOverlayOpaque", filteredNonspatialBuckets[OPAQUE_SHAPE_BUCKET]);
const auto overlayTransparents = addJob<DepthSortItems>("DepthSortOverlayTransparent", filteredNonspatialBuckets[TRANSPARENT_SHAPE_BUCKET], DepthSortItems(false));
const auto background = filteredNonspatialBuckets[BACKGROUND_BUCKET];
setOutput(Output{{
opaques, transparents, lights, overlayOpaques, overlayTransparents, background, spatialSelection }});
opaques, transparents, lights, metas, overlayOpaques, overlayTransparents, background, spatialSelection }});
}

View file

@ -19,7 +19,21 @@
class RenderFetchCullSortTask : public render::Task {
public:
using Output = std::array<render::Varying, 7>;
enum Buckets {
OPAQUE_SHAPE = 0,
TRANSPARENT_SHAPE,
LIGHT,
META,
OVERLAY_OPAQUE_SHAPE,
OVERLAY_TRANSPARENT_SHAPE,
BACKGROUND,
SPATIAL_SELECTION,
NUM_BUCKETS
};
using Output = std::array<render::Varying, Buckets::NUM_BUCKETS>;
using JobModel = ModelO<RenderFetchCullSortTask>;
RenderFetchCullSortTask(render::CullFunctor cullFunctor);

View file

@ -58,7 +58,7 @@ ItemID Scene::allocateID() {
return _IDAllocator.fetch_add(1);
}
bool Scene::isAllocatedID(const ItemID& id) {
bool Scene::isAllocatedID(const ItemID& id) const {
return Item::isValidID(id) && (id < _numAllocatedItems.load());
}

View file

@ -61,7 +61,7 @@ public:
ItemID allocateID();
// Check that the ID is valid and allocated for this scene; this is a threadsafe call
bool isAllocatedID(const ItemID& id);
bool isAllocatedID(const ItemID& id) const;
// This is the total number of allocated items; this is a threadsafe call
size_t getNumItems() const { return _numAllocatedItems.load(); }
@ -78,6 +78,9 @@ public:
// WARNING: there is no check on the validity of the ID, so this could return a bad Item
const Item& getItem(const ItemID& id) const { return _items[id]; }
// Same as getItem, checking if the id is valid
const Item getItemSafe(const ItemID& id) const { if (isAllocatedID(id)) { return _items[id]; } else { return Item(); } }
// Access the spatialized items
const ItemSpatialTree& getSpatialTree() const { return _masterSpatialTree; }

View file

@ -371,6 +371,7 @@ protected:
class JobConfig : public QObject {
Q_OBJECT
Q_PROPERTY(double cpuRunTime READ getCPURunTime NOTIFY newStats()) //ms
Q_PROPERTY(bool enabled READ isEnabled WRITE setEnabled)
double _msCPURunTime{ 0.0 };
public:
@ -380,7 +381,7 @@ public:
JobConfig(bool enabled) : alwaysEnabled{ false }, enabled{ enabled } {}
bool isEnabled() { return alwaysEnabled || enabled; }
void setEnabled(bool enable) { enabled = enable; }
void setEnabled(bool enable) { enabled = alwaysEnabled || enable; }
bool alwaysEnabled{ true };
bool enabled{ true };

View file

@ -42,6 +42,11 @@ bool UsersScriptingInterface::getPersonalMuteStatus(const QUuid& nodeID) {
return DependencyManager::get<NodeList>()->isPersonalMutingNode(nodeID);
}
void UsersScriptingInterface::setAvatarGain(const QUuid& nodeID, float gain) {
// ask the NodeList to set the gain of the specified avatar
DependencyManager::get<NodeList>()->setAvatarGain(nodeID, gain);
}
void UsersScriptingInterface::kick(const QUuid& nodeID) {
// ask the NodeList to kick the user with the given session ID
DependencyManager::get<NodeList>()->kickNodeBySessionID(nodeID);

View file

@ -61,6 +61,15 @@ public slots:
*/
bool getPersonalMuteStatus(const QUuid& nodeID);
/**jsdoc
* Sets an avatar's gain for you and you only.
* Units are decibels (dB).
* @function Users.setAvatarGain
* @param {nodeID} nodeID The node or session ID of the user whose gain you want to modify.
* @param {float} gain The gain of the avatar you'd like to set. Units are dB.
*/
void setAvatarGain(const QUuid& nodeID, float gain);
/**jsdoc
* Kick another user.
* @function Users.kick
@ -126,9 +135,17 @@ signals:
/**jsdoc
* Notifies scripts of the username and machine fingerprint associated with a UUID.
* Username and machineFingerprint will be their default constructor output if the requesting user isn't an admin.
* @function Users.usernameFromIDReply
*/
void usernameFromIDReply(const QString& nodeID, const QString& username, const QString& machineFingerprint);
void usernameFromIDReply(const QString& nodeID, const QString& username, const QString& machineFingerprint, bool isAdmin);
/**jsdoc
* Notifies scripts that a user has disconnected from the domain.
* @function Users.avatarDisconnected
* @param {nodeID} nodeID The session ID of the avatar that has disconnected.
*/
void avatarDisconnected(const QUuid& nodeID);
private:
bool getRequestsDomainListData();

View file

@ -61,6 +61,13 @@ float Interpolate::interpolate3Points(float y1, float y2, float y3, float u) {
}
}
float Interpolate::simpleNonLinearBlend(float fraction) {
// uses arctan() to map a linear distribution in domain [0,1] to a non-linear blend (slow out, slow in) in range [0,1]
const float WIDTH = 20.0f;
const float INV_ARCTAN_WIDTH = 0.339875327433f; // 1 / (2 * atan(WIDTH/2))
return 0.5f + atanf(WIDTH * (fraction - 0.5f)) * INV_ARCTAN_WIDTH;
}
float Interpolate::calculateFadeRatio(quint64 start) {
const float FADE_TIME = 1.0f;
float t = 2.0f * std::min(((float)(usecTimestampNow() - start)) / ((float)(FADE_TIME * USECS_PER_SECOND)), 1.0f);
@ -69,4 +76,4 @@ float Interpolate::calculateFadeRatio(quint64 start) {
// The easing function isn't exactly 1 at t = 2, so we need to scale the whole function up slightly
const float EASING_SCALE = 1.001f;
return std::min(EASING_SCALE * fadeRatio, 1.0f);
}
}
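For reference (this note is not in the original source), the blend implemented by simpleNonLinearBlend is

    f(x) = \tfrac{1}{2} + \frac{\arctan\big(W\,(x - \tfrac{1}{2})\big)}{2\,\arctan(W/2)}, \qquad W = 20,

so f(0) = 0, f(1/2) = 1/2, and f(1) = 1, and the precomputed constant 1 / (2 * atan(10)) ≈ 0.339875327 is exactly the INV_ARCTAN_WIDTH value used above.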

View file

@ -25,6 +25,10 @@ public:
// pass through all three y values. Return value lies wholly within the range of y values passed in.
static float interpolate3Points(float y1, float y2, float y3, float u);
// returns smooth in and out blend between 0 and 1
// DANGER: assumes fraction is properly inside range [0, 1]
static float simpleNonLinearBlend(float fraction);
static float calculateFadeRatio(quint64 start);
};

View file

@ -18,6 +18,8 @@
#include <cctype>
#include <time.h>
#include <mutex>
#include <thread>
#include <set>
#include <glm/glm.hpp>
@ -1022,4 +1024,54 @@ bool getProcessorInfo(ProcessorInfo& info) {
#endif
return false;
}
}
const QString& getInterfaceSharedMemoryName() {
static const QString applicationName = "High Fidelity Interface - " + qgetenv("USERNAME");
return applicationName;
}
const std::vector<uint8_t>& getAvailableCores() {
static std::vector<uint8_t> availableCores;
#ifdef Q_OS_WIN
static std::once_flag once;
std::call_once(once, [&] {
DWORD_PTR defaultProcessAffinity = 0, defaultSystemAffinity = 0;
HANDLE process = GetCurrentProcess();
GetProcessAffinityMask(process, &defaultProcessAffinity, &defaultSystemAffinity);
for (uint64_t i = 0; i < sizeof(DWORD_PTR) * BITS_IN_BYTE; ++i) {
DWORD_PTR coreMask = 1;
coreMask <<= i;
if (0 != (defaultSystemAffinity & coreMask)) {
availableCores.push_back(i);
}
}
});
#endif
return availableCores;
}
void setMaxCores(uint8_t maxCores) {
#ifdef Q_OS_WIN
HANDLE process = GetCurrentProcess();
auto availableCores = getAvailableCores();
if (availableCores.size() <= maxCores) {
DWORD_PTR currentProcessAffinity = 0, currentSystemAffinity = 0;
GetProcessAffinityMask(process, &currentProcessAffinity, &currentSystemAffinity);
SetProcessAffinityMask(GetCurrentProcess(), currentSystemAffinity);
return;
}
DWORD_PTR newProcessAffinity = 0;
while (maxCores) {
int index = randIntInRange(0, (int)availableCores.size() - 1);
DWORD_PTR coreMask = 1;
coreMask <<= availableCores[index];
newProcessAffinity |= coreMask;
availableCores.erase(availableCores.begin() + index);
maxCores--;
}
SetProcessAffinityMask(process, newProcessAffinity);
#endif
}

View file

@ -231,4 +231,8 @@ struct ProcessorInfo {
bool getProcessorInfo(ProcessorInfo& info);
const QString& getInterfaceSharedMemoryName();
void setMaxCores(uint8_t maxCores);
#endif // hifi_SharedUtil_h

View file

@ -13,6 +13,7 @@ namespace hifi { namespace properties {
const char* CRASHED = "com.highfidelity.crashed";
const char* STEAM = "com.highfidelity.launchedFromSteam";
const char* LOGGER = "com.highfidelity.logger";
const char* OCULUS_STORE = "com.highfidelity.oculusStore";
const char* TEST = "com.highfidelity.test";
const char* TRACING = "com.highfidelity.tracing";

View file

@ -15,6 +15,7 @@ namespace hifi { namespace properties {
extern const char* CRASHED;
extern const char* STEAM;
extern const char* LOGGER;
extern const char* OCULUS_STORE;
extern const char* TEST;
extern const char* TRACING;

View file

@ -8,21 +8,31 @@
if (WIN32)
# we're using static GLEW, so define GLEW_STATIC
add_definitions(-DGLEW_STATIC)
# we're using static GLEW, so define GLEW_STATIC
add_definitions(-DGLEW_STATIC)
set(TARGET_NAME oculus)
setup_hifi_plugin(Multimedia)
link_hifi_libraries(shared gl gpu gpu-gl controllers ui
plugins ui-plugins display-plugins input-plugins
audio-client networking render-utils)
include_hifi_library_headers(octree)
add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
target_link_libraries(${TARGET_NAME} Winmm.lib)
# if we were passed an Oculus App ID for entitlement checks, send that along
if (DEFINED ENV{OCULUS_APP_ID})
add_definitions(-DOCULUS_APP_ID="$ENV{OCULUS_APP_ID}")
endif ()
set(TARGET_NAME oculus)
setup_hifi_plugin(Multimedia)
link_hifi_libraries(
shared gl gpu gpu-gl controllers ui
plugins ui-plugins display-plugins input-plugins
audio-client networking render-utils
)
include_hifi_library_headers(octree)
add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
target_link_libraries(${TARGET_NAME} Winmm.lib)
add_dependency_external_projects(LibOVRPlatform)
find_package(LibOVRPlatform REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVRPLATFORM_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVRPLATFORM_LIBRARIES})
endif()

View file

@ -15,8 +15,12 @@
#include <QtCore/QDir>
#include <QtCore/QProcessEnvironment>
#define OVRPL_DISABLED
#include <OVR_Platform.h>
#include <controllers/Input.h>
#include <controllers/Pose.h>
#include <shared/GlobalAppProperties.h>
#include <NumericalConstants.h>
Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
@ -89,6 +93,18 @@ ovrSession acquireOculusSession() {
return session;
}
#ifdef OCULUS_APP_ID
if (qApp->property(hifi::properties::OCULUS_STORE).toBool()) {
if (ovr_PlatformInitializeWindows(OCULUS_APP_ID) != ovrPlatformInitialize_Success) {
// we were unable to initialize the platform for entitlement check - fail the check
_quitRequested = true;
} else {
qCDebug(oculus) << "Performing Oculus Platform entitlement check";
ovr_Entitlement_GetIsViewerEntitled();
}
}
#endif
Q_ASSERT(0 == refCount);
ovrGraphicsLuid luid;
if (!OVR_SUCCESS(ovr_Create(&session, &luid))) {
@ -127,6 +143,35 @@ void handleOVREvents() {
_quitRequested = status.ShouldQuit;
_reorientRequested = status.ShouldRecenter;
#ifdef OCULUS_APP_ID
if (qApp->property(hifi::properties::OCULUS_STORE).toBool()) {
// pop messages to see if we got a return for an entitlement check
ovrMessageHandle message = ovr_PopMessage();
while (message) {
switch (ovr_Message_GetType(message)) {
case ovrMessage_Entitlement_GetIsViewerEntitled: {
if (!ovr_Message_IsError(message)) {
// this viewer is entitled, no need to flag anything
qCDebug(oculus) << "Oculus Platform entitlement check succeeded, proceeding normally";
} else {
// we failed the entitlement check, set our flag so the app can stop
qCDebug(oculus) << "Oculus Platform entitlement check failed, app will now quit" << OCULUS_APP_ID;
_quitRequested = true;
}
}
}
// free the message handle to clean up and avoid a leak
ovr_FreeMessage(message);
// pop the next message to check, if there is one
message = ovr_PopMessage();
}
}
#endif
}
bool quitRequested() {
@ -217,4 +262,4 @@ controller::Pose ovrControllerPoseToHandPose(
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
return pose;
}
}

View file

@ -0,0 +1,19 @@
var WAVE = 'http://cdn.rawgit.com/ambisonictoolkit/atk-sounds/aa31005c/stereo/Aurora_Surgit-Lux_Aeterna.wav';
var uuid = Entities.addEntity({
type: "Shape",
shape: "Icosahedron",
dimensions: Vec3.HALF,
script: Script.resolvePath('../../tutorials/entity_scripts/ambientSound.js'),
position: Vec3.sum(Vec3.multiply(5, Quat.getFront(MyAvatar.orientation)), MyAvatar.position),
userData: JSON.stringify({
soundURL: WAVE,
maxVolume: 0.1,
range: 25,
disabled: true,
grabbableKey: { wantsTrigger: true },
}),
lifetime: 600,
});
Script.scriptEnding.connect(function() {
Entities.deleteEntity(uuid);
});

View file

@ -13,7 +13,7 @@ var qml = Script.resolvePath('deferredLighting.qml');
var window = new OverlayWindow({
title: 'Lighting',
source: qml,
width: 400, height:220,
width: 400, height:280,
});
window.setPosition(Window.innerWidth - 420, 50);
window.closed.connect(function() { Script.stop(); });

View file

@ -74,7 +74,7 @@ Column {
Column {
spacing: 10
Repeater {
model: [ "Tone Mapping exposure:ToneMapping:exposure:5.0:-5.0"
model: [ "Tone Mapping Exposure:ToneMapping:exposure:5.0:-5.0"
]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
@ -88,7 +88,7 @@ Column {
Row {
Label {
text: "Debug Framebuffer"
text: "Tone Mapping Curve"
anchors.left: root.left
}
@ -109,6 +109,7 @@ Column {
}
Row {
id: framebuffer
spacing: 10
Label {
text: "Debug Framebuffer"
@ -156,5 +157,14 @@ Column {
onCurrentIndexChanged: { framebuffer.setDebugMode(currentIndex) }
}
}
Row {
id: metas
CheckBox {
text: "Draw Meta Bounds"
checked: Render.getConfig("DrawMetaBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawMetaBounds")["enabled"] = checked }
}
}
}

View file

@ -3082,7 +3082,7 @@ var handleHandMessages = function(channel, message, sender) {
Messages.messageReceived.connect(handleHandMessages);
var TARGET_UPDATE_HZ = 50; // 50hz good enough (no change in logic)
var TARGET_UPDATE_HZ = 60; // 50hz good enough, but we're using update
var BASIC_TIMER_INTERVAL_MS = 1000 / TARGET_UPDATE_HZ;
var lastInterval = Date.now();
@ -3095,7 +3095,7 @@ var updateTotalWork = 0;
var UPDATE_PERFORMANCE_DEBUGGING = false;
var updateIntervalTimer = Script.setInterval(function(){
function updateWrapper(){
intervalCount++;
var thisInterval = Date.now();
@ -3141,11 +3141,12 @@ var updateIntervalTimer = Script.setInterval(function(){
updateTotalWork = 0;
}
}, BASIC_TIMER_INTERVAL_MS);
}
Script.update.connect(updateWrapper);
function cleanup() {
Menu.removeMenuItem("Developer", "Show Grab Sphere");
Script.clearInterval(updateIntervalTimer);
Script.update.disconnect(updateWrapper);
rightController.cleanup();
leftController.cleanup();
Controller.disableMapping(MAPPING_NAME);

View file

@ -148,7 +148,6 @@
if (!canWriteAssets) {
console.log("ERROR: Clara.io FBX: File download cancelled because no permissions to write to Asset Server");
EventBridge.emitWebEvent(WARN_USER_NO_PERMISSIONS);
event.stopPropagation();
return;
}

View file

@ -134,7 +134,7 @@ EntityListTool = function(opts) {
Window.alert('There were no recent users of the ' + selectionManager.selections.length + ' selected objects.');
} else {
// No need to subscribe if we're just sending.
Messages.sendMessage('com.highfidelity.pal', JSON.stringify({method: 'select', params: [dedupped, true]}), 'local');
Messages.sendMessage('com.highfidelity.pal', JSON.stringify({method: 'select', params: [dedupped, true, false]}), 'local');
}
} else if (data.type == "delete") {
deleteSelectedEntities();

View file

@ -103,6 +103,8 @@ ExtendedOverlay.prototype.select = function (selected) {
return;
}
UserActivityLogger.palAction(selected ? "avatar_selected" : "avatar_deselected", this.key);
this.editOverlay({color: color(selected, this.hovering, this.audioLevel)});
if (this.model) {
this.model.editOverlay({textures: textures(selected)});
@ -231,7 +233,26 @@ pal.fromQml.connect(function (message) { // messages are {method, params}, like
break;
case 'refresh':
removeOverlays();
populateUserList();
populateUserList(message.params);
UserActivityLogger.palAction("refresh", "");
break;
case 'updateGain':
data = message.params;
if (data['isReleased']) {
// isReleased=true happens once at the end of a cycle of dragging the
// slider about, with the same gain as the last isReleased=false event,
// so we don't set the gain again in that case; this is also the only
// point at which we want to send an analytics event.
UserActivityLogger.palAction("avatar_gain_changed", data['sessionId']);
} else {
Users.setAvatarGain(data['sessionId'], data['gain']);
}
break;
case 'displayNameUpdate':
if (MyAvatar.displayName != message.params) {
MyAvatar.displayName = message.params;
UserActivityLogger.palAction("display_name_change", "");
}
break;
default:
print('Unrecognized message from Pal.qml:', JSON.stringify(message));
@ -250,7 +271,7 @@ function addAvatarNode(id) {
color: color(selected, false, 0.0),
ignoreRayIntersection: false}, selected, true);
}
function populateUserList() {
function populateUserList(selectData) {
var data = [];
AvatarList.getAvatarIdentifiers().sort().forEach(function (id) { // sorting the identifiers is just an aid for debugging
var avatar = AvatarList.getAvatar(id);
@ -258,14 +279,12 @@ function populateUserList() {
displayName: avatar.sessionDisplayName,
userName: '',
sessionId: id || '',
audioLevel: 0.0
audioLevel: 0.0,
admin: false
};
// If the current user is an admin OR
// they're requesting their own username ("id" is blank)...
if (Users.canKick || !id) {
// Request the username from the given UUID
Users.requestUsernameFromID(id);
}
// Request the username, fingerprint, and admin status from the given UUID
// Username and fingerprint return default constructor output if the requesting user isn't an admin
Users.requestUsernameFromID(id);
// Request personal mute status and ignore status
// from NodeList (as long as we're not requesting it for our own ID)
if (id) {
@ -276,20 +295,27 @@ function populateUserList() {
data.push(avatarPalDatum);
print('PAL data:', JSON.stringify(avatarPalDatum));
});
pal.sendToQml({method: 'users', params: data});
pal.sendToQml({ method: 'users', params: data });
if (selectData) {
selectData[2] = true;
pal.sendToQml({ method: 'select', params: selectData });
}
}
// The function that handles the reply from the server
function usernameFromIDReply(id, username, machineFingerprint) {
function usernameFromIDReply(id, username, machineFingerprint, isAdmin) {
var data;
// If the ID we've received is our ID...
if (MyAvatar.sessionUUID === id) {
// Set the data to contain specific strings.
data = ['', username];
} else {
data = ['', username, isAdmin];
} else if (Users.canKick) {
// Set the data to contain the ID and the username (if we have one)
// or fingerprint (if we don't have a username) string.
data = [id, username || machineFingerprint];
data = [id, username || machineFingerprint, isAdmin];
} else {
// Set the data to contain specific strings.
data = [id, '', isAdmin];
}
print('Username Data:', JSON.stringify(data));
// Ship the data off to QML
@ -366,7 +392,7 @@ function removeOverlays() {
function handleClick(pickRay) {
ExtendedOverlay.applyPickRay(pickRay, function (overlay) {
// Don't select directly. Tell qml, who will give us back a list of ids.
var message = {method: 'select', params: [[overlay.key], !overlay.selected]};
var message = {method: 'select', params: [[overlay.key], !overlay.selected, false]};
pal.sendToQml(message);
return true;
});
@ -448,50 +474,6 @@ triggerMapping.from(Controller.Standard.RTClick).peek().to(makeClickHandler(Cont
triggerMapping.from(Controller.Standard.LTClick).peek().to(makeClickHandler(Controller.Standard.LeftHand));
triggerPressMapping.from(Controller.Standard.RT).peek().to(makePressHandler(Controller.Standard.RightHand));
triggerPressMapping.from(Controller.Standard.LT).peek().to(makePressHandler(Controller.Standard.LeftHand));
//
// Manage the connection between the button and the window.
//
var toolBar = Toolbars.getToolbar("com.highfidelity.interface.toolbar.system");
var buttonName = "pal";
var button = toolBar.addButton({
objectName: buttonName,
imageURL: Script.resolvePath("assets/images/tools/people.svg"),
visible: true,
hoverState: 2,
defaultState: 1,
buttonState: 1,
alpha: 0.9
});
var isWired = false;
function off() {
if (isWired) { // It is not ok to disconnect these twice, hence guard.
Script.update.disconnect(updateOverlays);
Controller.mousePressEvent.disconnect(handleMouseEvent);
Controller.mouseMoveEvent.disconnect(handleMouseMoveEvent);
isWired = false;
}
triggerMapping.disable(); // It's ok if we disable twice.
triggerPressMapping.disable(); // see above
removeOverlays();
Users.requestsDomainListData = false;
}
function onClicked() {
if (!pal.visible) {
Users.requestsDomainListData = true;
populateUserList();
pal.raise();
isWired = true;
Script.update.connect(updateOverlays);
Controller.mousePressEvent.connect(handleMouseEvent);
Controller.mouseMoveEvent.connect(handleMouseMoveEvent);
triggerMapping.enable();
triggerPressMapping.enable();
} else {
off();
}
pal.setVisible(!pal.visible);
}
//
// Message from other scripts, such as edit.js
//
@ -523,6 +505,7 @@ var LOUDNESS_SCALE = 2.8 / 5.0;
var LOG2 = Math.log(2.0);
var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too)
var myData = {}; // we're not included in ExtendedOverlay.get.
var audioInterval;
function getAudioLevel(id) {
// the VU meter should work similarly to the one in AvatarInputs: log scale, exponentially averaged
@ -532,7 +515,6 @@ function getAudioLevel(id) {
var audioLevel = 0.0;
var data = id ? ExtendedOverlay.get(id) : myData;
if (!data) {
print('no data for', id);
return audioLevel;
}
@ -555,21 +537,83 @@ function getAudioLevel(id) {
return audioLevel;
}
function createAudioInterval() {
// we will update the audioLevels periodically
// TODO: tune for efficiency - especially with large numbers of avatars
return Script.setInterval(function () {
if (pal.visible) {
var param = {};
AvatarList.getAvatarIdentifiers().forEach(function (id) {
var level = getAudioLevel(id);
// qml didn't like an object with null/empty string for a key, so...
var userId = id || 0;
param[userId] = level;
});
pal.sendToQml({method: 'updateAudioLevel', params: param});
}
}, AUDIO_LEVEL_UPDATE_INTERVAL_MS);
}
// we will update the audioLevels periodically
// TODO: tune for efficiency - especially with large numbers of avatars
Script.setInterval(function () {
if (pal.visible) {
var param = {};
AvatarList.getAvatarIdentifiers().forEach(function (id) {
var level = getAudioLevel(id);
// qml didn't like an object with null/empty string for a key, so...
var userId = id || 0;
param[userId] = level;
});
pal.sendToQml({method: 'updateAudioLevel', params: param});
//
// Manage the connection between the button and the window.
//
var toolBar = Toolbars.getToolbar("com.highfidelity.interface.toolbar.system");
var buttonName = "pal";
var button = toolBar.addButton({
objectName: buttonName,
imageURL: Script.resolvePath("assets/images/tools/people.svg"),
visible: true,
hoverState: 2,
defaultState: 1,
buttonState: 1,
alpha: 0.9
});
var isWired = false;
var palOpenedAt;
function off() {
if (isWired) { // It is not ok to disconnect these twice, hence guard.
Script.update.disconnect(updateOverlays);
Controller.mousePressEvent.disconnect(handleMouseEvent);
Controller.mouseMoveEvent.disconnect(handleMouseMoveEvent);
isWired = false;
}
}, AUDIO_LEVEL_UPDATE_INTERVAL_MS);
triggerMapping.disable(); // It's ok if we disable twice.
triggerPressMapping.disable(); // see above
removeOverlays();
Users.requestsDomainListData = false;
if (palOpenedAt) {
var duration = new Date().getTime() - palOpenedAt;
UserActivityLogger.palOpened(duration / 1000.0);
palOpenedAt = 0; // just a falsy number is good enough.
}
if (audioInterval) {
Script.clearInterval(audioInterval);
}
}
function onClicked() {
if (!pal.visible) {
Users.requestsDomainListData = true;
populateUserList();
pal.raise();
isWired = true;
Script.update.connect(updateOverlays);
Controller.mousePressEvent.connect(handleMouseEvent);
Controller.mouseMoveEvent.connect(handleMouseMoveEvent);
triggerMapping.enable();
triggerPressMapping.enable();
audioInterval = createAudioInterval();
palOpenedAt = new Date().getTime();
} else {
off();
}
pal.setVisible(!pal.visible);
}
function avatarDisconnected(nodeID) {
// remove from the pal list
pal.sendToQml({method: 'avatarDisconnected', params: [nodeID]});
}
//
// Button state.
//
@@ -582,14 +626,16 @@ button.clicked.connect(onClicked);
pal.visibleChanged.connect(onVisibleChanged);
pal.closed.connect(off);
Users.usernameFromIDReply.connect(usernameFromIDReply);
function clearIgnoredInQMLAndClosePAL() {
pal.sendToQml({ method: 'clearIgnored' });
Users.avatarDisconnected.connect(avatarDisconnected);
function clearLocalQMLDataAndClosePAL() {
pal.sendToQml({ method: 'clearLocalQMLData' });
if (pal.visible) {
onClicked(); // Close the PAL
}
}
Window.domainChanged.connect(clearIgnoredInQMLAndClosePAL);
Window.domainConnectionRefused.connect(clearIgnoredInQMLAndClosePAL);
Window.domainChanged.connect(clearLocalQMLDataAndClosePAL);
Window.domainConnectionRefused.connect(clearLocalQMLDataAndClosePAL);
//
// Cleanup.
@@ -600,10 +646,11 @@ Script.scriptEnding.connect(function () {
pal.visibleChanged.disconnect(onVisibleChanged);
pal.closed.disconnect(off);
Users.usernameFromIDReply.disconnect(usernameFromIDReply);
Window.domainChanged.disconnect(clearIgnoredInQMLAndClosePAL);
Window.domainConnectionRefused.disconnect(clearIgnoredInQMLAndClosePAL);
Window.domainChanged.disconnect(clearLocalQMLDataAndClosePAL);
Window.domainConnectionRefused.disconnect(clearLocalQMLDataAndClosePAL);
Messages.unsubscribe(CHANNEL);
Messages.messageReceived.disconnect(receiveMessage);
Users.avatarDisconnected.disconnect(avatarDisconnected);
off();
});

View file

@@ -1,34 +1,48 @@
// ambientSound.js
//
// This entity script will allow you to create an ambient sound that loops when a person is within a given
// range of this entity. Great way to add one or more ambisonic soundfields to your environment.
// range of this entity. Great way to add one or more ambisonic soundfields to your environment.
//
// In the userData section for the entity, add/edit three values:
// userData.soundURL should be a string giving the URL to the sound file. Defaults to a sample ambisonic clip if not set.
// In the userData section for the entity, add/edit three values:
// userData.soundURL should be a string giving the URL to the sound file. Defaults to a sample ambisonic clip if not set.
// userData.range should be an integer for the max distance away from the entity where the sound will be audible. Defaults to 100 meters if not set.
// userData.volume is the max volume at which the clip should play. Defaults to 1.0 (full volume).
// userData.maxVolume is the max volume at which the clip should play. Defaults to 1.0 full volume.
// userData.disabled is an optional boolean flag which can be used to disable the ambient sound. Defaults to false.
//
// The rotation of the entity is copied to the ambisonic field, so by rotating the entity you will rotate the
// direction from which a certain sound comes.
//
// The rotation of the entity is copied to the ambisonic field, so by rotating the entity you will rotate the
// direction from which a certain sound comes.
//
// Remember that the entity has to be visible to the user for the sound to play at all, so make sure the entity is
// large enough to be loaded at the range you set, particularly for large ranges.
//
// large enough to be loaded at the range you set, particularly for large ranges.
//
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
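//
//  As a sketch (the numeric values below are only illustrative; the URL is the same sample clip the
//  script itself falls back to), a complete userData string for such an entity might look like:
//
//      {
//          "soundURL": "http://hifi-content.s3.amazonaws.com/ken/samples/forest_ambiX.wav",
//          "range": 50,
//          "maxVolume": 0.8,
//          "disabled": false
//      }
//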
(function(){
(function(){
// This sample clip and range will be used if you don't add userData to the entity (see above)
var DEFAULT_RANGE = 100;
var DEFAULT_URL = "http://hifi-content.s3.amazonaws.com/ken/samples/forest_ambiX.wav";
var DEFAULT_VOLUME = 1.0;
var DEFAULT_USERDATA = {
soundURL: DEFAULT_URL,
range: DEFAULT_RANGE,
maxVolume: DEFAULT_VOLUME,
disabled: true,
grabbableKey: { wantsTrigger: true },
};
var soundURL = "";
var soundOptions = {
loop: true,
localOnly: true,
};
var range = DEFAULT_RANGE;
var maxVolume = DEFAULT_VOLUME;
var disabled = false;
var UPDATE_INTERVAL_MSECS = 100;
var rotation;
@@ -39,14 +53,13 @@
var checkTimer = false;
var _this;
var WANT_COLOR_CHANGE = false;
var COLOR_OFF = { red: 128, green: 128, blue: 128 };
var COLOR_ON = { red: 255, green: 0, blue: 0 };
var WANT_DEBUG = false;
function debugPrint(string) {
if (WANT_DEBUG) {
print(string);
print("ambientSound | " + string);
}
}
@@ -55,84 +68,201 @@
var oldSoundURL = soundURL;
var props = Entities.getEntityProperties(entity, [ "userData" ]);
if (props.userData) {
var data = JSON.parse(props.userData);
try {
var data = JSON.parse(props.userData);
} catch(e) {
debugPrint("unable to parse userData JSON string: " + props.userData);
this.cleanup();
return;
}
if (data.soundURL && !(soundURL === data.soundURL)) {
soundURL = data.soundURL;
debugPrint("Read ambient sound URL: " + soundURL);
} else if (!data.soundURL) {
soundURL = DEFAULT_URL;
debugPrint("Read ambient sound URL: " + soundURL);
}
if (data.range && !(range === data.range)) {
range = data.range;
debugPrint("Read ambient sound range: " + range);
}
if (data.volume && !(maxVolume === data.volume)) {
maxVolume = data.volume;
// Check known aliases for the "volume" setting (which allows in-place upgrades of existing marketplace entities)
data.maxVolume = data.maxVolume || data.soundVolume || data.volume;
if (data.maxVolume && !(maxVolume === data.maxVolume)) {
maxVolume = data.maxVolume;
debugPrint("Read ambient sound volume: " + maxVolume);
}
if ("disabled" in data && !(disabled === data.disabled)) {
disabled = data.disabled;
debugPrint("Read ambient disabled state: " + disabled);
this._updateColor(disabled);
}
}
if (disabled) {
this.cleanup();
soundURL = "";
return;
} else if (!checkTimer) {
checkTimer = Script.setInterval(_this.maybeUpdate, UPDATE_INTERVAL_MSECS);
}
if (!(soundURL === oldSoundURL) || (soundURL === "")) {
debugPrint("Loading ambient sound into cache");
ambientSound = SoundCache.getSound(soundURL);
if (soundURL) {
debugPrint("Loading ambient sound into cache");
// Use prefetch to detect URL loading errors
var resource = SoundCache.prefetch(soundURL);
function onStateChanged() {
if (resource.state === Resource.State.FINISHED) {
resource.stateChanged.disconnect(onStateChanged);
ambientSound = SoundCache.getSound(soundURL);
} else if (resource.state === Resource.State.FAILED) {
resource.stateChanged.disconnect(onStateChanged);
debugPrint("Failed to download ambient sound: " + soundURL);
}
}
resource.stateChanged.connect(onStateChanged);
onStateChanged(resource.state);
}
if (soundPlaying && soundPlaying.playing) {
debugPrint("URL changed, stopping current ambient sound");
soundPlaying.stop();
soundPlaying = false;
if (WANT_COLOR_CHANGE) {
Entities.editEntity(entity, { color: COLOR_OFF });
}
debugPrint("Restarting ambient sound");
}
}
}
}
this.preload = function(entityID) {
this.clickDownOnEntity = function(entityID, mouseEvent) {
if (mouseEvent.isPrimaryButton) {
this._toggle("primary click");
}
};
this.startFarTrigger = function() {
this._toggle("far click");
};
this._toggle = function(hint) {
// Toggle between ON/OFF state, but only if not in edit mode
if (Settings.getValue("io.highfidelity.isEditting")) {
return;
}
var props = Entities.getEntityProperties(entity, [ "userData" ]);
if (!props.userData) {
debugPrint("userData is empty; ignoring " + hint);
this.cleanup();
return;
}
var data = JSON.parse(props.userData);
data.disabled = !data.disabled;
debugPrint(hint + " -- triggering ambient sound " + (data.disabled ? "OFF" : "ON") + " (" + data.soundURL + ")");
this.cleanup();
// Save the userData and notify nearby listeners of the change
Entities.editEntity(entity, {
userData: JSON.stringify(data)
});
Messages.sendMessage(entity, "toggled");
};
this._updateColor = function(disabled) {
// Update Shape or Text Entity color based on ON/OFF status
var props = Entities.getEntityProperties(entity, [ "color", "textColor" ]);
var targetColor = disabled ? COLOR_OFF : COLOR_ON;
var currentColor = props.textColor || props.color;
var newProps = props.textColor ? { textColor: targetColor } : { color: targetColor };
if (currentColor.red !== targetColor.red ||
currentColor.green !== targetColor.green ||
currentColor.blue !== targetColor.blue) {
Entities.editEntity(entity, newProps);
}
};
this.preload = function(entityID) {
// Load the sound and range from the entity userData fields, and note the position of the entity.
debugPrint("Ambient sound preload");
debugPrint("Ambient sound preload " + entityID);
entity = entityID;
_this = this;
checkTimer = Script.setInterval(this.maybeUpdate, UPDATE_INTERVAL_MSECS);
};
var props = Entities.getEntityProperties(entity, [ "userData" ]);
var data = {};
if (props.userData) {
data = JSON.parse(props.userData);
}
var changed = false;
for(var p in DEFAULT_USERDATA) {
if (!(p in data)) {
data[p] = DEFAULT_USERDATA[p];
changed = true;
}
}
if (!data.grabbableKey.wantsTrigger) {
data.grabbableKey.wantsTrigger = true;
changed = true;
}
if (changed) {
debugPrint("applying default values to userData");
Entities.editEntity(entity, { userData: JSON.stringify(data) });
}
this._updateColor(data.disabled);
this.updateSettings();
// Subscribe to toggle notifications using entity ID as a channel name
Messages.subscribe(entity);
Messages.messageReceived.connect(this, "_onMessageReceived");
};
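// Illustrative note, not from the original source: because the channel name is simply the entity ID,
// any script that knows that ID can prompt every nearby instance to re-read its settings, e.g.
//     Messages.sendMessage(entityID, "toggled");
// which is exactly what _toggle() above does after rewriting userData.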
this._onMessageReceived = function(channel, message, sender, local) {
// Handle incoming toggle notifications
if (channel === entity && message === "toggled") {
debugPrint("received " + message + " from " + sender);
this.updateSettings();
}
};
this.maybeUpdate = function() {
// Every UPDATE_INTERVAL_MSECS, update the volume of the ambient sound based on distance from my avatar
_this.updateSettings();
var HYSTERESIS_FRACTION = 0.1;
var props = Entities.getEntityProperties(entity, [ "position", "rotation" ]);
if (disabled || !props.position) {
_this.cleanup();
return;
}
center = props.position;
rotation = props.rotation;
var distance = Vec3.length(Vec3.subtract(MyAvatar.position, center));
if (distance <= range) {
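// Worked example of the linear falloff below: with range 100 and maxVolume 1.0, an avatar standing
// 25 meters from the entity hears the clip at volume (1.0 - 25 / 100) * 1.0 = 0.75.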
var volume = (1.0 - distance / range) * maxVolume;
if (!soundPlaying && ambientSound.downloaded) {
soundPlaying = Audio.playSound(ambientSound, { loop: true,
localOnly: true,
orientation: rotation,
volume: volume });
debugPrint("Starting ambient sound, volume: " + volume);
if (WANT_COLOR_CHANGE) {
Entities.editEntity(entity, { color: COLOR_ON });
}
soundOptions.orientation = rotation;
soundOptions.volume = volume;
if (!soundPlaying && ambientSound && ambientSound.downloaded) {
debugPrint("Starting ambient sound: " + soundURL + " (duration: " + ambientSound.duration + ")");
soundPlaying = Audio.playSound(ambientSound, soundOptions);
} else if (soundPlaying && soundPlaying.playing) {
soundPlaying.setOptions( { volume: volume, orientation: rotation } );
soundPlaying.setOptions(soundOptions);
}
} else if (soundPlaying && soundPlaying.playing && (distance > range * (1.0 + HYSTERESIS_FRACTION))) {
soundPlaying.stop();
soundPlaying = false;
Entities.editEntity(entity, { color: { red: 128, green: 128, blue: 128 }});
debugPrint("Out of range, stopping ambient sound");
debugPrint("Out of range, stopping ambient sound: " + soundURL);
}
}
};
this.unload = function(entityID) {
this.unload = function(entityID) {
debugPrint("Ambient sound unload");
this.cleanup();
Messages.unsubscribe(entity);
Messages.messageReceived.disconnect(this, "_onMessageReceived");
};
this.cleanup = function() {
if (checkTimer) {
Script.clearInterval(checkTimer);
checkTimer = false;
}
if (soundPlaying && soundPlaying.playing) {
soundPlaying.stop();
soundPlaying = false;
}
};
};
})
})