Merge branch 'master' of https://github.com/highfidelity/hifi into triangleSet

Brad Hefta-Gaub 2017-03-07 12:18:36 -08:00
commit f7e824e47c
40 changed files with 576 additions and 478 deletions

View file

@ -206,6 +206,17 @@ foreach(CUSTOM_MACRO ${HIFI_CUSTOM_MACROS})
include(${CUSTOM_MACRO})
endforeach()
file(GLOB_RECURSE JS_SRC scripts/*.js)
add_custom_target(js SOURCES ${JS_SRC})
if (UNIX)
install(
DIRECTORY "${CMAKE_SOURCE_DIR}/scripts"
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/interface
COMPONENT ${CLIENT_COMPONENT}
)
endif()
if (ANDROID)
file(GLOB ANDROID_CUSTOM_MACROS "cmake/android/*.cmake")
foreach(CUSTOM_MACRO ${ANDROID_CUSTOM_MACROS})

View file

@ -365,6 +365,28 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointer<ReceivedMess
message->readPrimitive(&isRequesting);
nodeData->setRequestsDomainListData(isRequesting);
qCDebug(avatars) << "node" << nodeData->getNodeID() << "requestsDomainListData" << isRequesting;
// If we just opened the PAL...
if (isRequesting) {
// For each node in the NodeList...
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode(
// Discover the valid nodes we're ignoring...
[&](const SharedNodePointer& node)->bool {
if (node->getUUID() != senderNode->getUUID() &&
(nodeData->isRadiusIgnoring(node->getUUID()) ||
senderNode->isIgnoringNodeWithID(node->getUUID()))) {
return true;
}
return false;
},
// ...For those nodes, reset the lastBroadcastTime to 0
// so that the AvatarMixer will send Identity data to us
[&](const SharedNodePointer& node) {
nodeData->setLastBroadcastTime(node->getUUID(), 0);
}
);
}
}
}
auto end = usecTimestampNow();
@ -409,7 +431,31 @@ void AvatarMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message
void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto start = usecTimestampNow();
senderNode->parseIgnoreRequestMessage(message);
auto nodeList = DependencyManager::get<NodeList>();
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
bool addToIgnore;
message->readPrimitive(&addToIgnore);
while (message->getBytesLeftToRead()) {
// parse out the UUID being ignored from the packet
QUuid ignoredUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
// Reset the lastBroadcastTime for the ignored avatar to 0
// so the AvatarMixer knows it'll have to send identity data about the ignored avatar
// to the ignorer if the ignorer unignores.
nodeData->setLastBroadcastTime(ignoredUUID, 0);
// Reset the lastBroadcastTime for the ignorer (FROM THE PERSPECTIVE OF THE IGNORED) to 0
// so the AvatarMixer knows it'll have to send identity data about the ignorer
// to the ignored if the ignorer unignores.
auto ignoredNode = nodeList->nodeWithUUID(ignoredUUID);
AvatarMixerClientData* ignoredNodeData = reinterpret_cast<AvatarMixerClientData*>(ignoredNode->getLinkedData());
ignoredNodeData->setLastBroadcastTime(senderNode->getUUID(), 0);
if (addToIgnore) {
senderNode->addIgnoredNode(ignoredUUID);
} else {
senderNode->removeIgnoredNode(ignoredUUID);
}
}
auto end = usecTimestampNow();
_handleNodeIgnoreRequestPacketElapsedTime += (end - start);
}
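
Both handlers above lean on the same convention: resetting a client's lastBroadcastTime for another avatar to 0 makes it compare as older than any identity-change timestamp, so the next broadcast pass re-sends identity data (the comparison appears later in AvatarMixerSlave::broadcastAvatarData). A minimal standalone sketch of that bookkeeping, with illustrative names rather than the mixer's real API:

    #include <cstdint>
    #include <iostream>
    #include <unordered_map>

    // Toy stand-in for AvatarMixerClientData's lastBroadcastTime map (not the real class).
    struct ClientBookkeeping {
        std::unordered_map<int, uint64_t> lastBroadcastTimes;   // keyed by a fake node id
        uint64_t getLastBroadcastTime(int nodeId) const {
            auto it = lastBroadcastTimes.find(nodeId);
            return it != lastBroadcastTimes.end() ? it->second : 0;   // 0 means "never sent"
        }
        void setLastBroadcastTime(int nodeId, uint64_t t) { lastBroadcastTimes[nodeId] = t; }
    };

    int main() {
        ClientBookkeeping viewer;
        const int otherAvatar = 42;
        const uint64_t identityChangeTime = 1000;   // usec timestamp of the other avatar's last identity change

        viewer.setLastBroadcastTime(otherAvatar, 2000);   // identity already sent after that change
        std::cout << (viewer.getLastBroadcastTime(otherAvatar) <= identityChangeTime) << "\n";   // 0: no resend

        viewer.setLastBroadcastTime(otherAvatar, 0);      // PAL opened or ignore toggled: reset forces a resend
        std::cout << (viewer.getLastBroadcastTime(otherAvatar) <= identityChangeTime) << "\n";   // 1: resend identity
        return 0;
    }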

View file

@ -65,15 +65,6 @@ int AvatarMixerClientData::parseData(ReceivedMessage& message) {
// compute the offset to the data payload
return _avatar->parseDataFromBuffer(message.readWithoutCopy(message.getBytesLeftToRead()));
}
bool AvatarMixerClientData::checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid) {
if (_hasReceivedFirstPacketsFrom.find(uuid) == _hasReceivedFirstPacketsFrom.end()) {
_hasReceivedFirstPacketsFrom.insert(uuid);
return false;
}
return true;
}
uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
// return the matching PacketSequenceNumber, or the default if we don't have it
auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);
@ -102,8 +93,8 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
} else {
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
}
setLastBroadcastTime(other->getUUID(), 0);
DependencyManager::get<NodeList>()->sendUnreliablePacket(*killPacket, *self);
_hasReceivedFirstPacketsFrom.erase(other->getUUID());
}
}

View file

@ -45,8 +45,6 @@ public:
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
bool checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid);
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
@ -63,8 +61,8 @@ public:
uint16_t getLastReceivedSequenceNumber() const { return _lastReceivedSequenceNumber; }
HRCTime getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = p_high_resolution_clock::now(); }
uint64_t getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }
void flagIdentityChange() { _identityChangeTimestamp = usecTimestampNow(); }
bool getAvatarSessionDisplayNameMustChange() const { return _avatarSessionDisplayNameMustChange; }
void setAvatarSessionDisplayNameMustChange(bool set = true) { _avatarSessionDisplayNameMustChange = set; }
@ -139,7 +137,6 @@ private:
uint16_t _lastReceivedSequenceNumber { 0 };
std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
std::unordered_set<QUuid> _hasReceivedFirstPacketsFrom;
std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
// this is a map of the last time we encoded an "other" avatar for
@ -147,7 +144,7 @@ private:
std::unordered_map<QUuid, quint64> _lastOtherAvatarEncodeTime;
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
HRCTime _identityChangeTimestamp;
uint64_t _identityChangeTimestamp;
bool _avatarSessionDisplayNameMustChange{ false };
int _numAvatarsSentLastFrame = 0;

View file

@ -80,16 +80,6 @@ int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData,
static const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
// FIXME - There is some old logic (unchanged as of 2/17/17) that randomly decides to send an identity
// packet. That logic had the following comment about the constants it uses...
//
// An 80% chance of sending an identity packet within a 5 second interval,
// assuming a 60 Hz update rate.
//
// Assuming the calculation of the constant is in fact correct for 80% and 60 Hz and 5 seconds (an assumption
// that I have not verified), then the constant is definitely wrong now, since we send at 45 Hz.
const float IDENTITY_SEND_PROBABILITY = 1.0f / 187.0f;
void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
quint64 start = usecTimestampNow();
@ -137,14 +127,18 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// keep track of the number of other avatar frames skipped
int numAvatarsWithSkippedFrames = 0;
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that are not in the view frustum
bool getsOutOfView = nodeData->getRequestsDomainListData();
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that they've ignored
bool getsIgnoredByMe = getsOutOfView;
// When this is true, the AvatarMixer will send Avatar data to a client
// about avatars they've ignored or that are out of view
bool PALIsOpen = nodeData->getRequestsDomainListData();
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
bool getsAnyIgnored = getsIgnoredByMe && node->getCanKick();
bool getsAnyIgnored = PALIsOpen && node->getCanKick();
if (PALIsOpen) {
// Increase minimumBytesPerAvatar if the PAL is open
minimumBytesPerAvatar += sizeof(AvatarDataPacket::AvatarGlobalPosition) +
sizeof(AvatarDataPacket::AudioLoudness);
}
// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
@ -222,13 +216,14 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// or that has ignored the viewing node
if (!avatarNode->getLinkedData()
|| avatarNode->getUUID() == node->getUUID()
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !getsIgnoredByMe)
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
shouldIgnore = true;
} else {
// Check to see if the space bubble is enabled
if (node->isIgnoreRadiusEnabled() || avatarNode->isIgnoreRadiusEnabled()) {
// Don't bother with these checks if the other avatar has their bubble enabled and getsAnyIgnored is true
if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Define the scale of the box for the current other node
glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
@ -306,16 +301,9 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
// make sure we send out identity packets to and from new arrivals.
bool forceSend = !nodeData->checkAndSetHasReceivedFirstPacketsFrom(otherNode->getUUID());
// FIXME - this clause seems suspicious "... || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp ..."
if (!overBudget
&& otherNodeData->getIdentityChangeTimestamp().time_since_epoch().count() > 0
&& (forceSend
|| otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp
|| distribution(generator) < IDENTITY_SEND_PROBABILITY)) {
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
if (nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(otherNodeData, node);
}
@ -335,9 +323,9 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
if (overBudget) {
overBudgetAvatars++;
_stats.overBudgetAvatars++;
detail = AvatarData::NoData;
} else if (!isInView && !getsOutOfView) {
detail = AvatarData::NoData;
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
} else if (!isInView) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
nodeData->incrementAvatarOutOfView();
} else {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
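
About the removed IDENTITY_SEND_PROBABILITY constant above: with a per-frame send probability p, the chance of sending at least once in N frames is 1 - (1 - p)^N, so an 80% chance over 5 seconds at the assumed 60 Hz (N = 300) gives p ≈ 1/187, while hitting the same target at the current 45 Hz broadcast rate would need roughly 1/140. A quick numeric check, not part of the codebase:

    #include <cmath>
    #include <cstdio>

    // P(at least one send in N frames) = 1 - (1 - p)^N, solved for p.
    int main() {
        const double targetChance = 0.80;   // "80% chance ... within a 5 second interval"
        const double intervalSecs = 5.0;

        auto perFrameProbability = [&](double frameRateHz) {
            double frames = frameRateHz * intervalSecs;
            return 1.0 - std::pow(1.0 - targetChance, 1.0 / frames);
        };

        std::printf("60 Hz: p = %.5f (~1/%.0f)\n", perFrameProbability(60.0), 1.0 / perFrameProbability(60.0));   // ~1/187
        std::printf("45 Hz: p = %.5f (~1/%.0f)\n", perFrameProbability(45.0), 1.0 / perFrameProbability(45.0));   // ~1/140
        return 0;
    }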

View file

@ -63,6 +63,17 @@ qt5_wrap_ui(QT_UI_HEADERS "${QT_UI_FILES}")
# add them to the interface source files
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${QT_UI_HEADERS}" "${QT_RESOURCES}")
file(GLOB_RECURSE QML_SRC resources/qml/*.qml resources/qml/*.js)
add_custom_target(qml SOURCES ${QML_SRC})
if (UNIX)
install(
DIRECTORY "${CMAKE_SOURCE_DIR}/interface/resources/qml"
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/resources
COMPONENT ${CLIENT_COMPONENT}
)
endif()
# translation disabled until we strip out the line numbers
# set(QM ${TARGET_NAME}_en.qm)
# set(TS ${TARGET_NAME}_en.ts)

View file

@ -61,9 +61,21 @@ Rectangle {
property int sortIndicatorColumn: 1
property int sortIndicatorOrder: Qt.AscendingOrder
}
function getSelectedSessionIDs() {
var sessionIDs = [];
table.selection.forEach(function (userIndex) {
sessionIDs.push(userModelData[userIndex].sessionId);
});
return sessionIDs;
}
function refreshWithFilter() {
// We should just be able to set settings.filtered to filter.checked, but see #3249, so send to .js for saving.
pal.sendToScript({method: 'refresh', params: {filter: filter.checked && {distance: settings.nearDistance}}});
var userIds = getSelectedSessionIDs();
var params = {filter: filter.checked && {distance: settings.nearDistance}};
if (userIds.length > 0) {
params.selected = [[userIds[0]], true, true];
}
pal.sendToScript({method: 'refresh', params: params});
}
// This is the container for the PAL
@ -286,7 +298,7 @@ Rectangle {
HifiControls.GlyphButton {
function getGlyph() {
var fileName = "vol_";
if (model["personalMute"]) {
if (model && model["personalMute"]) {
fileName += "x_";
}
fileName += (4.0*(model ? model.avgAudioLevel : 0.0)).toFixed(0);
@ -615,6 +627,9 @@ Rectangle {
var sortProperty = table.getColumn(table.sortIndicatorColumn).role;
var before = (table.sortIndicatorOrder === Qt.AscendingOrder) ? -1 : 1;
var after = -1 * before;
// get selection(s) before sorting
var selectedIDs = getSelectedSessionIDs();
userModelData.sort(function (a, b) {
var aValue = a[sortProperty].toString().toLowerCase(), bValue = b[sortProperty].toString().toLowerCase();
switch (true) {
@ -627,6 +642,7 @@ Rectangle {
userModel.clear();
var userIndex = 0;
var newSelectedIndexes = [];
userModelData.forEach(function (datum) {
function init(property) {
if (datum[property] === undefined) {
@ -636,7 +652,14 @@ Rectangle {
['personalMute', 'ignore', 'mute', 'kick'].forEach(init);
datum.userIndex = userIndex++;
userModel.append(datum);
if (selectedIDs.indexOf(datum.sessionId) != -1) {
newSelectedIndexes.push(datum.userIndex);
}
});
if (newSelectedIndexes.length > 0) {
table.selection.select(newSelectedIndexes);
table.positionViewAtRow(newSelectedIndexes[0], ListView.Beginning);
}
}
signal sendToScript(var message);
function noticeSelection() {

View file

@ -4236,12 +4236,6 @@ void Application::updateDialogs(float deltaTime) const {
PerformanceWarning warn(showWarnings, "Application::updateDialogs()");
auto dialogsManager = DependencyManager::get<DialogsManager>();
// Update bandwidth dialog, if any
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
if (bandwidthDialog) {
bandwidthDialog->update();
}
QPointer<OctreeStatsDialog> octreeStatsDialog = dialogsManager->getOctreeStatsDialog();
if (octreeStatsDialog) {
octreeStatsDialog->update();

View file

@ -51,6 +51,7 @@
#include <RunningMarker.h>
#include "avatar/MyAvatar.h"
#include "BandwidthRecorder.h"
#include "Bookmarks.h"
#include "Camera.h"
#include "ConnectionMonitor.h"
@ -61,7 +62,6 @@
#include "scripting/ControllerScriptingInterface.h"
#include "scripting/DialogsManagerScriptingInterface.h"
#include "ui/ApplicationOverlay.h"
#include "ui/BandwidthDialog.h"
#include "ui/EntityScriptServerLogDialog.h"
#include "ui/LodToolsDialog.h"
#include "ui/LogDialog.h"

View file

@ -566,8 +566,6 @@ Menu::Menu() {
dialogsManager.data(), SLOT(toggleDiskCacheEditor()));
addActionToQMenuAndActionHash(networkMenu, MenuOption::ShowDSConnectTable, 0,
dialogsManager.data(), SLOT(showDomainConnectionDialog()));
addActionToQMenuAndActionHash(networkMenu, MenuOption::BandwidthDetails, 0,
dialogsManager.data(), SLOT(bandwidthDetails()));
#if (PR_BUILD || DEV_BUILD)
addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::SendWrongProtocolVersion, 0, false,

View file

@ -49,7 +49,6 @@ namespace MenuOption {
const QString AutoMuteAudio = "Auto Mute Microphone";
const QString AvatarReceiveStats = "Show Receive Stats";
const QString Back = "Back";
const QString BandwidthDetails = "Bandwidth Details";
const QString BinaryEyelidControl = "Binary Eyelid Control";
const QString BookmarkLocation = "Bookmark Location";
const QString Bookmarks = "Bookmarks";

View file

@ -329,7 +329,7 @@ void AvatarManager::removeAvatar(const QUuid& sessionUUID, KillAvatarReason remo
}
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar, KillAvatarReason removalReason) {
AvatarHashMap::handleRemovedAvatar(removedAvatar);
AvatarHashMap::handleRemovedAvatar(removedAvatar, removalReason);
// removedAvatar is a shared pointer to an AvatarData but we need to get to the derived Avatar
// class in this context so we can call methods that don't exist at the base class.

View file

@ -1,135 +0,0 @@
//
// BandwidthDialog.cpp
// interface/src/ui
//
// Created by Tobias Schwinger on 6/21/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <cstdio>
#include "BandwidthRecorder.h"
#include "ui/BandwidthDialog.h"
#include <QFormLayout>
#include <QDialogButtonBox>
#include <QPalette>
#include <QColor>
BandwidthChannelDisplay::BandwidthChannelDisplay(QVector<NodeType_t> nodeTypesToFollow,
QFormLayout* form,
char const* const caption, char const* unitCaption,
const float unitScale, unsigned colorRGBA) :
_nodeTypesToFollow(nodeTypesToFollow),
_caption(caption),
_unitCaption(unitCaption),
_unitScale(unitScale),
_colorRGBA(colorRGBA)
{
_label = new QLabel();
_label->setAlignment(Qt::AlignRight);
QPalette palette = _label->palette();
unsigned rgb = colorRGBA >> 8;
rgb = ((rgb & 0xfefefeu) >> 1) + ((rgb & 0xf8f8f8) >> 3);
palette.setColor(QPalette::WindowText, QColor::fromRgb(rgb));
_label->setPalette(palette);
form->addRow(QString(" ") + _caption + " Bandwidth In/Out:", _label);
}
void BandwidthChannelDisplay::bandwidthAverageUpdated() {
float inTotal = 0.;
float outTotal = 0.;
QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
for (int i = 0; i < _nodeTypesToFollow.size(); ++i) {
inTotal += bandwidthRecorder->getAverageInputKilobitsPerSecond(_nodeTypesToFollow.at(i));
outTotal += bandwidthRecorder->getAverageOutputKilobitsPerSecond(_nodeTypesToFollow.at(i));
}
_strBuf =
QString("").setNum((int) (inTotal * _unitScale)) + "/" +
QString("").setNum((int) (outTotal * _unitScale)) + " " + _unitCaption;
}
void BandwidthChannelDisplay::paint() {
_label->setText(_strBuf);
}
BandwidthDialog::BandwidthDialog(QWidget* parent) :
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) {
this->setWindowTitle("Bandwidth Details");
// Create layout
QFormLayout* form = new QFormLayout();
form->setSizeConstraint(QLayout::SetFixedSize);
this->QDialog::setLayout(form);
QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
_allChannelDisplays[0] = _audioChannelDisplay =
new BandwidthChannelDisplay({NodeType::AudioMixer}, form, "Audio", "Kbps", 1.0, COLOR0);
_allChannelDisplays[1] = _avatarsChannelDisplay =
new BandwidthChannelDisplay({NodeType::Agent, NodeType::AvatarMixer}, form, "Avatars", "Kbps", 1.0, COLOR1);
_allChannelDisplays[2] = _octreeChannelDisplay =
new BandwidthChannelDisplay({NodeType::EntityServer}, form, "Octree", "Kbps", 1.0, COLOR2);
_allChannelDisplays[3] = _octreeChannelDisplay =
new BandwidthChannelDisplay({NodeType::DomainServer}, form, "Domain", "Kbps", 1.0, COLOR2);
_allChannelDisplays[4] = _otherChannelDisplay =
new BandwidthChannelDisplay({NodeType::Unassigned}, form, "Other", "Kbps", 1.0, COLOR2);
_allChannelDisplays[5] = _totalChannelDisplay =
new BandwidthChannelDisplay({
NodeType::DomainServer, NodeType::EntityServer,
NodeType::AudioMixer, NodeType::Agent,
NodeType::AvatarMixer, NodeType::Unassigned
}, form, "Total", "Kbps", 1.0, COLOR2);
connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(updateTimerTimeout()));
averageUpdateTimer->start(1000);
}
BandwidthDialog::~BandwidthDialog() {
for (unsigned int i = 0; i < _CHANNELCOUNT; i++) {
delete _allChannelDisplays[i];
}
}
void BandwidthDialog::updateTimerTimeout() {
for (unsigned int i = 0; i < _CHANNELCOUNT; i++) {
_allChannelDisplays[i]->bandwidthAverageUpdated();
}
}
void BandwidthDialog::paintEvent(QPaintEvent* event) {
for (unsigned int i=0; i<_CHANNELCOUNT; i++)
_allChannelDisplays[i]->paint();
this->QDialog::paintEvent(event);
}
void BandwidthDialog::reject() {
// Just regularly close upon ESC
this->QDialog::close();
}
void BandwidthDialog::closeEvent(QCloseEvent* event) {
this->QDialog::closeEvent(event);
emit closed();
}
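
One detail of the removed dialog worth noting: the label color is derived from the channel color by a mask-and-shift trick that keeps roughly half plus an eighth of each 8-bit channel (about 62.5% brightness), with the masks preventing bits from spilling into the neighboring channel. A standalone check of that one line, reusing COLOR0 from the removed header:

    #include <cstdio>

    int main() {
        unsigned colorRGBA = 0x33cc99ff;                         // COLOR0 from the removed BandwidthDialog.h
        unsigned rgb = colorRGBA >> 8;                           // drop alpha: 0x33cc99
        unsigned dimmed = ((rgb & 0xfefefeu) >> 1)               // ~1/2 of each channel, carry-free
                        + ((rgb & 0xf8f8f8u) >> 3);              // + ~1/8 of each channel => ~5/8 brightness
        std::printf("%06x -> %06x\n", rgb, dimmed);              // prints: 33cc99 -> 1f7f5f
        return 0;
    }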

View file

@ -1,94 +0,0 @@
//
// BandwidthDialog.h
// interface/src/ui
//
// Created by Tobias Schwinger on 6/21/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_BandwidthDialog_h
#define hifi_BandwidthDialog_h
#include <QDialog>
#include <QLabel>
#include <QFormLayout>
#include <QVector>
#include <QTimer>
#include "Node.h"
#include "BandwidthRecorder.h"
const unsigned int COLOR0 = 0x33cc99ff;
const unsigned int COLOR1 = 0xffef40c0;
const unsigned int COLOR2 = 0xd0d0d0a0;
class BandwidthChannelDisplay : public QObject {
Q_OBJECT
public:
BandwidthChannelDisplay(QVector<NodeType_t> nodeTypesToFollow,
QFormLayout* form,
char const* const caption, char const* unitCaption, float unitScale, unsigned colorRGBA);
void paint();
private:
QVector<NodeType_t> _nodeTypesToFollow;
QLabel* _label;
QString _strBuf;
char const* const _caption;
char const* _unitCaption;
float const _unitScale;
unsigned _colorRGBA;
public slots:
void bandwidthAverageUpdated();
};
class BandwidthDialog : public QDialog {
Q_OBJECT
public:
BandwidthDialog(QWidget* parent);
~BandwidthDialog();
void paintEvent(QPaintEvent*) override;
private:
BandwidthChannelDisplay* _audioChannelDisplay;
BandwidthChannelDisplay* _avatarsChannelDisplay;
BandwidthChannelDisplay* _octreeChannelDisplay;
BandwidthChannelDisplay* _domainChannelDisplay;
BandwidthChannelDisplay* _otherChannelDisplay;
BandwidthChannelDisplay* _totalChannelDisplay; // sums of all the other channels
static const unsigned int _CHANNELCOUNT = 6;
BandwidthChannelDisplay* _allChannelDisplays[_CHANNELCOUNT];
signals:
void closed();
public slots:
void reject() override;
void updateTimerTimeout();
protected:
// Emits a 'closed' signal when this dialog is closed.
void closeEvent(QCloseEvent*) override;
private:
QTimer* averageUpdateTimer = new QTimer(this);
};
#endif // hifi_BandwidthDialog_h

View file

@ -19,7 +19,6 @@
#include <PathUtils.h>
#include "AddressBarDialog.h"
#include "BandwidthDialog.h"
#include "CachesSizeDialog.h"
#include "ConnectionFailureDialog.h"
#include "DiskCacheEditor.h"
@ -108,20 +107,6 @@ void DialogsManager::cachesSizeDialog() {
_cachesSizeDialog->raise();
}
void DialogsManager::bandwidthDetails() {
if (! _bandwidthDialog) {
_bandwidthDialog = new BandwidthDialog(qApp->getWindow());
connect(_bandwidthDialog, SIGNAL(closed()), _bandwidthDialog, SLOT(deleteLater()));
if (_hmdToolsDialog) {
_hmdToolsDialog->watchWindow(_bandwidthDialog->windowHandle());
}
_bandwidthDialog->show();
}
_bandwidthDialog->raise();
}
void DialogsManager::lodTools() {
if (!_lodToolsDialog) {
maybeCreateDialog(_lodToolsDialog);

View file

@ -21,7 +21,6 @@
class AnimationsDialog;
class AttachmentsDialog;
class BandwidthDialog;
class CachesSizeDialog;
class DiskCacheEditor;
class LodToolsDialog;
@ -36,7 +35,6 @@ class DialogsManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
QPointer<BandwidthDialog> getBandwidthDialog() const { return _bandwidthDialog; }
QPointer<HMDToolsDialog> getHMDToolsDialog() const { return _hmdToolsDialog; }
QPointer<LodToolsDialog> getLodToolsDialog() const { return _lodToolsDialog; }
QPointer<OctreeStatsDialog> getOctreeStatsDialog() const { return _octreeStatsDialog; }
@ -53,7 +51,6 @@ public slots:
void showLoginDialog();
void octreeStatsDetails();
void cachesSizeDialog();
void bandwidthDetails();
void lodTools();
void hmdTools(bool showTools);
void showScriptEditor();
@ -79,7 +76,6 @@ private:
QPointer<AnimationsDialog> _animationsDialog;
QPointer<AttachmentsDialog> _attachmentsDialog;
QPointer<BandwidthDialog> _bandwidthDialog;
QPointer<CachesSizeDialog> _cachesSizeDialog;
QPointer<DiskCacheEditor> _diskCacheEditor;
QPointer<QMessageBox> _ircInfoBox;

View file

@ -79,9 +79,6 @@ HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
// what screens we're allowed on
watchWindow(windowHandle());
auto dialogsManager = DependencyManager::get<DialogsManager>();
if (dialogsManager->getBandwidthDialog()) {
watchWindow(dialogsManager->getBandwidthDialog()->windowHandle());
}
if (dialogsManager->getOctreeStatsDialog()) {
watchWindow(dialogsManager->getOctreeStatsDialog()->windowHandle());
}

View file

@ -62,7 +62,11 @@ namespace render {
if (overlay->is3D()) {
auto overlay3D = std::dynamic_pointer_cast<Base3DOverlay>(overlay);
if (overlay3D->isAA())
return (overlay3D->getDrawInFront() ? LAYER_3D_FRONT : LAYER_3D);
if (overlay3D->getDrawInFront()) {
return LAYER_3D_FRONT;
} else {
return LAYER_3D;
}
else
return LAYER_NO_AA;
} else {

View file

@ -488,13 +488,7 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
// measure new _hipsOffset for next frame
// by looking for discrepancies between where a targeted endEffector is
// and where it wants to be (after IK solutions are done)
// use weighted average between HMD and other targets
float HMD_WEIGHT = 10.0f;
float OTHER_WEIGHT = 1.0f;
float totalWeight = 0.0f;
glm::vec3 additionalHipsOffset = Vectors::ZERO;
glm::vec3 newHipsOffset = Vectors::ZERO;
for (auto& target: targets) {
int targetIndex = target.getIndex();
if (targetIndex == _headIndex && _headIndex != -1) {
@ -505,42 +499,34 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
glm::vec3 under = _skeleton->getAbsolutePose(_headIndex, underPoses).trans();
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
const float HEAD_OFFSET_SLAVE_FACTOR = 0.65f;
additionalHipsOffset += (OTHER_WEIGHT * HEAD_OFFSET_SLAVE_FACTOR) * (under- actual);
totalWeight += OTHER_WEIGHT;
newHipsOffset += HEAD_OFFSET_SLAVE_FACTOR * (actual - under);
} else if (target.getType() == IKTarget::Type::HmdHead) {
// we want to shift the hips to bring the head to its designated position
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
glm::vec3 thisOffset = target.getTranslation() - actual;
glm::vec3 futureHipsOffset = _hipsOffset + thisOffset;
if (glm::length(glm::vec2(futureHipsOffset.x, futureHipsOffset.z)) < _maxHipsOffsetLength) {
// it is imperative to shift the hips and bring the head to its designated position
// so we slam newHipsOffset here and ignore all other targets
additionalHipsOffset = futureHipsOffset - _hipsOffset;
totalWeight = 0.0f;
break;
} else {
additionalHipsOffset += HMD_WEIGHT * (target.getTranslation() - actual);
totalWeight += HMD_WEIGHT;
}
_hipsOffset += target.getTranslation() - actual;
// and ignore all other targets
newHipsOffset = _hipsOffset;
break;
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
glm::vec3 targetPosition = target.getTranslation();
newHipsOffset += targetPosition - actualPosition;
// Add downward pressure on the hips
newHipsOffset *= 0.95f;
newHipsOffset -= 1.0f;
}
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
glm::vec3 targetPosition = target.getTranslation();
additionalHipsOffset += OTHER_WEIGHT * (targetPosition - actualPosition);
totalWeight += OTHER_WEIGHT;
newHipsOffset += targetPosition - actualPosition;
}
}
if (totalWeight > 1.0f) {
additionalHipsOffset /= totalWeight;
}
// Add downward pressure on the hips
additionalHipsOffset *= 0.95f;
additionalHipsOffset -= 1.0f;
// smooth transitions by relaxing _hipsOffset toward the new value
const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;
float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
_hipsOffset += additionalHipsOffset * tau;
_hipsOffset += (newHipsOffset - _hipsOffset) * tau;
// clamp the hips offset
float hipsOffsetLength = glm::length(_hipsOffset);
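
The final update above applies only a fraction tau = dt / HIPS_OFFSET_SLAVE_TIMESCALE of the computed offset change each frame ("relaxing _hipsOffset toward the new value"), which is a standard exponential smoothing step. A minimal sketch of how quickly that converges, using a scalar stand-in for one component of _hipsOffset rather than the IK code itself:

    #include <cstdio>

    int main() {
        const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;   // seconds, as in the diff
        const float dt = 1.0f / 60.0f;                      // one animation frame
        float offset = 0.0f;                                // current hips offset (one axis)
        const float target = 1.0f;                          // freshly computed offset for this frame

        for (int frame = 0; frame < 30; ++frame) {          // simulate half a second
            float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
            offset += (target - offset) * tau;              // move a fraction of the remaining distance
        }
        std::printf("offset after 0.5 s: %.3f\n", offset);  // ~0.996: converged within a few timescales
        return 0;
    }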

View file

@ -186,6 +186,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
bool cullSmallChanges = (dataDetail == CullSmallData);
bool sendAll = (dataDetail == SendAllData);
bool sendMinimum = (dataDetail == MinimumData);
bool sendPALMinimum = (dataDetail == PALMinimum);
lazyInitHeadData();
@ -222,24 +223,41 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
auto parentID = getParentID();
bool hasAvatarGlobalPosition = true; // always include global position
bool hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
bool hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
bool hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
bool hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
bool hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
bool hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
bool hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
bool hasAvatarOrientation = false;
bool hasAvatarBoundingBox = false;
bool hasAvatarScale = false;
bool hasLookAtPosition = false;
bool hasAudioLoudness = false;
bool hasSensorToWorldMatrix = false;
bool hasAdditionalFlags = false;
// local position, and parent info only apply to avatars that are parented. The local position
// and the parent info can change independently though, so we track their "changed since"
// separately
bool hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
bool hasAvatarLocalPosition = hasParent() && (sendAll ||
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));
bool hasParentInfo = false;
bool hasAvatarLocalPosition = false;
bool hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
bool hasJointData = sendAll || !sendMinimum;
bool hasFaceTrackerInfo = false;
bool hasJointData = false;
if (sendPALMinimum) {
hasAudioLoudness = true;
} else {
hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
hasAvatarLocalPosition = hasParent() && (sendAll ||
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));
hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
hasJointData = sendAll || !sendMinimum;
}
// Leading flags, to indicate how much data is actually included in the packet...
AvatarDataPacket::HasFlags packetStateFlags =
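
The hasXxx booleans above are packed into the leading AvatarDataPacket::HasFlags bitfield so the receiver knows which optional sections follow; for PALMinimum only the always-sent global position plus audio loudness survive. A toy sketch of the pattern, with illustrative flag values rather than the real packet layout:

    #include <cstdint>
    #include <cstdio>

    // Illustrative flag bits; the real AvatarDataPacket defines its own constants and ordering.
    using HasFlags = uint16_t;
    const HasFlags PACKET_HAS_GLOBAL_POSITION = 1 << 0;
    const HasFlags PACKET_HAS_ORIENTATION     = 1 << 1;
    const HasFlags PACKET_HAS_AUDIO_LOUDNESS  = 1 << 2;
    const HasFlags PACKET_HAS_JOINT_DATA      = 1 << 3;

    int main() {
        bool sendPALMinimum = true;   // the detail level the mixer now uses when only the PAL needs data

        HasFlags flags = PACKET_HAS_GLOBAL_POSITION;   // global position is always included
        if (sendPALMinimum) {
            flags |= PACKET_HAS_AUDIO_LOUDNESS;
        } else {
            flags |= PACKET_HAS_ORIENTATION | PACKET_HAS_AUDIO_LOUDNESS | PACKET_HAS_JOINT_DATA;
        }

        std::printf("joint data included: %d\n", (flags & PACKET_HAS_JOINT_DATA) != 0);   // 0 for PALMinimum
        return 0;
    }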

View file

@ -376,6 +376,7 @@ public:
typedef enum {
NoData,
PALMinimum,
MinimumData,
CullSmallData,
IncludeSmallData,

View file

@ -163,7 +163,7 @@ void ShapeEntityItem::appendSubclassData(OctreePacketData* packetData, EncodeBit
// This value specifies how the shape should be treated by physics calculations.
// For now, all polys will act as spheres
ShapeType ShapeEntityItem::getShapeType() const {
return (_shape == entity::Shape::Cube) ? SHAPE_TYPE_BOX : SHAPE_TYPE_SPHERE;
return (_shape == entity::Shape::Cube) ? SHAPE_TYPE_BOX : SHAPE_TYPE_ELLIPSOID;
}
void ShapeEntityItem::setColor(const rgbColor& value) {

View file

@ -154,14 +154,6 @@ const gpu::TexturePointer& TextureCache::getBlackTexture() {
return _blackTexture;
}
const gpu::TexturePointer& TextureCache::getNormalFittingTexture() {
if (!_normalFittingTexture) {
_normalFittingTexture = getImageTexture(PathUtils::resourcesPath() + "images/normalFittingScale.dds");
}
return _normalFittingTexture;
}
/// Extra data for creating textures.
class TextureExtra {
public:

View file

@ -124,9 +124,6 @@ public:
/// Returns a black texture (useful for a default).
const gpu::TexturePointer& getBlackTexture();
// Returns a map used to compress the normals through a fitting scale algorithm
const gpu::TexturePointer& getNormalFittingTexture();
/// Returns a texture version of an image file
static gpu::TexturePointer getImageTexture(const QString& path, Type type = Type::DEFAULT_TEXTURE, QVariantMap options = QVariantMap());
@ -151,7 +148,6 @@ private:
gpu::TexturePointer _grayTexture;
gpu::TexturePointer _blueTexture;
gpu::TexturePointer _blackTexture;
gpu::TexturePointer _normalFittingTexture;
};
#endif // hifi_TextureCache_h

View file

@ -256,9 +256,20 @@ const btCollisionShape* ShapeFactory::createShapeFromInfo(const ShapeInfo& info)
}
break;
case SHAPE_TYPE_SPHERE: {
glm::vec3 halfExtents = info.getHalfExtents();
float radius = glm::max(halfExtents.x, glm::max(halfExtents.y, halfExtents.z));
shape = new btSphereShape(radius);
}
break;
case SHAPE_TYPE_ELLIPSOID: {
glm::vec3 halfExtents = info.getHalfExtents();
float radius = halfExtents.x;
if (radius == halfExtents.y && radius == halfExtents.z) {
const float MIN_RADIUS = 0.001f;
const float MIN_RELATIVE_SPHERICAL_ERROR = 0.001f;
if (radius > MIN_RADIUS
&& fabsf(radius - halfExtents.y) / radius < MIN_RELATIVE_SPHERICAL_ERROR
&& fabsf(radius - halfExtents.z) / radius < MIN_RELATIVE_SPHERICAL_ERROR) {
// close enough to true sphere
shape = new btSphereShape(radius);
} else {
ShapeInfo::PointList points;
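
The SHAPE_TYPE_ELLIPSOID branch above still builds a cheap btSphereShape when the half extents are within 0.1% of each other, and only falls back to a convex hull otherwise. The tolerance test in isolation, with a plain struct standing in for glm::vec3 and the constants copied from the diff:

    #include <cmath>
    #include <cstdio>

    struct Vec3 { float x, y, z; };   // stand-in for glm::vec3 in this sketch

    // True when an "ellipsoid" is close enough to a sphere to use btSphereShape.
    bool closeEnoughToSphere(const Vec3& halfExtents) {
        const float MIN_RADIUS = 0.001f;
        const float MIN_RELATIVE_SPHERICAL_ERROR = 0.001f;
        float radius = halfExtents.x;
        return radius > MIN_RADIUS
            && std::fabs(radius - halfExtents.y) / radius < MIN_RELATIVE_SPHERICAL_ERROR
            && std::fabs(radius - halfExtents.z) / radius < MIN_RELATIVE_SPHERICAL_ERROR;
    }

    int main() {
        std::printf("%d\n", closeEnoughToSphere({0.5f, 0.5f, 0.5f}));    // 1: sphere shape
        std::printf("%d\n", closeEnoughToSphere({0.5f, 0.5f, 0.25f}));   // 0: build a hull instead
        return 0;
    }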

View file

@ -65,25 +65,4 @@ float packUnlit() {
return FRAG_PACK_UNLIT;
}
<!
uniform sampler2D normalFittingMap;
vec3 bestFitNormal(vec3 normal) {
vec3 absNorm = abs(normal);
float maxNAbs = max(absNorm.z, max(absNorm.x, absNorm.y));
vec2 texcoord = (absNorm.z < maxNAbs ?
(absNorm.y < maxNAbs ? absNorm.yz : absNorm.xz) :
absNorm.xy);
texcoord = (texcoord.x < texcoord.y ? texcoord.yx : texcoord.xy);
texcoord.y /= texcoord.x;
vec3 cN = normal / maxNAbs;
float fittingScale = texture(normalFittingMap, texcoord).a;
cN *= fittingScale;
return (cN * 0.5 + 0.5);
}
!>
<@endif@>

View file

@ -414,8 +414,6 @@ _nextID(0) {
// Set the defaults needed for a simple program
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
);
GeometryCache::_simpleTransparentPipeline =
@ -424,8 +422,6 @@ _nextID(0) {
// Set the defaults needed for a simple program
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
);
GeometryCache::_simpleWirePipeline =
@ -1770,7 +1766,6 @@ static void buildWebShader(const std::string& vertShaderText, const std::string&
shaderPointerOut = gpu::Shader::createProgram(VS, PS);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), render::ShapePipeline::Slot::MAP::NORMAL_FITTING));
gpu::Shader::makeProgram(*shaderPointerOut, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_NONE);
@ -1784,9 +1779,6 @@ static void buildWebShader(const std::string& vertShaderText, const std::string&
void GeometryCache::bindOpaqueWebBrowserProgram(gpu::Batch& batch, bool isAA) {
batch.setPipeline(getOpaqueWebBrowserProgram(isAA));
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
gpu::PipelinePointer GeometryCache::getOpaqueWebBrowserProgram(bool isAA) {
@ -1802,9 +1794,6 @@ gpu::PipelinePointer GeometryCache::getOpaqueWebBrowserProgram(bool isAA) {
void GeometryCache::bindTransparentWebBrowserProgram(gpu::Batch& batch, bool isAA) {
batch.setPipeline(getTransparentWebBrowserProgram(isAA));
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
gpu::PipelinePointer GeometryCache::getTransparentWebBrowserProgram(bool isAA) {
@ -1827,9 +1816,6 @@ void GeometryCache::bindSimpleProgram(gpu::Batch& batch, bool textured, bool tra
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
}
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
}
gpu::PipelinePointer GeometryCache::getSimplePipeline(bool textured, bool transparent, bool culled, bool unlit, bool depthBiased) {
@ -1846,7 +1832,6 @@ gpu::PipelinePointer GeometryCache::getSimplePipeline(bool textured, bool transp
_unlitShader = gpu::Shader::createProgram(VS, PSUnlit);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), render::ShapePipeline::Slot::MAP::NORMAL_FITTING));
gpu::Shader::makeProgram(*_simpleShader, slotBindings);
gpu::Shader::makeProgram(*_unlitShader, slotBindings);
});

View file

@ -97,6 +97,8 @@ ShapeKey MeshPartPayload::getShapeKey() const {
}
ShapeKey::Builder builder;
builder.withMaterial();
if (drawMaterialKey.isTranslucent()) {
builder.withTranslucent();
}
@ -478,6 +480,8 @@ ShapeKey ModelMeshPartPayload::getShapeKey() const {
}
ShapeKey::Builder builder;
builder.withMaterial();
if (isTranslucent || _fadeState != FADE_COMPLETE) {
builder.withTranslucent();
}

View file

@ -75,7 +75,6 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
// GPU jobs: Start preparing the primary, deferred and lighting buffer
const auto primaryFramebuffer = addJob<PreparePrimaryFramebuffer>("PreparePrimaryBuffer");
// const auto fullFrameRangeTimer = addJob<BeginGPURangeTimer>("BeginRangeTimer");
const auto opaqueRangeTimer = addJob<BeginGPURangeTimer>("BeginOpaqueRangeTimer", "DrawOpaques");
const auto prepareDeferredInputs = PrepareDeferred::Inputs(primaryFramebuffer, lightingModel).hasVarying();
@ -154,21 +153,25 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
const auto toneMappingInputs = render::Varying(ToneMappingDeferred::Inputs(lightingFramebuffer, primaryFramebuffer));
addJob<ToneMappingDeferred>("ToneMapping", toneMappingInputs);
{ // Debug the bounds of the rendered items, still look at the zbuffer
addJob<DrawBounds>("DrawMetaBounds", metas);
addJob<DrawBounds>("DrawOpaqueBounds", opaques);
addJob<DrawBounds>("DrawTransparentBounds", transparents);
}
// Overlays
const auto overlayOpaquesInputs = DrawOverlay3D::Inputs(overlayOpaques, lightingModel).hasVarying();
const auto overlayTransparentsInputs = DrawOverlay3D::Inputs(overlayTransparents, lightingModel).hasVarying();
addJob<DrawOverlay3D>("DrawOverlay3DOpaque", overlayOpaquesInputs, true);
addJob<DrawOverlay3D>("DrawOverlay3DTransparent", overlayTransparentsInputs, false);
// Debugging stages
{
// Bounds do not draw on stencil buffer, so they must come last
addJob<DrawBounds>("DrawMetaBounds", metas);
{ // Debug the bounds of the rendered OVERLAY items, still look at the zbuffer
addJob<DrawBounds>("DrawOverlayOpaqueBounds", overlayOpaques);
addJob<DrawBounds>("DrawOverlayTransparentBounds", overlayTransparents);
}
// Debugging stages
{
// Debugging Deferred buffer job
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);
@ -208,9 +211,6 @@ RenderDeferredTask::RenderDeferredTask(RenderFetchCullSortTask::Output items) {
// Blit!
addJob<Blit>("Blit", primaryFramebuffer);
// addJob<EndGPURangeTimer>("RangeTimer", fullFrameRangeTimer);
}
void BeginGPURangeTimer::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, gpu::RangeTimerPointer& timer) {

View file

@ -50,9 +50,13 @@
#include "overlay3D_vert.h"
#include "overlay3D_frag.h"
#include "overlay3D_model_frag.h"
#include "overlay3D_model_translucent_frag.h"
#include "overlay3D_translucent_frag.h"
#include "overlay3D_unlit_frag.h"
#include "overlay3D_translucent_unlit_frag.h"
#include "overlay3D_model_unlit_frag.h"
#include "overlay3D_model_translucent_unlit_frag.h"
using namespace render;
@ -70,15 +74,24 @@ void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void initOverlay3DPipelines(ShapePlumber& plumber) {
auto vertex = gpu::Shader::createVertex(std::string(overlay3D_vert));
auto vertexModel = gpu::Shader::createVertex(std::string(model_vert));
auto pixel = gpu::Shader::createPixel(std::string(overlay3D_frag));
auto pixelTranslucent = gpu::Shader::createPixel(std::string(overlay3D_translucent_frag));
auto pixelUnlit = gpu::Shader::createPixel(std::string(overlay3D_unlit_frag));
auto pixelTranslucentUnlit = gpu::Shader::createPixel(std::string(overlay3D_translucent_unlit_frag));
auto pixelModel = gpu::Shader::createPixel(std::string(overlay3D_model_frag));
auto pixelModelTranslucent = gpu::Shader::createPixel(std::string(overlay3D_model_translucent_frag));
auto pixelModelUnlit = gpu::Shader::createPixel(std::string(overlay3D_model_unlit_frag));
auto pixelModelTranslucentUnlit = gpu::Shader::createPixel(std::string(overlay3D_model_translucent_unlit_frag));
auto opaqueProgram = gpu::Shader::createProgram(vertex, pixel);
auto translucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucent);
auto unlitOpaqueProgram = gpu::Shader::createProgram(vertex, pixelUnlit);
auto unlitTranslucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucentUnlit);
auto materialOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
auto materialTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
auto materialUnlitOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
auto materialUnlitTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
for (int i = 0; i < 8; i++) {
bool isCulled = (i & 1);
@ -103,14 +116,20 @@ void initOverlay3DPipelines(ShapePlumber& plumber) {
}
ShapeKey::Filter::Builder builder;
isCulled ? builder.withCullFace() : builder.withoutCullFace();
isBiased ? builder.withDepthBias() : builder.withoutDepthBias();
isOpaque ? builder.withOpaque() : builder.withTranslucent();
auto simpleProgram = isOpaque ? opaqueProgram : translucentProgram;
auto unlitProgram = isOpaque ? unlitOpaqueProgram : unlitTranslucentProgram;
plumber.addPipeline(builder.withoutUnlit().build(), simpleProgram, state, &lightBatchSetter);
plumber.addPipeline(builder.withUnlit().build(), unlitProgram, state, &batchSetter);
auto materialProgram = isOpaque ? materialOpaqueProgram : materialTranslucentProgram;
auto materialUnlitProgram = isOpaque ? materialUnlitOpaqueProgram : materialUnlitTranslucentProgram;
plumber.addPipeline(builder.withMaterial().build().key(), materialProgram, state, &lightBatchSetter);
plumber.addPipeline(builder.withMaterial().withUnlit().build().key(), materialUnlitProgram, state, &batchSetter);
plumber.addPipeline(builder.withoutUnlit().withoutMaterial().build().key(), simpleProgram, state, &lightBatchSetter);
plumber.addPipeline(builder.withUnlit().withoutMaterial().build().key(), unlitProgram, state, &batchSetter);
}
}
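
The loop above covers all eight (culled, depth-biased, opaque) pipeline variants by reading bits of the loop counter; only isCulled = (i & 1) is visible in this hunk, so the bit choices for the other two properties below are an assumption made for illustration:

    #include <cstdio>

    int main() {
        // Enumerate every combination of three pipeline properties from a 3-bit counter.
        for (int i = 0; i < 8; i++) {
            bool isCulled = (i & 1) != 0;         // matches the diff
            bool isBiased = (i & 2) != 0;         // assumed bit assignment
            bool isOpaque = (i & 4) != 0;         // assumed bit assignment
            std::printf("i=%d culled=%d biased=%d opaque=%d\n", i, isCulled, isBiased, isOpaque);
        }
        return 0;
    }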
@ -144,78 +163,87 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
// TODO: Refactor this to use a filter
// Opaques
addPipeline(
Key::Builder(),
Key::Builder().withMaterial(),
modelVertex, modelPixel);
addPipeline(
Key::Builder().withMaterial().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withTangents(),
Key::Builder().withMaterial().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSpecular(),
Key::Builder().withMaterial().withSpecular(),
modelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withTangents().withSpecular(),
Key::Builder().withMaterial().withTangents().withSpecular(),
modelNormalMapVertex, modelNormalSpecularMapPixel);
// Translucents
addPipeline(
Key::Builder().withMaterial().withTranslucent(),
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent(),
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withMaterial().withTranslucent().withUnlit(),
modelVertex, modelTranslucentUnlitPixel);
addPipeline(
Key::Builder().withTranslucent().withUnlit(),
modelVertex, modelTranslucentUnlitPixel);
addPipeline(
Key::Builder().withTranslucent().withTangents(),
Key::Builder().withMaterial().withTranslucent().withTangents(),
modelNormalMapVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent().withSpecular(),
Key::Builder().withMaterial().withTranslucent().withSpecular(),
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent().withTangents().withSpecular(),
Key::Builder().withMaterial().withTranslucent().withTangents().withSpecular(),
modelNormalMapVertex, modelTranslucentPixel);
addPipeline(
// FIXME: Ignore lightmap for translucents meshpart
Key::Builder().withTranslucent().withLightmap(),
Key::Builder().withMaterial().withTranslucent().withLightmap(),
modelVertex, modelTranslucentPixel);
// Lightmapped
addPipeline(
Key::Builder().withLightmap(),
Key::Builder().withMaterial().withLightmap(),
modelLightmapVertex, modelLightmapPixel);
addPipeline(
Key::Builder().withLightmap().withTangents(),
Key::Builder().withMaterial().withLightmap().withTangents(),
modelLightmapNormalMapVertex, modelLightmapNormalMapPixel);
addPipeline(
Key::Builder().withLightmap().withSpecular(),
Key::Builder().withMaterial().withLightmap().withSpecular(),
modelLightmapVertex, modelLightmapSpecularMapPixel);
addPipeline(
Key::Builder().withLightmap().withTangents().withSpecular(),
Key::Builder().withMaterial().withLightmap().withTangents().withSpecular(),
modelLightmapNormalMapVertex, modelLightmapNormalSpecularMapPixel);
// Skinned
addPipeline(
Key::Builder().withSkinned(),
Key::Builder().withMaterial().withSkinned(),
skinModelVertex, modelPixel);
addPipeline(
Key::Builder().withSkinned().withTangents(),
Key::Builder().withMaterial().withSkinned().withTangents(),
skinModelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSkinned().withSpecular(),
Key::Builder().withMaterial().withSkinned().withSpecular(),
skinModelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withSkinned().withTangents().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTangents().withSpecular(),
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
// Skinned and Translucent
addPipeline(
Key::Builder().withSkinned().withTranslucent(),
Key::Builder().withMaterial().withSkinned().withTranslucent(),
skinModelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withSkinned().withTranslucent().withTangents(),
Key::Builder().withMaterial().withSkinned().withTranslucent().withTangents(),
skinModelNormalMapVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withSkinned().withTranslucent().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTranslucent().withSpecular(),
skinModelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withSkinned().withTranslucent().withTangents().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTranslucent().withTangents().withSpecular(),
skinModelNormalMapVertex, modelTranslucentPixel);
// Depth-only
addPipeline(
@ -244,32 +272,32 @@ void initForwardPipelines(render::ShapePlumber& plumber) {
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
// Opaques
addPipeline(
Key::Builder(),
Key::Builder().withMaterial(),
modelVertex, modelPixel);
addPipeline(
Key::Builder().withUnlit(),
Key::Builder().withMaterial().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withTangents(),
Key::Builder().withMaterial().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSpecular(),
Key::Builder().withMaterial().withSpecular(),
modelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withTangents().withSpecular(),
Key::Builder().withMaterial().withTangents().withSpecular(),
modelNormalMapVertex, modelNormalSpecularMapPixel);
// Skinned
addPipeline(
Key::Builder().withSkinned(),
Key::Builder().withMaterial().withSkinned(),
skinModelVertex, modelPixel);
addPipeline(
Key::Builder().withSkinned().withTangents(),
Key::Builder().withMaterial().withSkinned().withTangents(),
skinModelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSkinned().withSpecular(),
Key::Builder().withMaterial().withSkinned().withSpecular(),
skinModelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withSkinned().withTangents().withSpecular(),
Key::Builder().withMaterial().withSkinned().withTangents().withSpecular(),
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
}
@ -319,9 +347,6 @@ void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
// Set a default material
if (pipeline.locations->materialBufferUnit >= 0) {

View file

@ -0,0 +1,88 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D.slf
// fragment shader
//
// Created by Sam Gateau on 6/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredGlobalLight.slh@>
<$declareEvalSkyboxGlobalColor()$>
<@include model/Material.slh@>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec4 _position;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float metallic = getMaterialMetallic(mat);
vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
if (metallic <= 0.5) {
metallic = 0.0;
} else {
fresnel = albedo;
metallic = 1.0;
}
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
vec3 fragPosition = _position.xyz;
//vec3 fragNormal = normalize(_normal);
TransformCamera cam = getTransformCamera();
vec3 fragNormal;
<$transformEyeToWorldDir(cam, _normal, fragNormal)$>;
vec4 color = vec4(evalSkyboxGlobalColor(
cam._viewInverse,
1.0,
occlusionTex,
fragPosition,
fragNormal,
albedo,
fresnel,
metallic,
roughness),
opacity);
// And emissive
color.rgb += emissive * isEmissiveEnabled();
// Apply standard tone mapping
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}

View file

@ -0,0 +1,83 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D_model_transparent.slf
//
// Created by Sam Gateau on 2/27/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlended()$>
<@include model/Material.slh@>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
in vec2 _texCoord0;
in vec2 _texCoord1;
in vec4 _position;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
float metallic = getMaterialMetallic(mat);
vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
if (metallic <= 0.5) {
metallic = 0.0;
} else {
fresnel = albedo;
metallic = 1.0;
}
float roughness = getMaterialRoughness(mat);
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
vec3 emissive = getMaterialEmissive(mat);
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
vec3 fragPosition = _position.xyz;
TransformCamera cam = getTransformCamera();
vec3 fragNormal;
<$transformEyeToWorldDir(cam, _normal, fragNormal)$>
vec4 color = vec4(evalGlobalLightingAlphaBlended(
cam._viewInverse,
1.0,
occlusionTex,
fragPosition,
fragNormal,
albedo,
fresnel,
metallic,
emissive,
roughness, opacity),
opacity);
// Apply standard tone mapping
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}

View file

@ -0,0 +1,43 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_transparent_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include LightingModel.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>
in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
vec4 color = vec4(albedo * isUnlitEnabled(), opacity);
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}

View file

@ -0,0 +1,44 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include LightingModel.slh@>
<@include model/Material.slh@>
<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>
in vec2 _texCoord0;
in vec3 _normal;
in vec3 _color;
in float _alpha;
out vec4 _fragColor;
void main(void) {
Material mat = getMaterial();
int matKey = getMaterialKey(mat);
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>
float opacity = 1.0;
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
<$discardTransparent(opacity)$>;
vec3 albedo = getMaterialAlbedo(mat);
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
albedo *= _color;
vec4 color = vec4(albedo * isUnlitEnabled(), opacity);
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}

View file

@ -20,8 +20,6 @@ void renderItems(const SceneContextPointer& sceneContext, const RenderContextPoi
void renderShapes(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ShapePlumberPointer& shapeContext, const ItemBounds& inItems, int maxDrawnItems = -1);
void renderStateSortShapes(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ShapePlumberPointer& shapeContext, const ItemBounds& inItems, int maxDrawnItems = -1);
class DrawLightConfig : public Job::Config {
Q_OBJECT
Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY numDrawnChanged)

View file

@ -39,6 +39,10 @@ void ShapePlumber::addPipelineHelper(const Filter& filter, ShapeKey key, int bit
}
} else {
// Add the brand new pipeline and cache its location in the lib
auto precedent = _pipelineMap.find(key);
if (precedent != _pipelineMap.end()) {
qCDebug(renderlogging) << "Key already assigned: " << key;
}
_pipelineMap.insert(PipelineMap::value_type(key, pipeline));
}
}
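
The guard added above logs when a ShapeKey already has a pipeline cached before the new one goes into the map. A reduced sketch of the same look-up-then-insert pattern in plain C++ (the aliases are stand-ins and the container choice is an assumption, not the engine's actual PipelineMap type):

    #include <iostream>
    #include <memory>
    #include <unordered_map>

    using Key = int;                               // stand-in for render::ShapeKey
    using PipelinePointer = std::shared_ptr<int>;  // stand-in for the gpu pipeline
    using PipelineMap = std::unordered_multimap<Key, PipelinePointer>;

    // Warn when the key is already cached, then insert the new pipeline anyway.
    void addPipeline(PipelineMap& pipelineMap, Key key, PipelinePointer pipeline) {
        auto precedent = pipelineMap.find(key);
        if (precedent != pipelineMap.end()) {
            std::cerr << "Key already assigned: " << key << "\n";
        }
        pipelineMap.insert(PipelineMap::value_type(key, pipeline));
    }
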
@@ -65,16 +69,11 @@ void ShapePlumber::addPipeline(const Filter& filter, const gpu::ShaderPointer& p
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), Slot::BUFFER::LIGHT));
slotBindings.insert(gpu::Shader::Binding(std::string("lightAmbientBuffer"), Slot::BUFFER::LIGHT_AMBIENT_BUFFER));
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), Slot::MAP::LIGHT_AMBIENT));
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), Slot::NORMAL_FITTING));
gpu::Shader::makeProgram(*program, slotBindings);
auto locations = std::make_shared<Locations>();
locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
if (program->getTextures().findLocation("normalFittingMap") > -1) {
locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
}
locations->albedoTextureUnit = program->getTextures().findLocation("albedoMap");
locations->roughnessTextureUnit = program->getTextures().findLocation("roughnessMap");
locations->normalTextureUnit = program->getTextures().findLocation("normalMap");

View file

@@ -22,13 +22,13 @@ namespace render {
class ShapeKey {
public:
enum FlagBit {
TRANSLUCENT = 0,
MATERIAL = 0,
TRANSLUCENT,
LIGHTMAP,
TANGENTS,
SPECULAR,
UNLIT,
SKINNED,
STEREO,
DEPTH_ONLY,
DEPTH_BIAS,
WIREFRAME,
@@ -53,13 +53,13 @@ public:
ShapeKey build() const { return ShapeKey{_flags}; }
Builder& withMaterial() { _flags.set(MATERIAL); return (*this); }
Builder& withTranslucent() { _flags.set(TRANSLUCENT); return (*this); }
Builder& withLightmap() { _flags.set(LIGHTMAP); return (*this); }
Builder& withTangents() { _flags.set(TANGENTS); return (*this); }
Builder& withSpecular() { _flags.set(SPECULAR); return (*this); }
Builder& withUnlit() { _flags.set(UNLIT); return (*this); }
Builder& withSkinned() { _flags.set(SKINNED); return (*this); }
Builder& withStereo() { _flags.set(STEREO); return (*this); }
Builder& withDepthOnly() { _flags.set(DEPTH_ONLY); return (*this); }
Builder& withDepthBias() { _flags.set(DEPTH_BIAS); return (*this); }
Builder& withWireframe() { _flags.set(WIREFRAME); return (*this); }
@@ -89,6 +89,9 @@ public:
Filter build() const { return Filter(_flags, _mask); }
Builder& withMaterial() { _flags.set(MATERIAL); _mask.set(MATERIAL); return (*this); }
Builder& withoutMaterial() { _flags.reset(MATERIAL); _mask.set(MATERIAL); return (*this); }
Builder& withTranslucent() { _flags.set(TRANSLUCENT); _mask.set(TRANSLUCENT); return (*this); }
Builder& withOpaque() { _flags.reset(TRANSLUCENT); _mask.set(TRANSLUCENT); return (*this); }
@@ -107,9 +110,6 @@ public:
Builder& withSkinned() { _flags.set(SKINNED); _mask.set(SKINNED); return (*this); }
Builder& withoutSkinned() { _flags.reset(SKINNED); _mask.set(SKINNED); return (*this); }
Builder& withStereo() { _flags.set(STEREO); _mask.set(STEREO); return (*this); }
Builder& withoutStereo() { _flags.reset(STEREO); _mask.set(STEREO); return (*this); }
Builder& withDepthOnly() { _flags.set(DEPTH_ONLY); _mask.set(DEPTH_ONLY); return (*this); }
Builder& withoutDepthOnly() { _flags.reset(DEPTH_ONLY); _mask.set(DEPTH_ONLY); return (*this); }
@@ -128,19 +128,20 @@ public:
Flags _mask{0};
};
Filter(const Filter::Builder& builder) : Filter(builder._flags, builder._mask) {}
ShapeKey key() const { return ShapeKey(_flags); }
protected:
friend class ShapePlumber;
Flags _flags{0};
Flags _mask{0};
};
bool useMaterial() const { return _flags[MATERIAL]; }
bool hasLightmap() const { return _flags[LIGHTMAP]; }
bool hasTangents() const { return _flags[TANGENTS]; }
bool hasSpecular() const { return _flags[SPECULAR]; }
bool isUnlit() const { return _flags[UNLIT]; }
bool isTranslucent() const { return _flags[TRANSLUCENT]; }
bool isSkinned() const { return _flags[SKINNED]; }
bool isStereo() const { return _flags[STEREO]; }
bool isDepthOnly() const { return _flags[DEPTH_ONLY]; }
bool isDepthBiased() const { return _flags[DEPTH_BIAS]; }
bool isWireFrame() const { return _flags[WIREFRAME]; }
@@ -170,13 +171,13 @@ inline QDebug operator<<(QDebug debug, const ShapeKey& key) {
debug << "[ShapeKey: OWN_PIPELINE]";
} else {
debug << "[ShapeKey:"
<< "useMaterial:" << key.useMaterial()
<< "hasLightmap:" << key.hasLightmap()
<< "hasTangents:" << key.hasTangents()
<< "hasSpecular:" << key.hasSpecular()
<< "isUnlit:" << key.isUnlit()
<< "isTranslucent:" << key.isTranslucent()
<< "isSkinned:" << key.isSkinned()
<< "isStereo:" << key.isStereo()
<< "isDepthOnly:" << key.isDepthOnly()
<< "isDepthBiased:" << key.isDepthBiased()
<< "isWireFrame:" << key.isWireFrame()
@@ -213,8 +214,6 @@ public:
OCCLUSION,
SCATTERING,
LIGHT_AMBIENT,
NORMAL_FITTING = 10,
};
};
@@ -226,7 +225,6 @@ public:
int metallicTextureUnit;
int emissiveTextureUnit;
int occlusionTextureUnit;
int normalFittingMapUnit;
int lightingModelBufferUnit;
int skinClusterBufferUnit;
int materialBufferUnit;
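
The accessor, Builder, and Filter changes in this header all revolve around the ShapeKey bit-field. A reduced, self-contained sketch of how such a key is built and matched, in plain C++ (a simplified stand-in with made-up names, not the engine's render::ShapeKey):

    #include <bitset>
    #include <iostream>

    class MiniShapeKey {
    public:
        enum FlagBit { MATERIAL = 0, TRANSLUCENT, UNLIT, NUM_FLAGS };
        using Flags = std::bitset<NUM_FLAGS>;

        // Fluent builder: each with*() call sets one feature bit.
        class Builder {
        public:
            Builder& withMaterial()    { _flags.set(MATERIAL);    return *this; }
            Builder& withTranslucent() { _flags.set(TRANSLUCENT); return *this; }
            Builder& withUnlit()       { _flags.set(UNLIT);       return *this; }
            MiniShapeKey build() const { return MiniShapeKey{_flags}; }
        private:
            Flags _flags{0};
        };

        explicit MiniShapeKey(const Flags& flags) : _flags(flags) {}
        bool useMaterial()   const { return _flags[MATERIAL]; }
        bool isTranslucent() const { return _flags[TRANSLUCENT]; }

        // A filter matches a key when the key agrees with the filter's flags
        // on every bit selected by the mask.
        struct Filter {
            Flags flags{0};
            Flags mask{0};
            bool test(const MiniShapeKey& key) const {
                return (key._flags & mask) == (flags & mask);
            }
        };

    private:
        Flags _flags;
    };

    int main() {
        auto key = MiniShapeKey::Builder().withMaterial().withTranslucent().build();

        // "Opaque materials" filter: MATERIAL must be set, TRANSLUCENT must be clear.
        MiniShapeKey::Filter opaqueMaterials;
        opaqueMaterials.flags.set(MiniShapeKey::MATERIAL);
        opaqueMaterials.mask.set(MiniShapeKey::MATERIAL);
        opaqueMaterials.mask.set(MiniShapeKey::TRANSLUCENT);

        std::cout << std::boolalpha << opaqueMaterials.test(key) << "\n";  // false: the key is translucent
    }

The real Filter::Builder pairs each with*/without* call with a mask bit in just this way, which is what the new withMaterial()/withoutMaterial() methods add.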

View file

@@ -45,7 +45,8 @@ enum ShapeType {
SHAPE_TYPE_COMPOUND,
SHAPE_TYPE_SIMPLE_HULL,
SHAPE_TYPE_SIMPLE_COMPOUND,
SHAPE_TYPE_STATIC_MESH
SHAPE_TYPE_STATIC_MESH,
SHAPE_TYPE_ELLIPSOID
};
class ShapeInfo {

View file

@@ -159,13 +159,33 @@ Column {
}
}
Row {
Column {
id: metas
CheckBox {
text: "Draw Meta Bounds"
text: "Metas"
checked: Render.getConfig("DrawMetaBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawMetaBounds")["enabled"] = checked }
}
CheckBox {
text: "Opaques"
checked: Render.getConfig("DrawOpaqueBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawOpaqueBounds")["enabled"] = checked }
}
CheckBox {
text: "Transparents"
checked: Render.getConfig("DrawTransparentBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawTransparentBounds")["enabled"] = checked }
}
CheckBox {
text: "Overlay Opaques"
checked: Render.getConfig("DrawOverlayOpaqueBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawOverlayOpaqueBounds")["enabled"] = checked }
}
CheckBox {
text: "Overlay Transparents"
checked: Render.getConfig("DrawOverlayTransparentBounds")["enabled"]
onCheckedChanged: { Render.getConfig("DrawOverlayTransparentBounds")["enabled"] = checked }
}
}
}
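
Each of these checkboxes reads and writes the "enabled" property of a render job configuration looked up by name through Render.getConfig(...). A hedged sketch of the QObject shape such a config exposes to scripts, in C++ (a simplified stand-in, not the actual render::Job::Config declaration; the class name is illustrative):

    #include <QObject>

    // Stand-in for a job config such as "DrawOpaqueBounds": the Q_PROPERTY below
    // is what the QML panel touches via Render.getConfig("DrawOpaqueBounds")["enabled"].
    class DrawBoundsConfig : public QObject {
        Q_OBJECT
        Q_PROPERTY(bool enabled READ isEnabled WRITE setEnabled NOTIFY dirty)
    public:
        bool isEnabled() const { return _enabled; }
        void setEnabled(bool enabled) {
            if (enabled != _enabled) {
                _enabled = enabled;
                emit dirty();  // signal the change so the engine can react to the new setting
            }
        }
    signals:
        void dirty();
    private:
        bool _enabled{ false };
    };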

View file

@@ -51,7 +51,9 @@ const OUTPUT_DEVICE_SETTING = "audio_output_device";
var selectedInputMenu = "";
var selectedOutputMenu = "";
var audioDevicesList = [];
function setupAudioMenus() {
removeAudioMenus();
Menu.addSeparator("Audio", "Input Audio Device");
var inputDeviceSetting = Settings.getValue(INPUT_DEVICE_SETTING);
@@ -67,11 +69,12 @@ function setupAudioMenus() {
var thisDeviceSelected = (inputDevices[i] == selectedInputDevice);
var menuItem = "Use " + inputDevices[i] + " for Input";
Menu.addMenuItem({
menuName: "Audio",
menuItemName: menuItem,
isCheckable: true,
isChecked: thisDeviceSelected
});
menuName: "Audio",
menuItemName: menuItem,
isCheckable: true,
isChecked: thisDeviceSelected
});
audioDevicesList.push(menuItem);
if (thisDeviceSelected) {
selectedInputMenu = menuItem;
}
@@ -97,12 +100,24 @@ function setupAudioMenus() {
isCheckable: true,
isChecked: thisDeviceSelected
});
audioDevicesList.push(menuItem);
if (thisDeviceSelected) {
selectedOutputMenu = menuItem;
}
}
}
function removeAudioMenus() {
Menu.removeSeparator("Audio", "Input Audio Device");
Menu.removeSeparator("Audio", "Output Audio Device");
for (var index = 0; index < audioDevicesList.length; index++) {
Menu.removeMenuItem("Audio", audioDevicesList[index]);
}
audioDevicesList = [];
}
function onDevicechanged() {
print("audio devices changed, removing Audio > Devices menu...");
Menu.removeMenu("Audio > Devices");
@@ -218,6 +233,7 @@ Script.update.connect(checkHMDAudio);
Script.scriptEnding.connect(function () {
restoreAudio();
removeAudioMenus();
Menu.menuItemEvent.disconnect(menuItemEvent);
Script.update.disconnect(checkHMDAudio);
});