merge from upstream

Seth Alves 2017-01-17 09:29:30 -08:00
parent c212fc93c9
commit 6bace4f451
29 changed files with 771 additions and 216 deletions

View file

@ -28,6 +28,7 @@
#include <StDev.h>
#include <UUID.h>
#include "AudioHelpers.h"
#include "AudioRingBuffer.h"
#include "AudioMixerClientData.h"
#include "AvatarAudioStream.h"
@ -68,7 +69,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");
packetReceiver.registerListener(PacketType::NodeMuteRequest, this, "handleNodeMuteRequestPacket");
packetReceiver.registerListener(PacketType::RadiusIgnoreRequest, this, "handleRadiusIgnoreRequestPacket");
packetReceiver.registerListener(PacketType::RequestsDomainListData, this, "handleRequestsDomainListDataPacket");
packetReceiver.registerListener(PacketType::RequestsDomainListData, this, "handleRequestsDomainListDataPacket");
packetReceiver.registerListener(PacketType::PerAvatarGainSet, this, "handlePerAvatarGainSetDataPacket");
connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
}
@ -186,7 +188,8 @@ void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
clientData->removeHRTFsForNode(killedNode->getUUID());
QUuid killedUUID = killedNode->getUUID();
clientData->removeHRTFsForNode(killedUUID);
}
});
}
@ -240,6 +243,20 @@ void AudioMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> p
sendingNode->parseIgnoreRequestMessage(packet);
}
void AudioMixer::handlePerAvatarGainSetDataPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
if (clientData) {
QUuid listeningNodeUUID = sendingNode->getUUID();
// parse the UUID from the packet
QUuid audioSourceUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
uint8_t packedGain;
packet->readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
clientData->hrtfForStream(audioSourceUUID, QUuid()).setGainAdjustment(gain);
qDebug() << "Setting gain adjustment for hrtf[" << listeningNodeUUID << "][" << audioSourceUUID << "] to " << gain;
}
}
void AudioMixer::handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
sendingNode->parseIgnoreRadiusRequestMessage(packet);
}

View file

@ -66,6 +66,7 @@ private slots:
void handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handlePerAvatarGainSetDataPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void start();
void removeHRTFsForFinishedInjector(const QUuid& streamID);

View file

@ -0,0 +1,32 @@
include(ExternalProject)
include(SelectLibraryConfigurations)
set(EXTERNAL_NAME LibOVRPlatform)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/OVRPlatformSDK_v1.10.0.zip
URL_MD5 e6c8264af16d904e6506acd5172fa0a9
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${SOURCE_DIR}/Windows/LibOVRPlatform64_1.lib CACHE TYPE INTERNAL)
else()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${SOURCE_DIR}/Windows/LibOVRPlatform32_1.lib CACHE TYPE INTERNAL)
endif()
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/Include CACHE TYPE INTERNAL)
endif ()
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")

View file

@ -0,0 +1,44 @@
#
# FindLibOVRPlatform.cmake
#
# Try to find the LibOVRPlatform library to use the Oculus Platform SDK
#
# You must provide a LIBOVRPLATFORM_ROOT_DIR which contains Windows and Include directories
#
# Once done this will define
#
# LIBOVRPLATFORM_FOUND - system found Oculus Platform SDK
# LIBOVRPLATFORM_INCLUDE_DIRS - the Oculus Platform include directory
# LIBOVRPLATFORM_LIBRARIES - Link this to use Oculus Platform
#
# Created on December 16, 2016 by Stephen Birarda
# Copyright 2016 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
if (WIN32)
# setup hints for LIBOVRPLATFORM search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("LibOVRPlatform")
find_path(LIBOVRPLATFORM_INCLUDE_DIRS OVR_Platform.h PATH_SUFFIXES Include HINTS ${LIBOVRPLATFORM_SEARCH_DIRS})
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(_LIB_NAME LibOVRPlatform64_1.lib)
else()
set(_LIB_NAME LibOVRPlatform32_1.lib)
endif()
find_library(LIBOVRPLATFORM_LIBRARY_RELEASE NAMES ${_LIB_NAME} PATH_SUFFIXES Windows HINTS ${LIBOVRPLATFORM_SEARCH_DIRS})
include(SelectLibraryConfigurations)
select_library_configurations(LIBOVRPLATFORM)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LIBOVRPLATFORM DEFAULT_MSG LIBOVRPLATFORM_INCLUDE_DIRS LIBOVRPLATFORM_LIBRARIES)
mark_as_advanced(LIBOVRPLATFORM_INCLUDE_DIRS LIBOVRPLATFORM_LIBRARIES LIBOVRPLATFORM_SEARCH_DIRS)
endif ()

View file

@ -780,12 +780,12 @@ void DomainServerSettingsManager::processNodeKickRequestPacket(QSharedPointer<Re
// This function processes the "Get Username from ID" request.
void DomainServerSettingsManager::processUsernameFromIDRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// Before we do any processing on this packet, make sure it comes from a node that is allowed to kick (is an admin)
if (sendingNode->getCanKick()) {
// From the packet, pull the UUID we're identifying
QUuid nodeUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
if (!nodeUUID.isNull()) {
// From the packet, pull the UUID we're identifying
QUuid nodeUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
if (!nodeUUID.isNull()) {
// Before we do any processing on this packet, make sure it comes from a node that is allowed to kick (is an admin)
// OR from a node whose UUID matches the one in the packet
if (sendingNode->getCanKick() || nodeUUID == sendingNode->getUUID()) {
// First, make sure we actually have a node with this UUID
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
auto matchingNode = limitedNodeList->nodeWithUUID(nodeUUID);
@ -813,12 +813,12 @@ void DomainServerSettingsManager::processUsernameFromIDRequestPacket(QSharedPoin
qWarning() << "Node username request received for unknown node. Refusing to process.";
}
} else {
qWarning() << "Node username request received for invalid node ID. Refusing to process.";
qWarning() << "Refusing to process a username request packet from node" << uuidStringWithoutCurlyBraces(sendingNode->getUUID())
<< ". Either node doesn't have kick permissions or is requesting a username not from their UUID.";
}
} else {
qWarning() << "Refusing to process a username request packet from node" << uuidStringWithoutCurlyBraces(sendingNode->getUUID())
<< "that does not have kick permissions.";
qWarning() << "Node username request received for invalid node ID. Refusing to process.";
}
}

View file

@ -10,35 +10,35 @@
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Controls.Styles 1.4
import QtGraphicalEffects 1.0
import "../styles-uit"
Row {
Item {
id: thisNameCard
// Spacing
spacing: 10
// Anchors
anchors.top: parent.top
anchors {
topMargin: (parent.height - contentHeight)/2
bottomMargin: (parent.height - contentHeight)/2
verticalCenter: parent.verticalCenter
leftMargin: 10
rightMargin: 10
}
// Properties
property int contentHeight: 50
property string uuid: ""
property string displayName: ""
property string userName: ""
property int displayTextHeight: 18
property int usernameTextHeight: 12
property real audioLevel: 0.0
property bool isMyCard: false
property bool selected: false
/* User image commented out for now - will probably be re-introduced later.
Column {
id: avatarImage
// Size
height: contentHeight
height: parent.height
width: height
Image {
id: userImage
@ -49,12 +49,12 @@ Row {
}
}
*/
Column {
Item {
id: textContainer
// Size
width: parent.width - /*avatarImage.width - */parent.anchors.leftMargin - parent.anchors.rightMargin - parent.spacing
height: contentHeight
width: parent.width - /*avatarImage.width - parent.spacing - */parent.anchors.leftMargin - parent.anchors.rightMargin
height: childrenRect.height
anchors.verticalCenter: parent.verticalCenter
// DisplayName Text
FiraSansSemiBold {
id: displayNameText
@ -63,6 +63,8 @@ Row {
elide: Text.ElideRight
// Size
width: parent.width
// Anchors
anchors.top: parent.top
// Text Size
size: thisNameCard.displayTextHeight
// Text Positioning
@ -80,6 +82,8 @@ Row {
visible: thisNameCard.displayName
// Size
width: parent.width
// Anchors
anchors.top: displayNameText.bottom
// Text Size
size: thisNameCard.usernameTextHeight
// Text Positioning
@ -90,25 +94,56 @@ Row {
// Spacer
Item {
id: spacer
height: 4
width: parent.width
// Anchors
anchors.top: userNameText.bottom
}
// VU Meter
Rectangle { // CHANGEME to the appropriate type!
Rectangle {
id: nameCardVUMeter
// Size
width: parent.width
width: ((gainSlider.value - gainSlider.minimumValue)/(gainSlider.maximumValue - gainSlider.minimumValue)) * parent.width
height: 8
// Anchors
anchors.top: spacer.bottom
// Style
radius: 4
color: "#c5c5c5"
// Rectangle for the zero-gain point on the VU meter
Rectangle {
id: vuMeterZeroGain
visible: gainSlider.visible
// Size
width: 4
height: 18
// Style
color: hifi.colors.darkGray
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: (-gainSlider.minimumValue)/(gainSlider.maximumValue - gainSlider.minimumValue) * gainSlider.width - 4
}
// Rectangle for the VU meter line
Rectangle {
id: vuMeterLine
width: gainSlider.width
visible: gainSlider.visible
// Style
color: vuMeterBase.color
radius: nameCardVUMeter.radius
height: nameCardVUMeter.height / 2
anchors.verticalCenter: nameCardVUMeter.verticalCenter
}
// Rectangle for the VU meter base
Rectangle {
id: vuMeterBase
// Anchors
anchors.fill: parent
// Style
color: "#c5c5c5"
color: parent.color
radius: parent.radius
}
// Rectangle for the VU meter audio level
@ -117,7 +152,7 @@ Row {
// Size
width: (thisNameCard.audioLevel) * parent.width
// Style
color: "#c5c5c5"
color: parent.color
radius: parent.radius
// Anchors
anchors.bottom: parent.bottom
@ -138,5 +173,66 @@ Row {
}
}
}
// Per-Avatar Gain Slider
Slider {
id: gainSlider
// Size
width: parent.width
height: 14
// Anchors
anchors.verticalCenter: nameCardVUMeter.verticalCenter
// Properties
visible: !isMyCard && selected
value: pal.gainSliderValueDB[uuid] ? pal.gainSliderValueDB[uuid] : 0.0
minimumValue: -60.0
maximumValue: 20.0
stepSize: 5
updateValueWhileDragging: true
onValueChanged: updateGainFromQML(uuid, value)
MouseArea {
anchors.fill: parent
onWheel: {
// Do nothing.
}
onDoubleClicked: {
gainSlider.value = 0.0
}
onPressed: {
// Pass through to Slider
mouse.accepted = false
}
onReleased: {
// Pass through to Slider
mouse.accepted = false
}
}
style: SliderStyle {
groove: Rectangle {
color: "#c5c5c5"
implicitWidth: gainSlider.width
implicitHeight: 4
radius: 2
opacity: 0
}
handle: Rectangle {
anchors.centerIn: parent
color: (control.pressed || control.hovered) ? "#00b4ef" : "#8F8F8F"
implicitWidth: 10
implicitHeight: 16
}
}
}
}
function updateGainFromQML(avatarUuid, sliderValue) {
if (pal.gainSliderValueDB[avatarUuid] !== sliderValue) {
pal.gainSliderValueDB[avatarUuid] = sliderValue;
var data = {
sessionId: avatarUuid,
gain: sliderValue
};
pal.sendToScript({method: 'updateGain', params: data});
}
}
}
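For reference, the 'updateGain' message sent by updateGainFromQML above is consumed on the script side and forwarded to the audio mixer. A compressed JavaScript sketch of that receiving end, assuming the {sessionId, gain} payload shape shown above (pal.js later in this diff wires up the equivalent inside its fromQml switch):

// Sketch of the script-side receiver for the QML gain slider message.
// 'pal' is the window object that loads Pal.qml (created earlier in pal.js).
pal.fromQml.connect(function (message) { // messages are {method, params}
    if (message.method === 'updateGain') {
        var data = message.params;                      // { sessionId: <avatar UUID>, gain: <dB> }
        Users.setAvatarGain(data.sessionId, data.gain); // ends up as a PerAvatarGainSet packet
    }
});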

View file

@ -24,7 +24,7 @@ Rectangle {
// Style
color: "#E3E3E3"
// Properties
property int myCardHeight: 70
property int myCardHeight: 90
property int rowHeight: 70
property int actionButtonWidth: 75
property int nameCardWidth: palContainer.width - actionButtonWidth*(iAmAdmin ? 4 : 2) - 4 - hifi.dimensions.scrollbarBackgroundWidth
@ -32,6 +32,9 @@ Rectangle {
property var ignored: ({}); // Keep a local list of ignored avatars & their data. Necessary because HashMap is slow to respond after ignoring.
property var userModelData: [] // This simple list is essentially a mirror of the userModel listModel without all the extra complexities.
property bool iAmAdmin: false
// Keep a local list of per-avatar gainSliderValueDBs. Far faster than fetching this data from the server.
// NOTE: if another script modifies the per-avatar gain, this value won't be accurate!
property var gainSliderValueDB: ({});
// This is the container for the PAL
Rectangle {
@ -51,7 +54,7 @@ Rectangle {
id: myInfo
// Size
width: palContainer.width
height: myCardHeight + 20
height: myCardHeight
// Style
color: pal.color
// Anchors
@ -65,6 +68,7 @@ Rectangle {
displayName: myData.displayName
userName: myData.userName
audioLevel: myData.audioLevel
isMyCard: true
// Size
width: nameCardWidth
height: parent.height
@ -206,6 +210,8 @@ Rectangle {
userName: model && model.userName
audioLevel: model && model.audioLevel
visible: !isCheckBox && !isButton
uuid: model && model.sessionId
selected: styleData.selected
// Size
width: nameCardWidth
height: parent.height
@ -492,8 +498,9 @@ Rectangle {
}
}
break;
case 'clearIgnored':
case 'clearLocalQMLData':
ignored = {};
gainSliderValueDB = {};
break;
default:
console.log('Unrecognized message:', JSON.stringify(message));

View file

@ -253,7 +253,7 @@ public:
static const unsigned long MAX_HEARTBEAT_AGE_USECS = 30 * USECS_PER_SECOND;
static const int WARNING_ELAPSED_HEARTBEAT = 500 * USECS_PER_MSEC; // warn if elapsed heartbeat average is large
static const int HEARTBEAT_SAMPLES = 100000; // ~5 seconds worth of samples
// Set the heartbeat on launch
DeadlockWatchdogThread() {
setObjectName("Deadlock Watchdog");
@ -618,7 +618,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_window->setWindowTitle("Interface");
Model::setAbstractViewStateInterface(this); // The model class will sometimes need to know view state details from us
auto nodeList = DependencyManager::get<NodeList>();
// Set up a watchdog thread to intentionally crash the application on deadlocks
@ -639,6 +639,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
qCDebug(interfaceapp) << "[VERSION] We will use DEVELOPMENT global services.";
#endif
// set the OCULUS_STORE property so the oculus plugin can know if we ran from the Oculus Store
static const QString OCULUS_STORE_ARG = "--oculus-store";
setProperty(hifi::properties::OCULUS_STORE, arguments().indexOf(OCULUS_STORE_ARG) != -1);
static const QString NO_UPDATER_ARG = "--no-updater";
static const bool noUpdater = arguments().indexOf(NO_UPDATER_ARG) != -1;
@ -699,7 +702,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
audioIO->setPositionGetter([]{
auto avatarManager = DependencyManager::get<AvatarManager>();
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
return myAvatar ? myAvatar->getPositionForAudio() : Vectors::ZERO;
});
audioIO->setOrientationGetter([]{
@ -882,7 +885,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
#ifdef Q_OS_MAC
auto cursorTarget = _window; // OSX doesn't seem to provide for hiding the cursor only on the GL widget
#else
// On windows and linux, hiding the top level cursor also means it's invisible when hovering over the
// On windows and linux, hiding the top level cursor also means it's invisible when hovering over the
// window menu, which is a pain, so only hide it for the GL surface
auto cursorTarget = _glWidget;
#endif
@ -1123,7 +1126,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
loadSettings();
// Now that we've loaded the menu and thus switched to the previous display plugin
// we can unlock the desktop repositioning code, since all the positions will be
// we can unlock the desktop repositioning code, since all the positions will be
// relative to the desktop size for this plugin
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->getDesktop()->setProperty("repositionLocked", false);
@ -1598,7 +1601,7 @@ void Application::checkChangeCursor() {
#ifdef Q_OS_MAC
auto cursorTarget = _window; // OSX doesn't seem to provide for hiding the cursor only on the GL widget
#else
// On windows and linux, hiding the top level cursor also means it's invisible when hovering over the
// On windows and linux, hiding the top level cursor also means it's invisible when hovering over the
// window menu, which is a pain, so only hide it for the GL surface
auto cursorTarget = _glWidget;
#endif
@ -1785,7 +1788,7 @@ Application::~Application() {
#endif
// The window takes ownership of the menu, so this has the side effect of destroying it.
_window->setMenuBar(nullptr);
_window->deleteLater();
// Can't log to file passed this point, FileLogger about to be deleted
@ -1811,10 +1814,10 @@ void Application::initializeGL() {
_glWidget->makeCurrent();
gpu::Context::init<gpu::gl::GLBackend>();
qApp->setProperty(hifi::properties::gl::MAKE_PROGRAM_CALLBACK,
qApp->setProperty(hifi::properties::gl::MAKE_PROGRAM_CALLBACK,
QVariant::fromValue((void*)(&gpu::gl::GLBackend::makeProgram)));
_gpuContext = std::make_shared<gpu::Context>();
// The gpu context can make child contexts for transfers, so
// The gpu context can make child contexts for transfers, so
// we need to restore primary rendering context
_glWidget->makeCurrent();
@ -1828,7 +1831,7 @@ void Application::initializeGL() {
assert(items.canCast<RenderFetchCullSortTask::Output>());
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
if (QProcessEnvironment::systemEnvironment().contains(RENDER_FORWARD)) {
_renderEngine->addJob<RenderForwardTask>("RenderForwardTask", items.get<RenderFetchCullSortTask::Output>());
_renderEngine->addJob<RenderForwardTask>("Forward", items.get<RenderFetchCullSortTask::Output>());
} else {
_renderEngine->addJob<RenderDeferredTask>("RenderDeferredTask", items.get<RenderFetchCullSortTask::Output>());
}
@ -2034,7 +2037,7 @@ void Application::paintGL() {
// FIXME not needed anymore?
_offscreenContext->makeCurrent();
// If a display plugin loses it's underlying support, it
// If a display plugin loses it's underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_frameCount)) {
_inPaint = false;
@ -2846,7 +2849,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
if (isMirrorChecked) {
// if we got here without coming in from a non-Full Screen mirror case, then our
// _returnFromFullScreenMirrorTo is unknown. In that case we'll go to the old
// _returnFromFullScreenMirrorTo is unknown. In that case we'll go to the old
// behavior of returning to ThirdPerson
if (_returnFromFullScreenMirrorTo.isEmpty()) {
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
@ -3016,7 +3019,7 @@ void Application::mouseMoveEvent(QMouseEvent* event) {
maybeToggleMenuVisible(event);
auto& compositor = getApplicationCompositor();
// if this is a real mouse event, and we're in HMD mode, then we should use it to move the
// if this is a real mouse event, and we're in HMD mode, then we should use it to move the
// compositor reticle
// handleRealMouseMoveEvent() will return true, if we shouldn't process the event further
if (!compositor.fakeEventActive() && compositor.handleRealMouseMoveEvent()) {
@ -4105,7 +4108,7 @@ void Application::setKeyboardFocusEntity(EntityItemID entityItemID) {
}
_lastAcceptedKeyPress = usecTimestampNow();
setKeyboardFocusHighlight(entity->getPosition(), entity->getRotation(),
setKeyboardFocusHighlight(entity->getPosition(), entity->getRotation(),
entity->getDimensions() * FOCUS_HIGHLIGHT_EXPANSION_FACTOR);
}
}
@ -4698,7 +4701,7 @@ void Application::queryOctree(NodeType_t serverType, PacketType packetType, Node
_octreeQuery.setMaxQueryPacketsPerSecond(0);
}
// if asked to forceResend, then set the query's position/orientation to be degenerate in a manner
// if asked to forceResend, then set the query's position/orientation to be degenerate in a manner
// that will cause our next query to be guaranteed to be different and the server will resend to us
if (forceResend) {
_octreeQuery.setCameraPosition(glm::vec3(-0.1, -0.1, -0.1));
@ -5292,15 +5295,17 @@ bool Application::nearbyEntitiesAreReadyForPhysics() {
if (_nearbyEntitiesStabilityCount >= MINIMUM_NEARBY_ENTITIES_STABILITY_COUNT) {
// We've seen the same number of nearby entities for several stats packets in a row. Assume we've got all
// the local entities.
bool result = true;
foreach (EntityItemPointer entity, entities) {
if (entity->shouldBePhysical() && !entity->isReadyToComputeShape()) {
static QString repeatedMessage =
LogHandler::getInstance().addRepeatedMessageRegex("Physics disabled until entity loads: .*");
qCDebug(interfaceapp) << "Physics disabled until entity loads: " << entity->getID() << entity->getName();
return false;
// don't break here because we want all the relevant entities to start their downloads
result = false;
}
}
return true;
return result;
}
return false;
}
@ -5831,7 +5836,7 @@ void Application::addAssetToWorldWithNewMapping(QString filePath, QString mappin
mapping = mapping.insert(mapping.lastIndexOf("."), "-" + QString::number(copy));
addAssetToWorldWithNewMapping(filePath, mapping, copy);
} else {
QString errorInfo = "Too many copies of asset name: "
QString errorInfo = "Too many copies of asset name: "
+ mapping.left(mapping.length() - QString::number(copy).length() - 1);
qWarning(interfaceapp) << "Error downloading model: " + errorInfo;
addAssetToWorldError(filenameFromPath(filePath), errorInfo);
@ -5898,7 +5903,7 @@ void Application::addAssetToWorldAddEntity(QString filePath, QString mapping) {
// Note: Model dimensions are not available here; model is scaled per FBX mesh in RenderableModelEntityItem::update() later
// on. But FBX dimensions may be in cm, so we monitor for the dimension change and rescale again if warranted.
if (entityID == QUuid()) {
QString errorInfo = "Could not add model " + mapping + " to world.";
qWarning(interfaceapp) << "Could not add model to world: " + errorInfo;
@ -6362,7 +6367,7 @@ glm::uvec2 Application::getCanvasSize() const {
}
QRect Application::getRenderingGeometry() const {
auto geometry = _glWidget->geometry();
auto geometry = _glWidget->geometry();
auto topLeft = geometry.topLeft();
auto topLeftScreen = _glWidget->mapToGlobal(topLeft);
geometry.moveTopLeft(topLeftScreen);
@ -6725,8 +6730,8 @@ bool Application::makeRenderingContextCurrent() {
return _offscreenContext->makeCurrent();
}
bool Application::isForeground() const {
return _isForeground && !_window->isMinimized();
bool Application::isForeground() const {
return _isForeground && !_window->isMinimized();
}
void Application::sendMousePressOnEntity(QUuid id, PointerEvent event) {

View file

@ -45,7 +45,7 @@ public:
void renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
//
// HRTF local gain adjustment
// HRTF local gain adjustment in amplitude (1.0 == unity)
//
void setGainAdjustment(float gain) { _gainAdjust = HRTF_GAIN * gain; };

View file

@ -972,7 +972,7 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
static const QVariant EMISSIVE = QByteArray("Emissive");
static const QVariant AMBIENT_FACTOR = QByteArray("AmbientFactor");
static const QVariant SHININESS = QByteArray("Shininess");
static const QVariant OPACITY = QByteArray("Shininess");
static const QVariant OPACITY = QByteArray("Opacity");
static const QVariant MAYA_USE_NORMAL_MAP = QByteArray("Maya|use_normal_map");
static const QVariant MAYA_BASE_COLOR = QByteArray("Maya|base_color");
static const QVariant MAYA_USE_COLOR_MAP = QByteArray("Maya|use_color_map");

View file

@ -26,6 +26,7 @@
#include "AccountManager.h"
#include "AddressManager.h"
#include "Assignment.h"
#include "AudioHelpers.h"
#include "HifiSockAddr.h"
#include "FingerprintUtils.h"
@ -951,6 +952,30 @@ void NodeList::maybeSendIgnoreSetToNode(SharedNodePointer newNode) {
}
}
void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {
// cannot set gain of yourself or nobody
if (!nodeID.isNull() && _sessionUUID != nodeID) {
auto audioMixer = soloNodeOfType(NodeType::AudioMixer);
if (audioMixer) {
// setup the packet
auto setAvatarGainPacket = NLPacket::create(PacketType::PerAvatarGainSet, NUM_BYTES_RFC4122_UUID + sizeof(float), true);
// write the node ID to the packet
setAvatarGainPacket->write(nodeID.toRfc4122());
// We need to convert the gain in dB (from the script) to an amplitude before packing it.
setAvatarGainPacket->writePrimitive(packFloatGainToByte(fastExp2f(gain / 6.0206f)));
qCDebug(networking) << "Sending Set Avatar Gain packet UUID: " << uuidStringWithoutCurlyBraces(nodeID) << "Gain:" << gain;
sendPacket(std::move(setAvatarGainPacket), *audioMixer);
} else {
qWarning() << "Couldn't find audio mixer to send set gain request";
}
} else {
qWarning() << "NodeList::setAvatarGain called with an invalid ID or an ID which matches the current session ID:" << nodeID;
}
}
void NodeList::kickNodeBySessionID(const QUuid& nodeID) {
// send a request to domain-server to kick the node with the given session ID
// the domain-server will handle the persistence of the kick (via username or IP)
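The constant 6.0206 in setAvatarGain above is 20 * log10(2), so fastExp2f(gain / 6.0206f) is the standard decibel-to-amplitude mapping 10^(dB/20) computed with a base-2 exponential; the resulting linear value is what the mixer feeds to setGainAdjustment(), where 1.0 is unity. A minimal JavaScript sketch of that conversion (dbToAmplitude is a hypothetical helper name, not part of the codebase):

// Decibels from the PAL gain slider (-60 dB .. +20 dB) to the linear multiplier
// the audio mixer applies to the per-stream HRTF gain.
function dbToAmplitude(dB) {
    return Math.pow(2, dB / 6.0206); // identical to Math.pow(10, dB / 20)
}
// Worked values: -60 dB -> ~0.001, -12 dB -> ~0.25, 0 dB -> 1.0 (unity), +20 dB -> 10.0
print(JSON.stringify([-60, -12, 0, 20].map(function (dB) {
    return { dB: dB, amplitude: dbToAmplitude(dB) };
})));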

View file

@ -82,6 +82,7 @@ public:
bool isIgnoringNode(const QUuid& nodeID) const;
void personalMuteNodeBySessionID(const QUuid& nodeID, bool muteEnabled);
bool isPersonalMutingNode(const QUuid& nodeID) const;
void setAvatarGain(const QUuid& nodeID, float gain);
void kickNodeBySessionID(const QUuid& nodeID);
void muteNodeBySessionID(const QUuid& nodeID);

View file

@ -106,7 +106,8 @@ public:
ViewFrustum,
RequestsDomainListData,
ExitingSpaceBubble,
LAST_PACKET_TYPE = ExitingSpaceBubble
PerAvatarGainSet,
LAST_PACKET_TYPE = PerAvatarGainSet
};
};

View file

@ -359,8 +359,11 @@ void ModelMeshPartPayload::notifyLocationChanged() {
}
void ModelMeshPartPayload::updateTransformForSkinnedMesh(const Transform& transform, const QVector<glm::mat4>& clusterMatrices) {
void ModelMeshPartPayload::updateTransformForSkinnedMesh(const Transform& transform,
const QVector<glm::mat4>& clusterMatrices,
const QVector<glm::mat4>& cauterizedClusterMatrices) {
_transform = transform;
_cauterizedTransform = transform;
if (clusterMatrices.size() > 0) {
_worldBound = AABox();
@ -373,6 +376,11 @@ void ModelMeshPartPayload::updateTransformForSkinnedMesh(const Transform& transf
_worldBound.transform(transform);
if (clusterMatrices.size() == 1) {
_transform = _transform.worldTransform(Transform(clusterMatrices[0]));
if (cauterizedClusterMatrices.size() != 0) {
_cauterizedTransform = _cauterizedTransform.worldTransform(Transform(cauterizedClusterMatrices[0]));
} else {
_cauterizedTransform = _transform;
}
}
}
}
@ -527,9 +535,14 @@ void ModelMeshPartPayload::bindTransform(gpu::Batch& batch, const ShapePipeline:
} else {
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, state.clusterBuffer);
}
batch.setModelTransform(_transform);
} else {
if (canCauterize && _model->getCauterizeBones()) {
batch.setModelTransform(_cauterizedTransform);
} else {
batch.setModelTransform(_transform);
}
}
batch.setModelTransform(_transform);
}
void ModelMeshPartPayload::startFade() {

View file

@ -85,7 +85,9 @@ public:
typedef Payload::DataPointer Pointer;
void notifyLocationChanged() override;
void updateTransformForSkinnedMesh(const Transform& transform, const QVector<glm::mat4>& clusterMatrices);
void updateTransformForSkinnedMesh(const Transform& transform,
const QVector<glm::mat4>& clusterMatrices,
const QVector<glm::mat4>& cauterizedClusterMatrices);
// Entity fade in
void startFade();
@ -106,6 +108,7 @@ public:
Model* _model;
Transform _cauterizedTransform;
int _meshIndex;
int _shapeID;

View file

@ -257,7 +257,7 @@ void Model::updateRenderItems() {
// update the model transform and bounding box for this render item.
const Model::MeshState& state = data._model->_meshStates.at(data._meshIndex);
data.updateTransformForSkinnedMesh(modelTransform, state.clusterMatrices);
data.updateTransformForSkinnedMesh(modelTransform, state.clusterMatrices, state.cauterizedClusterMatrices);
}
}
});

View file

@ -26,10 +26,16 @@
#include <render/drawItemBounds_vert.h>
#include <render/drawItemBounds_frag.h>
#include "nop_frag.h"
using namespace render;
extern void initForwardPipelines(ShapePlumber& plumber);
RenderForwardTask::RenderForwardTask(RenderFetchCullSortTask::Output items) {
// Prepare the ShapePipelines
ShapePlumberPointer shapePlumber = std::make_shared<ShapePlumber>();
initForwardPipelines(*shapePlumber);
// Extract opaques / transparents / lights / overlays
const auto opaques = items[0];
const auto transparents = items[1];
@ -40,16 +46,19 @@ RenderForwardTask::RenderForwardTask(RenderFetchCullSortTask::Output items) {
const auto framebuffer = addJob<PrepareFramebuffer>("PrepareFramebuffer");
addJob<Draw>("DrawOpaques", opaques, shapePlumber);
addJob<Stencil>("Stencil");
addJob<DrawBackground>("DrawBackground", background);
// bounds do not draw on stencil buffer, so they must come last
// Bounds do not draw on stencil buffer, so they must come last
addJob<DrawBounds>("DrawBounds", opaques);
// Blit!
addJob<Blit>("Blit", framebuffer);
}
void PrepareFramebuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, gpu::FramebufferPointer& framebuffer) {
void PrepareFramebuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
gpu::FramebufferPointer& framebuffer) {
auto framebufferCache = DependencyManager::get<FramebufferCache>();
auto framebufferSize = framebufferCache->getFrameBufferSize();
glm::uvec2 frameSize(framebufferSize.width(), framebufferSize.height());
@ -89,6 +98,88 @@ void PrepareFramebuffer::run(const SceneContextPointer& sceneContext, const Rend
framebuffer = _framebuffer;
}
void Draw::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
const Inputs& items) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
// Setup projection
glm::mat4 projMat;
Transform viewMat;
args->getViewFrustum().evalProjectionMatrix(projMat);
args->getViewFrustum().evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
// Render items
renderStateSortShapes(sceneContext, renderContext, _shapePlumber, items, -1);
});
args->_batch = nullptr;
}
const gpu::PipelinePointer Stencil::getPipeline() {
if (!_stencilPipeline) {
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(nop_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::makeProgram(*program);
auto state = std::make_shared<gpu::State>();
state->setDepthTest(true, false, gpu::LESS_EQUAL);
const gpu::int8 STENCIL_OPAQUE = 1;
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(STENCIL_OPAQUE, 0xFF, gpu::ALWAYS,
gpu::State::STENCIL_OP_REPLACE,
gpu::State::STENCIL_OP_REPLACE,
gpu::State::STENCIL_OP_KEEP));
_stencilPipeline = gpu::Pipeline::create(program, state);
}
return _stencilPipeline;
}
void Stencil::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
batch.enableStereo(false);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
batch.setPipeline(getPipeline());
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
args->_batch = nullptr;
}
void DrawBackground::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
const Inputs& background) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
batch.enableSkybox(true);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
// Setup projection
glm::mat4 projMat;
Transform viewMat;
args->getViewFrustum().evalProjectionMatrix(projMat);
args->getViewFrustum().evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
renderItems(sceneContext, renderContext, background);
});
args->_batch = nullptr;
}
const gpu::PipelinePointer DrawBounds::getPipeline() {
if (!_boundsPipeline) {
auto vs = gpu::Shader::createVertex(std::string(drawItemBounds_vert));
@ -112,7 +203,8 @@ const gpu::PipelinePointer DrawBounds::getPipeline() {
return _boundsPipeline;
}
void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& items) {
void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext,
const Inputs& items) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
@ -142,26 +234,3 @@ void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContex
}
});
}
void DrawBackground::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& items) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
batch.enableSkybox(true);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
// Setup projection
glm::mat4 projMat;
Transform viewMat;
args->getViewFrustum().evalProjectionMatrix(projMat);
args->getViewFrustum().evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
renderItems(sceneContext, renderContext, items);
});
args->_batch = nullptr;
}

View file

@ -25,20 +25,62 @@ public:
class PrepareFramebuffer {
public:
using JobModel = render::Job::ModelO<PrepareFramebuffer, gpu::FramebufferPointer>;
using Inputs = gpu::FramebufferPointer;
using JobModel = render::Job::ModelO<PrepareFramebuffer, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, gpu::FramebufferPointer& framebuffer);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
gpu::FramebufferPointer& framebuffer);
private:
gpu::FramebufferPointer _framebuffer;
};
class DrawBounds {
class Draw {
public:
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBounds, Inputs>;
using JobModel = render::Job::ModelI<Draw, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& items);
Draw(const render::ShapePlumberPointer& shapePlumber) : _shapePlumber(shapePlumber) {}
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
const Inputs& items);
private:
render::ShapePlumberPointer _shapePlumber;
};
class Stencil {
public:
using JobModel = render::Job::Model<Stencil>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
private:
const gpu::PipelinePointer getPipeline();
gpu::PipelinePointer _stencilPipeline;
};
class DrawBackground {
public:
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBackground, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
const Inputs& background);
};
class DrawBounds {
public:
class Config : public render::JobConfig {
public:
Config() : JobConfig(false) {}
};
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBounds, Inputs, Config>;
void configure(const Config& configuration) {}
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
const Inputs& items);
private:
const gpu::PipelinePointer getPipeline();
@ -47,12 +89,4 @@ private:
int _scaleLocation { -1 };
};
class DrawBackground {
public:
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBackground, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& background);
};
#endif // hifi_RenderForwardTask_h

View file

@ -10,6 +10,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <functional>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
@ -47,41 +49,17 @@
using namespace render;
using namespace std::placeholders;
gpu::BufferView getDefaultMaterialBuffer() {
model::Material::Schema schema;
schema._albedo = vec3(1.0f);
schema._opacity = 1.0f;
schema._metallic = 0.1f;
schema._roughness = 0.9f;
return gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(model::Material::Schema), (const gpu::Byte*) &schema));
}
void initOverlay3DPipelines(ShapePlumber& plumber);
void initDeferredPipelines(ShapePlumber& plumber);
void initForwardPipelines(ShapePlumber& plumber);
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
void addPlumberPipeline(ShapePlumber& plumber,
const ShapeKey& key, const gpu::ShaderPointer& vertex, const gpu::ShaderPointer& pixel);
// Set a default material
if (pipeline.locations->materialBufferUnit >= 0) {
static const gpu::BufferView OPAQUE_SCHEMA_BUFFER = getDefaultMaterialBuffer();
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::MATERIAL, OPAQUE_SCHEMA_BUFFER);
}
}
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
batchSetter(pipeline, batch);
// Set the light
if (pipeline.locations->lightBufferUnit >= 0) {
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(batch,
pipeline.locations->lightBufferUnit,
pipeline.locations->lightAmbientBufferUnit,
pipeline.locations->lightAmbientMapUnit);
}
}
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void initOverlay3DPipelines(ShapePlumber& plumber) {
auto vertex = gpu::Shader::createVertex(std::string(overlay3D_vert));
@ -130,50 +108,6 @@ void initOverlay3DPipelines(ShapePlumber& plumber) {
}
void initDeferredPipelines(render::ShapePlumber& plumber) {
using Key = render::ShapeKey;
using ShaderPointer = gpu::ShaderPointer;
auto addPipeline = [&plumber](const Key& key, const ShaderPointer& vertexShader, const ShaderPointer& pixelShader) {
// These key-values' pipelines will be added by this lambda in addition to the key passed
assert(!key.isWireFrame());
assert(!key.isDepthBiased());
assert(key.isCullFace());
ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
for (int i = 0; i < 8; i++) {
bool isCulled = (i & 1);
bool isBiased = (i & 2);
bool isWireframed = (i & 4);
ShapeKey::Builder builder(key);
auto state = std::make_shared<gpu::State>();
// Depth test depends on transparency
state->setDepthTest(true, !key.isTranslucent(), gpu::LESS_EQUAL);
state->setBlendFunction(key.isTranslucent(),
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
if (!isCulled) {
builder.withoutCullFace();
}
state->setCullMode(isCulled ? gpu::State::CULL_BACK : gpu::State::CULL_NONE);
if (isWireframed) {
builder.withWireframe();
state->setFillMode(gpu::State::FILL_LINE);
}
if (isBiased) {
builder.withDepthBias();
state->setDepthBias(1.0f);
state->setDepthBiasSlopeScale(1.0f);
}
plumber.addPipeline(builder.build(), program, state,
key.isTranslucent() ? &lightBatchSetter : &batchSetter);
}
};
// Vertex shaders
auto modelVertex = gpu::Shader::createVertex(std::string(model_vert));
auto modelNormalMapVertex = gpu::Shader::createVertex(std::string(model_normal_map_vert));
@ -198,6 +132,8 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
auto modelLightmapSpecularMapPixel = gpu::Shader::createPixel(std::string(model_lightmap_specular_map_frag));
auto modelLightmapNormalSpecularMapPixel = gpu::Shader::createPixel(std::string(model_lightmap_normal_specular_map_frag));
using Key = render::ShapeKey;
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
// TODO: Refactor this to use a filter
// Opaques
addPipeline(
@ -281,5 +217,132 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
addPipeline(
Key::Builder().withSkinned().withDepthOnly(),
skinModelShadowVertex, modelShadowPixel);
}
void initForwardPipelines(render::ShapePlumber& plumber) {
// Vertex shaders
auto modelVertex = gpu::Shader::createVertex(std::string(model_vert));
auto modelNormalMapVertex = gpu::Shader::createVertex(std::string(model_normal_map_vert));
auto skinModelVertex = gpu::Shader::createVertex(std::string(skin_model_vert));
auto skinModelNormalMapVertex = gpu::Shader::createVertex(std::string(skin_model_normal_map_vert));
// Pixel shaders
auto modelPixel = gpu::Shader::createPixel(std::string(model_frag));
auto modelUnlitPixel = gpu::Shader::createPixel(std::string(model_unlit_frag));
auto modelNormalMapPixel = gpu::Shader::createPixel(std::string(model_normal_map_frag));
auto modelSpecularMapPixel = gpu::Shader::createPixel(std::string(model_specular_map_frag));
auto modelNormalSpecularMapPixel = gpu::Shader::createPixel(std::string(model_normal_specular_map_frag));
using Key = render::ShapeKey;
auto addPipeline = std::bind(&addPlumberPipeline, std::ref(plumber), _1, _2, _3);
// Opaques
addPipeline(
Key::Builder(),
modelVertex, modelPixel);
addPipeline(
Key::Builder().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSpecular(),
modelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withTangents().withSpecular(),
modelNormalMapVertex, modelNormalSpecularMapPixel);
// Skinned
addPipeline(
Key::Builder().withSkinned(),
skinModelVertex, modelPixel);
addPipeline(
Key::Builder().withSkinned().withTangents(),
skinModelNormalMapVertex, modelNormalMapPixel);
addPipeline(
Key::Builder().withSkinned().withSpecular(),
skinModelVertex, modelSpecularMapPixel);
addPipeline(
Key::Builder().withSkinned().withTangents().withSpecular(),
skinModelNormalMapVertex, modelNormalSpecularMapPixel);
}
void addPlumberPipeline(ShapePlumber& plumber,
const ShapeKey& key, const gpu::ShaderPointer& vertex, const gpu::ShaderPointer& pixel) {
// These key-values' pipelines are added by this functor in addition to the key passed
assert(!key.isWireFrame());
assert(!key.isDepthBiased());
assert(key.isCullFace());
gpu::ShaderPointer program = gpu::Shader::createProgram(vertex, pixel);
for (int i = 0; i < 8; i++) {
bool isCulled = (i & 1);
bool isBiased = (i & 2);
bool isWireframed = (i & 4);
auto state = std::make_shared<gpu::State>();
// Depth test depends on transparency
state->setDepthTest(true, !key.isTranslucent(), gpu::LESS_EQUAL);
state->setBlendFunction(key.isTranslucent(),
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
ShapeKey::Builder builder(key);
if (!isCulled) {
builder.withoutCullFace();
}
state->setCullMode(isCulled ? gpu::State::CULL_BACK : gpu::State::CULL_NONE);
if (isWireframed) {
builder.withWireframe();
state->setFillMode(gpu::State::FILL_LINE);
}
if (isBiased) {
builder.withDepthBias();
state->setDepthBias(1.0f);
state->setDepthBiasSlopeScale(1.0f);
}
plumber.addPipeline(builder.build(), program, state,
key.isTranslucent() ? &lightBatchSetter : &batchSetter);
}
}
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
// Set a default normal map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::NORMAL_FITTING,
DependencyManager::get<TextureCache>()->getNormalFittingTexture());
// Set a default material
if (pipeline.locations->materialBufferUnit >= 0) {
// Create a default schema
static bool isMaterialSet = false;
static model::Material material;
if (!isMaterialSet) {
material.setAlbedo(vec3(1.0f));
material.setOpacity(1.0f);
material.setMetallic(0.1f);
material.setRoughness(0.9f);
isMaterialSet = true;
}
// Set a default schema
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::MATERIAL, material.getSchemaBuffer());
}
}
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
// Set the batch
batchSetter(pipeline, batch);
// Set the light
if (pipeline.locations->lightBufferUnit >= 0) {
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(batch,
pipeline.locations->lightBufferUnit,
pipeline.locations->lightAmbientBufferUnit,
pipeline.locations->lightAmbientMapUnit);
}
}

View file

@ -0,0 +1,16 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// nop.frag
// fragment shader
//
// Created by Zach Pomerantz on 1/3/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
void main(void) {
}

View file

@ -371,6 +371,7 @@ protected:
class JobConfig : public QObject {
Q_OBJECT
Q_PROPERTY(double cpuRunTime READ getCPURunTime NOTIFY newStats()) //ms
Q_PROPERTY(bool enabled READ isEnabled WRITE setEnabled)
double _msCPURunTime{ 0.0 };
public:
@ -380,7 +381,7 @@ public:
JobConfig(bool enabled) : alwaysEnabled{ false }, enabled{ enabled } {}
bool isEnabled() { return alwaysEnabled || enabled; }
void setEnabled(bool enable) { enabled = enable; }
void setEnabled(bool enable) { enabled = alwaysEnabled || enable; }
bool alwaysEnabled{ true };
bool enabled{ true };

View file

@ -42,6 +42,11 @@ bool UsersScriptingInterface::getPersonalMuteStatus(const QUuid& nodeID) {
return DependencyManager::get<NodeList>()->isPersonalMutingNode(nodeID);
}
void UsersScriptingInterface::setAvatarGain(const QUuid& nodeID, float gain) {
// ask the NodeList to set the gain of the specified avatar
DependencyManager::get<NodeList>()->setAvatarGain(nodeID, gain);
}
void UsersScriptingInterface::kick(const QUuid& nodeID) {
// ask the NodeList to kick the user with the given session ID
DependencyManager::get<NodeList>()->kickNodeBySessionID(nodeID);

View file

@ -61,6 +61,15 @@ public slots:
*/
bool getPersonalMuteStatus(const QUuid& nodeID);
/**jsdoc
* Sets an avatar's gain for you and you only.
* Units are Decibels (dB)
* @function Users.setAvatarGain
* @param {nodeID} nodeID The node or session ID of the user whose gain you want to modify.
* @param {float} gain The gain of the avatar you'd like to set. Units are dB.
*/
void setAvatarGain(const QUuid& nodeID, float gain);
/**jsdoc
* Kick another user.
* @function Users.kick
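A minimal usage sketch of the new Users.setAvatarGain slot documented above, from a client script; the avatar ID here is just the first entry returned by AvatarList.getAvatarIdentifiers() and stands in for whatever selection a real script would use:

// Lower one avatar's volume by 12 dB for this client only; 0.0 restores unity gain.
var targetId = AvatarList.getAvatarIdentifiers()[0]; // placeholder selection
if (targetId) {
    Users.setAvatarGain(targetId, -12.0); // gain is in dB, per the jsdoc above
}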

View file

@ -13,6 +13,7 @@ namespace hifi { namespace properties {
const char* CRASHED = "com.highfidelity.crashed";
const char* STEAM = "com.highfidelity.launchedFromSteam";
const char* LOGGER = "com.highfidelity.logger";
const char* OCULUS_STORE = "com.highfidelity.oculusStore";
const char* TEST = "com.highfidelity.test";
const char* TRACING = "com.highfidelity.tracing";

View file

@ -15,6 +15,7 @@ namespace hifi { namespace properties {
extern const char* CRASHED;
extern const char* STEAM;
extern const char* LOGGER;
extern const char* OCULUS_STORE;
extern const char* TEST;
extern const char* TRACING;

View file

@ -8,21 +8,31 @@
if (WIN32)
# we're using static GLEW, so define GLEW_STATIC
add_definitions(-DGLEW_STATIC)
# we're using static GLEW, so define GLEW_STATIC
add_definitions(-DGLEW_STATIC)
set(TARGET_NAME oculus)
setup_hifi_plugin(Multimedia)
link_hifi_libraries(shared gl gpu gpu-gl controllers ui
plugins ui-plugins display-plugins input-plugins
audio-client networking render-utils)
include_hifi_library_headers(octree)
add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
target_link_libraries(${TARGET_NAME} Winmm.lib)
# if we were passed an Oculus App ID for entitlement checks, send that along
if (DEFINED ENV{OCULUS_APP_ID})
add_definitions(-DOCULUS_APP_ID="$ENV{OCULUS_APP_ID}")
endif ()
set(TARGET_NAME oculus)
setup_hifi_plugin(Multimedia)
link_hifi_libraries(
shared gl gpu gpu-gl controllers ui
plugins ui-plugins display-plugins input-plugins
audio-client networking render-utils
)
include_hifi_library_headers(octree)
add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
target_link_libraries(${TARGET_NAME} Winmm.lib)
add_dependency_external_projects(LibOVRPlatform)
find_package(LibOVRPlatform REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVRPLATFORM_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVRPLATFORM_LIBRARIES})
endif()

View file

@ -15,8 +15,12 @@
#include <QtCore/QDir>
#include <QtCore/QProcessEnvironment>
#define OVRPL_DISABLED
#include <OVR_Platform.h>
#include <controllers/Input.h>
#include <controllers/Pose.h>
#include <shared/GlobalAppProperties.h>
#include <NumericalConstants.h>
Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
@ -89,6 +93,18 @@ ovrSession acquireOculusSession() {
return session;
}
#ifdef OCULUS_APP_ID
if (qApp->property(hifi::properties::OCULUS_STORE).toBool()) {
if (ovr_PlatformInitializeWindows(OCULUS_APP_ID) != ovrPlatformInitialize_Success) {
// we were unable to initialize the platform for entitlement check - fail the check
_quitRequested = true;
} else {
qCDebug(oculus) << "Performing Oculus Platform entitlement check";
ovr_Entitlement_GetIsViewerEntitled();
}
}
#endif
Q_ASSERT(0 == refCount);
ovrGraphicsLuid luid;
if (!OVR_SUCCESS(ovr_Create(&session, &luid))) {
@ -127,6 +143,35 @@ void handleOVREvents() {
_quitRequested = status.ShouldQuit;
_reorientRequested = status.ShouldRecenter;
#ifdef OCULUS_APP_ID
if (qApp->property(hifi::properties::OCULUS_STORE).toBool()) {
// pop messages to see if we got a return for an entitlement check
ovrMessageHandle message = ovr_PopMessage();
while (message) {
switch (ovr_Message_GetType(message)) {
case ovrMessage_Entitlement_GetIsViewerEntitled: {
if (!ovr_Message_IsError(message)) {
// this viewer is entitled, no need to flag anything
qCDebug(oculus) << "Oculus Platform entitlement check succeeded, proceeding normally";
} else {
// we failed the entitlement check, set our flag so the app can stop
qCDebug(oculus) << "Oculus Platform entitlement check failed, app will now quit" << OCULUS_APP_ID;
_quitRequested = true;
}
}
}
// free the message handle to cleanup and not leak
ovr_FreeMessage(message);
// pop the next message to check, if there is one
message = ovr_PopMessage();
}
}
#endif
}
bool quitRequested() {
@ -217,4 +262,4 @@ controller::Pose ovrControllerPoseToHandPose(
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
return pose;
}
}

View file

@ -3185,7 +3185,7 @@ var handleHandMessages = function(channel, message, sender) {
Messages.messageReceived.connect(handleHandMessages);
var TARGET_UPDATE_HZ = 50; // 50hz good enough (no change in logic)
var TARGET_UPDATE_HZ = 60; // 50hz good enough, but we're using update
var BASIC_TIMER_INTERVAL_MS = 1000 / TARGET_UPDATE_HZ;
var lastInterval = Date.now();
@ -3198,7 +3198,7 @@ var updateTotalWork = 0;
var UPDATE_PERFORMANCE_DEBUGGING = false;
var updateIntervalTimer = Script.setInterval(function(){
function updateWrapper(){
intervalCount++;
var thisInterval = Date.now();
@ -3246,11 +3246,12 @@ var updateIntervalTimer = Script.setInterval(function(){
updateTotalWork = 0;
}
}, BASIC_TIMER_INTERVAL_MS);
}
Script.update.connect(updateWrapper);
function cleanup() {
Menu.removeMenuItem("Developer", "Show Grab Sphere");
Script.clearInterval(updateIntervalTimer);
Script.update.disconnect(updateWrapper);
rightController.cleanup();
leftController.cleanup();
Controller.disableMapping(MAPPING_NAME);

View file

@ -233,6 +233,10 @@ pal.fromQml.connect(function (message) { // messages are {method, params}, like
removeOverlays();
populateUserList();
break;
case 'updateGain':
data = message.params;
Users.setAvatarGain(data['sessionId'], data['gain']);
break;
default:
print('Unrecognized message from Pal.qml:', JSON.stringify(message));
}
@ -518,6 +522,7 @@ var LOUDNESS_SCALE = 2.8 / 5.0;
var LOG2 = Math.log(2.0);
var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too)
var myData = {}; // we're not included in ExtendedOverlay.get.
var audioInterval;
function getAudioLevel(id) {
// the VU meter should work similarly to the one in AvatarInputs: log scale, exponentially averaged
@ -550,21 +555,71 @@ function getAudioLevel(id) {
return audioLevel;
}
function createAudioInterval() {
// we will update the audioLevels periodically
// TODO: tune for efficiency - especially with large numbers of avatars
return Script.setInterval(function () {
if (pal.visible) {
var param = {};
AvatarList.getAvatarIdentifiers().forEach(function (id) {
var level = getAudioLevel(id);
// qml didn't like an object with null/empty string for a key, so...
var userId = id || 0;
param[userId] = level;
});
pal.sendToQml({method: 'updateAudioLevel', params: param});
}
}, AUDIO_LEVEL_UPDATE_INTERVAL_MS);
}
// we will update the audioLevels periodically
// TODO: tune for efficiency - especially with large numbers of avatars
Script.setInterval(function () {
if (pal.visible) {
var param = {};
AvatarList.getAvatarIdentifiers().forEach(function (id) {
var level = getAudioLevel(id);
// qml didn't like an object with null/empty string for a key, so...
var userId = id || 0;
param[userId] = level;
});
pal.sendToQml({method: 'updateAudioLevel', params: param});
//
// Manage the connection between the button and the window.
//
var toolBar = Toolbars.getToolbar("com.highfidelity.interface.toolbar.system");
var buttonName = "pal";
var button = toolBar.addButton({
objectName: buttonName,
imageURL: Script.resolvePath("assets/images/tools/people.svg"),
visible: true,
hoverState: 2,
defaultState: 1,
buttonState: 1,
alpha: 0.9
});
var isWired = false;
function off() {
if (isWired) { // It is not ok to disconnect these twice, hence guard.
Script.update.disconnect(updateOverlays);
Controller.mousePressEvent.disconnect(handleMouseEvent);
Controller.mouseMoveEvent.disconnect(handleMouseMoveEvent);
isWired = false;
}
}, AUDIO_LEVEL_UPDATE_INTERVAL_MS);
triggerMapping.disable(); // It's ok if we disable twice.
triggerPressMapping.disable(); // see above
removeOverlays();
Users.requestsDomainListData = false;
if (audioInterval) {
Script.clearInterval(audioInterval);
}
}
function onClicked() {
if (!pal.visible) {
Users.requestsDomainListData = true;
populateUserList();
pal.raise();
isWired = true;
Script.update.connect(updateOverlays);
Controller.mousePressEvent.connect(handleMouseEvent);
Controller.mouseMoveEvent.connect(handleMouseMoveEvent);
triggerMapping.enable();
triggerPressMapping.enable();
createAudioInterval();
} else {
off();
}
pal.setVisible(!pal.visible);
}
//
// Button state.
//
@ -577,14 +632,14 @@ button.clicked.connect(onClicked);
pal.visibleChanged.connect(onVisibleChanged);
pal.closed.connect(off);
Users.usernameFromIDReply.connect(usernameFromIDReply);
function clearIgnoredInQMLAndClosePAL() {
pal.sendToQml({ method: 'clearIgnored' });
function clearLocalQMLDataAndClosePAL() {
pal.sendToQml({ method: 'clearLocalQMLData' });
if (pal.visible) {
onClicked(); // Close the PAL
}
}
Window.domainChanged.connect(clearIgnoredInQMLAndClosePAL);
Window.domainConnectionRefused.connect(clearIgnoredInQMLAndClosePAL);
Window.domainChanged.connect(clearLocalQMLDataAndClosePAL);
Window.domainConnectionRefused.connect(clearLocalQMLDataAndClosePAL);
//
// Cleanup.
@ -595,8 +650,8 @@ Script.scriptEnding.connect(function () {
pal.visibleChanged.disconnect(onVisibleChanged);
pal.closed.disconnect(off);
Users.usernameFromIDReply.disconnect(usernameFromIDReply);
Window.domainChanged.disconnect(clearIgnoredInQMLAndClosePAL);
Window.domainConnectionRefused.disconnect(clearIgnoredInQMLAndClosePAL);
Window.domainChanged.disconnect(clearLocalQMLDataAndClosePAL);
Window.domainConnectionRefused.disconnect(clearLocalQMLDataAndClosePAL);
Messages.unsubscribe(CHANNEL);
Messages.messageReceived.disconnect(receiveMessage);
off();