TransitionStage + FadeJob compiling but not linked to entity events

This commit is contained in:
Olivier Prat 2017-07-07 18:22:11 +02:00
commit 06d69d04c3
87 changed files with 1623 additions and 739 deletions

View file

@ -17,8 +17,8 @@
#include <QtCore/QThread>
#include <LogHandler.h>
#include <SharedUtil.h>
#include <HifiConfigVariantMap.h>
#include <SharedUtil.h>
#include <ShutdownEventListener.h>
#include "Assignment.h"

View file

@ -50,6 +50,7 @@
{
"label": "Places / Paths",
"html_id": "places_paths",
"restart": false,
"settings": [
{
"name": "paths",
@ -75,6 +76,7 @@
{
"name": "descriptors",
"label": "Description",
"restart": false,
"help": "This data will be queryable from your server. It may be collected by High Fidelity and used to share your domain with others.",
"settings": [
{

View file

@ -2,11 +2,11 @@ $(document).ready(function(){
// setup the underscore templates
var nodeTemplate = _.template($('#nodes-template').html());
var queuedTemplate = _.template($('#queued-template').html());
// setup a function to grab the assignments
function getNodesAndAssignments() {
$.getJSON("nodes.json", function(json){
json.nodes.sort(function(a, b){
if (a.type === b.type) {
if (a.uptime < b.uptime) {
@ -16,36 +16,50 @@ $(document).ready(function(){
} else {
return 0;
}
}
}
if (a.type === "agent" && b.type !== "agent") {
return 1;
} else if (b.type === "agent" && a.type !== "agent") {
return -1;
}
if (a.type > b.type) {
return 1;
}
if (a.type < b.type) {
return -1;
}
}
});
$('#nodes-table tbody').html(nodeTemplate(json));
}).fail(function(jqXHR, textStatus, errorThrown) {
// we assume a 401 means the DS has restarted
// and no longer has our OAuth produced uuid
// so just reload and re-auth
if (jqXHR.status == 401) {
location.reload();
}
});
$.getJSON("assignments.json", function(json){
$.getJSON("assignments.json", function(json){
$('#assignments-table tbody').html(queuedTemplate(json));
}).fail(function(jqXHR, textStatus, errorThrown) {
// we assume a 401 means the DS has restarted
// and no longer has our OAuth produced uuid
// so just reload and re-auth
if (jqXHR.status == 401) {
location.reload();
}
});
}
// do the first GET on page load
getNodesAndAssignments();
// grab the new assignments JSON every two seconds
var getNodesAndAssignmentsInterval = setInterval(getNodesAndAssignments, 2000);
// hook the node delete to the X button
$(document.body).on('click', '.glyphicon-remove', function(){
// fire off a delete for this node
@ -57,10 +71,10 @@ $(document).ready(function(){
}
});
});
$(document.body).on('click', '#kill-all-btn', function() {
var confirmed_kill = confirm("Are you sure?");
if (confirmed_kill == true) {
$.ajax({
url: "/nodes/",

View file

@ -40,11 +40,11 @@
#include <LogHandler.h>
#include <PathUtils.h>
#include <NumericalConstants.h>
#include <Trace.h>
#include <StatTracker.h>
#include "DomainServerNodeData.h"
#include "NodeConnectionData.h"
#include <Trace.h>
#include <StatTracker.h>
int const DomainServer::EXIT_CODE_REBOOT = 234923;
@ -162,8 +162,10 @@ DomainServer::DomainServer(int argc, char* argv[]) :
_gatekeeper.preloadAllowedUserPublicKeys(); // so they can connect on first request
//send signal to DomainMetadata when descriptors changed
_metadata = new DomainMetadata(this);
connect(&_settingsManager, &DomainServerSettingsManager::settingsUpdated,
_metadata, &DomainMetadata::descriptorsChanged);
qDebug() << "domain-server is running";
static const QString AC_SUBNET_WHITELIST_SETTING_PATH = "security.ac_subnet_whitelist";
@ -1972,7 +1974,8 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
return _settingsManager.handleAuthenticatedHTTPRequest(connection, url);
}
const QString HIFI_SESSION_COOKIE_KEY = "DS_WEB_SESSION_UUID";
static const QString HIFI_SESSION_COOKIE_KEY = "DS_WEB_SESSION_UUID";
static const QString STATE_QUERY_KEY = "state";
bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &url, bool skipSubHandler) {
qDebug() << "HTTPS request received at" << url.toString();
@ -1983,10 +1986,9 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
const QString CODE_QUERY_KEY = "code";
QString authorizationCode = codeURLQuery.queryItemValue(CODE_QUERY_KEY);
const QString STATE_QUERY_KEY = "state";
QUuid stateUUID = QUuid(codeURLQuery.queryItemValue(STATE_QUERY_KEY));
if (!authorizationCode.isEmpty() && !stateUUID.isNull()) {
if (!authorizationCode.isEmpty() && !stateUUID.isNull() && _webAuthenticationStateSet.remove(stateUUID)) {
// fire off a request with this code and state to get an access token for the user
const QString OAUTH_TOKEN_REQUEST_PATH = "/oauth/token";
@ -2004,47 +2006,83 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
tokenRequest.setHeader(QNetworkRequest::ContentTypeHeader, "application/x-www-form-urlencoded");
QNetworkReply* tokenReply = NetworkAccessManager::getInstance().post(tokenRequest, tokenPostBody.toLocal8Bit());
connect(tokenReply, &QNetworkReply::finished, this, &DomainServer::tokenGrantFinished);
if (_webAuthenticationStateSet.remove(stateUUID)) {
// this is a web user who wants to auth to access web interface
// we hold the response back to them until we get their profile information
// and can decide if they are let in or not
// add this connection to our list of pending connections so that we can hold the response
_pendingOAuthConnections.insert(stateUUID, connection);
QEventLoop loop;
connect(tokenReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
// set the state UUID on the reply so that we can associate the response with the connection later
tokenReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), stateUUID);
// start the loop for the token request
loop.exec();
return true;
} else {
connection->respond(HTTPConnection::StatusCode400);
QNetworkReply* profileReply = profileRequestGivenTokenReply(tokenReply);
return true;
}
} else {
return false;
}
}
// stop the loop once the profileReply is complete
connect(profileReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
HTTPSConnection* DomainServer::connectionFromReplyWithState(QNetworkReply* reply) {
// grab the UUID state property from the reply
QUuid stateUUID = reply->property(STATE_QUERY_KEY.toLocal8Bit()).toUuid();
// restart the loop for the profile request
loop.exec();
if (!stateUUID.isNull()) {
return _pendingOAuthConnections.take(stateUUID);
} else {
return nullptr;
}
}
// Slot: invoked when the OAuth token-grant network request finishes.
// On success, fires off a profile request and forwards the state UUID so the
// pending HTTP connection can be looked up when that request completes; on
// failure, responds 500 to the originating connection if it is still alive.
void DomainServer::tokenGrantFinished() {
// sender() is the QNetworkReply whose finished() signal triggered this slot;
// the cast returns nullptr if invoked any other way, so guard on it
auto tokenReply = qobject_cast<QNetworkReply*>(sender());
if (tokenReply) {
if (tokenReply->error() == QNetworkReply::NoError) {
// now that we have a token for this profile, send off a profile request
QNetworkReply* profileReply = profileRequestGivenTokenReply(tokenReply);
// forward along the state UUID that we kept with the token request
profileReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), tokenReply->property(STATE_QUERY_KEY.toLocal8Bit()));
connect(profileReply, &QNetworkReply::finished, this, &DomainServer::profileRequestFinished);
} else {
// the token grant failed, send back a 500 (assuming the connection is still around)
auto connection = connectionFromReplyWithState(tokenReply);
if (connection) {
connection->respond(HTTPConnection::StatusCode500);
}
}
// schedule deletion of the reply on the event loop rather than deleting it
// inside its own finished() handler
tokenReply->deleteLater();
}
}
void DomainServer::profileRequestFinished() {
auto profileReply = qobject_cast<QNetworkReply*>(sender());
if (profileReply) {
auto connection = connectionFromReplyWithState(profileReply);
if (connection) {
if (profileReply->error() == QNetworkReply::NoError) {
// call helper method to get cookieHeaders
Headers cookieHeaders = setupCookieHeadersFromProfileReply(profileReply);
connection->respond(HTTPConnection::StatusCode302, QByteArray(),
HTTPConnection::DefaultContentType, cookieHeaders);
delete tokenReply;
delete profileReply;
// we've redirected the user back to our homepage
return true;
} else {
// the profile request failed, send back a 500 (assuming the connection is still around)
connection->respond(HTTPConnection::StatusCode500);
}
}
// respond with a 200 code indicating that login is complete
connection->respond(HTTPConnection::StatusCode200);
return true;
} else {
return false;
profileReply->deleteLater();
}
}
@ -2104,22 +2142,31 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
// the user does not have allowed username or role, return 401
return false;
} else {
// re-direct this user to OAuth page
static const QByteArray REQUESTED_WITH_HEADER = "X-Requested-With";
static const QString XML_REQUESTED_WITH = "XMLHttpRequest";
// generate a random state UUID to use
QUuid stateUUID = QUuid::createUuid();
if (connection->requestHeaders().value(REQUESTED_WITH_HEADER) == XML_REQUESTED_WITH) {
// unauthorized XHR requests get a 401 and not a 302, since there isn't an XHR
// path to OAuth authorize
connection->respond(HTTPConnection::StatusCode401, UNAUTHENTICATED_BODY);
} else {
// re-direct this user to OAuth page
// add it to the set so we can handle the callback from the OAuth provider
_webAuthenticationStateSet.insert(stateUUID);
// generate a random state UUID to use
QUuid stateUUID = QUuid::createUuid();
QUrl authURL = oauthAuthorizationURL(stateUUID);
// add it to the set so we can handle the callback from the OAuth provider
_webAuthenticationStateSet.insert(stateUUID);
Headers redirectHeaders;
QUrl authURL = oauthAuthorizationURL(stateUUID);
redirectHeaders.insert("Location", authURL.toEncoded());
Headers redirectHeaders;
connection->respond(HTTPConnection::StatusCode302,
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
redirectHeaders.insert("Location", authURL.toEncoded());
connection->respond(HTTPConnection::StatusCode302,
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
}
// we don't know about this user yet, so they are not yet authenticated
return false;

View file

@ -111,6 +111,9 @@ private slots:
void updateDownstreamNodes();
void updateUpstreamNodes();
void tokenGrantFinished();
void profileRequestFinished();
signals:
void iceServerChanged();
void userConnected();
@ -178,6 +181,8 @@ private:
void updateReplicationNodes(ReplicationServerDirection direction);
HTTPSConnection* connectionFromReplyWithState(QNetworkReply* reply);
SubnetList _acSubnetWhitelist;
std::vector<QString> _replicatedUsernames;
@ -235,6 +240,8 @@ private:
bool _sendICEServerAddressToMetaverseAPIInProgress { false };
bool _sendICEServerAddressToMetaverseAPIRedo { false };
QHash<QUuid, QPointer<HTTPSConnection>> _pendingOAuthConnections;
};

View file

@ -1198,6 +1198,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
static const QString SECURITY_ROOT_KEY = "security";
static const QString AC_SUBNET_WHITELIST_KEY = "ac_subnet_whitelist";
static const QString BROADCASTING_KEY = "broadcasting";
static const QString DESCRIPTION_ROOT_KEY = "descriptors";
auto& settingsVariant = _configMap.getConfig();
bool needRestart = false;
@ -1249,7 +1250,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
if (!matchingDescriptionObject.isEmpty()) {
updateSetting(rootKey, rootValue, *thisMap, matchingDescriptionObject);
if (rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY) {
if (rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY && rootKey != SETTINGS_PATHS_KEY ) {
needRestart = true;
}
} else {
@ -1265,7 +1266,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
if (!matchingDescriptionObject.isEmpty()) {
const QJsonValue& settingValue = rootValue.toObject()[settingKey];
updateSetting(settingKey, settingValue, *thisMap, matchingDescriptionObject);
if ((rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY)
if ((rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY && rootKey != DESCRIPTION_ROOT_KEY)
|| settingKey == AC_SUBNET_WHITELIST_KEY) {
needRestart = true;
}

View file

@ -34,36 +34,32 @@
{ "from": "Vive.RSCenter", "to": "Standard.RightPrimaryThumb" },
{ "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
{ "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand"},
{ "from": "Vive.RightHand", "to": "Standard.RightHand"},
{
"from": "Vive.LeftFoot", "to" : "Standard.LeftFoot",
"filters" : [{"type" : "lowVelocity", "rotation" : 1.0, "translation": 1.0}],
"when": [ "Application.InHMD" ]
"filters" : [{"type" : "lowVelocity", "rotation" : 1.0, "translation": 1.0}]
},
{
"from": "Vive.RightFoot", "to" : "Standard.RightFoot",
"filters" : [{"type" : "lowVelocity", "rotation" : 1.0, "translation": 1.0}],
"when": [ "Application.InHMD" ]
"filters" : [{"type" : "lowVelocity", "rotation" : 1.0, "translation": 1.0}]
},
{
"from": "Vive.Hips", "to" : "Standard.Hips",
"filters" : [{"type" : "lowVelocity", "rotation" : 0.01, "translation": 0.01}],
"when": [ "Application.InHMD" ]
"filters" : [{"type" : "lowVelocity", "rotation" : 0.01, "translation": 0.01}]
},
{
"from": "Vive.Spine2", "to" : "Standard.Spine2",
"filters" : [{"type" : "lowVelocity", "rotation" : 0.01, "translation": 0.01}],
"when": [ "Application.InHMD" ]
"filters" : [{"type" : "lowVelocity", "rotation" : 0.01, "translation": 0.01}]
},
{ "from": "Vive.Head", "to" : "Standard.Head", "when": [ "Application.InHMD" ] },
{ "from": "Vive.Head", "to" : "Standard.Head"},
{ "from": "Vive.RightArm", "to" : "Standard.RightArm", "when": [ "Application.InHMD" ] },
{ "from": "Vive.LeftArm", "to" : "Standard.LeftArm", "when": [ "Application.InHMD" ] }
{ "from": "Vive.RightArm", "to" : "Standard.RightArm"},
{ "from": "Vive.LeftArm", "to" : "Standard.LeftArm"}
]
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

View file

@ -0,0 +1,64 @@
//
// ImageMessageBox.qml
//
// Created by Dante Ruiz on 7/5/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "../styles-uit"
// Full-screen overlay that dims the parent and displays a single image
// with a close ("X") glyph in its top-right corner. Hidden by default;
// callers show it by setting `visible` and point it at an image via the
// `source` alias.
Item {
id: imageBox
visible: false
anchors.fill: parent
// expose the inner Image's source and dimensions to users of this component
property alias source: image.source
property alias imageWidth: image.width
property alias imageHeight: image.height
// semi-transparent black scrim behind the image
Rectangle {
anchors.fill: parent
color: "black"
opacity: 0.3
}
Image {
id: image
anchors.centerIn: parent
// close button rendered as a HiFi glyph, pinned to the image's top-right
HiFiGlyphs {
id: closeGlyphButton
text: hifi.glyphs.close
size: 25
anchors {
top: parent.top
topMargin: 15
right: parent.right
rightMargin: 15
}
MouseArea {
anchors.fill: parent
hoverEnabled: true
// swap glyphs on hover to give visual feedback
onEntered: {
parent.text = hifi.glyphs.closeInverted;
}
onExited: {
parent.text = hifi.glyphs.close;
}
// clicking the X hides the whole overlay
onClicked: {
imageBox.visible = false;
}
}
}
}
}

View file

@ -65,7 +65,7 @@ Rectangle {
HiFiGlyphs {
id: image
text: hifi.glyphs.avatar1
text: hifi.glyphs.avatarTPose
size: 190
color: hifi.colors.white

View file

@ -16,6 +16,7 @@ import "../../controls-uit" as HifiControls
StackView {
id: stack
initialItem: inputConfiguration
property alias messageVisible: imageMessageBox.visible
Rectangle {
id: inputConfiguration
anchors.fill: parent
@ -26,6 +27,15 @@ StackView {
property var pluginSettings: null
HifiControls.ImageMessageBox {
id: imageMessageBox
anchors.fill: parent
z: 2000
imageWidth: 442
imageHeight: 670
source: "../../../images/calibration-help.png"
}
Rectangle {
width: inputConfiguration.width
height: 1
@ -167,7 +177,7 @@ StackView {
loader.item.pluginName = box.currentText;
}
}
if (loader.item.hasOwnProperty("displayInformation")) {
loader.item.displayConfiguration();
}
@ -183,20 +193,20 @@ StackView {
return InputConfiguration.activeInputPlugins();
}
}
function initialize() {
changeSource();
}
function changeSource() {
loader.source = "";
var source = "";
if (box.currentText == "Vive") {
source = InputConfiguration.configurationLayout("OpenVR");
} else {
} else {
source = InputConfiguration.configurationLayout(box.currentText);
}
loader.source = source;
if (source === "") {
box.label = "(not configurable)";
@ -204,14 +214,14 @@ StackView {
box.label = "";
}
}
Timer {
id: timer
repeat: false
interval: 300
onTriggered: initialize()
}
Component.onCompleted: {
timer.start();
}

View file

@ -50,9 +50,12 @@ Rectangle {
readonly property int apply: 1
readonly property int applyAndCalibrate: 2
readonly property int calibrate: 3
}
MouseArea {
id: mouseArea
@ -64,6 +67,7 @@ Rectangle {
mouse.accepted = false;
}
}
color: hifi.colors.baseGray
RalewayBold {
@ -146,6 +150,7 @@ Rectangle {
label: "Y: offset"
minimumValue: -10
stepSize: 0.0254
value: -0.05
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
@ -161,15 +166,16 @@ Rectangle {
minimumValue: -10
stepSize: 0.0254
decimals: 4
value: -0.05
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
sendConfigurationSettings();
}
}
}
RalewayBold {
id: hands
@ -245,7 +251,7 @@ Rectangle {
anchors.left: openVrConfiguration.left
anchors.leftMargin: leftMargin + 10
spacing: 10
HifiControls.SpinBox {
id: handYOffset
decimals: 4
@ -269,7 +275,7 @@ Rectangle {
stepSize: 0.0254
decimals: 4
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
sendConfigurationSettings();
}
@ -290,6 +296,52 @@ Rectangle {
anchors.leftMargin: leftMargin
}
RalewayRegular {
id: info
text: "See Recommended Tracker Placement"
color: hifi.colors.blueHighlight
size: 10
anchors {
left: additional.right
leftMargin: 10
verticalCenter: additional.verticalCenter
}
Rectangle {
id: selected
color: hifi.colors.blueHighlight
width: info.width
height: 1
anchors {
top: info.bottom
topMargin: 1
left: info.left
right: info.right
}
visible: false
}
MouseArea {
anchors.fill: parent;
hoverEnabled: true
onEntered: {
selected.visible = true;
}
onExited: {
selected.visible = false;
}
onClicked: {
stack.messageVisible = true;
}
}
}
Row {
id: feetConfig
anchors.top: additional.bottom
@ -379,6 +431,7 @@ Rectangle {
if (checked) {
hipBox.checked = true;
feetBox.checked = true;
shoulderBox.checked = false;
}
sendConfigurationSettings();
}
@ -416,6 +469,7 @@ Rectangle {
if (checked) {
hipBox.checked = true;
feetBox.checked = true;
chestBox.checked = false;
}
sendConfigurationSettings();
}
@ -463,7 +517,7 @@ Rectangle {
anchors.leftMargin: leftMargin
radius: hifi.buttons.radius
gradient: Gradient {
GradientStop {
position: 0.2
@ -479,7 +533,7 @@ Rectangle {
}
}
}
GradientStop {
position: 1.0
color: {
@ -495,10 +549,10 @@ Rectangle {
}
}
}
HiFiGlyphs {
id: glyphButton
color: enabled ? hifi.buttons.textColor[calibrationButton.color]
@ -512,7 +566,7 @@ Rectangle {
bottomMargin: 1
}
}
RalewayBold {
id: calibrationText
font.capitalization: Font.AllUppercase
@ -527,7 +581,7 @@ Rectangle {
topMargin: 7
}
}
MouseArea {
anchors.fill: parent
@ -549,19 +603,19 @@ Rectangle {
}
}
}
onPressed: {
calibrationButton.pressed = true;
}
onReleased: {
calibrationButton.pressed = false;
}
onEntered: {
calibrationButton.hovered = true;
}
onExited: {
calibrationButton.hovered = false;
}
@ -642,6 +696,57 @@ Rectangle {
}
}
Separator {
id: advanceSeperator
width: parent.width
anchors.top: timeToCalibrate.bottom
anchors.topMargin: 10
}
RalewayBold {
id: advanceSettings
text: "Advanced Settings"
size: 12
color: hifi.colors.white
anchors.top: advanceSeperator.bottom
anchors.topMargin: 10
anchors.left: parent.left
anchors.leftMargin: leftMargin
}
HifiControls.CheckBox {
id: viveInDesktop
width: 15
height: 15
boxRadius: 7
anchors.top: advanceSettings.bottom
anchors.topMargin: 5
anchors.left: openVrConfiguration.left
anchors.leftMargin: leftMargin + 10
onClicked: {
sendConfigurationSettings();
}
}
RalewayBold {
id: viveDesktopText
size: 10
text: "Use Vive devices in desktop mode"
color: hifi.colors.white
anchors {
left: viveInDesktop.right
leftMargin: 5
verticalCenter: viveInDesktop.verticalCenter
}
}
NumberAnimation {
id: numberAnimation
target: openVrConfiguration
@ -667,14 +772,14 @@ Rectangle {
calibratingScreen = screen.createObject();
stack.push(calibratingScreen);
}
if (status["calibrated"]) {
calibrationScreen.success();
if (status["UI"]) {
logAction("mocap_ui_success", status);
}
} else if (!status["calibrated"]) {
calibrationScreen.failure();
@ -728,6 +833,7 @@ Rectangle {
var HmdHead = settings["HMDHead"];
var viveController = settings["handController"];
var desktopMode = settings["desktopMode"];
if (HmdHead) {
headBox.checked = true;
@ -745,6 +851,8 @@ Rectangle {
handBox.checked = false;
}
viveInDesktop.checked = desktopMode;
initializeButtonState();
updateCalibrationText();
@ -786,11 +894,11 @@ Rectangle {
var handOverride = handSetting["override"];
var settingsChanged = false;
if (lastConfiguration["bodyConfiguration"] !== bodySetting) {
settingsChanged = true;
}
var lastHead = lastConfiguration["headConfiguration"];
if (lastHead["override"] !== headOverride) {
settingsChanged = true;
@ -800,13 +908,13 @@ Rectangle {
if (lastHand["override"] !== handOverride) {
settingsChanged = true;
}
if (settingsChanged) {
if ((!handOverride) && (!headOverride) && (bodySetting === "None")) {
state = buttonState.apply;
} else {
state = buttonState.applyAndCalibrate;
}
}
} else {
if (state == buttonState.apply) {
state = buttonState.disabled;
@ -814,7 +922,7 @@ Rectangle {
state = buttonState.calibrate;
}
}
lastConfiguration = settings;
}
@ -831,7 +939,7 @@ Rectangle {
state = buttonState.disabled;
} else {
state = buttonState.calibrate;
}
}
}
function updateCalibrationButton() {
@ -897,11 +1005,12 @@ Rectangle {
"Y": handYOffset.value,
"Z": handZOffset.value
}
var settingsObject = {
"bodyConfiguration": trackerConfiguration,
"headConfiguration": headObject,
"handConfiguration": handObject
"handConfiguration": handObject,
"desktopMode": viveInDesktop.checked
}
return settingsObject;

View file

@ -94,10 +94,20 @@ StackView {
property bool keyboardEnabled: false
property bool keyboardRaised: false
property bool punctuationMode: false
width: parent.width
height: parent.height
MouseArea {
anchors.fill: parent
propagateComposedEvents: true
onPressed: {
parent.forceActiveFocus();
addressBarDialog.keyboardEnabled = false;
mouse.accepted = false;
}
}
anchors {
right: parent.right
left: parent.left
@ -227,9 +237,9 @@ StackView {
MouseArea {
anchors.fill: parent;
onClicked: {
if (!addressLine.focus || !HMD.active) {
addressLine.focus = true;
addressLine.forceActiveFocus();
addressLine.focus = true;
addressLine.forceActiveFocus();
if (HMD.active) {
addressBarDialog.keyboardEnabled = HMD.active;
}
tabletRoot.playButtonClickSound();

View file

@ -336,5 +336,6 @@ Item {
readonly property string source: "\ue01c"
readonly property string playback_play: "\ue01d"
readonly property string stop_square: "\ue01e"
readonly property string avatarTPose: "\ue01f"
}
}

View file

@ -25,6 +25,7 @@
#include <QtCore/QCommandLineParser>
#include <QtCore/QMimeData>
#include <QtCore/QThreadPool>
#include <QtConcurrent/QtConcurrentRun>
#include <QtGui/QScreen>
#include <QtGui/QWindow>
@ -111,10 +112,7 @@
#include <plugins/InputConfiguration.h>
#include <RecordingScriptingInterface.h>
#include <RenderableWebEntityItem.h>
#include <RenderShadowTask.h>
#include <render/RenderFetchCullSortTask.h>
#include <RenderDeferredTask.h>
#include <RenderForwardTask.h>
#include <UpdateSceneTask.h>
#include <RenderViewTask.h>
#include <SecondaryCamera.h>
#include <ResourceCache.h>
@ -480,6 +478,12 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
static const auto SUPPRESS_SETTINGS_RESET = "--suppress-settings-reset";
bool suppressPrompt = cmdOptionExists(argc, const_cast<const char**>(argv), SUPPRESS_SETTINGS_RESET);
bool previousSessionCrashed = CrashHandler::checkForResetSettings(runningMarkerExisted, suppressPrompt);
// get dir to use for cache
static const auto CACHE_SWITCH = "--cache";
QString cacheDir = getCmdOption(argc, const_cast<const char**>(argv), CACHE_SWITCH);
if (!cacheDir.isEmpty()) {
qApp->setProperty(hifi::properties::APP_LOCAL_DATA_PATH, cacheDir);
}
Setting::init();
@ -993,12 +997,67 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// add firstRun flag from settings to launch event
Setting::Handle<bool> firstRun { Settings::firstRun, true };
properties["first_run"] = firstRun.get();
// add the user's machine ID to the launch event
properties["machine_fingerprint"] = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
auto& userActivityLogger = UserActivityLogger::getInstance();
if (!userActivityLogger.isDisabledSettingSet()) {
// the user activity logger is opt-out for Interface
// but it's defaulted to disabled for other targets
// so we need to enable it here if it has never been disabled by the user
userActivityLogger.disable(false);
}
UserActivityLogger::getInstance().logAction("launch", properties);
if (userActivityLogger.isEnabled()) {
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
static const QString TESTER = "HIFI_TESTER";
auto gpuIdent = GPUIdent::getInstance();
auto glContextData = getGLContextData();
QJsonObject properties = {
{ "version", applicationVersion() },
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
{ "previousSessionCrashed", _previousSessionCrashed },
{ "previousSessionRuntime", sessionRunTime.get() },
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },
{ "kernel_type", QSysInfo::kernelType() },
{ "kernel_version", QSysInfo::kernelVersion() },
{ "os_type", QSysInfo::productType() },
{ "os_version", QSysInfo::productVersion() },
{ "gpu_name", gpuIdent->getName() },
{ "gpu_driver", gpuIdent->getDriver() },
{ "gpu_memory", static_cast<qint64>(gpuIdent->getMemory()) },
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
{ "gl_version", glContextData["version"] },
{ "gl_vender", glContextData["vendor"] },
{ "gl_sl_version", glContextData["sl_version"] },
{ "gl_renderer", glContextData["renderer"] },
{ "ideal_thread_count", QThread::idealThreadCount() }
};
auto macVersion = QSysInfo::macVersion();
if (macVersion != QSysInfo::MV_None) {
properties["os_osx_version"] = QSysInfo::macVersion();
}
auto windowsVersion = QSysInfo::windowsVersion();
if (windowsVersion != QSysInfo::WV_None) {
properties["os_win_version"] = QSysInfo::windowsVersion();
}
ProcessorInfo procInfo;
if (getProcessorInfo(procInfo)) {
properties["processor_core_count"] = procInfo.numProcessorCores;
properties["logical_processor_count"] = procInfo.numLogicalProcessors;
properties["processor_l1_cache_count"] = procInfo.numProcessorCachesL1;
properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
}
properties["first_run"] = firstRun.get();
// add the user's machine ID to the launch event
properties["machine_fingerprint"] = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
userActivityLogger.logAction("launch", properties);
}
// Tell our entity edit sender about our known jurisdictions
_entityEditSender.setServerJurisdictions(&_entityServerJurisdictions);
@ -1220,8 +1279,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
settingsTimer->stop();
// Delete it (this will trigger the thread destruction
settingsTimer->deleteLater();
// Mark the settings thread as finished, so we know we can safely save in the main application
// shutdown code
// Mark the settings thread as finished, so we know we can safely save in the main application
// shutdown code
_settingsGuard.trigger();
});
@ -1958,6 +2017,7 @@ void Application::initializeGL() {
render::CullFunctor cullFunctor = LODManager::shouldRender;
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
bool isDeferred = !QProcessEnvironment::systemEnvironment().contains(RENDER_FORWARD);
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraFrame", cullFunctor);
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, isDeferred);
_renderEngine->load();
@ -2166,48 +2226,74 @@ void Application::paintGL() {
return;
}
auto displayPlugin = getActiveDisplayPlugin();
// FIXME not needed anymore?
_offscreenContext->makeCurrent();
DisplayPluginPointer displayPlugin;
{
PROFILE_RANGE(render, "/getActiveDisplayPlugin");
displayPlugin = getActiveDisplayPlugin();
}
// If a display plugin loses it's underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_frameCount)) {
_inPaint = false;
updateDisplayMode();
return;
{
PROFILE_RANGE(render, "/offscreenMakeCurrent");
// FIXME not needed anymore?
_offscreenContext->makeCurrent();
}
{
PROFILE_RANGE(render, "/pluginBeginFrameRender");
// If a display plugin loses it's underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_frameCount)) {
_inPaint = false;
updateDisplayMode();
return;
}
}
// update the avatar with a fresh HMD pose
getMyAvatar()->updateFromHMDSensorMatrix(getHMDSensorPose());
{
PROFILE_RANGE(render, "/updateAvatar");
getMyAvatar()->updateFromHMDSensorMatrix(getHMDSensorPose());
}
auto lodManager = DependencyManager::get<LODManager>();
RenderArgs renderArgs;
{
QMutexLocker viewLocker(&_viewMutex);
_viewFrustum.calculate();
}
RenderArgs renderArgs(_gpuContext, getEntities(), lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(), RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::MONO, RenderArgs::RENDER_DEBUG_NONE);
{
QMutexLocker viewLocker(&_viewMutex);
renderArgs.setViewFrustum(_viewFrustum);
PROFILE_RANGE(render, "/buildFrustrumAndArgs");
{
QMutexLocker viewLocker(&_viewMutex);
_viewFrustum.calculate();
}
renderArgs = RenderArgs(_gpuContext, getEntities(), lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(), RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::MONO, RenderArgs::RENDER_DEBUG_NONE);
{
QMutexLocker viewLocker(&_viewMutex);
renderArgs.setViewFrustum(_viewFrustum);
}
}
PerformanceWarning::setSuppressShortTimings(Menu::getInstance()->isOptionChecked(MenuOption::SuppressShortTimings));
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::paintGL()");
resizeGL();
_gpuContext->beginFrame(getHMDSensorPose());
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch(_gpuContext, [&](gpu::Batch& batch) {
batch.resetStages();
});
{
PROFILE_RANGE(render, "/resizeGL");
PerformanceWarning::setSuppressShortTimings(Menu::getInstance()->isOptionChecked(MenuOption::SuppressShortTimings));
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::paintGL()");
resizeGL();
}
{
PROFILE_RANGE(render, "/gpuContextReset");
_gpuContext->beginFrame(getHMDSensorPose());
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch(_gpuContext, [&](gpu::Batch& batch) {
batch.resetStages();
});
}
{
PROFILE_RANGE(render, "/renderOverlay");
PerformanceTimer perfTimer("renderOverlay");
// NOTE: There is no batch associated with this renderArgs
// the ApplicationOverlay class assumes it's viewport is setup to be the device size
@ -2218,114 +2304,127 @@ void Application::paintGL() {
glm::vec3 boomOffset;
{
PerformanceTimer perfTimer("CameraUpdates");
PROFILE_RANGE(render, "/updateCamera");
{
PerformanceTimer perfTimer("CameraUpdates");
auto myAvatar = getMyAvatar();
boomOffset = myAvatar->getScale() * myAvatar->getBoomLength() * -IDENTITY_FORWARD;
auto myAvatar = getMyAvatar();
boomOffset = myAvatar->getScale() * myAvatar->getBoomLength() * -IDENTITY_FORWARD;
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON || _myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, !(myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN));
cameraMenuChanged();
}
// The render mode is default or mirror if the camera is in mirror mode, assigned further below
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
// Always use the default eye position, not the actual head eye position.
// Using the latter will cause the camera to wobble with idle animations,
// or with changes from the face tracker
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
if (isHMDMode()) {
mat4 camMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
_myCamera.setPosition(extractTranslation(camMat));
_myCamera.setOrientation(glm::quat_cast(camMat));
} else {
_myCamera.setPosition(myAvatar->getDefaultEyePosition());
_myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON || _myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, !(myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN));
cameraMenuChanged();
}
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
if (isHMDMode()) {
auto hmdWorldMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
_myCamera.setOrientation(glm::normalize(glm::quat_cast(hmdWorldMat)));
_myCamera.setPosition(extractTranslation(hmdWorldMat) +
myAvatar->getOrientation() * boomOffset);
} else {
_myCamera.setOrientation(myAvatar->getHead()->getOrientation());
if (Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ _myCamera.getOrientation() * boomOffset);
} else {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ myAvatar->getOrientation() * boomOffset);
}
}
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (isHMDMode()) {
auto mirrorBodyOrientation = myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f));
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
// Mirror HMD yaw and roll
glm::vec3 mirrorHmdEulers = glm::eulerAngles(hmdRotation);
mirrorHmdEulers.y = -mirrorHmdEulers.y;
mirrorHmdEulers.z = -mirrorHmdEulers.z;
glm::quat mirrorHmdRotation = glm::quat(mirrorHmdEulers);
// The render mode is default or mirror if the camera is in mirror mode, assigned further below
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
glm::quat worldMirrorRotation = mirrorBodyOrientation * mirrorHmdRotation;
_myCamera.setOrientation(worldMirrorRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
// Mirror HMD lateral offsets
hmdOffset.x = -hmdOffset.x;
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0)
+ mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ mirrorBodyOrientation * hmdOffset);
} else {
_myCamera.setOrientation(myAvatar->getWorldAlignedOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0)
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
}
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
} else if (_myCamera.getMode() == CAMERA_MODE_ENTITY) {
EntityItemPointer cameraEntity = _myCamera.getCameraEntityPointer();
if (cameraEntity != nullptr) {
// Always use the default eye position, not the actual head eye position.
// Using the latter will cause the camera to wobble with idle animations,
// or with changes from the face tracker
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
if (isHMDMode()) {
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
_myCamera.setOrientation(cameraEntity->getRotation() * hmdRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
_myCamera.setPosition(cameraEntity->getPosition() + (hmdRotation * hmdOffset));
mat4 camMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
_myCamera.setPosition(extractTranslation(camMat));
_myCamera.setOrientation(glm::quat_cast(camMat));
} else {
_myCamera.setOrientation(cameraEntity->getRotation());
_myCamera.setPosition(cameraEntity->getPosition());
_myCamera.setPosition(myAvatar->getDefaultEyePosition());
_myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
}
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
if (isHMDMode()) {
auto hmdWorldMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
_myCamera.setOrientation(glm::normalize(glm::quat_cast(hmdWorldMat)));
_myCamera.setPosition(extractTranslation(hmdWorldMat) +
myAvatar->getOrientation() * boomOffset);
} else {
_myCamera.setOrientation(myAvatar->getHead()->getOrientation());
if (Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ _myCamera.getOrientation() * boomOffset);
} else {
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ myAvatar->getOrientation() * boomOffset);
}
}
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (isHMDMode()) {
auto mirrorBodyOrientation = myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f));
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
// Mirror HMD yaw and roll
glm::vec3 mirrorHmdEulers = glm::eulerAngles(hmdRotation);
mirrorHmdEulers.y = -mirrorHmdEulers.y;
mirrorHmdEulers.z = -mirrorHmdEulers.z;
glm::quat mirrorHmdRotation = glm::quat(mirrorHmdEulers);
glm::quat worldMirrorRotation = mirrorBodyOrientation * mirrorHmdRotation;
_myCamera.setOrientation(worldMirrorRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
// Mirror HMD lateral offsets
hmdOffset.x = -hmdOffset.x;
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0)
+ mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ mirrorBodyOrientation * hmdOffset);
} else {
_myCamera.setOrientation(myAvatar->getWorldAlignedOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0)
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
}
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
} else if (_myCamera.getMode() == CAMERA_MODE_ENTITY) {
EntityItemPointer cameraEntity = _myCamera.getCameraEntityPointer();
if (cameraEntity != nullptr) {
if (isHMDMode()) {
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
_myCamera.setOrientation(cameraEntity->getRotation() * hmdRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
_myCamera.setPosition(cameraEntity->getPosition() + (hmdRotation * hmdOffset));
} else {
_myCamera.setOrientation(cameraEntity->getRotation());
_myCamera.setPosition(cameraEntity->getPosition());
}
}
}
}
// Update camera position
if (!isHMDMode()) {
_myCamera.update(1.0f / _frameCounter.rate());
// Update camera position
if (!isHMDMode()) {
_myCamera.update(1.0f / _frameCounter.rate());
}
}
}
getApplicationCompositor().setFrameInfo(_frameCount, _myCamera.getTransform());
{
PROFILE_RANGE(render, "/updateCompositor");
getApplicationCompositor().setFrameInfo(_frameCount, _myCamera.getTransform());
}
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
const QSize size = framebufferCache->getFrameBufferSize();
// Final framebuffer that will be handled to the display-plugin
auto finalFramebuffer = framebufferCache->getFramebuffer();
gpu::FramebufferPointer finalFramebuffer;
QSize finalFramebufferSize;
{
PROFILE_RANGE(render, "/getOutputFramebuffer");
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
finalFramebufferSize = framebufferCache->getFrameBufferSize();
// Final framebuffer that will be handled to the display-plugin
finalFramebuffer = framebufferCache->getFramebuffer();
}
{
PROFILE_RANGE(render, "/mainRender");
PerformanceTimer perfTimer("mainRender");
renderArgs._boomOffset = boomOffset;
// FIXME is this ever going to be different from the size previously set in the render args
// in the overlay render?
// Viewport is assigned to the size of the framebuffer
renderArgs._viewport = ivec4(0, 0, size.width(), size.height());
renderArgs._viewport = ivec4(0, 0, finalFramebufferSize.width(), finalFramebufferSize.height());
if (displayPlugin->isStereo()) {
// Stereo modes will typically have a larger projection matrix overall,
// so we ask for the 'mono' projection matrix, which for stereo and HMD
@ -3625,6 +3724,133 @@ bool Application::shouldPaint(float nsecsElapsed) {
#include <TCHAR.h>
#include <pdh.h>
#pragma comment(lib, "pdh.lib")
#pragma comment(lib, "ntdll.lib")
extern "C" {
enum SYSTEM_INFORMATION_CLASS {
SystemBasicInformation = 0,
SystemProcessorPerformanceInformation = 8,
};
struct SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION {
LARGE_INTEGER IdleTime;
LARGE_INTEGER KernelTime;
LARGE_INTEGER UserTime;
LARGE_INTEGER DpcTime;
LARGE_INTEGER InterruptTime;
ULONG InterruptCount;
};
struct SYSTEM_BASIC_INFORMATION {
ULONG Reserved;
ULONG TimerResolution;
ULONG PageSize;
ULONG NumberOfPhysicalPages;
ULONG LowestPhysicalPageNumber;
ULONG HighestPhysicalPageNumber;
ULONG AllocationGranularity;
ULONG_PTR MinimumUserModeAddress;
ULONG_PTR MaximumUserModeAddress;
ULONG_PTR ActiveProcessorsAffinityMask;
CCHAR NumberOfProcessors;
};
NTSYSCALLAPI NTSTATUS NTAPI NtQuerySystemInformation(
_In_ SYSTEM_INFORMATION_CLASS SystemInformationClass,
_Out_writes_bytes_opt_(SystemInformationLength) PVOID SystemInformation,
_In_ ULONG SystemInformationLength,
_Out_opt_ PULONG ReturnLength
);
}
template <typename T>
NTSTATUS NtQuerySystemInformation(SYSTEM_INFORMATION_CLASS SystemInformationClass, T& t) {
return NtQuerySystemInformation(SystemInformationClass, &t, (ULONG)sizeof(T), nullptr);
}
template <typename T>
NTSTATUS NtQuerySystemInformation(SYSTEM_INFORMATION_CLASS SystemInformationClass, std::vector<T>& t) {
return NtQuerySystemInformation(SystemInformationClass, t.data(), (ULONG)(sizeof(T) * t.size()), nullptr);
}
template <typename T>
void updateValueAndDelta(std::pair<T, T>& pair, T newValue) {
auto& value = pair.first;
auto& delta = pair.second;
delta = (value != 0) ? newValue - value : 0;
value = newValue;
}
struct MyCpuInfo {
using ValueAndDelta = std::pair<LONGLONG, LONGLONG>;
std::string name;
ValueAndDelta kernel { 0, 0 };
ValueAndDelta user { 0, 0 };
ValueAndDelta idle { 0, 0 };
float kernelUsage { 0.0f };
float userUsage { 0.0f };
void update(const SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION& cpuInfo) {
updateValueAndDelta(kernel, cpuInfo.KernelTime.QuadPart);
updateValueAndDelta(user, cpuInfo.UserTime.QuadPart);
updateValueAndDelta(idle, cpuInfo.IdleTime.QuadPart);
auto totalTime = kernel.second + user.second + idle.second;
if (totalTime != 0) {
kernelUsage = (FLOAT)kernel.second / totalTime;
userUsage = (FLOAT)user.second / totalTime;
} else {
kernelUsage = userUsage = 0.0f;
}
}
};
void updateCpuInformation() {
static std::once_flag once;
static SYSTEM_BASIC_INFORMATION systemInfo {};
static SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION cpuTotals;
static std::vector<SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION> cpuInfos;
static std::vector<MyCpuInfo> myCpuInfos;
static MyCpuInfo myCpuTotals;
std::call_once(once, [&] {
NtQuerySystemInformation( SystemBasicInformation, systemInfo);
cpuInfos.resize(systemInfo.NumberOfProcessors);
myCpuInfos.resize(systemInfo.NumberOfProcessors);
for (size_t i = 0; i < systemInfo.NumberOfProcessors; ++i) {
myCpuInfos[i].name = "cpu." + std::to_string(i);
}
myCpuTotals.name = "cpu.total";
});
NtQuerySystemInformation(SystemProcessorPerformanceInformation, cpuInfos);
// Zero the CPU totals.
memset(&cpuTotals, 0, sizeof(SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION));
for (size_t i = 0; i < systemInfo.NumberOfProcessors; ++i) {
auto& cpuInfo = cpuInfos[i];
// KernelTime includes IdleTime.
cpuInfo.KernelTime.QuadPart -= cpuInfo.IdleTime.QuadPart;
// Update totals
cpuTotals.IdleTime.QuadPart += cpuInfo.IdleTime.QuadPart;
cpuTotals.KernelTime.QuadPart += cpuInfo.KernelTime.QuadPart;
cpuTotals.UserTime.QuadPart += cpuInfo.UserTime.QuadPart;
// Update friendly structure
auto& myCpuInfo = myCpuInfos[i];
myCpuInfo.update(cpuInfo);
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
{ "kernel", myCpuInfo.kernelUsage },
{ "user", myCpuInfo.userUsage }
});
}
myCpuTotals.update(cpuTotals);
PROFILE_COUNTER(app, myCpuTotals.name.c_str(), {
{ "kernel", myCpuTotals.kernelUsage },
{ "user", myCpuTotals.userUsage }
});
}
static ULARGE_INTEGER lastCPU, lastSysCPU, lastUserCPU;
static int numProcessors;
@ -3677,6 +3903,26 @@ void getCpuUsage(vec3& systemAndUser) {
systemAndUser.z = (float)counterVal.doubleValue;
}
void setupCpuMonitorThread() {
initCpuUsage();
auto cpuMonitorThread = QThread::currentThread();
QTimer* timer = new QTimer();
timer->setInterval(50);
QObject::connect(timer, &QTimer::timeout, [] {
updateCpuInformation();
vec3 kernelUserAndSystem;
getCpuUsage(kernelUserAndSystem);
PROFILE_COUNTER(app, "cpuProcess", { { "system", kernelUserAndSystem.x }, { "user", kernelUserAndSystem.y } });
PROFILE_COUNTER(app, "cpuSystem", { { "system", kernelUserAndSystem.z } });
});
QObject::connect(cpuMonitorThread, &QThread::finished, [=] {
timer->deleteLater();
cpuMonitorThread->deleteLater();
});
timer->start();
}
#endif
@ -3697,15 +3943,17 @@ void Application::idle(float nsecsElapsed) {
}
#ifdef Q_OS_WIN
// If tracing is enabled then monitor the CPU in a separate thread
static std::once_flag once;
std::call_once(once, [] {
initCpuUsage();
std::call_once(once, [&] {
if (trace_app().isDebugEnabled()) {
QThread* cpuMonitorThread = new QThread(qApp);
cpuMonitorThread->setObjectName("cpuMonitorThread");
QObject::connect(cpuMonitorThread, &QThread::started, [this] { setupCpuMonitorThread(); });
QObject::connect(qApp, &QCoreApplication::aboutToQuit, cpuMonitorThread, &QThread::quit);
cpuMonitorThread->start();
}
});
vec3 kernelUserAndSystem;
getCpuUsage(kernelUserAndSystem);
PROFILE_COUNTER(app, "cpuProcess", { { "system", kernelUserAndSystem.x }, { "user", kernelUserAndSystem.y } });
PROFILE_COUNTER(app, "cpuSystem", { { "system", kernelUserAndSystem.z } });
#endif
@ -5175,7 +5423,7 @@ namespace render {
auto& batch = *args->_batch;
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
renderWorldBox(batch);
renderWorldBox(args, batch);
}
}
}
@ -5238,10 +5486,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
}
{
PerformanceTimer perfTimer("SceneProcessTransaction");
_main3DScene->enqueueTransaction(transaction);
_main3DScene->processTransactionQueue();
}
// For now every frame pass the renderContext

View file

@ -299,7 +299,6 @@ public:
void setAvatarOverrideUrl(const QUrl& url, bool save);
QUrl getAvatarOverrideUrl() { return _avatarOverrideUrl; }
bool getSaveAvatarOverrideUrl() { return _saveAvatarOverrideUrl; }
void setCacheOverrideDir(const QString& dirName) { _cacheDir = dirName; }
signals:
void svoImportRequested(const QString& url);
@ -691,6 +690,5 @@ private:
QUrl _avatarOverrideUrl;
bool _saveAvatarOverrideUrl { false };
QString _cacheDir;
};
#endif // hifi_Application_h

View file

@ -319,7 +319,7 @@ Menu::Menu() {
QString("../../hifi/tablet/TabletLodPreferences.qml"), "LodPreferencesDialog");
});
action = addActionToQMenuAndActionHash(settingsMenu, "Controller Settings");
action = addActionToQMenuAndActionHash(settingsMenu, "Controller Settings...");
connect(action, &QAction::triggered, [] {
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
auto hmd = DependencyManager::get<HMDScriptingInterface>();

View file

@ -34,7 +34,7 @@
using namespace std;
void renderWorldBox(gpu::Batch& batch) {
void renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show center of world
@ -115,7 +115,7 @@ void renderWorldBox(gpu::Batch& batch) {
geometryIds[17]);
geometryCache->renderWireCubeInstance(batch, GREY4);
geometryCache->renderWireCubeInstance(args, batch, GREY4);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
@ -123,23 +123,23 @@ void renderWorldBox(gpu::Batch& batch) {
transform = Transform().setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, RED);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, RED);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, GREEN);
geometryCache->renderSolidSphereInstance(args, batch, GREEN);
transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, BLUE);
geometryCache->renderSolidSphereInstance(args, batch, BLUE);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, GREY);
geometryCache->renderSolidSphereInstance(args, batch, GREY);
}
// Do some basic timing tests and report the results

View file

@ -16,8 +16,9 @@
#include <glm/gtc/quaternion.hpp>
#include <gpu/Batch.h>
#include <render/Forward.h>
void renderWorldBox(gpu::Batch& batch);
void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
void runTimingTests();
void runUnitTests();

View file

@ -1634,7 +1634,8 @@ void MyAvatar::prepareForPhysicsSimulation() {
_characterController.setParentVelocity(parentVelocity);
_characterController.setPositionAndOrientation(getPosition(), getOrientation());
if (qApp->isHMDMode()) {
auto headPose = getHeadControllerPoseInAvatarFrame();
if (headPose.isValid()) {
_follow.prePhysicsUpdate(*this, deriveBodyFromHMDSensor(), _bodySensorMatrix, hasDriveInput());
} else {
_follow.deactivate();

View file

@ -101,7 +101,7 @@ int main(int argc, const char* argv[]) {
if (allowMultipleInstances) {
instanceMightBeRunning = false;
}
// this needs to be done here in main, as the mechanism for setting the
// this needs to be done here in main, as the mechanism for setting the
// scripts directory appears not to work. See the bug report
// https://highfidelity.fogbugz.com/f/cases/5759/Issues-changing-scripts-directory-in-ScriptsEngine
if (parser.isSet(overrideScriptsPathOption)) {
@ -111,20 +111,6 @@ int main(int argc, const char* argv[]) {
}
}
if (parser.isSet(overrideAppLocalDataPathOption)) {
// get dir to use for cache
QString cacheDir = parser.value(overrideAppLocalDataPathOption);
if (!cacheDir.isEmpty()) {
// tell everyone to use the right cache location
//
// this handles data8 and prepared
DependencyManager::get<ResourceManager>()->setCacheDir(cacheDir);
// this does the ktx_cache
PathUtils::getAppLocalDataPath(cacheDir);
}
}
if (instanceMightBeRunning) {
// Try to connect and send message to existing interface instance
QLocalSocket socket;

View file

@ -73,7 +73,7 @@ void Cube3DOverlay::render(RenderArgs* args) {
if (_isSolid) {
transform.setScale(dimensions);
batch->setModelTransform(transform);
geometryCache->renderSolidCubeInstance(*batch, cubeColor, pipeline);
geometryCache->renderSolidCubeInstance(args, *batch, cubeColor, pipeline);
} else {
geometryCache->bindSimpleProgram(*batch, false, false, false, true, true);
if (getIsDashedLine()) {
@ -109,7 +109,7 @@ void Cube3DOverlay::render(RenderArgs* args) {
} else {
transform.setScale(dimensions);
batch->setModelTransform(transform);
geometryCache->renderWireCubeInstance(*batch, cubeColor, pipeline);
geometryCache->renderWireCubeInstance(args, *batch, cubeColor, pipeline);
}
}
}

View file

@ -53,9 +53,9 @@ void Shape3DOverlay::render(RenderArgs* args) {
transform.setScale(dimensions);
batch->setModelTransform(transform);
if (_isSolid) {
geometryCache->renderSolidShapeInstance(*batch, _shape, cubeColor, pipeline);
geometryCache->renderSolidShapeInstance(args, *batch, _shape, cubeColor, pipeline);
} else {
geometryCache->renderWireShapeInstance(*batch, _shape, cubeColor, pipeline);
geometryCache->renderWireShapeInstance(args, *batch, _shape, cubeColor, pipeline);
}
}
}

View file

@ -50,9 +50,9 @@ void Sphere3DOverlay::render(RenderArgs* args) {
}
if (_isSolid) {
geometryCache->renderSolidSphereInstance(*batch, sphereColor, pipeline);
geometryCache->renderSolidSphereInstance(args, *batch, sphereColor, pipeline);
} else {
geometryCache->renderWireSphereInstance(*batch, sphereColor, pipeline);
geometryCache->renderWireSphereInstance(args, *batch, sphereColor, pipeline);
}
}
}

View file

@ -138,7 +138,7 @@ void Text3DOverlay::render(RenderArgs* args) {
_textRenderer->draw(batch, 0, 0, getText(), textColor, glm::vec2(-1.0f), getDrawInFront());
// so before we continue, we must reset the pipeline
batch.setPipeline(args->_pipeline->pipeline);
args->_pipeline->prepare(batch);
args->_pipeline->prepare(batch, args);
}
const render::ShapeKey Text3DOverlay::getShapeKey() {

View file

@ -610,7 +610,7 @@ void Avatar::render(RenderArgs* renderArgs) {
if (showCollisionShapes && shouldRenderHead(renderArgs) && _skeletonModel->isRenderable()) {
PROFILE_RANGE_BATCH(batch, __FUNCTION__":skeletonBoundingCollisionShapes");
const float BOUNDING_SHAPE_ALPHA = 0.7f;
_skeletonModel->renderBoundingCollisionShapes(*renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
_skeletonModel->renderBoundingCollisionShapes(renderArgs, *renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
}
if (showReceiveStats || showNamesAboveHeads) {

View file

@ -322,20 +322,20 @@ void SkeletonModel::computeBoundingShape() {
_boundingCapsuleLocalOffset = invScale * offset;
}
void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float scale, float alpha) {
void SkeletonModel::renderBoundingCollisionShapes(RenderArgs* args, gpu::Batch& batch, float scale, float alpha) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// draw a blue sphere at the capsule top point
glm::vec3 topPoint = _translation + getRotation() * (scale * (_boundingCapsuleLocalOffset + (0.5f * _boundingCapsuleHeight) * Vectors::UNIT_Y));
batch.setModelTransform(Transform().setTranslation(topPoint).postScale(scale * _boundingCapsuleRadius));
geometryCache->renderSolidSphereInstance(batch, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
geometryCache->renderSolidSphereInstance(args, batch, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
// draw a yellow sphere at the capsule bottom point
glm::vec3 bottomPoint = topPoint - glm::vec3(0.0f, scale * _boundingCapsuleHeight, 0.0f);
glm::vec3 axis = topPoint - bottomPoint;
batch.setModelTransform(Transform().setTranslation(bottomPoint).postScale(scale * _boundingCapsuleRadius));
geometryCache->renderSolidSphereInstance(batch, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
geometryCache->renderSolidSphereInstance(args, batch, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
// draw a green cylinder between the two points
glm::vec3 origin(0.0f);

View file

@ -96,7 +96,7 @@ public:
/// \return whether or not the head was found.
glm::vec3 getDefaultEyeModelPosition() const;
void renderBoundingCollisionShapes(gpu::Batch& batch, float scale, float alpha);
void renderBoundingCollisionShapes(RenderArgs* args, gpu::Batch& batch, float scale, float alpha);
float getBoundingCapsuleRadius() const { return _boundingCapsuleRadius; }
float getBoundingCapsuleHeight() const { return _boundingCapsuleHeight; }
const glm::vec3 getBoundingCapsuleOffset() const { return _boundingCapsuleLocalOffset; }

View file

@ -378,7 +378,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
auto shapeTransform = getTransformToCenter(success);
if (success) {
batch.setModelTransform(shapeTransform); // we want to include the scale as well
DependencyManager::get<GeometryCache>()->renderWireCubeInstance(batch, greenColor);
DependencyManager::get<GeometryCache>()->renderWireCubeInstance(args, batch, greenColor);
}
return;
}

View file

@ -130,9 +130,9 @@ void RenderableShapeEntityItem::render(RenderArgs* args) {
assert(pipeline != nullptr);
if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
geometryCache->renderWireShapeInstance(batch, MAPPING[_shape], color, pipeline);
geometryCache->renderWireShapeInstance(args, batch, MAPPING[_shape], color, pipeline);
} else {
geometryCache->renderSolidShapeInstance(batch, MAPPING[_shape], color, pipeline);
geometryCache->renderSolidShapeInstance(args, batch, MAPPING[_shape], color, pipeline);
}
}

View file

@ -221,10 +221,10 @@ void RenderableZoneEntityItem::render(RenderArgs* args) {
if (getShapeType() == SHAPE_TYPE_SPHERE) {
shapeTransform.postScale(SPHERE_ENTITY_SCALE);
batch.setModelTransform(shapeTransform);
geometryCache->renderWireSphereInstance(batch, DEFAULT_COLOR);
geometryCache->renderWireSphereInstance(args, batch, DEFAULT_COLOR);
} else {
batch.setModelTransform(shapeTransform);
geometryCache->renderWireCubeInstance(batch, DEFAULT_COLOR);
geometryCache->renderWireCubeInstance(args, batch, DEFAULT_COLOR);
}
break;
}
@ -554,11 +554,11 @@ void RenderableZoneEntityItemMeta::setProceduralUserData(QString userData) {
void RenderableZoneEntityItemMeta::render(RenderArgs* args) {
if (!_stage) {
_stage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
_stage = args->_scene->getStage<LightStage>();
}
if (!_backgroundStage) {
_backgroundStage = DependencyManager::get<DeferredLightingEffect>()->getBackgroundStage();
_backgroundStage = args->_scene->getStage<BackgroundStage>();
}
{ // Sun

View file

@ -19,6 +19,8 @@
#include <QtScript/QScriptEngine>
#include <QtNetwork/QNetworkDiskCache>
#include <shared/GlobalAppProperties.h>
#include "AssetRequest.h"
#include "AssetUpload.h"
#include "AssetUtils.h"
@ -31,11 +33,12 @@
MessageID AssetClient::_currentID = 0;
AssetClient::AssetClient(const QString& cacheDir) : _cacheDir(cacheDir) {
AssetClient::AssetClient() {
_cacheDir = qApp->property(hifi::properties::APP_LOCAL_DATA_PATH).toString();
setCustomDeleter([](Dependency* dependency){
static_cast<AssetClient*>(dependency)->deleteLater();
});
auto nodeList = DependencyManager::get<NodeList>();
auto& packetReceiver = nodeList->getPacketReceiver();
@ -105,7 +108,7 @@ void AssetClient::handleAssetMappingOperationReply(QSharedPointer<ReceivedMessag
MessageID messageID;
message->readPrimitive(&messageID);
AssetServerError error;
message->readPrimitive(&error);
@ -132,13 +135,13 @@ void AssetClient::handleAssetMappingOperationReply(QSharedPointer<ReceivedMessag
bool haveAssetServer() {
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (!assetServer) {
qCWarning(asset_client) << "Could not complete AssetClient operation "
<< "since you are not currently connected to an asset-server.";
return false;
}
return true;
}
@ -220,14 +223,14 @@ MessageID AssetClient::getAsset(const QString& hash, DataOffset start, DataOffse
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (assetServer) {
auto messageID = ++_currentID;
auto payloadSize = sizeof(messageID) + SHA256_HASH_LENGTH + sizeof(start) + sizeof(end);
auto packet = NLPacket::create(PacketType::AssetGet, payloadSize, true);
qCDebug(asset_client) << "Requesting data from" << start << "to" << end << "of" << hash << "from asset-server.";
packet->writePrimitive(messageID);
packet->write(QByteArray::fromHex(hash.toLatin1()));
@ -254,10 +257,10 @@ MessageID AssetClient::getAssetInfo(const QString& hash, GetInfoCallback callbac
if (assetServer) {
auto messageID = ++_currentID;
auto payloadSize = sizeof(messageID) + SHA256_HASH_LENGTH;
auto packet = NLPacket::create(PacketType::AssetGetInfo, payloadSize, true);
packet->writePrimitive(messageID);
packet->write(QByteArray::fromHex(hash.toLatin1()));
@ -278,7 +281,7 @@ void AssetClient::handleAssetGetInfoReply(QSharedPointer<ReceivedMessage> messag
MessageID messageID;
message->readPrimitive(&messageID);
auto assetHash = message->read(SHA256_HASH_LENGTH);
AssetServerError error;
message->readPrimitive(&error);
@ -367,7 +370,7 @@ void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, S
callbacks.completeCallback(true, error, message->readAll());
}
messageCallbackMap.erase(requestIt);
}
}
@ -478,7 +481,7 @@ MessageID AssetClient::getAllAssetMappings(MappingOperationCallback callback) {
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (assetServer) {
auto packetList = NLPacketList::create(PacketType::AssetMappingOperation, QByteArray(), true, true);
@ -501,7 +504,7 @@ MessageID AssetClient::getAllAssetMappings(MappingOperationCallback callback) {
MessageID AssetClient::deleteAssetMappings(const AssetPathList& paths, MappingOperationCallback callback) {
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (assetServer) {
auto packetList = NLPacketList::create(PacketType::AssetMappingOperation, QByteArray(), true, true);
@ -532,7 +535,7 @@ MessageID AssetClient::setAssetMapping(const QString& path, const AssetHash& has
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (assetServer) {
auto packetList = NLPacketList::create(PacketType::AssetMappingOperation, QByteArray(), true, true);
@ -644,7 +647,7 @@ MessageID AssetClient::uploadAsset(const QByteArray& data, UploadResultCallback
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (assetServer) {
auto packetList = NLPacketList::create(PacketType::AssetUpload, QByteArray(), true, true);
@ -682,7 +685,7 @@ void AssetClient::handleAssetUploadReply(QSharedPointer<ReceivedMessage> message
} else {
auto hash = message->read(SHA256_HASH_LENGTH);
hashString = hash.toHex();
qCDebug(asset_client) << "Successfully uploaded asset to asset-server - SHA256 hash is " << hashString;
}

View file

@ -49,7 +49,7 @@ using ProgressCallback = std::function<void(qint64 totalReceived, qint64 total)>
class AssetClient : public QObject, public Dependency {
Q_OBJECT
public:
AssetClient(const QString& cacheDir="");
AssetClient();
Q_INVOKABLE GetMappingRequest* createGetMappingRequest(const AssetPath& path);
Q_INVOKABLE GetAllMappingsRequest* createGetAllMappingsRequest();

View file

@ -28,7 +28,7 @@
ResourceManager::ResourceManager() {
_thread.setObjectName("Resource Manager Thread");
auto assetClient = DependencyManager::set<AssetClient>(_cacheDir);
auto assetClient = DependencyManager::set<AssetClient>();
assetClient->moveToThread(&_thread);
QObject::connect(&_thread, &QThread::started, assetClient.data(), &AssetClient::init);
@ -160,7 +160,3 @@ bool ResourceManager::resourceExists(const QUrl& url) {
return false;
}
void ResourceManager::setCacheDir(const QString& cacheDir) {
// TODO: check for existence?
_cacheDir = cacheDir;
}

View file

@ -59,7 +59,6 @@ private:
PrefixMap _prefixMap;
QMutex _prefixMapLock;
QString _cacheDir;
};
#endif

View file

@ -33,6 +33,7 @@ public:
public slots:
bool isEnabled() { return !_disabled.get(); }
bool isDisabledSettingSet() const { return _disabled.isSet(); }
void disable(bool disable);
void logAction(QString action, QJsonObject details = QJsonObject(), JSONCallbackParameters params = JSONCallbackParameters());
@ -53,7 +54,7 @@ private slots:
private:
UserActivityLogger();
Setting::Handle<bool> _disabled { "UserActivityLoggerDisabled", false };
Setting::Handle<bool> _disabled { "UserActivityLoggerDisabled", true };
QElapsedTimer _timer;
};

View file

@ -13,6 +13,8 @@
#include <gpu/Context.h>
std::string BackgroundStage::_stageName { "BACKGROUND_STAGE"};
BackgroundStage::Index BackgroundStage::findBackground(const BackgroundPointer& background) const {
auto found = _backgroundMap.find(background);
if (found != _backgroundMap.end()) {
@ -52,15 +54,13 @@ BackgroundStage::BackgroundPointer BackgroundStage::removeBackground(Index index
void DrawBackgroundStage::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) {
const auto& lightingModel = inputs;
if (!lightingModel->isBackgroundEnabled()) {
return;
}
// Background rendering decision
auto backgroundStage = DependencyManager::get<DeferredLightingEffect>()->getBackgroundStage();
auto backgroundStage = renderContext->_scene->getStage<BackgroundStage>();
model::SunSkyStagePointer background;
model::SkyboxPointer skybox;
if (backgroundStage->_currentFrame._backgrounds.size()) {
@ -68,11 +68,8 @@ void DrawBackgroundStage::run(const render::RenderContextPointer& renderContext,
auto background = backgroundStage->getBackground(backgroundId);
if (background) {
skybox = background->getSkybox();
}
} else {
skybox = DependencyManager::get<DeferredLightingEffect>()->getDefaultSkybox();
}
}
/* auto backgroundMode = skyStage->getBackgroundMode();
switch (backgroundMode) {
@ -137,4 +134,15 @@ void DrawBackgroundStage::run(const render::RenderContextPointer& renderContext,
}
*/
}
}
// Trivial constructor: this setup job carries no per-instance state;
// the actual BackgroundStage registration happens lazily in run().
BackgroundStageSetup::BackgroundStageSetup() {
}
// Ensure the scene owns a BackgroundStage, creating and registering one on
// the first invocation. Subsequent calls are no-ops once the stage exists.
void BackgroundStageSetup::run(const render::RenderContextPointer& renderContext) {
    const auto& scene = renderContext->_scene;
    // Lazily register the stage under its well-known name if absent
    if (!scene->getStage(BackgroundStage::getName())) {
        scene->resetStage(BackgroundStage::getName(), std::make_shared<BackgroundStage>());
    }
}

View file

@ -15,13 +15,17 @@
#include <set>
#include <unordered_map>
#include <render/IndexedContainer.h>
#include <render/Stage.h>
#include "LightingModel.h"
// Background stage to set up background-related rendering tasks
class BackgroundStage {
class BackgroundStage : public render::Stage {
public:
static std::string _stageName;
static const std::string& getName() { return _stageName; }
using Index = render::indexed_container::Index;
static const Index INVALID_INDEX { render::indexed_container::INVALID_INDEX };
static bool isIndexInvalid(Index index) { return index == INVALID_INDEX; }
@ -66,6 +70,15 @@ public:
};
using BackgroundStagePointer = std::shared_ptr<BackgroundStage>;
class BackgroundStageSetup {
public:
using JobModel = render::Job::Model<BackgroundStageSetup>;
BackgroundStageSetup();
void run(const render::RenderContextPointer& renderContext);
protected:
};
class DrawBackgroundStage {
public:

View file

@ -432,9 +432,9 @@ void DebugDeferredBuffer::run(const RenderContextPointer& renderContext, const I
batch.setResourceTexture(Lighting, deferredFramebuffer->getLightingTexture());
}
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
assert(deferredLightingEffect->getLightStage()->getNumLights() > 0);
auto lightAndShadow = deferredLightingEffect->getLightStage()->getLightAndShadow(0);
auto lightStage = renderContext->_scene->getStage<LightStage>();
assert(lightStage->getNumLights() > 0);
auto lightAndShadow = lightStage->getLightAndShadow(0);
const auto& globalShadow = lightAndShadow.second;
if (globalShadow) {
batch.setResourceTexture(Shadow, globalShadow->map);

View file

@ -99,75 +99,35 @@ void DeferredLightingEffect::init() {
loadLightProgram(deferred_light_vert, local_lights_shading_frag, true, _localLight, _localLightLocations);
loadLightProgram(deferred_light_vert, local_lights_drawOutline_frag, true, _localLightOutline, _localLightOutlineLocations);
// Light Stage and clusters
_lightStage = std::make_shared<LightStage>();
// Allocate a global light representing the Global Directional light casting shadow (the sun) and the ambient light
_allocatedLights.push_back(std::make_shared<model::Light>());
model::LightPointer lp = _allocatedLights[0];
lp->setType(model::Light::SUN);
lp->setDirection(glm::vec3(-1.0f));
lp->setColor(glm::vec3(1.0f));
lp->setIntensity(1.0f);
lp->setType(model::Light::SUN);
lp->setAmbientSpherePreset(gpu::SphericalHarmonics::Preset::OLD_TOWN_SQUARE);
// Add the global light to the light stage (for later shadow rendering)
_globalLights.push_back(_lightStage->addLight(lp));
_lightStage->addShadow(_globalLights[0]);
_backgroundStage = std::make_shared<BackgroundStage>();
auto textureCache = DependencyManager::get<TextureCache>();
{
PROFILE_RANGE(render, "Process Default Skybox");
auto textureCache = DependencyManager::get<TextureCache>();
auto skyboxUrl = PathUtils::resourcesPath().toStdString() + "images/Default-Sky-9-cubemap.ktx";
_defaultSkyboxTexture = gpu::Texture::unserialize(skyboxUrl);
_defaultSkyboxAmbientTexture = _defaultSkyboxTexture;
_defaultSkybox->setCubemap(_defaultSkyboxTexture);
}
lp->setAmbientIntensity(0.5f);
lp->setAmbientMap(_defaultSkyboxAmbientTexture);
auto irradianceSH = _defaultSkyboxAmbientTexture->getIrradiance();
if (irradianceSH) {
lp->setAmbientSphere((*irradianceSH));
}
}
void DeferredLightingEffect::setupKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int ambientBufferUnit, int skyboxCubemapUnit) {
void DeferredLightingEffect::setupKeyLightBatch(const RenderArgs* args, gpu::Batch& batch, int lightBufferUnit, int ambientBufferUnit, int skyboxCubemapUnit) {
PerformanceTimer perfTimer("DLE->setupBatch()");
model::LightPointer keySunLight;
if (_lightStage && _lightStage->_currentFrame._sunLights.size()) {
keySunLight = _lightStage->getLight(_lightStage->_currentFrame._sunLights.front());
} else {
keySunLight = _allocatedLights[_globalLights.front()];
auto lightStage = args->_scene->getStage<LightStage>();
if (lightStage && lightStage->_currentFrame._sunLights.size()) {
keySunLight = lightStage->getLight(lightStage->_currentFrame._sunLights.front());
}
model::LightPointer keyAmbiLight;
if (_lightStage && _lightStage->_currentFrame._ambientLights.size()) {
keyAmbiLight = _lightStage->getLight(_lightStage->_currentFrame._ambientLights.front());
} else {
keyAmbiLight = _allocatedLights[_globalLights.front()];
if (lightStage && lightStage->_currentFrame._ambientLights.size()) {
keyAmbiLight = lightStage->getLight(lightStage->_currentFrame._ambientLights.front());
}
if (lightBufferUnit >= 0) {
batch.setUniformBuffer(lightBufferUnit, keySunLight->getLightSchemaBuffer());
}
if (ambientBufferUnit >= 0) {
batch.setUniformBuffer(ambientBufferUnit, keyAmbiLight->getAmbientSchemaBuffer());
if (keySunLight) {
if (lightBufferUnit >= 0) {
batch.setUniformBuffer(lightBufferUnit, keySunLight->getLightSchemaBuffer());
}
}
if (keyAmbiLight->getAmbientMap() && (skyboxCubemapUnit >= 0)) {
batch.setResourceTexture(skyboxCubemapUnit, keyAmbiLight->getAmbientMap());
if (keyAmbiLight) {
if (ambientBufferUnit >= 0) {
batch.setUniformBuffer(ambientBufferUnit, keyAmbiLight->getAmbientSchemaBuffer());
}
if (keyAmbiLight->getAmbientMap() && (skyboxCubemapUnit >= 0)) {
batch.setResourceTexture(skyboxCubemapUnit, keyAmbiLight->getAmbientMap());
}
}
}
@ -266,21 +226,6 @@ static void loadLightProgram(const char* vertSource, const char* fragSource, boo
}
void DeferredLightingEffect::setGlobalLight(const model::LightPointer& light) {
/* auto globalLight = _allocatedLights.front();
globalLight->setDirection(light->getDirection());
globalLight->setColor(light->getColor());
globalLight->setIntensity(light->getIntensity());
globalLight->setAmbientIntensity(light->getAmbientIntensity());
globalLight->setAmbientSphere(light->getAmbientSphere());
globalLight->setAmbientMap(light->getAmbientMap());*/
}
const model::LightPointer& DeferredLightingEffect::getGlobalLight() const {
return _allocatedLights.front();
}
#include <shared/Shapes.h>
model::MeshPointer DeferredLightingEffect::getPointLightMesh() {
@ -483,8 +428,8 @@ void PrepareDeferred::run(const RenderContextPointer& renderContext, const Input
// Prepare a fresh Light Frame
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
deferredLightingEffect->getLightStage()->_currentFrame.clear();
auto lightStage = renderContext->_scene->getStage<LightStage>();
lightStage->_currentFrame.clear();
}
@ -547,8 +492,9 @@ void RenderDeferredSetup::run(const render::RenderContextPointer& renderContext,
// Global directional light and ambient pass
assert(deferredLightingEffect->getLightStage()->getNumLights() > 0);
auto lightAndShadow = deferredLightingEffect->getLightStage()->getLightAndShadow(0);
auto lightStage = renderContext->_scene->getStage<LightStage>();
assert(lightStage->getNumLights() > 0);
auto lightAndShadow = lightStage->getLightAndShadow(0);
const auto& globalShadow = lightAndShadow.second;
// Bind the shadow buffer
@ -558,7 +504,8 @@ void RenderDeferredSetup::run(const render::RenderContextPointer& renderContext,
auto& program = deferredLightingEffect->_directionalSkyboxLight;
LightLocationsPtr locations = deferredLightingEffect->_directionalSkyboxLightLocations;
const auto& keyLight = deferredLightingEffect->_allocatedLights[deferredLightingEffect->_globalLights.front()];
auto keyLight = lightStage->getLight(0);
// Setup the global directional pass pipeline
{
@ -597,7 +544,7 @@ void RenderDeferredSetup::run(const render::RenderContextPointer& renderContext,
batch._glUniform4fv(locations->texcoordFrameTransform, 1, reinterpret_cast< const float* >(&textureFrameTransform));
// Setup the global lighting
deferredLightingEffect->setupKeyLightBatch(batch, locations->lightBufferUnit, locations->ambientBufferUnit, SKYBOX_MAP_UNIT);
deferredLightingEffect->setupKeyLightBatch(args, batch, locations->lightBufferUnit, locations->ambientBufferUnit, SKYBOX_MAP_UNIT);
batch.draw(gpu::TRIANGLE_STRIP, 4);
@ -749,3 +696,66 @@ void RenderDeferred::run(const RenderContextPointer& renderContext, const Inputs
auto config = std::static_pointer_cast<Config>(renderContext->jobConfig);
config->setGPUBatchRunTime(_gpuTimer->getGPUAverage(), _gpuTimer->getBatchAverage());
}
void DefaultLightingSetup::run(const RenderContextPointer& renderContext) {
if (!_defaultLight || !_defaultBackground) {
if (!_defaultSkyboxTexture) {
auto textureCache = DependencyManager::get<TextureCache>();
{
PROFILE_RANGE(render, "Process Default Skybox");
auto textureCache = DependencyManager::get<TextureCache>();
auto skyboxUrl = PathUtils::resourcesPath().toStdString() + "images/Default-Sky-9-cubemap.ktx";
_defaultSkyboxTexture = gpu::Texture::unserialize(skyboxUrl);
_defaultSkyboxAmbientTexture = _defaultSkyboxTexture;
_defaultSkybox->setCubemap(_defaultSkyboxTexture);
}
}
auto lightStage = renderContext->_scene->getStage<LightStage>();
if (lightStage) {
// Allocate a default global light directional and ambient
auto lp = std::make_shared<model::Light>();
lp->setType(model::Light::SUN);
lp->setDirection(glm::vec3(-1.0f));
lp->setColor(glm::vec3(1.0f));
lp->setIntensity(1.0f);
lp->setType(model::Light::SUN);
lp->setAmbientSpherePreset(gpu::SphericalHarmonics::Preset::OLD_TOWN_SQUARE);
lp->setAmbientIntensity(0.5f);
lp->setAmbientMap(_defaultSkyboxAmbientTexture);
auto irradianceSH = _defaultSkyboxAmbientTexture->getIrradiance();
if (irradianceSH) {
lp->setAmbientSphere((*irradianceSH));
}
// capture default light
_defaultLight = lp;
// Add the global light to the light stage (for later shadow rendering)
_defaultLightID = lightStage->addLight(lp);
lightStage->addShadow(_defaultLightID);
}
auto backgroundStage = renderContext->_scene->getStage<BackgroundStage>();
if (backgroundStage) {
auto background = std::make_shared<model::SunSkyStage>();
background->setSkybox(_defaultSkybox);
// capture deault background
_defaultBackground = background;
// Add the global light to the light stage (for later shadow rendering)
_defaultBackgroundID = backgroundStage->addBackground(_defaultBackground);
}
}
}

View file

@ -47,30 +47,16 @@ class DeferredLightingEffect : public Dependency {
public:
void init();
void setupKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int ambientBufferUnit, int skyboxCubemapUnit);
void setupKeyLightBatch(const RenderArgs* args, gpu::Batch& batch, int lightBufferUnit, int ambientBufferUnit, int skyboxCubemapUnit);
void unsetKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int ambientBufferUnit, int skyboxCubemapUnit);
// update global lighting
void setGlobalLight(const model::LightPointer& light);
const model::LightPointer& getGlobalLight() const;
const LightStagePointer& getLightStage() { return _lightStage; }
const BackgroundStagePointer& getBackgroundStage() { return _backgroundStage; }
void setShadowMapEnabled(bool enable) { _shadowMapEnabled = enable; };
void setAmbientOcclusionEnabled(bool enable) { _ambientOcclusionEnabled = enable; }
bool isAmbientOcclusionEnabled() const { return _ambientOcclusionEnabled; }
model::SkyboxPointer getDefaultSkybox() const { return _defaultSkybox; }
gpu::TexturePointer getDefaultSkyboxTexture() const { return _defaultSkyboxTexture; }
gpu::TexturePointer getDefaultSkyboxAmbientTexture() const { return _defaultSkyboxAmbientTexture; }
private:
DeferredLightingEffect() = default;
LightStagePointer _lightStage;
BackgroundStagePointer _backgroundStage;
bool _shadowMapEnabled{ false };
bool _ambientOcclusionEnabled{ false };
@ -97,15 +83,6 @@ private:
LightLocationsPtr _localLightLocations;
LightLocationsPtr _localLightOutlineLocations;
using Lights = std::vector<model::LightPointer>;
Lights _allocatedLights;
std::vector<int> _globalLights;
model::SkyboxPointer _defaultSkybox { new ProceduralSkybox() };
gpu::TexturePointer _defaultSkyboxTexture;
gpu::TexturePointer _defaultSkyboxAmbientTexture;
friend class LightClusteringPass;
friend class RenderDeferredSetup;
friend class RenderDeferredLocals;
@ -195,6 +172,20 @@ protected:
gpu::RangeTimerPointer _gpuTimer;
};
class DefaultLightingSetup {
public:
using JobModel = render::Job::Model<DefaultLightingSetup>;
void run(const render::RenderContextPointer& renderContext);
protected:
model::LightPointer _defaultLight;
LightStage::Index _defaultLightID{ LightStage::INVALID_INDEX };
model::SunSkyStagePointer _defaultBackground;
BackgroundStage::Index _defaultBackgroundID{ BackgroundStage::INVALID_INDEX };
model::SkyboxPointer _defaultSkybox { new ProceduralSkybox() };
gpu::TexturePointer _defaultSkyboxTexture;
gpu::TexturePointer _defaultSkyboxAmbientTexture;
};
#endif // hifi_DeferredLightingEffect_h

View file

@ -18,10 +18,11 @@
<@func declareFadeFragment()@>
<@include Fade_shared.slh@>
#define EVENT_CATEGORY_COUNT 5
<@include Fade_shared.slh@>
uniform fadeParametersBuffer {
FadeParameters fadeParameters[EVENT_CATEGORY_COUNT];
};

View file

@ -1,6 +1,7 @@
#include "FadeEffect.h"
#include "TextureCache.h"
#include "render/Logging.h"
#include "render/TransitionStage.h"
#include <PathUtils.h>
#include <NumericalConstants.h>
@ -29,8 +30,8 @@ FadeConfig::FadeConfig()
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].timing = FadeConfig::LINEAR;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].baseSize = glm::vec3{ 1.0f, 1.0f, 1.0f };
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].baseLevel = 0.f;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN]._isInverted = false;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN]._duration = 4.f;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].isInverted = false;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].duration = 4.f;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].edgeWidth = 0.1f;
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].edgeInnerColor = glm::vec4{ 78.f / 255.f, 215.f / 255.f, 255.f / 255.f, 0.0f };
events[render::Transition::ELEMENT_ENTER_LEAVE_DOMAIN].edgeOuterColor = glm::vec4{ 78.f / 255.f, 215.f / 255.f, 255.f / 255.f, 1.0f };
@ -41,8 +42,8 @@ FadeConfig::FadeConfig()
events[render::Transition::BUBBLE_ISECT_OWNER].timing = FadeConfig::LINEAR;
events[render::Transition::BUBBLE_ISECT_OWNER].baseSize = glm::vec3{ 2.0f, 2.0f, 2.0f };
events[render::Transition::BUBBLE_ISECT_OWNER].baseLevel = 1.f;
events[render::Transition::BUBBLE_ISECT_OWNER]._isInverted = false;
events[render::Transition::BUBBLE_ISECT_OWNER]._duration = 4.f;
events[render::Transition::BUBBLE_ISECT_OWNER].isInverted = false;
events[render::Transition::BUBBLE_ISECT_OWNER].duration = 4.f;
events[render::Transition::BUBBLE_ISECT_OWNER].edgeWidth = 0.02f;
events[render::Transition::BUBBLE_ISECT_OWNER].edgeInnerColor = glm::vec4{ 31.f / 255.f, 198.f / 255.f, 166.f / 255.f, 1.0f };
events[render::Transition::BUBBLE_ISECT_OWNER].edgeOuterColor = glm::vec4{ 31.f / 255.f, 198.f / 255.f, 166.f / 255.f, 2.0f };
@ -53,8 +54,8 @@ FadeConfig::FadeConfig()
events[render::Transition::BUBBLE_ISECT_TRESPASSER].timing = FadeConfig::LINEAR;
events[render::Transition::BUBBLE_ISECT_TRESPASSER].baseSize = glm::vec3{ 2.0f, 2.0f, 2.0f };
events[render::Transition::BUBBLE_ISECT_TRESPASSER].baseLevel = 0.f;
events[render::Transition::BUBBLE_ISECT_TRESPASSER]._isInverted = false;
events[render::Transition::BUBBLE_ISECT_TRESPASSER]._duration = 4.f;
events[render::Transition::BUBBLE_ISECT_TRESPASSER].isInverted = false;
events[render::Transition::BUBBLE_ISECT_TRESPASSER].duration = 4.f;
events[render::Transition::BUBBLE_ISECT_TRESPASSER].edgeWidth = 0.025f;
events[render::Transition::BUBBLE_ISECT_TRESPASSER].edgeInnerColor = glm::vec4{ 31.f / 255.f, 198.f / 255.f, 166.f / 255.f, 1.0f };
events[render::Transition::BUBBLE_ISECT_TRESPASSER].edgeOuterColor = glm::vec4{ 31.f / 255.f, 198.f / 255.f, 166.f / 255.f, 2.0f };
@ -65,8 +66,8 @@ FadeConfig::FadeConfig()
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].timing = FadeConfig::LINEAR;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].baseSize = glm::vec3{ 10000.f, 1.0f, 10000.0f };
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].baseLevel = 1.f;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN]._isInverted = true;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN]._duration = 5.f;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].isInverted = true;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].duration = 5.f;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].edgeWidth = 0.229f;
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].edgeInnerColor = glm::vec4{ 78.f / 255.f, 215.f / 255.f, 255.f / 255.f, 0.25f };
events[render::Transition::USER_ENTER_LEAVE_DOMAIN].edgeOuterColor = glm::vec4{ 78.f / 255.f, 215.f / 255.f, 255.f / 255.f, 1.0f };
@ -77,27 +78,27 @@ FadeConfig::FadeConfig()
events[render::Transition::AVATAR_CHANGE].timing = FadeConfig::LINEAR;
events[render::Transition::AVATAR_CHANGE].baseSize = glm::vec3{ 0.4f, 0.4f, 0.4f };
events[render::Transition::AVATAR_CHANGE].baseLevel = 1.f;
events[render::Transition::AVATAR_CHANGE]._isInverted = false;
events[render::Transition::AVATAR_CHANGE]._duration = 3.f;
events[render::Transition::AVATAR_CHANGE].isInverted = false;
events[render::Transition::AVATAR_CHANGE].duration = 3.f;
events[render::Transition::AVATAR_CHANGE].edgeWidth = 0.05f;
events[render::Transition::AVATAR_CHANGE].edgeInnerColor = glm::vec4{ 1.0f, 1.0f, 1.0f, 1.0f };
events[render::Transition::AVATAR_CHANGE].edgeOuterColor = glm::vec4{ 1.0f, 1.0f, 1.0f, 1.0f };
}
void FadeConfig::setEditedCategory(int value) {
assert(value < EVENT_CATEGORY_COUNT);
assert(value < render::Transition::EVENT_CATEGORY_COUNT);
editedCategory = std::min<int>(render::Transition::EVENT_CATEGORY_COUNT, value);
emit dirtyCategory();
emit dirty();
}
void FadeConfig::setDuration(float value) {
events[editedCategory]._duration = value;
events[editedCategory].duration = value;
emit dirty();
}
float FadeConfig::getDuration() const {
return events[editedCategory]._duration;
return events[editedCategory].duration;
}
void FadeConfig::setBaseSizeX(float value) {
@ -133,12 +134,12 @@ void FadeConfig::setBaseLevel(float value) {
}
void FadeConfig::setInverted(bool value) {
events[editedCategory]._isInverted = value;
events[editedCategory].isInverted = value;
emit dirty();
}
bool FadeConfig::isInverted() const {
return events[editedCategory]._isInverted;
return events[editedCategory].isInverted;
}
void FadeConfig::setNoiseSizeX(float value) {
@ -264,7 +265,7 @@ QString FadeConfig::eventNames[render::Transition::EVENT_CATEGORY_COUNT] = {
};
void FadeConfig::save() const {
assert(category < EVENT_CATEGORY_COUNT);
assert(editedCategory < render::Transition::EVENT_CATEGORY_COUNT);
QJsonObject lProperties;
const QString configFile = "config/" + eventNames[editedCategory] + ".json";
QUrl path(PathUtils::resourcesPath() + configFile);
@ -282,10 +283,10 @@ void FadeConfig::save() const {
lProperties["baseSize"] = QJsonArray{ event.baseSize.x, event.baseSize.y, event.baseSize.z };
lProperties["noiseLevel"] = event.noiseLevel;
lProperties["baseLevel"] = event.baseLevel;
lProperties["duration"] = event._duration;
lProperties["duration"] = event.duration;
lProperties["edgeWidth"] = event.edgeWidth;
lProperties["timing"] = event.timing;
lProperties["isInverted"] = event._isInverted;
lProperties["isInverted"] = event.isInverted;
file.write( QJsonDocument(lProperties).toJson() );
file.close();
@ -420,7 +421,7 @@ void FadeConfig::load() {
value = jsonObject["duration"];
if (value.isDouble()) {
event._duration = (float)value.toDouble();
event.duration = (float)value.toDouble();
}
else {
qWarning() << "Fade event configuration file " << path << " contains an invalid 'duration' field. Expected float value";
@ -444,7 +445,7 @@ void FadeConfig::load() {
value = jsonObject["isInverted"];
if (value.isBool()) {
event._isInverted = value.toBool();
event.isInverted = value.toBool();
}
else {
qWarning() << "Fade event configuration file " << path << " contains an invalid 'isInverted' field. Expected boolean value";
@ -489,7 +490,7 @@ void FadeJob::configure(const Config& config) {
void FadeJob::run(const render::RenderContextPointer& renderContext) {
const Config* jobConfig = static_cast<const Config*>(renderContext->jobConfig.get());
auto scene = renderContext->args->_scene;
render::TransitionStage* transitionStage; // TODO: get transition stage
auto transitionStage = scene->getStage<render::TransitionStage>(render::TransitionStage::getName());
uint64_t now = usecTimestampNow();
const double deltaTime = (int64_t(now) - int64_t(_previousTime)) / double(USECS_PER_SECOND);
@ -498,6 +499,8 @@ void FadeJob::run(const render::RenderContextPointer& renderContext) {
auto& state = transitionStage->editTransition(transitionId);
update(*jobConfig, scene, state, deltaTime);
}
_previousTime = now;
}
void FadeJob::update(const Config& config, const render::ScenePointer& scene, render::Transition& transition, const double deltaTime) const {
@ -585,104 +588,8 @@ float FadeJob::computeElementEnterThreshold(double time, const double period, Fa
return fadeAlpha;
}
float FadeJob::computeFadePercent(quint64 startTime) {
const double time = (double)(int64_t(usecTimestampNow()) - int64_t(startTime)) / (double)(USECS_PER_SECOND);
/* assert(_currentInstance);
return _currentInstance->computeElementEnterThreshold(time,
_currentInstance->_parameters->_durations[FadeConfig::ELEMENT_ENTER_LEAVE_DOMAIN],
_currentInstance->_parameters->_timing[FadeConfig::ELEMENT_ENTER_LEAVE_DOMAIN]);*/
return (float)time;
}
void FadeJob::updateFadeEdit(const render::RenderContextPointer& renderContext, const render::ItemBound& itemBounds) {
/* if (_editPreviousTime == 0) {
_editPreviousTime = usecTimestampNow();
_editTime = 0.0;
}
uint64_t now = usecTimestampNow();
const double deltaTime = (int64_t(now) - int64_t(_editPreviousTime)) / double(USECS_PER_SECOND);
const int editedCategory = _parameters->_editedCategory;
const double eventDuration = (double)_parameters->_durations[editedCategory];
const FadeConfig::Timing timing = _parameters->_timing[editedCategory];
const double waitTime = 0.5; // Wait between fade in and out
double cycleTime = fmod(_editTime, (eventDuration + waitTime) * 2.0);
bool inverseTime = false;
_editTime += deltaTime;
_editPreviousTime = now;
if (_parameters->_isManualThresholdEnabled) {
_editThreshold = _parameters->_manualThreshold;
}
else {
if (cycleTime < eventDuration) {
_editThreshold = 1.f - computeElementEnterThreshold(cycleTime, eventDuration, timing);
}
else if (cycleTime < (eventDuration + waitTime)) {
_editThreshold = 0.f;
}
else if (cycleTime < (2 * eventDuration + waitTime)) {
_editThreshold = computeElementEnterThreshold(cycleTime - (eventDuration + waitTime), eventDuration, timing);
inverseTime = true;
}
else {
_editThreshold = 1.f;
inverseTime = true;
}
}
float threshold = _editThreshold;
if (editedCategory != FadeConfig::BUBBLE_ISECT_OWNER) {
threshold = (threshold - 0.5f)*_parameters->_thresholdScale[editedCategory] + 0.5f;
}
renderContext->jobConfig->setProperty("threshold", threshold);
_editNoiseOffset = _parameters->_noiseSpeed[editedCategory] * (float)_editTime;
if (inverseTime) {
_editNoiseOffset = -_editNoiseOffset;
}
switch (editedCategory) {
case FadeConfig::ELEMENT_ENTER_LEAVE_DOMAIN:
break;
case FadeConfig::BUBBLE_ISECT_OWNER:
{
const glm::vec3 cameraPos = renderContext->args->getViewFrustum().getPosition();
glm::vec3 delta = itemBounds.bound.calcCenter() - cameraPos;
float distance = glm::length(delta);
delta = glm::normalize(delta) * std::max(0.f, distance - 0.5f);
_editBaseOffset = cameraPos + delta*_editThreshold;
_editThreshold = 0.33f;
}
break;
case FadeConfig::BUBBLE_ISECT_TRESPASSER:
{
_editBaseOffset = glm::vec3{ 0.f, 0.f, 0.f };
}
break;
case FadeConfig::USER_ENTER_LEAVE_DOMAIN:
{
_editBaseOffset = itemBounds.bound.calcCenter();
_editBaseOffset.y -= itemBounds.bound.getDimensions().y / 2.f;
}
break;
case FadeConfig::AVATAR_CHANGE:
break;
default:
assert(false);
}*/
}
render::ShapePipeline::BatchSetter FadeJob::getBatchSetter() const {
return [this](const render::ShapePipeline& shapePipeline, gpu::Batch& batch) {
return [this](const render::ShapePipeline& shapePipeline, gpu::Batch& batch, render::Args*) {
auto program = shapePipeline.pipeline->getProgram();
auto maskMapLocation = program->getTextures().findLocation("fadeMaskMap");
auto bufferLocation = program->getUniformBuffers().findLocation("fadeParametersBuffer");
@ -696,7 +603,8 @@ render::ShapePipeline::ItemSetter FadeJob::getItemSetter() const {
if (!render::TransitionStage::isIndexInvalid(item.getTransitionId())) {
auto scene = args->_scene;
auto batch = args->_batch;
render::Transition transitionState; // TODO : get the transition state
auto transitionStage = scene->getStage<render::TransitionStage>(render::TransitionStage::getName());
render::Transition transitionState = transitionStage->getTransition(item.getTransitionId());
render::ShapeKey shapeKey(args->_globalShapeKey);
// TODO test various cases: polyvox... etc

View file

@ -179,8 +179,6 @@ public:
render::ShapePipeline::BatchSetter getBatchSetter() const;
render::ShapePipeline::ItemSetter getItemSetter() const;
static float computeFadePercent(quint64 startTime);
private:
#include "Fade_shared.slh"
@ -198,7 +196,6 @@ private:
void update(const Config& config, const render::ScenePointer& scene, render::Transition& transition, const double deltaTime) const;
float computeElementEnterThreshold(double time, const double period, FadeConfig::Timing timing) const;
void updateFadeEdit(const render::RenderContextPointer& renderContext, const render::ItemBound& itemBounds);
const render::Item* findNearestItem(const render::RenderContextPointer& renderContext, const render::Varying& input, float& minIsectDistance) const;
};

View file

@ -19,4 +19,9 @@ struct FadeParameters
VEC2 _edgeWidthInvWidth;
FLOAT32 _baseLevel;
INT32 _isInverted;
};
};
// <@if 1@>
// Trigger Scribe include
// <@endif@> <!def that !>
//

View file

@ -29,6 +29,7 @@
#include "gpu/StandardShaderLib.h"
#include "model/TextureMap.h"
#include "render/Args.h"
#include "standardTransformPNTC_vert.h"
#include "standardDrawTexture_frag.h"
@ -513,7 +514,7 @@ void GeometryCache::initializeShapePipelines() {
render::ShapePipelinePointer GeometryCache::getShapePipeline(bool textured, bool transparent, bool culled,
bool unlit, bool depthBias, bool fading) {
return std::make_shared<render::ShapePipeline>(getSimplePipeline(textured, transparent, culled, unlit, depthBias, fading), nullptr,
[](const render::ShapePipeline&, gpu::Batch& batch) {
[](const render::ShapePipeline&, gpu::Batch& batch, render::Args*) {
// Set the defaults needed for a simple program
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
@ -1971,7 +1972,7 @@ uint32_t toCompactColor(const glm::vec4& color) {
static const size_t INSTANCE_COLOR_BUFFER = 0;
void renderInstances(gpu::Batch& batch, const glm::vec4& color, bool isWire,
void renderInstances(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color, bool isWire,
const render::ShapePipelinePointer& pipeline, GeometryCache::Shape shape) {
// Add pipeline to name
std::string instanceName = (isWire ? "wire_shapes_" : "solid_shapes_") + std::to_string(shape) + "_" + std::to_string(std::hash<render::ShapePipelinePointer>()(pipeline));
@ -1984,9 +1985,9 @@ void renderInstances(gpu::Batch& batch, const glm::vec4& color, bool isWire,
}
// Add call to named buffer
batch.setupNamedCalls(instanceName, [isWire, pipeline, shape](gpu::Batch& batch, gpu::Batch::NamedBatchData& data) {
batch.setupNamedCalls(instanceName, [args, isWire, pipeline, shape](gpu::Batch& batch, gpu::Batch::NamedBatchData& data) {
batch.setPipeline(pipeline->pipeline);
pipeline->prepare(batch);
pipeline->prepare(batch, args);
if (isWire) {
DependencyManager::get<GeometryCache>()->renderWireShapeInstances(batch, shape, data.count(), data.buffers[INSTANCE_COLOR_BUFFER]);
@ -1996,32 +1997,32 @@ void renderInstances(gpu::Batch& batch, const glm::vec4& color, bool isWire,
});
}
void GeometryCache::renderSolidShapeInstance(gpu::Batch& batch, GeometryCache::Shape shape, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
void GeometryCache::renderSolidShapeInstance(RenderArgs* args, gpu::Batch& batch, GeometryCache::Shape shape, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
assert(pipeline != nullptr);
renderInstances(batch, color, false, pipeline, shape);
renderInstances(args, batch, color, false, pipeline, shape);
}
void GeometryCache::renderWireShapeInstance(gpu::Batch& batch, GeometryCache::Shape shape, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
void GeometryCache::renderWireShapeInstance(RenderArgs* args, gpu::Batch& batch, GeometryCache::Shape shape, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
assert(pipeline != nullptr);
renderInstances(batch, color, true, pipeline, shape);
renderInstances(args, batch, color, true, pipeline, shape);
}
void GeometryCache::renderSolidSphereInstance(gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
void GeometryCache::renderSolidSphereInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
assert(pipeline != nullptr);
renderInstances(batch, color, false, pipeline, GeometryCache::Sphere);
renderInstances(args, batch, color, false, pipeline, GeometryCache::Sphere);
}
void GeometryCache::renderWireSphereInstance(gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
void GeometryCache::renderWireSphereInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
assert(pipeline != nullptr);
renderInstances(batch, color, true, pipeline, GeometryCache::Sphere);
renderInstances(args, batch, color, true, pipeline, GeometryCache::Sphere);
}
// Enable this in a debug build to cause 'box' entities to iterate through all the
// available shape types, both solid and wireframes
//#define DEBUG_SHAPES
void GeometryCache::renderSolidCubeInstance(gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
void GeometryCache::renderSolidCubeInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
assert(pipeline != nullptr);
#ifdef DEBUG_SHAPES
static auto startTime = usecTimestampNow();
@ -2056,12 +2057,12 @@ void GeometryCache::renderSolidCubeInstance(gpu::Batch& batch, const glm::vec4&
}
});
#else
renderInstances(batch, color, false, pipeline, GeometryCache::Cube);
renderInstances(args, batch, color, false, pipeline, GeometryCache::Cube);
#endif
}
void GeometryCache::renderWireCubeInstance(gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
void GeometryCache::renderWireCubeInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color, const render::ShapePipelinePointer& pipeline) {
static const std::string INSTANCE_NAME = __FUNCTION__;
assert(pipeline != nullptr);
renderInstances(batch, color, true, pipeline, GeometryCache::Cube);
renderInstances(args, batch, color, true, pipeline, GeometryCache::Cube);
}

View file

@ -182,46 +182,46 @@ public:
void renderShapeInstances(gpu::Batch& batch, Shape shape, size_t count, gpu::BufferPointer& colorBuffer);
void renderWireShapeInstances(gpu::Batch& batch, Shape shape, size_t count, gpu::BufferPointer& colorBuffer);
void renderSolidShapeInstance(gpu::Batch& batch, Shape shape, const glm::vec4& color = glm::vec4(1),
void renderSolidShapeInstance(RenderArgs* args, gpu::Batch& batch, Shape shape, const glm::vec4& color = glm::vec4(1),
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline);
void renderSolidShapeInstance(gpu::Batch& batch, Shape shape, const glm::vec3& color,
void renderSolidShapeInstance(RenderArgs* args, gpu::Batch& batch, Shape shape, const glm::vec3& color,
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline) {
renderSolidShapeInstance(batch, shape, glm::vec4(color, 1.0f), pipeline);
renderSolidShapeInstance(args, batch, shape, glm::vec4(color, 1.0f), pipeline);
}
void renderWireShapeInstance(gpu::Batch& batch, Shape shape, const glm::vec4& color = glm::vec4(1),
void renderWireShapeInstance(RenderArgs* args, gpu::Batch& batch, Shape shape, const glm::vec4& color = glm::vec4(1),
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline);
void renderWireShapeInstance(gpu::Batch& batch, Shape shape, const glm::vec3& color,
void renderWireShapeInstance(RenderArgs* args, gpu::Batch& batch, Shape shape, const glm::vec3& color,
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline) {
renderWireShapeInstance(batch, shape, glm::vec4(color, 1.0f), pipeline);
renderWireShapeInstance(args, batch, shape, glm::vec4(color, 1.0f), pipeline);
}
void renderSolidSphereInstance(gpu::Batch& batch, const glm::vec4& color,
void renderSolidSphereInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color,
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline);
void renderSolidSphereInstance(gpu::Batch& batch, const glm::vec3& color,
void renderSolidSphereInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec3& color,
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline) {
renderSolidSphereInstance(batch, glm::vec4(color, 1.0f), pipeline);
renderSolidSphereInstance(args, batch, glm::vec4(color, 1.0f), pipeline);
}
void renderWireSphereInstance(gpu::Batch& batch, const glm::vec4& color,
void renderWireSphereInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color,
const render::ShapePipelinePointer& pipeline = _simpleWirePipeline);
void renderWireSphereInstance(gpu::Batch& batch, const glm::vec3& color,
void renderWireSphereInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec3& color,
const render::ShapePipelinePointer& pipeline = _simpleWirePipeline) {
renderWireSphereInstance(batch, glm::vec4(color, 1.0f), pipeline);
renderWireSphereInstance(args, batch, glm::vec4(color, 1.0f), pipeline);
}
void renderSolidCubeInstance(gpu::Batch& batch, const glm::vec4& color,
void renderSolidCubeInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color,
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline);
void renderSolidCubeInstance(gpu::Batch& batch, const glm::vec3& color,
void renderSolidCubeInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec3& color,
const render::ShapePipelinePointer& pipeline = _simpleOpaquePipeline) {
renderSolidCubeInstance(batch, glm::vec4(color, 1.0f), pipeline);
renderSolidCubeInstance(args, batch, glm::vec4(color, 1.0f), pipeline);
}
void renderWireCubeInstance(gpu::Batch& batch, const glm::vec4& color,
void renderWireCubeInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec4& color,
const render::ShapePipelinePointer& pipeline = _simpleWirePipeline);
void renderWireCubeInstance(gpu::Batch& batch, const glm::vec3& color,
void renderWireCubeInstance(RenderArgs* args, gpu::Batch& batch, const glm::vec3& color,
const render::ShapePipelinePointer& pipeline = _simpleWirePipeline) {
renderWireCubeInstance(batch, glm::vec4(color, 1.0f), pipeline);
renderWireCubeInstance(args, batch, glm::vec4(color, 1.0f), pipeline);
}
// Dynamic geometry

View file

@ -574,8 +574,7 @@ void LightClusteringPass::run(const render::RenderContextPointer& renderContext,
}
// From the LightStage and the current frame, update the light cluster Grid
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
auto lightStage = deferredLightingEffect->getLightStage();
auto lightStage = renderContext->_scene->getStage<LightStage>();
_lightClusters->updateLightStage(lightStage);
_lightClusters->updateLightFrame(lightStage->_currentFrame, lightingModel->isPointLightEnabled(), lightingModel->isSpotLightEnabled());

View file

@ -55,7 +55,7 @@ LightPayload::~LightPayload() {
void LightPayload::render(RenderArgs* args) {
if (!_stage) {
_stage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
_stage = args->_scene->getStage<LightStage>();
}
// Do we need to allocate the light in the stage ?
if (LightStage::isIndexInvalid(_index)) {
@ -123,7 +123,7 @@ KeyLightPayload::~KeyLightPayload() {
void KeyLightPayload::render(RenderArgs* args) {
if (!_stage) {
_stage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
_stage = args->_scene->getStage<LightStage>();
}
// Do we need to allocate the light in the stage ?
if (LightStage::isIndexInvalid(_index)) {

View file

@ -13,6 +13,11 @@
#include "LightStage.h"
std::string LightStage::_stageName { "LIGHT_STAGE"};
LightStage::LightStage() {
}
LightStage::Shadow::Shadow(model::LightPointer light) : _light{ light}, _frustum{ std::make_shared<ViewFrustum>() } {
framebuffer = gpu::FramebufferPointer(gpu::Framebuffer::createShadowmap(MAP_SIZE));
map = framebuffer->getDepthStencilBuffer();
@ -165,3 +170,14 @@ void LightStage::updateLightArrayBuffer(Index lightId) {
}
}
LightStageSetup::LightStageSetup() {
}
void LightStageSetup::run(const render::RenderContextPointer& renderContext) {
auto stage = renderContext->_scene->getStage(LightStage::getName());
if (!stage) {
stage = std::make_shared<LightStage>();
renderContext->_scene->resetStage(LightStage::getName(), stage);
}
}

View file

@ -14,17 +14,23 @@
#include <set>
#include <unordered_map>
#include <gpu/Framebuffer.h>
#include <model/Light.h>
#include <render/IndexedContainer.h>
#include "gpu/Framebuffer.h"
#include "model/Light.h"
#include <render/Stage.h>
#include <render/Engine.h>
class ViewFrustum;
// Light stage to set up light-related rendering tasks
class LightStage {
class LightStage : public render::Stage {
public:
static std::string _stageName;
static const std::string& getName() { return _stageName; }
using Index = render::indexed_container::Index;
static const Index INVALID_INDEX { render::indexed_container::INVALID_INDEX };
static bool isIndexInvalid(Index index) { return index == INVALID_INDEX; }
@ -93,6 +99,7 @@ public:
LightPointer getLight(Index lightId) const {
return _lights.get(lightId);
}
Index getShadowId(Index lightId) const {
if (checkLightId(lightId)) {
return _descs[lightId].shadowId;
@ -109,6 +116,7 @@ public:
return LightAndShadow(getLight(lightId), getShadow(lightId));
}
LightStage();
Lights _lights;
LightMap _lightMap;
Descs _descs;
@ -149,5 +157,15 @@ using LightStagePointer = std::shared_ptr<LightStage>;
class LightStageSetup {
public:
using JobModel = render::Job::Model<LightStageSetup>;
LightStageSetup();
void run(const render::RenderContextPointer& renderContext);
protected:
};
#endif

View file

@ -106,8 +106,8 @@ void initForwardPipelines(ShapePlumber& plumber);
void addPlumberPipeline(ShapePlumber& plumber,
const ShapeKey& key, const gpu::ShaderPointer& vertex, const gpu::ShaderPointer& pixel);
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch);
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* args);
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* args);
void initOverlay3DPipelines(ShapePlumber& plumber) {
auto vertex = gpu::Shader::createVertex(std::string(overlay3D_vert));
@ -515,7 +515,7 @@ void addPlumberPipeline(ShapePlumber& plumber,
}
}
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* args) {
// Set a default albedo map
batch.setResourceTexture(render::ShapePipeline::Slot::MAP::ALBEDO,
DependencyManager::get<TextureCache>()->getWhiteTexture());
@ -538,13 +538,13 @@ void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
}
}
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* args) {
// Set the batch
batchSetter(pipeline, batch);
batchSetter(pipeline, batch, args);
// Set the light
if (pipeline.locations->lightBufferUnit >= 0) {
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(batch,
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(args, batch,
pipeline.locations->lightBufferUnit,
pipeline.locations->lightAmbientBufferUnit,
pipeline.locations->lightAmbientMapUnit);

View file

@ -41,7 +41,7 @@ void RenderShadowMap::run(const render::RenderContextPointer& renderContext,
assert(renderContext->args);
assert(renderContext->args->hasViewFrustum());
auto lightStage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
auto lightStage = renderContext->_scene->getStage<LightStage>();
LightStage::Index globalLightIndex { 0 };
@ -162,7 +162,7 @@ void RenderShadowTask::configure(const Config& configuration) {
}
void RenderShadowSetup::run(const render::RenderContextPointer& renderContext, Output& output) {
auto lightStage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
auto lightStage = renderContext->_scene->getStage<LightStage>();
const auto globalShadow = lightStage->getShadow(0);
// Cache old render args

View file

@ -532,9 +532,10 @@ void DebugSubsurfaceScattering::run(const render::RenderContextPointer& renderCo
const auto light = DependencyManager::get<DeferredLightingEffect>()->getLightStage()->getLight(0);
auto lightStage = renderContext->_scene->getStage<LightStage>("LIGHT_STAGE");
// const auto light = DependencyManager::get<DeferredLightingEffect>()->getLightStage()->getLight(0);
const auto light = lightStage->getLight(0);
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
batch.enableStereo(false);

View file

@ -0,0 +1,28 @@
//
// UpdateSceneTask.cpp
// render-utils/src/
//
// Created by Sam Gateau on 6/21/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "UpdateSceneTask.h"
#include <render/SceneTask.h>
#include "LightStage.h"
#include "BackgroundStage.h"
#include <render/TransitionStage.h>
#include "DeferredLightingEffect.h"
void UpdateSceneTask::build(JobModel& task, const render::Varying& input, render::Varying& output) {
task.addJob<LightStageSetup>("LightStageSetup");
task.addJob<BackgroundStageSetup>("BackgroundStageSetup");
task.addJob<render::TransitionStageSetup>("TransitionStageSetup");
task.addJob<DefaultLightingSetup>("DefaultLightingSetup");
task.addJob<render::PerformSceneTransaction>("PerformSceneTransaction");
}

View file

@ -0,0 +1,30 @@
//
// UpdateSceneTask.h
// render-utils/src/
//
// Created by Sam Gateau on 6/21/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_UpdateSceneTask_h
#define hifi_UpdateSceneTask_h
#include <render/Engine.h>
#include <render/RenderFetchCullSortTask.h>
class UpdateSceneTask {
public:
using JobModel = render::Task::Model<UpdateSceneTask>;
UpdateSceneTask() {}
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs);
};
#endif // hifi_UpdateSceneTask_h

View file

@ -52,13 +52,19 @@ void ZoneRendererTask::build(JobModel& task, const Varying& input, Varying& oupu
}
void SetupZones::run(const RenderContextPointer& context, const Inputs& inputs) {
auto backgroundStage = DependencyManager::get<DeferredLightingEffect>()->getBackgroundStage();
auto backgroundStage = context->_scene->getStage<BackgroundStage>("BACKGROUND_STAGE");
backgroundStage->_currentFrame.clear();
// call render in the correct order first...
render::renderItems(context, inputs);
// Finally add the default lights and background:
auto lightStage = context->_scene->getStage<LightStage>("LIGHT_STAGE");
lightStage->_currentFrame.pushSunLight(0);
lightStage->_currentFrame.pushAmbientLight(0);
backgroundStage->_currentFrame.pushBackground(0);
}
const gpu::PipelinePointer& DebugZoneLighting::getKeyLightPipeline() {
@ -130,14 +136,13 @@ void DebugZoneLighting::run(const render::RenderContextPointer& context, const I
auto deferredTransform = inputs;
auto lightStage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
auto lightStage = context->_scene->getStage<LightStage>(LightStage::getName());
std::vector<model::LightPointer> keyLightStack;
if (lightStage && lightStage->_currentFrame._sunLights.size()) {
for (auto index : lightStage->_currentFrame._sunLights) {
keyLightStack.push_back(lightStage->getLight(index));
}
}
keyLightStack.push_back(DependencyManager::get<DeferredLightingEffect>()->getGlobalLight());
std::vector<model::LightPointer> ambientLightStack;
if (lightStage && lightStage->_currentFrame._ambientLights.size()) {
@ -145,10 +150,8 @@ void DebugZoneLighting::run(const render::RenderContextPointer& context, const I
ambientLightStack.push_back(lightStage->getLight(index));
}
}
ambientLightStack.push_back(DependencyManager::get<DeferredLightingEffect>()->getGlobalLight());
auto backgroundStage = DependencyManager::get<DeferredLightingEffect>()->getBackgroundStage();
auto backgroundStage = context->_scene->getStage<BackgroundStage>(BackgroundStage::getName());
std::vector<model::SkyboxPointer> skyboxStack;
if (backgroundStage && backgroundStage->_currentFrame._backgrounds.size()) {
for (auto index : backgroundStage->_currentFrame._backgrounds) {
@ -157,8 +160,7 @@ void DebugZoneLighting::run(const render::RenderContextPointer& context, const I
skyboxStack.push_back(background->getSkybox());
}
}
}
skyboxStack.push_back(DependencyManager::get<DeferredLightingEffect>()->getDefaultSkybox());
}
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {

View file

@ -21,13 +21,13 @@
<@include Fade.slh@>
out vec3 _color;
out float _alpha;
out vec2 _texCoord0;
out vec2 _texCoord1;
out vec4 _position;
out vec4 _worldFadePosition;
out vec3 _normal;
out vec3 _color;
void main(void) {
_color = colorToLinearRGB(inColor.xyz);

View file

@ -18,6 +18,8 @@
#include <gpu/Context.h>
#include "EngineStats.h"
#include "SceneTask.h"
#include "Logging.h"
using namespace render;

View file

@ -12,6 +12,7 @@
#ifndef hifi_render_IndexedContainer_h
#define hifi_render_IndexedContainer_h
#include <cassert>
#include <vector>
namespace render {

View file

@ -13,6 +13,8 @@
#include <numeric>
#include "gpu/Batch.h"
#include "TransitionStage.h"
using namespace render;
const Item::Status::Value Item::Status::Value::INVALID = Item::Status::Value();

View file

@ -27,10 +27,12 @@
#include "model/Material.h"
#include "ShapePipeline.h"
#include "TransitionStage.h"
namespace render {
typedef int32_t Index;
const Index INVALID_INDEX{ -1 };
class Context;
// Key is the KEY to filter Items and create specialized lists
@ -375,14 +377,14 @@ public:
// Access the status
const StatusPointer& getStatus() const { return _payload->getStatus(); }
void setTransitionId(TransitionStage::Index id) { _transitionId = id; }
TransitionStage::Index getTransitionId() const { return _transitionId; }
void setTransitionId(Index id) { _transitionId = id; }
Index getTransitionId() const { return _transitionId; }
protected:
PayloadPointer _payload;
ItemKey _key;
ItemCell _cell{ INVALID_CELL };
TransitionStage::Index _transitionId{ TransitionStage::INVALID_INDEX };
Index _transitionId{ INVALID_INDEX };
friend class Scene;
};

View file

@ -237,3 +237,26 @@ void Scene::resetSelections(const Selections& selections) {
}
}
}
// Access a particular Stage (empty if doesn't exist)
// Thread safe
StagePointer Scene::getStage(const Stage::Name& name) const {
std::unique_lock<std::mutex> lock(_stagesMutex);
auto found = _stages.find(name);
if (found == _stages.end()) {
return StagePointer();
} else {
return (*found).second;
}
}
void Scene::resetStage(const Stage::Name& name, const StagePointer& stage) {
std::unique_lock<std::mutex> lock(_stagesMutex);
auto found = _stages.find(name);
if (found == _stages.end()) {
_stages.insert(StageMap::value_type(name, stage));
} else {
(*found).second = stage;
}
}

View file

@ -14,6 +14,7 @@
#include "Item.h"
#include "SpatialTree.h"
#include "Stage.h"
#include "Selection.h"
namespace render {
@ -110,6 +111,19 @@ public:
// Access non-spatialized items (overlays, backgrounds)
const ItemIDSet& getNonspatialSet() const { return _masterNonspatialSet; }
// Access a particular Stage (empty if doesn't exist)
// Thread safe
StagePointer getStage(const Stage::Name& name) const;
template <class T>
std::shared_ptr<T> getStage(const Stage::Name& name = T::getName()) const {
auto stage = getStage(name);
return (stage ? std::static_pointer_cast<T>(stage) : std::shared_ptr<T>());
}
void resetStage(const Stage::Name& name, const StagePointer& stage);
protected:
// Thread safe elements that can be accessed from anywhere
std::atomic<unsigned int> _IDAllocator{ 1 }; // first valid itemID will be One
@ -128,7 +142,6 @@ protected:
void removeItems(const ItemIDs& ids);
void updateItems(const ItemIDs& ids, UpdateFunctors& functors);
// The Selection map
mutable std::mutex _selectionsMutex; // mutable so it can be used in the thread safe getSelection const method
SelectionMap _selections;
@ -139,6 +152,11 @@ protected:
// void appendToSelection(const Selection& selection);
// void mergeWithSelection(const Selection& selection);
// The Stage map
mutable std::mutex _stagesMutex; // mutable so it can be used in the thread safe getStage const method
StageMap _stages;
friend class Engine;
};

View file

@ -0,0 +1,21 @@
//
// SceneTask.cpp
// render/src/render
//
// Created by Sam Gateau on 6/14/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "SceneTask.h"
using namespace render;
void PerformSceneTransaction::configure(const Config& config) {
}
void PerformSceneTransaction::run(const RenderContextPointer& renderContext) {
renderContext->_scene->processTransactionQueue();
}

View file

@ -0,0 +1,41 @@
//
// SceneTask.h
// render/src/render
//
// Created by Sam Gateau on 6/14/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_render_SceneTask_h
#define hifi_render_SceneTask_h
#include "Engine.h"
namespace render {
class PerformSceneTransactionConfig : public Job::Config {
Q_OBJECT
public:
signals:
void dirty();
protected:
};
class PerformSceneTransaction {
public:
using Config = PerformSceneTransactionConfig;
using JobModel = Job::Model<PerformSceneTransaction, Config>;
void configure(const Config& config);
void run(const RenderContextPointer& renderContext);
protected:
};
}
#endif // hifi_render_SceneTask_h

View file

@ -17,9 +17,9 @@
using namespace render;
void ShapePipeline::prepare(gpu::Batch& batch) {
void ShapePipeline::prepare(gpu::Batch& batch, RenderArgs* args) {
if (_batchSetter) {
_batchSetter(*this, batch);
_batchSetter(*this, batch, args);
}
}
@ -123,14 +123,13 @@ const ShapePipelinePointer ShapePlumber::pickPipeline(RenderArgs* args, const Ke
}
PipelinePointer shapePipeline(pipelineIterator->second);
auto& batch = args->_batch;
// Setup the one pipeline (to rule them all)
batch->setPipeline(shapePipeline->pipeline);
args->_batch->setPipeline(shapePipeline->pipeline);
// Run the pipeline's BatchSetter on the passed in batch
if (shapePipeline->_batchSetter) {
shapePipeline->_batchSetter(*shapePipeline, *batch);
shapePipeline->_batchSetter(*shapePipeline, *(args->_batch), args);
}
return shapePipeline;

View file

@ -252,7 +252,7 @@ public:
};
using LocationsPointer = std::shared_ptr<Locations>;
using BatchSetter = std::function<void(const ShapePipeline&, gpu::Batch&)>;
using BatchSetter = std::function<void(const ShapePipeline&, gpu::Batch&, render::Args*)>;
using ItemSetter = std::function<void(const ShapePipeline&, render::Args*, const render::Item&)>;
@ -262,9 +262,9 @@ public:
_batchSetter(batchSetter),
_itemSetter(itemSetter) {}
// Normally, a pipeline is accessed thorugh pickPipeline. If it needs to be set manually,
// Normally, a pipeline is accessed through pickPipeline. If it needs to be set manually,
// after calling setPipeline this method should be called to prepare the pipeline with default buffers.
void prepare(gpu::Batch& batch);
void prepare(gpu::Batch& batch, Args* args);
gpu::PipelinePointer pipeline;
std::shared_ptr<Locations> locations;

View file

@ -0,0 +1,26 @@
//
// Stage.cpp
// render/src/render
//
// Created by Sam Gateau on 6/14/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Stage.h"
using namespace render;
Stage::~Stage() {
}
Stage::Stage() :
_name()
{
}

View file

@ -0,0 +1,38 @@
//
// Stage.h
// render/src/render
//
// Created by Sam Gateau on 6/14/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_render_Stage_h
#define hifi_render_Stage_h
#include <memory>
#include <map>
#include <string>
namespace render {
class Stage {
public:
using Name = std::string;
Stage();
virtual ~Stage();
protected:
Name _name;
};
using StagePointer = std::shared_ptr<Stage>;
using StageMap = std::map<const Stage::Name, StagePointer>;
}
#endif // hifi_render_Stage_h

View file

@ -8,8 +8,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_render_utils_Transition_h
#define hifi_render_utils_Transition_h
#ifndef hifi_render_Transition_h
#define hifi_render_Transition_h
#include "Item.h"
@ -43,4 +43,4 @@ namespace render {
typedef std::shared_ptr<Transition> TransitionPointer;
}
#endif
#endif // hifi_render_Transition_h

View file

@ -4,6 +4,8 @@
using namespace render;
std::string TransitionStage::_name("Transition");
TransitionStage::Index TransitionStage::addTransition(ItemID itemId, Transition::Type type) {
Transition transition;
Index id;
@ -23,3 +25,15 @@ void TransitionStage::removeTransition(Index index) {
}
_transitions.freeElement(index);
}
TransitionStageSetup::TransitionStageSetup() {
}
void TransitionStageSetup::run(const RenderContextPointer& renderContext) {
auto stage = renderContext->_scene->getStage(TransitionStage::getName());
if (!stage) {
stage = std::make_shared<TransitionStage>();
renderContext->_scene->resetStage(TransitionStage::getName(), stage);
}
}

View file

@ -11,16 +11,19 @@
#ifndef hifi_render_TransitionStage_h
#define hifi_render_TransitionStage_h
#include <model/Stage.h>
#include "Stage.h"
#include "IndexedContainer.h"
#include "Engine.h"
#include "Transition.h"
namespace render {
// Transition stage to set up Transition-related effects
class TransitionStage {
class TransitionStage : public render::Stage {
public:
static const std::string& getName() { return _name; }
using Index = indexed_container::Index;
static const Index INVALID_INDEX{ indexed_container::INVALID_INDEX };
using TransitionIdList = indexed_container::Indices;
@ -43,11 +46,23 @@ namespace render {
using Transitions = indexed_container::IndexedVector<Transition>;
static std::string _name;
Transitions _transitions;
TransitionIdList _activeTransitionIds;
};
using TransitionStagePointer = std::shared_ptr<TransitionStage>;
class TransitionStageSetup {
public:
using JobModel = render::Job::Model<TransitionStageSetup>;
TransitionStageSetup();
void run(const RenderContextPointer& renderContext);
protected:
};
}
#endif // hifi_render_TransitionStage_h

View file

@ -19,6 +19,7 @@
#include "PathUtils.h"
#include <QtCore/QStandardPaths>
#include <mutex> // std::once
#include "shared/GlobalAppProperties.h"
const QString& PathUtils::resourcesPath() {
#ifdef Q_OS_MAC
@ -34,12 +35,8 @@ QString PathUtils::getAppDataPath() {
return QStandardPaths::writableLocation(QStandardPaths::AppDataLocation) + "/";
}
QString PathUtils::getAppLocalDataPath(const QString& overridePath /* = "" */) {
static QString overriddenPath = "";
// set the overridden path if one was passed in
if (!overridePath.isEmpty()) {
overriddenPath = overridePath;
}
QString PathUtils::getAppLocalDataPath() {
QString overriddenPath = qApp->property(hifi::properties::APP_LOCAL_DATA_PATH).toString();
// return overridden path if set
if (!overriddenPath.isEmpty()) {
return overriddenPath;

View file

@ -28,7 +28,7 @@ public:
static const QString& resourcesPath();
static QString getAppDataPath();
static QString getAppLocalDataPath(const QString& overridePath = "");
static QString getAppLocalDataPath();
static QString getAppDataFilePath(const QString& filename);
static QString getAppLocalDataFilePath(const QString& filename);

View file

@ -107,6 +107,7 @@ namespace Setting {
}
bool isSet() const {
maybeInit();
return _isSet;
}

View file

@ -17,6 +17,7 @@ namespace hifi { namespace properties {
const char* TEST = "com.highfidelity.test";
const char* TRACING = "com.highfidelity.tracing";
const char* HMD = "com.highfidelity.hmd";
const char* APP_LOCAL_DATA_PATH = "com.highfidelity.appLocalDataPath";
namespace gl {
const char* BACKEND = "com.highfidelity.gl.backend";

View file

@ -19,6 +19,7 @@ namespace hifi { namespace properties {
extern const char* TEST;
extern const char* TRACING;
extern const char* HMD;
extern const char* APP_LOCAL_DATA_PATH;
namespace gl {
extern const char* BACKEND;

View file

@ -82,6 +82,12 @@ struct PoseData {
angularVelocities[i] = transformVectorFast(resetMat, toGlm(vrPoses[i].vAngularVelocity));
}
}
void resetToInvalid() {
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
vrPoses[i].bPoseIsValid = false;
}
}
};
// FIXME remove once OpenVR header is updated

View file

@ -28,6 +28,8 @@
#include <GLMHelpers.h>
#include <glm/ext.hpp>
#include <glm/gtc/quaternion.hpp>
#include <ui-plugins/PluginContainer.h>
#include <plugins/DisplayPlugin.h>
#include <controllers/UserInputMapper.h>
#include <Plugins/InputConfiguration.h>
@ -60,11 +62,6 @@ static const int SECOND_FOOT = 1;
static const int HIP = 2;
static const int CHEST = 3;
static float HEAD_PUCK_Y_OFFSET = -0.0254f;
static float HEAD_PUCK_Z_OFFSET = -0.152f;
static float HAND_PUCK_Y_OFFSET = -0.0508f;
static float HAND_PUCK_Z_OFFSET = 0.0254f;
const char* ViveControllerManager::NAME { "OpenVR" };
const std::map<vr::ETrackingResult, QString> TRACKING_RESULT_TO_STRING = {
@ -121,6 +118,29 @@ static QString deviceTrackingResultToString(vr::ETrackingResult trackingResult)
return result;
}
static glm::mat4 calculateResetMat() {
auto chaperone = vr::VRChaperone();
if (chaperone) {
float const UI_RADIUS = 1.0f;
float const UI_HEIGHT = 1.6f;
float const UI_Z_OFFSET = 0.5;
float xSize, zSize;
chaperone->GetPlayAreaSize(&xSize, &zSize);
glm::vec3 uiPos(0.0f, UI_HEIGHT, UI_RADIUS - (0.5f * zSize) - UI_Z_OFFSET);
return glm::inverse(createMatFromQuatAndPos(glm::quat(), uiPos));
}
return glm::mat4();
}
bool ViveControllerManager::isDesktopMode() {
if (_container) {
return !_container->getActiveDisplayPlugin()->isHmd();
}
return false;
}
void ViveControllerManager::calibrate() {
if (isSupported()) {
_inputDevice->calibrateNextFrame();
@ -141,13 +161,21 @@ bool ViveControllerManager::isSupported() const {
void ViveControllerManager::setConfigurationSettings(const QJsonObject configurationSettings) {
if (isSupported()) {
if (configurationSettings.contains("desktopMode")) {
_desktopMode = configurationSettings["desktopMode"].toBool();
if (!_desktopMode) {
_resetMatCalculated = false;
}
}
_inputDevice->configureCalibrationSettings(configurationSettings);
}
}
QJsonObject ViveControllerManager::configurationSettings() {
if (isSupported()) {
return _inputDevice->configurationSettings();
QJsonObject configurationSettings = _inputDevice->configurationSettings();
configurationSettings["desktopMode"] = _desktopMode;
return configurationSettings;
}
return QJsonObject();
@ -218,6 +246,18 @@ void ViveControllerManager::pluginUpdate(float deltaTime, const controller::Inpu
return;
}
if (isDesktopMode() && _desktopMode) {
if (!_resetMatCalculated) {
_resetMat = calculateResetMat();
_resetMatCalculated = true;
}
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, 0, _nextSimPoseData.vrPoses, vr::k_unMaxTrackedDeviceCount);
_nextSimPoseData.update(_resetMat);
} else if (isDesktopMode()) {
_nextSimPoseData.resetToInvalid();
}
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
handleOpenVrEvents();
if (openVrQuitRequested()) {
@ -344,8 +384,8 @@ void ViveControllerManager::InputDevice::configureCalibrationSettings(const QJso
bool overrideHead = headObject["override"].toBool();
if (overrideHead) {
_headConfig = HeadConfig::Puck;
HEAD_PUCK_Y_OFFSET = headObject["Y"].toDouble();
HEAD_PUCK_Z_OFFSET = headObject["Z"].toDouble();
_headPuckYOffset = headObject["Y"].toDouble();
_headPuckZOffset = headObject["Z"].toDouble();
} else {
_headConfig = HeadConfig::HMD;
}
@ -354,8 +394,8 @@ void ViveControllerManager::InputDevice::configureCalibrationSettings(const QJso
bool overrideHands = handsObject["override"].toBool();
if (overrideHands) {
_handConfig = HandConfig::Pucks;
HAND_PUCK_Y_OFFSET = handsObject["Y"].toDouble();
HAND_PUCK_Z_OFFSET = handsObject["Z"].toDouble();
_handPuckYOffset = handsObject["Y"].toDouble();
_handPuckZOffset = handsObject["Z"].toDouble();
} else {
_handConfig = HandConfig::HandController;
}
@ -389,7 +429,7 @@ void ViveControllerManager::InputDevice::emitCalibrationStatus() {
status["hand_pucks"] = (_handConfig == HandConfig::Pucks);
status["puckCount"] = (int)_validTrackedObjects.size();
status["UI"] = _calibrate;
emit inputConfiguration->calibrationStatus(status);
}
@ -426,7 +466,9 @@ void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceInde
// transform into avatar frame
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
_poseStateMap[poseIndex] = pose.transform(controllerToAvatar);
_validTrackedObjects.push_back(std::make_pair(poseIndex, _poseStateMap[poseIndex]));
// but _validTrackedObjects remain in sensor frame
_validTrackedObjects.push_back(std::make_pair(poseIndex, pose));
} else {
controller::Pose invalidPose;
_poseStateMap[poseIndex] = invalidPose;
@ -440,7 +482,7 @@ void ViveControllerManager::InputDevice::sendUserActivityData(QString activity)
{"head_puck", (_headConfig == HeadConfig::Puck) ? true : false},
{"hand_pucks", (_handConfig == HandConfig::Pucks) ? true : false}
};
UserActivityLogger::getInstance().logAction(activity, jsonData);
}
@ -473,12 +515,13 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr
glm::mat4 defaultToReferenceMat = glm::mat4();
if (_headConfig == HeadConfig::HMD) {
defaultToReferenceMat = calculateDefaultToReferenceForHmd(inputCalibration);
} else if (_headConfig == HeadConfig::Puck) {
} else if (_headConfig == HeadConfig::Puck) {
std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition);
defaultToReferenceMat = calculateDefaultToReferenceForHeadPuck(inputCalibration);
}
_config = _preferedConfig;
bool headConfigured = configureHead(defaultToReferenceMat, inputCalibration);
bool handsConfigured = configureHands(defaultToReferenceMat, inputCalibration);
bool bodyConfigured = configureBody(defaultToReferenceMat, inputCalibration);
@ -668,63 +711,67 @@ void ViveControllerManager::InputDevice::handleHandController(float deltaTime, u
}
}
}
// defaultToReferenceMat is an offset from avatar space to sensor space.
// it aligns the default center-eye in avatar space with the hmd in sensor space.
//
// * E_a is the the default center-of-the-eyes transform in avatar space.
// * E_s is the the hmd eye-center transform in sensor space, with roll and pitch removed.
// * D is the defaultReferenceMat.
//
// E_s = D * E_a =>
// D = E_s * inverse(E_a)
//
glm::mat4 ViveControllerManager::InputDevice::calculateDefaultToReferenceForHmd(const controller::InputCalibrationData& inputCalibration) {
// convert the hmd head from sensor space to avatar space
glm::mat4 hmdSensorFlippedMat = inputCalibration.hmdSensorMat * Matrices::Y_180;
glm::mat4 sensorToAvatarMat = glm::inverse(inputCalibration.avatarMat) * inputCalibration.sensorToWorldMat;
glm::mat4 hmdAvatarMat = sensorToAvatarMat * hmdSensorFlippedMat;
// cancel the roll and pitch for the hmd head
glm::quat hmdRotation = cancelOutRollAndPitch(glmExtractRotation(hmdAvatarMat));
glm::vec3 hmdTranslation = extractTranslation(hmdAvatarMat);
glm::mat4 currentHmd = createMatFromQuatAndPos(hmdRotation, hmdTranslation);
// the center-eye transform in avatar space.
glm::mat4 E_a = inputCalibration.defaultCenterEyeMat;
// calculate the offset from the centerOfEye to defaultHeadMat
glm::mat4 defaultHeadOffset = glm::inverse(inputCalibration.defaultCenterEyeMat) * inputCalibration.defaultHeadMat;
// the center-eye transform in sensor space.
glm::mat4 E_s = inputCalibration.hmdSensorMat * Matrices::Y_180; // the Y_180 is to convert hmd from -z forward to z forward.
glm::mat4 currentHead = currentHmd * defaultHeadOffset;
// cancel out roll and pitch on E_s
glm::quat rot = cancelOutRollAndPitch(glmExtractRotation(E_s));
glm::vec3 trans = extractTranslation(E_s);
E_s = createMatFromQuatAndPos(rot, trans);
// calculate the defaultToRefrenceXform
glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat);
return defaultToReferenceMat;
return E_s * glm::inverse(E_a);
}
// defaultToReferenceMat is an offset from avatar space to sensor space.
// It aligns the default center-of-the-eyes transform in avatar space with the head-puck in sensor space.
// The offset from the center-of-the-eyes to the head-puck can be configured via _headPuckYOffset and _headPuckZOffset,
// These values are exposed in the configuration UI.
//
// * E_a is the the default center-eye transform in avatar space.
// * E_s is the the head-puck center-eye transform in sensor space, with roll and pitch removed.
// * D is the defaultReferenceMat.
//
// E_s = D * E_a =>
// D = E_s * inverse(E_a)
//
glm::mat4 ViveControllerManager::InputDevice::calculateDefaultToReferenceForHeadPuck(const controller::InputCalibrationData& inputCalibration) {
glm::mat4 avatarToSensorMat = glm::inverse(inputCalibration.sensorToWorldMat) * inputCalibration.avatarMat;
glm::mat4 sensorToAvatarMat = glm::inverse(inputCalibration.avatarMat) * inputCalibration.sensorToWorldMat;
// the center-eye transform in avatar space.
glm::mat4 E_a = inputCalibration.defaultCenterEyeMat;
// calculate the center-eye transform in sensor space, via the head-puck
size_t headPuckIndex = _validTrackedObjects.size() - 1;
controller::Pose headPuckPose = _validTrackedObjects[headPuckIndex].second;
glm::mat4 headPuckAvatarMat = createMatFromQuatAndPos(headPuckPose.getRotation(), headPuckPose.getTranslation()) * Matrices::Y_180;
glm::vec3 headPuckTranslation = extractTranslation(headPuckAvatarMat);
glm::vec3 headPuckZAxis = cancelOutRollAndPitch(glmExtractRotation(headPuckAvatarMat)) * glm::vec3(0.0f, 0.0f, 1.0f);
glm::vec3 worldUp = glm::vec3(0.0f, 1.0f, 0.0f);
// check that the head puck z axis is not parrallel to the world up
const float EPSILON = 1.0e-4f;
glm::vec3 zAxis = glmExtractRotation(headPuckAvatarMat) * glm::vec3(0.0f, 0.0f, 1.0f);
if (fabsf(fabsf(glm::dot(glm::normalize(worldUp), glm::normalize(zAxis))) - 1.0f) < EPSILON) {
headPuckZAxis = glm::vec3(1.0f, 0.0f, 0.0f);
}
// AJT: TODO: handle case were forward is parallel with UNIT_Y.
glm::vec3 forward = headPuckPose.rotation * -Vectors::UNIT_Z;
glm::vec3 x = glm::normalize(glm::cross(Vectors::UNIT_Y, forward));
glm::vec3 z = glm::normalize(glm::cross(x, Vectors::UNIT_Y));
glm::mat3 centerEyeRotMat(x, Vectors::UNIT_Y, z);
glm::vec3 centerEyeTrans = headPuckPose.translation + centerEyeRotMat * glm::vec3(0.0f, _headPuckYOffset, _headPuckZOffset);
glm::vec3 yPrime = glm::vec3(0.0f, 1.0f, 0.0f);
glm::vec3 xPrime = glm::normalize(glm::cross(worldUp, headPuckZAxis));
glm::vec3 zPrime = glm::normalize(glm::cross(xPrime, yPrime));
glm::mat4 newHeadPuck = glm::mat4(glm::vec4(xPrime, 0.0f), glm::vec4(yPrime, 0.0f),
glm::vec4(zPrime, 0.0f), glm::vec4(headPuckTranslation, 1.0f));
glm::mat4 E_s(glm::vec4(centerEyeRotMat[0], 0.0f),
glm::vec4(centerEyeRotMat[1], 0.0f),
glm::vec4(centerEyeRotMat[2], 0.0f),
glm::vec4(centerEyeTrans, 1.0f));
glm::mat4 headPuckOffset = glm::mat4(glm::vec4(1.0f, 0.0f, 0.0f, 0.0f), glm::vec4(0.0f, 1.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 1.0f, 0.0f), glm::vec4(0.0f, HEAD_PUCK_Y_OFFSET, HEAD_PUCK_Z_OFFSET, 1.0f));
glm::mat4 finalHeadPuck = newHeadPuck * headPuckOffset;
glm::mat4 defaultHeadOffset = glm::inverse(inputCalibration.defaultCenterEyeMat) * inputCalibration.defaultHeadMat;
glm::mat4 currentHead = finalHeadPuck * defaultHeadOffset;
// calculate the defaultToRefrenceXform
glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat);
return defaultToReferenceMat;
return E_s * glm::inverse(E_a);
}
void ViveControllerManager::InputDevice::partitionTouchpad(int sButton, int xAxis, int yAxis, int centerPseudoButton, int xPseudoButton, int yPseudoButton) {
@ -912,12 +959,12 @@ void ViveControllerManager::InputDevice::calibrateLeftHand(glm::mat4& defaultToR
glm::mat4 newHandMat = glm::mat4(glm::vec4(xPrime, 0.0f), glm::vec4(yPrime, 0.0f),
glm::vec4(zPrime, 0.0f), glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::vec3 translationOffset = glm::vec3(0.0f, HAND_PUCK_Y_OFFSET, HAND_PUCK_Z_OFFSET);
glm::vec3 translationOffset = glm::vec3(0.0f, _handPuckYOffset, _handPuckZOffset);
glm::quat initialRotation = glmExtractRotation(handPoseAvatarMat);
glm::quat finalRotation = glmExtractRotation(newHandMat);
glm::quat rotationOffset = glm::inverse(initialRotation) * finalRotation;
glm::mat4 offsetMat = createMatFromQuatAndPos(rotationOffset, translationOffset);
@ -942,13 +989,13 @@ void ViveControllerManager::InputDevice::calibrateRightHand(glm::mat4& defaultTo
glm::vec3 yPrime = glm::normalize(glm::cross(zPrime, xPrime));
glm::mat4 newHandMat = glm::mat4(glm::vec4(xPrime, 0.0f), glm::vec4(yPrime, 0.0f),
glm::vec4(zPrime, 0.0f), glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::vec3 translationOffset = glm::vec3(0.0f, HAND_PUCK_Y_OFFSET, HAND_PUCK_Z_OFFSET);
glm::vec3 translationOffset = glm::vec3(0.0f, _handPuckYOffset, _handPuckZOffset);
glm::quat initialRotation = glmExtractRotation(handPoseAvatarMat);
glm::quat finalRotation = glmExtractRotation(newHandMat);
glm::quat rotationOffset = glm::inverse(initialRotation) * finalRotation;
glm::mat4 offsetMat = createMatFromQuatAndPos(rotationOffset, translationOffset);
@ -965,7 +1012,7 @@ void ViveControllerManager::InputDevice::calibrateFeet(glm::mat4& defaultToRefer
auto& secondFoot = _validTrackedObjects[SECOND_FOOT];
controller::Pose& firstFootPose = firstFoot.second;
controller::Pose& secondFootPose = secondFoot.second;
if (determineLimbOrdering(firstFootPose, secondFootPose, headXAxis, headPosition)) {
calibrateFoot(defaultToReferenceMat, inputCalibration, firstFoot, true);
calibrateFoot(defaultToReferenceMat, inputCalibration, secondFoot, false);
@ -1030,13 +1077,8 @@ void ViveControllerManager::InputDevice::calibrateShoulders(glm::mat4& defaultTo
void ViveControllerManager::InputDevice::calibrateHead(glm::mat4& defaultToReferenceMat, const controller::InputCalibrationData& inputCalibration) {
size_t headIndex = _validTrackedObjects.size() - 1;
const PuckPosePair& head = _validTrackedObjects[headIndex];
// assume the person is wearing the head puck on his/her forehead
glm::mat4 defaultHeadOffset = glm::inverse(inputCalibration.defaultCenterEyeMat) * inputCalibration.defaultHeadMat;
controller::Pose newHead = head.second.postTransform(defaultHeadOffset);
_jointToPuckMap[controller::HEAD] = head.first;
_pucksOffset[head.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHeadMat, newHead);
_pucksOffset[head.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHeadMat, head.second);
}
QString ViveControllerManager::InputDevice::configToString(Config config) {

View file

@ -152,7 +152,7 @@ private:
HandController,
Pucks
};
Config _config { Config::None };
Config _preferedConfig { Config::None };
HeadConfig _headConfig { HeadConfig::HMD };
@ -177,6 +177,10 @@ private:
float _leftHapticDuration { 0.0f };
float _rightHapticStrength { 0.0f };
float _rightHapticDuration { 0.0f };
float _headPuckYOffset { -0.05f };
float _headPuckZOffset { -0.05f };
float _handPuckYOffset { 0.0f };
float _handPuckZOffset { 0.0f };
bool _triggersPressedHandled { false };
bool _calibrated { false };
bool _timeTilCalibrationSet { false };
@ -190,9 +194,12 @@ private:
};
void renderHand(const controller::Pose& pose, gpu::Batch& batch, int sign);
bool isDesktopMode();
bool _registeredWithInputMapper { false };
bool _modelLoaded { false };
bool _resetMatCalculated { false };
bool _desktopMode { false };
glm::mat4 _resetMat { glm::mat4() };
model::Geometry _modelGeometry;
gpu::TexturePointer _texture;

View file

@ -0,0 +1,116 @@
"use strict";
//
// godView.js
// scripts/system/
//
// Created by Brad Hefta-Gaub on 1 Jun 2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* globals HMD, Script, Menu, Tablet, Camera */
/* eslint indent: ["error", 4, { "outerIIFEBody": 0 }] */
(function() { // BEGIN LOCAL_SCOPE
var godView = false; // true while the overhead "god view" camera is active
var GOD_CAMERA_OFFSET = -1; // 1 meter below the avatar
var GOD_VIEW_HEIGHT = 300; // 300 meter above the ground
var ABOVE_GROUND_DROP = 2; // meters added back when dropping out of god view
var MOVE_BY = 1; // arrow-key pan step, in meters
// Teleport the avatar to `position`; while god view is active, keep the
// overhead camera locked to the avatar's new position.
function moveTo(position) {
    MyAvatar.position = position;
    if (godView) {
        Camera.position = Vec3.sum(MyAvatar.position, { x: 0, y: GOD_CAMERA_OFFSET, z: 0 });
    }
}
// Arrow keys pan the avatar across the ground plane while in god view.
function keyPressEvent(event) {
    if (!godView) {
        return;
    }
    var panDeltas = {
        "UP":    { x: 0,        y: 0, z: -MOVE_BY },
        "DOWN":  { x: 0,        y: 0, z: MOVE_BY },
        "LEFT":  { x: -MOVE_BY, y: 0, z: 0 },
        "RIGHT": { x: MOVE_BY,  y: 0, z: 0 }
    };
    var delta = panDeltas[event.text];
    if (delta) {
        moveTo(Vec3.sum(MyAvatar.position, delta));
    }
}
// Clicking while in god view teleports the avatar to the clicked ground spot,
// preserving the avatar's current height.
function mousePress(event) {
    if (!godView) {
        return;
    }
    var PICK_DISTANCE = 300; // how far along the pick ray to project the click
    var pickRay = Camera.computePickRay(event.x, event.y);
    var pointingAt = Vec3.sum(pickRay.origin, Vec3.multiply(pickRay.direction, PICK_DISTANCE));
    moveTo({ x: pointingAt.x, y: MyAvatar.position.y, z: pointingAt.z });
}
var oldCameraMode = Camera.mode; // camera mode to restore when leaving god view

// Enter god view: raise the avatar, switch to an independent camera placed
// just below the avatar and pointed straight down, remembering the previous
// camera mode so endGodView() can restore it.
function startGodView() {
    if (godView) {
        return;
    }
    oldCameraMode = Camera.mode;
    MyAvatar.position = Vec3.sum(MyAvatar.position, { x: 0, y: GOD_VIEW_HEIGHT, z: 0 });
    Camera.mode = "independent";
    Camera.position = Vec3.sum(MyAvatar.position, { x: 0, y: GOD_CAMERA_OFFSET, z: 0 });
    Camera.orientation = Quat.fromPitchYawRollDegrees(-90, 0, 0);
    godView = true;
}
// Leave god view: restore the saved camera mode and drop the avatar back
// down, keeping it ABOVE_GROUND_DROP meters higher than where it started.
function endGodView() {
    if (!godView) {
        return;
    }
    Camera.mode = oldCameraMode;
    MyAvatar.position = Vec3.sum(MyAvatar.position, { x: 0, y: -GOD_VIEW_HEIGHT + ABOVE_GROUND_DROP, z: 0 });
    godView = false;
}
// Tablet UI handle and the toolbar button used to toggle god view.
var button;
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
// Tablet-button handler: toggles god view on and off.
function onClicked() {
    if (!godView) {
        startGodView();
    } else {
        endGodView();
    }
}
// Register the tablet button and hook up input handlers.
button = tablet.addButton({
    icon: "icons/tablet-icons/switch-desk-i.svg", // FIXME - consider a better icon from Alan
    text: "God View"
});
button.clicked.connect(onClicked);
Controller.keyPressEvent.connect(keyPressEvent);
Controller.mousePressEvent.connect(mousePress);
// On script shutdown: leave god view if active, remove the UI button, and
// detach all signal handlers so nothing dangles after unload.
Script.scriptEnding.connect(function () {
    if (godView) {
        endGodView();
    }
    button.clicked.disconnect(onClicked);
    if (tablet) {
        tablet.removeButton(button);
    }
    Controller.keyPressEvent.disconnect(keyPressEvent);
    Controller.mousePressEvent.disconnect(mousePress);
});
}()); // END LOCAL_SCOPE

View file

@ -482,10 +482,10 @@ function populateNearbyUserList(selectData, oldAudioData) {
isPresent: true,
isReplicated: avatar.isReplicated
};
// Everyone needs to see admin status. Username and fingerprint returns default constructor output if the requesting user isn't an admin.
Users.requestUsernameFromID(id);
if (id) {
addAvatarNode(id); // No overlay for ourselves
// Everyone needs to see admin status. Username and fingerprint returns default constructor output if the requesting user isn't an admin.
Users.requestUsernameFromID(id);
avatarsOfInterest[id] = true;
} else {
// Return our username from the Account API

View file

@ -77,8 +77,7 @@ void TestWindow::initGl() {
#ifdef DEFERRED_LIGHTING
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
deferredLightingEffect->init();
deferredLightingEffect->setGlobalLight(_light);
initDeferredPipelines(*_shapePlumber);
initDeferredPipelines(*_shapePlumber, nullptr, nullptr);
#endif
}

View file

@ -889,11 +889,6 @@ private:
BackgroundRenderData::_item = _main3DScene->allocateID();
transaction.resetItem(BackgroundRenderData::_item, backgroundRenderPayload);
}
// Setup the current Zone Entity lighting
{
auto stage = DependencyManager::get<SceneScriptingInterface>()->getSkyStage();
DependencyManager::get<DeferredLightingEffect>()->setGlobalLight(stage->getSunLight());
}
{
PerformanceTimer perfTimer("SceneProcessTransaction");
@ -914,8 +909,6 @@ private:
PerformanceTimer perfTimer("draw");
// The pending changes collecting the changes here
render::Transaction transaction;
// Setup the current Zone Entity lighting
DependencyManager::get<DeferredLightingEffect>()->setGlobalLight(_sunSkyStage.getSunLight());
{
PerformanceTimer perfTimer("SceneProcessTransaction");
_main3DScene->enqueueTransaction(transaction);

View file

@ -42,7 +42,6 @@ ACClientApp::ACClientApp(int argc, char* argv[]) :
const QCommandLineOption listenPortOption("listenPort", "listen port", QString::number(INVALID_PORT));
parser.addOption(listenPortOption);
if (!parser.parse(QCoreApplication::arguments())) {
qCritical() << parser.errorText() << endl;
parser.showHelp();
@ -66,6 +65,7 @@ ACClientApp::ACClientApp(int argc, char* argv[]) :
const_cast<QLoggingCategory*>(&shared())->setEnabled(QtInfoMsg, false);
const_cast<QLoggingCategory*>(&shared())->setEnabled(QtWarningMsg, false);
}
QString domainServerAddress = "127.0.0.1:40103";
if (parser.isSet(domainAddressOption)) {