Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-08-04 07:33:10 +02:00)

commit 7d34b3fe67 — Merge branch 'master' of https://github.com/highfidelity/hifi into commerce_QmlWhitelist

56 changed files with 3521 additions and 705 deletions
@@ -19,6 +19,10 @@ Documentation is available at [docs.highfidelity.com](https://docs.highfidelity.

There is also detailed [documentation on our coding standards](https://wiki.highfidelity.com/wiki/Coding_Standards).

Contributor License Agreement (CLA)
=========
Technology companies frequently receive and use code from contributors outside the company's development team. Outside code can be a tremendous resource, but it also carries responsibility. Best practice for accepting outside contributions consists of an Apache-type Contributor License Agreement (CLA). We have modeled the High Fidelity CLA after the CLA that Google presents to developers for contributions to their projects. This CLA does not transfer ownership of code, instead simply granting a non-exclusive right for High Fidelity to use the code you’ve contributed. In that regard, you should be sure you have permission if the work relates to or uses the resources of a company that you work for. You will be asked to sign our CLA when you create your first PR or when the CLA is updated. You can also [review it here](https://gist.githubusercontent.com/hifi-gustavo/fef8f06a8233d42a0040d45c3efb97a9/raw/9981827eb94f0b18666083670b6f6a02929fb402/High%2520Fidelity%2520CLA). We sincerely appreciate your contribution and efforts toward the success of the platform.

Build Instructions
=========
All information required to build is found in the [build guide](BUILD.md).
@@ -28,6 +28,10 @@
const QString ASSIGNMENT_CLIENT_MONITOR_TARGET_NAME = "assignment-client-monitor";
const int WAIT_FOR_CHILD_MSECS = 1000;

#ifdef Q_OS_WIN
HANDLE PROCESS_GROUP = createProcessGroup();
#endif

AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmentClientForks,
                                                 const unsigned int minAssignmentClientForks,
                                                 const unsigned int maxAssignmentClientForks,

@@ -202,6 +206,10 @@ void AssignmentClientMonitor::spawnChildClient() {
    assignmentClient->setProcessChannelMode(QProcess::ForwardedChannels);
    assignmentClient->start(QCoreApplication::applicationFilePath(), _childArguments);

#ifdef Q_OS_WIN
    addProcessToGroup(PROCESS_GROUP, assignmentClient->processId());
#endif

    QString stdoutPath, stderrPath;

    if (_wantsChildFileLogging) {
@@ -23,6 +23,17 @@ EntityTreeSendThread::EntityTreeSendThread(OctreeServer* myServer, const SharedN
{
    connect(std::static_pointer_cast<EntityTree>(myServer->getOctree()).get(), &EntityTree::editingEntityPointer, this, &EntityTreeSendThread::editingEntityPointer, Qt::QueuedConnection);
    connect(std::static_pointer_cast<EntityTree>(myServer->getOctree()).get(), &EntityTree::deletingEntityPointer, this, &EntityTreeSendThread::deletingEntityPointer, Qt::QueuedConnection);

    // connect to connection ID change on EntityNodeData so we can clear state for this receiver
    auto nodeData = static_cast<EntityNodeData*>(node->getLinkedData());
    connect(nodeData, &EntityNodeData::incomingConnectionIDChanged, this, &EntityTreeSendThread::resetState);
}

void EntityTreeSendThread::resetState() {
    qCDebug(entities) << "Clearing known EntityTreeSendThread state for" << _nodeUuid;

    _knownState.clear();
    _traversal.reset();
}

void EntityTreeSendThread::preDistributionProcessing() {
@@ -33,6 +33,9 @@ protected:
    void traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData,
            bool viewFrustumChanged, bool isFullScene) override;

private slots:
    void resetState(); // clears our known state forcing entities to appear unsent

private:
    // the following two methods return booleans to indicate if any extra flagged entities were new additions to set
    bool addAncestorsToExtraFlaggedEntities(const QUuid& filteredEntityID, EntityItem& entityItem, EntityNodeData& nodeData);
@@ -82,8 +82,12 @@ bool OctreeSendThread::process() {
    if (auto node = _node.lock()) {
        OctreeQueryNode* nodeData = static_cast<OctreeQueryNode*>(node->getLinkedData());

        // Sometimes the node data has not yet been linked, in which case we can't really do anything
        if (nodeData && !nodeData->isShuttingDown()) {
        // If we don't have the OctreeQueryNode at all
        // or it's uninitialized because we haven't received a query yet from the client
        // or we don't know where we should send packets for this node
        // or we're shutting down
        // then we can't send an entity data packet
        if (nodeData && nodeData->hasReceivedFirstQuery() && node->getActiveSocket() && !nodeData->isShuttingDown()) {
            bool viewFrustumChanged = nodeData->updateCurrentViewFrustum();
            packetDistributor(node, nodeData, viewFrustumChanged);
        }
@@ -59,7 +59,8 @@ protected:
    OctreePacketData _packetData;
    QWeakPointer<Node> _node;
    OctreeServer* _myServer { nullptr };

    QUuid _nodeUuid;

private:
    /// Called before a packetDistributor pass to allow for pre-distribution processing
    virtual void preDistributionProcessing() {};

@@ -71,8 +72,6 @@ private:
    virtual void preStartNewScene(OctreeQueryNode* nodeData, bool isFullScene);
    virtual bool shouldTraverseAndSend(OctreeQueryNode* nodeData) { return hasSomethingToSend(nodeData); }

    QUuid _nodeUuid;

    int _truePacketsSent { 0 }; // available for debug stats
    int _trueBytesSent { 0 }; // available for debug stats
    int _packetsSentThisInterval { 0 }; // used for bandwidth throttle condition
@@ -1,532 +0,0 @@
//
//  AddressBarDialog.qml
//
//  Created by Austin Davis on 2015/04/14
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

import Hifi 1.0
import QtQuick 2.4
import "controls"
import "styles"
import "windows"
import "hifi"
import "hifi/toolbars"
import "styles-uit" as HifiStyles
import "controls-uit" as HifiControls

Window {
    id: root
    HifiConstants { id: hifi }
    HifiStyles.HifiConstants { id: hifiStyleConstants }

    objectName: "AddressBarDialog"
    title: "Go To:"

    shown: false
    destroyOnHidden: false
    resizable: false
    pinnable: false;

    width: addressBarDialog.implicitWidth
    height: addressBarDialog.implicitHeight
    property int gap: 14

    onShownChanged: {
        addressBarDialog.keyboardEnabled = HMD.active;
        addressBarDialog.observeShownChanged(shown);
    }
    Component.onCompleted: {
        root.parentChanged.connect(center);
        center();
    }
    Component.onDestruction: {
        root.parentChanged.disconnect(center);
    }

    function center() {
        // Explicitly center in order to avoid warnings at shutdown
        anchors.centerIn = parent;
    }

    function resetAfterTeleport() {
        storyCardFrame.shown = root.shown = false;
    }
    function goCard(targetString) {
        if (0 !== targetString.indexOf('hifi://')) {
            storyCardHTML.url = addressBarDialog.metaverseServerUrl + targetString;
            storyCardFrame.shown = true;
            return;
        }
        addressLine.text = targetString;
        toggleOrGo(true);
        clearAddressLineTimer.start();
    }
    property var allStories: [];
    property int cardWidth: 212;
    property int cardHeight: 152;
    property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
    property bool isCursorVisible: false // Override default cursor visibility.

    AddressBarDialog {
        id: addressBarDialog

        property bool keyboardEnabled: false
        property bool keyboardRaised: false
        property bool punctuationMode: false

        implicitWidth: backgroundImage.width
        implicitHeight: scroll.height + gap + backgroundImage.height + (keyboardEnabled ? keyboard.height : 0);

        // The buttons have their button state changed on hover, so we have to manually fix them up here
        onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0;
        onForwardEnabledChanged: forwardArrow.buttonState = addressBarDialog.forwardEnabled ? 1 : 0;
        onReceivedHifiSchemeURL: resetAfterTeleport();

        // Update location after using back and forward buttons.
        onHostChanged: updateLocationTextTimer.start();

        ListModel { id: suggestions }

        ListView {
            id: scroll
            height: cardHeight + scroll.stackedCardShadowHeight
            property int stackedCardShadowHeight: 10;
            spacing: gap;
            clip: true;
            anchors {
                left: backgroundImage.left
                right: swipe.left
                bottom: backgroundImage.top
            }
            model: suggestions;
            orientation: ListView.Horizontal;
            delegate: Card {
                width: cardWidth;
                height: cardHeight;
                goFunction: goCard;
                userName: model.username;
                placeName: model.place_name;
                hifiUrl: model.place_name + model.path;
                thumbnail: model.thumbnail_url;
                imageUrl: model.image_url;
                action: model.action;
                timestamp: model.created_at;
                onlineUsers: model.online_users;
                storyId: model.metaverseId;
                drillDownToPlace: model.drillDownToPlace;
                shadowHeight: scroll.stackedCardShadowHeight;
                hoverThunk: function () { ListView.view.currentIndex = index; }
                unhoverThunk: function () { ListView.view.currentIndex = -1; }
            }
            highlightMoveDuration: -1;
            highlightMoveVelocity: -1;
            highlight: Rectangle { color: "transparent"; border.width: 4; border.color: hifiStyleConstants.colors.blueHighlight; z: 1; }
        }
        Image { // Just a visual indicator that the user can swipe the cards over to see more.
            id: swipe;
            source: "../images/swipe-chevron.svg";
            width: 72;
            visible: suggestions.count > 3;
            anchors {
                right: backgroundImage.right;
                top: scroll.top;
            }
            MouseArea {
                anchors.fill: parent
                onClicked: scroll.currentIndex = (scroll.currentIndex < 0) ? 3 : (scroll.currentIndex + 3)
            }
        }

        Row {
            spacing: 2 * hifi.layout.spacing;
            anchors {
                top: parent.top;
                left: parent.left;
                leftMargin: 150;
                topMargin: -30;
            }
            property var selected: allTab;
            TextButton {
                id: allTab;
                text: "ALL";
                property string includeActions: 'snapshot,concurrency';
                selected: allTab === selectedTab;
                action: tabSelect;
            }
            TextButton {
                id: placeTab;
                text: "PLACES";
                property string includeActions: 'concurrency';
                selected: placeTab === selectedTab;
                action: tabSelect;
            }
            TextButton {
                id: snapsTab;
                text: "SNAPS";
                property string includeActions: 'snapshot';
                selected: snapsTab === selectedTab;
                action: tabSelect;
            }
        }

        Image {
            id: backgroundImage
            source: "../images/address-bar-856.svg"
            width: 856
            height: 100
            anchors {
                bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom;
            }
            property int inputAreaHeight: 70
            property int inputAreaStep: (height - inputAreaHeight) / 2

            ToolbarButton {
                id: homeButton
                imageURL: "../images/home.svg"
                onClicked: {
                    addressBarDialog.loadHome();
                    root.shown = false;
                }
                anchors {
                    left: parent.left
                    leftMargin: homeButton.width / 2
                    verticalCenter: parent.verticalCenter
                }
            }

            ToolbarButton {
                id: backArrow;
                imageURL: "../images/backward.svg";
                onClicked: addressBarDialog.loadBack();
                anchors {
                    left: homeButton.right
                    verticalCenter: parent.verticalCenter
                }
            }
            ToolbarButton {
                id: forwardArrow;
                imageURL: "../images/forward.svg";
                onClicked: addressBarDialog.loadForward();
                anchors {
                    left: backArrow.right
                    verticalCenter: parent.verticalCenter
                }
            }

            HifiStyles.RalewayLight {
                id: notice;
                font.pixelSize: hifi.fonts.pixelSize * 0.50;
                anchors {
                    top: parent.top
                    topMargin: parent.inputAreaStep + 12
                    left: addressLine.left
                    right: addressLine.right
                }
            }
            HifiStyles.FiraSansRegular {
                id: location;
                font.pixelSize: addressLine.font.pixelSize;
                color: "gray";
                clip: true;
                anchors.fill: addressLine;
                visible: addressLine.text.length === 0
            }
            TextInput {
                id: addressLine
                focus: true
                anchors {
                    top: parent.top
                    bottom: parent.bottom
                    left: forwardArrow.right
                    right: parent.right
                    leftMargin: forwardArrow.width
                    rightMargin: forwardArrow.width / 2
                    topMargin: parent.inputAreaStep + (2 * hifi.layout.spacing)
                    bottomMargin: parent.inputAreaStep
                }
                font.pixelSize: hifi.fonts.pixelSize * 0.75
                cursorVisible: false
                onTextChanged: {
                    filterChoicesByText();
                    updateLocationText(text.length > 0);
                    if (!isCursorVisible && text.length > 0) {
                        isCursorVisible = true;
                        cursorVisible = true;
                    }
                }
                onActiveFocusChanged: {
                    cursorVisible = isCursorVisible && focus;
                }
                MouseArea {
                    // If user clicks in address bar show cursor to indicate ability to enter address.
                    anchors.fill: parent
                    onClicked: {
                        isCursorVisible = true;
                        parent.cursorVisible = true;
                        parent.forceActiveFocus();
                    }
                }
            }
        }

        Timer {
            // Delay updating location text a bit to avoid flicker of content and so that connection status is valid.
            id: updateLocationTextTimer
            running: false
            interval: 500 // ms
            repeat: false
            onTriggered: updateLocationText(false);
        }

        Timer {
            // Delay clearing address line so as to avoid flicker of "not connected" being displayed after entering an address.
            id: clearAddressLineTimer
            running: false
            interval: 100 // ms
            repeat: false
            onTriggered: {
                addressLine.text = "";
                isCursorVisible = false;
            }
        }

        Window {
            width: 938
            height: 625
            HifiControls.WebView {
                anchors.fill: parent;
                id: storyCardHTML;
            }
            id: storyCardFrame;

            shown: false;
            destroyOnCloseButton: false;
            pinnable: false;

            anchors {
                verticalCenter: backgroundImage.verticalCenter;
                horizontalCenter: scroll.horizontalCenter;
            }
            z: 100
        }

        HifiControls.Keyboard {
            id: keyboard
            raised: parent.keyboardEnabled // Ignore keyboardRaised; keep keyboard raised if enabled (i.e., in HMD).
            numeric: parent.punctuationMode
            anchors {
                bottom: parent.bottom
                left: parent.left
                right: parent.right
            }
        }
    }

    function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
        // TODO: make available to other .qml.
        var request = new XMLHttpRequest();
        // QT bug: apparently doesn't handle onload. Workaround using readyState.
        request.onreadystatechange = function () {
            var READY_STATE_DONE = 4;
            var HTTP_OK = 200;
            if (request.readyState >= READY_STATE_DONE) {
                var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText,
                    response = !error && request.responseText,
                    contentType = !error && request.getResponseHeader('content-type');
                if (!error && contentType.indexOf('application/json') === 0) {
                    try {
                        response = JSON.parse(response);
                    } catch (e) {
                        error = e;
                    }
                }
                cb(error, response);
            }
        };
        request.open("GET", url, true);
        request.send();
    }

    function identity(x) {
        return x;
    }

    function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey
        if (!error && (data.status === 'success')) {
            return;
        }
        if (!error) { // Create a message from the data
            error = data.status + ': ' + data.error;
        }
        if (typeof(error) === 'string') { // Make a proper Error object
            error = new Error(error);
        }
        error.message += ' in ' + url; // Include the url.
        cb(error);
        return true;
    }
    function resolveUrl(url) {
        return (url.indexOf('/') === 0) ? (addressBarDialog.metaverseServerUrl + url) : url;
    }

    function makeModelData(data) { // create a new obj from data
        // ListModel elements will only ever have those properties that are defined by the first obj that is added.
        // So here we make sure that we have all the properties we need, regardless of whether it is a place data or user story.
        var name = data.place_name,
            tags = data.tags || [data.action, data.username],
            description = data.description || "",
            thumbnail_url = data.thumbnail_url || "";
        return {
            place_name: name,
            username: data.username || "",
            path: data.path || "",
            created_at: data.created_at || "",
            action: data.action || "",
            thumbnail_url: resolveUrl(thumbnail_url),
            image_url: resolveUrl(data.details.image_url),

            metaverseId: (data.id || "").toString(), // Some are strings from server while others are numbers. Model objects require uniformity.

            tags: tags,
            description: description,
            online_users: data.details.concurrency || 0,
            drillDownToPlace: false,

            searchText: [name].concat(tags, description || []).join(' ').toUpperCase()
        }
    }
    function suggestable(place) {
        if (place.action === 'snapshot') {
            return true;
        }
        return (place.place_name !== AddressManager.placename); // Not our entry, but do show other entry points to current domain.
    }
    property var selectedTab: allTab;
    function tabSelect(textButton) {
        selectedTab = textButton;
        fillDestinations();
    }
    property var placeMap: ({});
    function addToSuggestions(place) {
        var collapse = allTab.selected && (place.action !== 'concurrency');
        if (collapse) {
            var existing = placeMap[place.place_name];
            if (existing) {
                existing.drillDownToPlace = true;
                return;
            }
        }
        suggestions.append(place);
        if (collapse) {
            placeMap[place.place_name] = suggestions.get(suggestions.count - 1);
        } else if (place.action === 'concurrency') {
            suggestions.get(suggestions.count - 1).drillDownToPlace = true; // Don't change raw place object (in allStories).
        }
    }
    property int requestId: 0;
    function getUserStoryPage(pageNumber, cb) { // cb(error) after all pages of domain data have been added to model
        var options = [
            'now=' + new Date().toISOString(),
            'include_actions=' + selectedTab.includeActions,
            'restriction=' + (Account.isLoggedIn() ? 'open,hifi' : 'open'),
            'require_online=true',
            'protocol=' + encodeURIComponent(AddressManager.protocolVersion()),
            'page=' + pageNumber
        ];
        var url = metaverseBase + 'user_stories?' + options.join('&');
        var thisRequestId = ++requestId;
        getRequest(url, function (error, data) {
            if ((thisRequestId !== requestId) || handleError(url, error, data, cb)) {
                return;
            }
            var stories = data.user_stories.map(function (story) { // explicit single-argument function
                return makeModelData(story, url);
            });
            allStories = allStories.concat(stories);
            stories.forEach(makeFilteredPlaceProcessor());
            if ((data.current_page < data.total_pages) && (data.current_page <= 10)) { // just 10 pages = 100 stories for now
                return getUserStoryPage(pageNumber + 1, cb);
            }
            cb();
        });
    }
    function makeFilteredPlaceProcessor() { // answer a function(placeData) that adds it to suggestions if it matches
        var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity),
            data = allStories;
        function matches(place) {
            if (!words.length) {
                return suggestable(place);
            }
            return words.every(function (word) {
                return place.searchText.indexOf(word) >= 0;
            });
        }
        return function (place) {
            if (matches(place)) {
                addToSuggestions(place);
            }
        };
    }
    function filterChoicesByText() {
        suggestions.clear();
        placeMap = {};
        allStories.forEach(makeFilteredPlaceProcessor());
    }

    function fillDestinations() {
        allStories = [];
        suggestions.clear();
        placeMap = {};
        getUserStoryPage(1, function (error) {
            console.log('user stories query', error || 'ok', allStories.length);
        });
    }

    function updateLocationText(enteringAddress) {
        if (enteringAddress) {
            notice.text = "Go to a place, @user, path or network address";
            notice.color = hifiStyleConstants.colors.baseGrayHighlight;
        } else {
            notice.text = AddressManager.isConnected ? "Your location:" : "Not Connected";
            notice.color = AddressManager.isConnected ? hifiStyleConstants.colors.baseGrayHighlight : hifiStyleConstants.colors.redHighlight;
            // Display hostname, which includes ip address, localhost, and other non-placenames.
            location.text = (AddressManager.placename || AddressManager.hostname || '') + (AddressManager.pathname ? AddressManager.pathname.match(/\/[^\/]+/)[0] : '');
        }
    }

    onVisibleChanged: {
        updateLocationText(false);
        if (visible) {
            addressLine.forceActiveFocus();
            fillDestinations();
        }
    }

    function toggleOrGo(fromSuggestions) {
        if (addressLine.text !== "") {
            addressBarDialog.loadAddress(addressLine.text, fromSuggestions)
        }
        root.shown = false;
    }

    Keys.onPressed: {
        switch (event.key) {
            case Qt.Key_Escape:
            case Qt.Key_Back:
                root.shown = false
                clearAddressLineTimer.start();
                event.accepted = true
                break
            case Qt.Key_Enter:
            case Qt.Key_Return:
                toggleOrGo()
                clearAddressLineTimer.start();
                event.accepted = true
                break
        }
    }
}
@@ -476,7 +476,9 @@ Rectangle {
                Commerce.buy(itemId, itemPrice, true);
            }
        } else {
            sendToScript({method: 'checkout_rezClicked', itemHref: root.itemHref, isWearable: root.isWearable});
            if (urlHandler.canHandleUrl(itemHref)) {
                urlHandler.handleUrl(itemHref);
            }
        }
    }
}

@@ -594,9 +596,7 @@ Rectangle {
        anchors.right: parent.right;
        text: root.isWearable ? "Wear It" : "Rez It"
        onClicked: {
            if (urlHandler.canHandleUrl(root.itemHref)) {
                urlHandler.handleUrl(root.itemHref);
            }
            sendToScript({method: 'checkout_rezClicked', itemHref: root.itemHref, isWearable: root.isWearable});
            rezzedNotifContainer.visible = true;
            rezzedNotifContainerTimer.start();
        }
@@ -640,7 +640,8 @@ Rectangle {
        if (purchasesModel.get(i).title.toLowerCase().indexOf(filterBar.text.toLowerCase()) !== -1) {
            if (purchasesModel.get(i).status !== "confirmed" && !root.isShowingMyItems) {
                filteredPurchasesModel.insert(0, purchasesModel.get(i));
            } else if ((root.isShowingMyItems && purchasesModel.get(i).edition_number === -1) || !root.isShowingMyItems) {
            } else if ((root.isShowingMyItems && purchasesModel.get(i).edition_number === "0") ||
                (!root.isShowingMyItems && purchasesModel.get(i).edition_number !== "0")) {
                filteredPurchasesModel.append(purchasesModel.get(i));
            }
        }
@@ -53,8 +53,6 @@ Item {
    onWalletAuthenticatedStatusResult: {
        if (isAuthenticated) {
            root.activeView = "step_4";
        } else {
            root.activeView = "step_3";
        }
    }
@@ -11,6 +11,7 @@
import Hifi 1.0
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Controls.Styles 1.4
import QtGraphicalEffects 1.0
import "../../controls"
import "../../styles"

@@ -83,7 +84,6 @@ StackView {
        anchors.centerIn = parent;
    }

    function resetAfterTeleport() {
        //storyCardFrame.shown = root.shown = false;
    }
@@ -134,7 +134,8 @@ StackView {
        bottom: parent.bottom
    }

    onHostChanged: updateLocationTextTimer.start();
    onHostChanged: updateLocationTextTimer.restart();

    Rectangle {
        id: navBar
        width: parent.width

@@ -205,16 +206,16 @@ StackView {
        anchors {
            top: parent.top;
            left: addressLineContainer.left;
            right: addressLineContainer.right;
        }
    }

    HifiStyles.FiraSansRegular {
        id: location;
        anchors {
            left: addressLineContainer.left;
            leftMargin: 8;
            verticalCenter: addressLineContainer.verticalCenter;
            left: notice.right
            leftMargin: 8
            right: addressLineContainer.right
            verticalCenter: notice.verticalCenter
        }
        font.pixelSize: addressLine.font.pixelSize;
        color: "gray";

@@ -222,7 +223,7 @@ StackView {
        visible: addressLine.text.length === 0
    }

    TextInput {
    TextField {
        id: addressLine
        width: addressLineContainer.width - addressLineContainer.anchors.leftMargin - addressLineContainer.anchors.rightMargin;
        anchors {

@@ -230,7 +231,6 @@ StackView {
            leftMargin: 8;
            verticalCenter: addressLineContainer.verticalCenter;
        }
        font.pixelSize: hifi.fonts.pixelSize * 0.75
        onTextChanged: {
            updateLocationText(text.length > 0);
        }

@@ -238,6 +238,17 @@ StackView {
            addressBarDialog.keyboardEnabled = false;
            toggleOrGo();
        }
        placeholderText: "Type domain address here"
        verticalAlignment: TextInput.AlignBottom
        style: TextFieldStyle {
            textColor: hifi.colors.text
            placeholderTextColor: "gray"
            font {
                family: hifi.fonts.fontFamily
                pixelSize: hifi.fonts.pixelSize * 0.75
            }
            background: Item {}
        }
    }

    Rectangle {

@@ -347,7 +358,7 @@ StackView {
        // Delay updating location text a bit to avoid flicker of content and so that connection status is valid.
        id: updateLocationTextTimer
        running: false
        interval: 500 // ms
        interval: 1000 // ms
        repeat: false
        onTriggered: updateLocationText(false);
    }
@@ -1392,7 +1392,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
    // Make sure we don't time out during slow operations at startup
    updateHeartbeat();

    QTimer* settingsTimer = new QTimer();
    moveToNewNamedThread(settingsTimer, "Settings Thread", [this, settingsTimer]{
        connect(qApp, &Application::beforeAboutToQuit, [this, settingsTimer]{

@@ -1700,8 +1699,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
        lastLeftHandPose = leftHandPose;
        lastRightHandPose = rightHandPose;

        properties["local_socket_changes"] = DependencyManager::get<StatTracker>()->getStat(LOCAL_SOCKET_CHANGE_STAT).toInt();

        UserActivityLogger::getInstance().logAction("stats", properties);
    });
    sendStatsTimer->start();

@@ -1825,6 +1822,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
    // Preload Tablet sounds
    DependencyManager::get<TabletScriptingInterface>()->preloadSounds();

    _pendingIdleEvent = false;
    _pendingRenderEvent = false;

    qCDebug(interfaceapp) << "Metaverse session ID is" << uuidStringWithoutCurlyBraces(accountManager->getSessionID());
}
|
|||
|
||||
void Application::reloadResourceCaches() {
|
||||
resetPhysicsReadyInformation();
|
||||
|
||||
// Query the octree to refresh everything in view
|
||||
_lastQueriedTime = 0;
|
||||
_octreeQuery.incrementConnectionID();
|
||||
|
||||
queryOctree(NodeType::EntityServer, PacketType::EntityQuery, _entityServerJurisdictions);
|
||||
|
||||
DependencyManager::get<AssetClient>()->clearCache();
|
||||
|
@ -5578,6 +5581,7 @@ void Application::nodeActivated(SharedNodePointer node) {
|
|||
// so we will do a proper query during update
|
||||
if (node->getType() == NodeType::EntityServer) {
|
||||
_lastQueriedTime = 0;
|
||||
_octreeQuery.incrementConnectionID();
|
||||
}
|
||||
|
||||
if (node->getType() == NodeType::AudioMixer) {
|
||||
|
|
|
@@ -543,7 +543,7 @@ private:
    ViewFrustum _displayViewFrustum;
    quint64 _lastQueriedTime;

    OctreeQuery _octreeQuery; // NodeData derived class for querying octree cells from octree servers
    OctreeQuery _octreeQuery { true }; // NodeData derived class for querying octree cells from octree servers

    std::shared_ptr<controller::StateController> _applicationStateDevice; // Default ApplicationDevice reflecting the state of different properties of the session
    std::shared_ptr<KeyboardMouseDevice> _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad

@@ -708,7 +708,7 @@ private:

    friend class RenderEventHandler;

    std::atomic<bool> _pendingIdleEvent { false };
    std::atomic<bool> _pendingRenderEvent { false };
    std::atomic<bool> _pendingIdleEvent { true };
    std::atomic<bool> _pendingRenderEvent { true };
};
#endif // hifi_Application_h
@@ -40,6 +40,10 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare
    _backEnabled = !(DependencyManager::get<AddressManager>()->getBackStack().isEmpty());
    _forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty());
    connect(addressManager.data(), &AddressManager::hostChanged, this, &AddressBarDialog::hostChanged);
    auto nodeList = DependencyManager::get<NodeList>();
    const DomainHandler& domainHandler = nodeList->getDomainHandler();
    connect(&domainHandler, &DomainHandler::connectedToDomain, this, &AddressBarDialog::hostChanged);
    connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &AddressBarDialog::hostChanged);
    connect(DependencyManager::get<DialogsManager>().data(), &DialogsManager::setUseFeed, this, &AddressBarDialog::setUseFeed);
    connect(qApp, &Application::receivedHifiSchemeURL, this, &AddressBarDialog::receivedHifiSchemeURL);
}
@@ -179,6 +179,11 @@ void Rig::restoreRoleAnimation(const QString& role) {
        } else {
            qCWarning(animation) << "Rig::restoreRoleAnimation could not find role " << role;
        }

        auto statesIter = _roleAnimStates.find(role);
        if (statesIter != _roleAnimStates.end()) {
            _roleAnimStates.erase(statesIter);
        }
    }
} else {
    qCWarning(animation) << "Rig::overrideRoleAnimation avatar not ready yet";
@@ -1579,7 +1579,7 @@ float Avatar::getEyeHeight() const {

    if (QThread::currentThread() != thread()) {
        float result = DEFAULT_AVATAR_EYE_HEIGHT;
        BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getHeight", Q_RETURN_ARG(float, result));
        BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getEyeHeight", Q_RETURN_ARG(float, result));
        return result;
    }
@@ -73,6 +73,8 @@ public:
    void setScanCallback(std::function<void (VisibleElement&)> cb);
    void traverse(uint64_t timeBudget);

    void reset() { _path.clear(); _completedView.startTime = 0; } // resets our state to force a new "First" traversal

private:
    void getNextVisibleElement(VisibleElement& next);
@@ -97,7 +97,8 @@ bool operator==(const Properties& a, const Properties& b) {
        (a.maxParticles == b.maxParticles) &&
        (a.emission == b.emission) &&
        (a.polar == b.polar) &&
        (a.azimuth == b.azimuth);
        (a.azimuth == b.azimuth) &&
        (a.textures == b.textures);
}

bool operator!=(const Properties& a, const Properties& b) {
@@ -297,7 +297,7 @@ void ShapeEntityItem::computeShapeInfo(ShapeInfo& info) {
    const float MIN_RELATIVE_SPHERICAL_ERROR = 0.001f;
    if (diameter > MIN_DIAMETER
            && fabsf(diameter - entityDimensions.z) / diameter < MIN_RELATIVE_SPHERICAL_ERROR) {
        _collisionShapeType = SHAPE_TYPE_SPHERE;
        _collisionShapeType = SHAPE_TYPE_CYLINDER_Y;
    } else if (hullShapeCalculator) {
        hullShapeCalculator(this, info);
        _collisionShapeType = SHAPE_TYPE_SIMPLE_HULL;
libraries/fbx/src/GLTFReader.cpp — 1380 lines (new file); file diff suppressed because it is too large
libraries/fbx/src/GLTFReader.h — 786 lines (new file)
@@ -0,0 +1,786 @@
//
//  GLTFReader.h
//  libraries/fbx/src
//
//  Created by Luis Cuenca on 8/30/17.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_GLTFReader_h
#define hifi_GLTFReader_h

#include <memory.h>
#include <QtNetwork/QNetworkReply>
#include "ModelFormatLogging.h"
#include "FBXReader.h"


struct GLTFAsset {
    QString generator;
    QString version; //required
    QString copyright;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["generator"]) {
            qCDebug(modelformat) << "generator: " << generator;
        }
        if (defined["version"]) {
            qCDebug(modelformat) << "version: " << version;
        }
        if (defined["copyright"]) {
            qCDebug(modelformat) << "copyright: " << copyright;
        }
    }
};

struct GLTFNode {
    QString name;
    int camera;
    int mesh;
    QVector<int> children;
    QVector<double> translation;
    QVector<double> rotation;
    QVector<double> scale;
    QVector<double> matrix;
    QVector<glm::mat4> transforms;
    int skin;
    QVector<int> skeletons;
    QString jointName;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["name"]) {
            qCDebug(modelformat) << "name: " << name;
        }
        if (defined["camera"]) {
            qCDebug(modelformat) << "camera: " << camera;
        }
        if (defined["mesh"]) {
            qCDebug(modelformat) << "mesh: " << mesh;
        }
        if (defined["skin"]) {
            qCDebug(modelformat) << "skin: " << skin;
        }
        if (defined["jointName"]) {
            qCDebug(modelformat) << "jointName: " << jointName;
        }
        if (defined["children"]) {
            qCDebug(modelformat) << "children: " << children;
        }
        if (defined["translation"]) {
            qCDebug(modelformat) << "translation: " << translation;
        }
        if (defined["rotation"]) {
            qCDebug(modelformat) << "rotation: " << rotation;
        }
        if (defined["scale"]) {
            qCDebug(modelformat) << "scale: " << scale;
        }
        if (defined["matrix"]) {
            qCDebug(modelformat) << "matrix: " << matrix;
        }
        if (defined["skeletons"]) {
            qCDebug(modelformat) << "skeletons: " << skeletons;
        }
    }
};
// Meshes

struct GLTFMeshPrimitivesTarget {
    int normal;
    int position;
    int tangent;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["normal"]) {
            qCDebug(modelformat) << "normal: " << normal;
        }
        if (defined["position"]) {
            qCDebug(modelformat) << "position: " << position;
        }
        if (defined["tangent"]) {
            qCDebug(modelformat) << "tangent: " << tangent;
        }
    }
};

namespace GLTFMeshPrimitivesRenderingMode {
    enum Values {
        POINTS = 0,
        LINES,
        LINE_LOOP,
        LINE_STRIP,
        TRIANGLES,
        TRIANGLE_STRIP,
        TRIANGLE_FAN
    };
}

struct GLTFMeshPrimitiveAttr {
    QMap<QString, int> values;
    QMap<QString, bool> defined;
    void dump() {
        QList<QString> keys = values.keys();
        qCDebug(modelformat) << "values: ";
        foreach(auto k, keys) {
            qCDebug(modelformat) << k << ": " << values[k];
        }
    }
};

struct GLTFMeshPrimitive {
    GLTFMeshPrimitiveAttr attributes;
    int indices;
    int material;
    int mode{ GLTFMeshPrimitivesRenderingMode::TRIANGLES };
    QVector<GLTFMeshPrimitiveAttr> targets;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["attributes"]) {
            qCDebug(modelformat) << "attributes: ";
            attributes.dump();
        }
        if (defined["indices"]) {
            qCDebug(modelformat) << "indices: " << indices;
        }
        if (defined["material"]) {
            qCDebug(modelformat) << "material: " << material;
        }
        if (defined["mode"]) {
            qCDebug(modelformat) << "mode: " << mode;
        }
        if (defined["targets"]) {
            qCDebug(modelformat) << "targets: ";
            foreach(auto t, targets) t.dump();
        }
    }
};

struct GLTFMesh {
    QString name;
    QVector<GLTFMeshPrimitive> primitives;
    QVector<double> weights;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["name"]) {
            qCDebug(modelformat) << "name: " << name;
        }
        if (defined["primitives"]) {
            qCDebug(modelformat) << "primitives: ";
            foreach(auto prim, primitives) prim.dump();
        }
        if (defined["weights"]) {
            qCDebug(modelformat) << "weights: " << weights;
        }
    }
};
// BufferViews

namespace GLTFBufferViewTarget {
    enum Values {
        ARRAY_BUFFER = 34962,
        ELEMENT_ARRAY_BUFFER = 34963
    };
}

struct GLTFBufferView {
    int buffer; //required
    int byteLength; //required
    int byteOffset;
    int target;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["buffer"]) {
            qCDebug(modelformat) << "buffer: " << buffer;
        }
        if (defined["byteLength"]) {
            qCDebug(modelformat) << "byteLength: " << byteLength;
        }
        if (defined["byteOffset"]) {
            qCDebug(modelformat) << "byteOffset: " << byteOffset;
        }
        if (defined["target"]) {
            qCDebug(modelformat) << "target: " << target;
        }
    }
};

// Buffers

struct GLTFBuffer {
    int byteLength; //required
    QString uri;
    QByteArray blob;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["byteLength"]) {
            qCDebug(modelformat) << "byteLength: " << byteLength;
        }
        if (defined["uri"]) {
            qCDebug(modelformat) << "uri: " << uri;
        }
        if (defined["blob"]) {
            qCDebug(modelformat) << "blob: " << "DEFINED";
        }
    }
};

// Samplers
namespace GLTFSamplerFilterType {
    enum Values {
        NEAREST = 9728,
        LINEAR = 9729,
        NEAREST_MIPMAP_NEAREST = 9984,
        LINEAR_MIPMAP_NEAREST = 9985,
        NEAREST_MIPMAP_LINEAR = 9986,
        LINEAR_MIPMAP_LINEAR = 9987
    };
}

namespace GLTFSamplerWrapType {
    enum Values {
        CLAMP_TO_EDGE = 33071,
        MIRRORED_REPEAT = 33648,
        REPEAT = 10497
    };
}

struct GLTFSampler {
    int magFilter;
    int minFilter;
    int wrapS;
    int wrapT;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["magFilter"]) {
            qCDebug(modelformat) << "magFilter: " << magFilter;
        }
        if (defined["minFilter"]) {
            qCDebug(modelformat) << "minFilter: " << minFilter;
        }
        if (defined["wrapS"]) {
            qCDebug(modelformat) << "wrapS: " << wrapS;
        }
        if (defined["wrapT"]) {
            qCDebug(modelformat) << "wrapT: " << wrapT;
        }
    }
};
// Cameras

struct GLTFCameraPerspective {
    double aspectRatio;
    double yfov; //required
    double zfar;
    double znear; //required
    QMap<QString, bool> defined;
    void dump() {
        if (defined["zfar"]) {
            qCDebug(modelformat) << "zfar: " << zfar;
        }
        if (defined["znear"]) {
            qCDebug(modelformat) << "znear: " << znear;
        }
        if (defined["aspectRatio"]) {
            qCDebug(modelformat) << "aspectRatio: " << aspectRatio;
        }
        if (defined["yfov"]) {
            qCDebug(modelformat) << "yfov: " << yfov;
        }
    }
};

struct GLTFCameraOrthographic {
    double zfar; //required
    double znear; //required
    double xmag; //required
    double ymag; //required
    QMap<QString, bool> defined;
    void dump() {
        if (defined["zfar"]) {
            qCDebug(modelformat) << "zfar: " << zfar;
        }
        if (defined["znear"]) {
            qCDebug(modelformat) << "znear: " << znear;
        }
        if (defined["xmag"]) {
            qCDebug(modelformat) << "xmag: " << xmag;
        }
        if (defined["ymag"]) {
            qCDebug(modelformat) << "ymag: " << ymag;
        }
    }
};

namespace GLTFCameraTypes {
    enum Values {
        ORTHOGRAPHIC = 0,
        PERSPECTIVE
    };
}

struct GLTFCamera {
    QString name;
    GLTFCameraPerspective perspective; //required (or)
    GLTFCameraOrthographic orthographic; //required (or)
    int type;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["name"]) {
            qCDebug(modelformat) << "name: " << name;
        }
        if (defined["type"]) {
            qCDebug(modelformat) << "type: " << type;
        }
        if (defined["perspective"]) {
            perspective.dump();
        }
        if (defined["orthographic"]) {
            orthographic.dump();
        }
    }
};

// Images

namespace GLTFImageMimetype {
    enum Values {
        JPEG = 0,
        PNG
    };
};

struct GLTFImage {
    QString uri; //required (or)
    int mimeType;
    int bufferView; //required (or)
    QMap<QString, bool> defined;
    void dump() {
        if (defined["uri"]) {
            qCDebug(modelformat) << "uri: " << uri;
        }
        if (defined["mimeType"]) {
            qCDebug(modelformat) << "mimeType: " << mimeType;
        }
        if (defined["bufferView"]) {
            qCDebug(modelformat) << "bufferView: " << bufferView;
        }
    }
};
// Materials

struct GLTFpbrMetallicRoughness {
    QVector<double> baseColorFactor;
    int baseColorTexture;
    int metallicRoughnessTexture;
    double metallicFactor;
    double roughnessFactor;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["baseColorFactor"]) {
            qCDebug(modelformat) << "baseColorFactor: " << baseColorFactor;
        }
        if (defined["baseColorTexture"]) {
            qCDebug(modelformat) << "baseColorTexture: " << baseColorTexture;
        }
        if (defined["metallicRoughnessTexture"]) {
            qCDebug(modelformat) << "metallicRoughnessTexture: " << metallicRoughnessTexture;
        }
        if (defined["metallicFactor"]) {
            qCDebug(modelformat) << "metallicFactor: " << metallicFactor;
        }
        if (defined["roughnessFactor"]) {
            qCDebug(modelformat) << "roughnessFactor: " << roughnessFactor;
        }
    }
};
namespace GLTFMaterialAlphaMode {
    enum Values {
        OPAQUE = 0,
        MASK,
        BLEND
    };
};

struct GLTFMaterial {
    QString name;
    QVector<double> emissiveFactor;
    int emissiveTexture;
    int normalTexture;
    int occlusionTexture;
    int alphaMode;
    double alphaCutoff;
    bool doubleSided;
    GLTFpbrMetallicRoughness pbrMetallicRoughness;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["name"]) {
            qCDebug(modelformat) << "name: " << name;
        }
        if (defined["emissiveTexture"]) {
            qCDebug(modelformat) << "emissiveTexture: " << emissiveTexture;
        }
        if (defined["normalTexture"]) {
            qCDebug(modelformat) << "normalTexture: " << normalTexture;
        }
        if (defined["occlusionTexture"]) {
            qCDebug(modelformat) << "occlusionTexture: " << occlusionTexture;
        }
        if (defined["emissiveFactor"]) {
            qCDebug(modelformat) << "emissiveFactor: " << emissiveFactor;
        }
        if (defined["pbrMetallicRoughness"]) {
            pbrMetallicRoughness.dump();
        }
    }
};
// Accessors

namespace GLTFAccessorType {
    enum Values {
        SCALAR = 0,
        VEC2,
        VEC3,
        VEC4,
        MAT2,
        MAT3,
        MAT4
    };
}
namespace GLTFAccessorComponentType {
    enum Values {
        BYTE = 5120,
        UNSIGNED_BYTE = 5121,
        SHORT = 5122,
        UNSIGNED_SHORT = 5123,
        UNSIGNED_INT = 5125,
        FLOAT = 5126
    };
}
struct GLTFAccessor {
    int bufferView;
    int byteOffset;
    int componentType; //required
    int count; //required
    int type; //required
    bool normalized{ false };
    QVector<double> max;
    QVector<double> min;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["bufferView"]) {
            qCDebug(modelformat) << "bufferView: " << bufferView;
        }
        if (defined["byteOffset"]) {
            qCDebug(modelformat) << "byteOffset: " << byteOffset;
        }
        if (defined["componentType"]) {
            qCDebug(modelformat) << "componentType: " << componentType;
        }
        if (defined["count"]) {
            qCDebug(modelformat) << "count: " << count;
        }
        if (defined["type"]) {
            qCDebug(modelformat) << "type: " << type;
        }
        if (defined["normalized"]) {
            qCDebug(modelformat) << "normalized: " << (normalized ? "TRUE" : "FALSE");
        }
        if (defined["max"]) {
            qCDebug(modelformat) << "max: ";
            foreach(float m, max) {
                qCDebug(modelformat) << m;
            }
        }
        if (defined["min"]) {
            qCDebug(modelformat) << "min: ";
            foreach(float m, min) {
                qCDebug(modelformat) << m;
            }
        }
    }
};
// Animation

namespace GLTFChannelTargetPath {
    enum Values {
        TRANSLATION = 0,
        ROTATION,
        SCALE
    };
}

struct GLTFChannelTarget {
    int node;
    int path;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["node"]) {
            qCDebug(modelformat) << "node: " << node;
        }
        if (defined["path"]) {
            qCDebug(modelformat) << "path: " << path;
        }
    }
};

struct GLTFChannel {
    int sampler;
    GLTFChannelTarget target;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["sampler"]) {
            qCDebug(modelformat) << "sampler: " << sampler;
        }
        if (defined["target"]) {
            target.dump();
        }
    }
};

namespace GLTFAnimationSamplerInterpolation {
    enum Values{
        LINEAR = 0
    };
}

struct GLTFAnimationSampler {
    int input;
    int output;
    int interpolation;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["input"]) {
            qCDebug(modelformat) << "input: " << input;
        }
        if (defined["output"]) {
            qCDebug(modelformat) << "output: " << output;
        }
        if (defined["interpolation"]) {
            qCDebug(modelformat) << "interpolation: " << interpolation;
        }
    }
};

struct GLTFAnimation {
    QVector<GLTFChannel> channels;
    QVector<GLTFAnimationSampler> samplers;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["channels"]) {
            foreach(auto channel, channels) channel.dump();
        }
        if (defined["samplers"]) {
            foreach(auto sampler, samplers) sampler.dump();
        }
    }
};

struct GLTFScene {
    QString name;
    QVector<int> nodes;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["name"]) {
            qCDebug(modelformat) << "name: " << name;
        }
        if (defined["nodes"]) {
            qCDebug(modelformat) << "nodes: ";
            foreach(int node, nodes) qCDebug(modelformat) << node;
        }
    }
};
struct GLTFSkin {
    int inverseBindMatrices;
    QVector<int> joints;
    int skeleton;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["inverseBindMatrices"]) {
            qCDebug(modelformat) << "inverseBindMatrices: " << inverseBindMatrices;
        }
        if (defined["skeleton"]) {
            qCDebug(modelformat) << "skeleton: " << skeleton;
        }
        if (defined["joints"]) {
            qCDebug(modelformat) << "joints: ";
            foreach(int joint, joints) qCDebug(modelformat) << joint;
        }
    }
};

struct GLTFTexture {
    int sampler;
    int source;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["sampler"]) {
            qCDebug(modelformat) << "sampler: " << sampler;
        }
        if (defined["source"]) {
            qCDebug(modelformat) << "source: " << source;
        }
    }
};
struct GLTFFile {
    GLTFAsset asset;
    int scene = 0;
    QVector<GLTFAccessor> accessors;
    QVector<GLTFAnimation> animations;
    QVector<GLTFBufferView> bufferviews;
    QVector<GLTFBuffer> buffers;
    QVector<GLTFCamera> cameras;
    QVector<GLTFImage> images;
    QVector<GLTFMaterial> materials;
    QVector<GLTFMesh> meshes;
    QVector<GLTFNode> nodes;
    QVector<GLTFSampler> samplers;
    QVector<GLTFScene> scenes;
    QVector<GLTFSkin> skins;
    QVector<GLTFTexture> textures;
    QMap<QString, bool> defined;
    void dump() {
        if (defined["asset"]) {
            asset.dump();
        }
        if (defined["scene"]) {
            qCDebug(modelformat) << "scene: " << scene;
        }
        if (defined["accessors"]) {
            foreach(auto acc, accessors) acc.dump();
        }
        if (defined["animations"]) {
            foreach(auto ani, animations) ani.dump();
        }
        if (defined["bufferviews"]) {
            foreach(auto bv, bufferviews) bv.dump();
        }
        if (defined["buffers"]) {
            foreach(auto b, buffers) b.dump();
        }
        if (defined["cameras"]) {
            foreach(auto c, cameras) c.dump();
        }
        if (defined["images"]) {
            foreach(auto i, images) i.dump();
        }
        if (defined["materials"]) {
            foreach(auto mat, materials) mat.dump();
        }
        if (defined["meshes"]) {
            foreach(auto mes, meshes) mes.dump();
        }
        if (defined["nodes"]) {
            foreach(auto nod, nodes) nod.dump();
        }
        if (defined["samplers"]) {
            foreach(auto sa, samplers) sa.dump();
        }
        if (defined["scenes"]) {
            foreach(auto sc, scenes) sc.dump();
        }
        if (defined["skins"]) {
            foreach(auto sk, skins) sk.dump();
        }
        if (defined["textures"]) {
            foreach(auto tex, textures) tex.dump();
        }
    }
};
class GLTFReader : public QObject {
    Q_OBJECT
public:
    GLTFReader();
    FBXGeometry* readGLTF(QByteArray& model, const QVariantHash& mapping,
                          const QUrl& url, bool loadLightmaps = true, float lightmapLevel = 1.0f);
private:
    GLTFFile _file;
    QUrl _url;

    glm::mat4 getModelTransform(const GLTFNode& node);

    bool buildGeometry(FBXGeometry& geometry, const QUrl& url);
    bool parseGLTF(const QByteArray& model);

    bool getStringVal(const QJsonObject& object, const QString& fieldname,
                      QString& value, QMap<QString, bool>& defined);
    bool getBoolVal(const QJsonObject& object, const QString& fieldname,
                    bool& value, QMap<QString, bool>& defined);
    bool getIntVal(const QJsonObject& object, const QString& fieldname,
                   int& value, QMap<QString, bool>& defined);
    bool getDoubleVal(const QJsonObject& object, const QString& fieldname,
                      double& value, QMap<QString, bool>& defined);
    bool getObjectVal(const QJsonObject& object, const QString& fieldname,
                      QJsonObject& value, QMap<QString, bool>& defined);
    bool getIntArrayVal(const QJsonObject& object, const QString& fieldname,
                        QVector<int>& values, QMap<QString, bool>& defined);
    bool getDoubleArrayVal(const QJsonObject& object, const QString& fieldname,
                           QVector<double>& values, QMap<QString, bool>& defined);
    bool getObjectArrayVal(const QJsonObject& object, const QString& fieldname,
                           QJsonArray& objects, QMap<QString, bool>& defined);

    int getMaterialAlphaMode(const QString& type);
    int getAccessorType(const QString& type);
    int getAnimationSamplerInterpolation(const QString& interpolation);
    int getCameraType(const QString& type);
    int getImageMimeType(const QString& mime);
    int getMeshPrimitiveRenderingMode(const QString& type);

    bool getIndexFromObject(const QJsonObject& object, const QString& field,
                            int& outidx, QMap<QString, bool>& defined);

    bool setAsset(const QJsonObject& object);
    bool addAccessor(const QJsonObject& object);
    bool addAnimation(const QJsonObject& object);
    bool addBufferView(const QJsonObject& object);
    bool addBuffer(const QJsonObject& object);
    bool addCamera(const QJsonObject& object);
    bool addImage(const QJsonObject& object);
    bool addMaterial(const QJsonObject& object);
    bool addMesh(const QJsonObject& object);
    bool addNode(const QJsonObject& object);
    bool addSampler(const QJsonObject& object);
    bool addScene(const QJsonObject& object);
    bool addSkin(const QJsonObject& object);
    bool addTexture(const QJsonObject& object);

    bool readBinary(const QString& url, QByteArray& outdata);

    template<typename T, typename L>
    bool readArray(const QByteArray& bin, int byteOffset, int byteLength,
                   QVector<L>& outarray, int accessorType);

    template<typename T>
    bool addArrayOfType(const QByteArray& bin, int byteOffset, int byteLength,
                        QVector<T>& outarray, int accessorType, int componentType);

    void retriangulate(const QVector<int>& in_indices, const QVector<glm::vec3>& in_vertices,
                       const QVector<glm::vec3>& in_normals, QVector<int>& out_indices,
                       QVector<glm::vec3>& out_vertices, QVector<glm::vec3>& out_normals);

    std::tuple<bool, QByteArray> requestData(QUrl& url);
    QNetworkReply* request(QUrl& url, bool isTest);
    bool doesResourceExist(const QString& url);


    void setFBXMaterial(FBXMaterial& fbxmat, const GLTFMaterial& material);
    FBXTexture getFBXTexture(const GLTFTexture& texture);
    void fbxDebugDump(const FBXGeometry& fbxgeo);
};

#endif // hifi_GLTFReader_h
|
|
@ -14,6 +14,7 @@
#include <FSTReader.h>
#include "FBXReader.h"
#include "OBJReader.h"
#include "GLTFReader.h"

#include <gpu/Batch.h>
#include <gpu/Stream.h>

@ -175,9 +176,12 @@ void GeometryReader::run() {

    QString urlname = _url.path().toLower();
    if (!urlname.isEmpty() && !_url.path().isEmpty() &&
        (_url.path().toLower().endsWith(".fbx") ||
         _url.path().toLower().endsWith(".obj") ||
         _url.path().toLower().endsWith(".obj.gz"))) {
        (_url.path().toLower().endsWith(".fbx") ||
         _url.path().toLower().endsWith(".obj") ||
         _url.path().toLower().endsWith(".obj.gz") ||
         _url.path().toLower().endsWith(".gltf"))) {

        FBXGeometry::Pointer fbxGeometry;

        if (_url.path().toLower().endsWith(".fbx")) {

@ -189,12 +193,18 @@ void GeometryReader::run() {
            fbxGeometry.reset(OBJReader().readOBJ(_data, _mapping, _combineParts, _url));
        } else if (_url.path().toLower().endsWith(".obj.gz")) {
            QByteArray uncompressedData;
            if (gunzip(_data, uncompressedData)){
            if (gunzip(_data, uncompressedData)) {
                fbxGeometry.reset(OBJReader().readOBJ(uncompressedData, _mapping, _combineParts, _url));
            } else {
                throw QString("failed to decompress .obj.gz" );
                throw QString("failed to decompress .obj.gz");
            }

        } else if (_url.path().toLower().endsWith(".gltf")) {
            std::shared_ptr<GLTFReader> glreader = std::make_shared<GLTFReader>();
            fbxGeometry.reset(glreader->readGLTF(_data, _mapping, _url));
            if (fbxGeometry->meshes.size() == 0 && fbxGeometry->joints.size() == 0) {
                throw QString("empty geometry, possibly due to an unsupported GLTF version");
            }
        } else {
            throw QString("unsupported format");
        }

@ -27,7 +27,6 @@
#include <NumericalConstants.h>
#include <SettingHandle.h>
#include <SharedUtil.h>
#include <StatTracker.h>
#include <UUID.h>

#include "AccountManager.h"

@ -430,7 +429,7 @@ qint64 LimitedNodeList::sendPacket(std::unique_ptr<NLPacket> packet, const HifiS
    }
}

qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& destinationNode) {
qint64 LimitedNodeList::sendUnreliableUnorderedPacketList(NLPacketList& packetList, const Node& destinationNode) {
    auto activeSocket = destinationNode.getActiveSocket();

    if (activeSocket) {

@ -453,8 +452,8 @@ qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& des
    }
}

qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
                                       const QUuid& connectionSecret) {
qint64 LimitedNodeList::sendUnreliableUnorderedPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
                                                          const QUuid& connectionSecret) {
    qint64 bytesSent = 0;

    // close the last packet in the list

@ -1110,7 +1109,6 @@ void LimitedNodeList::setLocalSocket(const HifiSockAddr& sockAddr) {
        qCInfo(networking) << "Local socket is" << sockAddr;
    } else {
        qCInfo(networking) << "Local socket has changed from" << _localSockAddr << "to" << sockAddr;
        DependencyManager::get<StatTracker>()->incrementStat(LOCAL_SOCKET_CHANGE_STAT);
    }

    _localSockAddr = sockAddr;

@ -66,8 +66,6 @@ const QHostAddress DEFAULT_ASSIGNMENT_CLIENT_MONITOR_HOSTNAME = QHostAddress::Lo

const QString USERNAME_UUID_REPLACEMENT_STATS_KEY = "$username";

const QString LOCAL_SOCKET_CHANGE_STAT = "LocalSocketChanges";

typedef std::pair<QUuid, SharedNodePointer> UUIDNodePair;
typedef tbb::concurrent_unordered_map<QUuid, SharedNodePointer, UUIDHasher> NodeHash;

@ -126,17 +124,25 @@ public:

    PacketReceiver& getPacketReceiver() { return *_packetReceiver; }

    // use sendUnreliablePacket to send an unreliable packet (that you do not need to move)
    // either to a node (via its active socket) or to a manual sockaddr
    qint64 sendUnreliablePacket(const NLPacket& packet, const Node& destinationNode);
    qint64 sendUnreliablePacket(const NLPacket& packet, const HifiSockAddr& sockAddr,
                                const QUuid& connectionSecret = QUuid());

    // use sendPacket to send a moved unreliable or reliable NL packet to a node's active socket or manual sockaddr
    qint64 sendPacket(std::unique_ptr<NLPacket> packet, const Node& destinationNode);
    qint64 sendPacket(std::unique_ptr<NLPacket> packet, const HifiSockAddr& sockAddr,
                      const QUuid& connectionSecret = QUuid());

    qint64 sendPacketList(NLPacketList& packetList, const Node& destinationNode);
    qint64 sendPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
    // use sendUnreliableUnorderedPacketList to unreliably send separate packets from the packet list
    // either to a node's active socket or to a manual sockaddr
    qint64 sendUnreliableUnorderedPacketList(NLPacketList& packetList, const Node& destinationNode);
    qint64 sendUnreliableUnorderedPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
                                             const QUuid& connectionSecret = QUuid());

    // use sendPacketList to send reliable packet lists (ordered or unordered) to a node's active socket
    // or to a manual sock addr
    qint64 sendPacketList(std::unique_ptr<NLPacketList> packetList, const HifiSockAddr& sockAddr);
    qint64 sendPacketList(std::unique_ptr<NLPacketList> packetList, const Node& destinationNode);

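The renamed helpers make the delivery semantics explicit at the call site. A minimal usage sketch (the node-list handle, packet list, and node are assumed context, not part of this change):

    // Sketch only: 'packetList' is a filled NLPacketList and 'node' a
    // SharedNodePointer obtained elsewhere (hypothetical setup).
    auto nodeList = DependencyManager::get<NodeList>();

    // Unreliable: each packet in the list goes out separately, with no
    // ordering or delivery guarantee.
    nodeList->sendUnreliableUnorderedPacketList(*packetList, *node);

    // Reliable (ordered or unordered, as the list was created): ownership of
    // the list moves into the node list's send queue.
    nodeList->sendPacketList(std::move(packetList), *node);
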
@ -33,7 +33,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
            return static_cast<PacketVersion>(EntityVersion::HazeEffect);

        case PacketType::EntityQuery:
            return static_cast<PacketVersion>(EntityQueryPacketVersion::JSONFilterWithFamilyTree);
            return static_cast<PacketVersion>(EntityQueryPacketVersion::ConnectionIdentifier);
        case PacketType::AvatarIdentity:
        case PacketType::AvatarData:
        case PacketType::BulkAvatarData:

@ -209,7 +209,8 @@ enum class EntityScriptCallMethodVersion : PacketVersion {

enum class EntityQueryPacketVersion: PacketVersion {
    JSONFilter = 18,
    JSONFilterWithFamilyTree = 19
    JSONFilterWithFamilyTree = 19,
    ConnectionIdentifier = 20
};

enum class AssetServerPacketVersion: PacketVersion {

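Bumping the EntityQuery version to ConnectionIdentifier is what tells the server whether an incoming query carries the new 16-bit connection ID. A purely hypothetical server-side guard would look roughly like this (the 'message' variable and its version accessor are assumptions, not from this change):

    // Sketch only: gate the extra field on the negotiated packet version.
    PacketVersion version = message.getVersion();   // assumed accessor
    if (version >= static_cast<PacketVersion>(EntityQueryPacketVersion::ConnectionIdentifier)) {
        // the payload now starts with a uint16_t connection ID
    }
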
@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <random>

#include <QtCore/QJsonDocument>

#include <GLMHelpers.h>

@ -22,7 +24,7 @@ const float DEFAULT_ASPECT_RATIO = 1.0f;
const float DEFAULT_NEAR_CLIP = 0.1f;
const float DEFAULT_FAR_CLIP = 3.0f;

OctreeQuery::OctreeQuery() :
OctreeQuery::OctreeQuery(bool randomizeConnectionID) :
    _cameraFov(DEFAULT_FOV),
    _cameraAspectRatio(DEFAULT_ASPECT_RATIO),
    _cameraNearClip(DEFAULT_NEAR_CLIP),

@ -30,10 +32,21 @@ OctreeQuery::OctreeQuery() :
    _cameraCenterRadius(DEFAULT_FAR_CLIP)
{
    _maxQueryPPS = DEFAULT_MAX_OCTREE_PPS;

    if (randomizeConnectionID) {
        // randomize our initial octree query connection ID using random_device
        // the connection ID is 16 bits so we take a generated 32 bit value from random device and chop off the top
        std::random_device randomDevice;
        _connectionID = randomDevice();
    }
}

int OctreeQuery::getBroadcastData(unsigned char* destinationBuffer) {
    unsigned char* bufferStart = destinationBuffer;

    // pack the connection ID so the server can detect when we start a new connection
    memcpy(destinationBuffer, &_connectionID, sizeof(_connectionID));
    destinationBuffer += sizeof(_connectionID);

    // pack a boolean (cut to 1 byte) to designate if this query uses the sent view frustum
    memcpy(destinationBuffer, &_usesFrustum, sizeof(_usesFrustum));

@ -98,7 +111,27 @@ int OctreeQuery::parseData(ReceivedMessage& message) {

    const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(message.getRawMessage());
    const unsigned char* sourceBuffer = startPosition;


    // unpack the connection ID
    uint16_t newConnectionID;
    memcpy(&newConnectionID, sourceBuffer, sizeof(newConnectionID));
    sourceBuffer += sizeof(newConnectionID);

    if (!_hasReceivedFirstQuery) {
        // set our flag to indicate that we've parsed for this query at least once
        _hasReceivedFirstQuery = true;

        // set the incoming connection ID as the current
        _connectionID = newConnectionID;
    } else {
        if (newConnectionID != _connectionID) {
            // the connection ID has changed - emit our signal so the server
            // knows that the client is starting a new session
            _connectionID = newConnectionID;
            emit incomingConnectionIDChanged();
        }
    }

    // check if this query uses a view frustum
    memcpy(&_usesFrustum, sourceBuffer, sizeof(_usesFrustum));
    sourceBuffer += sizeof(_usesFrustum);

@ -27,7 +27,7 @@ class OctreeQuery : public NodeData {
    Q_OBJECT

public:
    OctreeQuery();
    OctreeQuery(bool randomizeConnectionID = false);
    virtual ~OctreeQuery() {}

    int getBroadcastData(unsigned char* destinationBuffer);

@ -68,6 +68,13 @@ public:
    bool getUsesFrustum() { return _usesFrustum; }
    void setUsesFrustum(bool usesFrustum) { _usesFrustum = usesFrustum; }

    void incrementConnectionID() { ++_connectionID; }

    bool hasReceivedFirstQuery() const { return _hasReceivedFirstQuery; }

signals:
    void incomingConnectionIDChanged();

public slots:
    void setMaxQueryPacketsPerSecond(int maxQueryPPS) { _maxQueryPPS = maxQueryPPS; }
    void setOctreeSizeScale(float octreeSizeScale) { _octreeElementSizeScale = octreeSizeScale; }

@ -90,9 +97,12 @@ protected:
    int _boundaryLevelAdjust = 0; /// used for LOD calculations

    uint8_t _usesFrustum = true;
    uint16_t _connectionID; // query connection ID, randomized to start, increments with each new connection to server

    QJsonObject _jsonParameters;
    QReadWriteLock _jsonParametersLock;

    bool _hasReceivedFirstQuery { false };

private:
    // privatize the copy constructor and assignment operator so they cannot be called

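The randomize-then-increment scheme gives each octree session a distinct identity: the client seeds the ID once at construction and can later bump it to make the server treat it as a brand-new connection. A sketch of the intended flow (the call sites are assumptions, not part of this change):

    // Client side: seed the ID from std::random_device once.
    OctreeQuery query(true);   // randomizeConnectionID = true

    // Later, e.g. to force the server to rebuild its per-client traversal
    // state: the next query packet carries a different ID, the server's
    // parseData() sees the mismatch and emits incomingConnectionIDChanged().
    query.incrementConnectionID();
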
@ -18,7 +18,6 @@
#include <SharedUtil.h>
#include <UUID.h>


void OctreeQueryNode::nodeKilled() {
    _isShuttingDown = true;
}

27 libraries/render-utils/src/BloomApply.slf Normal file
@ -0,0 +1,27 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// BloomApply.slf
// Mix the three gaussian blur textures.
//
// Created by Olivier Prat on 10/09/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

uniform sampler2D blurMap0;
uniform sampler2D blurMap1;
uniform sampler2D blurMap2;
uniform float intensity;

in vec2 varTexCoord0;
out vec4 outFragColor;

void main(void) {
    vec4 blur0 = texture(blurMap0, varTexCoord0);
    vec4 blur1 = texture(blurMap1, varTexCoord0);
    vec4 blur2 = texture(blurMap2, varTexCoord0);

    outFragColor = vec4((blur0.rgb+blur1.rgb+blur2.rgb)*intensity, 1.0f);
}
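Since the shader simply sums the three blur levels, the host divides the user-facing intensity by three before uploading it (see batch._glUniform1f(INTENSITY_SLOT, _intensity / 3.0f) in BloomApply::run below), so the displayed result is the average of the levels scaled by the configured intensity:

    \[ \mathrm{out}_{rgb} = \frac{I}{3}\,(b_0 + b_1 + b_2) \]

where \(I\) is the configured intensity and \(b_0, b_1, b_2\) are the three blurred levels.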
359 libraries/render-utils/src/BloomEffect.cpp Normal file
@ -0,0 +1,359 @@
//
// BloomEffect.cpp
// render-utils/src/
//
// Created by Olivier Prat on 09/25/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "BloomEffect.h"

#include "gpu/Context.h"
#include "gpu/StandardShaderLib.h"

#include <render/BlurTask.h>
#include <render/ResampleTask.h>

#include "BloomThreshold_frag.h"
#include "BloomApply_frag.h"

#define BLOOM_BLUR_LEVEL_COUNT 3

BloomThreshold::BloomThreshold(unsigned int downsamplingFactor) :
    _downsamplingFactor(downsamplingFactor) {
    assert(downsamplingFactor > 0);
}

void BloomThreshold::configure(const Config& config) {
    _threshold = config.threshold;
}

void BloomThreshold::run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());

    RenderArgs* args = renderContext->args;

    const auto frameTransform = inputs.get0();
    const auto inputFrameBuffer = inputs.get1();

    assert(inputFrameBuffer->hasColor());

    auto inputBuffer = inputFrameBuffer->getRenderBuffer(0);
    auto bufferSize = gpu::Vec2u(inputBuffer->getDimensions());

    // Downsample resolution
    bufferSize.x /= _downsamplingFactor;
    bufferSize.y /= _downsamplingFactor;

    if (!_outputBuffer || _outputBuffer->getSize() != bufferSize) {
        auto colorTexture = gpu::TexturePointer(gpu::Texture::createRenderBuffer(inputBuffer->getTexelFormat(), bufferSize.x, bufferSize.y,
                                                gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));

        _outputBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("BloomThreshold"));
        _outputBuffer->setRenderBuffer(0, colorTexture);
    }

    static const int COLOR_MAP_SLOT = 0;
    static const int THRESHOLD_SLOT = 1;

    if (!_pipeline) {
        auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS();
        auto ps = gpu::Shader::createPixel(std::string(BloomThreshold_frag));
        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding("colorMap", COLOR_MAP_SLOT));
        slotBindings.insert(gpu::Shader::Binding("threshold", THRESHOLD_SLOT));
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        _pipeline = gpu::Pipeline::create(program, state);
    }

    glm::ivec4 viewport{ 0, 0, bufferSize.x, bufferSize.y };

    gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
        batch.enableStereo(false);

        batch.setViewportTransform(viewport);
        batch.setProjectionTransform(glm::mat4());
        batch.resetViewTransform();
        batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(bufferSize, viewport));
        batch.setPipeline(_pipeline);

        batch.setFramebuffer(_outputBuffer);
        batch.setResourceTexture(COLOR_MAP_SLOT, inputBuffer);
        batch._glUniform1f(THRESHOLD_SLOT, _threshold);
        batch.draw(gpu::TRIANGLE_STRIP, 4);
    });

    outputs = _outputBuffer;
}

BloomApply::BloomApply() {

}

void BloomApply::configure(const Config& config) {
    _intensity = config.intensity;
}

void BloomApply::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());
    RenderArgs* args = renderContext->args;

    static auto BLUR0_SLOT = 0;
    static auto BLUR1_SLOT = 1;
    static auto BLUR2_SLOT = 2;
    static auto INTENSITY_SLOT = 3;

    if (!_pipeline) {
        auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS();
        auto ps = gpu::Shader::createPixel(std::string(BloomApply_frag));
        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding("blurMap0", BLUR0_SLOT));
        slotBindings.insert(gpu::Shader::Binding("blurMap1", BLUR1_SLOT));
        slotBindings.insert(gpu::Shader::Binding("blurMap2", BLUR2_SLOT));
        slotBindings.insert(gpu::Shader::Binding("intensity", INTENSITY_SLOT));
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        state->setDepthTest(gpu::State::DepthTest(false, false));
        _pipeline = gpu::Pipeline::create(program, state);
    }

    const auto frameBuffer = inputs.get0();
    const auto framebufferSize = frameBuffer->getSize();
    const auto blur0FB = inputs.get1();
    const auto blur1FB = inputs.get2();
    const auto blur2FB = inputs.get3();
    const glm::ivec4 viewport{ 0, 0, framebufferSize.x, framebufferSize.y };

    gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
        batch.enableStereo(false);

        batch.setFramebuffer(frameBuffer);

        batch.setViewportTransform(viewport);
        batch.setProjectionTransform(glm::mat4());
        batch.resetViewTransform();
        batch.setPipeline(_pipeline);

        batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, viewport));
        batch.setResourceTexture(BLUR0_SLOT, blur0FB->getRenderBuffer(0));
        batch.setResourceTexture(BLUR1_SLOT, blur1FB->getRenderBuffer(0));
        batch.setResourceTexture(BLUR2_SLOT, blur2FB->getRenderBuffer(0));
        batch._glUniform1f(INTENSITY_SLOT, _intensity / 3.0f);
        batch.draw(gpu::TRIANGLE_STRIP, 4);
    });
}

void BloomDraw::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());
    RenderArgs* args = renderContext->args;

    const auto frameBuffer = inputs.get0();
    const auto bloomFrameBuffer = inputs.get1();

    if (frameBuffer && bloomFrameBuffer) {
        const auto framebufferSize = frameBuffer->getSize();

        if (!_pipeline) {
            auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS();
            auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS();
            gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

            gpu::Shader::BindingSet slotBindings;
            gpu::Shader::makeProgram(*program, slotBindings);

            gpu::StatePointer state = gpu::StatePointer(new gpu::State());
            state->setDepthTest(gpu::State::DepthTest(false, false));
            state->setBlendFunction(true, gpu::State::ONE, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
                                    gpu::State::ZERO, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
            _pipeline = gpu::Pipeline::create(program, state);
        }

        gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
            batch.enableStereo(false);

            batch.setFramebuffer(frameBuffer);

            batch.setViewportTransform(args->_viewport);
            batch.setProjectionTransform(glm::mat4());
            batch.resetViewTransform();
            batch.setPipeline(_pipeline);

            batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport));
            batch.setResourceTexture(0, bloomFrameBuffer->getRenderBuffer(0));
            batch.draw(gpu::TRIANGLE_STRIP, 4);
        });
    }
}

DebugBloom::DebugBloom() {
}

void DebugBloom::configure(const Config& config) {
    _mode = static_cast<DebugBloomConfig::Mode>(config.mode);
    assert(_mode < DebugBloomConfig::MODE_COUNT);
}

void DebugBloom::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());
    RenderArgs* args = renderContext->args;

    const auto frameBuffer = inputs.get0();
    const auto combinedBlurBuffer = inputs.get4();
    const auto framebufferSize = frameBuffer->getSize();
    const auto level0FB = inputs.get1();
    const auto level1FB = inputs.get2();
    const auto level2FB = inputs.get3();
    const gpu::TexturePointer levelTextures[BLOOM_BLUR_LEVEL_COUNT] = {
        level0FB->getRenderBuffer(0),
        level1FB->getRenderBuffer(0),
        level2FB->getRenderBuffer(0)
    };

    static auto TEXCOORD_RECT_SLOT = 1;

    if (!_pipeline) {
        auto vs = gpu::StandardShaderLib::getDrawTexcoordRectTransformUnitQuadVS();
        auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS();
        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding(std::string("texcoordRect"), TEXCOORD_RECT_SLOT));
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        state->setDepthTest(gpu::State::DepthTest(false));
        _pipeline = gpu::Pipeline::create(program, state);
    }

    gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
        batch.enableStereo(false);

        batch.setFramebuffer(frameBuffer);

        batch.setViewportTransform(args->_viewport);
        batch.setProjectionTransform(glm::mat4());
        batch.resetViewTransform();
        batch.setPipeline(_pipeline);

        Transform modelTransform;
        if (_mode == DebugBloomConfig::MODE_ALL_LEVELS) {
            batch._glUniform4f(TEXCOORD_RECT_SLOT, 0.0f, 0.0f, 1.f, 1.f);

            modelTransform = gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport / 2);
            modelTransform.postTranslate(glm::vec3(-1.0f, 1.0f, 0.0f));
            batch.setModelTransform(modelTransform);
            batch.setResourceTexture(0, levelTextures[0]);
            batch.draw(gpu::TRIANGLE_STRIP, 4);

            modelTransform.postTranslate(glm::vec3(2.0f, 0.0f, 0.0f));
            batch.setModelTransform(modelTransform);
            batch.setResourceTexture(0, levelTextures[1]);
            batch.draw(gpu::TRIANGLE_STRIP, 4);

            modelTransform.postTranslate(glm::vec3(-2.0f, -2.0f, 0.0f));
            batch.setModelTransform(modelTransform);
            batch.setResourceTexture(0, levelTextures[2]);
            batch.draw(gpu::TRIANGLE_STRIP, 4);

            modelTransform.postTranslate(glm::vec3(2.0f, 0.0f, 0.0f));
            batch.setModelTransform(modelTransform);
            batch.setResourceTexture(0, combinedBlurBuffer->getRenderBuffer(0));
            batch.draw(gpu::TRIANGLE_STRIP, 4);
        } else {
            auto viewport = args->_viewport;
            auto blurLevel = _mode - DebugBloomConfig::MODE_LEVEL0;

            viewport.z /= 2;

            batch._glUniform4f(TEXCOORD_RECT_SLOT, 0.5f, 0.0f, 0.5f, 1.f);

            modelTransform = gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, viewport);
            modelTransform.postTranslate(glm::vec3(-1.0f, 0.0f, 0.0f));
            batch.setModelTransform(modelTransform);
            batch.setResourceTexture(0, levelTextures[blurLevel]);
            batch.draw(gpu::TRIANGLE_STRIP, 4);
        }
    });
}

void BloomConfig::setIntensity(float value) {
    auto task = static_cast<render::Task::TaskConcept*>(_task);
    auto blurJobIt = task->editJob("BloomApply");
    assert(blurJobIt != task->_jobs.end());
    blurJobIt->getConfiguration()->setProperty("intensity", value);
}

float BloomConfig::getIntensity() const {
    auto task = static_cast<render::Task::TaskConcept*>(_task);
    auto blurJobIt = task->getJob("BloomApply");
    assert(blurJobIt != task->_jobs.end());
    return blurJobIt->getConfiguration()->property("intensity").toFloat();
}

void BloomConfig::setSize(float value) {
    std::string blurName{ "BloomBlurN" };
    auto sigma = 0.5f + value * 3.5f;

    for (auto i = 0; i < BLOOM_BLUR_LEVEL_COUNT; i++) {
        // Patch the trailing digit to address the jobs "BloomBlur0".."BloomBlur2" by name
        blurName.back() = '0' + i;
        auto task = static_cast<render::Task::TaskConcept*>(_task);
        auto blurJobIt = task->editJob(blurName);
        assert(blurJobIt != task->_jobs.end());
        auto& gaussianBlur = blurJobIt->edit<render::BlurGaussian>();
        auto gaussianBlurParams = gaussianBlur.getParameters();
        gaussianBlurParams->setFilterGaussianTaps(5, sigma);
        // Gaussian blur increases at each level to have a slower rolloff on the edge
        // of the response
        sigma *= 1.5f;
    }
}

Bloom::Bloom() {

}

void Bloom::configure(const Config& config) {
    std::string blurName{ "BloomBlurN" };

    for (auto i = 0; i < BLOOM_BLUR_LEVEL_COUNT; i++) {
        blurName.back() = '0' + i;
        auto blurConfig = config.getConfig<render::BlurGaussian>(blurName);
        blurConfig->setProperty("filterScale", 1.0f);
    }
}

void Bloom::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs) {
    // Start by computing threshold of color buffer input at quarter resolution
    const auto bloomInputBuffer = task.addJob<BloomThreshold>("BloomThreshold", inputs, 4U);

    // Multi-scale blur, each new blur is half resolution of the previous pass
    const auto blurFB0 = task.addJob<render::BlurGaussian>("BloomBlur0", bloomInputBuffer, true);
    const auto blurFB1 = task.addJob<render::BlurGaussian>("BloomBlur1", blurFB0, true, 2U);
    const auto blurFB2 = task.addJob<render::BlurGaussian>("BloomBlur2", blurFB1, true, 2U);

    const auto& input = inputs.get<Inputs>();
    const auto& frameBuffer = input[1];

    // Mix all blur levels at quarter resolution
    const auto applyInput = BloomApply::Inputs(bloomInputBuffer, blurFB0, blurFB1, blurFB2).asVarying();
    task.addJob<BloomApply>("BloomApply", applyInput);
    // And then blend the result in an additive manner on top of the final color buffer
    const auto drawInput = BloomDraw::Inputs(frameBuffer, bloomInputBuffer).asVarying();
    task.addJob<BloomDraw>("BloomDraw", drawInput);

    const auto debugInput = DebugBloom::Inputs(frameBuffer, blurFB0, blurFB1, blurFB2, bloomInputBuffer).asVarying();
    task.addJob<DebugBloom>("DebugBloom", debugInput);
}
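Summarizing the chain that build() wires up (same job names as above; the resolutions follow from the 4U threshold factor and the 2U downsample arguments):

    // BloomThreshold : lighting buffer -> 1/4 resolution, soft-thresholded
    // BloomBlur0     : gaussian blur at 1/4 resolution
    // BloomBlur1     : gaussian blur, downsampled again to 1/8 resolution
    // BloomBlur2     : gaussian blur, downsampled again to 1/16 resolution
    // BloomApply     : sums the three blurs back into the 1/4 res buffer
    // BloomDraw      : additively blends that buffer over the final color buffer
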
166 libraries/render-utils/src/BloomEffect.h Normal file
@ -0,0 +1,166 @@
//
// BloomEffect.h
// render-utils/src/
//
// Created by Olivier Prat on 09/25/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_render_utils_BloomEffect_h
#define hifi_render_utils_BloomEffect_h

#include <render/Engine.h>

#include "DeferredFrameTransform.h"

class BloomConfig : public render::Task::Config {
    Q_OBJECT
    Q_PROPERTY(float intensity READ getIntensity WRITE setIntensity NOTIFY dirty)
    Q_PROPERTY(float size MEMBER size WRITE setSize NOTIFY dirty)

public:

    BloomConfig() : render::Task::Config(false) {}

    float size{ 0.8f };

    void setIntensity(float value);
    float getIntensity() const;
    void setSize(float value);

signals:
    void dirty();
};

class BloomThresholdConfig : public render::Job::Config {
    Q_OBJECT
    Q_PROPERTY(float threshold MEMBER threshold NOTIFY dirty)

public:

    float threshold{ 1.25f };

signals:
    void dirty();
};

class BloomThreshold {
public:
    using Inputs = render::VaryingSet2<DeferredFrameTransformPointer, gpu::FramebufferPointer>;
    using Outputs = gpu::FramebufferPointer;
    using Config = BloomThresholdConfig;
    using JobModel = render::Job::ModelIO<BloomThreshold, Inputs, Outputs, Config>;

    BloomThreshold(unsigned int downsamplingFactor);

    void configure(const Config& config);
    void run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs);

private:

    gpu::FramebufferPointer _outputBuffer;
    gpu::PipelinePointer _pipeline;
    float _threshold;
    unsigned int _downsamplingFactor;
};


class BloomApplyConfig : public render::Job::Config {
    Q_OBJECT
    Q_PROPERTY(float intensity MEMBER intensity NOTIFY dirty)

public:

    float intensity{ 0.8f };

signals:
    void dirty();
};

class BloomApply {
public:
    using Inputs = render::VaryingSet4<gpu::FramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer>;
    using Config = BloomApplyConfig;
    using JobModel = render::Job::ModelI<BloomApply, Inputs, Config>;

    BloomApply();

    void configure(const Config& config);
    void run(const render::RenderContextPointer& renderContext, const Inputs& inputs);

private:

    gpu::PipelinePointer _pipeline;
    float _intensity{ 1.0f };
};

class BloomDraw {
public:
    using Inputs = render::VaryingSet2<gpu::FramebufferPointer, gpu::FramebufferPointer>;
    using JobModel = render::Job::ModelI<BloomDraw, Inputs>;

    BloomDraw() {}

    void run(const render::RenderContextPointer& renderContext, const Inputs& inputs);

private:

    gpu::PipelinePointer _pipeline;
};

class DebugBloomConfig : public render::Job::Config {
    Q_OBJECT
    Q_PROPERTY(int mode MEMBER mode NOTIFY dirty)

public:

    enum Mode {
        MODE_LEVEL0 = 0,
        MODE_LEVEL1,
        MODE_LEVEL2,
        MODE_ALL_LEVELS,

        MODE_COUNT
    };

    DebugBloomConfig() : render::Job::Config(false) {}

    int mode{ MODE_ALL_LEVELS };

signals:
    void dirty();
};

class DebugBloom {
public:
    using Inputs = render::VaryingSet5<gpu::FramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer>;
    using Config = DebugBloomConfig;
    using JobModel = render::Job::ModelI<DebugBloom, Inputs, Config>;

    DebugBloom();

    void configure(const Config& config);
    void run(const render::RenderContextPointer& renderContext, const Inputs& inputs);

private:
    gpu::PipelinePointer _pipeline;
    DebugBloomConfig::Mode _mode;
};

class Bloom {
public:
    using Inputs = render::VaryingSet2<DeferredFrameTransformPointer, gpu::FramebufferPointer>;
    using Config = BloomConfig;
    using JobModel = render::Task::ModelI<Bloom, Inputs, Config>;

    Bloom();

    void configure(const Config& config);
    void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs);

};

#endif // hifi_render_utils_BloomEffect_h
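Because every stage exposes its knobs through Q_PROPERTY on a Job/Task config, the whole effect stays adjustable through the render engine's configuration tree, following the same pattern Bloom::configure uses above. A hypothetical adjustment from a parent config (the lookup path is an assumption, not defined in this change):

    // Sketch only: 'config' is assumed to be an enclosing task's Config.
    auto bloomConfig = config.getConfig<Bloom>("Bloom");  // hypothetical path
    bloomConfig->setProperty("intensity", 1.2f);          // routed to the BloomApply job
    bloomConfig->setProperty("size", 0.5f);               // retunes the three blur sigmas
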
45 libraries/render-utils/src/BloomThreshold.slf Normal file
@ -0,0 +1,45 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// BloomThreshold.slf
// Perform a soft threshold on an input texture and downsample to quarter size in one go.
//
// Created by Olivier Prat on 09/26/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

uniform sampler2D colorMap;
uniform float threshold;

in vec2 varTexCoord0;
out vec4 outFragColor;

#define DOWNSAMPLING_FACTOR 4
#define SAMPLE_COUNT (DOWNSAMPLING_FACTOR/2)

void main(void) {
    vec2 deltaX = dFdx(varTexCoord0) / SAMPLE_COUNT;
    vec2 deltaY = dFdy(varTexCoord0) / SAMPLE_COUNT;
    vec2 startUv = varTexCoord0;
    vec4 maskedColor = vec4(0,0,0,0);

    for (int y=0 ; y<SAMPLE_COUNT ; y++) {
        vec2 uv = startUv;

        for (int x=0 ; x<SAMPLE_COUNT ; x++) {
            vec4 color = texture(colorMap, uv);
            float luminance = (color.r+color.g+color.b) / 3.0;
            float mask = clamp((luminance-threshold)*0.25, 0, 1);

            color *= mask;
            maskedColor += color;
            uv += deltaX;
        }

        startUv += deltaY;
    }
    maskedColor /= SAMPLE_COUNT*SAMPLE_COUNT;
    outFragColor = vec4(maskedColor.rgb, 1.0);
}
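In other words, each output texel averages SAMPLE_COUNT × SAMPLE_COUNT = 4 taps of the source and applies the soft knee

    \[ m = \operatorname{clamp}\!\left(\frac{L - t}{4},\, 0,\, 1\right), \qquad L = \frac{r + g + b}{3} \]

so bright pixels fade in over a range of four luminance units above the threshold \(t\) instead of popping at a hard cutoff.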
@ -73,9 +73,9 @@ void DeferredFramebuffer::allocate() {
        _deferredFramebufferDepthColor->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);


    auto smoothSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR);
    auto smoothSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);

    _lightingTexture = gpu::Texture::createRenderBuffer(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10), width, height, gpu::Texture::SINGLE_MIP, defaultSampler);
    _lightingTexture = gpu::Texture::createRenderBuffer(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10), width, height, gpu::Texture::SINGLE_MIP, smoothSampler);
    _lightingFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("lighting"));
    _lightingFramebuffer->setRenderBuffer(0, _lightingTexture);
    _lightingFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);

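The sampler swap matters because the lighting texture is created with a single mip: a trilinear (MIP_LINEAR) sampler buys nothing there, while LINEAR_MIP_POINT keeps bilinear min/mag filtering, which the new bloom threshold pass presumably relies on when it reads the lighting buffer at reduced resolution.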
@ -42,6 +42,7 @@
#include "ToneMappingEffect.h"
#include "SubsurfaceScattering.h"
#include "DrawHaze.h"
#include "BloomEffect.h"
#include "HighlightEffect.h"

#include <sstream>

@ -166,7 +167,7 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
    const auto transparentsInputs = DrawDeferred::Inputs(transparents, lightingModel).asVarying();
    task.addJob<DrawDeferred>("DrawTransparentDeferred", transparentsInputs, shapePlumber);

    // LIght Cluster Grid Debuging job
    // Light Cluster Grid Debugging job
    {
        const auto debugLightClustersInputs = DebugLightClusters::Inputs(deferredFrameTransform, deferredFramebuffer, lightingModel, linearDepthTarget, lightClusters).asVarying();
        task.addJob<DebugLightClusters>("DebugLightClusters", debugLightClustersInputs);

@ -177,6 +178,10 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren

    const auto toneAndPostRangeTimer = task.addJob<BeginGPURangeTimer>("BeginToneAndPostRangeTimer", "PostToneOverlaysAntialiasing");

    // Add bloom
    const auto bloomInputs = Bloom::Inputs(deferredFrameTransform, lightingFramebuffer).asVarying();
    task.addJob<Bloom>("Bloom", bloomInputs);

    // Lighting Buffer ready for tone mapping
    const auto toneMappingInputs = ToneMappingDeferred::Inputs(lightingFramebuffer, primaryFramebuffer).asVarying();
    task.addJob<ToneMappingDeferred>("ToneMapping", toneMappingInputs);

@ -29,11 +29,10 @@ enum BlurShaderMapSlots {
    BlurTask_DepthSlot,
};

const float BLUR_NUM_SAMPLES = 7.0f;

BlurParams::BlurParams() {
    Params params;
    _parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Params), (const gpu::Byte*) &params));
    setFilterGaussianTaps(3);
}

void BlurParams::setWidthHeight(int width, int height, bool isStereo) {

@ -49,10 +48,10 @@ void BlurParams::setWidthHeight(int width, int height, bool isStereo) {
    }
}

void BlurParams::setTexcoordTransform(const glm::vec4 texcoordTransformViewport) {
    auto texcoordTransform = _parametersBuffer.get<Params>().texcoordTransform;
    if (texcoordTransformViewport != texcoordTransform) {
        _parametersBuffer.edit<Params>().texcoordTransform = texcoordTransform;
void BlurParams::setTexcoordTransform(glm::vec4 texcoordTransformViewport) {
    auto& params = _parametersBuffer.get<Params>();
    if (texcoordTransformViewport != params.texcoordTransform) {
        _parametersBuffer.edit<Params>().texcoordTransform = texcoordTransformViewport;
    }
}

@ -60,7 +59,58 @@ void BlurParams::setFilterRadiusScale(float scale) {
    auto filterInfo = _parametersBuffer.get<Params>().filterInfo;
    if (scale != filterInfo.x) {
        _parametersBuffer.edit<Params>().filterInfo.x = scale;
        _parametersBuffer.edit<Params>().filterInfo.y = scale / BLUR_NUM_SAMPLES;
    }
}

void BlurParams::setFilterNumTaps(int count) {
    assert(count <= BLUR_MAX_NUM_TAPS);
    auto filterInfo = _parametersBuffer.get<Params>().filterInfo;
    if (count != (int)filterInfo.y) {
        _parametersBuffer.edit<Params>().filterInfo.y = count;
    }
}

void BlurParams::setFilterTap(int index, float offset, float value) {
    auto filterTaps = _parametersBuffer.edit<Params>().filterTaps;
    assert(index < BLUR_MAX_NUM_TAPS);
    filterTaps[index].x = offset;
    filterTaps[index].y = value;
}

void BlurParams::setFilterGaussianTaps(int numHalfTaps, float sigma) {
    auto& params = _parametersBuffer.edit<Params>();
    const int numTaps = 2 * numHalfTaps + 1;
    assert(numTaps <= BLUR_MAX_NUM_TAPS);
    assert(sigma > 0.0f);
    const float inverseTwoSigmaSquared = float(0.5 / double(sigma * sigma));
    float totalWeight = 1.0f;
    float weight;
    float offset;
    int i;

    params.filterInfo.y = numTaps;
    params.filterTaps[0].x = 0.0f;
    params.filterTaps[0].y = 1.0f;

    for (i = 0; i < numHalfTaps; i++) {
        offset = i + 1;
        weight = (float)exp(-offset * offset * inverseTwoSigmaSquared);
        params.filterTaps[i + 1].x = offset;
        params.filterTaps[i + 1].y = weight;
        params.filterTaps[i + 1 + numHalfTaps].x = -offset;
        params.filterTaps[i + 1 + numHalfTaps].y = weight;
        totalWeight += 2 * weight;
    }

    // Tap weights will be normalized in shader because side cases on edges of screen
    // won't have the same number of taps as in the center.
}

void BlurParams::setOutputAlpha(float value) {
    value = glm::clamp(value, 0.0f, 1.0f);
    auto filterInfo = _parametersBuffer.get<Params>().filterInfo;
    if (value != filterInfo.z) {
        _parametersBuffer.edit<Params>().filterInfo.z = value;
    }
}

@ -86,17 +136,23 @@ void BlurParams::setLinearDepthPosFar(float farPosDepth) {
}


BlurInOutResource::BlurInOutResource(bool generateOutputFramebuffer) :
    _generateOutputFramebuffer(generateOutputFramebuffer)
{

BlurInOutResource::BlurInOutResource(bool generateOutputFramebuffer, unsigned int downsampleFactor) :
    _downsampleFactor(downsampleFactor),
    _generateOutputFramebuffer(generateOutputFramebuffer) {
    assert(downsampleFactor > 0);
}

bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFramebuffer, Resources& blurringResources) {
    if (!sourceFramebuffer) {
        return false;
    }
    if (_blurredFramebuffer && _blurredFramebuffer->getSize() != sourceFramebuffer->getSize()) {

    auto blurBufferSize = sourceFramebuffer->getSize();

    blurBufferSize.x /= _downsampleFactor;
    blurBufferSize.y /= _downsampleFactor;

    if (_blurredFramebuffer && _blurredFramebuffer->getSize() != blurBufferSize) {
        _blurredFramebuffer.reset();
    }

@ -108,7 +164,7 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra
        //    _blurredFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
        //}
        auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
        auto blurringTarget = gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), gpu::Texture::SINGLE_MIP, blurringSampler);
        auto blurringTarget = gpu::Texture::createRenderBuffer(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), blurBufferSize.x, blurBufferSize.y, gpu::Texture::SINGLE_MIP, blurringSampler);
        _blurredFramebuffer->setRenderBuffer(0, blurringTarget);
    }

@ -117,7 +173,7 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra
    blurringResources.blurringTexture = _blurredFramebuffer->getRenderBuffer(0);

    if (_generateOutputFramebuffer) {
        if (_outputFramebuffer && _outputFramebuffer->getSize() != sourceFramebuffer->getSize()) {
        if (_outputFramebuffer && _outputFramebuffer->getSize() != blurBufferSize) {
            _outputFramebuffer.reset();
        }

@ -131,7 +187,7 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra
                _outputFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
            }*/
            auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
            auto blurringTarget = gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), gpu::Texture::SINGLE_MIP, blurringSampler);
            auto blurringTarget = gpu::Texture::createRenderBuffer(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), blurBufferSize.x, blurBufferSize.y, gpu::Texture::SINGLE_MIP, blurringSampler);
            _outputFramebuffer->setRenderBuffer(0, blurringTarget);
        }

@ -145,8 +201,8 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra
    return true;
}

BlurGaussian::BlurGaussian(bool generateOutputFramebuffer) :
    _inOutResources(generateOutputFramebuffer)
BlurGaussian::BlurGaussian(bool generateOutputFramebuffer, unsigned int downsampleFactor) :
    _inOutResources(generateOutputFramebuffer, downsampleFactor)
{
    _parameters = std::make_shared<BlurParams>();
}

@ -196,7 +252,16 @@ gpu::PipelinePointer BlurGaussian::getBlurHPipeline() {
}

void BlurGaussian::configure(const Config& config) {
    auto state = getBlurHPipeline()->getState();

    _parameters->setFilterRadiusScale(config.filterScale);
    _parameters->setOutputAlpha(config.mix);
    if (config.mix < 1.0f) {
        state->setBlendFunction(config.mix < 1.0f, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
                                gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
    } else {
        state->setBlendFunction(false);
    }
}


@ -206,7 +271,6 @@ void BlurGaussian::run(const RenderContextPointer& renderContext, const gpu::Fra

    RenderArgs* args = renderContext->args;


    BlurInOutResource::Resources blurringResources;
    if (!_inOutResources.updateResources(sourceFramebuffer, blurringResources)) {
        // early exit if no valid blurring resources

@ -216,14 +280,15 @@ void BlurGaussian::run(const RenderContextPointer& renderContext, const gpu::Fra

    auto blurVPipeline = getBlurVPipeline();
    auto blurHPipeline = getBlurHPipeline();
    glm::ivec4 viewport { 0, 0, blurredFramebuffer->getWidth(), blurredFramebuffer->getHeight() };

    _parameters->setWidthHeight(args->_viewport.z, args->_viewport.w, args->isStereo());
    glm::ivec2 textureSize(blurringResources.sourceTexture->getDimensions());
    _parameters->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, args->_viewport));
    glm::ivec2 textureSize = blurredFramebuffer->getSize();
    _parameters->setWidthHeight(blurredFramebuffer->getWidth(), blurredFramebuffer->getHeight(), args->isStereo());
    _parameters->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, viewport));

    gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
        batch.enableStereo(false);
        batch.setViewportTransform(args->_viewport);
        batch.setViewportTransform(viewport);

        batch.setUniformBuffer(BlurTask_ParamsSlot, _parameters->_parametersBuffer);


@ -251,7 +316,7 @@ void BlurGaussian::run(const RenderContextPointer& renderContext, const gpu::Fra


BlurGaussianDepthAware::BlurGaussianDepthAware(bool generateOutputFramebuffer, const BlurParamsPointer& params) :
    _inOutResources(generateOutputFramebuffer),
    _inOutResources(generateOutputFramebuffer, 1U),
    _parameters((params ? params : std::make_shared<BlurParams>()))
{
}

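setFilterGaussianTaps fills the tap table symmetrically around tap 0 with unnormalized weights exp(-o² / 2σ²); normalization happens later in the shader, where off-screen taps are dropped. A standalone restatement of the same weight computation (illustrative only, not part of the change):

    #include <cmath>
    #include <utility>
    #include <vector>

    // Tap 0 is the kernel center (offset 0, weight 1), followed by numHalfTaps
    // mirrored pairs at +/- integer offsets with Gaussian falloff. Weights are
    // deliberately left unnormalized, matching the code above.
    std::vector<std::pair<float, float>> makeGaussianTaps(int numHalfTaps, float sigma) {
        const float inverseTwoSigmaSquared = 0.5f / (sigma * sigma);
        std::vector<std::pair<float, float>> taps{ { 0.0f, 1.0f } };
        for (int i = 1; i <= numHalfTaps; i++) {
            float weight = std::exp(-float(i * i) * inverseTwoSigmaSquared);
            taps.emplace_back(float(i), weight);    // (offset, weight)
            taps.emplace_back(-float(i), weight);   // mirrored tap
        }
        return taps;
    }
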
@ -14,6 +14,8 @@

#include "Engine.h"

#include "BlurTask_shared.slh"

namespace render {


@ -25,6 +27,11 @@ public:
    void setTexcoordTransform(const glm::vec4 texcoordTransformViewport);

    void setFilterRadiusScale(float scale);
    void setFilterNumTaps(int count);
    // Tap 0 is considered the center of the kernel
    void setFilterTap(int index, float offset, float value);
    void setFilterGaussianTaps(int numHalfTaps, float sigma = 1.47f);
    void setOutputAlpha(float value);

    void setDepthPerspective(float oneOverTan2FOV);
    void setDepthThreshold(float threshold);

@ -40,7 +47,7 @@ public:
        // Viewport to Texcoord info, if the region of the blur (viewport) is smaller than the full frame
        glm::vec4 texcoordTransform{ 0.0f, 0.0f, 1.0f, 1.0f };

        // Filter info (radius scale
        // Filter info (radius scale, number of taps, output alpha)
        glm::vec4 filterInfo{ 1.0f, 0.0f, 0.0f, 0.0f };

        // Depth info (radius scale

@ -52,6 +59,9 @@ public:
        // LinearDepth info is { f }
        glm::vec4 linearDepthInfo{ 0.0f };

        // Taps (offset, weight)
        glm::vec2 filterTaps[BLUR_MAX_NUM_TAPS];

        Params() {}
    };
    gpu::BufferView _parametersBuffer;

@ -62,7 +72,7 @@ using BlurParamsPointer = std::shared_ptr<BlurParams>;

class BlurInOutResource {
public:
    BlurInOutResource(bool generateOutputFramebuffer = false);
    BlurInOutResource(bool generateOutputFramebuffer, unsigned int downsampleFactor);

    struct Resources {
        gpu::TexturePointer sourceTexture;

@ -75,8 +85,9 @@ public:

    gpu::FramebufferPointer _blurredFramebuffer;

    // the output framebuffer defined if the job needs to output the result in a new framebuffer and not in place in th einput buffer
    // the output framebuffer defined if the job needs to output the result in a new framebuffer and not in place in the input buffer
    gpu::FramebufferPointer _outputFramebuffer;
    unsigned int _downsampleFactor{ 1U };
    bool _generateOutputFramebuffer{ false };
};


@ -84,12 +95,15 @@ public:
class BlurGaussianConfig : public Job::Config {
    Q_OBJECT
    Q_PROPERTY(bool enabled WRITE setEnabled READ isEnabled NOTIFY dirty) // expose enabled flag
    Q_PROPERTY(float filterScale MEMBER filterScale NOTIFY dirty) // expose enabled flag
    Q_PROPERTY(float filterScale MEMBER filterScale NOTIFY dirty)
    Q_PROPERTY(float mix MEMBER mix NOTIFY dirty)
public:

    BlurGaussianConfig() : Job::Config(true) {}

    float filterScale{ 0.2f };
    float mix{ 1.0f };

signals :
    void dirty();


@ -102,11 +116,13 @@ public:
    using Config = BlurGaussianConfig;
    using JobModel = Job::ModelIO<BlurGaussian, gpu::FramebufferPointer, gpu::FramebufferPointer, Config>;

    BlurGaussian(bool generateOutputFramebuffer = false);
    BlurGaussian(bool generateOutputFramebuffer = false, unsigned int downsampleFactor = 1U);

    void configure(const Config& config);
    void run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& blurredFramebuffer);

    BlurParamsPointer getParameters() const { return _parameters; }

protected:

    BlurParamsPointer _parameters;

@ -9,17 +9,7 @@
|
|||
|
||||
<@func declareBlurUniforms()@>
|
||||
|
||||
#define NUM_TAPS 7
|
||||
#define NUM_TAPS_OFFSET 3.0f
|
||||
|
||||
float uniformFilterWidth = 0.05f;
|
||||
|
||||
const float gaussianDistributionCurve[NUM_TAPS] = float[](
|
||||
0.383f, 0.006f, 0.061f, 0.242f, 0.242f, 0.061f, 0.006f
|
||||
);
|
||||
const float gaussianDistributionOffset[NUM_TAPS] = float[](
|
||||
0.0f, -3.0f, -2.0f, -1.0f, 1.0f, 2.0f, 3.0f
|
||||
);
|
||||
<@include BlurTask_shared.slh@>
|
||||
|
||||
struct BlurParameters {
|
||||
vec4 resolutionInfo;
|
||||
|
@ -28,6 +18,7 @@ struct BlurParameters {
|
|||
vec4 depthInfo;
|
||||
vec4 stereoInfo;
|
||||
vec4 linearDepthInfo;
|
||||
vec2 taps[BLUR_MAX_NUM_TAPS];
|
||||
};
|
||||
|
||||
uniform blurParamsBuffer {
|
||||
|
@ -46,6 +37,25 @@ float getFilterScale() {
|
|||
return parameters.filterInfo.x;
|
||||
}
|
||||
|
||||
int getFilterNumTaps() {
|
||||
return int(parameters.filterInfo.y);
|
||||
}
|
||||
|
||||
float getOutputAlpha() {
|
||||
return parameters.filterInfo.z;
|
||||
}
|
||||
|
||||
vec2 getFilterTap(int index) {
|
||||
return parameters.taps[index];
|
||||
}
|
||||
|
||||
float getFilterTapOffset(vec2 tap) {
|
||||
return tap.x;
|
||||
}
|
||||
|
||||
float getFilterTapWeight(vec2 tap) {
|
||||
return tap.y;
|
||||
}
|
||||
|
||||
float getDepthThreshold() {
|
||||
return parameters.depthInfo.x;
|
||||
|
@ -70,19 +80,29 @@ uniform sampler2D sourceMap;
|
|||
|
||||
vec4 pixelShaderGaussian(vec2 texcoord, vec2 direction, vec2 pixelStep) {
|
||||
texcoord = evalTexcoordTransformed(texcoord);
|
||||
vec4 sampleCenter = texture(sourceMap, texcoord);
|
||||
|
||||
vec2 finalStep = getFilterScale() * direction * pixelStep;
|
||||
vec4 srcBlurred = vec4(0.0);
|
||||
float totalWeight = 0.f;
|
||||
int numTaps = getFilterNumTaps();
|
||||
|
||||
for(int i = 0; i < NUM_TAPS; i++) {
|
||||
// Fetch color and depth for current sample.
|
||||
vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep);
|
||||
vec4 srcSample = texture(sourceMap, sampleCoord);
|
||||
// Accumulate.
|
||||
srcBlurred += gaussianDistributionCurve[i] * srcSample;
|
||||
for(int i = 0; i < numTaps; i++) {
|
||||
vec2 tapInfo = getFilterTap(i);
|
||||
// Fetch color for current sample.
|
||||
vec2 sampleCoord = texcoord + (getFilterTapOffset(tapInfo) * finalStep);
|
||||
if (all(greaterThanEqual(sampleCoord, vec2(0,0))) && all(lessThanEqual(sampleCoord, vec2(1.0,1.0)))) {
|
||||
vec4 srcSample = texture(sourceMap, sampleCoord);
|
||||
float weight = getFilterTapWeight(tapInfo);
|
||||
// Accumulate.
|
||||
srcBlurred += srcSample * weight;
|
||||
totalWeight += weight;
|
||||
}
|
||||
}
|
||||
|
||||
if (totalWeight>0.0) {
|
||||
srcBlurred /= totalWeight;
|
||||
}
|
||||
srcBlurred.a = getOutputAlpha();
|
||||
return srcBlurred;
|
||||
}
|
||||
|
||||
|
@ -95,15 +115,6 @@ vec4 pixelShaderGaussian(vec2 texcoord, vec2 direction, vec2 pixelStep) {
|
|||
uniform sampler2D sourceMap;
|
||||
uniform sampler2D depthMap;
|
||||
|
||||
#define NUM_HALF_TAPS 4
|
||||
|
||||
const float gaussianDistributionCurveHalf[NUM_HALF_TAPS] = float[](
|
||||
0.383f, 0.242f, 0.061f, 0.006f
|
||||
);
|
||||
const float gaussianDistributionOffsetHalf[NUM_HALF_TAPS] = float[](
|
||||
0.0f, 1.0f, 2.0f, 3.0f
|
||||
);
|
||||
|
||||
vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep) {
|
||||
texcoord = evalTexcoordTransformed(texcoord);
|
||||
float sampleDepth = texture(depthMap, texcoord).x;
|
||||
|
@ -122,45 +133,36 @@ vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep) {
     float scale = distanceToProjectionWindow / sampleDepth;
 
     vec2 finalStep = filterScale * scale * direction * pixelStep;
+    int numTaps = getFilterNumTaps();
 
     // Accumulate the center sample
-    vec4 srcBlurred = gaussianDistributionCurve[0] * sampleCenter;
+    vec2 tapInfo = getFilterTap(0);
+    float totalWeight = getFilterTapWeight(tapInfo);
+    vec4 srcBlurred = sampleCenter * totalWeight;
 
-    for(int i = 1; i < NUM_TAPS; i++) {
+    for(int i = 1; i < numTaps; i++) {
+        tapInfo = getFilterTap(i);
+
         // Fetch color and depth for current sample.
-        vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep);
-        float srcDepth = texture(depthMap, sampleCoord).x;
-        vec4 srcSample = texture(sourceMap, sampleCoord);
+        vec2 sampleCoord = texcoord + (getFilterTapOffset(tapInfo) * finalStep);
+        if (all(greaterThanEqual(sampleCoord, vec2(0,0))) && all(lessThanEqual(sampleCoord, vec2(1.0,1.0)))) {
+            float srcDepth = texture(depthMap, sampleCoord).x;
+            vec4 srcSample = texture(sourceMap, sampleCoord);
+            float weight = getFilterTapWeight(tapInfo);
 
-        // If the difference in depth is huge, we lerp color back.
-        float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0);
-        srcSample = mix(srcSample, sampleCenter, s);
+            // If the difference in depth is huge, we lerp color back.
+            float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0);
+            srcSample = mix(srcSample, sampleCenter, s);
 
-        // Accumulate.
-        srcBlurred += gaussianDistributionCurve[i] * srcSample;
+            // Accumulate.
+            srcBlurred += srcSample * weight;
+            totalWeight += weight;
+        }
     }
 
-    /*
-    for(int i = 1; i < NUM_HALF_TAPS; i++) {
-        // Fetch color and depth for current sample.
-        vec2 texcoordOffset = (gaussianDistributionOffsetHalf[i] * finalStep);
-
-        float srcDepthN = texture(depthMap, texcoord - texcoordOffset).x;
-        float srcDepthP = texture(depthMap, texcoord + texcoordOffset).x;
-        vec4 srcSampleN = texture(sourceMap, texcoord - texcoordOffset);
-        vec4 srcSampleP = texture(sourceMap, texcoord + texcoordOffset);
-
-        // If the difference in depth is huge, we lerp color back.
-        float sN = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepthN - sampleDepth), 0.0, 1.0);
-        float sP = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepthP - sampleDepth), 0.0, 1.0);
-
-        srcSampleN = mix(srcSampleN, sampleCenter, sN);
-        srcSampleP = mix(srcSampleP, sampleCenter, sP);
-
-        // Accumulate.
-        srcBlurred += gaussianDistributionCurveHalf[i] * (srcSampleP + srcSampleN);
-    }*/
-
+    if (totalWeight > 0.0) {
+        srcBlurred /= totalWeight;
+    }
     return srcBlurred;
 }
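Worth noting in the hunk above: once taps can be skipped (sample coordinates falling outside the [0,1] texture range), the Gaussian weights no longer sum to one, so the shader accumulates `totalWeight` and renormalizes at the end. A standalone C++ sketch of that same normalization idea, with hypothetical sample/weight/offset arrays standing in for the shader's filter taps:

```cpp
#include <cstddef>
#include <vector>

// Depth-unaware, weight-normalized 1D blur over precomputed taps.
// 'weights' and 'offsets' stand in for the shader's filter taps; taps that
// land outside the buffer are skipped, so the weights are re-normalized.
float blurAt(const std::vector<float>& src, std::size_t center,
             const std::vector<float>& weights,
             const std::vector<int>& offsets) {
    float acc = src[center] * weights[0];
    float totalWeight = weights[0];
    for (std::size_t i = 1; i < weights.size(); ++i) {
        long coord = static_cast<long>(center) + offsets[i];
        if (coord >= 0 && coord < static_cast<long>(src.size())) {
            acc += src[static_cast<std::size_t>(coord)] * weights[i];
            totalWeight += weights[i];  // only count taps that contributed
        }
    }
    return totalWeight > 0.0f ? acc / totalWeight : acc;
}
```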
10  libraries/render/src/render/BlurTask_shared.slh  Normal file
@ -0,0 +1,10 @@
// Generated on <$_SCRIBE_DATE$>
//
//  Created by Olivier Prat on 09/25/17.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#define BLUR_MAX_NUM_TAPS 33
83  libraries/render/src/render/ResampleTask.cpp  Normal file
@ -0,0 +1,83 @@
//
//  ResampleTask.cpp
//  render/src/render
//
//  Various tasks to upsample or downsample textures into framebuffers.
//
//  Created by Olivier Prat on 10/09/17.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ResampleTask.h"

#include "gpu/Context.h"
#include "gpu/StandardShaderLib.h"

using namespace render;

gpu::PipelinePointer HalfDownsample::_pipeline;

HalfDownsample::HalfDownsample() {
}

void HalfDownsample::configure(const Config& config) {
}

gpu::FramebufferPointer HalfDownsample::getResampledFrameBuffer(const gpu::FramebufferPointer& sourceFramebuffer) {
    auto resampledFramebufferSize = sourceFramebuffer->getSize();

    resampledFramebufferSize.x /= 2U;
    resampledFramebufferSize.y /= 2U;

    if (!_destinationFrameBuffer || resampledFramebufferSize != _destinationFrameBuffer->getSize()) {
        _destinationFrameBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("HalfOutput"));

        auto sampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
        auto target = gpu::Texture::createRenderBuffer(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), resampledFramebufferSize.x, resampledFramebufferSize.y, gpu::Texture::SINGLE_MIP, sampler);
        _destinationFrameBuffer->setRenderBuffer(0, target);
    }
    return _destinationFrameBuffer;
}

void HalfDownsample::run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& resampledFrameBuffer) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());
    RenderArgs* args = renderContext->args;

    resampledFrameBuffer = getResampledFrameBuffer(sourceFramebuffer);

    if (!_pipeline) {
        auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS();
        auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS();
        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

        gpu::Shader::BindingSet slotBindings;
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        state->setDepthTest(gpu::State::DepthTest(false, false));
        _pipeline = gpu::Pipeline::create(program, state);
    }

    const auto bufferSize = resampledFrameBuffer->getSize();
    glm::ivec4 viewport{ 0, 0, bufferSize.x, bufferSize.y };

    gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
        batch.enableStereo(false);

        batch.setFramebuffer(resampledFrameBuffer);

        batch.setViewportTransform(viewport);
        batch.setProjectionTransform(glm::mat4());
        batch.resetViewTransform();
        batch.setPipeline(_pipeline);

        batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(bufferSize, viewport));
        batch.setResourceTexture(0, sourceFramebuffer->getRenderBuffer(0));
        batch.draw(gpu::TRIANGLE_STRIP, 4);
    });
}
41  libraries/render/src/render/ResampleTask.h  Normal file
@ -0,0 +1,41 @@
//
//  ResampleTask.h
//  render/src/render
//
//  Various tasks to upsample or downsample textures into framebuffers.
//
//  Created by Olivier Prat on 10/09/17.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_render_ResampleTask_h
#define hifi_render_ResampleTask_h

#include "Engine.h"

namespace render {

    class HalfDownsample {
    public:
        using Config = JobConfig;
        using JobModel = Job::ModelIO<HalfDownsample, gpu::FramebufferPointer, gpu::FramebufferPointer, Config>;

        HalfDownsample();

        void configure(const Config& config);
        void run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& resampledFrameBuffer);

    protected:
        static gpu::PipelinePointer _pipeline;

        gpu::FramebufferPointer _destinationFrameBuffer;

        gpu::FramebufferPointer getResampledFrameBuffer(const gpu::FramebufferPointer& sourceFramebuffer);
    };
}

#endif // hifi_render_ResampleTask_h
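The `getResampledFrameBuffer()` logic above follows a common render-graph idiom: allocate the half-resolution target lazily and rebuild it only when the source size changes, so the expensive GPU allocation happens once rather than every frame. A minimal standalone C++ sketch of that caching pattern, using a stand-in `Framebuffer` type rather than the gpu library's:

```cpp
#include <memory>

// Stand-in for a GPU framebuffer; only the size matters for this sketch.
struct Framebuffer {
    int width = 0, height = 0;
};

// Keep the destination alive across frames; reallocate only when the
// half-resolution size no longer matches the current source.
std::shared_ptr<Framebuffer> getHalfResTarget(const Framebuffer& source,
                                              std::shared_ptr<Framebuffer>& cached) {
    int w = source.width / 2, h = source.height / 2;
    if (!cached || cached->width != w || cached->height != h) {
        cached = std::make_shared<Framebuffer>(Framebuffer{ w, h });  // rebuild once
    }
    return cached;
}
```

In the real job, `run()` then draws a full-screen textured quad from the source buffer into this cached target each frame.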
@ -171,6 +171,8 @@ public:
         _concept->setCPURunTime((double)(usecTimestampNow() - start) / 1000.0);
     }
 
+    const std::string& getName() const { return _name; }
+
 protected:
     ConceptPointer _concept;
     std::string _name = "";

@ -206,6 +208,24 @@ public:
 
     const Varying getInput() const override { return _input; }
     const Varying getOutput() const override { return _output; }
+    typename Jobs::iterator editJob(std::string name) {
+        typename Jobs::iterator jobIt;
+        for (jobIt = _jobs.begin(); jobIt != _jobs.end(); ++jobIt) {
+            if (jobIt->getName() == name) {
+                return jobIt;
+            }
+        }
+        return jobIt;
+    }
+    typename Jobs::const_iterator getJob(std::string name) const {
+        typename Jobs::const_iterator jobIt;
+        for (jobIt = _jobs.begin(); jobIt != _jobs.end(); ++jobIt) {
+            if (jobIt->getName() == name) {
+                return jobIt;
+            }
+        }
+        return jobIt;
+    }
 
     TaskConcept(const Varying& input, QConfigPointer config) : Concept(config), _input(input) {}
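Note that both `editJob()` and `getJob()` above fall out of the loop still holding `_jobs.end()` when no name matches, so callers must compare the returned iterator against `end()` before dereferencing. A self-contained sketch of the same linear lookup, with a stand-in `Job` type rather than the engine's:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Stand-in for the engine's job type; only the name lookup matters here.
struct Job {
    std::string name;
    const std::string& getName() const { return name; }
};
using Jobs = std::vector<Job>;

// Linear lookup by name; returns end() when no job matches.
Jobs::iterator editJob(Jobs& jobs, const std::string& name) {
    for (auto it = jobs.begin(); it != jobs.end(); ++it) {
        if (it->getName() == name) {
            return it;
        }
    }
    return jobs.end();
}

int main() {
    Jobs jobs{ { "BloomThreshold" }, { "DebugBloom" } };
    auto it = editJob(jobs, "DebugBloom");
    if (it != jobs.end()) {  // always check before dereferencing
        std::cout << "found " << it->getName() << "\n";
    }
}
```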
@ -1081,7 +1081,7 @@ void setMaxCores(uint8_t maxCores) {
 void quitWithParentProcess() {
     if (qApp) {
         qDebug() << "Parent process died, quitting";
-        qApp->quit();
+        exit(0);
     }
 }

@ -1113,3 +1113,57 @@ void watchParentProcess(int parentPID) {
     timer->start();
 }
 #endif
+
+#ifdef Q_OS_WIN
+QString getLastErrorAsString() {
+    DWORD errorMessageID = ::GetLastError();
+    if (errorMessageID == 0) {
+        return QString();
+    }
+
+    LPSTR messageBuffer = nullptr;
+    size_t size = FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
+                                 nullptr, errorMessageID, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPSTR)&messageBuffer, 0, nullptr);
+
+    auto message = QString::fromLocal8Bit(messageBuffer, (int)size);
+
+    // Free the buffer.
+    LocalFree(messageBuffer);
+
+    return message;
+}
+
+// All processes in the group will shut down with the process creating the group
+void* createProcessGroup() {
+    HANDLE jobObject = CreateJobObject(nullptr, nullptr);
+    if (jobObject == nullptr) {
+        qWarning() << "Could NOT create job object:" << getLastErrorAsString();
+        return nullptr;
+    }
+
+    JOBOBJECT_EXTENDED_LIMIT_INFORMATION JELI;
+    if (!QueryInformationJobObject(jobObject, JobObjectExtendedLimitInformation, &JELI, sizeof(JELI), nullptr)) {
+        qWarning() << "Could NOT query job object information" << getLastErrorAsString();
+        return nullptr;
+    }
+    JELI.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+    if (!SetInformationJobObject(jobObject, JobObjectExtendedLimitInformation, &JELI, sizeof(JELI))) {
+        qWarning() << "Could NOT set job object information" << getLastErrorAsString();
+        return nullptr;
+    }
+
+    return jobObject;
+}
+
+void addProcessToGroup(void* processGroup, qint64 processId) {
+    HANDLE hProcess = OpenProcess(PROCESS_ALL_ACCESS, FALSE, processId);
+    if (hProcess == nullptr) {
+        qCritical() << "Could NOT open process" << getLastErrorAsString();
+        return;
+    }
+    if (!AssignProcessToJobObject(processGroup, hProcess)) {
+        qCritical() << "Could NOT assign process to job object" << getLastErrorAsString();
+    }
+}
+
+#endif
@ -238,4 +238,10 @@ void setMaxCores(uint8_t maxCores);
 const QString PARENT_PID_OPTION = "parent-pid";
 void watchParentProcess(int parentPID);
 
+
+#ifdef Q_OS_WIN
+void* createProcessGroup();
+void addProcessToGroup(void* processGroup, qint64 processId);
+#endif
+
 #endif // hifi_SharedUtil_h
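Taken together with the monitor code earlier in this commit, the flow is: create one job object with `JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE` at startup, then assign every spawned assignment-client to it so the children die with the monitor. A self-contained Win32 sketch of that kill-on-close pattern (the child command line below is only a placeholder):

```cpp
#ifdef _WIN32
#include <windows.h>

int main() {
    // One job object; when its last handle closes (e.g. this process dies),
    // every process assigned to it is terminated by the kernel.
    HANDLE job = CreateJobObjectW(nullptr, nullptr);
    JOBOBJECT_EXTENDED_LIMIT_INFORMATION jeli = {};
    jeli.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
    SetInformationJobObject(job, JobObjectExtendedLimitInformation, &jeli, sizeof(jeli));

    // Spawn a child suspended, assign it, then resume: this avoids the race
    // where the child spawns grandchildren before it has joined the job.
    STARTUPINFOW si = { sizeof(si) };
    PROCESS_INFORMATION pi = {};
    wchar_t cmd[] = L"notepad.exe";  // placeholder child process
    if (CreateProcessW(nullptr, cmd, nullptr, nullptr, FALSE, CREATE_SUSPENDED,
                       nullptr, nullptr, &si, &pi)) {
        AssignProcessToJobObject(job, pi.hProcess);
        ResumeThread(pi.hThread);
        CloseHandle(pi.hThread);
        CloseHandle(pi.hProcess);
    }
    Sleep(5000);  // while this process lives, so does the child
    return 0;     // the job handle closes on exit; the child is killed
}
#endif
```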
119  scripts/developer/utilities/render/bloom.qml  Normal file
@ -0,0 +1,119 @@
//
//  bloom.qml
//  developer/utilities/render
//
//  Olivier Prat, created on 09/25/2017.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "configSlider"

Item {
    id: root
    property var config: Render.getConfig("RenderMainView.Bloom")
    property var configThreshold: Render.getConfig("RenderMainView.BloomThreshold")
    property var configDebug: Render.getConfig("RenderMainView.DebugBloom")

    Column {
        spacing: 8

        CheckBox {
            text: "Enable"
            checked: root.config["enabled"]
            onCheckedChanged: {
                root.config["enabled"] = checked;
            }
        }
        GroupBox {
            title: "Debug"
            Row {
                ExclusiveGroup { id: debugGroup }
                RadioButton {
                    text: "Off"
                    checked: !root.configDebug["enabled"]
                    onCheckedChanged: {
                        if (checked) {
                            root.configDebug["enabled"] = false
                        }
                    }
                    exclusiveGroup: debugGroup
                }
                RadioButton {
                    text: "Lvl 0"
                    checked: root.configDebug["enabled"] && root.configDebug["mode"] == 0
                    onCheckedChanged: {
                        if (checked) {
                            root.configDebug["enabled"] = true
                            root.configDebug["mode"] = 0
                        }
                    }
                    exclusiveGroup: debugGroup
                }
                RadioButton {
                    text: "Lvl 1"
                    checked: root.configDebug["enabled"] && root.configDebug["mode"] == 1
                    onCheckedChanged: {
                        if (checked) {
                            root.configDebug["enabled"] = true
                            root.configDebug["mode"] = 1
                        }
                    }
                    exclusiveGroup: debugGroup
                }
                RadioButton {
                    text: "Lvl 2"
                    checked: root.configDebug["enabled"] && root.configDebug["mode"] == 2
                    onCheckedChanged: {
                        if (checked) {
                            root.configDebug["enabled"] = true
                            root.configDebug["mode"] = 2
                        }
                    }
                    exclusiveGroup: debugGroup
                }
                RadioButton {
                    text: "All"
                    checked: root.configDebug["enabled"] && root.configDebug["mode"] == 3
                    onCheckedChanged: {
                        if (checked) {
                            root.configDebug["enabled"] = true
                            root.configDebug["mode"] = 3
                        }
                    }
                    exclusiveGroup: debugGroup
                }
            }
        }
        ConfigSlider {
            label: "Intensity"
            integral: false
            config: root.config
            property: "intensity"
            max: 5.0
            min: 0.0
            width: 280
        }
        ConfigSlider {
            label: "Size"
            integral: false
            config: root.config
            property: "size"
            max: 1.0
            min: 0.0
            width: 280
        }
        ConfigSlider {
            label: "Threshold"
            integral: false
            config: root.configThreshold
            property: "threshold"
            max: 2.0
            min: 0.0
            width: 280
        }
    }
}
20  scripts/developer/utilities/render/debugBloom.js  Normal file
@ -0,0 +1,20 @@
//
//  debugBloom.js
//  developer/utilities/render
//
//  Olivier Prat, created on 09/25/2017.
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// Set up the qml ui
var qml = Script.resolvePath('bloom.qml');
var window = new OverlayWindow({
    title: 'Bloom',
    source: qml,
    width: 285,
    height: 170
});
window.closed.connect(function() { Script.stop(); });
@ -389,7 +389,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
     Controller.enableMapping(MAPPING_NAME);
 
     this.leftControllerRayPick = RayPick.createRayPick({
-        joint: "_CONTROLLER_LEFTHAND",
+        joint: "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND",
         filter: RayPick.PICK_ENTITIES | RayPick.PICK_OVERLAYS,
         enabled: true,
         maxDistance: DEFAULT_SEARCH_SPHERE_DISTANCE,

@ -403,7 +403,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
         posOffset: getGrabPointSphereOffset(Controller.Standard.LeftHand, true)
     });
     this.rightControllerRayPick = RayPick.createRayPick({
-        joint: "_CONTROLLER_RIGHTHAND",
+        joint: "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND",
         filter: RayPick.PICK_ENTITIES | RayPick.PICK_OVERLAYS,
         enabled: true,
         maxDistance: DEFAULT_SEARCH_SPHERE_DISTANCE,
@ -362,7 +362,7 @@ Script.include("/~/system/libraries/controllers.js");
     };
 
     this.laserPointer = LaserPointers.createLaserPointer({
-        joint: (this.hand === RIGHT_HAND) ? "_CONTROLLER_RIGHTHAND" : "_CONTROLLER_LEFTHAND",
+        joint: (this.hand === RIGHT_HAND) ? "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND" : "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND",
         filter: RayPick.PICK_OVERLAYS,
         maxDistance: PICK_MAX_DISTANCE,
         posOffset: getGrabPointSphereOffset(this.handToController(), true),
@ -63,6 +63,7 @@
 
         var newAvatarScale = (scalingCurrentDistance / this.scalingStartDistance) * this.scalingStartAvatarScale;
         MyAvatar.scale = newAvatarScale;
+        MyAvatar.scaleChanged();
     }
     return dispatcherUtils.makeRunningValues(true, [], []);
 }
@ -1424,24 +1424,29 @@ function deleteSelectedEntities() {
     for (var i = 0; i < newSortedSelection.length; i++) {
         var entityID = newSortedSelection[i];
         var initialProperties = SelectionManager.savedProperties[entityID];
-        var children = Entities.getChildrenIDs(entityID);
-        var childList = [];
-        recursiveDelete(children, childList, deletedIDs);
-        savedProperties.push({
-            entityID: entityID,
-            properties: initialProperties,
-            children: childList
-        });
-        deletedIDs.push(entityID);
-        Entities.deleteEntity(entityID);
+        if (!initialProperties.locked) {
+            var children = Entities.getChildrenIDs(entityID);
+            var childList = [];
+            recursiveDelete(children, childList, deletedIDs);
+            savedProperties.push({
+                entityID: entityID,
+                properties: initialProperties,
+                children: childList
+            });
+            deletedIDs.push(entityID);
+            Entities.deleteEntity(entityID);
+        }
     }
-    SelectionManager.clearSelections();
-    pushCommandForSelections([], savedProperties);
-
-    entityListTool.webView.emitScriptEvent(JSON.stringify({
-        type: "deleted",
-        ids: deletedIDs
-    }));
+    if (savedProperties.length > 0) {
+        SelectionManager.clearSelections();
+        pushCommandForSelections([], savedProperties);
+
+        entityListTool.webView.emitScriptEvent(JSON.stringify({
+            type: "deleted",
+            ids: deletedIDs
+        }));
+    }
 }
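The reshaped loop above adds two guards: locked entities are skipped entirely, and the undo command plus the "deleted" event are only emitted when something was actually removed. The same collect-then-commit shape in a standalone C++ sketch (a stand-in `Entity` type with hypothetical IDs, not the entity API):

```cpp
#include <algorithm>
#include <string>
#include <vector>

// Stand-in entity: deletable unless locked.
struct Entity {
    std::string id;
    bool locked = false;
};

// Remove unlocked entities, returning the IDs that were actually deleted;
// the caller records an undo step only when this list is non-empty.
std::vector<std::string> deleteSelected(std::vector<Entity>& entities) {
    std::vector<std::string> deletedIDs;
    entities.erase(std::remove_if(entities.begin(), entities.end(),
        [&deletedIDs](const Entity& e) {
            if (e.locked) {
                return false;  // locked entities survive the delete
            }
            deletedIDs.push_back(e.id);
            return true;
        }), entities.end());
    return deletedIDs;
}
```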
@ -318,6 +318,15 @@
     });
 }
 
+function injectUnfocusOnSearch() {
+    // unfocus input field on search, thus hiding virtual keyboard
+    $('#search-box').on('submit', function () {
+        if (document.activeElement) {
+            document.activeElement.blur();
+        }
+    });
+}
+
 function injectHiFiCode() {
     if (commerceMode) {
         maybeAddLogInButton();

@ -347,6 +356,8 @@
             maybeAddPurchasesButton();
         }
     }
+
+    injectUnfocusOnSearch();
 }
 
 function injectHiFiItemPageCode() {

@ -386,6 +397,8 @@
             maybeAddPurchasesButton();
         }
     }
+
+    injectUnfocusOnSearch();
 }
 
 function updateClaraCode() {
@ -155,8 +155,7 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) {
         localRotation: { x: 0, y: 1, z: 0, w: 0 },
         dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor },
         solid: true,
-        outerRadius: 25 * tabletScaleFactor,
-        innerRadius: 20 * tabletScaleFactor,
+        innerRadius: 0.9,
         ignoreIntersection: true,
         alpha: 1.0,
         color: { red: 255, green: 255, blue: 255 },
@ -409,15 +409,16 @@ resizeTablet = function (width, newParentJointIndex, sensorToWorldScaleOverride) {
 
     // update homeButton
     var HOME_BUTTON_Y_OFFSET = ((tabletHeight / 2) - (tabletHeight / 20)) * sensorScaleOffsetOverride;
+    var homeButtonDim = 4 * tabletScaleFactor;
     Overlays.editOverlay(HMD.homeButtonID, {
         localPosition: {x: -0.001, y: -HOME_BUTTON_Y_OFFSET, z: 0.0},
-        dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor}
+        dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim}
     });
 
+    // Circle3D overlays render at 1.5x their proper dimensions
+    var highlightDim = homeButtonDim / 3.0;
     Overlays.editOverlay(HMD.homeButtonHighlightID, {
         localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET + 0.003, z: -0.0158 },
-        dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor },
-        outerRadius: 25 * tabletScaleFactor,
-        innerRadius: 20 * tabletScaleFactor
+        dimensions: { x: highlightDim, y: highlightDim, z: highlightDim }
     });
 };
@ -210,11 +210,11 @@
         notificationOrientation,
         notificationPosition,
         buttonPosition;
-    var sensorScaleFactor = MyAvatar.sensorToWorldScale;
+    var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
     // Notification plane positions
-    noticeY = -y * NOTIFICATION_3D_SCALE - noticeHeight / 2;
+    noticeY = -sensorScaleFactor * (y * NOTIFICATION_3D_SCALE + 0.5 * noticeHeight);
     notificationPosition = { x: 0, y: noticeY, z: 0 };
-    buttonPosition = { x: (noticeWidth - NOTIFICATION_3D_BUTTON_WIDTH) / 2, y: noticeY, z: 0.001 };
+    buttonPosition = { x: 0.5 * sensorScaleFactor * (noticeWidth - NOTIFICATION_3D_BUTTON_WIDTH), y: noticeY, z: 0.001 };
 
     // Rotate plane
     notificationOrientation = Quat.fromPitchYawRollDegrees(NOTIFICATIONS_3D_PITCH,

@ -245,7 +245,7 @@
         noticeHeight,
         positions,
         last;
-    var sensorScaleFactor = MyAvatar.sensorToWorldScale;
+    var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
     if (isOnHMD) {
         // Calculate 3D values from 2D overlay properties.

@ -369,7 +369,7 @@
         buttonProperties,
         i;
 
-    var sensorScaleFactor = MyAvatar.sensorToWorldScale;
+    var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
     if (text.length >= breakPoint) {
         breaks = count;
     }

@ -453,7 +453,7 @@
     }
 
     function updateNotificationsTexts() {
-        var sensorScaleFactor = MyAvatar.sensorToWorldScale;
+        var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
         for (var i = 0; i < notifications.length; i++) {
             var overlayType = Overlays.getOverlayType(notifications[i]);
@ -482,14 +482,23 @@ HifiEntityUI.prototype = {
         textureImage.className = "texture-image no-texture";
         var image = document.createElement("img");
         var imageLoad = _.debounce(function (url) {
-            if (url.length > 0) {
+            if (url.slice(0, 5).toLowerCase() === "atp:/") {
+                image.src = "";
+                image.style.display = "none";
+                textureImage.classList.remove("with-texture");
+                textureImage.classList.remove("no-texture");
+                textureImage.classList.add("no-preview");
+            } else if (url.length > 0) {
                 textureImage.classList.remove("no-texture");
+                textureImage.classList.remove("no-preview");
                 textureImage.classList.add("with-texture");
                 image.src = url;
                 image.style.display = "block";
             } else {
                 image.src = "";
                 image.style.display = "none";
                 textureImage.classList.remove("with-texture");
+                textureImage.classList.remove("no-preview");
                 textureImage.classList.add("no-texture");
             }
             self.webBridgeSync(group.id, url);
File diff suppressed because one or more lines are too long