diff --git a/README.md b/README.md index 6294981e9a..e0bbed3105 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,10 @@ Documentation is available at [docs.highfidelity.com](https://docs.highfidelity. There is also detailed [documentation on our coding standards](https://wiki.highfidelity.com/wiki/Coding_Standards). +Contributor License Agreement (CLA) +========= +Technology companies frequently receive and use code from contributors outside the company's development team. Outside code can be a tremendous resource, but it also carries responsibility. Best practice for accepting outside contributions consists of an Apache-type Contributor License Agreement (CLA). We have modeled the High Fidelity CLA after the CLA that Google presents to developers for contributions to their projects. This CLA does not transfer ownership of code, instead simply granting a non-exclusive right for High Fidelity to use the code you’ve contributed. In that regard, you should be sure you have permission if the work relates to or uses the resources of a company that you work for. You will be asked to sign our CLA when you create your first PR or when the CLA is updated. You can also [review it here](https://gist.githubusercontent.com/hifi-gustavo/fef8f06a8233d42a0040d45c3efb97a9/raw/9981827eb94f0b18666083670b6f6a02929fb402/High%2520Fidelity%2520CLA). We sincerely appreciate your contribution and efforts toward the success of the platform. + Build Instructions ========= All information required to build is found in the [build guide](BUILD.md). 
diff --git a/assignment-client/src/AssignmentClientMonitor.cpp b/assignment-client/src/AssignmentClientMonitor.cpp index 5539d6a0bb..1868ccfafe 100644 --- a/assignment-client/src/AssignmentClientMonitor.cpp +++ b/assignment-client/src/AssignmentClientMonitor.cpp @@ -28,6 +28,10 @@ const QString ASSIGNMENT_CLIENT_MONITOR_TARGET_NAME = "assignment-client-monitor"; const int WAIT_FOR_CHILD_MSECS = 1000; +#ifdef Q_OS_WIN +HANDLE PROCESS_GROUP = createProcessGroup(); +#endif + AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmentClientForks, const unsigned int minAssignmentClientForks, const unsigned int maxAssignmentClientForks, @@ -202,6 +206,10 @@ void AssignmentClientMonitor::spawnChildClient() { assignmentClient->setProcessChannelMode(QProcess::ForwardedChannels); assignmentClient->start(QCoreApplication::applicationFilePath(), _childArguments); +#ifdef Q_OS_WIN + addProcessToGroup(PROCESS_GROUP, assignmentClient->processId()); +#endif + QString stdoutPath, stderrPath; if (_wantsChildFileLogging) { diff --git a/assignment-client/src/entities/EntityTreeSendThread.cpp b/assignment-client/src/entities/EntityTreeSendThread.cpp index 11e4d533fb..e5cee84f1b 100644 --- a/assignment-client/src/entities/EntityTreeSendThread.cpp +++ b/assignment-client/src/entities/EntityTreeSendThread.cpp @@ -23,6 +23,17 @@ EntityTreeSendThread::EntityTreeSendThread(OctreeServer* myServer, const SharedN { connect(std::static_pointer_cast(myServer->getOctree()).get(), &EntityTree::editingEntityPointer, this, &EntityTreeSendThread::editingEntityPointer, Qt::QueuedConnection); connect(std::static_pointer_cast(myServer->getOctree()).get(), &EntityTree::deletingEntityPointer, this, &EntityTreeSendThread::deletingEntityPointer, Qt::QueuedConnection); + + // connect to connection ID change on EntityNodeData so we can clear state for this receiver + auto nodeData = static_cast(node->getLinkedData()); + connect(nodeData, &EntityNodeData::incomingConnectionIDChanged, this, 
&EntityTreeSendThread::resetState); +} + +void EntityTreeSendThread::resetState() { + qCDebug(entities) << "Clearing known EntityTreeSendThread state for" << _nodeUuid; + + _knownState.clear(); + _traversal.reset(); } void EntityTreeSendThread::preDistributionProcessing() { diff --git a/assignment-client/src/entities/EntityTreeSendThread.h b/assignment-client/src/entities/EntityTreeSendThread.h index a96a18494d..594f423838 100644 --- a/assignment-client/src/entities/EntityTreeSendThread.h +++ b/assignment-client/src/entities/EntityTreeSendThread.h @@ -33,6 +33,9 @@ protected: void traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene) override; +private slots: + void resetState(); // clears our known state forcing entities to appear unsent + private: // the following two methods return booleans to indicate if any extra flagged entities were new additions to set bool addAncestorsToExtraFlaggedEntities(const QUuid& filteredEntityID, EntityItem& entityItem, EntityNodeData& nodeData); diff --git a/assignment-client/src/octree/OctreeSendThread.cpp b/assignment-client/src/octree/OctreeSendThread.cpp index 89e3d403fc..3ae653307f 100644 --- a/assignment-client/src/octree/OctreeSendThread.cpp +++ b/assignment-client/src/octree/OctreeSendThread.cpp @@ -82,8 +82,12 @@ bool OctreeSendThread::process() { if (auto node = _node.lock()) { OctreeQueryNode* nodeData = static_cast(node->getLinkedData()); - // Sometimes the node data has not yet been linked, in which case we can't really do anything - if (nodeData && !nodeData->isShuttingDown()) { + // If we don't have the OctreeQueryNode at all + // or it's uninitialized because we haven't received a query yet from the client + // or we don't know where we should send packets for this node + // or we're shutting down + // then we can't send an entity data packet + if (nodeData && nodeData->hasReceivedFirstQuery() && node->getActiveSocket() && 
!nodeData->isShuttingDown()) { bool viewFrustumChanged = nodeData->updateCurrentViewFrustum(); packetDistributor(node, nodeData, viewFrustumChanged); } diff --git a/assignment-client/src/octree/OctreeSendThread.h b/assignment-client/src/octree/OctreeSendThread.h index bc7d2c2588..220952e209 100644 --- a/assignment-client/src/octree/OctreeSendThread.h +++ b/assignment-client/src/octree/OctreeSendThread.h @@ -59,7 +59,8 @@ protected: OctreePacketData _packetData; QWeakPointer _node; OctreeServer* _myServer { nullptr }; - + QUuid _nodeUuid; + private: /// Called before a packetDistributor pass to allow for pre-distribution processing virtual void preDistributionProcessing() {}; @@ -71,8 +72,6 @@ private: virtual void preStartNewScene(OctreeQueryNode* nodeData, bool isFullScene); virtual bool shouldTraverseAndSend(OctreeQueryNode* nodeData) { return hasSomethingToSend(nodeData); } - QUuid _nodeUuid; - int _truePacketsSent { 0 }; // available for debug stats int _trueBytesSent { 0 }; // available for debug stats int _packetsSentThisInterval { 0 }; // used for bandwidth throttle condition diff --git a/interface/resources/qml/AddressBarDialog.qml b/interface/resources/qml/AddressBarDialog.qml deleted file mode 100644 index 60d2bacc62..0000000000 --- a/interface/resources/qml/AddressBarDialog.qml +++ /dev/null @@ -1,532 +0,0 @@ -// -// AddressBarDialog.qml -// -// Created by Austin Davis on 2015/04/14 -// Copyright 2015 High Fidelity, Inc. -// -// Distributed under the Apache License, Version 2.0. 
-// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html -// - -import Hifi 1.0 -import QtQuick 2.4 -import "controls" -import "styles" -import "windows" -import "hifi" -import "hifi/toolbars" -import "styles-uit" as HifiStyles -import "controls-uit" as HifiControls - -Window { - id: root - HifiConstants { id: hifi } - HifiStyles.HifiConstants { id: hifiStyleConstants } - - objectName: "AddressBarDialog" - title: "Go To:" - - shown: false - destroyOnHidden: false - resizable: false - pinnable: false; - - width: addressBarDialog.implicitWidth - height: addressBarDialog.implicitHeight - property int gap: 14 - - onShownChanged: { - addressBarDialog.keyboardEnabled = HMD.active; - addressBarDialog.observeShownChanged(shown); - } - Component.onCompleted: { - root.parentChanged.connect(center); - center(); - } - Component.onDestruction: { - root.parentChanged.disconnect(center); - } - - function center() { - // Explicitly center in order to avoid warnings at shutdown - anchors.centerIn = parent; - } - - function resetAfterTeleport() { - storyCardFrame.shown = root.shown = false; - } - function goCard(targetString) { - if (0 !== targetString.indexOf('hifi://')) { - storyCardHTML.url = addressBarDialog.metaverseServerUrl + targetString; - storyCardFrame.shown = true; - return; - } - addressLine.text = targetString; - toggleOrGo(true); - clearAddressLineTimer.start(); - } - property var allStories: []; - property int cardWidth: 212; - property int cardHeight: 152; - property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/"; - property bool isCursorVisible: false // Override default cursor visibility. - - AddressBarDialog { - id: addressBarDialog - - property bool keyboardEnabled: false - property bool keyboardRaised: false - property bool punctuationMode: false - - implicitWidth: backgroundImage.width - implicitHeight: scroll.height + gap + backgroundImage.height + (keyboardEnabled ? 
keyboard.height : 0); - - // The buttons have their button state changed on hover, so we have to manually fix them up here - onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0; - onForwardEnabledChanged: forwardArrow.buttonState = addressBarDialog.forwardEnabled ? 1 : 0; - onReceivedHifiSchemeURL: resetAfterTeleport(); - - // Update location after using back and forward buttons. - onHostChanged: updateLocationTextTimer.start(); - - ListModel { id: suggestions } - - ListView { - id: scroll - height: cardHeight + scroll.stackedCardShadowHeight - property int stackedCardShadowHeight: 10; - spacing: gap; - clip: true; - anchors { - left: backgroundImage.left - right: swipe.left - bottom: backgroundImage.top - } - model: suggestions; - orientation: ListView.Horizontal; - delegate: Card { - width: cardWidth; - height: cardHeight; - goFunction: goCard; - userName: model.username; - placeName: model.place_name; - hifiUrl: model.place_name + model.path; - thumbnail: model.thumbnail_url; - imageUrl: model.image_url; - action: model.action; - timestamp: model.created_at; - onlineUsers: model.online_users; - storyId: model.metaverseId; - drillDownToPlace: model.drillDownToPlace; - shadowHeight: scroll.stackedCardShadowHeight; - hoverThunk: function () { ListView.view.currentIndex = index; } - unhoverThunk: function () { ListView.view.currentIndex = -1; } - } - highlightMoveDuration: -1; - highlightMoveVelocity: -1; - highlight: Rectangle { color: "transparent"; border.width: 4; border.color: hifiStyleConstants.colors.blueHighlight; z: 1; } - } - Image { // Just a visual indicator that the user can swipe the cards over to see more. - id: swipe; - source: "../images/swipe-chevron.svg"; - width: 72; - visible: suggestions.count > 3; - anchors { - right: backgroundImage.right; - top: scroll.top; - } - MouseArea { - anchors.fill: parent - onClicked: scroll.currentIndex = (scroll.currentIndex < 0) ? 
3 : (scroll.currentIndex + 3) - } - } - - Row { - spacing: 2 * hifi.layout.spacing; - anchors { - top: parent.top; - left: parent.left; - leftMargin: 150; - topMargin: -30; - } - property var selected: allTab; - TextButton { - id: allTab; - text: "ALL"; - property string includeActions: 'snapshot,concurrency'; - selected: allTab === selectedTab; - action: tabSelect; - } - TextButton { - id: placeTab; - text: "PLACES"; - property string includeActions: 'concurrency'; - selected: placeTab === selectedTab; - action: tabSelect; - } - TextButton { - id: snapsTab; - text: "SNAPS"; - property string includeActions: 'snapshot'; - selected: snapsTab === selectedTab; - action: tabSelect; - } - } - - Image { - id: backgroundImage - source: "../images/address-bar-856.svg" - width: 856 - height: 100 - anchors { - bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom; - } - property int inputAreaHeight: 70 - property int inputAreaStep: (height - inputAreaHeight) / 2 - - ToolbarButton { - id: homeButton - imageURL: "../images/home.svg" - onClicked: { - addressBarDialog.loadHome(); - root.shown = false; - } - anchors { - left: parent.left - leftMargin: homeButton.width / 2 - verticalCenter: parent.verticalCenter - } - } - - ToolbarButton { - id: backArrow; - imageURL: "../images/backward.svg"; - onClicked: addressBarDialog.loadBack(); - anchors { - left: homeButton.right - verticalCenter: parent.verticalCenter - } - } - ToolbarButton { - id: forwardArrow; - imageURL: "../images/forward.svg"; - onClicked: addressBarDialog.loadForward(); - anchors { - left: backArrow.right - verticalCenter: parent.verticalCenter - } - } - - HifiStyles.RalewayLight { - id: notice; - font.pixelSize: hifi.fonts.pixelSize * 0.50; - anchors { - top: parent.top - topMargin: parent.inputAreaStep + 12 - left: addressLine.left - right: addressLine.right - } - } - HifiStyles.FiraSansRegular { - id: location; - font.pixelSize: addressLine.font.pixelSize; - color: "gray"; - clip: true; - anchors.fill: 
addressLine; - visible: addressLine.text.length === 0 - } - TextInput { - id: addressLine - focus: true - anchors { - top: parent.top - bottom: parent.bottom - left: forwardArrow.right - right: parent.right - leftMargin: forwardArrow.width - rightMargin: forwardArrow.width / 2 - topMargin: parent.inputAreaStep + (2 * hifi.layout.spacing) - bottomMargin: parent.inputAreaStep - } - font.pixelSize: hifi.fonts.pixelSize * 0.75 - cursorVisible: false - onTextChanged: { - filterChoicesByText(); - updateLocationText(text.length > 0); - if (!isCursorVisible && text.length > 0) { - isCursorVisible = true; - cursorVisible = true; - } - } - onActiveFocusChanged: { - cursorVisible = isCursorVisible && focus; - } - MouseArea { - // If user clicks in address bar show cursor to indicate ability to enter address. - anchors.fill: parent - onClicked: { - isCursorVisible = true; - parent.cursorVisible = true; - parent.forceActiveFocus(); - } - } - } - } - - Timer { - // Delay updating location text a bit to avoid flicker of content and so that connection status is valid. - id: updateLocationTextTimer - running: false - interval: 500 // ms - repeat: false - onTriggered: updateLocationText(false); - } - - Timer { - // Delay clearing address line so as to avoid flicker of "not connected" being displayed after entering an address. - id: clearAddressLineTimer - running: false - interval: 100 // ms - repeat: false - onTriggered: { - addressLine.text = ""; - isCursorVisible = false; - } - } - - Window { - width: 938 - height: 625 - HifiControls.WebView { - anchors.fill: parent; - id: storyCardHTML; - } - id: storyCardFrame; - - shown: false; - destroyOnCloseButton: false; - pinnable: false; - - anchors { - verticalCenter: backgroundImage.verticalCenter; - horizontalCenter: scroll.horizontalCenter; - } - z: 100 - } - - HifiControls.Keyboard { - id: keyboard - raised: parent.keyboardEnabled // Ignore keyboardRaised; keep keyboard raised if enabled (i.e., in HMD). 
- numeric: parent.punctuationMode - anchors { - bottom: parent.bottom - left: parent.left - right: parent.right - } - } - } - - function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects. - // TODO: make available to other .qml. - var request = new XMLHttpRequest(); - // QT bug: apparently doesn't handle onload. Workaround using readyState. - request.onreadystatechange = function () { - var READY_STATE_DONE = 4; - var HTTP_OK = 200; - if (request.readyState >= READY_STATE_DONE) { - var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText, - response = !error && request.responseText, - contentType = !error && request.getResponseHeader('content-type'); - if (!error && contentType.indexOf('application/json') === 0) { - try { - response = JSON.parse(response); - } catch (e) { - error = e; - } - } - cb(error, response); - } - }; - request.open("GET", url, true); - request.send(); - } - - function identity(x) { - return x; - } - - function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey - if (!error && (data.status === 'success')) { - return; - } - if (!error) { // Create a message from the data - error = data.status + ': ' + data.error; - } - if (typeof(error) === 'string') { // Make a proper Error object - error = new Error(error); - } - error.message += ' in ' + url; // Include the url. - cb(error); - return true; - } - function resolveUrl(url) { - return (url.indexOf('/') === 0) ? (addressBarDialog.metaverseServerUrl + url) : url; - } - - function makeModelData(data) { // create a new obj from data - // ListModel elements will only ever have those properties that are defined by the first obj that is added. - // So here we make sure that we have all the properties we need, regardless of whether it is a place data or user story. 
- var name = data.place_name, - tags = data.tags || [data.action, data.username], - description = data.description || "", - thumbnail_url = data.thumbnail_url || ""; - return { - place_name: name, - username: data.username || "", - path: data.path || "", - created_at: data.created_at || "", - action: data.action || "", - thumbnail_url: resolveUrl(thumbnail_url), - image_url: resolveUrl(data.details.image_url), - - metaverseId: (data.id || "").toString(), // Some are strings from server while others are numbers. Model objects require uniformity. - - tags: tags, - description: description, - online_users: data.details.concurrency || 0, - drillDownToPlace: false, - - searchText: [name].concat(tags, description || []).join(' ').toUpperCase() - } - } - function suggestable(place) { - if (place.action === 'snapshot') { - return true; - } - return (place.place_name !== AddressManager.placename); // Not our entry, but do show other entry points to current domain. - } - property var selectedTab: allTab; - function tabSelect(textButton) { - selectedTab = textButton; - fillDestinations(); - } - property var placeMap: ({}); - function addToSuggestions(place) { - var collapse = allTab.selected && (place.action !== 'concurrency'); - if (collapse) { - var existing = placeMap[place.place_name]; - if (existing) { - existing.drillDownToPlace = true; - return; - } - } - suggestions.append(place); - if (collapse) { - placeMap[place.place_name] = suggestions.get(suggestions.count - 1); - } else if (place.action === 'concurrency') { - suggestions.get(suggestions.count - 1).drillDownToPlace = true; // Don't change raw place object (in allStories). - } - } - property int requestId: 0; - function getUserStoryPage(pageNumber, cb) { // cb(error) after all pages of domain data have been added to model - var options = [ - 'now=' + new Date().toISOString(), - 'include_actions=' + selectedTab.includeActions, - 'restriction=' + (Account.isLoggedIn() ? 
'open,hifi' : 'open'), - 'require_online=true', - 'protocol=' + encodeURIComponent(AddressManager.protocolVersion()), - 'page=' + pageNumber - ]; - var url = metaverseBase + 'user_stories?' + options.join('&'); - var thisRequestId = ++requestId; - getRequest(url, function (error, data) { - if ((thisRequestId !== requestId) || handleError(url, error, data, cb)) { - return; - } - var stories = data.user_stories.map(function (story) { // explicit single-argument function - return makeModelData(story, url); - }); - allStories = allStories.concat(stories); - stories.forEach(makeFilteredPlaceProcessor()); - if ((data.current_page < data.total_pages) && (data.current_page <= 10)) { // just 10 pages = 100 stories for now - return getUserStoryPage(pageNumber + 1, cb); - } - cb(); - }); - } - function makeFilteredPlaceProcessor() { // answer a function(placeData) that adds it to suggestions if it matches - var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity), - data = allStories; - function matches(place) { - if (!words.length) { - return suggestable(place); - } - return words.every(function (word) { - return place.searchText.indexOf(word) >= 0; - }); - } - return function (place) { - if (matches(place)) { - addToSuggestions(place); - } - }; - } - function filterChoicesByText() { - suggestions.clear(); - placeMap = {}; - allStories.forEach(makeFilteredPlaceProcessor()); - } - - function fillDestinations() { - allStories = []; - suggestions.clear(); - placeMap = {}; - getUserStoryPage(1, function (error) { - console.log('user stories query', error || 'ok', allStories.length); - }); - } - - function updateLocationText(enteringAddress) { - if (enteringAddress) { - notice.text = "Go to a place, @user, path or network address"; - notice.color = hifiStyleConstants.colors.baseGrayHighlight; - } else { - notice.text = AddressManager.isConnected ? "Your location:" : "Not Connected"; - notice.color = AddressManager.isConnected ? 
hifiStyleConstants.colors.baseGrayHighlight : hifiStyleConstants.colors.redHighlight; - // Display hostname, which includes ip address, localhost, and other non-placenames. - location.text = (AddressManager.placename || AddressManager.hostname || '') + (AddressManager.pathname ? AddressManager.pathname.match(/\/[^\/]+/)[0] : ''); - } - } - - onVisibleChanged: { - updateLocationText(false); - if (visible) { - addressLine.forceActiveFocus(); - fillDestinations(); - } - } - - function toggleOrGo(fromSuggestions) { - if (addressLine.text !== "") { - addressBarDialog.loadAddress(addressLine.text, fromSuggestions) - } - root.shown = false; - } - - Keys.onPressed: { - switch (event.key) { - case Qt.Key_Escape: - case Qt.Key_Back: - root.shown = false - clearAddressLineTimer.start(); - event.accepted = true - break - case Qt.Key_Enter: - case Qt.Key_Return: - toggleOrGo() - clearAddressLineTimer.start(); - event.accepted = true - break - } - } -} diff --git a/interface/resources/qml/hifi/commerce/checkout/Checkout.qml b/interface/resources/qml/hifi/commerce/checkout/Checkout.qml index 3e7c23f2dd..6c4e020694 100644 --- a/interface/resources/qml/hifi/commerce/checkout/Checkout.qml +++ b/interface/resources/qml/hifi/commerce/checkout/Checkout.qml @@ -476,7 +476,9 @@ Rectangle { commerce.buy(itemId, itemPrice, true); } } else { - sendToScript({method: 'checkout_rezClicked', itemHref: root.itemHref, isWearable: root.isWearable}); + if (urlHandler.canHandleUrl(itemHref)) { + urlHandler.handleUrl(itemHref); + } } } } @@ -594,9 +596,7 @@ Rectangle { anchors.right: parent.right; text: root.isWearable ? 
"Wear It" : "Rez It" onClicked: { - if (urlHandler.canHandleUrl(root.itemHref)) { - urlHandler.handleUrl(root.itemHref); - } + sendToScript({method: 'checkout_rezClicked', itemHref: root.itemHref, isWearable: root.isWearable}); rezzedNotifContainer.visible = true; rezzedNotifContainerTimer.start(); } diff --git a/interface/resources/qml/hifi/commerce/purchases/Purchases.qml b/interface/resources/qml/hifi/commerce/purchases/Purchases.qml index 1ea488ac98..4c10bdc097 100644 --- a/interface/resources/qml/hifi/commerce/purchases/Purchases.qml +++ b/interface/resources/qml/hifi/commerce/purchases/Purchases.qml @@ -640,7 +640,8 @@ Rectangle { if (purchasesModel.get(i).title.toLowerCase().indexOf(filterBar.text.toLowerCase()) !== -1) { if (purchasesModel.get(i).status !== "confirmed" && !root.isShowingMyItems) { filteredPurchasesModel.insert(0, purchasesModel.get(i)); - } else if ((root.isShowingMyItems && purchasesModel.get(i).edition_number === -1) || !root.isShowingMyItems) { + } else if ((root.isShowingMyItems && purchasesModel.get(i).edition_number === "0") || + (!root.isShowingMyItems && purchasesModel.get(i).edition_number !== "0")) { filteredPurchasesModel.append(purchasesModel.get(i)); } } diff --git a/interface/resources/qml/hifi/commerce/wallet/WalletSetup.qml b/interface/resources/qml/hifi/commerce/wallet/WalletSetup.qml index 99fe933bd6..1a62fe6f0d 100644 --- a/interface/resources/qml/hifi/commerce/wallet/WalletSetup.qml +++ b/interface/resources/qml/hifi/commerce/wallet/WalletSetup.qml @@ -53,8 +53,6 @@ Item { onWalletAuthenticatedStatusResult: { if (isAuthenticated) { root.activeView = "step_4"; - } else { - root.activeView = "step_3"; } } diff --git a/interface/resources/qml/hifi/tablet/TabletAddressDialog.qml b/interface/resources/qml/hifi/tablet/TabletAddressDialog.qml index 649a8e6259..6aa3b8e7fe 100644 --- a/interface/resources/qml/hifi/tablet/TabletAddressDialog.qml +++ b/interface/resources/qml/hifi/tablet/TabletAddressDialog.qml @@ -11,6 +11,7 @@ 
import Hifi 1.0 import QtQuick 2.5 import QtQuick.Controls 1.4 +import QtQuick.Controls.Styles 1.4 import QtGraphicalEffects 1.0 import "../../controls" import "../../styles" @@ -83,7 +84,6 @@ StackView { anchors.centerIn = parent; } - function resetAfterTeleport() { //storyCardFrame.shown = root.shown = false; } @@ -134,7 +134,8 @@ StackView { bottom: parent.bottom } - onHostChanged: updateLocationTextTimer.start(); + onHostChanged: updateLocationTextTimer.restart(); + Rectangle { id: navBar width: parent.width @@ -205,16 +206,16 @@ StackView { anchors { top: parent.top; left: addressLineContainer.left; - right: addressLineContainer.right; } } HifiStyles.FiraSansRegular { id: location; anchors { - left: addressLineContainer.left; - leftMargin: 8; - verticalCenter: addressLineContainer.verticalCenter; + left: notice.right + leftMargin: 8 + right: addressLineContainer.right + verticalCenter: notice.verticalCenter } font.pixelSize: addressLine.font.pixelSize; color: "gray"; @@ -222,7 +223,7 @@ StackView { visible: addressLine.text.length === 0 } - TextInput { + TextField { id: addressLine width: addressLineContainer.width - addressLineContainer.anchors.leftMargin - addressLineContainer.anchors.rightMargin; anchors { @@ -230,7 +231,6 @@ StackView { leftMargin: 8; verticalCenter: addressLineContainer.verticalCenter; } - font.pixelSize: hifi.fonts.pixelSize * 0.75 onTextChanged: { updateLocationText(text.length > 0); } @@ -238,6 +238,17 @@ StackView { addressBarDialog.keyboardEnabled = false; toggleOrGo(); } + placeholderText: "Type domain address here" + verticalAlignment: TextInput.AlignBottom + style: TextFieldStyle { + textColor: hifi.colors.text + placeholderTextColor: "gray" + font { + family: hifi.fonts.fontFamily + pixelSize: hifi.fonts.pixelSize * 0.75 + } + background: Item {} + } } Rectangle { @@ -347,7 +358,7 @@ StackView { // Delay updating location text a bit to avoid flicker of content and so that connection status is valid. 
id: updateLocationTextTimer running: false - interval: 500 // ms + interval: 1000 // ms repeat: false onTriggered: updateLocationText(false); } diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index bd31fc398e..2624aa376f 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -1392,7 +1392,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo // Make sure we don't time out during slow operations at startup updateHeartbeat(); - QTimer* settingsTimer = new QTimer(); moveToNewNamedThread(settingsTimer, "Settings Thread", [this, settingsTimer]{ connect(qApp, &Application::beforeAboutToQuit, [this, settingsTimer]{ @@ -1700,8 +1699,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo lastLeftHandPose = leftHandPose; lastRightHandPose = rightHandPose; - properties["local_socket_changes"] = DependencyManager::get()->getStat(LOCAL_SOCKET_CHANGE_STAT).toInt(); - UserActivityLogger::getInstance().logAction("stats", properties); }); sendStatsTimer->start(); @@ -1825,6 +1822,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo // Preload Tablet sounds DependencyManager::get()->preloadSounds(); + _pendingIdleEvent = false; + _pendingRenderEvent = false; + qCDebug(interfaceapp) << "Metaverse session ID is" << uuidStringWithoutCurlyBraces(accountManager->getSessionID()); } @@ -4481,8 +4481,11 @@ void Application::resetPhysicsReadyInformation() { void Application::reloadResourceCaches() { resetPhysicsReadyInformation(); + // Query the octree to refresh everything in view _lastQueriedTime = 0; + _octreeQuery.incrementConnectionID(); + queryOctree(NodeType::EntityServer, PacketType::EntityQuery, _entityServerJurisdictions); DependencyManager::get()->clearCache(); @@ -5543,6 +5546,7 @@ void Application::nodeActivated(SharedNodePointer node) { // so we will do a proper query during update if (node->getType() == NodeType::EntityServer) { 
_lastQueriedTime = 0; + _octreeQuery.incrementConnectionID(); } if (node->getType() == NodeType::AudioMixer) { diff --git a/interface/src/Application.h b/interface/src/Application.h index fbfb3979be..c709571204 100644 --- a/interface/src/Application.h +++ b/interface/src/Application.h @@ -543,7 +543,7 @@ private: ViewFrustum _displayViewFrustum; quint64 _lastQueriedTime; - OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers + OctreeQuery _octreeQuery { true }; // NodeData derived class for querying octee cells from octree servers std::shared_ptr _applicationStateDevice; // Default ApplicationDevice reflecting the state of different properties of the session std::shared_ptr _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad @@ -708,7 +708,7 @@ private: friend class RenderEventHandler; - std::atomic _pendingIdleEvent { false }; - std::atomic _pendingRenderEvent { false }; + std::atomic _pendingIdleEvent { true }; + std::atomic _pendingRenderEvent { true }; }; #endif // hifi_Application_h diff --git a/interface/src/ui/AddressBarDialog.cpp b/interface/src/ui/AddressBarDialog.cpp index 8b5e255b06..1a23674fa3 100644 --- a/interface/src/ui/AddressBarDialog.cpp +++ b/interface/src/ui/AddressBarDialog.cpp @@ -40,6 +40,10 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare _backEnabled = !(DependencyManager::get()->getBackStack().isEmpty()); _forwardEnabled = !(DependencyManager::get()->getForwardStack().isEmpty()); connect(addressManager.data(), &AddressManager::hostChanged, this, &AddressBarDialog::hostChanged); + auto nodeList = DependencyManager::get(); + const DomainHandler& domainHandler = nodeList->getDomainHandler(); + connect(&domainHandler, &DomainHandler::connectedToDomain, this, &AddressBarDialog::hostChanged); + connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &AddressBarDialog::hostChanged); 
connect(DependencyManager::get().data(), &DialogsManager::setUseFeed, this, &AddressBarDialog::setUseFeed); connect(qApp, &Application::receivedHifiSchemeURL, this, &AddressBarDialog::receivedHifiSchemeURL); } diff --git a/libraries/animation/src/Rig.cpp b/libraries/animation/src/Rig.cpp index f06156874b..78aa1f4ba8 100644 --- a/libraries/animation/src/Rig.cpp +++ b/libraries/animation/src/Rig.cpp @@ -179,6 +179,11 @@ void Rig::restoreRoleAnimation(const QString& role) { } else { qCWarning(animation) << "Rig::restoreRoleAnimation could not find role " << role; } + + auto statesIter = _roleAnimStates.find(role); + if (statesIter != _roleAnimStates.end()) { + _roleAnimStates.erase(statesIter); + } } } else { qCWarning(animation) << "Rig::overrideRoleAnimation avatar not ready yet"; diff --git a/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp b/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp index 49d2431098..6bf9cc1666 100644 --- a/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp +++ b/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp @@ -1579,7 +1579,7 @@ float Avatar::getEyeHeight() const { if (QThread::currentThread() != thread()) { float result = DEFAULT_AVATAR_EYE_HEIGHT; - BLOCKING_INVOKE_METHOD(const_cast(this), "getHeight", Q_RETURN_ARG(float, result)); + BLOCKING_INVOKE_METHOD(const_cast(this), "getEyeHeight", Q_RETURN_ARG(float, result)); return result; } diff --git a/libraries/entities/src/DiffTraversal.h b/libraries/entities/src/DiffTraversal.h index eb7168356e..69431d8db5 100644 --- a/libraries/entities/src/DiffTraversal.h +++ b/libraries/entities/src/DiffTraversal.h @@ -73,6 +73,8 @@ public: void setScanCallback(std::function cb); void traverse(uint64_t timeBudget); + void reset() { _path.clear(); _completedView.startTime = 0; } // resets our state to force a new "First" traversal + private: void getNextVisibleElement(VisibleElement& next); diff --git a/libraries/entities/src/ParticleEffectEntityItem.cpp 
b/libraries/entities/src/ParticleEffectEntityItem.cpp index b216144ded..c2e29dd44f 100644 --- a/libraries/entities/src/ParticleEffectEntityItem.cpp +++ b/libraries/entities/src/ParticleEffectEntityItem.cpp @@ -97,7 +97,8 @@ bool operator==(const Properties& a, const Properties& b) { (a.maxParticles == b.maxParticles) && (a.emission == b.emission) && (a.polar == b.polar) && - (a.azimuth == b.azimuth); + (a.azimuth == b.azimuth) && + (a.textures == b.textures); } bool operator!=(const Properties& a, const Properties& b) { diff --git a/libraries/entities/src/ShapeEntityItem.cpp b/libraries/entities/src/ShapeEntityItem.cpp index 4115a606df..2771d22cdb 100644 --- a/libraries/entities/src/ShapeEntityItem.cpp +++ b/libraries/entities/src/ShapeEntityItem.cpp @@ -297,7 +297,7 @@ void ShapeEntityItem::computeShapeInfo(ShapeInfo& info) { const float MIN_RELATIVE_SPHERICAL_ERROR = 0.001f; if (diameter > MIN_DIAMETER && fabsf(diameter - entityDimensions.z) / diameter < MIN_RELATIVE_SPHERICAL_ERROR) { - _collisionShapeType = SHAPE_TYPE_SPHERE; + _collisionShapeType = SHAPE_TYPE_CYLINDER_Y; } else if (hullShapeCalculator) { hullShapeCalculator(this, info); _collisionShapeType = SHAPE_TYPE_SIMPLE_HULL; diff --git a/libraries/fbx/src/GLTFReader.cpp b/libraries/fbx/src/GLTFReader.cpp new file mode 100644 index 0000000000..c8501688ac --- /dev/null +++ b/libraries/fbx/src/GLTFReader.cpp @@ -0,0 +1,1380 @@ +// +// GLTFReader.cpp +// libraries/fbx/src +// +// Created by Luis Cuenca on 8/30/17. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include + +#include +#include +#include + +#include "GLTFReader.h" +#include "FBXReader.h" + + +GLTFReader::GLTFReader() { + +} + +bool GLTFReader::getStringVal(const QJsonObject& object, const QString& fieldname, + QString& value, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isString()); + if (_defined) { + value = object[fieldname].toString(); + } + defined.insert(fieldname, _defined); + return _defined; +} + +bool GLTFReader::getBoolVal(const QJsonObject& object, const QString& fieldname, + bool& value, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isBool()); + if (_defined) { + value = object[fieldname].toBool(); + } + defined.insert(fieldname, _defined); + return _defined; +} + +bool GLTFReader::getIntVal(const QJsonObject& object, const QString& fieldname, + int& value, QMap& defined) { + bool _defined = (object.contains(fieldname) && !object[fieldname].isNull()); + if (_defined) { + value = object[fieldname].toInt(); + } + defined.insert(fieldname, _defined); + return _defined; +} + +bool GLTFReader::getDoubleVal(const QJsonObject& object, const QString& fieldname, + double& value, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isDouble()); + if (_defined) { + value = object[fieldname].toDouble(); + } + defined.insert(fieldname, _defined); + return _defined; +} +bool GLTFReader::getObjectVal(const QJsonObject& object, const QString& fieldname, + QJsonObject& value, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isObject()); + if (_defined) { + value = object[fieldname].toObject(); + } + defined.insert(fieldname, _defined); + return _defined; +} + +bool GLTFReader::getIntArrayVal(const QJsonObject& 
object, const QString& fieldname, + QVector& values, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isArray()); + if (_defined) { + QJsonArray arr = object[fieldname].toArray(); + foreach(const QJsonValue & v, arr) { + if (!v.isNull()) { + values.push_back(v.toInt()); + } + } + } + defined.insert(fieldname, _defined); + return _defined; +} + +bool GLTFReader::getDoubleArrayVal(const QJsonObject& object, const QString& fieldname, + QVector& values, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isArray()); + if (_defined) { + QJsonArray arr = object[fieldname].toArray(); + foreach(const QJsonValue & v, arr) { + if (v.isDouble()) { + values.push_back(v.toDouble()); + } + } + } + defined.insert(fieldname, _defined); + return _defined; +} + +bool GLTFReader::getObjectArrayVal(const QJsonObject& object, const QString& fieldname, + QJsonArray& objects, QMap& defined) { + bool _defined = (object.contains(fieldname) && object[fieldname].isArray()); + if (_defined) { + objects = object[fieldname].toArray(); + } + defined.insert(fieldname, _defined); + return _defined; +} + +int GLTFReader::getMeshPrimitiveRenderingMode(const QString& type) +{ + if (type == "POINTS") { + return GLTFMeshPrimitivesRenderingMode::POINTS; + } + if (type == "LINES") { + return GLTFMeshPrimitivesRenderingMode::LINES; + } + if (type == "LINE_LOOP") { + return GLTFMeshPrimitivesRenderingMode::LINE_LOOP; + } + if (type == "LINE_STRIP") { + return GLTFMeshPrimitivesRenderingMode::LINE_STRIP; + } + if (type == "TRIANGLES") { + return GLTFMeshPrimitivesRenderingMode::TRIANGLES; + } + if (type == "TRIANGLE_STRIP") { + return GLTFMeshPrimitivesRenderingMode::TRIANGLE_STRIP; + } + if (type == "TRIANGLE_FAN") { + return GLTFMeshPrimitivesRenderingMode::TRIANGLE_FAN; + } + return GLTFMeshPrimitivesRenderingMode::TRIANGLES; +} + +int GLTFReader::getAccessorType(const QString& type) +{ + if (type == "SCALAR") { + return 
GLTFAccessorType::SCALAR; + } + if (type == "VEC2") { + return GLTFAccessorType::VEC2; + } + if (type == "VEC3") { + return GLTFAccessorType::VEC3; + } + if (type == "VEC4") { + return GLTFAccessorType::VEC4; + } + if (type == "MAT2") { + return GLTFAccessorType::MAT2; + } + if (type == "MAT3") { + return GLTFAccessorType::MAT3; + } + if (type == "MAT4") { + return GLTFAccessorType::MAT4; + } + return GLTFAccessorType::SCALAR; +} + +int GLTFReader::getMaterialAlphaMode(const QString& type) +{ + if (type == "OPAQUE") { + return GLTFMaterialAlphaMode::OPAQUE; + } + if (type == "MASK") { + return GLTFMaterialAlphaMode::MASK; + } + if (type == "BLEND") { + return GLTFMaterialAlphaMode::BLEND; + } + return GLTFMaterialAlphaMode::OPAQUE; +} + +int GLTFReader::getCameraType(const QString& type) +{ + if (type == "orthographic") { + return GLTFCameraTypes::ORTHOGRAPHIC; + } + if (type == "perspective") { + return GLTFCameraTypes::PERSPECTIVE; + } + return GLTFCameraTypes::PERSPECTIVE; +} + +int GLTFReader::getImageMimeType(const QString& mime) +{ + if (mime == "image/jpeg") { + return GLTFImageMimetype::JPEG; + } + if (mime == "image/png") { + return GLTFImageMimetype::PNG; + } + return GLTFImageMimetype::JPEG; +} + +int GLTFReader::getAnimationSamplerInterpolation(const QString& interpolation) +{ + if (interpolation == "LINEAR") { + return GLTFAnimationSamplerInterpolation::LINEAR; + } + return GLTFAnimationSamplerInterpolation::LINEAR; +} + +bool GLTFReader::setAsset(const QJsonObject& object) { + QJsonObject jsAsset; + bool isAssetDefined = getObjectVal(object, "asset", jsAsset, _file.defined); + if (isAssetDefined) { + if (!getStringVal(jsAsset, "version", _file.asset.version, + _file.asset.defined) || _file.asset.version != "2.0") { + return false; + } + getStringVal(jsAsset, "generator", _file.asset.generator, _file.asset.defined); + getStringVal(jsAsset, "copyright", _file.asset.copyright, _file.asset.defined); + } + return isAssetDefined; +} + +bool 
GLTFReader::addAccessor(const QJsonObject& object) { + GLTFAccessor accessor; + + getIntVal(object, "bufferView", accessor.bufferView, accessor.defined); + getIntVal(object, "byteOffset", accessor.byteOffset, accessor.defined); + getIntVal(object, "componentType", accessor.componentType, accessor.defined); + getIntVal(object, "count", accessor.count, accessor.defined); + getBoolVal(object, "normalized", accessor.normalized, accessor.defined); + QString type; + if (getStringVal(object, "type", type, accessor.defined)) { + accessor.type = getAccessorType(type); + } + getDoubleArrayVal(object, "max", accessor.max, accessor.defined); + getDoubleArrayVal(object, "min", accessor.min, accessor.defined); + + _file.accessors.push_back(accessor); + + return true; +} + +bool GLTFReader::addAnimation(const QJsonObject& object) { + GLTFAnimation animation; + + QJsonArray channels; + if (getObjectArrayVal(object, "channels", channels, animation.defined)) { + foreach(const QJsonValue & v, channels) { + if (v.isObject()) { + GLTFChannel channel; + getIntVal(v.toObject(), "sampler", channel.sampler, channel.defined); + QJsonObject jsChannel; + if (getObjectVal(v.toObject(), "target", jsChannel, channel.defined)) { + getIntVal(jsChannel, "node", channel.target.node, channel.target.defined); + getIntVal(jsChannel, "path", channel.target.path, channel.target.defined); + } + } + } + } + + QJsonArray samplers; + if (getObjectArrayVal(object, "samplers", samplers, animation.defined)) { + foreach(const QJsonValue & v, samplers) { + if (v.isObject()) { + GLTFAnimationSampler sampler; + getIntVal(v.toObject(), "input", sampler.input, sampler.defined); + getIntVal(v.toObject(), "output", sampler.input, sampler.defined); + QString interpolation; + if (getStringVal(v.toObject(), "interpolation", interpolation, sampler.defined)) { + sampler.interpolation = getAnimationSamplerInterpolation(interpolation); + } + } + } + } + + _file.animations.push_back(animation); + + return true; +} + +bool 
GLTFReader::addBufferView(const QJsonObject& object) { + GLTFBufferView bufferview; + + getIntVal(object, "buffer", bufferview.buffer, bufferview.defined); + getIntVal(object, "byteLength", bufferview.byteLength, bufferview.defined); + getIntVal(object, "byteOffset", bufferview.byteOffset, bufferview.defined); + getIntVal(object, "target", bufferview.target, bufferview.defined); + + _file.bufferviews.push_back(bufferview); + + return true; +} + +bool GLTFReader::addBuffer(const QJsonObject& object) { + GLTFBuffer buffer; + + getIntVal(object, "byteLength", buffer.byteLength, buffer.defined); + if (getStringVal(object, "uri", buffer.uri, buffer.defined)) { + if (!readBinary(buffer.uri, buffer.blob)) { + return false; + } + } + _file.buffers.push_back(buffer); + + return true; +} + +bool GLTFReader::addCamera(const QJsonObject& object) { + GLTFCamera camera; + + QJsonObject jsPerspective; + QJsonObject jsOrthographic; + QString type; + getStringVal(object, "name", camera.name, camera.defined); + if (getObjectVal(object, "perspective", jsPerspective, camera.defined)) { + getDoubleVal(jsPerspective, "aspectRatio", camera.perspective.aspectRatio, camera.perspective.defined); + getDoubleVal(jsPerspective, "yfov", camera.perspective.yfov, camera.perspective.defined); + getDoubleVal(jsPerspective, "zfar", camera.perspective.zfar, camera.perspective.defined); + getDoubleVal(jsPerspective, "znear", camera.perspective.znear, camera.perspective.defined); + camera.type = GLTFCameraTypes::PERSPECTIVE; + } else if (getObjectVal(object, "orthographic", jsOrthographic, camera.defined)) { + getDoubleVal(jsOrthographic, "zfar", camera.orthographic.zfar, camera.orthographic.defined); + getDoubleVal(jsOrthographic, "znear", camera.orthographic.znear, camera.orthographic.defined); + getDoubleVal(jsOrthographic, "xmag", camera.orthographic.xmag, camera.orthographic.defined); + getDoubleVal(jsOrthographic, "ymag", camera.orthographic.ymag, camera.orthographic.defined); + camera.type = 
GLTFCameraTypes::ORTHOGRAPHIC; + } else if (getStringVal(object, "type", type, camera.defined)) { + camera.type = getCameraType(type); + } + + _file.cameras.push_back(camera); + + return true; +} + +bool GLTFReader::addImage(const QJsonObject& object) { + GLTFImage image; + + QString mime; + getStringVal(object, "uri", image.uri, image.defined); + if (getStringVal(object, "mimeType", mime, image.defined)) { + image.mimeType = getImageMimeType(mime); + } + getIntVal(object, "bufferView", image.bufferView, image.defined); + + _file.images.push_back(image); + + return true; +} + +bool GLTFReader::getIndexFromObject(const QJsonObject& object, const QString& field, + int& outidx, QMap& defined) { + QJsonObject subobject; + if (getObjectVal(object, field, subobject, defined)) { + QMap tmpdefined = QMap(); + return getIntVal(subobject, "index", outidx, tmpdefined); + } + return false; +} + +bool GLTFReader::addMaterial(const QJsonObject& object) { + GLTFMaterial material; + + getStringVal(object, "name", material.name, material.defined); + getDoubleArrayVal(object, "emissiveFactor", material.emissiveFactor, material.defined); + getIndexFromObject(object, "emissiveTexture", material.emissiveTexture, material.defined); + getIndexFromObject(object, "normalTexture", material.normalTexture, material.defined); + getIndexFromObject(object, "occlusionTexture", material.occlusionTexture, material.defined); + getBoolVal(object, "doubleSided", material.doubleSided, material.defined); + QString alphamode; + if (getStringVal(object, "alphaMode", alphamode, material.defined)) { + material.alphaMode = getMaterialAlphaMode(alphamode); + } + getDoubleVal(object, "alphaCutoff", material.alphaCutoff, material.defined); + QJsonObject jsMetallicRoughness; + if (getObjectVal(object, "pbrMetallicRoughness", jsMetallicRoughness, material.defined)) { + getDoubleArrayVal(jsMetallicRoughness, "baseColorFactor", + material.pbrMetallicRoughness.baseColorFactor, + 
material.pbrMetallicRoughness.defined); + getIndexFromObject(jsMetallicRoughness, "baseColorTexture", + material.pbrMetallicRoughness.baseColorTexture, + material.pbrMetallicRoughness.defined); + getDoubleVal(jsMetallicRoughness, "metallicFactor", + material.pbrMetallicRoughness.metallicFactor, + material.pbrMetallicRoughness.defined); + getDoubleVal(jsMetallicRoughness, "roughnessFactor", + material.pbrMetallicRoughness.roughnessFactor, + material.pbrMetallicRoughness.defined); + getIndexFromObject(jsMetallicRoughness, "metallicRoughnessTexture", + material.pbrMetallicRoughness.metallicRoughnessTexture, + material.pbrMetallicRoughness.defined); + } + _file.materials.push_back(material); + return true; +} + +bool GLTFReader::addMesh(const QJsonObject& object) { + GLTFMesh mesh; + + getStringVal(object, "name", mesh.name, mesh.defined); + getDoubleArrayVal(object, "weights", mesh.weights, mesh.defined); + QJsonArray jsPrimitives; + object.keys(); + if (getObjectArrayVal(object, "primitives", jsPrimitives, mesh.defined)) { + foreach(const QJsonValue & prim, jsPrimitives) { + if (prim.isObject()) { + GLTFMeshPrimitive primitive; + QJsonObject jsPrimitive = prim.toObject(); + getIntVal(jsPrimitive, "mode", primitive.mode, primitive.defined); + getIntVal(jsPrimitive, "indices", primitive.indices, primitive.defined); + getIntVal(jsPrimitive, "material", primitive.material, primitive.defined); + + QJsonObject jsAttributes; + if (getObjectVal(jsPrimitive, "attributes", jsAttributes, primitive.defined)) { + QStringList attrKeys = jsAttributes.keys(); + foreach(const QString & attrKey, attrKeys) { + int attrVal; + getIntVal(jsAttributes, attrKey, attrVal, primitive.attributes.defined); + primitive.attributes.values.insert(attrKey, attrVal); + } + } + + QJsonArray jsTargets; + if (getObjectArrayVal(jsPrimitive, "targets", jsTargets, primitive.defined)) + { + foreach(const QJsonValue & tar, jsTargets) { + if (tar.isObject()) { + QJsonObject jsTarget = tar.toObject(); + 
QStringList tarKeys = jsTarget.keys(); + GLTFMeshPrimitiveAttr target; + foreach(const QString & tarKey, tarKeys) { + int tarVal; + getIntVal(jsAttributes, tarKey, tarVal, target.defined); + target.values.insert(tarKey, tarVal); + } + primitive.targets.push_back(target); + } + } + } + mesh.primitives.push_back(primitive); + } + } + + } + + _file.meshes.push_back(mesh); + + return true; +} + +bool GLTFReader::addNode(const QJsonObject& object) { + GLTFNode node; + + getStringVal(object, "name", node.name, node.defined); + getIntVal(object, "camera", node.camera, node.defined); + getIntVal(object, "mesh", node.mesh, node.defined); + getIntArrayVal(object, "children", node.children, node.defined); + getDoubleArrayVal(object, "translation", node.translation, node.defined); + getDoubleArrayVal(object, "rotation", node.rotation, node.defined); + getDoubleArrayVal(object, "scale", node.scale, node.defined); + getDoubleArrayVal(object, "matrix", node.matrix, node.defined); + getIntVal(object, "skin", node.skin, node.defined); + getStringVal(object, "jointName", node.jointName, node.defined); + getIntArrayVal(object, "skeletons", node.skeletons, node.defined); + + _file.nodes.push_back(node); + + return true; +} + +bool GLTFReader::addSampler(const QJsonObject& object) { + GLTFSampler sampler; + + getIntVal(object, "magFilter", sampler.magFilter, sampler.defined); + getIntVal(object, "minFilter", sampler.minFilter, sampler.defined); + getIntVal(object, "wrapS", sampler.wrapS, sampler.defined); + getIntVal(object, "wrapT", sampler.wrapT, sampler.defined); + + _file.samplers.push_back(sampler); + + return true; + +} + +bool GLTFReader::addScene(const QJsonObject& object) { + GLTFScene scene; + + getStringVal(object, "name", scene.name, scene.defined); + getIntArrayVal(object, "nodes", scene.nodes, scene.defined); + + _file.scenes.push_back(scene); + return true; +} + +bool GLTFReader::addSkin(const QJsonObject& object) { + GLTFSkin skin; + + getIntVal(object, 
"inverseBindMatrices", skin.inverseBindMatrices, skin.defined); + getIntVal(object, "skeleton", skin.skeleton, skin.defined); + getIntArrayVal(object, "joints", skin.joints, skin.defined); + + _file.skins.push_back(skin); + + return true; +} + +bool GLTFReader::addTexture(const QJsonObject& object) { + GLTFTexture texture; + getIntVal(object, "sampler", texture.sampler, texture.defined); + getIntVal(object, "source", texture.source, texture.defined); + + _file.textures.push_back(texture); + + return true; +} + +bool GLTFReader::parseGLTF(const QByteArray& model) { + PROFILE_RANGE_EX(resource_parse, __FUNCTION__, 0xffff0000, nullptr); + + QJsonDocument d = QJsonDocument::fromJson(model); + QJsonObject jsFile = d.object(); + + bool isvalid = setAsset(jsFile); + if (isvalid) { + QJsonArray accessors; + if (getObjectArrayVal(jsFile, "accessors", accessors, _file.defined)) { + foreach(const QJsonValue & accVal, accessors) { + if (accVal.isObject()) { + addAccessor(accVal.toObject()); + } + } + } + + QJsonArray animations; + if (getObjectArrayVal(jsFile, "animations", animations, _file.defined)) { + foreach(const QJsonValue & animVal, accessors) { + if (animVal.isObject()) { + addAnimation(animVal.toObject()); + } + } + } + + QJsonArray bufferViews; + if (getObjectArrayVal(jsFile, "bufferViews", bufferViews, _file.defined)) { + foreach(const QJsonValue & bufviewVal, bufferViews) { + if (bufviewVal.isObject()) { + addBufferView(bufviewVal.toObject()); + } + } + } + + QJsonArray buffers; + if (getObjectArrayVal(jsFile, "buffers", buffers, _file.defined)) { + foreach(const QJsonValue & bufVal, buffers) { + if (bufVal.isObject()) { + addBuffer(bufVal.toObject()); + } + } + } + + QJsonArray cameras; + if (getObjectArrayVal(jsFile, "cameras", cameras, _file.defined)) { + foreach(const QJsonValue & camVal, cameras) { + if (camVal.isObject()) { + addCamera(camVal.toObject()); + } + } + } + + QJsonArray images; + if (getObjectArrayVal(jsFile, "images", images, _file.defined)) { + 
foreach(const QJsonValue & imgVal, images) { + if (imgVal.isObject()) { + addImage(imgVal.toObject()); + } + } + } + + QJsonArray materials; + if (getObjectArrayVal(jsFile, "materials", materials, _file.defined)) { + foreach(const QJsonValue & matVal, materials) { + if (matVal.isObject()) { + addMaterial(matVal.toObject()); + } + } + } + + QJsonArray meshes; + if (getObjectArrayVal(jsFile, "meshes", meshes, _file.defined)) { + foreach(const QJsonValue & meshVal, meshes) { + if (meshVal.isObject()) { + addMesh(meshVal.toObject()); + } + } + } + + QJsonArray nodes; + if (getObjectArrayVal(jsFile, "nodes", nodes, _file.defined)) { + foreach(const QJsonValue & nodeVal, nodes) { + if (nodeVal.isObject()) { + addNode(nodeVal.toObject()); + } + } + } + + QJsonArray samplers; + if (getObjectArrayVal(jsFile, "samplers", samplers, _file.defined)) { + foreach(const QJsonValue & samVal, samplers) { + if (samVal.isObject()) { + addSampler(samVal.toObject()); + } + } + } + + QJsonArray scenes; + if (getObjectArrayVal(jsFile, "scenes", scenes, _file.defined)) { + foreach(const QJsonValue & sceneVal, scenes) { + if (sceneVal.isObject()) { + addScene(sceneVal.toObject()); + } + } + } + + QJsonArray skins; + if (getObjectArrayVal(jsFile, "skins", skins, _file.defined)) { + foreach(const QJsonValue & skinVal, skins) { + if (skinVal.isObject()) { + addSkin(skinVal.toObject()); + } + } + } + + QJsonArray textures; + if (getObjectArrayVal(jsFile, "textures", textures, _file.defined)) { + foreach(const QJsonValue & texVal, textures) { + if (texVal.isObject()) { + addTexture(texVal.toObject()); + } + } + } + } else { + qCDebug(modelformat) << "Error parsing GLTF file."; + return false; + } + return true; +} + +glm::mat4 GLTFReader::getModelTransform(const GLTFNode& node) { + glm::mat4 tmat = glm::mat4(1.0); + + if (node.defined["matrix"] && node.matrix.size() == 16) { + tmat = glm::mat4(node.matrix[0], node.matrix[1], node.matrix[2], node.matrix[3], + node.matrix[4], node.matrix[5], 
node.matrix[6], node.matrix[7], + node.matrix[8], node.matrix[9], node.matrix[10], node.matrix[11], + node.matrix[12], node.matrix[13], node.matrix[14], node.matrix[15]); + } else { + + if (node.defined["rotation"] && node.rotation.size() == 4) { + //quat(x,y,z,w) to quat(w,x,y,z) + glm::quat rotquat = glm::quat(node.rotation[3], node.rotation[0], node.rotation[1], node.rotation[2]); + tmat = glm::mat4_cast(rotquat) * tmat; + } + + if (node.defined["scale"] && node.scale.size() == 3) { + glm::vec3 scale = glm::vec3(node.scale[0], node.scale[1], node.scale[2]); + glm::mat4 s = glm::mat4(1.0); + s = glm::scale(s, scale); + tmat = s * tmat; + } + + if (node.defined["translation"] && node.translation.size() == 3) { + glm::vec3 trans = glm::vec3(node.translation[0], node.translation[1], node.translation[2]); + glm::mat4 t = glm::mat4(1.0); + t = glm::translate(t, trans); + tmat = t * tmat; + } + } + return tmat; +} + +bool GLTFReader::buildGeometry(FBXGeometry& geometry, const QUrl& url) { + + //Build dependencies + QVector> nodeDependencies(_file.nodes.size()); + int nodecount = 0; + foreach(auto &node, _file.nodes) { + //nodes_transforms.push_back(getModelTransform(node)); + foreach(int child, node.children) nodeDependencies[child].push_back(nodecount); + nodecount++; + } + + nodecount = 0; + foreach(auto &node, _file.nodes) { + // collect node transform + _file.nodes[nodecount].transforms.push_back(getModelTransform(node)); + if (nodeDependencies[nodecount].size() == 1) { + int parentidx = nodeDependencies[nodecount][0]; + while (true) { // iterate parents + // collect parents transforms + _file.nodes[nodecount].transforms.push_back(getModelTransform(_file.nodes[parentidx])); + if (nodeDependencies[parentidx].size() == 1) { + parentidx = nodeDependencies[parentidx][0]; + } else break; + } + } + + nodecount++; + } + + //Build default joints + geometry.joints.resize(1); + geometry.joints[0].isFree = false; + geometry.joints[0].parentIndex = -1; + 
geometry.joints[0].distanceToParent = 0; + geometry.joints[0].translation = glm::vec3(0, 0, 0); + geometry.joints[0].rotationMin = glm::vec3(0, 0, 0); + geometry.joints[0].rotationMax = glm::vec3(0, 0, 0); + geometry.joints[0].name = "OBJ"; + geometry.joints[0].isSkeletonJoint = true; + + geometry.jointIndices["x"] = 1; + + //Build materials + QVector materialIDs; + QString unknown = "Default"; + int ukcount = 0; + foreach(auto material, _file.materials) { + QString mid = (material.defined["name"]) ? material.name : unknown + ukcount++; + materialIDs.push_back(mid); + } + + for (int i = 0; i < materialIDs.size(); i++) { + QString& matid = materialIDs[i]; + geometry.materials[matid] = FBXMaterial(); + FBXMaterial& fbxMaterial = geometry.materials[matid]; + fbxMaterial._material = std::make_shared(); + setFBXMaterial(fbxMaterial, _file.materials[i]); + } + + + + nodecount = 0; + // Build meshes + foreach(auto &node, _file.nodes) { + + if (node.defined["mesh"]) { + qCDebug(modelformat) << "node_transforms" << node.transforms; + foreach(auto &primitive, _file.meshes[node.mesh].primitives) { + geometry.meshes.append(FBXMesh()); + FBXMesh& mesh = geometry.meshes[geometry.meshes.size() - 1]; + FBXCluster cluster; + cluster.jointIndex = 0; + cluster.inverseBindMatrix = glm::mat4(1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1); + mesh.clusters.append(cluster); + + FBXMeshPart part = FBXMeshPart(); + + int indicesAccessorIdx = primitive.indices; + + GLTFAccessor& indicesAccessor = _file.accessors[indicesAccessorIdx]; + GLTFBufferView& indicesBufferview = _file.bufferviews[indicesAccessor.bufferView]; + GLTFBuffer& indicesBuffer = _file.buffers[indicesBufferview.buffer]; + + int indicesAccBoffset = indicesAccessor.defined["byteOffset"] ? 
indicesAccessor.byteOffset : 0; + + QVector raw_indices; + QVector raw_vertices; + QVector raw_normals; + + addArrayOfType(indicesBuffer.blob, + indicesBufferview.byteOffset + indicesAccBoffset, + indicesBufferview.byteLength, + part.triangleIndices, + indicesAccessor.type, + indicesAccessor.componentType); + + QList keys = primitive.attributes.values.keys(); + + foreach(auto &key, keys) { + int accessorIdx = primitive.attributes.values[key]; + + GLTFAccessor& accessor = _file.accessors[accessorIdx]; + GLTFBufferView& bufferview = _file.bufferviews[accessor.bufferView]; + GLTFBuffer& buffer = _file.buffers[bufferview.buffer]; + + int accBoffset = accessor.defined["byteOffset"] ? accessor.byteOffset : 0; + if (key == "POSITION") { + QVector vertices; + addArrayOfType(buffer.blob, + bufferview.byteOffset + accBoffset, + bufferview.byteLength, vertices, + accessor.type, + accessor.componentType); + for (int n = 0; n < vertices.size(); n = n + 3) { + mesh.vertices.push_back(glm::vec3(vertices[n], vertices[n + 1], vertices[n + 2])); + } + } else if (key == "NORMAL") { + QVector normals; + addArrayOfType(buffer.blob, + bufferview.byteOffset + accBoffset, + bufferview.byteLength, + normals, + accessor.type, + accessor.componentType); + for (int n = 0; n < normals.size(); n = n + 3) { + mesh.normals.push_back(glm::vec3(normals[n], normals[n + 1], normals[n + 2])); + } + } else if (key == "TEXCOORD_0") { + QVector texcoords; + addArrayOfType(buffer.blob, + bufferview.byteOffset + accBoffset, + bufferview.byteLength, + texcoords, + accessor.type, + accessor.componentType); + for (int n = 0; n < texcoords.size(); n = n + 2) { + mesh.texCoords.push_back(glm::vec2(texcoords[n], texcoords[n + 1])); + } + } else if (key == "TEXCOORD_1") { + QVector texcoords; + addArrayOfType(buffer.blob, + bufferview.byteOffset + accBoffset, + bufferview.byteLength, + texcoords, + accessor.type, + accessor.componentType); + for (int n = 0; n < texcoords.size(); n = n + 2) { + 
mesh.texCoords1.push_back(glm::vec2(texcoords[n], texcoords[n + 1])); + } + } + + } + + if (primitive.defined["material"]) { + part.materialID = materialIDs[primitive.material]; + } + mesh.parts.push_back(part); + + // populate the texture coordenates if they don't exist + if (mesh.texCoords.size() == 0) { + for (int i = 0; i < part.triangleIndices.size(); i++) mesh.texCoords.push_back(glm::vec2(0.0, 1.0)); + } + mesh.meshExtents.reset(); + foreach(const glm::vec3& vertex, mesh.vertices) { + mesh.meshExtents.addPoint(vertex); + geometry.meshExtents.addPoint(vertex); + } + + // since mesh.modelTransform seems to not have any effect I apply the transformation the model + for (int h = 0; h < mesh.vertices.size(); h++) { + glm::vec4 ver = glm::vec4(mesh.vertices[h], 1); + if (node.transforms.size() > 0) { + ver = node.transforms[0] * ver; // for model dependency should multiply also by parents transforms? + mesh.vertices[h] = glm::vec3(ver[0], ver[1], ver[2]); + } + } + + mesh.meshIndex = geometry.meshes.size(); + FBXReader::buildModelMesh(mesh, url.toString()); + } + + } + nodecount++; + } + + + return true; +} + +FBXGeometry* GLTFReader::readGLTF(QByteArray& model, const QVariantHash& mapping, + const QUrl& url, bool loadLightmaps, float lightmapLevel) { + _url = url; + + parseGLTF(model); + //_file.dump(); + FBXGeometry* geometryPtr = new FBXGeometry(); + FBXGeometry& geometry = *geometryPtr; + + buildGeometry(geometry, url); + + //fbxDebugDump(geometry); + return geometryPtr; + +} + +bool GLTFReader::readBinary(const QString& url, QByteArray& outdata) { + QUrl binaryUrl = _url.resolved(QUrl(url).fileName()); + qCDebug(modelformat) << "binaryUrl: " << binaryUrl << " OriginalUrl: " << _url; + bool success; + std::tie(success, outdata) = requestData(binaryUrl); + + return success; +} + +bool GLTFReader::doesResourceExist(const QString& url) { + if (_url.isEmpty()) { + return false; + } + QUrl candidateUrl = _url.resolved(QUrl(url).fileName()); + return 
DependencyManager::get()->resourceExists(candidateUrl); +} + +std::tuple GLTFReader::requestData(QUrl& url) { + auto request = DependencyManager::get()->createResourceRequest(nullptr, url); + + if (!request) { + return std::make_tuple(false, QByteArray()); + } + + QEventLoop loop; + QObject::connect(request, &ResourceRequest::finished, &loop, &QEventLoop::quit); + request->send(); + loop.exec(); + + if (request->getResult() == ResourceRequest::Success) { + return std::make_tuple(true, request->getData()); + } else { + return std::make_tuple(false, QByteArray()); + } +} + + +QNetworkReply* GLTFReader::request(QUrl& url, bool isTest) { + if (!qApp) { + return nullptr; + } + bool aboutToQuit{ false }; + auto connection = QObject::connect(qApp, &QCoreApplication::aboutToQuit, [&] { + aboutToQuit = true; + }); + QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance(); + QNetworkRequest netRequest(url); + netRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true); + QNetworkReply* netReply = isTest ? networkAccessManager.head(netRequest) : networkAccessManager.get(netRequest); + if (!qApp || aboutToQuit) { + netReply->deleteLater(); + return nullptr; + } + QEventLoop loop; // Create an event loop that will quit when we get the finished signal + QObject::connect(netReply, SIGNAL(finished()), &loop, SLOT(quit())); + loop.exec(); // Nothing is going to happen on this whole run thread until we get this + + QObject::disconnect(connection); + return netReply; // trying to sync later on. 
+} + +FBXTexture GLTFReader::getFBXTexture(const GLTFTexture& texture) { + FBXTexture fbxtex = FBXTexture(); + fbxtex.texcoordSet = 0; + + if (texture.defined["source"]) { + QString fname = QUrl(_file.images[texture.source].uri).fileName(); + QUrl textureUrl = _url.resolved(fname); + qCDebug(modelformat) << "fname: " << fname; + qCDebug(modelformat) << "textureUrl: " << textureUrl; + qCDebug(modelformat) << "Url: " << _url; + fbxtex.name = fname; + fbxtex.filename = textureUrl.toEncoded(); + } + return fbxtex; +} + +void GLTFReader::setFBXMaterial(FBXMaterial& fbxmat, const GLTFMaterial& material) { + + + if (material.defined["name"]) { + fbxmat.name = fbxmat.materialID = material.name; + } + + if (material.defined["emissiveFactor"] && material.emissiveFactor.size() == 3) { + glm::vec3 emissive = glm::vec3(material.emissiveFactor[0], + material.emissiveFactor[1], + material.emissiveFactor[2]); + fbxmat._material->setEmissive(emissive); + } + + if (material.defined["emissiveTexture"]) { + fbxmat.emissiveTexture = getFBXTexture(_file.textures[material.emissiveTexture]); + fbxmat.useEmissiveMap = true; + } + + if (material.defined["normalTexture"]) { + fbxmat.normalTexture = getFBXTexture(_file.textures[material.normalTexture]); + fbxmat.useNormalMap = true; + } + + if (material.defined["occlusionTexture"]) { + fbxmat.occlusionTexture = getFBXTexture(_file.textures[material.occlusionTexture]); + fbxmat.useOcclusionMap = true; + } + + if (material.defined["pbrMetallicRoughness"]) { + fbxmat.isPBSMaterial = true; + + if (material.pbrMetallicRoughness.defined["metallicFactor"]) { + fbxmat.metallic = material.pbrMetallicRoughness.metallicFactor; + } + if (material.pbrMetallicRoughness.defined["baseColorTexture"]) { + fbxmat.opacityTexture = getFBXTexture(_file.textures[material.pbrMetallicRoughness.baseColorTexture]); + fbxmat.albedoTexture = getFBXTexture(_file.textures[material.pbrMetallicRoughness.baseColorTexture]); + fbxmat.useAlbedoMap = true; + 
fbxmat.metallicTexture = getFBXTexture(_file.textures[material.pbrMetallicRoughness.baseColorTexture]); + fbxmat.useMetallicMap = true; + } + if (material.pbrMetallicRoughness.defined["metallicRoughnessTexture"]) { + fbxmat.roughnessTexture = getFBXTexture(_file.textures[material.pbrMetallicRoughness.metallicRoughnessTexture]); + fbxmat.useRoughnessMap = true; + + } + if (material.pbrMetallicRoughness.defined["roughnessFactor"]) { + fbxmat._material->setRoughness(material.pbrMetallicRoughness.roughnessFactor); + } + if (material.pbrMetallicRoughness.defined["baseColorFactor"] && + material.pbrMetallicRoughness.baseColorFactor.size() == 4) { + glm::vec3 dcolor = glm::vec3(material.pbrMetallicRoughness.baseColorFactor[0], + material.pbrMetallicRoughness.baseColorFactor[1], + material.pbrMetallicRoughness.baseColorFactor[2]); + fbxmat.diffuseColor = dcolor; + fbxmat._material->setAlbedo(dcolor); + fbxmat._material->setOpacity(material.pbrMetallicRoughness.baseColorFactor[3]); + } + } + +} + +template +bool GLTFReader::readArray(const QByteArray& bin, int byteOffset, int byteLength, + QVector& outarray, int accessorType) { + + QDataStream blobstream(bin); + blobstream.setByteOrder(QDataStream::LittleEndian); + blobstream.setVersion(QDataStream::Qt_5_9); + blobstream.setFloatingPointPrecision(QDataStream::FloatingPointPrecision::SinglePrecision); + + int vsize = byteLength / sizeof(T); + + qCDebug(modelformat) << "size1: " << vsize; + int dataskipped = blobstream.skipRawData(byteOffset); + qCDebug(modelformat) << "dataskipped: " << dataskipped; + + + while (outarray.size() < vsize) { + + T value1, value2, value3, value4, + value5, value6, value7, value8, + value9, value10, value11, value12, + value13, value14, value15, value16; + + if (accessorType == GLTFAccessorType::SCALAR) { + + blobstream >> value1; + + outarray.push_back(value1); + } else if (accessorType == GLTFAccessorType::VEC2) { + + blobstream >> value1; + blobstream >> value2; + + outarray.push_back(value1); 
+ outarray.push_back(value2); + } else if (accessorType == GLTFAccessorType::VEC3) { + + blobstream >> value1; + blobstream >> value2; + blobstream >> value3; + + outarray.push_back(value1); + outarray.push_back(value2); + outarray.push_back(value3); + } else if (accessorType == GLTFAccessorType::VEC4 || accessorType == GLTFAccessorType::MAT2) { + + blobstream >> value1; + blobstream >> value2; + blobstream >> value3; + blobstream >> value4; + + outarray.push_back(value1); + outarray.push_back(value2); + outarray.push_back(value3); + outarray.push_back(value4); + } else if (accessorType == GLTFAccessorType::MAT3) { + + blobstream >> value1; + blobstream >> value2; + blobstream >> value3; + blobstream >> value4; + blobstream >> value5; + blobstream >> value6; + blobstream >> value7; + blobstream >> value8; + blobstream >> value9; + + outarray.push_back(value1); + outarray.push_back(value2); + outarray.push_back(value3); + outarray.push_back(value4); + outarray.push_back(value5); + outarray.push_back(value6); + outarray.push_back(value7); + outarray.push_back(value8); + outarray.push_back(value9); + } else if (accessorType == GLTFAccessorType::MAT4) { + + blobstream >> value1; + blobstream >> value2; + blobstream >> value3; + blobstream >> value4; + blobstream >> value5; + blobstream >> value6; + blobstream >> value7; + blobstream >> value8; + blobstream >> value9; + blobstream >> value10; + blobstream >> value11; + blobstream >> value12; + blobstream >> value13; + blobstream >> value14; + blobstream >> value15; + blobstream >> value16; + + outarray.push_back(value1); + outarray.push_back(value2); + outarray.push_back(value3); + outarray.push_back(value4); + outarray.push_back(value5); + outarray.push_back(value6); + outarray.push_back(value7); + outarray.push_back(value8); + outarray.push_back(value9); + outarray.push_back(value10); + outarray.push_back(value11); + outarray.push_back(value12); + outarray.push_back(value13); + outarray.push_back(value14); + 
outarray.push_back(value15); + outarray.push_back(value16); + + } + } + blobstream.unsetDevice(); + return true; +} +template +bool GLTFReader::addArrayOfType(const QByteArray& bin, int byteOffset, int byteLength, + QVector& outarray, int accessorType, int componentType) { + + switch (componentType) { + case GLTFAccessorComponentType::BYTE: {} + case GLTFAccessorComponentType::UNSIGNED_BYTE: { + readArray(bin, byteOffset, byteLength, outarray, accessorType); + break; + } + case GLTFAccessorComponentType::SHORT: { + readArray(bin, byteOffset, byteLength, outarray, accessorType); + break; + } + case GLTFAccessorComponentType::UNSIGNED_INT: { + readArray(bin, byteOffset, byteLength, outarray, accessorType); + break; + } + case GLTFAccessorComponentType::UNSIGNED_SHORT: { + readArray(bin, byteOffset, byteLength, outarray, accessorType); + break; + } + case GLTFAccessorComponentType::FLOAT: { + readArray(bin, byteOffset, byteLength, outarray, accessorType); + break; + } + } + return true; +} + +void GLTFReader::retriangulate(const QVector& inIndices, const QVector& in_vertices, + const QVector& in_normals, QVector& outIndices, + QVector& out_vertices, QVector& out_normals) { + for (int i = 0; i < inIndices.size(); i = i + 3) { + + int idx1 = inIndices[i]; + int idx2 = inIndices[i+1]; + int idx3 = inIndices[i+2]; + + out_vertices.push_back(in_vertices[idx1]); + out_vertices.push_back(in_vertices[idx2]); + out_vertices.push_back(in_vertices[idx3]); + + out_normals.push_back(in_normals[idx1]); + out_normals.push_back(in_normals[idx2]); + out_normals.push_back(in_normals[idx3]); + + outIndices.push_back(i); + outIndices.push_back(i+1); + outIndices.push_back(i+2); + } +} + +void GLTFReader::fbxDebugDump(const FBXGeometry& fbxgeo) { + qCDebug(modelformat) << "---------------- fbxGeometry ----------------"; + qCDebug(modelformat) << " hasSkeletonJoints =" << fbxgeo.hasSkeletonJoints; + qCDebug(modelformat) << " offset =" << fbxgeo.offset; + + qCDebug(modelformat) << " 
leftEyeJointIndex =" << fbxgeo.leftEyeJointIndex; + qCDebug(modelformat) << " rightEyeJointIndex =" << fbxgeo.rightEyeJointIndex; + qCDebug(modelformat) << " neckJointIndex =" << fbxgeo.neckJointIndex; + qCDebug(modelformat) << " rootJointIndex =" << fbxgeo.rootJointIndex; + qCDebug(modelformat) << " leanJointIndex =" << fbxgeo.leanJointIndex; + qCDebug(modelformat) << " headJointIndex =" << fbxgeo.headJointIndex; + qCDebug(modelformat) << " leftHandJointIndex" << fbxgeo.leftHandJointIndex; + qCDebug(modelformat) << " rightHandJointIndex" << fbxgeo.rightHandJointIndex; + qCDebug(modelformat) << " leftToeJointIndex" << fbxgeo.leftToeJointIndex; + qCDebug(modelformat) << " rightToeJointIndex" << fbxgeo.rightToeJointIndex; + qCDebug(modelformat) << " leftEyeSize = " << fbxgeo.leftEyeSize; + qCDebug(modelformat) << " rightEyeSize = " << fbxgeo.rightEyeSize; + + qCDebug(modelformat) << " palmDirection = " << fbxgeo.palmDirection; + + qCDebug(modelformat) << " neckPivot = " << fbxgeo.neckPivot; + + qCDebug(modelformat) << " bindExtents.size() = " << fbxgeo.bindExtents.size(); + qCDebug(modelformat) << " meshExtents.size() = " << fbxgeo.meshExtents.size(); + + qCDebug(modelformat) << " jointIndices.size() =" << fbxgeo.jointIndices.size(); + qCDebug(modelformat) << " joints.count() =" << fbxgeo.joints.count(); + qCDebug(modelformat) << "---------------- Meshes ----------------"; + qCDebug(modelformat) << " meshes.count() =" << fbxgeo.meshes.count(); + qCDebug(modelformat) << " blendshapeChannelNames = " << fbxgeo.blendshapeChannelNames; + foreach(FBXMesh mesh, fbxgeo.meshes) { + qCDebug(modelformat) << "\n"; + qCDebug(modelformat) << " meshpointer =" << mesh._mesh.get(); + qCDebug(modelformat) << " meshindex =" << mesh.meshIndex; + qCDebug(modelformat) << " vertices.count() =" << mesh.vertices.size(); + qCDebug(modelformat) << " colors.count() =" << mesh.colors.count(); + qCDebug(modelformat) << " normals.count() =" << mesh.normals.size(); + qCDebug(modelformat) << " 
tangents.count() =" << mesh.tangents.size(); + qCDebug(modelformat) << " colors.count() =" << mesh.colors.count(); + qCDebug(modelformat) << " texCoords.count() =" << mesh.texCoords.count(); + qCDebug(modelformat) << " texCoords1.count() =" << mesh.texCoords1.count(); + qCDebug(modelformat) << " clusterIndices.count() =" << mesh.clusterIndices.count(); + qCDebug(modelformat) << " clusterWeights.count() =" << mesh.clusterWeights.count(); + qCDebug(modelformat) << " modelTransform =" << mesh.modelTransform; + qCDebug(modelformat) << " parts.count() =" << mesh.parts.count(); + qCDebug(modelformat) << "---------------- Meshes (blendshapes)--------"; + foreach(FBXBlendshape bshape, mesh.blendshapes) { + qCDebug(modelformat) << "\n"; + qCDebug(modelformat) << " bshape.indices.count() =" << bshape.indices.count(); + qCDebug(modelformat) << " bshape.vertices.count() =" << bshape.vertices.count(); + qCDebug(modelformat) << " bshape.normals.count() =" << bshape.normals.count(); + qCDebug(modelformat) << "\n"; + } + qCDebug(modelformat) << "---------------- Meshes (meshparts)--------"; + foreach(FBXMeshPart meshPart, mesh.parts) { + qCDebug(modelformat) << "\n"; + qCDebug(modelformat) << " quadIndices.count() =" << meshPart.quadIndices.count(); + qCDebug(modelformat) << " triangleIndices.count() =" << meshPart.triangleIndices.count(); + qCDebug(modelformat) << " materialID =" << meshPart.materialID; + qCDebug(modelformat) << "\n"; + + } + qCDebug(modelformat) << "---------------- Meshes (clusters)--------"; + qCDebug(modelformat) << " clusters.count() =" << mesh.clusters.count(); + foreach(FBXCluster cluster, mesh.clusters) { + qCDebug(modelformat) << "\n"; + qCDebug(modelformat) << " jointIndex =" << cluster.jointIndex; + qCDebug(modelformat) << " inverseBindMatrix =" << cluster.inverseBindMatrix; + qCDebug(modelformat) << "\n"; + } + qCDebug(modelformat) << "\n"; + } + qCDebug(modelformat) << "---------------- AnimationFrames ----------------"; + foreach(FBXAnimationFrame 
anim, fbxgeo.animationFrames) { + qCDebug(modelformat) << " anim.translations = " << anim.translations; + qCDebug(modelformat) << " anim.rotations = " << anim.rotations; + } + QList mitomona_keys = fbxgeo.meshIndicesToModelNames.keys(); + foreach(int key, mitomona_keys) { + qCDebug(modelformat) << " meshIndicesToModelNames key =" << key << " val =" << fbxgeo.meshIndicesToModelNames[key]; + } + + qCDebug(modelformat) << "---------------- Materials ----------------"; + + foreach(FBXMaterial mat, fbxgeo.materials) { + qCDebug(modelformat) << "\n"; + qCDebug(modelformat) << " mat.materialID =" << mat.materialID; + qCDebug(modelformat) << " diffuseColor =" << mat.diffuseColor; + qCDebug(modelformat) << " diffuseFactor =" << mat.diffuseFactor; + qCDebug(modelformat) << " specularColor =" << mat.specularColor; + qCDebug(modelformat) << " specularFactor =" << mat.specularFactor; + qCDebug(modelformat) << " emissiveColor =" << mat.emissiveColor; + qCDebug(modelformat) << " emissiveFactor =" << mat.emissiveFactor; + qCDebug(modelformat) << " shininess =" << mat.shininess; + qCDebug(modelformat) << " opacity =" << mat.opacity; + qCDebug(modelformat) << " metallic =" << mat.metallic; + qCDebug(modelformat) << " roughness =" << mat.roughness; + qCDebug(modelformat) << " emissiveIntensity =" << mat.emissiveIntensity; + qCDebug(modelformat) << " ambientFactor =" << mat.ambientFactor; + + qCDebug(modelformat) << " materialID =" << mat.materialID; + qCDebug(modelformat) << " name =" << mat.name; + qCDebug(modelformat) << " shadingModel =" << mat.shadingModel; + qCDebug(modelformat) << " _material =" << mat._material.get(); + + qCDebug(modelformat) << " normalTexture =" << mat.normalTexture.filename; + qCDebug(modelformat) << " albedoTexture =" << mat.albedoTexture.filename; + qCDebug(modelformat) << " opacityTexture =" << mat.opacityTexture.filename; + qCDebug(modelformat) << " glossTexture =" << mat.glossTexture.filename; + qCDebug(modelformat) << " roughnessTexture =" << 
mat.roughnessTexture.filename; + qCDebug(modelformat) << " specularTexture =" << mat.specularTexture.filename; + qCDebug(modelformat) << " metallicTexture =" << mat.metallicTexture.filename; + qCDebug(modelformat) << " emissiveTexture =" << mat.emissiveTexture.filename; + qCDebug(modelformat) << " occlusionTexture =" << mat.occlusionTexture.filename; + qCDebug(modelformat) << " scatteringTexture =" << mat.scatteringTexture.filename; + qCDebug(modelformat) << " lightmapTexture =" << mat.lightmapTexture.filename; + + qCDebug(modelformat) << " lightmapParams =" << mat.lightmapParams; + + qCDebug(modelformat) << " isPBSMaterial =" << mat.isPBSMaterial; + qCDebug(modelformat) << " useNormalMap =" << mat.useNormalMap; + qCDebug(modelformat) << " useAlbedoMap =" << mat.useAlbedoMap; + qCDebug(modelformat) << " useOpacityMap =" << mat.useOpacityMap; + qCDebug(modelformat) << " useRoughnessMap =" << mat.useRoughnessMap; + qCDebug(modelformat) << " useSpecularMap =" << mat.useSpecularMap; + qCDebug(modelformat) << " useMetallicMap =" << mat.useMetallicMap; + qCDebug(modelformat) << " useEmissiveMap =" << mat.useEmissiveMap; + qCDebug(modelformat) << " useOcclusionMap =" << mat.useOcclusionMap; + qCDebug(modelformat) << "\n"; + } + + qCDebug(modelformat) << "---------------- Joints ----------------"; + + foreach(FBXJoint joint, fbxgeo.joints) { + qCDebug(modelformat) << "\n"; + qCDebug(modelformat) << " shapeInfo.avgPoint =" << joint.shapeInfo.avgPoint; + qCDebug(modelformat) << " shapeInfo.debugLines =" << joint.shapeInfo.debugLines; + qCDebug(modelformat) << " shapeInfo.dots =" << joint.shapeInfo.dots; + qCDebug(modelformat) << " shapeInfo.points =" << joint.shapeInfo.points; + + qCDebug(modelformat) << " isFree =" << joint.isFree; + qCDebug(modelformat) << " freeLineage" << joint.freeLineage; + qCDebug(modelformat) << " parentIndex" << joint.parentIndex; + qCDebug(modelformat) << " distanceToParent" << joint.distanceToParent; + qCDebug(modelformat) << " translation" << 
joint.translation; + qCDebug(modelformat) << " preTransform" << joint.preTransform; + qCDebug(modelformat) << " preRotation" << joint.preRotation; + qCDebug(modelformat) << " rotation" << joint.rotation; + qCDebug(modelformat) << " postRotation" << joint.postRotation; + qCDebug(modelformat) << " postTransform" << joint.postTransform; + qCDebug(modelformat) << " transform" << joint.transform; + qCDebug(modelformat) << " rotationMin" << joint.rotationMin; + qCDebug(modelformat) << " rotationMax" << joint.rotationMax; + qCDebug(modelformat) << " inverseDefaultRotation" << joint.inverseDefaultRotation; + qCDebug(modelformat) << " inverseBindRotation" << joint.inverseBindRotation; + qCDebug(modelformat) << " bindTransform" << joint.bindTransform; + qCDebug(modelformat) << " name" << joint.name; + qCDebug(modelformat) << " isSkeletonJoint" << joint.isSkeletonJoint; + qCDebug(modelformat) << " hasGeometricOffset" << joint.hasGeometricOffset; + qCDebug(modelformat) << " geometricTranslation" << joint.geometricTranslation; + qCDebug(modelformat) << " geometricRotation" << joint.geometricRotation; + qCDebug(modelformat) << " geometricScaling" << joint.geometricScaling; + qCDebug(modelformat) << "\n"; + } + + qCDebug(modelformat) << "\n"; +} \ No newline at end of file diff --git a/libraries/fbx/src/GLTFReader.h b/libraries/fbx/src/GLTFReader.h new file mode 100644 index 0000000000..3554594768 --- /dev/null +++ b/libraries/fbx/src/GLTFReader.h @@ -0,0 +1,786 @@ +// +// GLTFReader.h +// libraries/fbx/src +// +// Created by Luis Cuenca on 8/30/17. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#ifndef hifi_GLTFReader_h +#define hifi_GLTFReader_h + +#include +#include +#include "ModelFormatLogging.h" +#include "FBXReader.h" + + +struct GLTFAsset { + QString generator; + QString version; //required + QString copyright; + QMap defined; + void dump() { + if (defined["generator"]) { + qCDebug(modelformat) << "generator: " << generator; + } + if (defined["version"]) { + qCDebug(modelformat) << "version: " << version; + } + if (defined["copyright"]) { + qCDebug(modelformat) << "copyright: " << copyright; + } + } +}; + +struct GLTFNode { + QString name; + int camera; + int mesh; + QVector children; + QVector translation; + QVector rotation; + QVector scale; + QVector matrix; + QVector transforms; + int skin; + QVector skeletons; + QString jointName; + QMap defined; + void dump() { + if (defined["name"]) { + qCDebug(modelformat) << "name: " << name; + } + if (defined["camera"]) { + qCDebug(modelformat) << "camera: " << camera; + } + if (defined["mesh"]) { + qCDebug(modelformat) << "mesh: " << mesh; + } + if (defined["skin"]) { + qCDebug(modelformat) << "skin: " << skin; + } + if (defined["jointName"]) { + qCDebug(modelformat) << "jointName: " << jointName; + } + if (defined["children"]) { + qCDebug(modelformat) << "children: " << children; + } + if (defined["translation"]) { + qCDebug(modelformat) << "translation: " << translation; + } + if (defined["rotation"]) { + qCDebug(modelformat) << "rotation: " << rotation; + } + if (defined["scale"]) { + qCDebug(modelformat) << "scale: " << scale; + } + if (defined["matrix"]) { + qCDebug(modelformat) << "matrix: " << matrix; + } + if (defined["skeletons"]) { + qCDebug(modelformat) << "skeletons: " << skeletons; + } + } +}; + +// Meshes + +struct GLTFMeshPrimitivesTarget { + int normal; + int position; + int tangent; + QMap defined; + void dump() { + if (defined["normal"]) { + qCDebug(modelformat) << "normal: " << normal; + } + 
if (defined["position"]) { + qCDebug(modelformat) << "position: " << position; + } + if (defined["tangent"]) { + qCDebug(modelformat) << "tangent: " << tangent; + } + } +}; + +namespace GLTFMeshPrimitivesRenderingMode { + enum Values { + POINTS = 0, + LINES, + LINE_LOOP, + LINE_STRIP, + TRIANGLES, + TRIANGLE_STRIP, + TRIANGLE_FAN + }; +} + +struct GLTFMeshPrimitiveAttr { + QMap values; + QMap defined; + void dump() { + QList keys = values.keys(); + qCDebug(modelformat) << "values: "; + foreach(auto k, keys) { + qCDebug(modelformat) << k << ": " << values[k]; + } + } +}; + +struct GLTFMeshPrimitive { + GLTFMeshPrimitiveAttr attributes; + int indices; + int material; + int mode{ GLTFMeshPrimitivesRenderingMode::TRIANGLES }; + QVector targets; + QMap defined; + void dump() { + if (defined["attributes"]) { + qCDebug(modelformat) << "attributes: "; + attributes.dump(); + } + if (defined["indices"]) { + qCDebug(modelformat) << "indices: " << indices; + } + if (defined["material"]) { + qCDebug(modelformat) << "material: " << material; + } + if (defined["mode"]) { + qCDebug(modelformat) << "mode: " << mode; + } + if (defined["targets"]) { + qCDebug(modelformat) << "targets: "; + foreach(auto t, targets) t.dump(); + } + } +}; + +struct GLTFMesh { + QString name; + QVector primitives; + QVector weights; + QMap defined; + void dump() { + if (defined["name"]) { + qCDebug(modelformat) << "name: " << name; + } + if (defined["primitives"]) { + qCDebug(modelformat) << "primitives: "; + foreach(auto prim, primitives) prim.dump(); + } + if (defined["weights"]) { + qCDebug(modelformat) << "weights: " << weights; + } + } +}; + +// BufferViews + +namespace GLTFBufferViewTarget { + enum Values { + ARRAY_BUFFER = 34962, + ELEMENT_ARRAY_BUFFER = 34963 + }; +} + +struct GLTFBufferView { + int buffer; //required + int byteLength; //required + int byteOffset; + int target; + QMap defined; + void dump() { + if (defined["buffer"]) { + qCDebug(modelformat) << "buffer: " << buffer; + } + if 
(defined["byteLength"]) { + qCDebug(modelformat) << "byteLength: " << byteLength; + } + if (defined["byteOffset"]) { + qCDebug(modelformat) << "byteOffset: " << byteOffset; + } + if (defined["target"]) { + qCDebug(modelformat) << "target: " << target; + } + } +}; + +// Buffers + +struct GLTFBuffer { + int byteLength; //required + QString uri; + QByteArray blob; + QMap defined; + void dump() { + if (defined["byteLength"]) { + qCDebug(modelformat) << "byteLength: " << byteLength; + } + if (defined["uri"]) { + qCDebug(modelformat) << "uri: " << uri; + } + if (defined["blob"]) { + qCDebug(modelformat) << "blob: " << "DEFINED"; + } + } +}; + +// Samplers +namespace GLTFSamplerFilterType { + enum Values { + NEAREST = 9728, + LINEAR = 9729, + NEAREST_MIPMAP_NEAREST = 9984, + LINEAR_MIPMAP_NEAREST = 9985, + NEAREST_MIPMAP_LINEAR = 9986, + LINEAR_MIPMAP_LINEAR = 9987 + }; +} + +namespace GLTFSamplerWrapType { + enum Values { + CLAMP_TO_EDGE = 33071, + MIRRORED_REPEAT = 33648, + REPEAT = 10497 + }; +} + +struct GLTFSampler { + int magFilter; + int minFilter; + int wrapS; + int wrapT; + QMap defined; + void dump() { + if (defined["magFilter"]) { + qCDebug(modelformat) << "magFilter: " << magFilter; + } + if (defined["minFilter"]) { + qCDebug(modelformat) << "minFilter: " << minFilter; + } + if (defined["wrapS"]) { + qCDebug(modelformat) << "wrapS: " << wrapS; + } + if (defined["wrapT"]) { + qCDebug(modelformat) << "wrapT: " << wrapT; + } + } +}; + +// Cameras + +struct GLTFCameraPerspective { + double aspectRatio; + double yfov; //required + double zfar; + double znear; //required + QMap defined; + void dump() { + if (defined["zfar"]) { + qCDebug(modelformat) << "zfar: " << zfar; + } + if (defined["znear"]) { + qCDebug(modelformat) << "znear: " << znear; + } + if (defined["aspectRatio"]) { + qCDebug(modelformat) << "aspectRatio: " << aspectRatio; + } + if (defined["yfov"]) { + qCDebug(modelformat) << "yfov: " << yfov; + } + } +}; + +struct GLTFCameraOrthographic { + double 
zfar; //required + double znear; //required + double xmag; //required + double ymag; //required + QMap defined; + void dump() { + if (defined["zfar"]) { + qCDebug(modelformat) << "zfar: " << zfar; + } + if (defined["znear"]) { + qCDebug(modelformat) << "znear: " << znear; + } + if (defined["xmag"]) { + qCDebug(modelformat) << "xmag: " << xmag; + } + if (defined["ymag"]) { + qCDebug(modelformat) << "ymag: " << ymag; + } + } +}; + +namespace GLTFCameraTypes { + enum Values { + ORTHOGRAPHIC = 0, + PERSPECTIVE + }; +} + +struct GLTFCamera { + QString name; + GLTFCameraPerspective perspective; //required (or) + GLTFCameraOrthographic orthographic; //required (or) + int type; + QMap defined; + void dump() { + if (defined["name"]) { + qCDebug(modelformat) << "name: " << name; + } + if (defined["type"]) { + qCDebug(modelformat) << "type: " << type; + } + if (defined["perspective"]) { + perspective.dump(); + } + if (defined["orthographic"]) { + orthographic.dump(); + } + } +}; + +// Images + +namespace GLTFImageMimetype { + enum Values { + JPEG = 0, + PNG + }; +}; + +struct GLTFImage { + QString uri; //required (or) + int mimeType; + int bufferView; //required (or) + QMap defined; + void dump() { + if (defined["uri"]) { + qCDebug(modelformat) << "uri: " << uri; + } + if (defined["mimeType"]) { + qCDebug(modelformat) << "mimeType: " << mimeType; + } + if (defined["bufferView"]) { + qCDebug(modelformat) << "bufferView: " << bufferView; + } + } +}; + +// Materials + +struct GLTFpbrMetallicRoughness { + QVector baseColorFactor; + int baseColorTexture; + int metallicRoughnessTexture; + double metallicFactor; + double roughnessFactor; + QMap defined; + void dump() { + if (defined["baseColorFactor"]) { + qCDebug(modelformat) << "baseColorFactor: " << baseColorFactor; + } + if (defined["baseColorTexture"]) { + qCDebug(modelformat) << "baseColorTexture: " << baseColorTexture; + } + if (defined["metallicRoughnessTexture"]) { + qCDebug(modelformat) << "metallicRoughnessTexture: " << 
metallicRoughnessTexture; + } + if (defined["metallicFactor"]) { + qCDebug(modelformat) << "metallicFactor: " << metallicFactor; + } + if (defined["roughnessFactor"]) { + qCDebug(modelformat) << "roughnessFactor: " << roughnessFactor; + } + if (defined["baseColorFactor"]) { + qCDebug(modelformat) << "baseColorFactor: " << baseColorFactor; + } + } +}; + +namespace GLTFMaterialAlphaMode { + enum Values { + OPAQUE = 0, + MASK, + BLEND + }; +}; + +struct GLTFMaterial { + QString name; + QVector emissiveFactor; + int emissiveTexture; + int normalTexture; + int occlusionTexture; + int alphaMode; + double alphaCutoff; + bool doubleSided; + GLTFpbrMetallicRoughness pbrMetallicRoughness; + QMap defined; + void dump() { + if (defined["name"]) { + qCDebug(modelformat) << "name: " << name; + } + if (defined["emissiveTexture"]) { + qCDebug(modelformat) << "emissiveTexture: " << emissiveTexture; + } + if (defined["normalTexture"]) { + qCDebug(modelformat) << "normalTexture: " << normalTexture; + } + if (defined["occlusionTexture"]) { + qCDebug(modelformat) << "occlusionTexture: " << occlusionTexture; + } + if (defined["emissiveFactor"]) { + qCDebug(modelformat) << "emissiveFactor: " << emissiveFactor; + } + if (defined["pbrMetallicRoughness"]) { + pbrMetallicRoughness.dump(); + } + } +}; + +// Accesors + +namespace GLTFAccessorType { + enum Values { + SCALAR = 0, + VEC2, + VEC3, + VEC4, + MAT2, + MAT3, + MAT4 + }; +} +namespace GLTFAccessorComponentType { + enum Values { + BYTE = 5120, + UNSIGNED_BYTE = 5121, + SHORT = 5122, + UNSIGNED_SHORT = 5123, + UNSIGNED_INT = 5125, + FLOAT = 5126 + }; +} +struct GLTFAccessor { + int bufferView; + int byteOffset; + int componentType; //required + int count; //required + int type; //required + bool normalized{ false }; + QVector max; + QVector min; + QMap defined; + void dump() { + if (defined["bufferView"]) { + qCDebug(modelformat) << "bufferView: " << bufferView; + } + if (defined["byteOffset"]) { + qCDebug(modelformat) << "byteOffset: " 
<< byteOffset; + } + if (defined["componentType"]) { + qCDebug(modelformat) << "componentType: " << componentType; + } + if (defined["count"]) { + qCDebug(modelformat) << "count: " << count; + } + if (defined["type"]) { + qCDebug(modelformat) << "type: " << type; + } + if (defined["normalized"]) { + qCDebug(modelformat) << "normalized: " << (normalized ? "TRUE" : "FALSE"); + } + if (defined["max"]) { + qCDebug(modelformat) << "max: "; + foreach(float m, max) { + qCDebug(modelformat) << m; + } + } + if (defined["min"]) { + qCDebug(modelformat) << "min: "; + foreach(float m, min) { + qCDebug(modelformat) << m; + } + } + } +}; + +// Animation + +namespace GLTFChannelTargetPath { + enum Values { + TRANSLATION = 0, + ROTATION, + SCALE + }; +} + +struct GLTFChannelTarget { + int node; + int path; + QMap defined; + void dump() { + if (defined["node"]) { + qCDebug(modelformat) << "node: " << node; + } + if (defined["path"]) { + qCDebug(modelformat) << "path: " << path; + } + } +}; + +struct GLTFChannel { + int sampler; + GLTFChannelTarget target; + QMap defined; + void dump() { + if (defined["sampler"]) { + qCDebug(modelformat) << "sampler: " << sampler; + } + if (defined["target"]) { + target.dump(); + } + } +}; + +namespace GLTFAnimationSamplerInterpolation { + enum Values{ + LINEAR = 0 + }; +} + +struct GLTFAnimationSampler { + int input; + int output; + int interpolation; + QMap defined; + void dump() { + if (defined["input"]) { + qCDebug(modelformat) << "input: " << input; + } + if (defined["output"]) { + qCDebug(modelformat) << "output: " << output; + } + if (defined["interpolation"]) { + qCDebug(modelformat) << "interpolation: " << interpolation; + } + } +}; + +struct GLTFAnimation { + QVector channels; + QVector samplers; + QMap defined; + void dump() { + if (defined["channels"]) { + foreach(auto channel, channels) channel.dump(); + } + if (defined["samplers"]) { + foreach(auto sampler, samplers) sampler.dump(); + } + } +}; + +struct GLTFScene { + QString name; + 
QVector nodes; + QMap defined; + void dump() { + if (defined["name"]) { + qCDebug(modelformat) << "name: " << name; + } + if (defined["nodes"]) { + qCDebug(modelformat) << "nodes: "; + foreach(int node, nodes) qCDebug(modelformat) << node; + } + } +}; + +struct GLTFSkin { + int inverseBindMatrices; + QVector joints; + int skeleton; + QMap defined; + void dump() { + if (defined["inverseBindMatrices"]) { + qCDebug(modelformat) << "inverseBindMatrices: " << inverseBindMatrices; + } + if (defined["skeleton"]) { + qCDebug(modelformat) << "skeleton: " << skeleton; + } + if (defined["joints"]) { + qCDebug(modelformat) << "joints: "; + foreach(int joint, joints) qCDebug(modelformat) << joint; + } + } +}; + +struct GLTFTexture { + int sampler; + int source; + QMap defined; + void dump() { + if (defined["sampler"]) { + qCDebug(modelformat) << "sampler: " << sampler; + } + if (defined["source"]) { + qCDebug(modelformat) << "source: " << source; + } + } +}; + +struct GLTFFile { + GLTFAsset asset; + int scene = 0; + QVector accessors; + QVector animations; + QVector bufferviews; + QVector buffers; + QVector cameras; + QVector images; + QVector materials; + QVector meshes; + QVector nodes; + QVector samplers; + QVector scenes; + QVector skins; + QVector textures; + QMap defined; + void dump() { + if (defined["asset"]) { + asset.dump(); + } + if (defined["scene"]) { + qCDebug(modelformat) << "scene: " << scene; + } + if (defined["accessors"]) { + foreach(auto acc, accessors) acc.dump(); + } + if (defined["animations"]) { + foreach(auto ani, animations) ani.dump(); + } + if (defined["bufferviews"]) { + foreach(auto bv, bufferviews) bv.dump(); + } + if (defined["buffers"]) { + foreach(auto b, buffers) b.dump(); + } + if (defined["cameras"]) { + foreach(auto c, cameras) c.dump(); + } + if (defined["images"]) { + foreach(auto i, images) i.dump(); + } + if (defined["materials"]) { + foreach(auto mat, materials) mat.dump(); + } + if (defined["meshes"]) { + foreach(auto mes, meshes)
mes.dump(); + } + if (defined["nodes"]) { + foreach(auto nod, nodes) nod.dump(); + } + if (defined["samplers"]) { + foreach(auto sa, samplers) sa.dump(); + } + if (defined["scenes"]) { + foreach(auto sc, scenes) sc.dump(); + } + if (defined["skins"]) { + foreach(auto sk, skins) sk.dump(); + } + if (defined["textures"]) { + foreach(auto tex, textures) tex.dump(); + } + } +}; + +class GLTFReader : public QObject { + Q_OBJECT +public: + GLTFReader(); + FBXGeometry* readGLTF(QByteArray& model, const QVariantHash& mapping, + const QUrl& url, bool loadLightmaps = true, float lightmapLevel = 1.0f); +private: + GLTFFile _file; + QUrl _url; + + glm::mat4 getModelTransform(const GLTFNode& node); + + bool buildGeometry(FBXGeometry& geometry, const QUrl& url); + bool parseGLTF(const QByteArray& model); + + bool getStringVal(const QJsonObject& object, const QString& fieldname, + QString& value, QMap& defined); + bool getBoolVal(const QJsonObject& object, const QString& fieldname, + bool& value, QMap& defined); + bool getIntVal(const QJsonObject& object, const QString& fieldname, + int& value, QMap& defined); + bool getDoubleVal(const QJsonObject& object, const QString& fieldname, + double& value, QMap& defined); + bool getObjectVal(const QJsonObject& object, const QString& fieldname, + QJsonObject& value, QMap& defined); + bool getIntArrayVal(const QJsonObject& object, const QString& fieldname, + QVector& values, QMap& defined); + bool getDoubleArrayVal(const QJsonObject& object, const QString& fieldname, + QVector& values, QMap& defined); + bool getObjectArrayVal(const QJsonObject& object, const QString& fieldname, + QJsonArray& objects, QMap& defined); + + int getMaterialAlphaMode(const QString& type); + int getAccessorType(const QString& type); + int getAnimationSamplerInterpolation(const QString& interpolation); + int getCameraType(const QString& type); + int getImageMimeType(const QString& mime); + int getMeshPrimitiveRenderingMode(const QString& type); + + bool
getIndexFromObject(const QJsonObject& object, const QString& field, + int& outidx, QMap& defined); + + bool setAsset(const QJsonObject& object); + bool addAccessor(const QJsonObject& object); + bool addAnimation(const QJsonObject& object); + bool addBufferView(const QJsonObject& object); + bool addBuffer(const QJsonObject& object); + bool addCamera(const QJsonObject& object); + bool addImage(const QJsonObject& object); + bool addMaterial(const QJsonObject& object); + bool addMesh(const QJsonObject& object); + bool addNode(const QJsonObject& object); + bool addSampler(const QJsonObject& object); + bool addScene(const QJsonObject& object); + bool addSkin(const QJsonObject& object); + bool addTexture(const QJsonObject& object); + + bool readBinary(const QString& url, QByteArray& outdata); + + template + bool readArray(const QByteArray& bin, int byteOffset, int byteLength, + QVector& outarray, int accessorType); + + template + bool addArrayOfType(const QByteArray& bin, int byteOffset, int byteLength, + QVector& outarray, int accessorType, int componentType); + + void retriangulate(const QVector& in_indices, const QVector& in_vertices, + const QVector& in_normals, QVector& out_indices, + QVector& out_vertices, QVector& out_normals); + + std::tuple requestData(QUrl& url); + QNetworkReply* request(QUrl& url, bool isTest); + bool doesResourceExist(const QString& url); + + + void setFBXMaterial(FBXMaterial& fbxmat, const GLTFMaterial& material); + FBXTexture getFBXTexture(const GLTFTexture& texture); + void fbxDebugDump(const FBXGeometry& fbxgeo); +}; + +#endif // hifi_GLTFReader_h \ No newline at end of file diff --git a/libraries/model-networking/src/model-networking/ModelCache.cpp b/libraries/model-networking/src/model-networking/ModelCache.cpp index b62ad7b366..6a14e6d6b7 100644 --- a/libraries/model-networking/src/model-networking/ModelCache.cpp +++ b/libraries/model-networking/src/model-networking/ModelCache.cpp @@ -14,6 +14,7 @@ #include #include "FBXReader.h" 
#include "OBJReader.h" +#include "GLTFReader.h" #include #include @@ -175,9 +176,12 @@ void GeometryReader::run() { QString urlname = _url.path().toLower(); if (!urlname.isEmpty() && !_url.path().isEmpty() && - (_url.path().toLower().endsWith(".fbx") || - _url.path().toLower().endsWith(".obj") || - _url.path().toLower().endsWith(".obj.gz"))) { + + (_url.path().toLower().endsWith(".fbx") || + _url.path().toLower().endsWith(".obj") || + _url.path().toLower().endsWith(".obj.gz") || + _url.path().toLower().endsWith(".gltf"))) { + FBXGeometry::Pointer fbxGeometry; if (_url.path().toLower().endsWith(".fbx")) { @@ -189,12 +193,18 @@ void GeometryReader::run() { fbxGeometry.reset(OBJReader().readOBJ(_data, _mapping, _combineParts, _url)); } else if (_url.path().toLower().endsWith(".obj.gz")) { QByteArray uncompressedData; - if (gunzip(_data, uncompressedData)){ + if (gunzip(_data, uncompressedData)) { fbxGeometry.reset(OBJReader().readOBJ(uncompressedData, _mapping, _combineParts, _url)); } else { - throw QString("failed to decompress .obj.gz" ); + throw QString("failed to decompress .obj.gz"); } + } else if (_url.path().toLower().endsWith(".gltf")) { + std::shared_ptr glreader = std::make_shared(); + fbxGeometry.reset(glreader->readGLTF(_data, _mapping, _url)); + if (fbxGeometry->meshes.size() == 0 && fbxGeometry->joints.size() == 0) { + throw QString("empty geometry, possibly due to an unsupported GLTF version"); + } } else { throw QString("unsupported format"); } diff --git a/libraries/networking/src/LimitedNodeList.cpp b/libraries/networking/src/LimitedNodeList.cpp index 300a445ebd..1e682f367a 100644 --- a/libraries/networking/src/LimitedNodeList.cpp +++ b/libraries/networking/src/LimitedNodeList.cpp @@ -27,7 +27,6 @@ #include #include #include -#include #include #include "AccountManager.h" @@ -430,7 +429,7 @@ qint64 LimitedNodeList::sendPacket(std::unique_ptr packet, const HifiS } } -qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& 
destinationNode) { +qint64 LimitedNodeList::sendUnreliableUnorderedPacketList(NLPacketList& packetList, const Node& destinationNode) { auto activeSocket = destinationNode.getActiveSocket(); if (activeSocket) { @@ -453,8 +452,8 @@ qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& des } } -qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr, - const QUuid& connectionSecret) { +qint64 LimitedNodeList::sendUnreliableUnorderedPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr, + const QUuid& connectionSecret) { qint64 bytesSent = 0; // close the last packet in the list @@ -1110,7 +1109,6 @@ void LimitedNodeList::setLocalSocket(const HifiSockAddr& sockAddr) { qCInfo(networking) << "Local socket is" << sockAddr; } else { qCInfo(networking) << "Local socket has changed from" << _localSockAddr << "to" << sockAddr; - DependencyManager::get()->incrementStat(LOCAL_SOCKET_CHANGE_STAT); } _localSockAddr = sockAddr; diff --git a/libraries/networking/src/LimitedNodeList.h b/libraries/networking/src/LimitedNodeList.h index 994f91db19..e21f01a470 100644 --- a/libraries/networking/src/LimitedNodeList.h +++ b/libraries/networking/src/LimitedNodeList.h @@ -66,8 +66,6 @@ const QHostAddress DEFAULT_ASSIGNMENT_CLIENT_MONITOR_HOSTNAME = QHostAddress::Lo const QString USERNAME_UUID_REPLACEMENT_STATS_KEY = "$username"; -const QString LOCAL_SOCKET_CHANGE_STAT = "LocalSocketChanges"; - typedef std::pair UUIDNodePair; typedef tbb::concurrent_unordered_map NodeHash; @@ -126,17 +124,25 @@ public: PacketReceiver& getPacketReceiver() { return *_packetReceiver; } + // use sendUnreliablePacket to send an unreliable packet (that you do not need to move) + // either to a node (via its active socket) or to a manual sockaddr qint64 sendUnreliablePacket(const NLPacket& packet, const Node& destinationNode); qint64 sendUnreliablePacket(const NLPacket& packet, const HifiSockAddr& sockAddr, const QUuid& connectionSecret = 
QUuid()); + // use sendPacket to send a moved unreliable or reliable NL packet to a node's active socket or manual sockaddr qint64 sendPacket(std::unique_ptr packet, const Node& destinationNode); qint64 sendPacket(std::unique_ptr packet, const HifiSockAddr& sockAddr, const QUuid& connectionSecret = QUuid()); - qint64 sendPacketList(NLPacketList& packetList, const Node& destinationNode); - qint64 sendPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr, + // use sendUnreliableUnorderedPacketList to unreliably send separate packets from the packet list + // either to a node's active socket or to a manual sockaddr + qint64 sendUnreliableUnorderedPacketList(NLPacketList& packetList, const Node& destinationNode); + qint64 sendUnreliableUnorderedPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr, const QUuid& connectionSecret = QUuid()); + + // use sendPacketList to send reliable packet lists (ordered or unordered) to a node's active socket + // or to a manual sock addr qint64 sendPacketList(std::unique_ptr packetList, const HifiSockAddr& sockAddr); qint64 sendPacketList(std::unique_ptr packetList, const Node& destinationNode); diff --git a/libraries/networking/src/udt/PacketHeaders.cpp b/libraries/networking/src/udt/PacketHeaders.cpp index 9a98393fa1..c2c1d75726 100644 --- a/libraries/networking/src/udt/PacketHeaders.cpp +++ b/libraries/networking/src/udt/PacketHeaders.cpp @@ -33,7 +33,7 @@ PacketVersion versionForPacketType(PacketType packetType) { return static_cast(EntityVersion::HazeEffect); case PacketType::EntityQuery: - return static_cast(EntityQueryPacketVersion::JSONFilterWithFamilyTree); + return static_cast(EntityQueryPacketVersion::ConnectionIdentifier); case PacketType::AvatarIdentity: case PacketType::AvatarData: case PacketType::BulkAvatarData: diff --git a/libraries/networking/src/udt/PacketHeaders.h b/libraries/networking/src/udt/PacketHeaders.h index 21b4ae8878..c4c1758ed2 100644 --- 
a/libraries/networking/src/udt/PacketHeaders.h +++ b/libraries/networking/src/udt/PacketHeaders.h @@ -209,7 +209,8 @@ enum class EntityScriptCallMethodVersion : PacketVersion { enum class EntityQueryPacketVersion: PacketVersion { JSONFilter = 18, - JSONFilterWithFamilyTree = 19 + JSONFilterWithFamilyTree = 19, + ConnectionIdentifier = 20 }; enum class AssetServerPacketVersion: PacketVersion { diff --git a/libraries/octree/src/OctreeQuery.cpp b/libraries/octree/src/OctreeQuery.cpp index a88f730a50..18766dd7f6 100644 --- a/libraries/octree/src/OctreeQuery.cpp +++ b/libraries/octree/src/OctreeQuery.cpp @@ -9,6 +9,8 @@ // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // +#include + #include #include @@ -22,7 +24,7 @@ const float DEFAULT_ASPECT_RATIO = 1.0f; const float DEFAULT_NEAR_CLIP = 0.1f; const float DEFAULT_FAR_CLIP = 3.0f; -OctreeQuery::OctreeQuery() : +OctreeQuery::OctreeQuery(bool randomizeConnectionID) : _cameraFov(DEFAULT_FOV), _cameraAspectRatio(DEFAULT_ASPECT_RATIO), _cameraNearClip(DEFAULT_NEAR_CLIP), @@ -30,10 +32,21 @@ OctreeQuery::OctreeQuery() : _cameraCenterRadius(DEFAULT_FAR_CLIP) { _maxQueryPPS = DEFAULT_MAX_OCTREE_PPS; + + if (randomizeConnectionID) { + // randomize our initial octree query connection ID using random_device + // the connection ID is 16 bits so we take a generated 32 bit value from random device and chop off the top + std::random_device randomDevice; + _connectionID = randomDevice(); + } } int OctreeQuery::getBroadcastData(unsigned char* destinationBuffer) { unsigned char* bufferStart = destinationBuffer; + + // pack the connection ID so the server can detect when we start a new connection + memcpy(destinationBuffer, &_connectionID, sizeof(_connectionID)); + destinationBuffer += sizeof(_connectionID); // back a boolean (cut to 1 byte) to designate if this query uses the sent view frustum memcpy(destinationBuffer, &_usesFrustum, sizeof(_usesFrustum)); @@ -98,7 +111,27 @@ int 
OctreeQuery::parseData(ReceivedMessage& message) { const unsigned char* startPosition = reinterpret_cast(message.getRawMessage()); const unsigned char* sourceBuffer = startPosition; - + + // unpack the connection ID + uint16_t newConnectionID; + memcpy(&newConnectionID, sourceBuffer, sizeof(newConnectionID)); + sourceBuffer += sizeof(newConnectionID); + + if (!_hasReceivedFirstQuery) { + // set our flag to indicate that we've parsed for this query at least once + _hasReceivedFirstQuery = true; + + // set the incoming connection ID as the current + _connectionID = newConnectionID; + } else { + if (newConnectionID != _connectionID) { + // the connection ID has changed - emit our signal so the server + // knows that the client is starting a new session + _connectionID = newConnectionID; + emit incomingConnectionIDChanged(); + } + } + // check if this query uses a view frustum memcpy(&_usesFrustum, sourceBuffer, sizeof(_usesFrustum)); sourceBuffer += sizeof(_usesFrustum); diff --git a/libraries/octree/src/OctreeQuery.h b/libraries/octree/src/OctreeQuery.h index fc9ea525e6..21ce2e7fac 100644 --- a/libraries/octree/src/OctreeQuery.h +++ b/libraries/octree/src/OctreeQuery.h @@ -27,7 +27,7 @@ class OctreeQuery : public NodeData { Q_OBJECT public: - OctreeQuery(); + OctreeQuery(bool randomizeConnectionID = false); virtual ~OctreeQuery() {} int getBroadcastData(unsigned char* destinationBuffer); @@ -68,6 +68,13 @@ public: bool getUsesFrustum() { return _usesFrustum; } void setUsesFrustum(bool usesFrustum) { _usesFrustum = usesFrustum; } + void incrementConnectionID() { ++_connectionID; } + + bool hasReceivedFirstQuery() const { return _hasReceivedFirstQuery; } + +signals: + void incomingConnectionIDChanged(); + public slots: void setMaxQueryPacketsPerSecond(int maxQueryPPS) { _maxQueryPPS = maxQueryPPS; } void setOctreeSizeScale(float octreeSizeScale) { _octreeElementSizeScale = octreeSizeScale; } @@ -90,9 +97,12 @@ protected: int _boundaryLevelAdjust = 0; /// used for LOD 
calculations uint8_t _usesFrustum = true; + uint16_t _connectionID; // query connection ID, randomized to start, increments with each new connection to server QJsonObject _jsonParameters; QReadWriteLock _jsonParametersLock; + + bool _hasReceivedFirstQuery { false }; private: // privatize the copy constructor and assignment operator so they cannot be called diff --git a/libraries/octree/src/OctreeQueryNode.cpp b/libraries/octree/src/OctreeQueryNode.cpp index c26b4ce77b..f0c9027493 100644 --- a/libraries/octree/src/OctreeQueryNode.cpp +++ b/libraries/octree/src/OctreeQueryNode.cpp @@ -18,7 +18,6 @@ #include #include - void OctreeQueryNode::nodeKilled() { _isShuttingDown = true; } diff --git a/libraries/render-utils/src/BloomApply.slf b/libraries/render-utils/src/BloomApply.slf new file mode 100644 index 0000000000..953258e8ab --- /dev/null +++ b/libraries/render-utils/src/BloomApply.slf @@ -0,0 +1,27 @@ +<@include gpu/Config.slh@> +<$VERSION_HEADER$> +// BloomApply.slf +// Mix the three gaussian blur textures. +// +// Created by Olivier Prat on 10/09/2017 +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +uniform sampler2D blurMap0; +uniform sampler2D blurMap1; +uniform sampler2D blurMap2; +uniform float intensity; + +in vec2 varTexCoord0; +out vec4 outFragColor; + +void main(void) { + vec4 blur0 = texture(blurMap0, varTexCoord0); + vec4 blur1 = texture(blurMap1, varTexCoord0); + vec4 blur2 = texture(blurMap2, varTexCoord0); + + outFragColor = vec4((blur0.rgb+blur1.rgb+blur2.rgb)*intensity, 1.0f); +} diff --git a/libraries/render-utils/src/BloomEffect.cpp b/libraries/render-utils/src/BloomEffect.cpp new file mode 100644 index 0000000000..9d9367a6d5 --- /dev/null +++ b/libraries/render-utils/src/BloomEffect.cpp @@ -0,0 +1,359 @@ +// +// BloomEffect.cpp +// render-utils/src/ +// +// Created by Olivier Prat on 09/25/17. 
+// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// +#include "BloomEffect.h" + +#include "gpu/Context.h" +#include "gpu/StandardShaderLib.h" + +#include +#include + +#include "BloomThreshold_frag.h" +#include "BloomApply_frag.h" + +#define BLOOM_BLUR_LEVEL_COUNT 3 + +BloomThreshold::BloomThreshold(unsigned int downsamplingFactor) : + _downsamplingFactor(downsamplingFactor) { + assert(downsamplingFactor > 0); +} + +void BloomThreshold::configure(const Config& config) { + _threshold = config.threshold; +} + +void BloomThreshold::run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs) { + assert(renderContext->args); + assert(renderContext->args->hasViewFrustum()); + + RenderArgs* args = renderContext->args; + + const auto frameTransform = inputs.get0(); + const auto inputFrameBuffer = inputs.get1(); + + assert(inputFrameBuffer->hasColor()); + + auto inputBuffer = inputFrameBuffer->getRenderBuffer(0); + auto bufferSize = gpu::Vec2u(inputBuffer->getDimensions()); + + // Downsample resolution + bufferSize.x /= _downsamplingFactor; + bufferSize.y /= _downsamplingFactor; + + if (!_outputBuffer || _outputBuffer->getSize() != bufferSize) { + auto colorTexture = gpu::TexturePointer(gpu::Texture::createRenderBuffer(inputBuffer->getTexelFormat(), bufferSize.x, bufferSize.y, + gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT))); + + _outputBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("BloomThreshold")); + _outputBuffer->setRenderBuffer(0, colorTexture); + } + + static const int COLOR_MAP_SLOT = 0; + static const int THRESHOLD_SLOT = 1; + + if (!_pipeline) { + auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS(); + auto ps = gpu::Shader::createPixel(std::string(BloomThreshold_frag)); + gpu::ShaderPointer program = gpu::Shader::createProgram(vs, 
ps); + + gpu::Shader::BindingSet slotBindings; + slotBindings.insert(gpu::Shader::Binding("colorMap", COLOR_MAP_SLOT)); + slotBindings.insert(gpu::Shader::Binding("threshold", THRESHOLD_SLOT)); + gpu::Shader::makeProgram(*program, slotBindings); + + gpu::StatePointer state = gpu::StatePointer(new gpu::State()); + _pipeline = gpu::Pipeline::create(program, state); + } + + glm::ivec4 viewport{ 0, 0, bufferSize.x, bufferSize.y }; + + gpu::doInBatch(args->_context, [&](gpu::Batch& batch) { + batch.enableStereo(false); + + batch.setViewportTransform(viewport); + batch.setProjectionTransform(glm::mat4()); + batch.resetViewTransform(); + batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(bufferSize, viewport)); + batch.setPipeline(_pipeline); + + batch.setFramebuffer(_outputBuffer); + batch.setResourceTexture(COLOR_MAP_SLOT, inputBuffer); + batch._glUniform1f(THRESHOLD_SLOT, _threshold); + batch.draw(gpu::TRIANGLE_STRIP, 4); + }); + + outputs = _outputBuffer; +} + +BloomApply::BloomApply() { + +} + +void BloomApply::configure(const Config& config) { + _intensity = config.intensity; +} + +void BloomApply::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) { + assert(renderContext->args); + assert(renderContext->args->hasViewFrustum()); + RenderArgs* args = renderContext->args; + + static auto BLUR0_SLOT = 0; + static auto BLUR1_SLOT = 1; + static auto BLUR2_SLOT = 2; + static auto INTENSITY_SLOT = 3; + + if (!_pipeline) { + auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS(); + auto ps = gpu::Shader::createPixel(std::string(BloomApply_frag)); + gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps); + + gpu::Shader::BindingSet slotBindings; + slotBindings.insert(gpu::Shader::Binding("blurMap0", BLUR0_SLOT)); + slotBindings.insert(gpu::Shader::Binding("blurMap1", BLUR1_SLOT)); + slotBindings.insert(gpu::Shader::Binding("blurMap2", BLUR2_SLOT)); + slotBindings.insert(gpu::Shader::Binding("intensity", 
INTENSITY_SLOT)); + gpu::Shader::makeProgram(*program, slotBindings); + + gpu::StatePointer state = gpu::StatePointer(new gpu::State()); + state->setDepthTest(gpu::State::DepthTest(false, false)); + _pipeline = gpu::Pipeline::create(program, state); + } + + const auto frameBuffer = inputs.get0(); + const auto framebufferSize = frameBuffer->getSize(); + const auto blur0FB = inputs.get1(); + const auto blur1FB = inputs.get2(); + const auto blur2FB = inputs.get3(); + const glm::ivec4 viewport{ 0, 0, framebufferSize.x, framebufferSize.y }; + + gpu::doInBatch(args->_context, [&](gpu::Batch& batch) { + batch.enableStereo(false); + + batch.setFramebuffer(frameBuffer); + + batch.setViewportTransform(viewport); + batch.setProjectionTransform(glm::mat4()); + batch.resetViewTransform(); + batch.setPipeline(_pipeline); + + batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, viewport)); + batch.setResourceTexture(BLUR0_SLOT, blur0FB->getRenderBuffer(0)); + batch.setResourceTexture(BLUR1_SLOT, blur1FB->getRenderBuffer(0)); + batch.setResourceTexture(BLUR2_SLOT, blur2FB->getRenderBuffer(0)); + batch._glUniform1f(INTENSITY_SLOT, _intensity / 3.0f); + batch.draw(gpu::TRIANGLE_STRIP, 4); + }); +} + +void BloomDraw::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) { + assert(renderContext->args); + assert(renderContext->args->hasViewFrustum()); + RenderArgs* args = renderContext->args; + + const auto frameBuffer = inputs.get0(); + const auto bloomFrameBuffer = inputs.get1(); + + if (frameBuffer && bloomFrameBuffer) { + const auto framebufferSize = frameBuffer->getSize(); + + if (!_pipeline) { + auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS(); + auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS(); + gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps); + + gpu::Shader::BindingSet slotBindings; + gpu::Shader::makeProgram(*program, slotBindings); + + gpu::StatePointer state = 
gpu::StatePointer(new gpu::State()); + state->setDepthTest(gpu::State::DepthTest(false, false)); + state->setBlendFunction(true, gpu::State::ONE, gpu::State::BLEND_OP_ADD, gpu::State::ONE, + gpu::State::ZERO, gpu::State::BLEND_OP_ADD, gpu::State::ONE); + _pipeline = gpu::Pipeline::create(program, state); + } + + gpu::doInBatch(args->_context, [&](gpu::Batch& batch) { + batch.enableStereo(false); + + batch.setFramebuffer(frameBuffer); + + batch.setViewportTransform(args->_viewport); + batch.setProjectionTransform(glm::mat4()); + batch.resetViewTransform(); + batch.setPipeline(_pipeline); + + batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport)); + batch.setResourceTexture(0, bloomFrameBuffer->getRenderBuffer(0)); + batch.draw(gpu::TRIANGLE_STRIP, 4); + }); + } +} + +DebugBloom::DebugBloom() { +} + +void DebugBloom::configure(const Config& config) { + _mode = static_cast(config.mode); + assert(_mode < DebugBloomConfig::MODE_COUNT); +} + +void DebugBloom::run(const render::RenderContextPointer& renderContext, const Inputs& inputs) { + assert(renderContext->args); + assert(renderContext->args->hasViewFrustum()); + RenderArgs* args = renderContext->args; + + const auto frameBuffer = inputs.get0(); + const auto combinedBlurBuffer = inputs.get4(); + const auto framebufferSize = frameBuffer->getSize(); + const auto level0FB = inputs.get1(); + const auto level1FB = inputs.get2(); + const auto level2FB = inputs.get3(); + const gpu::TexturePointer levelTextures[BLOOM_BLUR_LEVEL_COUNT] = { + level0FB->getRenderBuffer(0), + level1FB->getRenderBuffer(0), + level2FB->getRenderBuffer(0) + }; + + static auto TEXCOORD_RECT_SLOT = 1; + + if (!_pipeline) { + auto vs = gpu::StandardShaderLib::getDrawTexcoordRectTransformUnitQuadVS(); + auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS(); + gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps); + + gpu::Shader::BindingSet slotBindings; + 
slotBindings.insert(gpu::Shader::Binding(std::string("texcoordRect"), TEXCOORD_RECT_SLOT)); + gpu::Shader::makeProgram(*program, slotBindings); + + gpu::StatePointer state = gpu::StatePointer(new gpu::State()); + state->setDepthTest(gpu::State::DepthTest(false)); + _pipeline = gpu::Pipeline::create(program, state); + } + + gpu::doInBatch(args->_context, [&](gpu::Batch& batch) { + batch.enableStereo(false); + + batch.setFramebuffer(frameBuffer); + + batch.setViewportTransform(args->_viewport); + batch.setProjectionTransform(glm::mat4()); + batch.resetViewTransform(); + batch.setPipeline(_pipeline); + + Transform modelTransform; + if (_mode == DebugBloomConfig::MODE_ALL_LEVELS) { + batch._glUniform4f(TEXCOORD_RECT_SLOT, 0.0f, 0.0f, 1.f, 1.f); + + modelTransform = gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport / 2); + modelTransform.postTranslate(glm::vec3(-1.0f, 1.0f, 0.0f)); + batch.setModelTransform(modelTransform); + batch.setResourceTexture(0, levelTextures[0]); + batch.draw(gpu::TRIANGLE_STRIP, 4); + + modelTransform.postTranslate(glm::vec3(2.0f, 0.0f, 0.0f)); + batch.setModelTransform(modelTransform); + batch.setResourceTexture(0, levelTextures[1]); + batch.draw(gpu::TRIANGLE_STRIP, 4); + + modelTransform.postTranslate(glm::vec3(-2.0f, -2.0f, 0.0f)); + batch.setModelTransform(modelTransform); + batch.setResourceTexture(0, levelTextures[2]); + batch.draw(gpu::TRIANGLE_STRIP, 4); + + modelTransform.postTranslate(glm::vec3(2.0f, 0.0f, 0.0f)); + batch.setModelTransform(modelTransform); + batch.setResourceTexture(0, combinedBlurBuffer->getRenderBuffer(0)); + batch.draw(gpu::TRIANGLE_STRIP, 4); + } else { + auto viewport = args->_viewport; + auto blurLevel = _mode - DebugBloomConfig::MODE_LEVEL0; + + viewport.z /= 2; + + batch._glUniform4f(TEXCOORD_RECT_SLOT, 0.5f, 0.0f, 0.5f, 1.f); + + modelTransform = gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, viewport); + modelTransform.postTranslate(glm::vec3(-1.0f, 0.0f, 
0.0f)); + batch.setModelTransform(modelTransform); + batch.setResourceTexture(0, levelTextures[blurLevel]); + batch.draw(gpu::TRIANGLE_STRIP, 4); + } + }); +} + +void BloomConfig::setIntensity(float value) { + auto task = static_cast(_task); + auto blurJobIt = task->editJob("BloomApply"); + assert(blurJobIt != task->_jobs.end()); + blurJobIt->getConfiguration()->setProperty("intensity", value); +} + +float BloomConfig::getIntensity() const { + auto task = static_cast(_task); + auto blurJobIt = task->getJob("BloomApply"); + assert(blurJobIt != task->_jobs.end()); + return blurJobIt->getConfiguration()->property("intensity").toFloat(); +} + +void BloomConfig::setSize(float value) { + std::string blurName{ "BloomBlurN" }; + auto sigma = 0.5f+value*3.5f; + + for (auto i = 0; i < BLOOM_BLUR_LEVEL_COUNT; i++) { + blurName.back() = '0' + i; + auto task = static_cast(_task); + auto blurJobIt = task->editJob(blurName); + assert(blurJobIt != task->_jobs.end()); + auto& gaussianBlur = blurJobIt->edit(); + auto gaussianBlurParams = gaussianBlur.getParameters(); + gaussianBlurParams->setFilterGaussianTaps(5, sigma); + // Gaussian blur increases at each level to have a slower rolloff on the edge + // of the response + sigma *= 1.5f; + } +} + +Bloom::Bloom() { + +} + +void Bloom::configure(const Config& config) { + std::string blurName{ "BloomBlurN" }; + + for (auto i = 0; i < BLOOM_BLUR_LEVEL_COUNT; i++) { + blurName.back() = '0' + i; + auto blurConfig = config.getConfig(blurName); + blurConfig->setProperty("filterScale", 1.0f); + } +} + +void Bloom::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs) { + // Start by computing threshold of color buffer input at quarter resolution + const auto bloomInputBuffer = task.addJob("BloomThreshold", inputs, 4U); + + // Multi-scale blur, each new blur is half resolution of the previous pass + const auto blurFB0 = task.addJob("BloomBlur0", bloomInputBuffer, true); + const auto blurFB1 = task.addJob("BloomBlur1", 
blurFB0, true, 2U); + const auto blurFB2 = task.addJob("BloomBlur2", blurFB1, true, 2U); + + const auto& input = inputs.get(); + const auto& frameBuffer = input[1]; + + // Mix all blur levels at quarter resolution + const auto applyInput = BloomApply::Inputs(bloomInputBuffer, blurFB0, blurFB1, blurFB2).asVarying(); + task.addJob("BloomApply", applyInput); + // And then blend result in additive manner on top of final color buffer + const auto drawInput = BloomDraw::Inputs(frameBuffer, bloomInputBuffer).asVarying(); + task.addJob("BloomDraw", drawInput); + + const auto debugInput = DebugBloom::Inputs(frameBuffer, blurFB0, blurFB1, blurFB2, bloomInputBuffer).asVarying(); + task.addJob("DebugBloom", debugInput); +} diff --git a/libraries/render-utils/src/BloomEffect.h b/libraries/render-utils/src/BloomEffect.h new file mode 100644 index 0000000000..5352c65e4d --- /dev/null +++ b/libraries/render-utils/src/BloomEffect.h @@ -0,0 +1,166 @@ +// +// BloomEffect.h +// render-utils/src/ +// +// Created by Olivier Prat on 09/25/17. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#ifndef hifi_render_utils_BloomEffect_h +#define hifi_render_utils_BloomEffect_h + +#include + +#include "DeferredFrameTransform.h" + +class BloomConfig : public render::Task::Config { + Q_OBJECT + Q_PROPERTY(float intensity READ getIntensity WRITE setIntensity NOTIFY dirty) + Q_PROPERTY(float size MEMBER size WRITE setSize NOTIFY dirty) + +public: + + BloomConfig() : render::Task::Config(false) {} + + float size{ 0.8f }; + + void setIntensity(float value); + float getIntensity() const; + void setSize(float value); + +signals: + void dirty(); +}; + +class BloomThresholdConfig : public render::Job::Config { + Q_OBJECT + Q_PROPERTY(float threshold MEMBER threshold NOTIFY dirty) + +public: + + float threshold{ 1.25f }; + +signals: + void dirty(); +}; + +class BloomThreshold { +public: + using Inputs = render::VaryingSet2; + using Outputs = gpu::FramebufferPointer; + using Config = BloomThresholdConfig; + using JobModel = render::Job::ModelIO; + + BloomThreshold(unsigned int downsamplingFactor); + + void configure(const Config& config); + void run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs); + +private: + + gpu::FramebufferPointer _outputBuffer; + gpu::PipelinePointer _pipeline; + float _threshold; + unsigned int _downsamplingFactor; +}; + + +class BloomApplyConfig : public render::Job::Config { + Q_OBJECT + Q_PROPERTY(float intensity MEMBER intensity NOTIFY dirty) + +public: + + float intensity{ 0.8f }; + +signals: + void dirty(); +}; + +class BloomApply { +public: + using Inputs = render::VaryingSet4; + using Config = BloomApplyConfig; + using JobModel = render::Job::ModelI; + + BloomApply(); + + void configure(const Config& config); + void run(const render::RenderContextPointer& renderContext, const Inputs& inputs); + +private: + + gpu::PipelinePointer _pipeline; + float _intensity{ 1.0f }; +}; + +class BloomDraw { +public: + 
using Inputs = render::VaryingSet2; + using JobModel = render::Job::ModelI; + + BloomDraw() {} + + void run(const render::RenderContextPointer& renderContext, const Inputs& inputs); + +private: + + gpu::PipelinePointer _pipeline; +}; + +class DebugBloomConfig : public render::Job::Config { + Q_OBJECT + Q_PROPERTY(int mode MEMBER mode NOTIFY dirty) + +public: + + enum Mode { + MODE_LEVEL0 = 0, + MODE_LEVEL1, + MODE_LEVEL2, + MODE_ALL_LEVELS, + + MODE_COUNT + }; + + DebugBloomConfig() : render::Job::Config(false) {} + + int mode{ MODE_ALL_LEVELS }; + +signals: + void dirty(); +}; + +class DebugBloom { +public: + using Inputs = render::VaryingSet5; + using Config = DebugBloomConfig; + using JobModel = render::Job::ModelI; + + DebugBloom(); + + void configure(const Config& config); + void run(const render::RenderContextPointer& renderContext, const Inputs& inputs); + +private: + gpu::PipelinePointer _pipeline; + DebugBloomConfig::Mode _mode; +}; + +class Bloom { +public: + using Inputs = render::VaryingSet2; + using Config = BloomConfig; + using JobModel = render::Task::ModelI; + + Bloom(); + + void configure(const Config& config); + void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs); + +}; + +#endif // hifi_render_utils_BloomEffect_h diff --git a/libraries/render-utils/src/BloomThreshold.slf b/libraries/render-utils/src/BloomThreshold.slf new file mode 100644 index 0000000000..e4b96618df --- /dev/null +++ b/libraries/render-utils/src/BloomThreshold.slf @@ -0,0 +1,45 @@ +<@include gpu/Config.slh@> +<$VERSION_HEADER$> +// BloomThreshold.slf +// Perform a soft threshold on an input texture and downsample to half size in one go. +// +// Created by Olivier Prat on 09/26/2017 +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +uniform sampler2D colorMap; +uniform float threshold; + +in vec2 varTexCoord0; +out vec4 outFragColor; + +#define DOWNSAMPLING_FACTOR 4 +#define SAMPLE_COUNT (DOWNSAMPLING_FACTOR/2) + +void main(void) { + vec2 deltaX = dFdx(varTexCoord0) / SAMPLE_COUNT; + vec2 deltaY = dFdy(varTexCoord0) / SAMPLE_COUNT; + vec2 startUv = varTexCoord0; + vec4 maskedColor = vec4(0,0,0,0); + + for (int y=0 ; ysetDepthStencilBuffer(_primaryDepthTexture, depthFormat); - auto smoothSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR); + auto smoothSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT); - _lightingTexture = gpu::Texture::createRenderBuffer(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10), width, height, gpu::Texture::SINGLE_MIP, defaultSampler); + _lightingTexture = gpu::Texture::createRenderBuffer(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10), width, height, gpu::Texture::SINGLE_MIP, smoothSampler); _lightingFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("lighting")); _lightingFramebuffer->setRenderBuffer(0, _lightingTexture); _lightingFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat); diff --git a/libraries/render-utils/src/RenderDeferredTask.cpp b/libraries/render-utils/src/RenderDeferredTask.cpp index 4261b14a9b..a395136978 100644 --- a/libraries/render-utils/src/RenderDeferredTask.cpp +++ b/libraries/render-utils/src/RenderDeferredTask.cpp @@ -42,6 +42,7 @@ #include "ToneMappingEffect.h" #include "SubsurfaceScattering.h" #include "DrawHaze.h" +#include "BloomEffect.h" #include "HighlightEffect.h" #include @@ -166,7 +167,7 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren const auto transparentsInputs = DrawDeferred::Inputs(transparents, lightingModel).asVarying(); task.addJob("DrawTransparentDeferred", transparentsInputs, shapePlumber); - // LIght Cluster Grid 
Debuging job + // Light Cluster Grid Debuging job { const auto debugLightClustersInputs = DebugLightClusters::Inputs(deferredFrameTransform, deferredFramebuffer, lightingModel, linearDepthTarget, lightClusters).asVarying(); task.addJob("DebugLightClusters", debugLightClustersInputs); @@ -177,6 +178,10 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren const auto toneAndPostRangeTimer = task.addJob("BeginToneAndPostRangeTimer", "PostToneOverlaysAntialiasing"); + // Add bloom + const auto bloomInputs = Bloom::Inputs(deferredFrameTransform, lightingFramebuffer).asVarying(); + task.addJob("Bloom", bloomInputs); + // Lighting Buffer ready for tone mapping const auto toneMappingInputs = ToneMappingDeferred::Inputs(lightingFramebuffer, primaryFramebuffer).asVarying(); task.addJob("ToneMapping", toneMappingInputs); diff --git a/libraries/render/src/render/BlurTask.cpp b/libraries/render/src/render/BlurTask.cpp index 73a8e0a0dd..2be6f8fad2 100644 --- a/libraries/render/src/render/BlurTask.cpp +++ b/libraries/render/src/render/BlurTask.cpp @@ -29,11 +29,10 @@ enum BlurShaderMapSlots { BlurTask_DepthSlot, }; -const float BLUR_NUM_SAMPLES = 7.0f; - BlurParams::BlurParams() { Params params; _parametersBuffer = gpu::BufferView(std::make_shared(sizeof(Params), (const gpu::Byte*) ¶ms)); + setFilterGaussianTaps(3); } void BlurParams::setWidthHeight(int width, int height, bool isStereo) { @@ -49,10 +48,10 @@ void BlurParams::setWidthHeight(int width, int height, bool isStereo) { } } -void BlurParams::setTexcoordTransform(const glm::vec4 texcoordTransformViewport) { - auto texcoordTransform = _parametersBuffer.get().texcoordTransform; - if (texcoordTransformViewport != texcoordTransform) { - _parametersBuffer.edit().texcoordTransform = texcoordTransform; +void BlurParams::setTexcoordTransform(glm::vec4 texcoordTransformViewport) { + auto& params = _parametersBuffer.get(); + if (texcoordTransformViewport != params.texcoordTransform) { + 
_parametersBuffer.edit().texcoordTransform = texcoordTransformViewport; } } @@ -60,7 +59,58 @@ void BlurParams::setFilterRadiusScale(float scale) { auto filterInfo = _parametersBuffer.get().filterInfo; if (scale != filterInfo.x) { _parametersBuffer.edit().filterInfo.x = scale; - _parametersBuffer.edit().filterInfo.y = scale / BLUR_NUM_SAMPLES; + } +} + +void BlurParams::setFilterNumTaps(int count) { + assert(count <= BLUR_MAX_NUM_TAPS); + auto filterInfo = _parametersBuffer.get().filterInfo; + if (count != (int)filterInfo.y) { + _parametersBuffer.edit().filterInfo.y = count; + } +} + +void BlurParams::setFilterTap(int index, float offset, float value) { + auto filterTaps = _parametersBuffer.edit().filterTaps; + assert(index < BLUR_MAX_NUM_TAPS); + filterTaps[index].x = offset; + filterTaps[index].y = value; +} + +void BlurParams::setFilterGaussianTaps(int numHalfTaps, float sigma) { + auto& params = _parametersBuffer.edit(); + const int numTaps = 2 * numHalfTaps + 1; + assert(numTaps <= BLUR_MAX_NUM_TAPS); + assert(sigma > 0.0f); + const float inverseTwoSigmaSquared = float(0.5 / double(sigma*sigma)); + float totalWeight = 1.0f; + float weight; + float offset; + int i; + + params.filterInfo.y = numTaps; + params.filterTaps[0].x = 0.0f; + params.filterTaps[0].y = 1.0f; + + for (i = 0; i < numHalfTaps; i++) { + offset = i + 1; + weight = (float)exp(-offset*offset * inverseTwoSigmaSquared); + params.filterTaps[i + 1].x = offset; + params.filterTaps[i + 1].y = weight; + params.filterTaps[i + 1 + numHalfTaps].x = -offset; + params.filterTaps[i + 1 + numHalfTaps].y = weight; + totalWeight += 2 * weight; + } + + // Tap weights will be normalized in shader because side cases on edges of screen + // won't have the same number of taps as in the center. 
+} + +void BlurParams::setOutputAlpha(float value) { + value = glm::clamp(value, 0.0f, 1.0f); + auto filterInfo = _parametersBuffer.get().filterInfo; + if (value != filterInfo.z) { + _parametersBuffer.edit().filterInfo.z = value; } } @@ -86,17 +136,23 @@ void BlurParams::setLinearDepthPosFar(float farPosDepth) { } -BlurInOutResource::BlurInOutResource(bool generateOutputFramebuffer) : -_generateOutputFramebuffer(generateOutputFramebuffer) -{ - +BlurInOutResource::BlurInOutResource(bool generateOutputFramebuffer, unsigned int downsampleFactor) : + _downsampleFactor(downsampleFactor), + _generateOutputFramebuffer(generateOutputFramebuffer) { + assert(downsampleFactor > 0); } bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFramebuffer, Resources& blurringResources) { if (!sourceFramebuffer) { return false; } - if (_blurredFramebuffer && _blurredFramebuffer->getSize() != sourceFramebuffer->getSize()) { + + auto blurBufferSize = sourceFramebuffer->getSize(); + + blurBufferSize.x /= _downsampleFactor; + blurBufferSize.y /= _downsampleFactor; + + if (_blurredFramebuffer && _blurredFramebuffer->getSize() != blurBufferSize) { _blurredFramebuffer.reset(); } @@ -108,7 +164,7 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra // _blurredFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat()); //} auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT); - auto blurringTarget = gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), gpu::Texture::SINGLE_MIP, blurringSampler); + auto blurringTarget = gpu::Texture::createRenderBuffer(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), blurBufferSize.x, blurBufferSize.y, gpu::Texture::SINGLE_MIP, blurringSampler); _blurredFramebuffer->setRenderBuffer(0, blurringTarget); } @@ -117,7 
+173,7 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra blurringResources.blurringTexture = _blurredFramebuffer->getRenderBuffer(0); if (_generateOutputFramebuffer) { - if (_outputFramebuffer && _outputFramebuffer->getSize() != sourceFramebuffer->getSize()) { + if (_outputFramebuffer && _outputFramebuffer->getSize() != blurBufferSize) { _outputFramebuffer.reset(); } @@ -131,7 +187,7 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra _outputFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat()); }*/ auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT); - auto blurringTarget = gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), gpu::Texture::SINGLE_MIP, blurringSampler); + auto blurringTarget = gpu::Texture::createRenderBuffer(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), blurBufferSize.x, blurBufferSize.y, gpu::Texture::SINGLE_MIP, blurringSampler); _outputFramebuffer->setRenderBuffer(0, blurringTarget); } @@ -145,8 +201,8 @@ bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFra return true; } -BlurGaussian::BlurGaussian(bool generateOutputFramebuffer) : - _inOutResources(generateOutputFramebuffer) +BlurGaussian::BlurGaussian(bool generateOutputFramebuffer, unsigned int downsampleFactor) : + _inOutResources(generateOutputFramebuffer, downsampleFactor) { _parameters = std::make_shared(); } @@ -196,7 +252,16 @@ gpu::PipelinePointer BlurGaussian::getBlurHPipeline() { } void BlurGaussian::configure(const Config& config) { + auto state = getBlurHPipeline()->getState(); + _parameters->setFilterRadiusScale(config.filterScale); + _parameters->setOutputAlpha(config.mix); + if (config.mix < 1.0f) { + state->setBlendFunction(config.mix < 1.0f, gpu::State::SRC_ALPHA, 
gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA, + gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA); + } else { + state->setBlendFunction(false); + } } @@ -206,7 +271,6 @@ void BlurGaussian::run(const RenderContextPointer& renderContext, const gpu::Fra RenderArgs* args = renderContext->args; - BlurInOutResource::Resources blurringResources; if (!_inOutResources.updateResources(sourceFramebuffer, blurringResources)) { // early exit if no valid blurring resources @@ -216,14 +280,15 @@ void BlurGaussian::run(const RenderContextPointer& renderContext, const gpu::Fra auto blurVPipeline = getBlurVPipeline(); auto blurHPipeline = getBlurHPipeline(); + glm::ivec4 viewport { 0, 0, blurredFramebuffer->getWidth(), blurredFramebuffer->getHeight() }; - _parameters->setWidthHeight(args->_viewport.z, args->_viewport.w, args->isStereo()); - glm::ivec2 textureSize(blurringResources.sourceTexture->getDimensions()); - _parameters->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, args->_viewport)); + glm::ivec2 textureSize = blurredFramebuffer->getSize(); + _parameters->setWidthHeight(blurredFramebuffer->getWidth(), blurredFramebuffer->getHeight(), args->isStereo()); + _parameters->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, viewport)); gpu::doInBatch(args->_context, [=](gpu::Batch& batch) { batch.enableStereo(false); - batch.setViewportTransform(args->_viewport); + batch.setViewportTransform(viewport); batch.setUniformBuffer(BlurTask_ParamsSlot, _parameters->_parametersBuffer); @@ -251,7 +316,7 @@ void BlurGaussian::run(const RenderContextPointer& renderContext, const gpu::Fra BlurGaussianDepthAware::BlurGaussianDepthAware(bool generateOutputFramebuffer, const BlurParamsPointer& params) : - _inOutResources(generateOutputFramebuffer), + _inOutResources(generateOutputFramebuffer, 1U), _parameters((params ? 
params : std::make_shared())) { } diff --git a/libraries/render/src/render/BlurTask.h b/libraries/render/src/render/BlurTask.h index f023aabfe7..e8d268dc63 100644 --- a/libraries/render/src/render/BlurTask.h +++ b/libraries/render/src/render/BlurTask.h @@ -14,6 +14,8 @@ #include "Engine.h" +#include "BlurTask_shared.slh" + namespace render { @@ -25,6 +27,11 @@ public: void setTexcoordTransform(const glm::vec4 texcoordTransformViewport); void setFilterRadiusScale(float scale); + void setFilterNumTaps(int count); + // Tap 0 is considered the center of the kernel + void setFilterTap(int index, float offset, float value); + void setFilterGaussianTaps(int numHalfTaps, float sigma = 1.47f); + void setOutputAlpha(float value); void setDepthPerspective(float oneOverTan2FOV); void setDepthThreshold(float threshold); @@ -40,7 +47,7 @@ public: // Viewport to Texcoord info, if the region of the blur (viewport) is smaller than the full frame glm::vec4 texcoordTransform{ 0.0f, 0.0f, 1.0f, 1.0f }; - // Filter info (radius scale + // Filter info (radius scale, number of taps, output alpha) glm::vec4 filterInfo{ 1.0f, 0.0f, 0.0f, 0.0f }; // Depth info (radius scale @@ -52,6 +59,9 @@ public: // LinearDepth info is { f } glm::vec4 linearDepthInfo{ 0.0f }; + // Taps (offset, weight) + glm::vec2 filterTaps[BLUR_MAX_NUM_TAPS]; + Params() {} }; gpu::BufferView _parametersBuffer; @@ -62,7 +72,7 @@ using BlurParamsPointer = std::shared_ptr; class BlurInOutResource { public: - BlurInOutResource(bool generateOutputFramebuffer = false); + BlurInOutResource(bool generateOutputFramebuffer, unsigned int downsampleFactor); struct Resources { gpu::TexturePointer sourceTexture; @@ -75,8 +85,9 @@ public: gpu::FramebufferPointer _blurredFramebuffer; - // the output framebuffer defined if the job needs to output the result in a new framebuffer and not in place in th einput buffer + // the output framebuffer defined if the job needs to output the result in a new framebuffer and not in place in the 
input buffer gpu::FramebufferPointer _outputFramebuffer; + unsigned int _downsampleFactor{ 1U }; bool _generateOutputFramebuffer{ false }; }; @@ -84,12 +95,15 @@ public: class BlurGaussianConfig : public Job::Config { Q_OBJECT Q_PROPERTY(bool enabled WRITE setEnabled READ isEnabled NOTIFY dirty) // expose enabled flag - Q_PROPERTY(float filterScale MEMBER filterScale NOTIFY dirty) // expose enabled flag + Q_PROPERTY(float filterScale MEMBER filterScale NOTIFY dirty) + Q_PROPERTY(float mix MEMBER mix NOTIFY dirty) public: BlurGaussianConfig() : Job::Config(true) {} float filterScale{ 0.2f }; + float mix{ 1.0f }; + signals : void dirty(); @@ -102,11 +116,13 @@ public: using Config = BlurGaussianConfig; using JobModel = Job::ModelIO; - BlurGaussian(bool generateOutputFramebuffer = false); + BlurGaussian(bool generateOutputFramebuffer = false, unsigned int downsampleFactor = 1U); void configure(const Config& config); void run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& blurredFramebuffer); + BlurParamsPointer getParameters() const { return _parameters; } + protected: BlurParamsPointer _parameters; diff --git a/libraries/render/src/render/BlurTask.slh b/libraries/render/src/render/BlurTask.slh index de2614eb51..37f29496bd 100644 --- a/libraries/render/src/render/BlurTask.slh +++ b/libraries/render/src/render/BlurTask.slh @@ -9,17 +9,7 @@ <@func declareBlurUniforms()@> -#define NUM_TAPS 7 -#define NUM_TAPS_OFFSET 3.0f - -float uniformFilterWidth = 0.05f; - -const float gaussianDistributionCurve[NUM_TAPS] = float[]( - 0.383f, 0.006f, 0.061f, 0.242f, 0.242f, 0.061f, 0.006f -); -const float gaussianDistributionOffset[NUM_TAPS] = float[]( - 0.0f, -3.0f, -2.0f, -1.0f, 1.0f, 2.0f, 3.0f -); +<@include BlurTask_shared.slh@> struct BlurParameters { vec4 resolutionInfo; @@ -28,6 +18,7 @@ struct BlurParameters { vec4 depthInfo; vec4 stereoInfo; vec4 linearDepthInfo; + vec2 taps[BLUR_MAX_NUM_TAPS]; }; uniform 
blurParamsBuffer { @@ -46,6 +37,25 @@ float getFilterScale() { return parameters.filterInfo.x; } +int getFilterNumTaps() { + return int(parameters.filterInfo.y); +} + +float getOutputAlpha() { + return parameters.filterInfo.z; +} + +vec2 getFilterTap(int index) { + return parameters.taps[index]; +} + +float getFilterTapOffset(vec2 tap) { + return tap.x; +} + +float getFilterTapWeight(vec2 tap) { + return tap.y; +} float getDepthThreshold() { return parameters.depthInfo.x; @@ -70,19 +80,29 @@ uniform sampler2D sourceMap; vec4 pixelShaderGaussian(vec2 texcoord, vec2 direction, vec2 pixelStep) { texcoord = evalTexcoordTransformed(texcoord); - vec4 sampleCenter = texture(sourceMap, texcoord); vec2 finalStep = getFilterScale() * direction * pixelStep; vec4 srcBlurred = vec4(0.0); + float totalWeight = 0.f; + int numTaps = getFilterNumTaps(); - for(int i = 0; i < NUM_TAPS; i++) { - // Fetch color and depth for current sample. - vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep); - vec4 srcSample = texture(sourceMap, sampleCoord); - // Accumulate. - srcBlurred += gaussianDistributionCurve[i] * srcSample; + for(int i = 0; i < numTaps; i++) { + vec2 tapInfo = getFilterTap(i); + // Fetch color for current sample. + vec2 sampleCoord = texcoord + (getFilterTapOffset(tapInfo) * finalStep); + if (all(greaterThanEqual(sampleCoord, vec2(0,0))) && all(lessThanEqual(sampleCoord, vec2(1.0,1.0)))) { + vec4 srcSample = texture(sourceMap, sampleCoord); + float weight = getFilterTapWeight(tapInfo); + // Accumulate. 
+ srcBlurred += srcSample * weight; + totalWeight += weight; + } } + if (totalWeight>0.0) { + srcBlurred /= totalWeight; + } + srcBlurred.a = getOutputAlpha(); return srcBlurred; } @@ -95,15 +115,6 @@ vec4 pixelShaderGaussian(vec2 texcoord, vec2 direction, vec2 pixelStep) { uniform sampler2D sourceMap; uniform sampler2D depthMap; -#define NUM_HALF_TAPS 4 - -const float gaussianDistributionCurveHalf[NUM_HALF_TAPS] = float[]( - 0.383f, 0.242f, 0.061f, 0.006f -); -const float gaussianDistributionOffsetHalf[NUM_HALF_TAPS] = float[]( - 0.0f, 1.0f, 2.0f, 3.0f -); - vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep) { texcoord = evalTexcoordTransformed(texcoord); float sampleDepth = texture(depthMap, texcoord).x; @@ -122,45 +133,36 @@ vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep float scale = distanceToProjectionWindow / sampleDepth; vec2 finalStep = filterScale * scale * direction * pixelStep; + int numTaps = getFilterNumTaps(); // Accumulate the center sample - vec4 srcBlurred = gaussianDistributionCurve[0] * sampleCenter; + vec2 tapInfo = getFilterTap(0); + float totalWeight = getFilterTapWeight(tapInfo); + vec4 srcBlurred = sampleCenter * totalWeight; + + for(int i = 1; i < numTaps; i++) { + tapInfo = getFilterTap(i); - for(int i = 1; i < NUM_TAPS; i++) { // Fetch color and depth for current sample. - vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep); - float srcDepth = texture(depthMap, sampleCoord).x; - vec4 srcSample = texture(sourceMap, sampleCoord); + vec2 sampleCoord = texcoord + (getFilterTapOffset(tapInfo) * finalStep); + if (all(greaterThanEqual(sampleCoord, vec2(0,0))) && all(lessThanEqual(sampleCoord, vec2(1.0,1.0)))) { + float srcDepth = texture(depthMap, sampleCoord).x; + vec4 srcSample = texture(sourceMap, sampleCoord); + float weight = getFilterTapWeight(tapInfo); - // If the difference in depth is huge, we lerp color back. 
- float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0); - srcSample = mix(srcSample, sampleCenter, s); + // If the difference in depth is huge, we lerp color back. + float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0); + srcSample = mix(srcSample, sampleCenter, s); - // Accumulate. - srcBlurred += gaussianDistributionCurve[i] * srcSample; + // Accumulate. + srcBlurred += srcSample * weight; + totalWeight += weight; + } } - - /* - for(int i = 1; i < NUM_HALF_TAPS; i++) { - // Fetch color and depth for current sample. - vec2 texcoordOffset = (gaussianDistributionOffsetHalf[i] * finalStep); - - float srcDepthN = texture(depthMap, texcoord - texcoordOffset).x; - float srcDepthP = texture(depthMap, texcoord + texcoordOffset).x; - vec4 srcSampleN = texture(sourceMap, texcoord - texcoordOffset); - vec4 srcSampleP = texture(sourceMap, texcoord + texcoordOffset); - - // If the difference in depth is huge, we lerp color back. - float sN = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepthN - sampleDepth), 0.0, 1.0); - float sP = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepthP - sampleDepth), 0.0, 1.0); - - srcSampleN = mix(srcSampleN, sampleCenter, sN); - srcSampleP = mix(srcSampleP, sampleCenter, sP); - - // Accumulate. - srcBlurred += gaussianDistributionCurveHalf[i] * (srcSampleP + srcSampleN); - }*/ + if (totalWeight>0.0) { + srcBlurred /= totalWeight; + } return srcBlurred; } diff --git a/libraries/render/src/render/BlurTask_shared.slh b/libraries/render/src/render/BlurTask_shared.slh new file mode 100644 index 0000000000..beca32c1be --- /dev/null +++ b/libraries/render/src/render/BlurTask_shared.slh @@ -0,0 +1,10 @@ +// Generated on <$_SCRIBE_DATE$> +// +// Created by Olivier Prat on 09/25/17. +// Copyright 2017 High Fidelity, Inc. 
+// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#define BLUR_MAX_NUM_TAPS 33 diff --git a/libraries/render/src/render/ResampleTask.cpp b/libraries/render/src/render/ResampleTask.cpp new file mode 100644 index 0000000000..65c0ff45b9 --- /dev/null +++ b/libraries/render/src/render/ResampleTask.cpp @@ -0,0 +1,83 @@ +// +// ResampleTask.cpp +// render/src/render +// +// Various to upsample or downsample textures into framebuffers. +// +// Created by Olivier Prat on 10/09/17. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// +#include "ResampleTask.h" + +#include "gpu/Context.h" +#include "gpu/StandardShaderLib.h" + +using namespace render; + +gpu::PipelinePointer HalfDownsample::_pipeline; + +HalfDownsample::HalfDownsample() { + +} + +void HalfDownsample::configure(const Config& config) { + +} + +gpu::FramebufferPointer HalfDownsample::getResampledFrameBuffer(const gpu::FramebufferPointer& sourceFramebuffer) { + auto resampledFramebufferSize = sourceFramebuffer->getSize(); + + resampledFramebufferSize.x /= 2U; + resampledFramebufferSize.y /= 2U; + + if (!_destinationFrameBuffer || resampledFramebufferSize != _destinationFrameBuffer->getSize()) { + _destinationFrameBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("HalfOutput")); + + auto sampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT); + auto target = gpu::Texture::createRenderBuffer(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), resampledFramebufferSize.x, resampledFramebufferSize.y, gpu::Texture::SINGLE_MIP, sampler); + _destinationFrameBuffer->setRenderBuffer(0, target); + } + return _destinationFrameBuffer; +} + +void HalfDownsample::run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, 
gpu::FramebufferPointer& resampledFrameBuffer) { + assert(renderContext->args); + assert(renderContext->args->hasViewFrustum()); + RenderArgs* args = renderContext->args; + + resampledFrameBuffer = getResampledFrameBuffer(sourceFramebuffer); + + if (!_pipeline) { + auto vs = gpu::StandardShaderLib::getDrawTransformUnitQuadVS(); + auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS(); + gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps); + + gpu::Shader::BindingSet slotBindings; + gpu::Shader::makeProgram(*program, slotBindings); + + gpu::StatePointer state = gpu::StatePointer(new gpu::State()); + state->setDepthTest(gpu::State::DepthTest(false, false)); + _pipeline = gpu::Pipeline::create(program, state); + } + + const auto bufferSize = resampledFrameBuffer->getSize(); + glm::ivec4 viewport{ 0, 0, bufferSize.x, bufferSize.y }; + + gpu::doInBatch(args->_context, [&](gpu::Batch& batch) { + batch.enableStereo(false); + + batch.setFramebuffer(resampledFrameBuffer); + + batch.setViewportTransform(viewport); + batch.setProjectionTransform(glm::mat4()); + batch.resetViewTransform(); + batch.setPipeline(_pipeline); + + batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(bufferSize, viewport)); + batch.setResourceTexture(0, sourceFramebuffer->getRenderBuffer(0)); + batch.draw(gpu::TRIANGLE_STRIP, 4); + }); +} diff --git a/libraries/render/src/render/ResampleTask.h b/libraries/render/src/render/ResampleTask.h new file mode 100644 index 0000000000..da2b7b3537 --- /dev/null +++ b/libraries/render/src/render/ResampleTask.h @@ -0,0 +1,41 @@ +// +// ResampleTask.h +// render/src/render +// +// Various to upsample or downsample textures into framebuffers. +// +// Created by Olivier Prat on 10/09/17. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#ifndef hifi_render_ResampleTask_h +#define hifi_render_ResampleTask_h + +#include "Engine.h" + +namespace render { + + class HalfDownsample { + public: + using Config = JobConfig; + using JobModel = Job::ModelIO; + + HalfDownsample(); + + void configure(const Config& config); + void run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& resampledFrameBuffer); + + protected: + + static gpu::PipelinePointer _pipeline; + + gpu::FramebufferPointer _destinationFrameBuffer; + + gpu::FramebufferPointer getResampledFrameBuffer(const gpu::FramebufferPointer& sourceFramebuffer); + }; +} + +#endif // hifi_render_ResampleTask_h diff --git a/libraries/render/src/task/Task.h b/libraries/render/src/task/Task.h index a8137fd239..63bda7bafa 100644 --- a/libraries/render/src/task/Task.h +++ b/libraries/render/src/task/Task.h @@ -171,6 +171,8 @@ public: _concept->setCPURunTime((double)(usecTimestampNow() - start) / 1000.0); } + const std::string& getName() const { return _name; } + protected: ConceptPointer _concept; std::string _name = ""; @@ -206,6 +208,24 @@ public: const Varying getInput() const override { return _input; } const Varying getOutput() const override { return _output; } + typename Jobs::iterator editJob(std::string name) { + typename Jobs::iterator jobIt; + for (jobIt = _jobs.begin(); jobIt != _jobs.end(); ++jobIt) { + if (jobIt->getName() == name) { + return jobIt; + } + } + return jobIt; + } + typename Jobs::const_iterator getJob(std::string name) const { + typename Jobs::const_iterator jobIt; + for (jobIt = _jobs.begin(); jobIt != _jobs.end(); ++jobIt) { + if (jobIt->getName() == name) { + return jobIt; + } + } + return jobIt; + } TaskConcept(const Varying& input, QConfigPointer config) : Concept(config), _input(input) {} diff --git a/libraries/shared/src/SharedUtil.cpp 
b/libraries/shared/src/SharedUtil.cpp index 99bdfc4d90..38a7a3165f 100644 --- a/libraries/shared/src/SharedUtil.cpp +++ b/libraries/shared/src/SharedUtil.cpp @@ -1081,7 +1081,7 @@ void setMaxCores(uint8_t maxCores) { void quitWithParentProcess() { if (qApp) { qDebug() << "Parent process died, quitting"; - qApp->quit(); + exit(0); } } @@ -1113,3 +1113,57 @@ void watchParentProcess(int parentPID) { timer->start(); } #endif + + +#ifdef Q_OS_WIN +QString getLastErrorAsString() { + DWORD errorMessageID = ::GetLastError(); + if (errorMessageID == 0) { + return QString(); + } + + LPSTR messageBuffer = nullptr; + size_t size = FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + nullptr, errorMessageID, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPSTR)&messageBuffer, 0, nullptr); + + auto message = QString::fromLocal8Bit(messageBuffer, (int)size); + + //Free the buffer. + LocalFree(messageBuffer); + + return message; +} + +// All processes in the group will shut down with the process creating the group +void* createProcessGroup() { + HANDLE jobObject = CreateJobObject(nullptr, nullptr); + if (jobObject == nullptr) { + qWarning() << "Could NOT create job object:" << getLastErrorAsString(); + return nullptr; + } + + JOBOBJECT_EXTENDED_LIMIT_INFORMATION JELI; + if (!QueryInformationJobObject(jobObject, JobObjectExtendedLimitInformation, &JELI, sizeof(JELI), nullptr)) { + qWarning() << "Could NOT query job object information" << getLastErrorAsString(); + return nullptr; + } + JELI.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; + if (!SetInformationJobObject(jobObject, JobObjectExtendedLimitInformation, &JELI, sizeof(JELI))) { + qWarning() << "Could NOT set job object information" << getLastErrorAsString(); + return nullptr; + } + + return jobObject; +} + +void addProcessToGroup(void* processGroup, qint64 processId) { + HANDLE hProcess = OpenProcess(PROCESS_ALL_ACCESS, FALSE, processId); + if 
(hProcess == nullptr) { + qCritical() << "Could NOT open process" << getLastErrorAsString(); + } + if (!AssignProcessToJobObject(processGroup, hProcess)) { + qCritical() << "Could NOT assign process to job object" << getLastErrorAsString(); + } +} + +#endif diff --git a/libraries/shared/src/SharedUtil.h b/libraries/shared/src/SharedUtil.h index 681418a263..25051d45ac 100644 --- a/libraries/shared/src/SharedUtil.h +++ b/libraries/shared/src/SharedUtil.h @@ -238,4 +238,10 @@ void setMaxCores(uint8_t maxCores); const QString PARENT_PID_OPTION = "parent-pid"; void watchParentProcess(int parentPID); + +#ifdef Q_OS_WIN +void* createProcessGroup(); +void addProcessToGroup(void* processGroup, qint64 processId); +#endif + #endif // hifi_SharedUtil_h diff --git a/scripts/developer/utilities/render/bloom.qml b/scripts/developer/utilities/render/bloom.qml new file mode 100644 index 0000000000..66e92e0eff --- /dev/null +++ b/scripts/developer/utilities/render/bloom.qml @@ -0,0 +1,119 @@ +// +// bloom.qml +// developer/utilities/render +// +// Olivier Prat, created on 09/25/2017. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html +// +import QtQuick 2.5 +import QtQuick.Controls 1.4 +import "configSlider" + +Item { + id: root + property var config: Render.getConfig("RenderMainView.Bloom") + property var configThreshold: Render.getConfig("RenderMainView.BloomThreshold") + property var configDebug: Render.getConfig("RenderMainView.DebugBloom") + + Column { + spacing: 8 + + CheckBox { + text: "Enable" + checked: root.config["enabled"] + onCheckedChanged: { + root.config["enabled"] = checked; + } + } + GroupBox { + title: "Debug" + Row { + ExclusiveGroup { id: debugGroup } + RadioButton { + text : "Off" + checked : !root.configDebug["enabled"] + onCheckedChanged: { + if (checked) { + root.configDebug["enabled"] = false + } + } + exclusiveGroup : debugGroup + } + RadioButton { + text : "Lvl 0" + checked :root.configDebug["enabled"] && root.configDebug["mode"]==0 + onCheckedChanged: { + if (checked) { + root.configDebug["enabled"] = true + root.configDebug["mode"] = 0 + } + } + exclusiveGroup : debugGroup + } + RadioButton { + text : "Lvl 1" + checked : root.configDebug["enabled"] && root.configDebug["mode"]==1 + onCheckedChanged: { + if (checked) { + root.configDebug["enabled"] = true + root.configDebug["mode"] = 1 + } + } + exclusiveGroup : debugGroup + } + RadioButton { + text : "Lvl 2" + checked : root.configDebug["enabled"] && root.configDebug["mode"]==2 + onCheckedChanged: { + if (checked) { + root.configDebug["enabled"] = true + root.configDebug["mode"] = 2 + } + } + exclusiveGroup : debugGroup + } + RadioButton { + text : "All" + checked : root.configDebug["enabled"] && root.configDebug["mode"]==3 + onCheckedChanged: { + if (checked) { + root.configDebug["enabled"] = true + root.configDebug["mode"] = 3 + } + } + exclusiveGroup : debugGroup + } + } + } + ConfigSlider { + label: "Intensity" + integral: false + config: root.config + property: "intensity" + max: 5.0 + min: 0.0 + width: 280 + } + 
ConfigSlider { + label: "Size" + integral: false + config: root.config + property: "size" + max: 1.0 + min: 0.0 + width: 280 + } + ConfigSlider { + label: "Threshold" + integral: false + config: root.configThreshold + property: "threshold" + max: 2.0 + min: 0.0 + width: 280 + } + } +} diff --git a/scripts/developer/utilities/render/debugBloom.js b/scripts/developer/utilities/render/debugBloom.js new file mode 100644 index 0000000000..2328d524cf --- /dev/null +++ b/scripts/developer/utilities/render/debugBloom.js @@ -0,0 +1,20 @@ +// +// debugBloom.js +// developer/utilities/render +// +// Olivier Prat, created on 09/25/2017. +// Copyright 2017 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// Set up the qml ui +var qml = Script.resolvePath('bloom.qml'); +var window = new OverlayWindow({ + title: 'Bloom', + source: qml, + width: 285, + height: 170, +}); +window.closed.connect(function() { Script.stop(); }); \ No newline at end of file diff --git a/scripts/system/controllers/controllerDispatcher.js b/scripts/system/controllers/controllerDispatcher.js index 37cd173cfa..ee3b90d7b5 100644 --- a/scripts/system/controllers/controllerDispatcher.js +++ b/scripts/system/controllers/controllerDispatcher.js @@ -389,7 +389,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js"); Controller.enableMapping(MAPPING_NAME); this.leftControllerRayPick = RayPick.createRayPick({ - joint: "_CONTROLLER_LEFTHAND", + joint: "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND", filter: RayPick.PICK_ENTITIES | RayPick.PICK_OVERLAYS, enabled: true, maxDistance: DEFAULT_SEARCH_SPHERE_DISTANCE, @@ -403,7 +403,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js"); posOffset: getGrabPointSphereOffset(Controller.Standard.LeftHand, true) }); this.rightControllerRayPick = RayPick.createRayPick({ - joint: "_CONTROLLER_RIGHTHAND", + joint: 
"_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND", filter: RayPick.PICK_ENTITIES | RayPick.PICK_OVERLAYS, enabled: true, maxDistance: DEFAULT_SEARCH_SPHERE_DISTANCE, diff --git a/scripts/system/controllers/controllerModules/overlayLaserInput.js b/scripts/system/controllers/controllerModules/overlayLaserInput.js index 2d27f160c1..538a943901 100644 --- a/scripts/system/controllers/controllerModules/overlayLaserInput.js +++ b/scripts/system/controllers/controllerModules/overlayLaserInput.js @@ -362,7 +362,7 @@ Script.include("/~/system/libraries/controllers.js"); }; this.laserPointer = LaserPointers.createLaserPointer({ - joint: (this.hand === RIGHT_HAND) ? "_CONTROLLER_RIGHTHAND" : "_CONTROLLER_LEFTHAND", + joint: (this.hand === RIGHT_HAND) ? "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND" : "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND", filter: RayPick.PICK_OVERLAYS, maxDistance: PICK_MAX_DISTANCE, posOffset: getGrabPointSphereOffset(this.handToController(), true), diff --git a/scripts/system/controllers/controllerModules/scaleAvatar.js b/scripts/system/controllers/controllerModules/scaleAvatar.js index de0434258c..1fb7e0c9d7 100644 --- a/scripts/system/controllers/controllerModules/scaleAvatar.js +++ b/scripts/system/controllers/controllerModules/scaleAvatar.js @@ -63,6 +63,7 @@ var newAvatarScale = (scalingCurrentDistance / this.scalingStartDistance) * this.scalingStartAvatarScale; MyAvatar.scale = newAvatarScale; + MyAvatar.scaleChanged(); } return dispatcherUtils.makeRunningValues(true, [], []); } diff --git a/scripts/system/edit.js b/scripts/system/edit.js index 15f1c2f6c1..88f50cb080 100644 --- a/scripts/system/edit.js +++ b/scripts/system/edit.js @@ -1424,24 +1424,29 @@ function deleteSelectedEntities() { for (var i = 0; i < newSortedSelection.length; i++) { var entityID = newSortedSelection[i]; var initialProperties = SelectionManager.savedProperties[entityID]; - var children = Entities.getChildrenIDs(entityID); - var childList = []; - recursiveDelete(children, childList, 
deletedIDs); - savedProperties.push({ - entityID: entityID, - properties: initialProperties, - children: childList - }); - deletedIDs.push(entityID); - Entities.deleteEntity(entityID); + if (!initialProperties.locked) { + var children = Entities.getChildrenIDs(entityID); + var childList = []; + recursiveDelete(children, childList, deletedIDs); + savedProperties.push({ + entityID: entityID, + properties: initialProperties, + children: childList + }); + deletedIDs.push(entityID); + Entities.deleteEntity(entityID); + } } - SelectionManager.clearSelections(); - pushCommandForSelections([], savedProperties); - entityListTool.webView.emitScriptEvent(JSON.stringify({ - type: "deleted", - ids: deletedIDs - })); + if (savedProperties.length > 0) { + SelectionManager.clearSelections(); + pushCommandForSelections([], savedProperties); + + entityListTool.webView.emitScriptEvent(JSON.stringify({ + type: "deleted", + ids: deletedIDs + })); + } } } diff --git a/scripts/system/html/js/marketplacesInject.js b/scripts/system/html/js/marketplacesInject.js index 4b127baef8..1346bcd750 100644 --- a/scripts/system/html/js/marketplacesInject.js +++ b/scripts/system/html/js/marketplacesInject.js @@ -318,6 +318,15 @@ }); } + function injectUnfocusOnSearch() { + // unfocus input field on search, thus hiding virtual keyboard + $('#search-box').on('submit', function () { + if (document.activeElement) { + document.activeElement.blur(); + } + }); + } + function injectHiFiCode() { if (commerceMode) { maybeAddLogInButton(); @@ -347,6 +356,8 @@ maybeAddPurchasesButton(); } } + + injectUnfocusOnSearch(); } function injectHiFiItemPageCode() { @@ -386,6 +397,8 @@ maybeAddPurchasesButton(); } } + + injectUnfocusOnSearch(); } function updateClaraCode() { diff --git a/scripts/system/libraries/WebTablet.js b/scripts/system/libraries/WebTablet.js index c46cfaa073..57b17f3d72 100644 --- a/scripts/system/libraries/WebTablet.js +++ b/scripts/system/libraries/WebTablet.js @@ -155,8 +155,7 @@ WebTablet = 
function (url, width, dpi, hand, clientOnly, location, visible) { localRotation: { x: 0, y: 1, z: 0, w: 0 }, dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor }, solid: true, - outerRadius: 25 * tabletScaleFactor, - innerRadius: 20 * tabletScaleFactor, + innerRadius: 0.9, ignoreIntersection: true, alpha: 1.0, color: { red: 255, green: 255, blue: 255 }, diff --git a/scripts/system/libraries/utils.js b/scripts/system/libraries/utils.js index 76c248d880..9706073081 100644 --- a/scripts/system/libraries/utils.js +++ b/scripts/system/libraries/utils.js @@ -409,15 +409,16 @@ resizeTablet = function (width, newParentJointIndex, sensorToWorldScaleOverride) // update homeButton var HOME_BUTTON_Y_OFFSET = ((tabletHeight / 2) - (tabletHeight / 20)) * sensorScaleOffsetOverride; + var homeButtonDim = 4 * tabletScaleFactor; Overlays.editOverlay(HMD.homeButtonID, { localPosition: {x: -0.001, y: -HOME_BUTTON_Y_OFFSET, z: 0.0}, - dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor} + dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim} }); + // Circle3D overlays render at 1.5x their proper dimensions + var highlightDim = homeButtonDim / 3.0; Overlays.editOverlay(HMD.homeButtonHighlightID, { localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET + 0.003, z: -0.0158 }, - dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor }, - outerRadius: 25 * tabletScaleFactor, - innerRadius: 20 * tabletScaleFactor + dimensions: { x: highlightDim, y: highlightDim, z: highlightDim } }); }; diff --git a/scripts/system/particle_explorer/hifi-entity-ui.js b/scripts/system/particle_explorer/hifi-entity-ui.js index a1d02e287d..abf9e3cce6 100644 --- a/scripts/system/particle_explorer/hifi-entity-ui.js +++ b/scripts/system/particle_explorer/hifi-entity-ui.js @@ -482,14 +482,23 @@ HifiEntityUI.prototype = { textureImage.className = "texture-image no-texture"; var image = 
document.createElement("img"); var imageLoad = _.debounce(function (url) { - if (url.length > 0) { + if (url.slice(0, 5).toLowerCase() === "atp:/") { + image.src = ""; + image.style.display = "none"; + textureImage.classList.remove("with-texture"); textureImage.classList.remove("no-texture"); + textureImage.classList.add("no-preview"); + } else if (url.length > 0) { + textureImage.classList.remove("no-texture"); + textureImage.classList.remove("no-preview"); textureImage.classList.add("with-texture"); image.src = url; image.style.display = "block"; } else { image.src = ""; image.style.display = "none"; + textureImage.classList.remove("with-texture"); + textureImage.classList.remove("no-preview"); textureImage.classList.add("no-texture"); } self.webBridgeSync(group.id, url); diff --git a/scripts/system/particle_explorer/particle-style.css b/scripts/system/particle_explorer/particle-style.css index e8b71fdba0..1e2801c19f 100644 --- a/scripts/system/particle_explorer/particle-style.css +++ b/scripts/system/particle_explorer/particle-style.css @@ -119,6 +119,10 @@ hr.splitter:last-of-type{ background-image: 
url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1
pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69fO0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAABhNJREFUeNrsnVFy4joQRVsSCwAqBMwqsrRsIavMEkICoeAf2+8j1R5ZGDBgpLzoUDVVmTT2dc8It/paOpi3t7faOSciImVZyn6/l6qqRETEWivj8VistYPFd7ud1HUtIiLGGBmPx5JKX0RkMplIzvmPnHNijBERaS7Ef1lrB40bY1oXgH5a/ZH+8P7+LlVVycfHR/MGa60URdGcYOi4MUaKomhGaGx9EZHlcplMP2X+Ly8vPwOgLEtxzklVVVJVVfOznqAsy9YFXhuvqqq5AF/Lj+srtr7+LpV+yvz1mNF+vxcRk
dVqJdZaeXp6ap1ws9m0TjibzVoj6lJ8vV6fjJdlKev1ujViU+j7t8tc8p9Op1KWpYw06L9JL0Av0r9l+jXl3nhd11JV1VE8tn5YM3PI3xjzoxVOGvyDU7zQj6s/0tGmI++amtNV087F9Wf/FnVPzRtCXz8RdV1nlb/efUbaJy4Wi0FqzjU1yRgjs9ls0Jp3jb6IyPPzczL9lPkvFot/dwCtB/om/x9oyJoXxps65NW8mPpdNTeX/JtBEtYE/+AUL/Tj6g/qA3TVnD41a6g++Bp9rYOp9FPnH80HOBcvy1I2m81D++BL+o/2AX5r/vgA+AD4AOif8AH8EdpVcy71sX3jWp/8W2AKff/TkUv+Oufr9AF0YuKc66xJ18T7eNP3nP9WfZ0EzufzJPqp8y+KQuq67vYBdETqCDpVU/rEw5oUnr+rD46h73/qUuinzh8fAP22D6AjxznXcqq6akrf+KmaFB6vf4+t7/sAelfIJf/GB9jtdmKMkdVq1dQM3zg4VVNU/NY+1Bgjh8Oh6YM1+dj6X19fzXwgp/wbH0DFtS7oyf0RdKqmhPFr+1RdseKfP7a+Px/IKX98APTbPoDOJrv60L417d54TH3V8lfS5pT/yfUA6/X6qOZcqkm3xrUm6X9CTH3fB0ihnzr/Ix9A/3T1qbfWpGvjMfX9T0UK/dT54wOg/88H8EfGPTVr6D740frhLDmn/Hv5AH1qku9t31KTzh3/aP1LPsBfzr+XDxCO0K6ack/N6qp5MfUv+QB/Of/ePsCQfWmfc6EfV3/kjzZrrRwOh9YtKHSm/LjOH3yrMTzej4c1y//51PHoP0a/tR7AOSdFURw9rz5VU049zw7jl2qWrosP++BY+iI/+wJS6afMv9kXoA6gvimsieHzZr/m6MTp3PPuc3G9SP95OPpx9JtOgT4cHwA+QCJ9+ADwAeADsC+AfQHo/4b1APAB4APAB4APAB8APgB9OD4AfAD4AFFqEnwA+AD4APgA6P86HwA+AHyAZhIBHwA+AHwA+AD04X/eB4APAB8APgB8APgA8AHow/P0AeADwAeADwAfAD4AfAD68Px8APgA8AHgA8AHgA8AH0DO70/v6lHvjaOfVn8U/iLcXx5OUML96X49vRTX3/nPw9FPo9+sB5hMJuKck+VyeVRTrLWtdfNdcf95eldNCuOfn5+tSYy/Pz+2voi0fICc8p/P5z93gJAPEN4+wufN4evaePj99eH+ePTj6p/1Abp60kt9Ksf/v46HDwAfAD6A/6gUPgD7AtgXwPP4DNcDwAeADwAfAD4AfAD4ADyPz289AHyA+Pqp84cPIPAB8AHwAfAB8AHgA7Q+HfAB4APAB4APAB+APjw3HwA+AHwA+ADwAeADwAegD8/TB4APAB8APgB8APgA8AHow/PzAeADwAeADwAfAD4AfACJ//316KfVH/mjLeb31+vx/kWhH0+/tR7AOSdFUUT9/nq9oK4+OJa+iLT25+eUf7MvIOQDxPr+en2F++PRj6PfdAr04fgA8AES6cMHgA8AH4B9AewLQP83rAeADwAfAD4AfAD4APAB6MPxAeADwAeIUpPgA8AHwAfAB0D/1/kA8AHgAzSTCPgA8AHgA8AHoA//8z4AfAD4APAB4APAB4APQB+epw8AHwA+AHwA+ADwAeAD0Ifn5wPAB4APAB8APgB8gBz5AOb19bX2TYLpdNpqQ7bbbctJGjJeVZVst9vWLSu2/vf3t+Sc/yicFIRr0C7Fu76f/lw8XBePflr9/wYAqWwWUSLcO54AAAAASUVORK5CYII='); } -.texture-image.no-texture{ +.texture-image.no-texture { background-image: 
url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1
pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69fO0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAB81JREFUeNrsnTGPm0oXht97FWm2Ch2pTEeHpUihsyvTuXO67Ta/IPkr+Qfp3MWdO7Zad0SKZDo6XIWOrTzV9xVXZ8SygGHXG4/t96lW68GGw8vMmZlzDv98+/btfyBXy780wXXzTv74/fs3rXFFfPz4kT0AoQAoAJqAAiAUAKEACAVAKABCARAKgFAAhAIgFAChAAgFQCgAQgEQCoBQAIQCIBQAoQAIBUAoAHLmvDv3C7i7u4PjOMiyDOv1+mC75XKJoiga2wRBAN/34TgOHMdBWZYoigJpmiLPcwrARhzHAQD4v
g/P81pvlLRrwvM8zGYz00ZrbY5xHAe+7yPPc9zf36MsSwrAVmazGX78+DHoGM/zsFgsAAB5nmOz2ZgeQimF8XiMMAxNu+VyaQRCH8Ai8jyH4zgIw7D3MUopzOdzAECaplitVk+GB601kiTBz58/obWG4ziIoohOoI38+vULABCGYWd3X2U6nUIphbIsEcdxa7uiKPDw8GCGGtd1KQDbKMsSWZZBKYXJZNLrGN/3zdN/iDRNTdcvx1EAFqGUwmazeeIQduG6LpRSAIAsy3r9hrRjD2BxL5AkiXEI+8wetNa9PXtp13eIoQBOQJIkxmHrcgjlJkov8JKpJwVgIVpr47CFYdh6g/f7/ZM5/9CehgKwmDRNURQFlFKYTqeNN/rx8dH0AH2faBn7KYAzQKZ1QRCYZd0qf/78MX+PRqNe3ymO5W63owBsR9bwZShoGirEq++zeBQEweBZAwVwYh4eHqC1RhAErQ6jOHVdK3yu65qhJE1TDgHn5BDKTW6auxdFYdYOgiDAYrF40k4phTAM8fnzZyilUBRF54rhOfIOF06SJMYPaPt8v99jOp3C8zx4nget9bPZQ5ZlF3fzL0IAZVke9OLv7+/Njl/brCHLMozHY4xGI3z48MH0EEVRIMuyi40H+EdqBbNS6HXBSqGEAiAUAAVAE1AAhAIgFAChAAgFQCgAQgGQq+Eom0GLxeJgGHYVSdCUhM02yrI0qV5hGGIymaAsy9b0LNd1cXt7CwDYbDa98wOA/zKLVquVSQGr/nYTbe2iKDIh53JtZVmiLEvsdjtst9tn5z7EDmfXA3QFXdaTMbvYbrdm568tgkdueJ7njbt3QwJA+8YJ1tsFQQDXdXFzc2N2E0Uwk8kEX758eXbMEDtY2QOsVqtn//v69SsAYL1eH9xK7dNGgjuiKMJ4PH4WmSN7+QBMFu/3798bn1oAzz47NvVrqmYgz2azRpv1scNV+wDVaN969y6JIEmSWBmyJenlIgZbcgvOzgmUqJxqkmY18ldCvGwkz/MntQcogBcgETrVMV98Aptvfh1JTKEAXsBms4HWGp7nYT6fw3Ec5Hlufbi253lQSkFr3VqmhgLoQVmW2G63ZigQx8/2my/FKCR17WLWAV7LfD5vzOFLkqS1W0/T1HT9RVFY5/jNZjMz3ouvorVGHMet9QheYoer7AGq478Y2LaiDTc3N3Bd90megSwG2YQVPcDQ+a/ccK01ttutWSWsetl/i7bfq16TzP1lGFgul0exw9X2AJLGJV3joRXCl3rnXbUDhmQKl2WJ9XoNrbV1vdXZCUCWWqvVQGR8HFIgqmuaKUiCSJcA+nrzWmvzdA/ZN6EAKlTz/eXmA3iSuXOoNEzfBRsA+PTpU+PnUjxSfnvo9/ZNR6cAakjFj2rqd3VtQJ6u1z5h1e+SdYbqdK5aWHLImC0OoFQgpRN4YPoD/LfRVC8C2TQlkhVC3/dfVDG0/l1xHCOKIvi+b572atJoURSdtYnbfAHxV0aj0TP/oY8dzqYH6OscHXK26tO+rqcujmNTIKqtJkDfc0vTFMvl8smu436/R57niOO4NSbh0HfLkFHtpYbY4dgwOfRKYXIooQAIBUAB0AQUAKEACAVAKABCARAKgFAA5Gp4s93AKIrw/v17ExsnFEWB/X6P3W6HLMtaN0+GJkwOad+W2FlPLq3GHFSRdq85h2PYyGoByG6cvJOnHiEryZJSg7e+s1ZNmOyzSza0ffWYJsIwbMzk7Tp+6Dm81kZWC0BoCnSU7dowDE2K12q1alT60EDJYwVWKqUQRdHgPf9jnfMQG52dDyA5fLKnLlGztiB5Bn1eP3fuNvr31IaWZM9jhHIdEwk5G1Jk4hxtdPJZQJZlJrLWlnBpx3FMmrnrup3RReduIyumgXJxtryRUxw4mQXIO4Yv0UZWCMDWN3I2vX7u0mxk1RtDmp6yoQmTbe27kjK7iOMYt7e3CIIA2+22VyLIWyZ5Hrsnsmol0Jac+fo51QtSX
JKNrOgBuvLsTrUOUO8FxAP3ff/gTXiLc3irt5aevAdQSpmpja0vZqq+fm4ymfz18i5vaaOTC0DSvapv8rQRmRY6joPxeHwxNjqpAGSpUwx8ikKJQ5AyNFKb4BJsdBIfwPM8BEFgFjXSNG3debMJSUv7GyuWf8tGby6Aaq2c+qvaJce/a3p2ioTJQ73A3d3di6aBbef8WhtZKQDJ6K1fTJ7neHx8PFjWTcbbvvPePm8QbVtc6ft/+UwKUdfbDT3n19roGDA59EphciihAAgFQAHQBBQAoQAIBUAoAEIBEAqAUACEAiAUAKEACAVAKABCARAKgFAAhAIgFAChAAgFQC4CkxgiceKEPQC5Iv4/APgB2O7x8IXXAAAAAElFTkSuQmCC'); } + +.texture-image.no-preview { + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAIAAABMXPacAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAA8sSURBVHhe7Z3rbxXFG8d7B9SWthRabLmIYlHkIEXKJdXYBEXxHtEXprwxxsR3/jG+8PLCaDDGeAkmKsTEoCUVKoVCA6WNtLS2UEUKBSy0tKW/D+eZM9nu7tmz55z+mC2Zz4tl9tk5c2bnO/PMM2dnS+6nn36aYzFH7vvvv6+SFhMoAY4fPy7nljvG448/zjFPTiymsAIYxgpgGCuAYawAhrECGMYKYBgrgGGsAIaxAhjGCmAYK4BhrACGsQIYxgpgGCuAYawAhrECGMYKYBgrgGGsAIaxAhjmLhQgPz+/pKRk3rx56jzaRHFf0ObNmxctWkTi7Nmzp0+fFqNm+/btRUVFP/30kzp3UFtbu27duqVLl+bl3e5Y169f7+rqam1tvXnzpmSIFNHdF1RTU7M6TkNDQ0FBgbImWLVqFZfUSQKyvfzyy88991x1dfXU1NSFCxdGRkbuueeeurq6pqam0tJSlS96RNcFSQvSo9V5IC+88MIDDzwwOjr6448/fvTRR19++eVnn322Z8+ev//+u7i4+M0331ywYIHKGjGiK8Aff/zBMRaL5ebmiiUZjz322MqVK/Ez33333ZkzZxgBYh8eHt67d++lS5do/W3btokxakRXANxIf38/3mPNmjXKlARxpkeOHKGtxaIZHx9vaWkhwfTg9WZRILoCgIQG0r7JKC8vlxm7s7NTLC6YyW/cuFFYWIiPUqYoEWkB+vr6cOJLlizBwyiTB2l9vA0xj1hcTE9PDw4OkiA6EkukiLQAcOzYMY4bN26UUy8LFy7k+O+//8qpL1euXOF43333yWmkiLoATKqEQwSmlZWVyjQTIiWOwZG+npYjSNQFwIG0tbWRqK+vF4sL1r0qlZzJyUmOYXLeeaIuAHR3d+PfmQbE27hgguUY3LgS/0RzHMwBAei/R48ezcvL8x0EOCiOxEJy6osoJ1JFjTkgAHR0dExMTBDLexe0EvsTKQUMgsWLF3OUWChqzA0BGARoQBN7wyHWa6Ojo1x6+OGHlWkmaEOoeuvWrXPnzilTlJgbAgBeiEEQi8W8Pf3kyZMct27d6v0JGsf15JNPkmA5lmyhYJY5IwAenNmYBW1RUZEyJSBMYiYoLi7etWtXWVmZsubkkHPHjh2EsCjX3NysrBFjzggANDSeRJ04wEF9//33rLYqKip27979yiuvNDY2Pvvss2+//TZ+ieBn//79V69eVbkjRv6WLVv4hxW/nEcB+iyuo6ura3x8XJnicIqToV8zGpgSlDXO2NhYZ2cnV+WnIVZtTLxEn+fPn9+3b180p9+qqiqOd9ub8ihH67M8xuPT65mf1YXocXe+KY+PGhoa6unp4Rjl1tfcbQLMOawAhrECGMYKYBgrgGGsAIaxAhjGCmAYK4BhrACGyfy3oNdff72mpkadJ
Lh27Vpvb29LS8vExIRYdu7c6dpLOz09ffPmTXLypadOnVLWnJzGxsZYLKZOPHR0dDQ3N7/33nv5+fkff/yx7/PFBQsWvPPOO5T/4YcfLly4sKmpaXBw8Ntvv5Wr7777bsAOUbINDw+Th5IpX1kTyGcPHz7c2tqqTHG4NW7wzz//9N2tHczs/BY0NjZ2PQFVLy4uXr9+/UsvvaQuJxgfH1eZ4tkKCwsrKiq2b9/u3XbozOkEzaamps6ePUueZHvcsOfl5ZFHtkH4oorzQOFU7MqVKzS0S6fy8nKxeDvckiVLOGbza2u22yW/+eYbOo46ie9Te/XVV5ctW7Z8+fK//vpLWXNyfvjhB2ctaaaGhoYNGzZs3bq1q6tLWeP88ssvdCh14oFLDz30EA3tuxFRhBGRkvHJJ5+olB8XLlxg6NCs/f39ypRo93/++Wfp0qWMP+fuCnna7N2TGp5ZngMQ48iRIyQefPBBsfhy69atgwcPjo6OlpSU+G42SQaicv80tPfBJBbslBwsQDBDQ0McpVk1CMBAx2HyFa79jUhFfeRTmTH7k7DsEky5DxBPffHiRRKytS0kNMTAwAAN4d0tigX7+fPnfaeHkEjlxbFoEIAvlTFRXV0tRhBnNTIy4hwT6TL7Asgz2zBvBUlO/K+chkQc1IoVK+RUI5YzZ87IaWZIX3buMpIJAP+Jroxv5zQgOmW52WL2BZDtyv/995+cJkMeHHJX6T42wcPgZ5gJ1HkCsWTjf4C+TCuXlpZqFyctLl6etpZpIH5F6eScAjNglgVg+n3iiSdIuHoiI/f2S19xamtrN23a9NprrzEVt7W1uSKWtWvXPu2HuhzfHkF/pFfef//9ypSTQxoLPi3lw3dV3Ez4UnU5/nicJpZuBAigvTzfyyU9DWQfAkG2UdCLL76oPeC99947f/58Et3d3cQMYhTk0b8TejGhfXt7uzpPgCfxuhf49ddfVSonp6enhyhr1apVeHyxkOYYxv8QJauUA9yaXpEQCKEH8zAJThGA1pd7lLamM0mCPNhl73vGZDsCGK10FgGffvnyZZYqP//8s7qcgCY7EUemMvz+F198ceDAAaZiyaA5duwYixov6nIcaWhpdEHSfIucBqCKm4m8hSDIBhHp3URoMgHEr9wefHoaYChw71qbjMlWgK+//pp1o/DBBx98/vnnLBfp3epyAmI4ujDs3bv3t99+I/J5/vnnfd++4/7pj17U5TjohzsuKysTL8yRNM5HwqpgVHEzce7KoYlpUynZO83qaYAOxzGbFYCQrQAsXOkXgrc7+4IYuA5WwgHvvaSEVuMoKy859vb23r6QNbQ+zof2Je2cAAQ9DYhCWU4AMPtRUBhko2B9fX1aiwAnEu3IakCOYfxPSFgN4HnwP7h7xHA6GT0NyFScZQgEZgRgimYyKCwsrKurU6Y0weHIbwO0FEfGX5bxuBPp8kR0jAPX22d8EY2Oa6qqqiJt3gVlzKFDhzjGYjFaUCzpgs/BGzQ2NnJkWg7pAMMg8Y/8Wul1Mn19fUiONtl3fzAmAP0XN8IgcM0EGzZs2JkElSOBTAMsLDiGnwBUWR74XpUjvuxiJS/TgK8AdBpUz34CAGMCgPy27hoEdC5Zr3lRORIQ8krYMzExMTAwIMaUqLI8iE/XyCCgj+NnxKLRoWf2/gcyfyBDGDNv3jw6csCP70C0QPvSUq6tzgKelK5EUxJZElazlFMX/PB6efkIJXsD0IKCgsrKSuclmpi1t6S9uBy6lJzMy1My5ae892DExdn/R8wYd+fu6DmHFcAwVgDDWAEMYwUwjBXAMFYAw1gBDGMFMIwVwDBp/xSxZs2aqqqqsbGxw4cPK1PiD2W0t7cne0K9ePHitWvXXr9+Xf4aKFRWVj7yyCMkKIfSxKgpLS1lpT4yMqIrxinGU6dOBf95OGH16tXV1dWuSmrkmbs6iTM5OXnjxo2enh7560Oap+O7MZz7AVzIF6kTPwI+m+FPEbT1+vXrN
2/eXFJSokzxfXAYH330UXXuYd26dWRw/uoZi8WwgPPZukYKdO5vJI0FDdR5IL6V1KxYseL2FzvYuHFjQ0NDU1OTa7uRXFUnftTU1EieZKh8yUlPALott3T58mXSiC9GkJ/mA/aDyo1JNsjPz6fdr169OjU15SxnVqioqCgrK/NW0oXefrF///4DBw5QN2r1zDPPFBcXqxyhOXnypBTlReVITnoCyP20tLS4Gq6/v58hvGjRIudfi9HIrqnR0VG9jWfZsmXz58/nnoeGhiQt9llBVxIXFCCA3n7R3d3d0dFBY3EXRUVF4hjTAq8oRXlROZKTtgATExN9fX0DAwMyGsQ+PT0te3V8b1iMztqIpbe3l6JkNIh9VtCVpEGdlUyJPOjnI3J6Z0hDALkZbozuL63pbG6vReMSQFqcEcOACPhUZoj/kUrKPonwhcvTlTDbimeRNASQt1mkp9N5uUPn+y2Dg4M4Ge7f1eOQTR4taf+zcuVKfI6UI5sbli9f7pyfs0GaWwpnmLoqGYxswwr/dHNWSEMA7o37kfdecK+4b+luchUv5NudnS0iiEU/Rmfg5+XlBb/QEZ7gSjoh0CpPwOy1adMmQrVz58653tgJAz1MFTQT79+w8xJWACZSvobeoWN2r9MXAWSfmkb8u8v/UIjuaOk6igCkrYMrqXnqqad2JyAA3bZtG8N037593n2VKamvr1cFzaS2tlblSE5YAeQenLvPpJc57w0ng0thYaL3u0mLcGN6Bwf+p7CwkOmRfiqWixcv4rsIqLP3QmEqqRkeHqZWQK8njMH1U+233nor5FLDCcs3KcpFypckIOz2dLkHhiqrG7EAlZYmlqAb6Oksaoj65W+6iWOhG+pdU1IOGjjLQSGGF5nlD1BmTMhKCq2trXpcAkOT5RuV37Fjx1dffaWs4Whvb3f9DbvwhBoBdE8aiASr5y0O5B0j519MlVvSDt21/iooKBCPxFEVEYcGwhhmwAYgrUwiZSV9YUQeOnQI31VVVZXWe4NZEkoAqT3tyIrRibwQ6Ww4Qho6mvgTmoNG4ZZ0/EO70/cZ7+rzDojc+VTGe3VBur+3kvq/MInnCgINqD+JDLxQxqQWIDc3VzoyHYSB5uT333/HfUtDS2agCYhqWN8CpxKwyiVpI/XhmUhQJBkyQz7rrWRbWxvu3lXJZMhw0RW+A6QWQLoz9+DyoYI3hmFlzxHN+CAJp/+RAMk5SWqyjIXE/ySrJOsyjikLp+OzaiEKohxl+v+TWgCpt2+rgTfOu3TpEoENrQ/OcBP/w0RHyMGUKxYnrAbod84IyheCa/K4YH4KrqSvAK6i6urq3njjDcbu6dOnXTVUOWZCf1KX48opqweZOwNIEQVp/6PXTS7w77SyDHC9C5NeT0RBorOz0+V/5PcWL5OTk0hFkEq2EydOKKsHJlWVcoCjl8KTVVJUd1XStyjmp4MHD6qTBLt27VIpB3v27NEDZUMcSbugbrhBdeJHij9dTDyAvFQrWaMQXyLS+Pj4tWvX9PAn/kV5hgJhJXYxMgLIQDm+u3SBeZgOKJM2/YuhwJSoN+SWlJTQiJTphTZlzRlQSXBWkjUwsan6cBy+iLD9+PHjzc3Nzv22RLQqhwfEphBukx6mTH6wEEn2kOru/NPFc4gMn4hZZhcrgGGsAIaxAhjGCmAYK4BhrACGsQIYxgpgGCuAYawAhrECGMYKYBgrgGGsAIaxAhjGCmAYK4BhrACGsQIYxgpgGCuAYdS2FIsp7AgwSk7O/wCqCi/+JioQYgAAAABJRU5ErkJggg=='); +}