Merge branch 'master' of github.com:highfidelity/hifi into artist-stocking-inventory

This commit is contained in:
Howard Stearns 2019-02-15 15:15:13 -08:00
commit f16eb1c029
89 changed files with 1369 additions and 1452 deletions

View file

@ -3,7 +3,7 @@ set -xeuo pipefail
./gradlew -PHIFI_ANDROID_PRECOMPILED=${HIFI_ANDROID_PRECOMPILED} -PVERSION_CODE=${VERSION_CODE} -PRELEASE_NUMBER=${RELEASE_NUMBER} -PRELEASE_TYPE=${RELEASE_TYPE} ${ANDROID_APP}:${ANDROID_BUILD_TARGET}
# This is the actual output from gradle, which no longer attempts to muck with the naming of the APK
OUTPUT_APK=./apps/${ANDROID_APP}/build/outputs/apk/${ANDROID_BUILD_DIR}/${ANDROID_APP}-${ANDROID_BUILD_DIR}.apk
OUTPUT_APK=./apps/${ANDROID_APP}/build/outputs/apk/${ANDROID_BUILD_DIR}/${ANDROID_BUILT_APK_NAME}
# This is the APK name requested by Jenkins
TARGET_APK=./${ANDROID_APK_NAME}
# Make sure this matches up with the new ARTIFACT_EXPRESSION for jenkins builds, which should be "android/*.apk"

View file

@ -16,6 +16,7 @@ docker run \
-e RELEASE_NUMBER \
-e RELEASE_TYPE \
-e ANDROID_APP \
-e ANDROID_BUILT_APK_NAME \
-e ANDROID_APK_NAME \
-e ANDROID_BUILD_TARGET \
-e ANDROID_BUILD_DIR \

View file

@ -14,7 +14,7 @@ link_hifi_libraries(
audio avatars octree gpu graphics shaders fbx hfm entities
networking animation recording shared script-engine embedded-webserver
controllers physics plugins midi image
model-networking ktx shaders
material-networking model-networking ktx shaders
)
add_dependencies(${TARGET_NAME} oven)

View file

@ -324,12 +324,6 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
if (isFullScene) {
// we're forcing a full scene, clear the force in OctreeQueryNode so we don't force it next time again
nodeData->setShouldForceFullScene(false);
} else {
// we aren't forcing a full scene, check if something else suggests we should
isFullScene = nodeData->haveJSONParametersChanged() ||
(nodeData->hasConicalViews() &&
(nodeData->getViewFrustumJustStoppedChanging() ||
nodeData->hasLodChanged()));
}
if (nodeData->isPacketWaiting()) {

View file

@ -205,8 +205,8 @@ endif()
# link required hifi libraries
link_hifi_libraries(
shared workload task octree ktx gpu gl procedural graphics graphics-scripting render
pointers
recording hfm fbx networking model-networking model-baker entities avatars trackers
pointers recording hfm fbx networking material-networking
model-networking model-baker entities avatars trackers
audio audio-client animation script-engine physics
render-utils entities-renderer avatars-renderer ui qml auto-updater midi
controllers plugins image trackers

View file

@ -75,7 +75,7 @@ ModalWindow {
QtObject {
id: d
readonly property int minWidth: 480
readonly property int minWidth: 1100
readonly property int maxWidth: 1280
readonly property int minHeight: 120
readonly property int maxHeight: 720
@ -95,6 +95,7 @@ ModalWindow {
id: mainTextContainer
onTextChanged: d.resize();
wrapMode: Text.WordWrap
width: messageBox.width
size: hifi.fontSizes.menuItem
color: hifi.colors.baseGrayHighlight
anchors {

View file

@ -31,8 +31,9 @@ Rectangle {
id: root
property string activeView: "initialize"
property int currentSortIndex: 0
property int currentSortIndex: 1
property string sortString: "recent"
property bool isAscending: false
property string categoryString: ""
property string searchString: ""
property bool keyboardEnabled: HMD.active
@ -316,23 +317,28 @@ Rectangle {
font.pixelSize: hifi.fontSizes.textFieldInput
placeholderText: "Search Marketplace"
Timer {
id: keypressTimer
running: false
repeat: false
interval: 300
onTriggered: searchField.accepted()
}
// workaround for https://bugreports.qt.io/browse/QTBUG-49297
Keys.onPressed: {
switch (event.key) {
case Qt.Key_Return:
case Qt.Key_Enter:
event.accepted = true;
searchField.text = "";
// emit accepted signal manually
if (acceptableInput) {
searchField.accepted();
searchField.forceActiveFocus();
}
getMarketplaceItems();
searchField.forceActiveFocus();
break;
case Qt.Key_Backspace:
if (searchField.text === "") {
primaryFilter_index = -1;
}
default:
keypressTimer.restart();
break;
}
}
@ -498,6 +504,7 @@ Rectangle {
"",
"",
root.sortString,
root.isAscending,
WalletScriptingInterface.limitedCommerce,
marketBrowseModel.currentPageToRetrieve,
marketBrowseModel.itemsPerPage
@ -726,7 +733,8 @@ Rectangle {
top: parent.top
leftMargin: 20
}
width: root.isLoggedIn ? 322 : 242
width: root.isLoggedIn ? 342 : 262
height: parent.height
radius: 4
@ -737,27 +745,27 @@ Rectangle {
id: sortModel
ListElement {
name: "Name";
glyph: ";"
name: "Name"
sortString: "alpha"
ascending: true
}
ListElement {
name: "Date";
glyph: ";";
sortString: "recent";
name: "Date"
sortString: "recent"
ascending: false
}
ListElement {
name: "Popular";
glyph: ";";
sortString: "likes";
name: "Popular"
sortString: "likes"
ascending: false
}
ListElement {
name: "My Likes";
glyph: ";";
sortString: "my_likes";
name: "My Likes"
sortString: "my_likes"
ascending: false
}
}
@ -783,10 +791,10 @@ Rectangle {
currentIndex: 1;
delegate: SortButton {
width: 80
width: 85
height: parent.height
glyph: model.glyph
ascending: model.ascending
text: model.name
visible: root.isLoggedIn || model.sortString != "my_likes"
@ -794,6 +802,12 @@ Rectangle {
checked: ListView.isCurrentItem
onClicked: {
if(root.currentSortIndex == index) {
ascending = !ascending;
} else {
ascending = model.ascending;
}
root.isAscending = ascending;
root.currentSortIndex = index;
sortListView.positionViewAtIndex(index, ListView.Beginning);
sortListView.currentIndex = index;
@ -802,7 +816,7 @@ Rectangle {
}
}
highlight: Rectangle {
width: 80
width: 85
height: parent.height
color: hifi.colors.faintGray
@ -1125,6 +1139,8 @@ Rectangle {
fill: parent
}
ScrollBar.horizontal.policy: ScrollBar.AlwaysOff
RalewayRegular {
id: licenseText

View file

@ -526,7 +526,7 @@ Rectangle {
} else if (root.license === "Attribution-NonCommercial-NoDerivs (CC BY-NC-ND)") {
url = "https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode.txt"
} else if (root.license === "Proof of Provenance License (PoP License)") {
url = "https://digitalassetregistry.com/PoP-License/v1/"
url = "licenses/Popv1.txt"
}
if(url) {
showLicense(url)

View file

@ -28,58 +28,60 @@ Item {
id: root;
property string ascGlyph: "\u2193"
property string descGlyph: "\u2191"
property string text: ""
property bool ascending: false
property bool checked: false
signal clicked()
property string glyph: "";
property string text: "";
property bool checked: false;
signal clicked();
width: childrenRect.width;
height: parent.height;
width: childrenRect.width
height: parent.height
Rectangle {
anchors.top: parent.top;
anchors.left: parent.left;
height: parent.height;
width: 2;
color: hifi.colors.faintGray;
visible: index > 0;
anchors.top: parent.top
anchors.left: parent.left
height: parent.height
width: 2
color: hifi.colors.faintGray
visible: index > 0
}
HiFiGlyphs {
id: buttonGlyph;
text: root.glyph;
RalewayRegular {
id: buttonGlyph
text: root.ascending ? root.ascGlyph : root.descGlyph
// Size
size: 14;
size: 14
// Anchors
anchors.left: parent.left;
anchors.leftMargin: 0;
anchors.top: parent.top;
anchors.verticalCenter: parent.verticalCenter;
height: parent.height;
horizontalAlignment: Text.AlignHCenter;
anchors.left: parent.left
anchors.leftMargin: 10
anchors.top: parent.top
anchors.topMargin: 6
anchors.bottom: parent.bottom
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignTop
// Style
color: hifi.colors.lightGray;
color: hifi.colors.lightGray
}
RalewayRegular {
id: buttonText;
text: root.text;
id: buttonText
text: root.text
// Text size
size: 14;
size: 14
// Style
color: hifi.colors.lightGray;
elide: Text.ElideRight;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
color: hifi.colors.lightGray
elide: Text.ElideRight
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
// Anchors
anchors.left: parent.left;
anchors.leftMargin: 20;
anchors.top: parent.top;
height: parent.height;
anchors.left: buttonGlyph.right
anchors.leftMargin: 5
anchors.top: parent.top
height: parent.height
}
MouseArea {
anchors.fill: parent;
hoverEnabled: enabled;
anchors.fill: parent
hoverEnabled: enabled
onClicked: {
root.clicked();
}

View file

@ -0,0 +1,12 @@
<h2>Proof of Provenance License (PoP License) v1.0</h2>
<br>
<p>
Subject to the terms and conditions of this license, the Copyright Holder grants a worldwide, non-exclusive, non-sublicensable, non-transferable (except by transfer of the Certificate or beneficial ownership thereof) license (i) to the Certificate Holder to display ONE COPY of the Item at a time across any and all virtual worlds WITHOUT MODIFICATION; (ii) to any party to view and interact with the Item as displayed by the Certificate Holder. Redistributions of source code must retain all copyright notices. Notwithstanding the foregoing, modification of the Item may be permitted pursuant to terms provided in the Certificate.
</p>
<p>
THE ITEM IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR A CONTRIBUTOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ITEM, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
</p>
<p><i>
Reference to the “Certificate” means the Proof of Provenance Certificate containing a hash of the code used to generate the Item; “Item” means the visual representation produced by the execution of the code hashed in the Certificate (which term includes the code itself); and “Certificate Holder” means a single holder of the private key for the Certificate.
</i></p>

View file

@ -16,15 +16,23 @@
#include <QObject>
/**jsdoc
* The <code>HifiAbout</code> API provides information about the version of Interface that is currently running. It also
* provides the ability to open a Web page in an Interface browser window.
*
* @namespace HifiAbout
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
*
* @property {string} buildDate
* @property {string} buildVersion
* @property {string} qtVersion
* @property {string} buildDate - The build date of Interface that is currently running. <em>Read-only.</em>
* @property {string} buildVersion - The build version of Interface that is currently running. <em>Read-only.</em>
* @property {string} qtVersion - The Qt version used in Interface that is currently running. <em>Read-only.</em>
*
* @example <caption>Report build information for the version of Interface currently running.</caption>
* print("HiFi build date: " + HifiAbout.buildDate); // 11 Feb 2019
* print("HiFi version: " + HifiAbout.buildVersion); // 0.78.0
* print("Qt version: " + HifiAbout.qtVersion); // 5.10.1
*/
class AboutUtil : public QObject {
@ -44,8 +52,9 @@ public:
public slots:
/**jsdoc
* Display a Web page in an Interface browser window.
* @function HifiAbout.openUrl
* @param {string} url
* @param {string} url - The URL of the Web page to display.
*/
void openUrl(const QString &url) const;
private:
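A minimal usage sketch for the openUrl() slot documented above; the URL shown is only an illustrative placeholder, not one referenced by this commit.

// Open an example Web page in an Interface browser window (placeholder URL).
HifiAbout.openUrl("https://www.highfidelity.com/");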

View file

@ -102,7 +102,7 @@
#include <MessagesClient.h>
#include <hfm/ModelFormatRegistry.h>
#include <model-networking/ModelCacheScriptingInterface.h>
#include <model-networking/TextureCacheScriptingInterface.h>
#include <material-networking/TextureCacheScriptingInterface.h>
#include <ModelEntityItem.h>
#include <NetworkAccessManager.h>
#include <NetworkingConstants.h>
@ -154,7 +154,7 @@
#include <RenderableEntityItem.h>
#include <RenderableTextEntityItem.h>
#include <RenderableWebEntityItem.h>
#include <model-networking/MaterialCache.h>
#include <material-networking/MaterialCache.h>
#include "recording/ClipCache.h"
#include "AudioClient.h"
@ -6988,17 +6988,19 @@ void Application::nodeActivated(SharedNodePointer node) {
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto assetDialog = offscreenUi ? offscreenUi->getRootItem()->findChild<QQuickItem*>("AssetServer") : nullptr;
if (assetDialog) {
if (offscreenUi) {
auto nodeList = DependencyManager::get<NodeList>();
if (nodeList->getThisNodeCanWriteAssets()) {
// call reload on the shown asset browser dialog to get the mappings (if permissions allow)
QMetaObject::invokeMethod(assetDialog, "reload");
auto assetDialog = offscreenUi ? offscreenUi->getRootItem()->findChild<QQuickItem*>("AssetServer") : nullptr;
if (assetDialog) {
QMetaObject::invokeMethod(assetDialog, "reload");
}
} else {
// we switched to an Asset Server that we can't modify, hide the Asset Browser
assetDialog->setVisible(false);
offscreenUi->hide("AssetServer");
}
}
#endif

View file

@ -50,9 +50,10 @@ void QmlMarketplace::getMarketplaceItems(
const QString& adminFilter,
const QString& adminFilterCost,
const QString& sort,
const bool isFree,
const int& page,
const int& perPage) {
bool isAscending,
bool isFree,
int page,
int perPage) {
QString endpoint = "items";
QUrlQuery request;
@ -62,6 +63,7 @@ void QmlMarketplace::getMarketplaceItems(
request.addQueryItem("adminFilter", adminFilter);
request.addQueryItem("adminFilterCost", adminFilterCost);
request.addQueryItem("sort", sort);
request.addQueryItem("sort_dir", isAscending ? "asc" : "desc");
if (isFree) {
request.addQueryItem("isFree", "true");
}

View file

@ -46,9 +46,10 @@ protected:
const QString& adminFilter = QString("published"),
const QString& adminFilterCost = QString(),
const QString& sort = QString(),
const bool isFree = false,
const int& page = 1,
const int& perPage = 20);
bool isAscending = false,
bool isFree = false,
int page = 1,
int perPage = 20);
Q_INVOKABLE void getMarketplaceItem(const QString& marketplaceItemId);
Q_INVOKABLE void marketplaceItemLike(const QString& marketplaceItemId, const bool like = true);
Q_INVOKABLE void getMarketplaceCategories();

View file

@ -54,6 +54,29 @@ public:
void clear();
void getWalletStatus();
/**jsdoc
* <p>A <code>WalletStatus</code> may have one of the following values:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Meaning</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>Not logged in</td><td>The user isn't logged in.</td></tr>
* <tr><td><code>1</code></td><td>Not set up</td><td>The user's wallet isn't set up.</td></tr>
* <tr><td><code>2</code></td><td>Pre-existing</td><td>There is a wallet present on the server but not one
* locally.</td></tr>
* <tr><td><code>3</code></td><td>Conflicting</td><td>There is a wallet present on the server plus one present locally,
* and they don't match.</td></tr>
* <tr><td><code>4</code></td><td>Not authenticated</td><td>There is a wallet present locally but the user hasn't
* logged into it.</td></tr>
* <tr><td><code>5</code></td><td>Ready</td><td>The wallet is ready for use.</td></tr>
* </tbody>
* </table>
* <p>Wallets used to be stored locally but now they're stored on the server, unless the computer once had a wallet stored
* locally, in which case the wallet may be present in both places.</p>
* @typedef {number} WalletScriptingInterface.WalletStatus
*/
enum WalletStatus {
WALLET_STATUS_NOT_LOGGED_IN = 0,
WALLET_STATUS_NOT_SET_UP,

View file

@ -112,6 +112,12 @@ DownloadInfoResult::DownloadInfoResult() :
{
}
/**jsdoc
* Information on the assets currently being downloaded and pending download.
* @typedef {object} AccountServices.DownloadInfoResult
* @property {number[]} downloading - The percentage complete for each asset currently being downloaded.
* @property {number} pending - The number of assets waiting to be downloaded.
*/
QScriptValue DownloadInfoResultToScriptValue(QScriptEngine* engine, const DownloadInfoResult& result) {
QScriptValue object = engine->newObject();
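As a rough illustration of the DownloadInfoResult fields documented above, a sketch that reads them through AccountServices.getDownloadInfo() (documented later in this commit):

// Print a summary of current and pending asset downloads.
var info = AccountServices.getDownloadInfo();
print("Assets downloading: " + info.downloading.length + ", pending: " + info.pending);
info.downloading.forEach(function (percent) {
    print("  progress: " + percent + "%");
});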

View file

@ -38,17 +38,25 @@ class AccountServicesScriptingInterface : public QObject {
Q_OBJECT
/**jsdoc
* The AccountServices API contains helper functions related to user connectivity
* The <code>AccountServices</code> API provides functions related to user connectivity, visibility, and asset download
* progress.
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
*
* @namespace AccountServices
* @property {string} username <em>Read-only.</em>
* @property {boolean} loggedIn <em>Read-only.</em>
* @property {string} findableBy
* @property {string} metaverseServerURL <em>Read-only.</em>
* @property {string} username - The user name if the user is logged in, otherwise <code>"Unknown user"</code>.
* <em>Read-only.</em>
* @property {boolean} loggedIn - <code>true</code> if the user is logged in, otherwise <code>false</code>.
* <em>Read-only.</em>
* @property {string} findableBy - The user's visibility to other people:<br />
* <code>"none"</code> - user appears offline.<br />
* <code>"friends"</code> - user is visible only to friends.<br />
* <code>"connections"</code> - user is visible to friends and connections.<br />
* <code>"all"</code> - user is visible to everyone.
* @property {string} metaverseServerURL - The metaverse server that the user is authenticated against when logged in
* &mdash; typically <code>"https://metaverse.highfidelity.com"</code>. <em>Read-only.</em>
*/
Q_PROPERTY(QString username READ getUsername NOTIFY myUsernameChanged)
@ -66,29 +74,38 @@ public:
public slots:
/**jsdoc
* Get information on the progress of downloading assets in the domain.
* @function AccountServices.getDownloadInfo
* @returns {DownloadInfoResult}
* @returns {AccountServices.DownloadInfoResult} Information on the progress of assets download.
*/
DownloadInfoResult getDownloadInfo();
/**jsdoc
* Cause a {@link AccountServices.downloadInfoChanged|downloadInfoChanged} signal to be triggered with information on the
* current progress of the download of assets in the domain.
* @function AccountServices.updateDownloadInfo
*/
void updateDownloadInfo();
/**jsdoc
* Check whether the user is logged in.
* @function AccountServices.isLoggedIn
* @returns {boolean}
* @returns {boolean} <code>true</code> if the user is logged in, <code>false</code> otherwise.
* @example <caption>Report whether you are logged in.</caption>
* var isLoggedIn = AccountServices.isLoggedIn();
* print("You are logged in: " + isLoggedIn); // true or false
*/
bool isLoggedIn();
/**jsdoc
* Prompts the user to log in (the login dialog is displayed) if they're not already logged in.
* @function AccountServices.checkAndSignalForAccessToken
* @returns {boolean}
* @returns {boolean} <code>true</code> if the user is already logged in, <code>false</code> otherwise.
*/
bool checkAndSignalForAccessToken();
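A small sketch of the intended call pattern, based only on the description above:

// If no access token is available, this displays the login dialog.
var alreadyLoggedIn = AccountServices.checkAndSignalForAccessToken();
if (!alreadyLoggedIn) {
    print("Not logged in; the login dialog has been requested.");
}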
/**jsdoc
* Logs the user out.
* @function AccountServices.logOut
*/
void logOut();
@ -106,43 +123,75 @@ private slots:
signals:
/**jsdoc
* Not currently used.
* @function AccountServices.connected
* @returns {Signal}
*/
void connected();
/**jsdoc
* Triggered when the user logs out.
* @function AccountServices.disconnected
* @param {string} reason
* @param {string} reason - Has the value <code>"logout"</code>.
* @returns {Signal}
*/
void disconnected(const QString& reason);
/**jsdoc
* Triggered when the username logged in with changes, i.e., when the user logs in or out.
* @function AccountServices.myUsernameChanged
* @param {string} username
* @param {string} username - The username logged in with if the user is logged in, otherwise <code>""</code>.
* @returns {Signal}
* @example <caption>Report when your username changes.</caption>
* AccountServices.myUsernameChanged.connect(function (username) {
* print("Username changed: " + username);
* });
*/
void myUsernameChanged(const QString& username);
/**jsdoc
* Triggered when the progress of the download of assets for the domain changes.
* @function AccountServices.downloadInfoChanged
* @param {} info
* @param {AccountServices.DownloadInfoResult} downloadInfo - Information on the progress of assets download.
* @returns {Signal}
*/
void downloadInfoChanged(DownloadInfoResult info);
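A sketch tying updateDownloadInfo() to this signal; it assumes the Script.setInterval timer helper is available to the script, and the 5-second interval is arbitrary:

// Report asset download progress whenever it changes.
AccountServices.downloadInfoChanged.connect(function (downloadInfo) {
    print("Pending asset downloads: " + downloadInfo.pending);
});

// Periodically request a fresh downloadInfoChanged signal.
Script.setInterval(function () {
    AccountServices.updateDownloadInfo();
}, 5000);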
/**jsdoc
* Triggered when the user's visibility to others changes.
* @function AccountServices.findableByChanged
* @param {string} discoverabilityMode
* @param {string} findableBy - The user's visibility to other people:<br />
* <code>"none"</code> - user appears offline.<br />
* <code>"friends"</code> - user is visible only to friends.<br />
* <code>"connections"</code> - user is visible to friends and connections.<br />
* <code>"all"</code> - user is visible to everyone.
* @returns {Signal}
* @example <caption>Report when your visibility changes.</caption>
* AccountServices.findableByChanged.connect(function (findableBy) {
* print("Findable by changed: " + findableBy);
* });
*
* var originalFindableBy = AccountServices.findableBy;
* Script.setTimeout(function () {
* // Change visibility.
* AccountServices.findableBy = originalFindableBy === "none" ? "all" : "none";
* }, 2000);
* Script.setTimeout(function () {
* // Restore original visibility.
* AccountServices.findableBy = originalFindableBy;
* }, 4000);
*/
void findableByChanged(const QString& discoverabilityMode);
/**jsdoc
* Triggered when the login status of the user changes.
* @function AccountServices.loggedInChanged
* @param {boolean} loggedIn
* @param {boolean} loggedIn - <code>true</code> if the user is logged in, otherwise <code>false</code>.
* @returns {Signal}
* @example <caption>Report when your login status changes.</caption>
* AccountServices.loggedInChanged.connect(function(loggedIn) {
* print("Logged in: " + loggedIn);
* });
*/
void loggedInChanged(bool loggedIn);

View file

@ -1,4 +1,4 @@
//
// WalletScriptingInterface.h
// interface/src/scripting
//
@ -30,14 +30,19 @@ public:
};
/**jsdoc
* @namespace Wallet
* The <code>WalletScriptingInterface</code> API provides functions related to the user's wallet and verification of certified
* avatar entities.
*
* @namespace WalletScriptingInterface
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
*
* @property {number} walletStatus
* @property {bool} limitedCommerce
* @property {WalletScriptingInterface.WalletStatus} walletStatus - The status of the user's wallet. <em>Read-only.</em>
* @property {boolean} limitedCommerce - <code>true</code> if Interface is running in limited commerce mode. In limited commerce
* mode, certain Interface functionality is disabled, e.g., users can't buy non-free items from the Marketplace. The Oculus
* Store version of Interface runs in limited commerce mode. <em>Read-only.</em>
*/
class WalletScriptingInterface : public QObject, public Dependency {
Q_OBJECT
@ -50,19 +55,56 @@ public:
WalletScriptingInterface();
/**jsdoc
* Check and update the user's wallet status.
* @function WalletScriptingInterface.refreshWalletStatus
*/
Q_INVOKABLE void refreshWalletStatus();
/**jsdoc
* Get the current status of the user's wallet.
* @function WalletScriptingInterface.getWalletStatus
* @returns {number}
* @returns {WalletScriptingInterface.WalletStatus}
* @example <caption>Two ways to report your wallet status.</caption>
* print("Wallet status: " + WalletScriptingInterface.walletStatus); // Same value as next line.
* print("Wallet status: " + WalletScriptingInterface.getWalletStatus());
*/
Q_INVOKABLE uint getWalletStatus() { return _walletStatus; }
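A sketch that maps the WalletStatus codes from the table added earlier in this commit to readable labels; the label strings are illustrative only:

// Indices correspond to the documented WalletStatus values 0-5.
var WALLET_STATUS_LABELS = [
    "Not logged in",
    "Not set up",
    "Pre-existing",
    "Conflicting",
    "Not authenticated",
    "Ready"
];
var status = WalletScriptingInterface.getWalletStatus();
print("Wallet status: " + status + " (" + WALLET_STATUS_LABELS[status] + ")");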
/**jsdoc
* Check that a certified avatar entity is owned by the avatar whose entity it is. The result of the check is provided via
* the {@link WalletScriptingInterface.ownershipVerificationSuccess|ownershipVerificationSuccess} and
* {@link WalletScriptingInterface.ownershipVerificationFailed|ownershipVerificationFailed} signals.<br />
* <strong>Warning:</strong> Neither of these signals fires if the entity is not an avatar entity or is not a certified
* entity.
* @function WalletScriptingInterface.proveAvatarEntityOwnershipVerification
* @param {Uuid} entityID
* @param {Uuid} entityID - The ID of the avatar entity to check.
* @example <caption>Check ownership of all nearby certified avatar entities.</caption>
* // Set up response handling.
* function ownershipSuccess(entityID) {
* print("Ownership test succeeded for: " + entityID);
* }
* function ownershipFailed(entityID) {
* print("Ownership test failed for: " + entityID);
* }
* WalletScriptingInterface.ownershipVerificationSuccess.connect(ownershipSuccess);
* WalletScriptingInterface.ownershipVerificationFailed.connect(ownershipFailed);
*
* // Check ownership of all nearby certified avatar entities.
* var entityIDs = Entities.findEntities(MyAvatar.position, 10);
* var i, length;
* for (i = 0, length = entityIDs.length; i < length; i++) {
* var properties = Entities.getEntityProperties(entityIDs[i], ["entityHostType", "certificateID"]);
* if (properties.entityHostType === "avatar" && properties.certificateID !== "") {
* print("Prove ownership of: " + entityIDs[i]);
* WalletScriptingInterface.proveAvatarEntityOwnershipVerification(entityIDs[i]);
* }
* }
*
* // Tidy up.
* Script.scriptEnding.connect(function () {
* WalletScriptingInterface.ownershipVerificationFailed.disconnect(ownershipFailed);
* WalletScriptingInterface.ownershipVerificationSuccess.disconnect(ownershipSuccess);
* });
*/
Q_INVOKABLE void proveAvatarEntityOwnershipVerification(const QUuid& entityID);
@ -76,33 +118,45 @@ public:
signals:
/**jsdoc
* Triggered when the status of the user's wallet changes.
* @function WalletScriptingInterface.walletStatusChanged
* @returns {Signal}
* @example <caption>Report when your wallet status changes, e.g., when you log in and out.</caption>
* WalletScriptingInterface.walletStatusChanged.connect(function () {
* print("Wallet status changed to: " + WalletScriptingInterface.walletStatus");
* });
*/
void walletStatusChanged();
/**jsdoc
* Triggered when the user's limited commerce status changes.
* @function WalletScriptingInterface.limitedCommerceChanged
* @returns {Signal}
*/
void limitedCommerceChanged();
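A minimal sketch reacting to this signal with the limitedCommerce property documented above:

// Report whenever limited commerce mode is toggled.
WalletScriptingInterface.limitedCommerceChanged.connect(function () {
    print("Limited commerce is now: " + WalletScriptingInterface.limitedCommerce);
});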
/**jsdoc
* Triggered when the user rezzes a certified entity but the user's wallet is not ready and so the certified location of the
* entity cannot be updated in the metaverse.
* @function WalletScriptingInterface.walletNotSetup
* @returns {Signal}
*/
void walletNotSetup();
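A small sketch, assuming a script only wants to log the event described above:

// Warn when a certified entity is rezzed before the wallet is ready.
WalletScriptingInterface.walletNotSetup.connect(function () {
    print("Wallet is not set up; the entity's certified location was not updated.");
});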
/**jsdoc
* Triggered when a certified avatar entity's ownership check requested via
* {@link WalletScriptingInterface.proveAvatarEntityOwnershipVerification|proveAvatarEntityOwnershipVerification} succeeds.
* @function WalletScriptingInterface.ownershipVerificationSuccess
* @param {Uuid} entityID
* @param {Uuid} entityID - The ID of the avatar entity checked.
* @returns {Signal}
*/
void ownershipVerificationSuccess(const QUuid& entityID);
/**jsdoc
* Triggered when a certified avatar entity's ownership check requested via
* {@link WalletScriptingInterface.proveAvatarEntityOwnershipVerification|proveAvatarEntityOwnershipVerification} fails.
* @function WalletScriptingInterface.ownershipVerificationFailed
* @param {Uuid} entityID
* @param {Uuid} entityID - The ID of the avatar entity checked.
* @returns {Signal}
*/
void ownershipVerificationFailed(const QUuid& entityID);

View file

@ -41,7 +41,7 @@ QSharedPointer<Resource> AnimationCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> AnimationCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new Animation(*resource.staticCast<Animation>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new Animation(*resource.staticCast<Animation>()), &Resource::deleter);
}
AnimationReader::AnimationReader(const QUrl& url, const QByteArray& data) :

View file

@ -40,5 +40,5 @@ QSharedPointer<Resource> SoundCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> SoundCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new Sound(*resource.staticCast<Sound>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new Sound(*resource.staticCast<Sound>()), &Resource::deleter);
}

View file

@ -1,6 +1,6 @@
set(TARGET_NAME avatars-renderer)
setup_hifi_library(Network Script)
link_hifi_libraries(shared shaders gpu graphics animation model-networking script-engine render render-utils image trackers entities-renderer)
link_hifi_libraries(shared shaders gpu graphics animation material-networking model-networking script-engine render render-utils image trackers entities-renderer)
include_hifi_library_headers(avatars)
include_hifi_library_headers(networking)
include_hifi_library_headers(hfm)

View file

@ -2,6 +2,7 @@ set(TARGET_NAME display-plugins)
setup_hifi_library(Gui)
link_hifi_libraries(shared shaders plugins ui-plugins gl ui render-utils ${PLATFORM_GL_BACKEND})
include_hifi_library_headers(gpu)
include_hifi_library_headers(material-networking)
include_hifi_library_headers(model-networking)
include_hifi_library_headers(networking)
include_hifi_library_headers(graphics)

View file

@ -720,7 +720,7 @@ void OpenGLDisplayPlugin::present() {
}
gpu::Backend::freeGPUMemSize.set(gpu::gl::getFreeDedicatedMemory());
} else {
} else if (alwaysPresent()) {
internalPresent();
}
_movingAveragePresent.addSample((float)(usecTimestampNow() - startPresent));

View file

@ -88,6 +88,11 @@ protected:
glm::uvec2 getSurfaceSize() const;
glm::uvec2 getSurfacePixels() const;
// Some display plugins require us to always execute some present logic,
// whether we have a frame or not (e.g., the Oculus Mobile plugin).
// Such plugins must be prepared to do the right thing if `_currentFrame`
// is not populated.
virtual bool alwaysPresent() const { return false; }
void updateCompositeFramebuffer();

View file

@ -1,6 +1,6 @@
set(TARGET_NAME entities-renderer)
setup_hifi_library(Network Script)
link_hifi_libraries(shared workload gpu shaders procedural graphics model-networking script-engine render render-utils image qml ui pointers)
link_hifi_libraries(shared workload gpu shaders procedural graphics material-networking model-networking script-engine render render-utils image qml ui pointers)
include_hifi_library_headers(networking)
include_hifi_library_headers(gl)
include_hifi_library_headers(ktx)

View file

@ -6,4 +6,4 @@ include_hifi_library_headers(fbx)
include_hifi_library_headers(gpu)
include_hifi_library_headers(image)
include_hifi_library_headers(ktx)
link_hifi_libraries(shared shaders networking octree avatars graphics model-networking)
link_hifi_libraries(shared shaders networking octree avatars graphics material-networking model-networking)

View file

@ -13,7 +13,7 @@
#include "MaterialMappingMode.h"
#include <model-networking/ModelCache.h>
#include <model-networking/MaterialCache.h>
#include <material-networking/MaterialCache.h>
class MaterialEntityItem : public EntityItem {
using Pointer = std::shared_ptr<MaterialEntityItem>;

View file

@ -1326,7 +1326,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
hfmModel.meshExtents.reset();
// Create the Material Library
consolidateHFMMaterials(mapping);
consolidateHFMMaterials();
// We can't allow the scaling of a given image to different sizes, because the hash used for the KTX cache is based on the original image
// Allowing scaling of the same image to different sizes would cause different KTX files to target the same cache key

View file

@ -153,7 +153,7 @@ public:
QHash<QString, HFMMaterial> _hfmMaterials;
QHash<QString, MaterialParam> _materialParams;
void consolidateHFMMaterials(const QVariantHash& mapping);
void consolidateHFMMaterials();
bool _loadLightmaps { true };
float _lightmapOffset { 0.0f };

View file

@ -75,15 +75,7 @@ HFMTexture FBXSerializer::getTexture(const QString& textureID, const QString& ma
return texture;
}
void FBXSerializer::consolidateHFMMaterials(const QVariantHash& mapping) {
QJsonObject materialMap;
if (mapping.contains("materialMap")) {
QByteArray materialMapValue = mapping.value("materialMap").toByteArray();
materialMap = QJsonDocument::fromJson(materialMapValue).object();
if (materialMap.isEmpty()) {
qCDebug(modelformat) << "fbx Material Map found but did not produce valid JSON:" << materialMapValue;
}
}
void FBXSerializer::consolidateHFMMaterials() {
for (QHash<QString, HFMMaterial>::iterator it = _hfmMaterials.begin(); it != _hfmMaterials.end(); it++) {
HFMMaterial& material = (*it);
@ -266,23 +258,6 @@ void FBXSerializer::consolidateHFMMaterials(const QVariantHash& mapping) {
}
qCDebug(modelformat) << " fbx material Name:" << material.name;
if (materialMap.contains(material.name)) {
QJsonObject materialOptions = materialMap.value(material.name).toObject();
qCDebug(modelformat) << "Mapping fbx material:" << material.name << " with HifiMaterial: " << materialOptions;
if (materialOptions.contains("scattering")) {
float scattering = (float) materialOptions.value("scattering").toDouble();
material._material->setScattering(scattering);
}
if (materialOptions.contains("scatteringMap")) {
QByteArray scatteringMap = materialOptions.value("scatteringMap").toVariant().toByteArray();
material.scatteringTexture = HFMTexture();
material.scatteringTexture.name = material.name + ".scatteringMap";
material.scatteringTexture.filename = scatteringMap;
}
}
if (material.opacity <= 0.0f) {
material._material->setOpacity(1.0f);
} else {

View file

@ -892,6 +892,23 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
for (int n = 0; n < colors.size() - 3; n += stride) {
mesh.colors.push_back(glm::vec3(colors[n], colors[n + 1], colors[n + 2]));
}
} else if (key == "TANGENT") {
QVector<float> tangents;
success = addArrayOfType(buffer.blob,
bufferview.byteOffset + accBoffset,
accessor.count,
tangents,
accessor.type,
accessor.componentType);
if (!success) {
qWarning(modelformat) << "There was a problem reading glTF TANGENT data for model " << _url;
continue;
}
int stride = (accessor.type == GLTFAccessorType::VEC4) ? 4 : 3;
for (int n = 0; n < tangents.size() - 3; n += stride) {
float tanW = stride == 4 ? tangents[n + 3] : 1;
mesh.tangents.push_back(glm::vec3(tanW * tangents[n], tangents[n + 1], tangents[n + 2]));
}
} else if (key == "TEXCOORD_0") {
QVector<float> texcoords;
success = addArrayOfType(buffer.blob,
@ -931,7 +948,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
}
mesh.parts.push_back(part);
// populate the texture coordenates if they don't exist
// populate the texture coordinates if they don't exist
if (mesh.texCoords.size() == 0) {
for (int i = 0; i < part.triangleIndices.size(); i++) mesh.texCoords.push_back(glm::vec2(0.0, 1.0));
}

View file

@ -246,7 +246,7 @@ public:
struct Flags {
Flags() :
frontFaceClockwise(false), depthClampEnable(false), scissorEnable(false), multisampleEnable(false),
frontFaceClockwise(false), depthClampEnable(false), scissorEnable(false), multisampleEnable(true),
antialisedLineEnable(true), alphaToCoverageEnable(false), _spare1(0) {}
bool frontFaceClockwise : 1;
bool depthClampEnable : 1;

View file

@ -1,4 +1,4 @@
set(TARGET_NAME graphics-scripting)
setup_hifi_library()
link_hifi_libraries(shared networking graphics fbx image model-networking script-engine)
link_hifi_libraries(shared networking graphics fbx image material-networking model-networking script-engine)
include_hifi_library_headers(gpu)

View file

@ -0,0 +1,5 @@
set(TARGET_NAME material-networking)
setup_hifi_library()
link_hifi_libraries(shared shaders networking graphics ktx image gl)
include_hifi_library_headers(gpu)
include_hifi_library_headers(hfm)

View file

@ -425,5 +425,311 @@ QSharedPointer<Resource> MaterialCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> MaterialCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new NetworkMaterialResource(*resource.staticCast<NetworkMaterialResource>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new NetworkMaterialResource(*resource.staticCast<NetworkMaterialResource>()), &Resource::deleter);
}
NetworkMaterial::NetworkMaterial(const NetworkMaterial& m) :
Material(m),
_textures(m._textures),
_albedoTransform(m._albedoTransform),
_lightmapTransform(m._lightmapTransform),
_lightmapParams(m._lightmapParams),
_isOriginal(m._isOriginal)
{}
const QString NetworkMaterial::NO_TEXTURE = QString();
const QString& NetworkMaterial::getTextureName(MapChannel channel) {
if (_textures[channel].texture) {
return _textures[channel].name;
}
return NO_TEXTURE;
}
QUrl NetworkMaterial::getTextureUrl(const QUrl& baseUrl, const HFMTexture& texture) {
if (texture.content.isEmpty()) {
// External file: search relative to the baseUrl, in case filename is relative
return baseUrl.resolved(QUrl(texture.filename));
} else {
// Inlined file: cache under the fbx file to avoid namespace clashes
// NOTE: We cannot resolve the path because filename may be an absolute path
assert(texture.filename.size() > 0);
auto baseUrlStripped = baseUrl.toDisplayString(QUrl::RemoveFragment | QUrl::RemoveQuery | QUrl::RemoveUserInfo);
if (texture.filename.at(0) == '/') {
return baseUrlStripped + texture.filename;
} else {
return baseUrlStripped + '/' + texture.filename;
}
}
}
graphics::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const HFMTexture& hfmTexture,
image::TextureUsage::Type type, MapChannel channel) {
if (baseUrl.isEmpty()) {
return nullptr;
}
const auto url = getTextureUrl(baseUrl, hfmTexture);
const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, hfmTexture.content, hfmTexture.maxNumPixels, hfmTexture.sourceChannel);
_textures[channel] = Texture { hfmTexture.name, texture };
auto map = std::make_shared<graphics::TextureMap>();
if (texture) {
map->setTextureSource(texture->_textureSource);
}
map->setTextureTransform(hfmTexture.transform);
return map;
}
graphics::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& url, image::TextureUsage::Type type, MapChannel channel) {
auto textureCache = DependencyManager::get<TextureCache>();
if (textureCache && !url.isEmpty()) {
auto texture = textureCache->getTexture(url, type);
_textures[channel].texture = texture;
auto map = std::make_shared<graphics::TextureMap>();
if (texture) {
map->setTextureSource(texture->_textureSource);
}
return map;
}
return nullptr;
}
void NetworkMaterial::setAlbedoMap(const QUrl& url, bool useAlphaChannel) {
auto map = fetchTextureMap(url, image::TextureUsage::ALBEDO_TEXTURE, MapChannel::ALBEDO_MAP);
if (map) {
map->setUseAlphaChannel(useAlphaChannel);
setTextureMap(MapChannel::ALBEDO_MAP, map);
}
}
void NetworkMaterial::setNormalMap(const QUrl& url, bool isBumpmap) {
auto map = fetchTextureMap(url, isBumpmap ? image::TextureUsage::BUMP_TEXTURE : image::TextureUsage::NORMAL_TEXTURE, MapChannel::NORMAL_MAP);
if (map) {
setTextureMap(MapChannel::NORMAL_MAP, map);
}
}
void NetworkMaterial::setRoughnessMap(const QUrl& url, bool isGloss) {
auto map = fetchTextureMap(url, isGloss ? image::TextureUsage::GLOSS_TEXTURE : image::TextureUsage::ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
if (map) {
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
}
}
void NetworkMaterial::setMetallicMap(const QUrl& url, bool isSpecular) {
auto map = fetchTextureMap(url, isSpecular ? image::TextureUsage::SPECULAR_TEXTURE : image::TextureUsage::METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
if (map) {
setTextureMap(MapChannel::METALLIC_MAP, map);
}
}
void NetworkMaterial::setOcclusionMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
if (map) {
setTextureMap(MapChannel::OCCLUSION_MAP, map);
}
}
void NetworkMaterial::setEmissiveMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
if (map) {
setTextureMap(MapChannel::EMISSIVE_MAP, map);
}
}
void NetworkMaterial::setScatteringMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
if (map) {
setTextureMap(MapChannel::SCATTERING_MAP, map);
}
}
void NetworkMaterial::setLightmapMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
if (map) {
//map->setTextureTransform(_lightmapTransform);
//map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
setTextureMap(MapChannel::LIGHTMAP_MAP, map);
}
}
NetworkMaterial::NetworkMaterial(const HFMMaterial& material, const QUrl& textureBaseUrl) :
graphics::Material(*material._material),
_textures(MapChannel::NUM_MAP_CHANNELS)
{
_name = material.name.toStdString();
if (!material.albedoTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.albedoTexture, image::TextureUsage::ALBEDO_TEXTURE, MapChannel::ALBEDO_MAP);
if (map) {
_albedoTransform = material.albedoTexture.transform;
map->setTextureTransform(_albedoTransform);
if (!material.opacityTexture.filename.isEmpty()) {
if (material.albedoTexture.filename == material.opacityTexture.filename) {
// Best case scenario, just indicating that the albedo map contains transparency
// TODO: Different albedo/opacity maps are not currently supported
map->setUseAlphaChannel(true);
}
}
}
setTextureMap(MapChannel::ALBEDO_MAP, map);
}
if (!material.normalTexture.filename.isEmpty()) {
auto type = (material.normalTexture.isBumpmap ? image::TextureUsage::BUMP_TEXTURE : image::TextureUsage::NORMAL_TEXTURE);
auto map = fetchTextureMap(textureBaseUrl, material.normalTexture, type, MapChannel::NORMAL_MAP);
setTextureMap(MapChannel::NORMAL_MAP, map);
}
if (!material.roughnessTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.roughnessTexture, image::TextureUsage::ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
} else if (!material.glossTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.glossTexture, image::TextureUsage::GLOSS_TEXTURE, MapChannel::ROUGHNESS_MAP);
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
}
if (!material.metallicTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.metallicTexture, image::TextureUsage::METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
setTextureMap(MapChannel::METALLIC_MAP, map);
} else if (!material.specularTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.specularTexture, image::TextureUsage::SPECULAR_TEXTURE, MapChannel::METALLIC_MAP);
setTextureMap(MapChannel::METALLIC_MAP, map);
}
if (!material.occlusionTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.occlusionTexture, image::TextureUsage::OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
if (map) {
map->setTextureTransform(material.occlusionTexture.transform);
}
setTextureMap(MapChannel::OCCLUSION_MAP, map);
}
if (!material.emissiveTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.emissiveTexture, image::TextureUsage::EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
setTextureMap(MapChannel::EMISSIVE_MAP, map);
}
if (!material.scatteringTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.scatteringTexture, image::TextureUsage::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
setTextureMap(MapChannel::SCATTERING_MAP, map);
}
if (!material.lightmapTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.lightmapTexture, image::TextureUsage::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
if (map) {
_lightmapTransform = material.lightmapTexture.transform;
_lightmapParams = material.lightmapParams;
map->setTextureTransform(_lightmapTransform);
map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
}
setTextureMap(MapChannel::LIGHTMAP_MAP, map);
}
}
void NetworkMaterial::setTextures(const QVariantMap& textureMap) {
_isOriginal = false;
const auto& albedoName = getTextureName(MapChannel::ALBEDO_MAP);
const auto& normalName = getTextureName(MapChannel::NORMAL_MAP);
const auto& roughnessName = getTextureName(MapChannel::ROUGHNESS_MAP);
const auto& metallicName = getTextureName(MapChannel::METALLIC_MAP);
const auto& occlusionName = getTextureName(MapChannel::OCCLUSION_MAP);
const auto& emissiveName = getTextureName(MapChannel::EMISSIVE_MAP);
const auto& lightmapName = getTextureName(MapChannel::LIGHTMAP_MAP);
const auto& scatteringName = getTextureName(MapChannel::SCATTERING_MAP);
if (!albedoName.isEmpty()) {
auto url = textureMap.contains(albedoName) ? textureMap[albedoName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::ALBEDO_TEXTURE, MapChannel::ALBEDO_MAP);
if (map) {
map->setTextureTransform(_albedoTransform);
// when reassigning the albedo texture we also check for the alpha channel used as opacity
map->setUseAlphaChannel(true);
}
setTextureMap(MapChannel::ALBEDO_MAP, map);
}
if (!normalName.isEmpty()) {
auto url = textureMap.contains(normalName) ? textureMap[normalName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::NORMAL_TEXTURE, MapChannel::NORMAL_MAP);
setTextureMap(MapChannel::NORMAL_MAP, map);
}
if (!roughnessName.isEmpty()) {
auto url = textureMap.contains(roughnessName) ? textureMap[roughnessName].toUrl() : QUrl();
// FIXME: If passing a gloss map instead of a roughmap how do we know?
auto map = fetchTextureMap(url, image::TextureUsage::ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
}
if (!metallicName.isEmpty()) {
auto url = textureMap.contains(metallicName) ? textureMap[metallicName].toUrl() : QUrl();
// FIXME: If passing a specular map instead of a metallic how do we know?
auto map = fetchTextureMap(url, image::TextureUsage::METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
setTextureMap(MapChannel::METALLIC_MAP, map);
}
if (!occlusionName.isEmpty()) {
auto url = textureMap.contains(occlusionName) ? textureMap[occlusionName].toUrl() : QUrl();
// FIXME: we need to handle the occlusion map transform here
auto map = fetchTextureMap(url, image::TextureUsage::OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
setTextureMap(MapChannel::OCCLUSION_MAP, map);
}
if (!emissiveName.isEmpty()) {
auto url = textureMap.contains(emissiveName) ? textureMap[emissiveName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
setTextureMap(MapChannel::EMISSIVE_MAP, map);
}
if (!scatteringName.isEmpty()) {
auto url = textureMap.contains(scatteringName) ? textureMap[scatteringName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
setTextureMap(MapChannel::SCATTERING_MAP, map);
}
if (!lightmapName.isEmpty()) {
auto url = textureMap.contains(lightmapName) ? textureMap[lightmapName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
if (map) {
map->setTextureTransform(_lightmapTransform);
map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
}
setTextureMap(MapChannel::LIGHTMAP_MAP, map);
}
}
bool NetworkMaterial::isMissingTexture() {
for (auto& networkTexture : _textures) {
auto& texture = networkTexture.texture;
if (!texture) {
continue;
}
// Failed texture downloads need to be considered as 'loaded'
// or the object will never fade in
bool finished = texture->isFailed() || (texture->isLoaded() && texture->getGPUTexture() && texture->getGPUTexture()->isDefined());
if (!finished) {
return true;
}
}
return false;
}
void NetworkMaterial::checkResetOpacityMap() {
// If material textures are loaded, check the material translucency
// FIXME: This should not be done here. The opacity map should already be reset in Material::setTextureMap.
// However, currently that code can be called before the albedo map is defined, so resetOpacityMap will fail.
// Geometry::areTexturesLoaded() is called repeatedly until it returns true, so we do the check here for now
const auto& albedoTexture = _textures[NetworkMaterial::MapChannel::ALBEDO_MAP];
if (albedoTexture.texture) {
resetOpacityMap();
}
}

View file

@ -0,0 +1,117 @@
//
// Created by Sam Gondelman on 2/9/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MaterialCache_h
#define hifi_MaterialCache_h
#include "glm/glm.hpp"
#include <ResourceCache.h>
#include <graphics/Material.h>
#include <hfm/HFM.h>
#include "TextureCache.h"
class NetworkMaterial : public graphics::Material {
public:
using MapChannel = graphics::Material::MapChannel;
NetworkMaterial() : _textures(MapChannel::NUM_MAP_CHANNELS) {}
NetworkMaterial(const HFMMaterial& material, const QUrl& textureBaseUrl);
NetworkMaterial(const NetworkMaterial& material);
void setAlbedoMap(const QUrl& url, bool useAlphaChannel);
void setNormalMap(const QUrl& url, bool isBumpmap);
void setRoughnessMap(const QUrl& url, bool isGloss);
void setMetallicMap(const QUrl& url, bool isSpecular);
void setOcclusionMap(const QUrl& url);
void setEmissiveMap(const QUrl& url);
void setScatteringMap(const QUrl& url);
void setLightmapMap(const QUrl& url);
bool isMissingTexture();
void checkResetOpacityMap();
protected:
friend class Geometry;
class Texture {
public:
QString name;
NetworkTexturePointer texture;
};
using Textures = std::vector<Texture>;
Textures _textures;
static const QString NO_TEXTURE;
const QString& getTextureName(MapChannel channel);
void setTextures(const QVariantMap& textureMap);
const bool& isOriginal() const { return _isOriginal; }
private:
// Helpers for the ctors
QUrl getTextureUrl(const QUrl& baseUrl, const HFMTexture& hfmTexture);
graphics::TextureMapPointer fetchTextureMap(const QUrl& baseUrl, const HFMTexture& hfmTexture,
image::TextureUsage::Type type, MapChannel channel);
graphics::TextureMapPointer fetchTextureMap(const QUrl& url, image::TextureUsage::Type type, MapChannel channel);
Transform _albedoTransform;
Transform _lightmapTransform;
vec2 _lightmapParams;
bool _isOriginal { true };
};
class NetworkMaterialResource : public Resource {
public:
NetworkMaterialResource() : Resource() {}
NetworkMaterialResource(const QUrl& url);
QString getType() const override { return "NetworkMaterial"; }
virtual void downloadFinished(const QByteArray& data) override;
typedef struct ParsedMaterials {
uint version { 1 };
std::vector<std::string> names;
std::unordered_map<std::string, std::shared_ptr<NetworkMaterial>> networkMaterials;
void reset() {
version = 1;
names.clear();
networkMaterials.clear();
}
} ParsedMaterials;
ParsedMaterials parsedMaterials;
static ParsedMaterials parseJSONMaterials(const QJsonDocument& materialJSON, const QUrl& baseUrl);
static std::pair<std::string, std::shared_ptr<NetworkMaterial>> parseJSONMaterial(const QJsonObject& materialJSON, const QUrl& baseUrl);
private:
static bool parseJSONColor(const QJsonValue& array, glm::vec3& color, bool& isSRGB);
};
using NetworkMaterialResourcePointer = QSharedPointer<NetworkMaterialResource>;
using MaterialMapping = std::vector<std::pair<std::string, NetworkMaterialResourcePointer>>;
class MaterialCache : public ResourceCache {
public:
static MaterialCache& instance();
NetworkMaterialResourcePointer getMaterial(const QUrl& url);
protected:
virtual QSharedPointer<Resource> createResource(const QUrl& url) override;
QSharedPointer<Resource> createResourceCopy(const QSharedPointer<Resource>& resource) override;
};
#endif

View file

@ -0,0 +1,11 @@
//
// Created by Sam Gondelman on 2/7/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MaterialNetworkingLogging.h"
Q_LOGGING_CATEGORY(materialnetworking, "hifi.gpu-material-network")

View file

@ -0,0 +1,11 @@
//
// Created by Sam Gondelman on 2/7/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(materialnetworking)

View file

@ -29,5 +29,5 @@ QSharedPointer<Resource> ShaderCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> ShaderCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new NetworkShader(*resource.staticCast<NetworkShader>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new NetworkShader(*resource.staticCast<NetworkShader>()), &Resource::deleter);
}

View file

@ -44,7 +44,7 @@
#include <Profile.h>
#include "NetworkLogging.h"
#include "ModelNetworkingLogging.h"
#include "MaterialNetworkingLogging.h"
#include "NetworkingConstants.h"
#include <Trace.h>
#include <StatTracker.h>
@ -198,16 +198,16 @@ public:
namespace std {
template <>
struct hash<QByteArray> {
size_t operator()(const QByteArray& a) const {
return qHash(a);
size_t operator()(const QByteArray& byteArray) const {
return qHash(byteArray);
}
};
template <>
struct hash<TextureExtra> {
size_t operator()(const TextureExtra& a) const {
size_t operator()(const TextureExtra& textureExtra) const {
size_t result = 0;
hash_combine(result, (int)a.type, a.content, a.maxNumPixels, (int)a.sourceChannel);
hash_combine(result, (int)textureExtra.type, textureExtra.content, textureExtra.maxNumPixels, (int)textureExtra.sourceChannel);
return result;
}
};
@ -329,15 +329,14 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> TextureCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new NetworkTexture(*resource.staticCast<NetworkTexture>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new NetworkTexture(*resource.staticCast<NetworkTexture>()), &Resource::deleter);
}
int networkTexturePointerMetaTypeId = qRegisterMetaType<QWeakPointer<NetworkTexture>>();
NetworkTexture::NetworkTexture(const QUrl& url, bool resourceTexture) :
Resource(url),
Texture(),
_maxNumPixels(100)
Texture()
{
if (resourceTexture) {
_textureSource = std::make_shared<gpu::TextureSource>(url);
@ -955,7 +954,7 @@ void NetworkTexture::handleFinishedInitialLoad() {
cache::FilePointer file;
auto& ktxCache = textureCache->_ktxCache;
if (!memKtx || !(file = ktxCache->writeFile(data, KTXCache::Metadata(filename, length)))) {
qCWarning(modelnetworking) << url << " failed to write cache file";
qCWarning(materialnetworking) << url << " failed to write cache file";
QMetaObject::invokeMethod(resource.data(), "setImage",
Q_ARG(gpu::TexturePointer, nullptr),
Q_ARG(int, 0),
@ -1145,7 +1144,7 @@ void ImageReader::listSupportedImageFormats() {
static std::once_flag once;
std::call_once(once, []{
auto supportedFormats = QImageReader::supportedImageFormats();
qCDebug(modelnetworking) << "List of supported Image formats:" << supportedFormats.join(", ");
qCDebug(materialnetworking) << "List of supported Image formats:" << supportedFormats.join(", ");
});
}
@ -1194,7 +1193,7 @@ void ImageReader::read() {
if (texture) {
texture = textureCache->cacheTextureByHash(hash, texture);
} else {
qCWarning(modelnetworking) << "Invalid cached KTX " << _url << " under hash " << hash.c_str() << ", recreating...";
qCWarning(materialnetworking) << "Invalid cached KTX " << _url << " under hash " << hash.c_str() << ", recreating...";
}
}
}

View file

@ -1,6 +1,7 @@
set(TARGET_NAME model-baker)
setup_hifi_library()
link_hifi_libraries(shared task gpu graphics hfm)
link_hifi_libraries(shared shaders task gpu graphics hfm material-networking)
include_hifi_library_headers(networking)
include_hifi_library_headers(image)
include_hifi_library_headers(ktx)

View file

@ -101,7 +101,7 @@ namespace baker {
class BuildModelTask {
public:
using Input = VaryingSet5<hfm::Model::Pointer, std::vector<hfm::Mesh>, std::vector<hfm::Joint>, QMap<int, glm::quat> /*jointRotationOffsets*/, QHash<QString, int> /*jointIndices*/>;
using Input = VaryingSet5<hfm::Model::Pointer, std::vector<hfm::Mesh>, std::vector<hfm::Joint>, QMap<int, glm::quat>, QHash<QString, int>>;
using Output = hfm::Model::Pointer;
using JobModel = Job::ModelIO<BuildModelTask, Input, Output>;
@ -118,9 +118,9 @@ namespace baker {
class BakerEngineBuilder {
public:
using Input = VaryingSet2<hfm::Model::Pointer, QVariantHash>;
using Output = hfm::Model::Pointer;
using Output = VaryingSet2<hfm::Model::Pointer, MaterialMapping>;
using JobModel = Task::ModelIO<BakerEngineBuilder, Input, Output>;
void build(JobModel& model, const Varying& input, Varying& hfmModelOut) {
void build(JobModel& model, const Varying& input, Varying& output) {
const auto& hfmModelIn = input.getN<Input>(0);
const auto& mapping = input.getN<Input>(1);
@ -154,13 +154,18 @@ namespace baker {
const auto jointRotationOffsets = jointInfoOut.getN<PrepareJointsTask::Output>(1);
const auto jointIndices = jointInfoOut.getN<PrepareJointsTask::Output>(2);
// Parse material mapping
const auto materialMapping = model.addJob<ParseMaterialMappingTask>("ParseMaterialMapping", mapping);
// Combine the outputs into a new hfm::Model
const auto buildBlendshapesInputs = BuildBlendshapesTask::Input(blendshapesPerMeshIn, normalsPerBlendshapePerMesh, tangentsPerBlendshapePerMesh).asVarying();
const auto blendshapesPerMeshOut = model.addJob<BuildBlendshapesTask>("BuildBlendshapes", buildBlendshapesInputs);
const auto buildMeshesInputs = BuildMeshesTask::Input(meshesIn, graphicsMeshes, normalsPerMesh, tangentsPerMesh, blendshapesPerMeshOut).asVarying();
const auto meshesOut = model.addJob<BuildMeshesTask>("BuildMeshes", buildMeshesInputs);
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut, jointsOut, jointRotationOffsets, jointIndices).asVarying();
hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);
const auto hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);
output = Output(hfmModelOut, materialMapping);
}
};
@ -172,7 +177,8 @@ namespace baker {
void Baker::run() {
_engine->run();
hfmModel = _engine->getOutput().get<BakerEngineBuilder::Output>();
hfmModel = _engine->getOutput().get<BakerEngineBuilder::Output>().get0();
materialMapping = _engine->getOutput().get<BakerEngineBuilder::Output>().get1();
}
};
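The builder above now returns two results through one output slot: a VaryingSet2 holding the baked hfm::Model and the parsed MaterialMapping, which Baker::run() unpacks with get0()/get1(). A plain-C++ analogue of that bundling, using std::tuple in place of the hifi task types (names below are illustrative, not the engine API):

#include <memory>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

struct BakedModel {};                                        // stand-in for hfm::Model
using MaterialEntry = std::pair<std::string, std::string>;   // stand-in for a MaterialMapping entry

// The "engine" hands back both results in a single bundle...
std::tuple<std::shared_ptr<BakedModel>, std::vector<MaterialEntry>> runBake() {
    auto model = std::make_shared<BakedModel>();
    std::vector<MaterialEntry> mapping;   // would be filled by the material-mapping pass
    return { model, mapping };
}

// ...and the caller unpacks them, mirroring get0()/get1() in Baker::run().
void consumeBakeOutput() {
    auto output = runBake();
    auto hfmModel = std::get<0>(output);
    auto materialMapping = std::get<1>(output);
    (void)hfmModel;
    (void)materialMapping;
}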

View file

@ -18,6 +18,8 @@
#include "Engine.h"
#include "ParseMaterialMappingTask.h"
namespace baker {
class Baker {
public:
@ -27,6 +29,7 @@ namespace baker {
// Outputs, available after run() is called
hfm::Model::Pointer hfmModel;
MaterialMapping materialMapping;
protected:
EnginePointer _engine;

View file

@ -0,0 +1,76 @@
//
// Created by Sam Gondelman on 2/7/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ParseMaterialMappingTask.h"
#include "ModelBakerLogging.h"
void ParseMaterialMappingTask::run(const baker::BakeContextPointer& context, const Input& mapping, Output& output) {
MaterialMapping materialMapping;
auto mappingIter = mapping.find("materialMap");
if (mappingIter != mapping.end()) {
QByteArray materialMapValue = mappingIter.value().toByteArray();
QJsonObject materialMap = QJsonDocument::fromJson(materialMapValue).object();
if (materialMap.isEmpty()) {
qCDebug(model_baker) << "Material Map found but did not produce valid JSON:" << materialMapValue;
} else {
auto mappingKeys = materialMap.keys();
for (auto mapping : mappingKeys) {
auto mappingJSON = materialMap[mapping];
if (mappingJSON.isObject()) {
auto mappingValue = mappingJSON.toObject();
// Old subsurface scattering mapping
{
auto scatteringIter = mappingValue.find("scattering");
auto scatteringMapIter = mappingValue.find("scatteringMap");
if (scatteringIter != mappingValue.end() || scatteringMapIter != mappingValue.end()) {
std::shared_ptr<NetworkMaterial> material = std::make_shared<NetworkMaterial>();
if (scatteringIter != mappingValue.end()) {
float scattering = (float)scatteringIter.value().toDouble();
material->setScattering(scattering);
}
if (scatteringMapIter != mappingValue.end()) {
QString scatteringMap = scatteringMapIter.value().toString();
material->setScatteringMap(scatteringMap);
}
material->setDefaultFallthrough(true);
NetworkMaterialResourcePointer materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(), [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
materialResource->moveToThread(qApp->thread());
materialResource->parsedMaterials.names.push_back("scattering");
materialResource->parsedMaterials.networkMaterials["scattering"] = material;
materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>("mat::" + mapping.toStdString(), materialResource));
continue;
}
}
// Material JSON description
{
NetworkMaterialResourcePointer materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(), [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
materialResource->moveToThread(qApp->thread());
// TODO: add baseURL to allow FSTs to reference relative files next to them
materialResource->parsedMaterials = NetworkMaterialResource::parseJSONMaterials(QJsonDocument(mappingValue), QUrl());
materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>(mapping.toStdString(), materialResource));
}
} else if (mappingJSON.isString()) {
auto mappingValue = mappingJSON.toString();
materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>(mapping.toStdString(), MaterialCache::instance().getMaterial(mappingValue)));
}
}
}
}
output = materialMapping;
}
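For context, the "materialMap" entry this task consumes is FST-style JSON keyed by material name: each value is either an inline material description (a JSON object, possibly carrying the legacy scattering/scatteringMap fields handled above) or a string URL pointing at an external material resource. A hedged, Qt-only illustration of parsing such a blob (the keys inside the inline description, and the key naming convention, are examples rather than a full schema):

#include <QByteArray>
#include <QDebug>
#include <QJsonDocument>
#include <QJsonObject>
#include <QJsonValue>
#include <QString>

void parseExampleMaterialMap() {
    // One inline material description and one reference to an external material resource.
    const QByteArray materialMapValue = R"({
        "blue_plastic": { "albedo": [0.1, 0.2, 0.9], "roughness": 0.4 },
        "floor": "https://example.com/materials/wood.json"
    })";

    const QJsonObject materialMap = QJsonDocument::fromJson(materialMapValue).object();
    for (const QString& name : materialMap.keys()) {
        const QJsonValue entry = materialMap[name];
        if (entry.isObject()) {
            qDebug() << name << "-> inline material description";
        } else if (entry.isString()) {
            qDebug() << name << "-> external material resource at" << entry.toString();
        }
    }
}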

View file

@ -0,0 +1,29 @@
//
// Created by Sam Gondelman on 2/7/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ParseMaterialMappingTask_h
#define hifi_ParseMaterialMappingTask_h
#include <QHash>
#include <hfm/HFM.h>
#include "Engine.h"
#include <material-networking/MaterialCache.h>
class ParseMaterialMappingTask {
public:
using Input = QVariantHash;
using Output = MaterialMapping;
using JobModel = baker::Job::ModelIO<ParseMaterialMappingTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_ParseMaterialMappingTask_h

View file

@ -1,6 +1,8 @@
set(TARGET_NAME model-networking)
setup_hifi_library()
link_hifi_libraries(shared shaders networking graphics fbx ktx image gl model-baker)
include_hifi_library_headers(gpu)
link_hifi_libraries(shared shaders networking graphics fbx material-networking model-baker)
include_hifi_library_headers(hfm)
include_hifi_library_headers(task)
include_hifi_library_headers(gpu)
include_hifi_library_headers(image)
include_hifi_library_headers(ktx)

View file

@ -1,60 +0,0 @@
//
// Created by Sam Gondelman on 2/9/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MaterialCache_h
#define hifi_MaterialCache_h
#include <ResourceCache.h>
#include "glm/glm.hpp"
#include "ModelCache.h"
class NetworkMaterialResource : public Resource {
public:
NetworkMaterialResource(const QUrl& url);
QString getType() const override { return "NetworkMaterial"; }
virtual void downloadFinished(const QByteArray& data) override;
typedef struct ParsedMaterials {
uint version { 1 };
std::vector<std::string> names;
std::unordered_map<std::string, std::shared_ptr<NetworkMaterial>> networkMaterials;
void reset() {
version = 1;
names.clear();
networkMaterials.clear();
}
} ParsedMaterials;
ParsedMaterials parsedMaterials;
static ParsedMaterials parseJSONMaterials(const QJsonDocument& materialJSON, const QUrl& baseUrl);
static std::pair<std::string, std::shared_ptr<NetworkMaterial>> parseJSONMaterial(const QJsonObject& materialJSON, const QUrl& baseUrl);
private:
static bool parseJSONColor(const QJsonValue& array, glm::vec3& color, bool& isSRGB);
};
using NetworkMaterialResourcePointer = QSharedPointer<NetworkMaterialResource>;
class MaterialCache : public ResourceCache {
public:
static MaterialCache& instance();
NetworkMaterialResourcePointer getMaterial(const QUrl& url);
protected:
virtual QSharedPointer<Resource> createResource(const QUrl& url) override;
QSharedPointer<Resource> createResourceCopy(const QSharedPointer<Resource>& resource) override;
};
#endif
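This header has moved to the material-networking library; the deletion above removes the old model-networking copy. A hedged usage sketch of the relocated cache, mirroring the load-or-wait pattern Model::applyMaterialMapping uses further down (assumes the material-networking headers are on the include path):

#include <QObject>
#include <QUrl>
#include <material-networking/MaterialCache.h>

// Request a material resource, then read the first parsed material once the download
// has finished. The lambda captures the QSharedPointer by value so the resource stays
// alive until the callback runs.
void fetchMaterialExample(const QUrl& url) {
    auto resource = MaterialCache::instance().getMaterial(url);
    if (!resource) {
        return;
    }
    auto useMaterial = [resource]() {
        if (resource->isFailed() || resource->parsedMaterials.names.empty()) {
            return;
        }
        const auto& name = resource->parsedMaterials.names[0];
        auto networkMaterial = resource->parsedMaterials.networkMaterials[name];
        // ... hand networkMaterial to whatever asked for it ...
        (void)networkMaterial;
    };
    if (resource->isLoaded()) {
        useMaterial();
    } else {
        QObject::connect(resource.data(), &Resource::finished, useMaterial);
    }
}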

View file

@ -76,9 +76,9 @@ namespace std {
template <>
struct hash<GeometryExtra> {
size_t operator()(const GeometryExtra& a) const {
size_t operator()(const GeometryExtra& geometryExtra) const {
size_t result = 0;
hash_combine(result, a.mapping, a.textureBaseUrl, a.combineParts);
hash_combine(result, geometryExtra.mapping, geometryExtra.textureBaseUrl, geometryExtra.combineParts);
return result;
}
};
@ -174,6 +174,7 @@ void GeometryMappingResource::downloadFinished(const QByteArray& data) {
void GeometryMappingResource::onGeometryMappingLoaded(bool success) {
if (success && _geometryResource) {
_hfmModel = _geometryResource->_hfmModel;
_materialMapping = _geometryResource->_materialMapping;
_meshParts = _geometryResource->_meshParts;
_meshes = _geometryResource->_meshes;
_materials = _geometryResource->_materials;
@ -341,6 +342,7 @@ void GeometryDefinitionResource::setGeometryDefinition(HFMModel::Pointer hfmMode
// Assume ownership of the processed HFMModel
_hfmModel = modelBaker.hfmModel;
_materialMapping = modelBaker.materialMapping;
// Copy materials
QHash<QString, size_t> materialIDAtlas;
@ -392,7 +394,7 @@ QSharedPointer<Resource> ModelCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> ModelCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new GeometryDefinitionResource(*resource.staticCast<GeometryDefinitionResource>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new GeometryDefinitionResource(*resource.staticCast<GeometryDefinitionResource>()), &Resource::deleter);
}
GeometryResource::Pointer ModelCache::getGeometryResource(const QUrl& url,
@ -437,6 +439,7 @@ const QVariantMap Geometry::getTextures() const {
// FIXME: The materials should only be copied when modified, but the Model currently caches the original
Geometry::Geometry(const Geometry& geometry) {
_hfmModel = geometry._hfmModel;
_materialMapping = geometry._materialMapping;
_meshes = geometry._meshes;
_meshParts = geometry._meshParts;
@ -556,310 +559,4 @@ void GeometryResourceWatcher::resourceRefreshed() {
// _instance.reset();
}
NetworkMaterial::NetworkMaterial(const NetworkMaterial& m) :
Material(m),
_textures(m._textures),
_albedoTransform(m._albedoTransform),
_lightmapTransform(m._lightmapTransform),
_lightmapParams(m._lightmapParams),
_isOriginal(m._isOriginal)
{}
const QString NetworkMaterial::NO_TEXTURE = QString();
const QString& NetworkMaterial::getTextureName(MapChannel channel) {
if (_textures[channel].texture) {
return _textures[channel].name;
}
return NO_TEXTURE;
}
QUrl NetworkMaterial::getTextureUrl(const QUrl& baseUrl, const HFMTexture& texture) {
if (texture.content.isEmpty()) {
// External file: search relative to the baseUrl, in case filename is relative
return baseUrl.resolved(QUrl(texture.filename));
} else {
// Inlined file: cache under the fbx file to avoid namespace clashes
// NOTE: We cannot resolve the path because filename may be an absolute path
assert(texture.filename.size() > 0);
auto baseUrlStripped = baseUrl.toDisplayString(QUrl::RemoveFragment | QUrl::RemoveQuery | QUrl::RemoveUserInfo);
if (texture.filename.at(0) == '/') {
return baseUrlStripped + texture.filename;
} else {
return baseUrlStripped + '/' + texture.filename;
}
}
}
graphics::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const HFMTexture& hfmTexture,
image::TextureUsage::Type type, MapChannel channel) {
if (baseUrl.isEmpty()) {
return nullptr;
}
const auto url = getTextureUrl(baseUrl, hfmTexture);
const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, hfmTexture.content, hfmTexture.maxNumPixels, hfmTexture.sourceChannel);
_textures[channel] = Texture { hfmTexture.name, texture };
auto map = std::make_shared<graphics::TextureMap>();
if (texture) {
map->setTextureSource(texture->_textureSource);
}
map->setTextureTransform(hfmTexture.transform);
return map;
}
graphics::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& url, image::TextureUsage::Type type, MapChannel channel) {
auto textureCache = DependencyManager::get<TextureCache>();
if (textureCache && !url.isEmpty()) {
auto texture = textureCache->getTexture(url, type);
_textures[channel].texture = texture;
auto map = std::make_shared<graphics::TextureMap>();
if (texture) {
map->setTextureSource(texture->_textureSource);
}
return map;
}
return nullptr;
}
void NetworkMaterial::setAlbedoMap(const QUrl& url, bool useAlphaChannel) {
auto map = fetchTextureMap(url, image::TextureUsage::ALBEDO_TEXTURE, MapChannel::ALBEDO_MAP);
if (map) {
map->setUseAlphaChannel(useAlphaChannel);
setTextureMap(MapChannel::ALBEDO_MAP, map);
}
}
void NetworkMaterial::setNormalMap(const QUrl& url, bool isBumpmap) {
auto map = fetchTextureMap(url, isBumpmap ? image::TextureUsage::BUMP_TEXTURE : image::TextureUsage::NORMAL_TEXTURE, MapChannel::NORMAL_MAP);
if (map) {
setTextureMap(MapChannel::NORMAL_MAP, map);
}
}
void NetworkMaterial::setRoughnessMap(const QUrl& url, bool isGloss) {
auto map = fetchTextureMap(url, isGloss ? image::TextureUsage::GLOSS_TEXTURE : image::TextureUsage::ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
if (map) {
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
}
}
void NetworkMaterial::setMetallicMap(const QUrl& url, bool isSpecular) {
auto map = fetchTextureMap(url, isSpecular ? image::TextureUsage::SPECULAR_TEXTURE : image::TextureUsage::METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
if (map) {
setTextureMap(MapChannel::METALLIC_MAP, map);
}
}
void NetworkMaterial::setOcclusionMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
if (map) {
setTextureMap(MapChannel::OCCLUSION_MAP, map);
}
}
void NetworkMaterial::setEmissiveMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
if (map) {
setTextureMap(MapChannel::EMISSIVE_MAP, map);
}
}
void NetworkMaterial::setScatteringMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
if (map) {
setTextureMap(MapChannel::SCATTERING_MAP, map);
}
}
void NetworkMaterial::setLightmapMap(const QUrl& url) {
auto map = fetchTextureMap(url, image::TextureUsage::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
if (map) {
//map->setTextureTransform(_lightmapTransform);
//map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
setTextureMap(MapChannel::LIGHTMAP_MAP, map);
}
}
NetworkMaterial::NetworkMaterial(const HFMMaterial& material, const QUrl& textureBaseUrl) :
graphics::Material(*material._material),
_textures(MapChannel::NUM_MAP_CHANNELS)
{
_name = material.name.toStdString();
if (!material.albedoTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.albedoTexture, image::TextureUsage::ALBEDO_TEXTURE, MapChannel::ALBEDO_MAP);
if (map) {
_albedoTransform = material.albedoTexture.transform;
map->setTextureTransform(_albedoTransform);
if (!material.opacityTexture.filename.isEmpty()) {
if (material.albedoTexture.filename == material.opacityTexture.filename) {
// Best case scenario, just indicating that the albedo map contains transparency
// TODO: Different albedo/opacity maps are not currently supported
map->setUseAlphaChannel(true);
}
}
}
setTextureMap(MapChannel::ALBEDO_MAP, map);
}
if (!material.normalTexture.filename.isEmpty()) {
auto type = (material.normalTexture.isBumpmap ? image::TextureUsage::BUMP_TEXTURE : image::TextureUsage::NORMAL_TEXTURE);
auto map = fetchTextureMap(textureBaseUrl, material.normalTexture, type, MapChannel::NORMAL_MAP);
setTextureMap(MapChannel::NORMAL_MAP, map);
}
if (!material.roughnessTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.roughnessTexture, image::TextureUsage::ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
} else if (!material.glossTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.glossTexture, image::TextureUsage::GLOSS_TEXTURE, MapChannel::ROUGHNESS_MAP);
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
}
if (!material.metallicTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.metallicTexture, image::TextureUsage::METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
setTextureMap(MapChannel::METALLIC_MAP, map);
} else if (!material.specularTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.specularTexture, image::TextureUsage::SPECULAR_TEXTURE, MapChannel::METALLIC_MAP);
setTextureMap(MapChannel::METALLIC_MAP, map);
}
if (!material.occlusionTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.occlusionTexture, image::TextureUsage::OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
if (map) {
map->setTextureTransform(material.occlusionTexture.transform);
}
setTextureMap(MapChannel::OCCLUSION_MAP, map);
}
if (!material.emissiveTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.emissiveTexture, image::TextureUsage::EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
setTextureMap(MapChannel::EMISSIVE_MAP, map);
}
if (!material.scatteringTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.scatteringTexture, image::TextureUsage::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
setTextureMap(MapChannel::SCATTERING_MAP, map);
}
if (!material.lightmapTexture.filename.isEmpty()) {
auto map = fetchTextureMap(textureBaseUrl, material.lightmapTexture, image::TextureUsage::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
if (map) {
_lightmapTransform = material.lightmapTexture.transform;
_lightmapParams = material.lightmapParams;
map->setTextureTransform(_lightmapTransform);
map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
}
setTextureMap(MapChannel::LIGHTMAP_MAP, map);
}
}
void NetworkMaterial::setTextures(const QVariantMap& textureMap) {
_isOriginal = false;
const auto& albedoName = getTextureName(MapChannel::ALBEDO_MAP);
const auto& normalName = getTextureName(MapChannel::NORMAL_MAP);
const auto& roughnessName = getTextureName(MapChannel::ROUGHNESS_MAP);
const auto& metallicName = getTextureName(MapChannel::METALLIC_MAP);
const auto& occlusionName = getTextureName(MapChannel::OCCLUSION_MAP);
const auto& emissiveName = getTextureName(MapChannel::EMISSIVE_MAP);
const auto& lightmapName = getTextureName(MapChannel::LIGHTMAP_MAP);
const auto& scatteringName = getTextureName(MapChannel::SCATTERING_MAP);
if (!albedoName.isEmpty()) {
auto url = textureMap.contains(albedoName) ? textureMap[albedoName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::ALBEDO_TEXTURE, MapChannel::ALBEDO_MAP);
if (map) {
map->setTextureTransform(_albedoTransform);
// when reassigning the albedo texture we also check for the alpha channel used as opacity
map->setUseAlphaChannel(true);
}
setTextureMap(MapChannel::ALBEDO_MAP, map);
}
if (!normalName.isEmpty()) {
auto url = textureMap.contains(normalName) ? textureMap[normalName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::NORMAL_TEXTURE, MapChannel::NORMAL_MAP);
setTextureMap(MapChannel::NORMAL_MAP, map);
}
if (!roughnessName.isEmpty()) {
auto url = textureMap.contains(roughnessName) ? textureMap[roughnessName].toUrl() : QUrl();
// FIXME: If passing a gloss map instead of a roughmap how do we know?
auto map = fetchTextureMap(url, image::TextureUsage::ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
setTextureMap(MapChannel::ROUGHNESS_MAP, map);
}
if (!metallicName.isEmpty()) {
auto url = textureMap.contains(metallicName) ? textureMap[metallicName].toUrl() : QUrl();
// FIXME: If passing a specular map instead of a metallic how do we know?
auto map = fetchTextureMap(url, image::TextureUsage::METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
setTextureMap(MapChannel::METALLIC_MAP, map);
}
if (!occlusionName.isEmpty()) {
auto url = textureMap.contains(occlusionName) ? textureMap[occlusionName].toUrl() : QUrl();
// FIXME: we need to handle the occlusion map transform here
auto map = fetchTextureMap(url, image::TextureUsage::OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
setTextureMap(MapChannel::OCCLUSION_MAP, map);
}
if (!emissiveName.isEmpty()) {
auto url = textureMap.contains(emissiveName) ? textureMap[emissiveName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
setTextureMap(MapChannel::EMISSIVE_MAP, map);
}
if (!scatteringName.isEmpty()) {
auto url = textureMap.contains(scatteringName) ? textureMap[scatteringName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
setTextureMap(MapChannel::SCATTERING_MAP, map);
}
if (!lightmapName.isEmpty()) {
auto url = textureMap.contains(lightmapName) ? textureMap[lightmapName].toUrl() : QUrl();
auto map = fetchTextureMap(url, image::TextureUsage::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
if (map) {
map->setTextureTransform(_lightmapTransform);
map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
}
setTextureMap(MapChannel::LIGHTMAP_MAP, map);
}
}
bool NetworkMaterial::isMissingTexture() {
for (auto& networkTexture : _textures) {
auto& texture = networkTexture.texture;
if (!texture) {
continue;
}
// Failed texture downloads need to be considered as 'loaded'
// or the object will never fade in
bool finished = texture->isFailed() || (texture->isLoaded() && texture->getGPUTexture() && texture->getGPUTexture()->isDefined());
if (!finished) {
return true;
}
}
return false;
}
void NetworkMaterial::checkResetOpacityMap() {
// If material textures are loaded, check the material translucency
// FIXME: This should not be done here. The opacity map should already be reset in Material::setTextureMap.
// However, currently that code can be called before the albedo map is defined, so resetOpacityMap will fail.
// Geometry::areTexturesLoaded() is called repeatedly until it returns true, so we do the check here for now
const auto& albedoTexture = _textures[NetworkMaterial::MapChannel::ALBEDO_MAP];
if (albedoTexture.texture) {
resetOpacityMap();
}
}
#include "ModelCache.moc"

View file

@ -15,17 +15,13 @@
#include <DependencyManager.h>
#include <ResourceCache.h>
#include <graphics/Material.h>
#include <graphics/Asset.h>
#include "FBXSerializer.h"
#include "TextureCache.h"
#include <material-networking/MaterialCache.h>
#include <material-networking/TextureCache.h>
#include "ModelLoader.h"
// Alias instead of derive to avoid copying
class NetworkTexture;
class NetworkMaterial;
class MeshPart;
class GeometryMappingResource;
@ -49,6 +45,7 @@ public:
bool isHFMModelLoaded() const { return (bool)_hfmModel; }
const HFMModel& getHFMModel() const { return *_hfmModel; }
const MaterialMapping& getMaterialMapping() const { return _materialMapping; }
const GeometryMeshes& getMeshes() const { return *_meshes; }
const std::shared_ptr<NetworkMaterial> getShapeMaterial(int shapeID) const;
@ -64,6 +61,7 @@ protected:
// Shared across all geometries, constant throughout lifetime
std::shared_ptr<const HFMModel> _hfmModel;
MaterialMapping _materialMapping;
std::shared_ptr<const GeometryMeshes> _meshes;
std::shared_ptr<const GeometryMeshParts> _meshParts;
@ -166,59 +164,6 @@ private:
ModelLoader _modelLoader;
};
class NetworkMaterial : public graphics::Material {
public:
using MapChannel = graphics::Material::MapChannel;
NetworkMaterial() : _textures(MapChannel::NUM_MAP_CHANNELS) {}
NetworkMaterial(const HFMMaterial& material, const QUrl& textureBaseUrl);
NetworkMaterial(const NetworkMaterial& material);
void setAlbedoMap(const QUrl& url, bool useAlphaChannel);
void setNormalMap(const QUrl& url, bool isBumpmap);
void setRoughnessMap(const QUrl& url, bool isGloss);
void setMetallicMap(const QUrl& url, bool isSpecular);
void setOcclusionMap(const QUrl& url);
void setEmissiveMap(const QUrl& url);
void setScatteringMap(const QUrl& url);
void setLightmapMap(const QUrl& url);
bool isMissingTexture();
void checkResetOpacityMap();
protected:
friend class Geometry;
class Texture {
public:
QString name;
NetworkTexturePointer texture;
};
using Textures = std::vector<Texture>;
Textures _textures;
static const QString NO_TEXTURE;
const QString& getTextureName(MapChannel channel);
void setTextures(const QVariantMap& textureMap);
const bool& isOriginal() const { return _isOriginal; }
private:
// Helpers for the ctors
QUrl getTextureUrl(const QUrl& baseUrl, const HFMTexture& hfmTexture);
graphics::TextureMapPointer fetchTextureMap(const QUrl& baseUrl, const HFMTexture& hfmTexture,
image::TextureUsage::Type type, MapChannel channel);
graphics::TextureMapPointer fetchTextureMap(const QUrl& url, image::TextureUsage::Type type, MapChannel channel);
Transform _albedoTransform;
Transform _lightmapTransform;
vec2 _lightmapParams;
bool _isOriginal { true };
};
class MeshPart {
public:
MeshPart(int mesh, int part, int material) : meshID { mesh }, partID { part }, materialID { material } {}

View file

@ -8,4 +8,4 @@
#include "ModelNetworkingLogging.h"
Q_LOGGING_CATEGORY(modelnetworking, "hifi.gpu-network")
Q_LOGGING_CATEGORY(modelnetworking, "hifi.gpu-model-network")

View file

@ -360,13 +360,13 @@ class Resource : public QObject {
Q_OBJECT
public:
Resource() : QObject(), _loaded(true) {}
Resource(const Resource& other);
Resource(const QUrl& url);
virtual ~Resource();
virtual QString getType() const { return "Resource"; }
/// Returns the key last used to identify this resource in the unused map.
int getLRUKey() const { return _lruKey; }
@ -375,13 +375,13 @@ public:
/// Sets the load priority for one owner.
virtual void setLoadPriority(const QPointer<QObject>& owner, float priority);
/// Sets a set of priorities at once.
virtual void setLoadPriorities(const QHash<QPointer<QObject>, float>& priorities);
/// Clears the load priority for one owner.
virtual void clearLoadPriority(const QPointer<QObject>& owner);
/// Returns the highest load priority across all owners.
float getLoadPriority();
@ -491,14 +491,14 @@ protected:
QWeakPointer<Resource> _self;
QPointer<ResourceCache> _cache;
qint64 _bytesReceived{ 0 };
qint64 _bytesTotal{ 0 };
qint64 _bytes{ 0 };
qint64 _bytesReceived { 0 };
qint64 _bytesTotal { 0 };
qint64 _bytes { 0 };
int _requestID;
ResourceRequest* _request{ nullptr };
ResourceRequest* _request { nullptr };
size_t _extraHash;
size_t _extraHash { std::numeric_limits<size_t>::max() };
public slots:
void handleDownloadProgress(uint64_t bytesReceived, uint64_t bytesTotal);

View file

@ -57,6 +57,7 @@ protected:
void internalPresent() override;
void hmdPresent() override { throw std::runtime_error("Unused"); }
bool isHmdMounted() const override;
bool alwaysPresent() const override { return true; }
static const char* NAME;
mutable gl::Context* _mainContext{ nullptr };

View file

@ -7,6 +7,7 @@ include_hifi_library_headers(avatars)
include_hifi_library_headers(audio)
include_hifi_library_headers(octree)
include_hifi_library_headers(animation)
include_hifi_library_headers(material-networking)
include_hifi_library_headers(model-networking)
include_hifi_library_headers(image)
include_hifi_library_headers(ktx)

View file

@ -1,3 +1,4 @@
set(TARGET_NAME procedural)
setup_hifi_library()
link_hifi_libraries(shared gpu shaders networking graphics model-networking ktx image)
link_hifi_libraries(shared gpu shaders networking graphics material-networking ktx image)

View file

@ -19,8 +19,8 @@
#include <gpu/Shader.h>
#include <gpu/Pipeline.h>
#include <gpu/Batch.h>
#include <model-networking/ShaderCache.h>
#include <model-networking/TextureCache.h>
#include <material-networking/ShaderCache.h>
#include <material-networking/TextureCache.h>
using UniformLambdas = std::list<std::function<void(gpu::Batch& batch)>>;
const size_t MAX_PROCEDURAL_TEXTURE_CHANNELS{ 4 };

View file

@ -54,5 +54,5 @@ QSharedPointer<Resource> ClipCache::createResource(const QUrl& url) {
}
QSharedPointer<Resource> ClipCache::createResourceCopy(const QSharedPointer<Resource>& resource) {
return QSharedPointer<Resource>(new NetworkClipLoader(*resource.staticCast<NetworkClipLoader>().data()), &Resource::deleter);
return QSharedPointer<Resource>(new NetworkClipLoader(*resource.staticCast<NetworkClipLoader>()), &Resource::deleter);
}

View file

@ -3,7 +3,7 @@ set(TARGET_NAME render-utils)
# pull in the resources.qrc file
qt5_add_resources(QT_RESOURCES_FILE "${CMAKE_CURRENT_SOURCE_DIR}/res/fonts/fonts.qrc")
setup_hifi_library(Gui Network Qml Quick Script)
link_hifi_libraries(shared task ktx gpu shaders graphics graphics-scripting model-networking render animation fbx image procedural)
link_hifi_libraries(shared task ktx gpu shaders graphics graphics-scripting material-networking model-networking render animation fbx image procedural)
include_hifi_library_headers(audio)
include_hifi_library_headers(networking)
include_hifi_library_headers(octree)

View file

@ -756,7 +756,16 @@ scriptable::ScriptableModelBase Model::getScriptableModel() {
int numParts = (int)mesh->getNumParts();
for (int partIndex = 0; partIndex < numParts; partIndex++) {
result.appendMaterial(graphics::MaterialLayer(getGeometry()->getShapeMaterial(shapeID), 0), shapeID, _modelMeshMaterialNames[shapeID]);
auto& materialName = _modelMeshMaterialNames[shapeID];
result.appendMaterial(graphics::MaterialLayer(getGeometry()->getShapeMaterial(shapeID), 0), shapeID, materialName);
auto mappedMaterialIter = _materialMapping.find(shapeID);
if (mappedMaterialIter != _materialMapping.end()) {
auto mappedMaterials = mappedMaterialIter->second;
for (auto& mappedMaterial : mappedMaterials) {
result.appendMaterial(mappedMaterial, shapeID, materialName);
}
}
shapeID++;
}
}
@ -956,6 +965,7 @@ bool Model::addToScene(const render::ScenePointer& scene,
}
if (somethingAdded) {
applyMaterialMapping();
_addedToScene = true;
updateRenderItems();
_needsFixupInScene = false;
@ -973,6 +983,7 @@ void Model::removeFromScene(const render::ScenePointer& scene, render::Transacti
_modelMeshRenderItems.clear();
_modelMeshMaterialNames.clear();
_modelMeshRenderItemShapes.clear();
_priorityMap.clear();
_blendshapeOffsets.clear();
_blendshapeOffsetsInitialized = false;
@ -1519,17 +1530,65 @@ std::set<unsigned int> Model::getMeshIDsFromMaterialID(QString parentMaterialNam
return toReturn;
}
void Model::applyMaterialMapping() {
auto renderItemsKey = _renderItemKeyGlobalFlags;
PrimitiveMode primitiveMode = getPrimitiveMode();
bool useDualQuaternionSkinning = _useDualQuaternionSkinning;
auto& materialMapping = getMaterialMapping();
for (auto& mapping : materialMapping) {
std::set<unsigned int> shapeIDs = getMeshIDsFromMaterialID(QString(mapping.first.c_str()));
auto networkMaterialResource = mapping.second;
if (!networkMaterialResource || shapeIDs.size() == 0) {
continue;
}
auto materialLoaded = [this, networkMaterialResource, shapeIDs, renderItemsKey, primitiveMode, useDualQuaternionSkinning]() {
if (networkMaterialResource->isFailed() || networkMaterialResource->parsedMaterials.names.size() == 0) {
return;
}
render::Transaction transaction;
auto networkMaterial = networkMaterialResource->parsedMaterials.networkMaterials[networkMaterialResource->parsedMaterials.names[0]];
for (auto shapeID : shapeIDs) {
if (shapeID < _modelMeshRenderItemIDs.size()) {
auto itemID = _modelMeshRenderItemIDs[shapeID];
auto meshIndex = _modelMeshRenderItemShapes[shapeID].meshIndex;
bool invalidatePayloadShapeKey = shouldInvalidatePayloadShapeKey(meshIndex);
graphics::MaterialLayer material = graphics::MaterialLayer(networkMaterial, ++_priorityMap[shapeID]);
_materialMapping[shapeID].push_back(material);
transaction.updateItem<ModelMeshPartPayload>(itemID, [material, renderItemsKey,
invalidatePayloadShapeKey, primitiveMode, useDualQuaternionSkinning](ModelMeshPartPayload& data) {
data.addMaterial(material);
// if the material changed, we might need to update our item key or shape key
data.updateKey(renderItemsKey);
data.setShapeKey(invalidatePayloadShapeKey, primitiveMode, useDualQuaternionSkinning);
});
}
}
AbstractViewStateInterface::instance()->getMain3DScene()->enqueueTransaction(transaction);
};
if (networkMaterialResource->isLoaded()) {
materialLoaded();
} else {
connect(networkMaterialResource.data(), &Resource::finished, materialLoaded);
}
}
}
void Model::addMaterial(graphics::MaterialLayer material, const std::string& parentMaterialName) {
std::set<unsigned int> shapeIDs = getMeshIDsFromMaterialID(QString(parentMaterialName.c_str()));
auto renderItemsKey = _renderItemKeyGlobalFlags;
PrimitiveMode primitiveMode = getPrimitiveMode();
bool useDualQuaternionSkinning = _useDualQuaternionSkinning;
render::Transaction transaction;
for (auto shapeID : shapeIDs) {
if (shapeID < _modelMeshRenderItemIDs.size()) {
auto itemID = _modelMeshRenderItemIDs[shapeID];
auto renderItemsKey = _renderItemKeyGlobalFlags;
PrimitiveMode primitiveMode = getPrimitiveMode();
auto meshIndex = _modelMeshRenderItemShapes[shapeID].meshIndex;
bool invalidatePayloadShapeKey = shouldInvalidatePayloadShapeKey(meshIndex);
bool useDualQuaternionSkinning = _useDualQuaternionSkinning;
transaction.updateItem<ModelMeshPartPayload>(itemID, [material, renderItemsKey,
invalidatePayloadShapeKey, primitiveMode, useDualQuaternionSkinning](ModelMeshPartPayload& data) {
data.addMaterial(material);
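A detail worth noting in applyMaterialMapping() above: each shape keeps its own monotonically increasing priority counter (_priorityMap), so every mapped material applied to a shape layers above the ones applied earlier. A stripped-down sketch of that counter pattern (the types here are stand-ins, not the render API):

#include <cstdint>
#include <unordered_map>
#include <vector>

struct MaterialLayerExample {    // stand-in for graphics::MaterialLayer
    int materialId { 0 };
    uint16_t priority { 0 };
};

class ShapeMaterialStack {
public:
    // Each call hands the shape a higher priority, so later materials render "on top".
    void addMaterial(unsigned int shapeID, int materialId) {
        _materialMapping[shapeID].push_back({ materialId, ++_priorityMap[shapeID] });
    }
private:
    std::unordered_map<unsigned int, uint16_t> _priorityMap;                              // per-shape counter
    std::unordered_map<unsigned int, std::vector<MaterialLayerExample>> _materialMapping; // applied layers
};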

View file

@ -183,6 +183,7 @@ public:
/// Provided as a convenience, will crash if !isLoaded()
// And so that getHFMModel() isn't chained everywhere
const HFMModel& getHFMModel() const { assert(isLoaded()); return _renderGeometry->getHFMModel(); }
const MaterialMapping& getMaterialMapping() const { assert(isLoaded()); return _renderGeometry->getMaterialMapping(); }
bool isActive() const { return isLoaded(); }
@ -373,6 +374,10 @@ signals:
protected:
std::unordered_map<unsigned int, quint16> _priorityMap; // only used for materialMapping
std::unordered_map<unsigned int, std::vector<graphics::MaterialLayer>> _materialMapping; // generated during applyMaterialMapping
void applyMaterialMapping();
void setBlendshapeCoefficients(const QVector<float>& coefficients) { _blendshapeCoefficients = coefficients; }
const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }

View file

@ -101,7 +101,7 @@ void DrawLayered3D::run(const RenderContextPointer& renderContext, const Inputs&
}
}
void CompositeHUD::run(const RenderContextPointer& renderContext) {
void CompositeHUD::run(const RenderContextPointer& renderContext, const gpu::FramebufferPointer& inputs) {
assert(renderContext->args);
assert(renderContext->args->_context);
@ -119,6 +119,9 @@ void CompositeHUD::run(const RenderContextPointer& renderContext) {
renderContext->args->getViewFrustum().evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat, true);
if (inputs) {
batch.setFramebuffer(inputs);
}
if (renderContext->args->_hudOperator) {
renderContext->args->_hudOperator(batch, renderContext->args->_hudTexture, renderContext->args->_renderMode == RenderArgs::RenderMode::MIRROR_RENDER_MODE);
}
@ -197,7 +200,73 @@ void Blit::run(const RenderContextPointer& renderContext, const gpu::Framebuffer
});
}
void ExtractFrustums::run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& output) {
void ResolveFramebuffer::run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs) {
RenderArgs* args = renderContext->args;
auto srcFbo = inputs.get0();
auto destFbo = inputs.get1();
if (!destFbo) {
destFbo = args->_blitFramebuffer;
}
outputs = destFbo;
// Check valid src and dest
if (!srcFbo || !destFbo) {
return;
}
// Check valid size for src and dest
auto frameSize(srcFbo->getSize());
if (destFbo->getSize() != frameSize) {
return;
}
gpu::Vec4i rectSrc;
rectSrc.z = frameSize.x;
rectSrc.w = frameSize.y;
gpu::doInBatch("Resolve", args->_context, [&](gpu::Batch& batch) {
batch.blit(srcFbo, rectSrc, destFbo, rectSrc);
});
}
void ResolveNewFramebuffer::run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs) {
RenderArgs* args = renderContext->args;
auto srcFbo = inputs;
outputs.reset();
// Check valid src
if (!srcFbo) {
return;
}
// Check valid source size
auto frameSize(srcFbo->getSize());
// Resizing framebuffers instead of re-building them seems to cause issues with threaded rendering
if (_outputFramebuffer && _outputFramebuffer->getSize() != frameSize) {
_outputFramebuffer.reset();
}
if (!_outputFramebuffer) {
_outputFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("resolvedNew.out"));
auto colorFormat = gpu::Element::COLOR_SRGBA_32;
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
auto colorTexture = gpu::Texture::createRenderBuffer(colorFormat, frameSize.x, frameSize.y, gpu::Texture::SINGLE_MIP, defaultSampler);
_outputFramebuffer->setRenderBuffer(0, colorTexture);
}
gpu::Vec4i rectSrc;
rectSrc.z = frameSize.x;
rectSrc.w = frameSize.y;
gpu::doInBatch("ResolveNew", args->_context, [&](gpu::Batch& batch) { batch.blit(srcFbo, rectSrc, _outputFramebuffer, rectSrc); });
outputs = _outputFramebuffer;
}
void ExtractFrustums::run(const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& output) {
assert(renderContext->args);
assert(renderContext->args->_context);
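The resolve jobs above work because batch.blit() from a multisampled framebuffer into a single-sampled framebuffer of identical size performs the MSAA resolve. For reference, a bare-GL sketch of the equivalent operation (illustrative only; the engine goes through gpu::Batch, and this assumes a GL 3.0+ function loader such as glad is already initialized):

#include <glad/glad.h>   // assumption: whichever GL loader the build uses

void resolveMsaaExample(GLuint srcFbo, GLuint dstFbo, GLint width, GLint height) {
    glBindFramebuffer(GL_READ_FRAMEBUFFER, srcFbo);   // multisampled source
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, dstFbo);   // single-sampled destination
    // Source and destination rectangles must match exactly for a multisample resolve;
    // GL_NEAREST keeps the blit a plain resolve.
    glBlitFramebuffer(0, 0, width, height, 0, 0, width, height,
                      GL_COLOR_BUFFER_BIT, GL_NEAREST);
}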

View file

@ -77,10 +77,12 @@ protected:
class CompositeHUD {
public:
using JobModel = render::Job::Model<CompositeHUD>;
// If specified, the input framebuffer is set on the batch by this job before calling the HUDOperator.
// If not, the current framebuffer is left unchanged.
//using Inputs = gpu::FramebufferPointer;
using JobModel = render::Job::ModelI<CompositeHUD, gpu::FramebufferPointer>;
CompositeHUD() {}
void run(const render::RenderContextPointer& renderContext);
void run(const render::RenderContextPointer& renderContext, const gpu::FramebufferPointer& inputs);
};
class Blit {
@ -90,6 +92,28 @@ public:
void run(const render::RenderContextPointer& renderContext, const gpu::FramebufferPointer& srcFramebuffer);
};
class ResolveFramebuffer {
public:
using Inputs = render::VaryingSet2<gpu::FramebufferPointer, gpu::FramebufferPointer>;
using Outputs = gpu::FramebufferPointer;
using JobModel = render::Job::ModelIO<ResolveFramebuffer, Inputs, Outputs>;
void run(const render::RenderContextPointer& renderContext, const Inputs& source, Outputs& dest);
};
class ResolveNewFramebuffer {
public:
using Inputs = gpu::FramebufferPointer;
using Outputs = gpu::FramebufferPointer;
using JobModel = render::Job::ModelIO<ResolveNewFramebuffer, Inputs, Outputs>;
void run(const render::RenderContextPointer& renderContext, const Inputs& source, Outputs& dest);
private:
gpu::FramebufferPointer _outputFramebuffer;
};
class ExtractFrustums {
public:

View file

@ -257,8 +257,8 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
// Upscale to final resolution
const auto primaryFramebuffer = task.addJob<render::Upsample>("PrimaryBufferUpscale", scaledPrimaryFramebuffer);
// Composite the HUD and HUD layered objects
task.addJob<CompositeHUD>("HUD");
// Composite the HUD and HUD overlays
task.addJob<CompositeHUD>("HUD", primaryFramebuffer);
const auto nullJitter = Varying(glm::vec2(0.0f, 0.0f));
const auto hudOpaquesInputs = DrawLayered3D::Inputs(hudOpaque, lightingModel, nullJitter).asVarying();

View file

@ -126,14 +126,24 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
task.addJob<DebugZoneLighting>("DrawZoneStack", debugZoneInputs);
}
// Just resolve the MSAA
const auto resolveInputs =
ResolveFramebuffer::Inputs(framebuffer, static_cast<gpu::FramebufferPointer>(nullptr)).asVarying();
const auto resolvedFramebuffer = task.addJob<ResolveFramebuffer>("Resolve", resolveInputs);
//auto resolvedFramebuffer = task.addJob<ResolveNewFramebuffer>("Resolve", framebuffer);
#if defined(Q_OS_ANDROID)
#else
// Lighting Buffer ready for tone mapping
// Forward rendering on GLES doesn't support tonemapping to and from the same FBO, so we specify
// the output FBO as null, which causes the tonemapping to target the blit framebuffer
const auto toneMappingInputs = ToneMappingDeferred::Inputs(framebuffer, static_cast<gpu::FramebufferPointer>(nullptr) ).asVarying();
const auto toneMappingInputs = ToneMappingDeferred::Inputs(resolvedFramebuffer, static_cast<gpu::FramebufferPointer>(nullptr)).asVarying();
task.addJob<ToneMappingDeferred>("ToneMapping", toneMappingInputs);
#endif
// Composite the HUD and HUD layered objects
task.addJob<CompositeHUD>("HUD");
// Layered Overlays
// Composite the HUD and HUD overlays
task.addJob<CompositeHUD>("HUD", resolvedFramebuffer);
const auto hudOpaquesInputs = DrawLayered3D::Inputs(hudOpaque, lightingModel, nullJitter).asVarying();
const auto hudTransparentsInputs = DrawLayered3D::Inputs(hudTransparent, lightingModel, nullJitter).asVarying();
@ -145,26 +155,32 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
// task.addJob<Blit>("Blit", framebuffer);
}
void PrepareFramebuffer::configure(const Config& config) {
_numSamples = config.getNumSamples();
}
void PrepareFramebuffer::run(const RenderContextPointer& renderContext, gpu::FramebufferPointer& framebuffer) {
glm::uvec2 frameSize(renderContext->args->_viewport.z, renderContext->args->_viewport.w);
// Resizing framebuffers instead of re-building them seems to cause issues with threaded rendering
if (_framebuffer && _framebuffer->getSize() != frameSize) {
if (_framebuffer && (_framebuffer->getSize() != frameSize || _framebuffer->getNumSamples() != _numSamples)) {
_framebuffer.reset();
}
if (!_framebuffer) {
_framebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("forward"));
int numSamples = _numSamples;
auto colorFormat = gpu::Element::COLOR_SRGBA_32;
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
auto colorTexture =
gpu::Texture::createRenderBuffer(colorFormat, frameSize.x, frameSize.y, gpu::Texture::SINGLE_MIP, defaultSampler);
gpu::Texture::createRenderBufferMultisample(colorFormat, frameSize.x, frameSize.y, numSamples, defaultSampler);
_framebuffer->setRenderBuffer(0, colorTexture);
auto depthFormat = gpu::Element(gpu::SCALAR, gpu::UINT32, gpu::DEPTH_STENCIL); // Depth24_Stencil8 texel format
auto depthTexture =
gpu::Texture::createRenderBuffer(depthFormat, frameSize.x, frameSize.y, gpu::Texture::SINGLE_MIP, defaultSampler);
gpu::Texture::createRenderBufferMultisample(depthFormat, frameSize.x, frameSize.y, numSamples, defaultSampler);
_framebuffer->setDepthStencilBuffer(depthTexture, depthFormat);
}

View file

@ -27,16 +27,37 @@ public:
void build(JobModel& task, const render::Varying& input, render::Varying& output);
};
class PrepareFramebufferConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(int numSamples WRITE setNumSamples READ getNumSamples NOTIFY dirty)
public:
int getNumSamples() const { return numSamples; }
void setNumSamples(int num) {
numSamples = std::max(1, std::min(32, num));
emit dirty();
}
signals:
void dirty();
protected:
int numSamples{ 4 };
};
class PrepareFramebuffer {
public:
using Inputs = gpu::FramebufferPointer;
using JobModel = render::Job::ModelO<PrepareFramebuffer, Inputs>;
using Config = PrepareFramebufferConfig;
using JobModel = render::Job::ModelO<PrepareFramebuffer, Inputs, Config>;
void configure(const Config& config);
void run(const render::RenderContextPointer& renderContext,
gpu::FramebufferPointer& framebuffer);
private:
gpu::FramebufferPointer _framebuffer;
int _numSamples { 4 };
};
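PrepareFramebufferConfig above follows the usual job-config pattern: the sample count is exposed as a Qt property, the setter clamps it to a sane range, and the dirty signal tells the job to rebuild its multisampled target on the next frame. A minimal standalone analogue of that config object (hypothetical class name; needs moc like any Q_OBJECT type):

#include <QObject>
#include <algorithm>

class MsaaConfigExample : public QObject {
    Q_OBJECT
    Q_PROPERTY(int numSamples READ getNumSamples WRITE setNumSamples NOTIFY dirty)
public:
    int getNumSamples() const { return _numSamples; }
    void setNumSamples(int num) {
        num = std::max(1, std::min(32, num));   // clamp, like PrepareFramebufferConfig above
        if (num != _numSamples) {
            _numSamples = num;
            emit dirty();                       // consumer rebuilds its MSAA target next frame
        }
    }
signals:
    void dirty();
private:
    int _numSamples { 4 };
};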
class PrepareForward {

View file

@ -15,7 +15,7 @@
#include <functional>
#include <gpu/Context.h>
#include <model-networking/TextureCache.h>
#include <material-networking/TextureCache.h>
#include <render/DrawTask.h>
#include <shaders/Shaders.h>
#include <graphics/ShaderConstants.h>

View file

@ -1,2 +1,2 @@
// Compatibility
#include <model-networking/TextureCache.h>
#include <material-networking/TextureCache.h>

View file

@ -17,6 +17,6 @@ if (NOT ANDROID)
endif ()
link_hifi_libraries(shared networking octree shaders gpu procedural graphics model-networking ktx recording avatars fbx hfm entities controllers animation audio physics image midi)
link_hifi_libraries(shared networking octree shaders gpu procedural graphics material-networking model-networking ktx recording avatars fbx hfm entities controllers animation audio physics image midi)
# ui includes gl, but link_hifi_libraries does not use transitive includes, so gl must be explicit
include_hifi_library_headers(gl)

View file

@ -11,7 +11,7 @@ if (WIN32 AND (NOT USE_GLES))
setup_hifi_plugin(Gui Qml Multimedia)
link_hifi_libraries(shared task gl qml networking controllers ui
plugins display-plugins ui-plugins input-plugins script-engine
audio-client render-utils graphics shaders gpu render model-networking model-baker hfm fbx ktx image procedural ${PLATFORM_GL_BACKEND})
audio-client render-utils graphics shaders gpu render material-networking model-networking model-baker hfm fbx ktx image procedural ${PLATFORM_GL_BACKEND})
include_hifi_library_headers(octree)
target_openvr()

View file

@ -1210,6 +1210,26 @@ div#grid-section, body#entity-list-body {
width: 200px;
padding-top: 1px;
}
#filter-type-options-buttons {
top: -22px;
width: 224px;
z-index: 2;
background-color: #afafaf;
padding-bottom: 6px;
}
#filter-type-options input[type=button] {
position: relative;
left: 16px;
z-index: 3;
height: 23px;
min-width: 60px;
font-size: 10px;
color: #000;
background: linear-gradient(#afafaf 20%, #808080 100%);
}
#filter-type-options input[type=button]:enabled:hover {
background: linear-gradient(#afafaf 20%, #575757 100%);
}
#filter-search-and-icon {
position: relative;

View file

@ -44,6 +44,10 @@
</div>
<div id="filter-type-options" class="multiselect-options">
<!-- type options with checkbox, icon, and label are added at runtime in entityList -->
<div id="filter-type-options-buttons">
<input type="button" id="filter-type-select-all" value="Select All"/>
<input type="button" id="filter-type-clear-all" value="Clear All"/>
</div>
</div>
</div>
<div id="filter-search-and-icon">

View file

@ -188,6 +188,7 @@ let renameTimeout = null;
let renameLastBlur = null;
let renameLastEntityID = null;
let isRenameFieldBeingMoved = false;
let elFilterTypeInputs = {};
let elEntityTable,
elEntityTableHeader,
@ -201,6 +202,9 @@ let elEntityTable,
elFilterTypeMultiselectBox,
elFilterTypeText,
elFilterTypeOptions,
elFilterTypeOptionsButtons,
elFilterTypeSelectAll,
elFilterTypeClearAll,
elFilterSearch,
elFilterInView,
elFilterRadius,
@ -243,6 +247,9 @@ function loaded() {
elFilterTypeMultiselectBox = document.getElementById("filter-type-multiselect-box");
elFilterTypeText = document.getElementById("filter-type-text");
elFilterTypeOptions = document.getElementById("filter-type-options");
elFilterTypeOptionsButtons = document.getElementById("filter-type-options-buttons");
elFilterTypeSelectAll = document.getElementById('filter-type-select-all');
elFilterTypeClearAll = document.getElementById('filter-type-clear-all');
elFilterSearch = document.getElementById("filter-search");
elFilterInView = document.getElementById("filter-in-view");
elFilterRadius = document.getElementById("filter-radius");
@ -276,6 +283,8 @@ function loaded() {
};
elRefresh.onclick = refreshEntities;
elFilterTypeMultiselectBox.onclick = onToggleTypeDropdown;
elFilterTypeSelectAll.onclick = onSelectAllTypes;
elFilterTypeClearAll.onclick = onClearAllTypes;
elFilterSearch.onkeyup = refreshEntityList;
elFilterSearch.onsearch = refreshEntityList;
elFilterInView.onclick = onToggleFilterInView;
@ -290,13 +299,14 @@ function loaded() {
let elDiv = document.createElement('div');
elDiv.onclick = onToggleTypeFilter;
elFilterTypeOptions.appendChild(elDiv);
elFilterTypeOptions.insertBefore(elDiv, elFilterTypeOptionsButtons);
let elInput = document.createElement('input');
elInput.setAttribute("type", "checkbox");
elInput.setAttribute("id", typeFilterID);
elInput.setAttribute("filterType", type);
elInput.checked = true; // all types are checked initially
elFilterTypeInputs[type] = elInput;
elDiv.appendChild(elInput);
let elLabel = document.createElement('label');
@ -1065,7 +1075,21 @@ function loaded() {
event.stopPropagation();
}
function toggleTypeFilter(elInput, refresh) {
function refreshTypeFilter(refreshList) {
if (typeFilters.length === 0) {
elFilterTypeText.innerText = "No Types";
} else if (typeFilters.length === FILTER_TYPES.length) {
elFilterTypeText.innerText = "All Types";
} else {
elFilterTypeText.innerText = "Types...";
}
if (refreshList) {
refreshEntityList();
}
}
function toggleTypeFilter(elInput, refreshList) {
let type = elInput.getAttribute("filterType");
let typeChecked = elInput.checked;
@ -1076,17 +1100,7 @@ function loaded() {
typeFilters.push(type);
}
if (typeFilters.length === 0) {
elFilterTypeText.innerText = "No Types";
} else if (typeFilters.length === FILTER_TYPES.length) {
elFilterTypeText.innerText = "All Types";
} else {
elFilterTypeText.innerText = "Types...";
}
if (refresh) {
refreshEntityList();
}
refreshTypeFilter(refreshList);
}
function onToggleTypeFilter(event) {
@ -1097,6 +1111,24 @@ function loaded() {
event.stopPropagation();
}
function onSelectAllTypes(event) {
for (let type in elFilterTypeInputs) {
elFilterTypeInputs[type].checked = true;
}
typeFilters = FILTER_TYPES;
refreshTypeFilter(true);
event.stopPropagation();
}
function onClearAllTypes(event) {
for (let type in elFilterTypeInputs) {
elFilterTypeInputs[type].checked = false;
}
typeFilters = [];
refreshTypeFilter(true);
event.stopPropagation();
}
function onBodyClick(event) {
// if clicking anywhere outside of the multiselect dropdowns (since click event bubbled up to onBodyClick and
// propagation wasn't stopped in the toggle type/column callbacks) and the dropdown is open then close it

View file

@ -1,749 +0,0 @@
/* global $, window, MutationObserver */
//
// marketplacesInject.js
//
// Created by David Rowe on 12 Nov 2016.
// Copyright 2016 High Fidelity, Inc.
//
// Injected into marketplace Web pages.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function () {
// Event bridge messages.
var CLARA_IO_DOWNLOAD = "CLARA.IO DOWNLOAD";
var CLARA_IO_STATUS = "CLARA.IO STATUS";
var CLARA_IO_CANCEL_DOWNLOAD = "CLARA.IO CANCEL DOWNLOAD";
var CLARA_IO_CANCELLED_DOWNLOAD = "CLARA.IO CANCELLED DOWNLOAD";
var GOTO_DIRECTORY = "GOTO_DIRECTORY";
var GOTO_MARKETPLACE = "GOTO_MARKETPLACE";
var QUERY_CAN_WRITE_ASSETS = "QUERY_CAN_WRITE_ASSETS";
var CAN_WRITE_ASSETS = "CAN_WRITE_ASSETS";
var WARN_USER_NO_PERMISSIONS = "WARN_USER_NO_PERMISSIONS";
var canWriteAssets = false;
var xmlHttpRequest = null;
var isPreparing = false; // Explicitly track download request status.
var limitedCommerce = false;
var commerceMode = false;
var userIsLoggedIn = false;
var walletNeedsSetup = false;
var marketplaceBaseURL = "https://highfidelity.com";
var messagesWaiting = false;
function injectCommonCode(isDirectoryPage) {
// Supporting styles from marketplaces.css.
// Glyph font family, size, and spacing adjusted because HiFi-Glyphs cannot be used cross-domain.
$("head").append(
'<style>' +
'#marketplace-navigation { font-family: Arial, Helvetica, sans-serif; width: 100%; height: 50px; background: #00b4ef; position: fixed; bottom: 0; z-index: 1000; }' +
'#marketplace-navigation .glyph { margin-left: 10px; margin-right: 3px; font-family: sans-serif; color: #fff; font-size: 24px; line-height: 50px; }' +
'#marketplace-navigation .text { color: #fff; font-size: 16px; line-height: 50px; vertical-align: top; position: relative; top: 1px; }' +
'#marketplace-navigation input#back-button { position: absolute; left: 20px; margin-top: 12px; padding-left: 0; padding-right: 5px; }' +
'#marketplace-navigation input#all-markets { position: absolute; right: 20px; margin-top: 12px; padding-left: 15px; padding-right: 15px; }' +
'#marketplace-navigation .right { position: absolute; right: 20px; }' +
'</style>'
);
// Supporting styles from edit-style.css.
// Font family, size, and position adjusted because Raleway-Bold cannot be used cross-domain.
$("head").append(
'<style>' +
'input[type=button] { font-family: Arial, Helvetica, sans-serif; font-weight: bold; font-size: 12px; text-transform: uppercase; vertical-align: center; height: 28px; min-width: 100px; padding: 0 15px; border-radius: 5px; border: none; color: #fff; background-color: #000; background: linear-gradient(#343434 20%, #000 100%); cursor: pointer; }' +
'input[type=button].white { color: #121212; background-color: #afafaf; background: linear-gradient(#fff 20%, #afafaf 100%); }' +
'input[type=button].white:enabled:hover { background: linear-gradient(#fff, #fff); border: none; }' +
'input[type=button].white:active { background: linear-gradient(#afafaf, #afafaf); }' +
'</style>'
);
// Footer.
var isInitialHiFiPage = location.href === (marketplaceBaseURL + "/marketplace?");
$("body").append(
'<div id="marketplace-navigation">' +
(!isInitialHiFiPage ? '<input id="back-button" type="button" class="white" value="&lt; Back" />' : '') +
(isInitialHiFiPage ? '<span class="glyph">&#x1f6c8;</span> <span class="text">Get items from Clara.io!</span>' : '') +
(!isDirectoryPage ? '<input id="all-markets" type="button" class="white" value="See All Markets" />' : '') +
(isDirectoryPage ? '<span class="right"><span class="glyph">&#x1f6c8;</span> <span class="text">Select a marketplace to explore.</span><span>' : '') +
'</div>'
);
// Footer actions.
$("#back-button").on("click", function () {
if (document.referrer !== "") {
window.history.back();
} else {
var params = { type: GOTO_MARKETPLACE };
var itemIdMatch = location.search.match(/itemId=([^&]*)/);
if (itemIdMatch && itemIdMatch.length === 2) {
params.itemId = itemIdMatch[1];
}
EventBridge.emitWebEvent(JSON.stringify(params));
}
});
$("#all-markets").on("click", function () {
EventBridge.emitWebEvent(JSON.stringify({
type: GOTO_DIRECTORY
}));
});
}
function injectDirectoryCode() {
// Remove e-mail hyperlink.
var letUsKnow = $("#letUsKnow");
letUsKnow.replaceWith(letUsKnow.html());
// Add button links.
$('#exploreClaraMarketplace').on('click', function () {
window.location = "https://clara.io/library?gameCheck=true&public=true";
});
$('#exploreHifiMarketplace').on('click', function () {
EventBridge.emitWebEvent(JSON.stringify({
type: GOTO_MARKETPLACE
}));
});
}
emitWalletSetupEvent = function () {
EventBridge.emitWebEvent(JSON.stringify({
type: "WALLET_SETUP"
}));
};
function maybeAddSetupWalletButton() {
if (!$('body').hasClass("walletsetup-injected") && userIsLoggedIn && walletNeedsSetup) {
$('body').addClass("walletsetup-injected");
var resultsElement = document.getElementById('results');
var setupWalletElement = document.createElement('div');
setupWalletElement.classList.add("row");
setupWalletElement.id = "setupWalletDiv";
setupWalletElement.style = "height:60px;margin:20px 10px 10px 10px;padding:12px 5px;" +
"background-color:#D6F4D8;border-color:#aee9b2;border-width:2px;border-style:solid;border-radius:5px;";
var span = document.createElement('span');
span.style = "margin:10px 5px;color:#1b6420;font-size:15px;";
span.innerHTML = "<a href='#' onclick='emitWalletSetupEvent(); return false;'>Activate your Wallet</a> to get money and shop in Marketplace.";
var xButton = document.createElement('a');
xButton.id = "xButton";
xButton.setAttribute('href', "#");
xButton.style = "width:50px;height:100%;margin:0;color:#ccc;font-size:20px;";
xButton.innerHTML = "X";
xButton.onclick = function () {
setupWalletElement.remove();
dummyRow.remove();
};
setupWalletElement.appendChild(span);
setupWalletElement.appendChild(xButton);
resultsElement.insertBefore(setupWalletElement, resultsElement.firstChild);
// Dummy row for padding
var dummyRow = document.createElement('div');
dummyRow.classList.add("row");
dummyRow.style = "height:15px;";
resultsElement.insertBefore(dummyRow, resultsElement.firstChild);
}
}
function maybeAddLogInButton() {
if (!$('body').hasClass("login-injected") && !userIsLoggedIn) {
$('body').addClass("login-injected");
var resultsElement = document.getElementById('results');
if (!resultsElement) { // If we're on the main page, this will evaluate to `true`
resultsElement = document.getElementById('item-show');
resultsElement.style = 'margin-top:0;';
}
var logInElement = document.createElement('div');
logInElement.classList.add("row");
logInElement.id = "logInDiv";
logInElement.style = "height:60px;margin:20px 10px 10px 10px;padding:5px;" +
"background-color:#D6F4D8;border-color:#aee9b2;border-width:2px;border-style:solid;border-radius:5px;";
var button = document.createElement('a');
button.classList.add("btn");
button.classList.add("btn-default");
button.id = "logInButton";
button.setAttribute('href', "#");
button.innerHTML = "LOG IN";
button.style = "width:80px;height:100%;margin-top:0;margin-left:10px;padding:13px;font-weight:bold;background:linear-gradient(white, #ccc);";
button.onclick = function () {
EventBridge.emitWebEvent(JSON.stringify({
type: "LOGIN"
}));
};
var span = document.createElement('span');
span.style = "margin:10px;color:#1b6420;font-size:15px;";
span.innerHTML = "to get items from the Marketplace.";
var xButton = document.createElement('a');
xButton.id = "xButton";
xButton.setAttribute('href', "#");
xButton.style = "width:50px;height:100%;margin:0;color:#ccc;font-size:20px;";
xButton.innerHTML = "X";
xButton.onclick = function () {
logInElement.remove();
dummyRow.remove();
};
logInElement.appendChild(button);
logInElement.appendChild(span);
logInElement.appendChild(xButton);
resultsElement.insertBefore(logInElement, resultsElement.firstChild);
// Dummy row for padding
var dummyRow = document.createElement('div');
dummyRow.classList.add("row");
dummyRow.style = "height:15px;";
resultsElement.insertBefore(dummyRow, resultsElement.firstChild);
}
}
function changeDropdownMenu() {
var logInOrOutButton = document.createElement('a');
logInOrOutButton.id = "logInOrOutButton";
logInOrOutButton.setAttribute('href', "#");
logInOrOutButton.innerHTML = userIsLoggedIn ? "Log Out" : "Log In";
logInOrOutButton.onclick = function () {
EventBridge.emitWebEvent(JSON.stringify({
type: "LOGIN"
}));
};
$($('.dropdown-menu').find('li')[0]).append(logInOrOutButton);
$('a[href="/marketplace?view=mine"]').each(function () {
$(this).attr('href', '#');
$(this).on('click', function () {
EventBridge.emitWebEvent(JSON.stringify({
type: "MY_ITEMS"
}));
});
});
}
function buyButtonClicked(id, referrer, edition) {
EventBridge.emitWebEvent(JSON.stringify({
type: "CHECKOUT",
itemId: id,
referrer: referrer,
itemEdition: edition
}));
}
function injectBuyButtonOnMainPage() {
var cost;
// Unbind original mouseenter and mouseleave behavior
$('body').off('mouseenter', '#price-or-edit .price');
$('body').off('mouseleave', '#price-or-edit .price');
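// New handlers attached below ("change pricing to GET/BUY on button hover") take over this behavior.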
$('.grid-item').find('#price-or-edit').each(function () {
$(this).css({ "margin-top": "0" });
});
$('.grid-item').find('#price-or-edit').find('a').each(function() {
if ($(this).attr('href') !== '#') { // Guard necessary because of the AJAX nature of Marketplace site
$(this).attr('data-href', $(this).attr('href'));
$(this).attr('href', '#');
}
cost = $(this).closest('.col-xs-3').find('.item-cost').text();
var costInt = parseInt(cost, 10);
$(this).closest('.col-xs-3').prev().attr("class", 'col-xs-6');
$(this).closest('.col-xs-3').attr("class", 'col-xs-6');
var priceElement = $(this).find('.price');
var available = true;
if (priceElement.text() === 'invalidated' ||
priceElement.text() === 'sold out' ||
priceElement.text() === 'not for sale') {
available = false;
priceElement.css({
"padding": "3px 5px",
"height": "40px",
"width": "100px",
"background": "linear-gradient(#a2a2a2, #fefefe)",
"color": "#000",
"font-weight": "600",
"line-height": "20px"
});
} else {
priceElement.css({
"padding": "3px 5px",
"height": "40px",
"background": "linear-gradient(#00b4ef, #0093C5)",
"color": "#FFF",
"font-weight": "600",
"line-height": "34px"
});
}
if (costInt > 0) {
priceElement.css({ "width": "auto" });
if (available) {
priceElement.html('<span class="hifi-glyph hifi-glyph-hfc" style="filter:invert(1);background-size:20px;' +
'width:20px;height:20px;position:relative;top:5px;"></span> ' + cost);
}
priceElement.css({ "min-width": priceElement.width() + 30 });
}
});
// change pricing to GET/BUY on button hover
$('body').on('mouseenter', '#price-or-edit .price', function () {
var $this = $(this);
var buyString = "BUY";
var getString = "GET";
// Protection against the button getting stuck in the "BUY"/"GET" state.
// That happens when the browser gets two MOUSEENTER events before getting a
// MOUSELEAVE event. Also, if not available for sale, just return.
if ($this.text() === buyString ||
$this.text() === getString ||
$this.text() === 'invalidated' ||
$this.text() === 'sold out' ||
$this.text() === 'not for sale' ) {
return;
}
$this.data('initialHtml', $this.html());
var cost = $(this).parent().siblings().text();
if (parseInt(cost, 10) > 0) {
$this.text(buyString);
} else if (parseInt(cost, 10) === 0) {
$this.text(getString);
}
});
$('body').on('mouseleave', '#price-or-edit .price', function () {
var $this = $(this);
$this.html($this.data('initialHtml'));
});
$('.grid-item').find('#price-or-edit').find('a').on('click', function () {
var price = $(this).closest('.grid-item').find('.price').text();
if (price === 'invalidated' ||
price === 'sold out' ||
price === 'not for sale') {
return false;
}
buyButtonClicked($(this).closest('.grid-item').attr('data-item-id'),
"mainPage",
-1);
});
}
function injectUnfocusOnSearch() {
// unfocus input field on search, thus hiding virtual keyboard
$('#search-box').on('submit', function () {
if (document.activeElement) {
document.activeElement.blur();
}
});
}
// fix for 10108 - marketplace category cannot scroll
function injectAddScrollbarToCategories() {
$('#categories-dropdown').on('show.bs.dropdown', function () {
$('body > div.container').css('display', 'none');
$('#categories-dropdown > ul.dropdown-menu').css({ 'overflow': 'auto', 'height': 'calc(100vh - 110px)' });
});
$('#categories-dropdown').on('hide.bs.dropdown', function () {
$('body > div.container').css('display', '');
$('#categories-dropdown > ul.dropdown-menu').css({ 'overflow': '', 'height': '' });
});
}
function injectHiFiCode() {
if (commerceMode) {
maybeAddLogInButton();
maybeAddSetupWalletButton();
if (!$('body').hasClass("code-injected")) {
$('body').addClass("code-injected");
changeDropdownMenu();
var target = document.getElementById('templated-items');
// MutationObserver is necessary because the DOM is populated after the page is loaded.
// We're searching for changes to the element whose ID is '#templated-items' - this is
// the element that gets filled in by the AJAX.
var observer = new MutationObserver(function (mutations) {
mutations.forEach(function (mutation) {
injectBuyButtonOnMainPage();
});
});
var config = { attributes: true, childList: true, characterData: true };
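// This config asks for attribute changes, direct child additions/removals, and character-data changes
// on the target; each reported mutation simply re-runs injectBuyButtonOnMainPage().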
observer.observe(target, config);
// Also inject immediately, in case the content is already present (it will be if the user
// just pressed the "back" button, since that doesn't trigger another AJAX request).
injectBuyButtonOnMainPage();
}
}
injectUnfocusOnSearch();
injectAddScrollbarToCategories();
}
function injectHiFiItemPageCode() {
if (commerceMode) {
maybeAddLogInButton();
if (!$('body').hasClass("code-injected")) {
$('body').addClass("code-injected");
changeDropdownMenu();
var purchaseButton = $('#side-info').find('.btn').first();
var href = purchaseButton.attr('href');
purchaseButton.attr('href', '#');
var cost = $('.item-cost').text();
var costInt = parseInt(cost, 10);
// One of 'invalidated', 'not for sale', 'sold out', or 'available'
var availability = $.trim($('.item-availability').text());
if (limitedCommerce && (costInt > 0)) {
availability = '';
}
var isUpdating = window.location.href.indexOf('edition=') > -1;
// 'not for sale' (NFS) items are buyable only when an artist is stocking their own inventory
var isBuyEnabled = ('available' === availability) || isUpdating || ('not for sale' === availability);
if (isBuyEnabled) {
purchaseButton.css({
"background": "linear-gradient(#00b4ef, #0093C5)",
"color": "#FFF",
"font-weight": "600",
"padding-bottom": "10px"
});
} else {
purchaseButton.css({
"background": "linear-gradient(#a2a2a2, #fefefe)",
"color": "#000",
"font-weight": "600",
"padding-bottom": "10px"
});
}
var urlParams = new URLSearchParams(window.location.search);
if (isUpdating) {
purchaseButton.html('UPDATE FOR FREE');
} else if (availability === 'not for sale') {
purchaseButton.html("Free artist's stock to inventory");
} else if (availability !== 'available') {
purchaseButton.html('UNAVAILABLE ' + (availability ? ('(' + availability + ')') : ''));
} else if (costInt > 0 && $('#side-info').find('#buyItemButton').length === 0) {
purchaseButton.html('PURCHASE <span class="hifi-glyph hifi-glyph-hfc" style="filter:invert(1);background-size:20px;' +
'width:20px;height:20px;position:relative;top:5px;"></span> ' + cost);
}
purchaseButton.on('click', function () {
if (isBuyEnabled) {
buyButtonClicked(window.location.pathname.split("/")[3],
"itemPage",
urlParams.get('edition'));
}
});
}
}
injectUnfocusOnSearch();
}
function updateClaraCode() {
// Have to repeatedly update Clara page because its content can change dynamically without location.href changing.
// Clara library page.
if (location.href.indexOf("clara.io/library") !== -1) {
// Make entries navigate to "Image" view instead of default "Real Time" view.
var elements = $("a.thumbnail");
for (var i = 0, length = elements.length; i < length; i++) {
var value = elements[i].getAttribute("href");
if (value.slice(-6) !== "/image") {
elements[i].setAttribute("href", value + "/image");
}
}
}
// Clara item page.
if (location.href.indexOf("clara.io/view/") !== -1) {
// Make site navigation links retain gameCheck etc. parameters.
var element = $("a[href^=\'/library\']")[0];
var parameters = "?gameCheck=true&public=true";
var href = element.getAttribute("href");
if (href.slice(-parameters.length) !== parameters) {
element.setAttribute("href", href + parameters);
}
// Remove unwanted buttons and replace download options with a single "Download to High Fidelity" button.
var buttons = $("a.embed-button").parent("div");
var downloadFBX;
if (buttons.find("div.btn-group").length > 0) {
buttons.children(".btn-primary, .btn-group, .embed-button").each(function () { this.remove(); });
if ($("#hifi-download-container").length === 0) { // Button hasn't been moved already.
downloadFBX = $('<a class="btn btn-primary"><i class=\'glyphicon glyphicon-download-alt\'></i> Download to High Fidelity</a>');
buttons.prepend(downloadFBX);
downloadFBX[0].addEventListener("click", startAutoDownload);
}
}
// Move the "Download to High Fidelity" button to be more visible on tablet.
if ($("#hifi-download-container").length === 0 && window.innerWidth < 700) {
var downloadContainer = $('<div id="hifi-download-container"></div>');
$(".top-title .col-sm-4").append(downloadContainer);
downloadContainer.append(downloadFBX);
}
}
}
// Automatic download to High Fidelity.
function startAutoDownload() {
// One file request at a time.
if (isPreparing) {
console.log("WARNING: Clara.io FBX: Prepare only one download at a time");
return;
}
// User must be able to write to Asset Server.
if (!canWriteAssets) {
console.log("ERROR: Clara.io FBX: File download cancelled because no permissions to write to Asset Server");
EventBridge.emitWebEvent(JSON.stringify({
type: WARN_USER_NO_PERMISSIONS
}));
return;
}
// User must be logged in.
var loginButton = $("#topnav a[href='/signup']");
if (loginButton.length > 0) {
loginButton[0].click();
return;
}
// Obtain zip file to download for requested asset.
// Reference: https://clara.io/learn/sdk/api/export
//var XMLHTTPREQUEST_URL = "https://clara.io/api/scenes/{uuid}/export/fbx?zip=true&centerScene=true&alignSceneGround=true&fbxUnit=Meter&fbxVersion=7&fbxEmbedTextures=true&imageFormat=WebGL";
// 13 Jan 2017: Specify FBX version 5 and remove some options in order to make Clara.io site more likely to
// be successful in generating zip files.
var XMLHTTPREQUEST_URL = "https://clara.io/api/scenes/{uuid}/export/fbx?fbxUnit=Meter&fbxVersion=5&fbxEmbedTextures=true&imageFormat=WebGL";
var uuid = location.href.match(/\/view\/([a-z0-9\-]*)/)[1];
var url = XMLHTTPREQUEST_URL.replace("{uuid}", uuid);
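// The {uuid} placeholder is filled with the scene id taken from the '/view/<uuid>' segment of the current page URL.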
xmlHttpRequest = new XMLHttpRequest();
var responseTextIndex = 0;
var zipFileURL = "";
xmlHttpRequest.onreadystatechange = function () {
// Messages are appended to responseText; process the new ones.
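// The export endpoint streams 'data: {...}' lines; responseTextIndex marks how much of responseText has
// already been parsed, so each readystatechange only handles the newly appended portion.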
var message = this.responseText.slice(responseTextIndex);
var statusMessage = "";
if (isPreparing) { // Ignore messages in flight after finished/cancelled.
var lines = message.split(/[\n\r]+/);
for (var i = 0, length = lines.length; i < length; i++) {
if (lines[i].slice(0, 5) === "data:") {
// Parse line.
var data;
try {
data = JSON.parse(lines[i].slice(5));
}
catch (e) {
data = {};
}
// Extract zip file URL.
if (data.hasOwnProperty("files") && data.files.length > 0) {
zipFileURL = data.files[0].url;
}
}
}
if (statusMessage !== "") {
// Update the UI with the most recent status message.
EventBridge.emitWebEvent(JSON.stringify({
type: CLARA_IO_STATUS,
status: statusMessage
}));
}
}
responseTextIndex = this.responseText.length;
};
// Note: onprogress doesn't report a computable total length, so it can't be used to determine % complete.
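// For reference only: a percentage readout would require the server to send a Content-Length so that
// event.lengthComputable becomes true (hypothetical sketch, not wired up here):
// xmlHttpRequest.onprogress = function (event) {
// if (event.lengthComputable) {
// console.log("Clara.io FBX: " + Math.round(100 * (event.loaded / event.total)) + "% downloaded");
// }
// };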
xmlHttpRequest.onload = function () {
var statusMessage = "";
if (!isPreparing) {
return;
}
isPreparing = false;
var HTTP_OK = 200;
if (this.status !== HTTP_OK) {
EventBridge.emitWebEvent(JSON.stringify({
type: CLARA_IO_STATUS,
status: statusMessage
}));
} else if (zipFileURL.slice(-4) !== ".zip") {
EventBridge.emitWebEvent(JSON.stringify({
type: CLARA_IO_STATUS,
status: (statusMessage + ": " + zipFileURL)
}));
} else {
EventBridge.emitWebEvent(JSON.stringify({
type: CLARA_IO_DOWNLOAD
}));
}
xmlHttpRequest = null;
};
isPreparing = true;
EventBridge.emitWebEvent(JSON.stringify({
type: CLARA_IO_STATUS,
status: "Initiating download"
}));
xmlHttpRequest.open("POST", url, true);
xmlHttpRequest.setRequestHeader("Accept", "text/event-stream");
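// The 'Accept: text/event-stream' header asks Clara.io to stream progress messages, which the
// onreadystatechange handler above parses incrementally as they arrive.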
xmlHttpRequest.send();
}
function injectClaraCode() {
// Make space for marketplaces footer in Clara pages.
$("head").append(
'<style>' +
'#app { margin-bottom: 135px; }' +
'.footer { bottom: 50px; }' +
'</style>'
);
// Condense space.
$("head").append(
'<style>' +
'div.page-title { line-height: 1.2; font-size: 13px; }' +
'div.page-title-row { padding-bottom: 0; }' +
'</style>'
);
// Move "Download to High Fidelity" button.
$("head").append(
'<style>' +
'#hifi-download-container { position: absolute; top: 6px; right: 16px; }' +
'</style>'
);
// Update code injected per page displayed.
var updateClaraCodeInterval = undefined;
updateClaraCode();
updateClaraCodeInterval = setInterval(function () {
updateClaraCode();
}, 1000);
window.addEventListener("unload", function () {
clearInterval(updateClaraCodeInterval);
updateClaraCodeInterval = undefined;
});
EventBridge.emitWebEvent(JSON.stringify({
type: QUERY_CAN_WRITE_ASSETS
}));
}
function cancelClaraDownload() {
isPreparing = false;
if (xmlHttpRequest) {
xmlHttpRequest.abort();
xmlHttpRequest = null;
console.log("Clara.io FBX: File download cancelled");
EventBridge.emitWebEvent(JSON.stringify({
type: CLARA_IO_CANCELLED_DOWNLOAD
}));
}
}
function injectCode() {
var DIRECTORY = 0;
var HIFI = 1;
var CLARA = 2;
var HIFI_ITEM_PAGE = 3;
var pageType = DIRECTORY;
if (location.href.indexOf(marketplaceBaseURL + "/") !== -1) { pageType = HIFI; }
if (location.href.indexOf("clara.io/") !== -1) { pageType = CLARA; }
if (location.href.indexOf(marketplaceBaseURL + "/marketplace/items/") !== -1) { pageType = HIFI_ITEM_PAGE; }
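// Order matters: an item page URL also matches the HIFI check above, so the more specific
// HIFI_ITEM_PAGE test runs last and overrides it.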
injectCommonCode(pageType === DIRECTORY);
switch (pageType) {
case DIRECTORY:
injectDirectoryCode();
break;
case HIFI:
injectHiFiCode();
break;
case CLARA:
injectClaraCode();
break;
case HIFI_ITEM_PAGE:
injectHiFiItemPageCode();
break;
}
}
function onLoad() {
EventBridge.scriptEventReceived.connect(function (message) {
message = JSON.parse(message);
if (message.type === CAN_WRITE_ASSETS) {
canWriteAssets = message.canWriteAssets;
} else if (message.type === CLARA_IO_CANCEL_DOWNLOAD) {
cancelClaraDownload();
} else if (message.type === "marketplaces") {
if (message.action === "commerceSetting") {
limitedCommerce = !!message.data.limitedCommerce;
commerceMode = !!message.data.commerceMode;
userIsLoggedIn = !!message.data.userIsLoggedIn;
walletNeedsSetup = !!message.data.walletNeedsSetup;
marketplaceBaseURL = message.data.metaverseServerURL;
if (marketplaceBaseURL.indexOf('metaverse.') !== -1) {
marketplaceBaseURL = marketplaceBaseURL.replace('metaverse.', '');
}
messagesWaiting = message.data.messagesWaiting;
injectCode();
}
}
});
// Request commerce setting
// Code is injected into the webpage after the setting comes back.
EventBridge.emitWebEvent(JSON.stringify({
type: "REQUEST_SETTING"
}));
}
// Load / unload.
window.addEventListener("load", onLoad); // More robust to Web site issues than using $(document).ready().
window.addEventListener("page:change", onLoad); // Triggered after Marketplace HTML is changed
}());

View file

@ -285,6 +285,10 @@ SelectionManager = (function() {
properties.localPosition = properties.position;
properties.localRotation = properties.rotation;
}
properties.localVelocity = Vec3.ZERO;
properties.localAngularVelocity = Vec3.ZERO;
delete properties.actionData;
var newEntityID = Entities.addEntity(properties);

View file

@ -20,7 +20,7 @@ endfunction()
if (BUILD_TOOLS)
# Allow different tools for stable builds
if (STABLE_BUILD)
if (RELEASE_TYPE STREQUAL "PRODUCTION")
set(ALL_TOOLS
udt-test
vhacd-util

View file

@ -14,6 +14,7 @@
#include <QJsonObject>
#include <QMessageBox>
#include <QProcess>
#include <QRegularExpression>
#include <quazip5/quazip.h>
#include <quazip5/JlCompress.h>
@ -589,4 +590,4 @@ void AWSInterface::updateAWS() {
QStringList parameters = QStringList() << "-c" << _pythonCommand + " " + filename;
process->start("sh", parameters);
#endif
}
}

View file

@ -40,7 +40,11 @@ Nitpick::Nitpick(QWidget* parent) : QMainWindow(parent) {
_ui.plainTextEdit->setReadOnly(true);
setWindowTitle("Nitpick - v2.1.2");
setWindowTitle("Nitpick - v3.0.0");
clientProfiles << "VR-High" << "Desktop-High" << "Desktop-Low" << "Mobile-Touch" << "VR-Standalone";
_ui.clientProfileComboBox->insertItems(0, clientProfiles);
}
Nitpick::~Nitpick() {
@ -157,7 +161,7 @@ void Nitpick::on_createAllRecursiveScriptsPushbutton_clicked() {
}
void Nitpick::on_createTestsPushbutton_clicked() {
_test->createTests();
_test->createTests(_ui.clientProfileComboBox->currentText());
}
void Nitpick::on_createMDFilePushbutton_clicked() {

View file

@ -126,6 +126,8 @@ private:
bool _isRunningFromCommandline{ false };
void* _caller;
QStringList clientProfiles;
};
#endif // hifi_Nitpick_h

View file

@ -391,7 +391,7 @@ void Test::includeTest(QTextStream& textStream, const QString& testPathname) {
textStream << "Script.include(testsRootPath + \"" << partialPathWithoutTests + "\");" << endl;
}
void Test::createTests() {
void Test::createTests(const QString& clientProfile) {
// Rename files sequentially, as ExpectedResult_00000.png, ExpectedResult_00001.png and so on
// Any existing expected result images will be deleted
QString previousSelection = _snapshotDirectory;

View file

@ -52,7 +52,7 @@ public:
void finishTestsEvaluation();
void createTests();
void createTests(const QString& clientProfile);
void createTestsOutline();

View file

@ -52,8 +52,8 @@
<widget class="QPushButton" name="createTestsPushbutton">
<property name="geometry">
<rect>
<x>210</x>
<y>60</y>
<x>70</x>
<y>40</y>
<width>220</width>
<height>40</height>
</rect>
@ -153,6 +153,16 @@
<string>Create all testAuto scripts</string>
</property>
</widget>
<widget class="QComboBox" name="clientProfileComboBox">
<property name="geometry">
<rect>
<x>320</x>
<y>40</y>
<width>120</width>
<height>40</height>
</rect>
</property>
</widget>
</widget>
<widget class="QWidget" name="tab_4">
<attribute name="title">

View file

@ -14,13 +14,14 @@ using System.Collections.Generic;
class AvatarExporter : MonoBehaviour {
// update version number for every PR that changes this file, also set updated version in README file
static readonly string AVATAR_EXPORTER_VERSION = "0.1";
static readonly string AVATAR_EXPORTER_VERSION = "0.2";
static readonly float HIPS_GROUND_MIN_Y = 0.01f;
static readonly float HIPS_SPINE_CHEST_MIN_SEPARATION = 0.001f;
static readonly int MAXIMUM_USER_BONE_COUNT = 256;
static readonly string EMPTY_WARNING_TEXT = "None";
// TODO: use regex
static readonly string[] RECOMMENDED_UNITY_VERSIONS = new string[] {
"2018.2.12f1",
"2018.2.11f1",
@ -262,8 +263,8 @@ class AvatarExporter : MonoBehaviour {
static string assetPath = "";
static string assetName = "";
static HumanDescription humanDescription;
static Dictionary<string, string> dependencyTextures = new Dictionary<string, string>();
[MenuItem("High Fidelity/Export New Avatar")]
static void ExportNewAvatar() {
ExportSelectedAvatar(false);
@ -301,54 +302,38 @@ class AvatarExporter : MonoBehaviour {
" the Rig section of it's Inspector window.", "Ok");
return;
}
humanDescription = modelImporter.humanDescription;
SetUserBoneInformation();
string textureWarnings = SetTextureDependencies();
// check if we should be substituting a bone for a missing UpperChest mapping
AdjustUpperChestMapping();
// format resulting bone rule failure strings
// consider export-blocking bone rules to be errors and show them in an error dialog,
// and also include any other bone rule failures as warnings in the dialog
// and also include any other bone rule failures plus texture warnings as warnings in the dialog
string boneErrors = "";
string boneWarnings = "";
string warnings = "";
foreach (var failedBoneRule in failedBoneRules) {
if (Array.IndexOf(EXPORT_BLOCKING_BONE_RULES, failedBoneRule.Key) >= 0) {
boneErrors += failedBoneRule.Value + "\n\n";
} else {
boneWarnings += failedBoneRule.Value + "\n\n";
warnings += failedBoneRule.Value + "\n\n";
}
}
warnings += textureWarnings;
if (!string.IsNullOrEmpty(boneErrors)) {
// if there are both errors and warnings then warnings will be displayed with errors in the error dialog
if (!string.IsNullOrEmpty(boneWarnings)) {
if (!string.IsNullOrEmpty(warnings)) {
boneErrors = "Errors:\n\n" + boneErrors;
boneErrors += "Warnings:\n\n" + boneWarnings;
boneErrors += "Warnings:\n\n" + warnings;
}
// remove ending newlines from the last rule failure string that was added above
boneErrors = boneErrors.Substring(0, boneErrors.LastIndexOf("\n\n"));
EditorUtility.DisplayDialog("Error", boneErrors, "Ok");
return;
}
if (!humanoidToUserBoneMappings.ContainsKey("UpperChest")) {
// if parent of Neck is not Chest then map the parent to UpperChest
string neckUserBone;
if (humanoidToUserBoneMappings.TryGetValue("Neck", out neckUserBone)) {
UserBoneInformation neckParentBoneInfo;
string neckParentUserBone = userBoneInfos[neckUserBone].parentName;
if (userBoneInfos.TryGetValue(neckParentUserBone, out neckParentBoneInfo) && !neckParentBoneInfo.HasHumanMapping()) {
neckParentBoneInfo.humanName = "UpperChest";
humanoidToUserBoneMappings.Add("UpperChest", neckParentUserBone);
}
}
// if there is still no UpperChest bone but there is a Chest bone then we remap Chest to UpperChest
string chestUserBone;
if (!humanoidToUserBoneMappings.ContainsKey("UpperChest") &&
humanoidToUserBoneMappings.TryGetValue("Chest", out chestUserBone)) {
userBoneInfos[chestUserBone].humanName = "UpperChest";
humanoidToUserBoneMappings.Remove("Chest");
humanoidToUserBoneMappings.Add("UpperChest", chestUserBone);
}
}
string documentsFolder = System.Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments);
string hifiFolder = documentsFolder + "\\High Fidelity Projects";
@ -407,7 +392,13 @@ class AvatarExporter : MonoBehaviour {
return;
} else if (option == 0) { // Yes - copy model to Unity project
// copy the fbx from the project folder to Unity Assets, overwriting the existing fbx, and re-import it
File.Copy(exportModelPath, assetPath, true);
try {
File.Copy(exportModelPath, assetPath, true);
} catch {
EditorUtility.DisplayDialog("Error", "Failed to copy existing file " + exportModelPath + " to " + assetPath +
". Please check the location and try again.", "Ok");
return;
}
AssetDatabase.ImportAsset(assetPath);
// set model to Humanoid animation type and force another refresh on it to process Humanoid
@ -455,12 +446,20 @@ class AvatarExporter : MonoBehaviour {
}
// write out a new fst file in place of the old file
WriteFST(exportFstPath, projectName);
if (!WriteFST(exportFstPath, projectName)) {
return;
}
// copy any external texture files to the project's texture directory that are considered dependencies of the model
string texturesDirectory = GetTextureDirectory(exportFstPath);
if (!CopyExternalTextures(texturesDirectory)) {
return;
}
// display success dialog with any bone rule warnings
string successDialog = "Avatar successfully updated!";
if (!string.IsNullOrEmpty(boneWarnings)) {
successDialog += "\n\nWarnings:\n" + boneWarnings;
if (!string.IsNullOrEmpty(warnings)) {
successDialog += "\n\nWarnings:\n" + warnings;
}
EditorUtility.DisplayDialog("Success!", successDialog, "Ok");
} else { // Export New Avatar menu option
@ -469,13 +468,13 @@ class AvatarExporter : MonoBehaviour {
Directory.CreateDirectory(hifiFolder);
}
if (string.IsNullOrEmpty(boneWarnings)) {
boneWarnings = EMPTY_WARNING_TEXT;
if (string.IsNullOrEmpty(warnings)) {
warnings = EMPTY_WARNING_TEXT;
}
// open a popup window to enter new export project name and project location
ExportProjectWindow window = ScriptableObject.CreateInstance<ExportProjectWindow>();
window.Init(hifiFolder, boneWarnings, OnExportProjectWindowClose);
window.Init(hifiFolder, warnings, OnExportProjectWindowClose);
}
}
@ -485,28 +484,34 @@ class AvatarExporter : MonoBehaviour {
File.Copy(assetPath, exportModelPath);
// create empty Textures and Scripts folders in the project directory
string texturesDirectory = projectDirectory + "\\textures";
string texturesDirectory = GetTextureDirectory(projectDirectory);
string scriptsDirectory = projectDirectory + "\\scripts";
Directory.CreateDirectory(texturesDirectory);
Directory.CreateDirectory(scriptsDirectory);
// write out the avatar.fst file to the project directory
string exportFstPath = projectDirectory + "avatar.fst";
WriteFST(exportFstPath, projectName);
if (!WriteFST(exportFstPath, projectName)) {
return;
}
// copy any external texture files to the project's texture directory that are considered dependencies of the model
if (!CopyExternalTextures(texturesDirectory)) {
return;
}
// remove any double slashes in texture directory path, display success dialog with any
// bone warnings previously mentioned, and suggest user to copy external textures over
texturesDirectory = texturesDirectory.Replace("\\\\", "\\");
string successDialog = "Avatar successfully exported!\n\n";
if (warnings != EMPTY_WARNING_TEXT) {
successDialog += "Warnings:\n" + warnings;
}
successDialog += "Note: If you are using any external textures with your model, " +
"please copy those textures to " + texturesDirectory;
"please ensure those textures are copied to " + texturesDirectory;
EditorUtility.DisplayDialog("Success!", successDialog, "Ok");
}
static void WriteFST(string exportFstPath, string projectName) {
static bool WriteFST(string exportFstPath, string projectName) {
// write out core fields to top of fst file
try {
File.WriteAllText(exportFstPath, "name = " + projectName + "\ntype = body+head\nscale = 1\nfilename = " +
@ -514,7 +519,7 @@ class AvatarExporter : MonoBehaviour {
} catch {
EditorUtility.DisplayDialog("Error", "Failed to write file " + exportFstPath +
". Please check the location and try again.", "Ok");
return;
return false;
}
// write out joint mappings to fst file
@ -573,6 +578,8 @@ class AvatarExporter : MonoBehaviour {
// open File Explorer to the project directory once finished
System.Diagnostics.Process.Start("explorer.exe", "/select," + exportFstPath);
return true;
}
static void SetUserBoneInformation() {
@ -652,6 +659,29 @@ class AvatarExporter : MonoBehaviour {
return result;
}
static void AdjustUpperChestMapping() {
if (!humanoidToUserBoneMappings.ContainsKey("UpperChest")) {
// if parent of Neck is not Chest then map the parent to UpperChest
string neckUserBone;
if (humanoidToUserBoneMappings.TryGetValue("Neck", out neckUserBone)) {
UserBoneInformation neckParentBoneInfo;
string neckParentUserBone = userBoneInfos[neckUserBone].parentName;
if (userBoneInfos.TryGetValue(neckParentUserBone, out neckParentBoneInfo) && !neckParentBoneInfo.HasHumanMapping()) {
neckParentBoneInfo.humanName = "UpperChest";
humanoidToUserBoneMappings.Add("UpperChest", neckParentUserBone);
}
}
// if there is still no UpperChest bone but there is a Chest bone then we remap Chest to UpperChest
string chestUserBone;
if (!humanoidToUserBoneMappings.ContainsKey("UpperChest") &&
humanoidToUserBoneMappings.TryGetValue("Chest", out chestUserBone)) {
userBoneInfos[chestUserBone].humanName = "UpperChest";
humanoidToUserBoneMappings.Remove("Chest");
humanoidToUserBoneMappings.Add("UpperChest", chestUserBone);
}
}
}
static void SetFailedBoneRules() {
failedBoneRules.Clear();
@ -865,6 +895,50 @@ class AvatarExporter : MonoBehaviour {
appendage + " (" + rightCount + ").");
}
}
static string GetTextureDirectory(string basePath) {
string textureDirectory = Path.GetDirectoryName(basePath) + "\\textures";
textureDirectory = textureDirectory.Replace("\\\\", "\\");
return textureDirectory;
}
static string SetTextureDependencies() {
string textureWarnings = "";
dependencyTextures.Clear();
// build the list of all local asset paths for textures that Unity considers dependencies of the model
// for any textures that have duplicate names, return a string of duplicate name warnings
string[] dependencies = AssetDatabase.GetDependencies(assetPath);
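// AssetDatabase.GetDependencies returns the project-relative paths of every asset the model references
// (recursively), so the Texture2D load below narrows that list down to texture assets only.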
foreach (string dependencyPath in dependencies) {
UnityEngine.Object textureObject = AssetDatabase.LoadAssetAtPath(dependencyPath, typeof(Texture2D));
if (textureObject != null) {
string textureName = Path.GetFileName(dependencyPath);
if (dependencyTextures.ContainsKey(textureName)) {
textureWarnings += "There is more than one texture with the name " + textureName +
" referenced in the selected avatar.\n\n";
} else {
dependencyTextures.Add(textureName, dependencyPath);
}
}
}
return textureWarnings;
}
static bool CopyExternalTextures(string texturesDirectory) {
// copy the found dependency textures from the local asset folder to the textures folder in the target export project
foreach (var texture in dependencyTextures) {
string targetPath = texturesDirectory + "\\" + texture.Key;
try {
File.Copy(texture.Value, targetPath, true);
} catch {
EditorUtility.DisplayDialog("Error", "Failed to copy texture file " + texture.Value + " to " + targetPath +
". Please check the location and try again.", "Ok");
return false;
}
}
return true;
}
}
class ExportProjectWindow : EditorWindow {

View file

@ -1,6 +1,6 @@
High Fidelity, Inc.
Avatar Exporter
Version 0.1
Version 0.2
Note: It is recommended to use Unity versions between 2017.4.17f1 and 2018.2.12f1 for this Avatar Exporter.