Merge branch 'master' into fix/create/missingStartMiddleFinishTooltips

This commit is contained in:
Thijs Wenker 2018-12-06 20:48:42 +01:00 committed by GitHub
commit d25b5f054b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
230 changed files with 3433 additions and 2428 deletions

View file

@ -656,6 +656,8 @@ void Agent::queryAvatars() {
ViewFrustum view;
view.setPosition(scriptedAvatar->getWorldPosition());
view.setOrientation(scriptedAvatar->getHeadOrientation());
view.setProjection(DEFAULT_FIELD_OF_VIEW_DEGREES, DEFAULT_ASPECT_RATIO,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP);
view.calculate();
ConicalViewFrustum conicalView { view };
@ -876,18 +878,30 @@ void Agent::aboutToFinish() {
DependencyManager::destroy<AudioInjectorManager>();
// destroy all other created dependencies
DependencyManager::destroy<ScriptCache>();
DependencyManager::destroy<ResourceCacheSharedItems>();
DependencyManager::destroy<SoundCacheScriptingInterface>();
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AudioScriptingInterface>();
DependencyManager::destroy<RecordingScriptingInterface>();
DependencyManager::destroy<AnimationCacheScriptingInterface>();
DependencyManager::destroy<EntityScriptingInterface>();
DependencyManager::destroy<ResourceScriptingInterface>();
DependencyManager::destroy<UserActivityLoggerScriptingInterface>();
DependencyManager::destroy<ScriptCache>();
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AnimationCache>();
DependencyManager::destroy<recording::Deck>();
DependencyManager::destroy<recording::Recorder>();
DependencyManager::destroy<recording::ClipCache>();
DependencyManager::destroy<AvatarHashMap>();
DependencyManager::destroy<AssignmentParentFinder>();
DependencyManager::destroy<MessagesClient>();
DependencyManager::destroy<ResourceManager>();
DependencyManager::destroy<ResourceCacheSharedItems>();
// drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
// this ensures that the ScriptEngine goes down before ScriptEngines
_scriptEngine.clear();

View file

@ -129,17 +129,12 @@ void AssignmentClient::stopAssignmentClient() {
QThread* currentAssignmentThread = _currentAssignment->thread();
// ask the current assignment to stop
BLOCKING_INVOKE_METHOD(_currentAssignment, "stop");
QMetaObject::invokeMethod(_currentAssignment, "stop");
// ask the current assignment to delete itself on its thread
_currentAssignment->deleteLater();
// when this thread is destroyed we don't need to run our assignment complete method
disconnect(currentAssignmentThread, &QThread::destroyed, this, &AssignmentClient::assignmentCompleted);
// wait on the thread from that assignment - it will be gone once the current assignment deletes
currentAssignmentThread->quit();
currentAssignmentThread->wait();
auto PROCESS_EVENTS_INTERVAL_MS = 100;
while (!currentAssignmentThread->wait(PROCESS_EVENTS_INTERVAL_MS)) {
QCoreApplication::processEvents();
}
}
}

View file

@ -337,6 +337,13 @@ void AudioMixerClientData::removeAgentAvatarAudioStream() {
if (it != _audioStreams.end()) {
_audioStreams.erase(it);
// Clear mixing structures so that they get recreated with up to date
// data if the stream comes back
setHasReceivedFirstMix(false);
_streams.skipped.clear();
_streams.inactive.clear();
_streams.active.clear();
}
}

View file

@ -152,6 +152,8 @@ void AvatarMixerClientData::processSetTraitsMessage(ReceivedMessage& message,
if (packetTraitVersion > instanceVersionRef) {
if (traitSize == AvatarTraits::DELETED_TRAIT_SIZE) {
_avatar->processDeletedTraitInstance(traitType, instanceID);
// Mixer doesn't need deleted IDs.
_avatar->getAndClearRecentlyDetachedIDs();
// to track a deleted instance but keep version information
// the avatar mixer uses the negative value of the sent version

View file

@ -21,7 +21,7 @@
#include <GLMHelpers.h>
ScriptableAvatar::ScriptableAvatar() {
_clientTraitsHandler = std::unique_ptr<ClientTraitsHandler>(new ClientTraitsHandler(this));
_clientTraitsHandler.reset(new ClientTraitsHandler(this));
}
QByteArray ScriptableAvatar::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {

View file

@ -583,15 +583,29 @@ void EntityScriptServer::handleOctreePacket(QSharedPointer<ReceivedMessage> mess
void EntityScriptServer::aboutToFinish() {
shutdownScriptEngine();
DependencyManager::get<EntityScriptingInterface>()->setEntityTree(nullptr);
DependencyManager::get<ResourceManager>()->cleanup();
DependencyManager::destroy<AudioScriptingInterface>();
DependencyManager::destroy<SoundCacheScriptingInterface>();
DependencyManager::destroy<ResourceScriptingInterface>();
DependencyManager::destroy<EntityScriptingInterface>();
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<ScriptCache>();
DependencyManager::destroy<ResourceManager>();
DependencyManager::destroy<ResourceCacheSharedItems>();
DependencyManager::destroy<MessagesClient>();
DependencyManager::destroy<AssignmentDynamicFactory>();
DependencyManager::destroy<AssignmentParentFinder>();
DependencyManager::destroy<AvatarHashMap>();
DependencyManager::get<ResourceManager>()->cleanup();
DependencyManager::destroy<PluginManager>();
DependencyManager::destroy<ResourceScriptingInterface>();
DependencyManager::destroy<EntityScriptingInterface>();
// cleanup the AudioInjectorManager (and any still running injectors)
DependencyManager::destroy<AudioInjectorManager>();

View file

@ -16,9 +16,9 @@ if (HIFI_MEMORY_DEBUGGING)
if (UNIX)
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# for clang on Linux
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer -shared-libasan -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -shared-libasan -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -shared-libasan -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
else ()
# for gcc on Linux
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined -fsanitize=address -U_FORTIFY_SOURCE -fno-stack-protector -fno-omit-frame-pointer")

View file

@ -23,6 +23,8 @@ Item {
width: root.pane.width
property bool failAfterSignUp: false
onWidthChanged: d.resize();
function login() {
flavorText.visible = false
mainTextContainer.visible = false
@ -127,7 +129,7 @@ Item {
Column {
id: form
width: parent.width
onHeightChanged: d.resize(); onWidthChanged: d.resize();
onHeightChanged: d.resize();
anchors {
top: mainTextContainer.bottom

View file

@ -44,14 +44,14 @@ Rectangle {
onPasswordChanged: {
var use3DKeyboard = (typeof MenuInterface === "undefined") ? false : MenuInterface.isOptionChecked("Use 3D Keyboard");
var use3DKeyboard = (typeof KeyboardScriptingInterface === "undefined") ? false : KeyboardScriptingInterface.use3DKeyboard;
if (use3DKeyboard) {
KeyboardScriptingInterface.password = password;
}
}
onRaisedChanged: {
var use3DKeyboard = (typeof MenuInterface === "undefined") ? false : MenuInterface.isOptionChecked("Use 3D Keyboard");
var use3DKeyboard = (typeof KeyboardScriptingInterface === "undefined") ? false : KeyboardScriptingInterface.use3DKeyboard;
if (!use3DKeyboard) {
keyboardBase.height = raised ? raisedHeight : 0;
keyboardBase.visible = raised;

View file

@ -70,8 +70,8 @@ OriginalDesktop.Desktop {
anchors.horizontalCenter: settings.constrainToolbarToCenterX ? desktop.horizontalCenter : undefined;
// Literal 50 is overwritten by settings from previous session, and sysToolbar.x comes from settings when not constrained.
x: sysToolbar.x
buttonModel: tablet.buttons;
shown: tablet.toolbarMode;
buttonModel: tablet ? tablet.buttons : null;
shown: tablet ? tablet.toolbarMode : false;
}
Settings {

View file

@ -19,6 +19,7 @@ import controlsUit 1.0 as HifiControlsUit
import "../../../controls" as HifiControls
import "../wallet" as HifiWallet
import "../common" as HifiCommerceCommon
import "../.." as HifiCommon
// references XXX from root context
@ -31,6 +32,7 @@ Rectangle {
property bool ownershipStatusReceived: false;
property bool balanceReceived: false;
property bool availableUpdatesReceived: false;
property bool itemInfoReceived: false;
property string baseItemName: "";
property string itemName;
property string itemId;
@ -181,11 +183,14 @@ Rectangle {
onItemIdChanged: {
root.ownershipStatusReceived = false;
root.itemInfoReceived = false;
Commerce.alreadyOwned(root.itemId);
root.availableUpdatesReceived = false;
root.currentUpdatesPage = 1;
Commerce.getAvailableUpdates(root.itemId);
itemPreviewImage.source = "https://hifi-metaverse.s3-us-west-1.amazonaws.com/marketplace/previews/" + itemId + "/thumbnail/hifi-mp-" + itemId + ".jpg";
var MARKETPLACE_API_URL = Account.metaverseServerURL + "/api/v1/marketplace/items/";
http.request({uri: MARKETPLACE_API_URL + root.itemId}, updateCheckoutQMLFromHTTP);
}
onItemTypeChanged: {
@ -279,6 +284,7 @@ Rectangle {
ownershipStatusReceived = false;
balanceReceived = false;
availableUpdatesReceived = false;
itemInfoReceived = false;
Commerce.getWalletStatus();
}
}
@ -355,7 +361,7 @@ Rectangle {
Rectangle {
id: loading;
z: 997;
visible: !root.ownershipStatusReceived || !root.balanceReceived || !root.availableUpdatesReceived;
visible: !root.ownershipStatusReceived || !root.balanceReceived || !root.availableUpdatesReceived || !root.itemInfoReceived;
anchors.fill: parent;
color: hifi.colors.white;
@ -1063,10 +1069,33 @@ Rectangle {
}
}
}
HifiCommon.RootHttpRequest {
id: http;
}
//
// FUNCTION DEFINITIONS START
//
function updateCheckoutQMLFromHTTP(error, result) {
if (error || (result.status !== 'success')) {
// The QML will display a loading spinner forever if the user is stuck here.
console.log("Error in Checkout.qml when getting marketplace item info!");
return;
}
root.itemInfoReceived = true;
root.itemName = result.data.title;
root.itemPrice = result.data.cost;
root.itemHref = Account.metaverseServerURL + result.data.path;
root.itemAuthor = result.data.creator;
root.itemType = result.data.item_type || "unknown";
itemPreviewImage.source = result.data.thumbnail_url;
refreshBuyUI();
}
//
// Function Name: fromScript()
//
@ -1080,18 +1109,24 @@ Rectangle {
// Description:
// Called when a message is received from a script.
//
function fromScript(message) {
switch (message.method) {
case 'updateCheckoutQML':
root.itemId = message.params.itemId;
root.itemName = message.params.itemName.trim();
root.itemPrice = message.params.itemPrice;
root.itemHref = message.params.itemHref;
root.referrer = message.params.referrer;
root.itemAuthor = message.params.itemAuthor;
case 'updateCheckoutQMLItemID':
if (!message.params.itemId) {
console.log("A message with method 'updateCheckoutQMLItemID' was sent without an itemId!");
return;
}
// If we end up following the referrer (i.e. in case the wallet "isn't set up" or the user cancels),
// we want the user to be placed back on the individual item's page - thus we set the
// default of the referrer in this case to "itemPage".
root.referrer = message.params.referrer || "itemPage";
root.itemEdition = message.params.itemEdition || -1;
root.itemType = message.params.itemType || "unknown";
refreshBuyUI();
root.itemId = message.params.itemId;
break;
case 'http.response':
http.handleHttpResponse(message);
break;
default:
console.log('Checkout.qml: Unrecognized message from marketplaces.js');

View file

@ -25,14 +25,15 @@ Item {
id: root;
property bool isDisplayingNearby; // as opposed to 'connections'
// true when sending to 'nearby' or when a script raises the send asset dialog
property bool multiLineDisplay;
property string displayName;
property string userName;
property string profilePic;
property string textColor: hifi.colors.white;
Item {
visible: root.isDisplayingNearby;
visible: root.multiLineDisplay;
anchors.fill: parent;
RalewaySemiBold {
@ -71,7 +72,7 @@ Item {
}
Item {
visible: !root.isDisplayingNearby;
visible: !root.multiLineDisplay;
anchors.fill: parent;
Image {

View file

@ -39,7 +39,7 @@ Item {
property string sendingPubliclyEffectImage;
property var http;
property var listModelName;
property var keyboardContainer: nil;
property var keyboardContainer;
// This object is always used in a popup or full-screen Wallet section.
// This MouseArea is used to prevent a user from being
@ -56,7 +56,7 @@ Item {
// Background
Rectangle {
z: 1;
visible: root.assetName !== "" && sendAssetStep.visible;
visible: root.assetCertID !== "" && sendAssetStep.referrer !== "payIn" && sendAssetStep.visible;
anchors.top: parent.top;
anchors.topMargin: root.parentAppTitleBarHeight;
anchors.left: parent.left;
@ -84,7 +84,6 @@ Item {
if (sendPubliclyCheckbox.checked && sendAssetStep.referrer === "nearby") {
sendSignalToParent({
method: 'sendAsset_sendPublicly',
assetName: root.assetName,
recipient: sendAssetStep.selectedRecipientNodeID,
amount: parseInt(amountTextField.text),
effectImage: root.sendingPubliclyEffectImage
@ -108,6 +107,14 @@ Item {
root.nextActiveView = 'paymentFailure';
}
}
onCertificateInfoResult: {
if (result.status !== 'success') {
console.log("Failed to get certificate info", result.data.message);
} else {
root.assetName = result.data.marketplace_item_name;
}
}
}
Connections {
@ -155,7 +162,7 @@ Item {
Item {
id: userInfoContainer;
visible: root.assetName === "";
visible: root.assetCertID === "";
anchors.top: parent.top;
anchors.left: parent.left;
anchors.right: parent.right;
@ -251,7 +258,7 @@ Item {
LinearGradient {
anchors.fill: parent;
visible: root.assetName === "";
visible: root.assetCertID === "";
start: Qt.point(0, 0);
end: Qt.point(0, height);
gradient: Gradient {
@ -262,7 +269,7 @@ Item {
RalewaySemiBold {
id: sendAssetText;
text: root.assetName === "" ? "Send Money To:" : "Gift \"" + root.assetName + "\" To:";
text: root.assetCertID === "" ? "Send Money To:" : "Gift \"" + root.assetName + "\" To:";
// Anchors
anchors.top: parent.top;
anchors.topMargin: 26;
@ -405,7 +412,7 @@ Item {
HifiModels.PSFListModel {
id: connectionsModel;
http: root.http;
listModelName: root.listModelName;
listModelName: root.listModelName || "";
endpoint: "/api/v1/users?filter=connections";
itemsPerPage: 9;
listView: connectionsList;
@ -441,7 +448,7 @@ Item {
HiFiGlyphs {
id: closeGlyphButton_connections;
text: hifi.glyphs.close;
color: root.assetName === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
color: root.assetCertID === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
size: 26;
anchors.top: parent.top;
anchors.topMargin: 10;
@ -684,7 +691,7 @@ Item {
HiFiGlyphs {
id: closeGlyphButton_nearby;
text: hifi.glyphs.close;
color: root.assetName === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
color: root.assetCertID === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
size: 26;
anchors.top: parent.top;
anchors.topMargin: 10;
@ -760,7 +767,7 @@ Item {
RalewaySemiBold {
id: sendToText;
text: root.assetName === "" ? "Send to:" : "Gift to:";
text: root.assetCertID === "" ? "Send to:" : "Gift to:";
// Anchors
anchors.top: parent.top;
anchors.topMargin: 36;
@ -853,7 +860,7 @@ Item {
id: sendAssetStep;
z: 996;
property string referrer; // either "connections" or "nearby"
property string referrer; // either "connections", "nearby", or "payIn"
property string selectedRecipientNodeID;
property string selectedRecipientDisplayName;
property string selectedRecipientUserName;
@ -865,7 +872,8 @@ Item {
RalewaySemiBold {
id: sendAssetText_sendAssetStep;
text: root.assetName === "" ? "Send Money" : "Gift \"" + root.assetName + "\"";
text: sendAssetStep.referrer === "payIn" && root.assetCertID !== "" ? "Send \"" + root.assetName + "\":" :
(root.assetCertID === "" ? "Send Money To:" : "Gift \"" + root.assetName + "\" To:");
// Anchors
anchors.top: parent.top;
anchors.topMargin: 26;
@ -878,7 +886,7 @@ Item {
// Text size
size: 22;
// Style
color: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
}
Item {
@ -893,7 +901,7 @@ Item {
RalewaySemiBold {
id: sendToText_sendAssetStep;
text: root.assetName === "" ? "Send to:" : "Gift to:";
text: (root.assetCertID === "" || sendAssetStep.referrer === "payIn") ? "Send to:" : "Gift to:";
// Anchors
anchors.top: parent.top;
anchors.left: parent.left;
@ -902,7 +910,7 @@ Item {
// Text size
size: 18;
// Style
color: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
verticalAlignment: Text.AlignVCenter;
}
@ -912,25 +920,26 @@ Item {
anchors.right: changeButton.left;
anchors.rightMargin: 12;
height: parent.height;
textColor: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
textColor: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
displayName: sendAssetStep.selectedRecipientDisplayName;
userName: sendAssetStep.selectedRecipientUserName;
profilePic: sendAssetStep.selectedRecipientProfilePic !== "" ? ((0 === sendAssetStep.selectedRecipientProfilePic.indexOf("http")) ?
sendAssetStep.selectedRecipientProfilePic : (Account.metaverseServerURL + sendAssetStep.selectedRecipientProfilePic)) : "";
isDisplayingNearby: sendAssetStep.referrer === "nearby";
multiLineDisplay: sendAssetStep.referrer === "nearby" || sendAssetStep.referrer === "payIn";
}
// "CHANGE" button
HifiControlsUit.Button {
id: changeButton;
color: root.assetName === "" ? hifi.buttons.none : hifi.buttons.white;
color: root.assetCertID === "" ? hifi.buttons.none : hifi.buttons.white;
colorScheme: hifi.colorSchemes.dark;
anchors.right: parent.right;
anchors.verticalCenter: parent.verticalCenter;
height: 35;
width: 100;
text: "CHANGE";
visible: sendAssetStep.referrer !== "payIn";
onClicked: {
if (sendAssetStep.referrer === "connections") {
root.nextActiveView = "chooseRecipientConnection";
@ -944,7 +953,7 @@ Item {
Item {
id: amountContainer;
visible: root.assetName === "";
visible: root.assetCertID === "";
anchors.top: sendToContainer.bottom;
anchors.topMargin: 2;
anchors.left: parent.left;
@ -970,8 +979,9 @@ Item {
HifiControlsUit.TextField {
id: amountTextField;
text: root.assetName === "" ? "" : "1";
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
readOnly: sendAssetStep.referrer === "payIn";
text: root.assetCertID === "" ? "" : "1";
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
inputMethodHints: Qt.ImhDigitsOnly;
// Anchors
anchors.verticalCenter: parent.verticalCenter;
@ -980,8 +990,8 @@ Item {
height: 50;
// Style
leftPermanentGlyph: hifi.glyphs.hfc;
activeFocusOnPress: true;
activeFocusOnTab: true;
activeFocusOnPress: !amountTextField.readOnly;
activeFocusOnTab: !amountTextField.readOnly;
validator: IntValidator { bottom: 0; }
@ -1071,6 +1081,7 @@ Item {
TextArea {
id: optionalMessage;
readOnly: sendAssetStep.referrer === "payIn";
property int maximumLength: 72;
property string previousText: text;
placeholderText: "<i>Optional Public Message (" + maximumLength + " character limit)</i>";
@ -1081,12 +1092,13 @@ Item {
// Style
background: Rectangle {
anchors.fill: parent;
color: root.assetName === "" ? (optionalMessage.activeFocus ? hifi.colors.black : hifi.colors.baseGrayShadow) :
color: (root.assetCertID === "" || sendAssetStep.referrer === "payIn") ?
(optionalMessage.activeFocus && !optionalMessage.readOnly ? hifi.colors.black : hifi.colors.baseGrayShadow) :
(optionalMessage.activeFocus ? "#EFEFEF" : "#EEEEEE");
border.width: optionalMessage.activeFocus ? 1 : 0;
border.color: optionalMessage.activeFocus ? hifi.colors.primaryHighlight : hifi.colors.textFieldLightBackground;
border.width: optionalMessage.activeFocus && !optionalMessage.readOnly ? 1 : 0;
border.color: optionalMessage.activeFocus && !optionalMessage.readOnly ? hifi.colors.primaryHighlight : hifi.colors.textFieldLightBackground;
}
color: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
textFormat: TextEdit.PlainText;
wrapMode: TextEdit.Wrap;
activeFocusOnPress: true;
@ -1122,7 +1134,8 @@ Item {
// Text size
size: 16;
// Style
color: optionalMessage.text.length === optionalMessage.maximumLength ? "#ea89a5" : (root.assetName === "" ? hifi.colors.lightGrayText : hifi.colors.baseGrayHighlight);
color: optionalMessage.text.length === optionalMessage.maximumLength ? "#ea89a5" :
(root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.lightGrayText : hifi.colors.baseGrayHighlight);
verticalAlignment: Text.AlignTop;
horizontalAlignment: Text.AlignRight;
}
@ -1167,7 +1180,7 @@ Item {
parent.color = hifi.colors.blueAccent;
}
onClicked: {
lightboxPopup.titleText = (root.assetName === "" ? "Send Effect" : "Gift Effect");
lightboxPopup.titleText = (root.assetCertID === "" ? "Send Effect" : "Gift Effect");
lightboxPopup.bodyImageSource = "sendAsset/images/send-money-effect-sm.jpg"; // Path relative to CommerceLightbox.qml
lightboxPopup.bodyText = "Enabling this option will create a particle effect between you and " +
"your recipient that is visible to everyone nearby.";
@ -1196,7 +1209,7 @@ Item {
// "CANCEL" button
HifiControlsUit.Button {
id: cancelButton_sendAssetStep;
color: root.assetName === "" ? hifi.buttons.noneBorderlessWhite : hifi.buttons.noneBorderlessGray;
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.buttons.noneBorderlessWhite : hifi.buttons.noneBorderlessGray;
colorScheme: hifi.colorSchemes.dark;
anchors.right: sendButton.left;
anchors.rightMargin: 24;
@ -1205,8 +1218,12 @@ Item {
width: 100;
text: "CANCEL";
onClicked: {
resetSendAssetData();
root.nextActiveView = "sendAssetHome";
if (sendAssetStep.referrer === "payIn") {
sendToScript({method: "closeSendAsset"});
} else {
resetSendAssetData();
root.nextActiveView = "sendAssetHome";
}
}
}
@ -1214,7 +1231,7 @@ Item {
HifiControlsUit.Button {
id: sendButton;
color: hifi.buttons.blue;
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
anchors.right: parent.right;
anchors.rightMargin: 0;
anchors.verticalCenter: parent.verticalCenter;
@ -1222,11 +1239,11 @@ Item {
width: 100;
text: "SUBMIT";
onClicked: {
if (root.assetName === "" && parseInt(amountTextField.text) > parseInt(balanceText.text)) {
if (root.assetCertID === "" && parseInt(amountTextField.text) > parseInt(balanceText.text)) {
amountTextField.focus = true;
amountTextField.error = true;
amountTextFieldError.text = "<i>amount exceeds available funds</i>";
} else if (root.assetName === "" && (amountTextField.text === "" || parseInt(amountTextField.text) < 1)) {
} else if (root.assetCertID === "" && (amountTextField.text === "" || parseInt(amountTextField.text) < 1)) {
amountTextField.focus = true;
amountTextField.error = true;
amountTextFieldError.text = "<i>invalid amount</i>";
@ -1236,7 +1253,7 @@ Item {
root.isCurrentlySendingAsset = true;
amountTextField.focus = false;
optionalMessage.focus = false;
if (sendAssetStep.referrer === "connections") {
if (sendAssetStep.referrer === "connections" || sendAssetStep.referrer === "payIn") {
Commerce.transferAssetToUsername(sendAssetStep.selectedRecipientUserName,
root.assetCertID,
parseInt(amountTextField.text),
@ -1317,18 +1334,18 @@ Item {
Rectangle {
anchors.top: parent.top;
anchors.topMargin: root.assetName === "" ? 15 : 125;
anchors.topMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 125;
anchors.left: parent.left;
anchors.leftMargin: root.assetName === "" ? 15 : 50;
anchors.leftMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
anchors.right: parent.right;
anchors.rightMargin: root.assetName === "" ? 15 : 50;
anchors.rightMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
anchors.bottom: parent.bottom;
anchors.bottomMargin: root.assetName === "" ? 15 : 125;
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 125;
color: "#FFFFFF";
RalewaySemiBold {
id: paymentSentText;
text: root.assetName === "" ? "Payment Sent" : "Gift Sent";
text: root.assetCertID === "" ? "Payment Sent" : (sendAssetStep.referrer === "payIn" ? "Item Sent" : "Gift Sent");
// Anchors
anchors.top: parent.top;
anchors.topMargin: 26;
@ -1346,7 +1363,7 @@ Item {
HiFiGlyphs {
id: closeGlyphButton_paymentSuccess;
visible: root.assetName === "";
visible: root.assetCertID === "" && sendAssetStep.referrer !== "payIn";
text: hifi.glyphs.close;
color: hifi.colors.lightGrayText;
size: 26;
@ -1364,10 +1381,14 @@ Item {
parent.text = hifi.glyphs.close;
}
onClicked: {
root.nextActiveView = "sendAssetHome";
resetSendAssetData();
if (root.assetName !== "") {
sendSignalToParent({method: "closeSendAsset"});
if (sendAssetStep.referrer === "payIn") {
sendToScript({method: "closeSendAsset"});
} else {
root.nextActiveView = "sendAssetHome";
resetSendAssetData();
if (root.assetName !== "") {
sendSignalToParent({method: "closeSendAsset"});
}
}
}
}
@ -1409,14 +1430,14 @@ Item {
userName: sendAssetStep.selectedRecipientUserName;
profilePic: sendAssetStep.selectedRecipientProfilePic !== "" ? ((0 === sendAssetStep.selectedRecipientProfilePic.indexOf("http")) ?
sendAssetStep.selectedRecipientProfilePic : (Account.metaverseServerURL + sendAssetStep.selectedRecipientProfilePic)) : "";
isDisplayingNearby: sendAssetStep.referrer === "nearby";
multiLineDisplay: sendAssetStep.referrer === "nearby" || sendAssetStep.referrer === "payIn";
}
}
Item {
id: giftContainer_paymentSuccess;
visible: root.assetName !== "";
visible: root.assetCertID !== "";
anchors.top: sendToContainer_paymentSuccess.bottom;
anchors.topMargin: 8;
anchors.left: parent.left;
@ -1427,7 +1448,7 @@ Item {
RalewaySemiBold {
id: gift_paymentSuccess;
text: "Gift:";
text: sendAssetStep.referrer === "payIn" ? "Item:" : "Gift:";
// Anchors
anchors.top: parent.top;
anchors.left: parent.left;
@ -1458,7 +1479,7 @@ Item {
Item {
id: amountContainer_paymentSuccess;
visible: root.assetName === "";
visible: root.assetCertID === "";
anchors.top: sendToContainer_paymentSuccess.bottom;
anchors.topMargin: 16;
anchors.left: parent.left;
@ -1513,7 +1534,7 @@ Item {
RalewaySemiBold {
id: optionalMessage_paymentSuccess;
visible: root.assetName === "";
visible: root.assetCertID === "";
text: optionalMessage.text;
// Anchors
anchors.top: amountContainer_paymentSuccess.visible ? amountContainer_paymentSuccess.bottom : sendToContainer_paymentSuccess.bottom;
@ -1535,18 +1556,22 @@ Item {
HifiControlsUit.Button {
id: closeButton;
color: hifi.buttons.blue;
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
anchors.horizontalCenter: parent.horizontalCenter;
anchors.bottom: parent.bottom;
anchors.bottomMargin: root.assetName === "" ? 80 : 30;
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 80 : 30;
height: 50;
width: 120;
text: "Close";
onClicked: {
root.nextActiveView = "sendAssetHome";
resetSendAssetData();
if (root.assetName !== "") {
sendSignalToParent({method: "closeSendAsset"});
if (sendAssetStep.referrer === "payIn") {
sendToScript({method: "closeSendAsset"});
} else {
root.nextActiveView = "sendAssetHome";
resetSendAssetData();
if (root.assetName !== "") {
sendSignalToParent({method: "closeSendAsset"});
}
}
}
}
@ -1574,18 +1599,18 @@ Item {
Rectangle {
anchors.top: parent.top;
anchors.topMargin: root.assetName === "" ? 15 : 150;
anchors.topMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 150;
anchors.left: parent.left;
anchors.leftMargin: root.assetName === "" ? 15 : 50;
anchors.leftMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
anchors.right: parent.right;
anchors.rightMargin: root.assetName === "" ? 15 : 50;
anchors.rightMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
anchors.bottom: parent.bottom;
anchors.bottomMargin: root.assetName === "" ? 15 : 300;
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 300;
color: "#FFFFFF";
RalewaySemiBold {
id: paymentFailureText;
text: root.assetName === "" ? "Payment Failed" : "Failed";
text: root.assetCertID === "" && sendAssetStep.referrer !== "payIn" ? "Payment Failed" : "Failed";
// Anchors
anchors.top: parent.top;
anchors.topMargin: 26;
@ -1603,7 +1628,7 @@ Item {
HiFiGlyphs {
id: closeGlyphButton_paymentFailure;
visible: root.assetName === "";
visible: root.assetCertID === "" && sendAssetStep.referrer !== "payIn";
text: hifi.glyphs.close;
color: hifi.colors.lightGrayText;
size: 26;
@ -1632,7 +1657,8 @@ Item {
RalewaySemiBold {
id: paymentFailureDetailText;
text: "The recipient you specified was unable to receive your " + (root.assetName === "" ? "payment." : "gift.");
text: "The recipient you specified was unable to receive your " +
(root.assetCertID === "" ? "payment." : (sendAssetStep.referrer === "payIn" ? "item." : "gift."));
anchors.top: paymentFailureText.bottom;
anchors.topMargin: 20;
anchors.left: parent.left;
@ -1650,7 +1676,7 @@ Item {
Item {
id: sendToContainer_paymentFailure;
visible: root.assetName === "";
visible: root.assetCertID === "" || sendAssetStep.referrer === "payIn";
anchors.top: paymentFailureDetailText.bottom;
anchors.topMargin: 8;
anchors.left: parent.left;
@ -1685,13 +1711,13 @@ Item {
userName: sendAssetStep.selectedRecipientUserName;
profilePic: sendAssetStep.selectedRecipientProfilePic !== "" ? ((0 === sendAssetStep.selectedRecipientProfilePic.indexOf("http")) ?
sendAssetStep.selectedRecipientProfilePic : (Account.metaverseServerURL + sendAssetStep.selectedRecipientProfilePic)) : "";
isDisplayingNearby: sendAssetStep.referrer === "nearby";
multiLineDisplay: sendAssetStep.referrer === "nearby" || sendAssetStep.referrer === "payIn";
}
}
Item {
id: amountContainer_paymentFailure;
visible: root.assetName === "";
visible: root.assetCertID === "";
anchors.top: sendToContainer_paymentFailure.bottom;
anchors.topMargin: 16;
anchors.left: parent.left;
@ -1746,7 +1772,7 @@ Item {
RalewaySemiBold {
id: optionalMessage_paymentFailure;
visible: root.assetName === "";
visible: root.assetCertID === "" || sendAssetStep.referrer === "payIn";
text: optionalMessage.text;
// Anchors
anchors.top: amountContainer_paymentFailure.visible ? amountContainer_paymentFailure.bottom : sendToContainer_paymentFailure.bottom;
@ -1768,19 +1794,23 @@ Item {
HifiControlsUit.Button {
id: closeButton_paymentFailure;
color: hifi.buttons.noneBorderless;
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
anchors.right: retryButton_paymentFailure.left;
anchors.rightMargin: 12;
anchors.bottom: parent.bottom;
anchors.bottomMargin: root.assetName === "" ? 80 : 30;
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 80 : 30;
height: 50;
width: 120;
text: "Cancel";
onClicked: {
root.nextActiveView = "sendAssetHome";
resetSendAssetData();
if (root.assetName !== "") {
sendSignalToParent({method: "closeSendAsset"});
if (sendAssetStep.referrer === "payIn") {
sendToScript({method: "closeSendAsset"});
} else {
root.nextActiveView = "sendAssetHome";
resetSendAssetData();
if (root.assetName !== "") {
sendSignalToParent({method: "closeSendAsset"});
}
}
}
}
@ -1789,17 +1819,17 @@ Item {
HifiControlsUit.Button {
id: retryButton_paymentFailure;
color: hifi.buttons.blue;
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
anchors.right: parent.right;
anchors.rightMargin: 12;
anchors.bottom: parent.bottom;
anchors.bottomMargin: root.assetName === "" ? 80 : 30;
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 80 : 30;
height: 50;
width: 120;
text: "Retry";
onClicked: {
root.isCurrentlySendingAsset = true;
if (sendAssetStep.referrer === "connections") {
if (sendAssetStep.referrer === "connections" || sendAssetStep.referrer === "payIn") {
Commerce.transferAssetToUsername(sendAssetStep.selectedRecipientUserName,
root.assetCertID,
parseInt(amountTextField.text),
@ -1866,11 +1896,32 @@ Item {
case 'updateSelectedRecipientUsername':
sendAssetStep.selectedRecipientUserName = message.userName;
break;
case 'updateSendAssetQML':
root.assetName = "";
root.assetCertID = message.assetCertID || "";
if (root.assetCertID === "") {
amountTextField.text = message.amount || 1;
} else {
amountTextField.text = "";
Commerce.certificateInfo(root.assetCertID);
}
sendAssetStep.referrer = "payIn";
sendAssetStep.selectedRecipientNodeID = "";
sendAssetStep.selectedRecipientDisplayName = "Determined by script:";
sendAssetStep.selectedRecipientUserName = message.username;
optionalMessage.text = message.message || "No Message Provided";
root.nextActiveView = "sendAssetStep";
break;
case 'inspectionCertificate_resetCert':
// NOP
break;
default:
console.log('SendAsset: Unrecognized message from wallet.js');
}
}
signal sendSignalToParent(var msg);
signal sendToScript(var message);
//
// FUNCTION DEFINITIONS END
//

View file

@ -207,12 +207,12 @@ Flickable {
width: 112
label: "Y Offset"
suffix: " cm"
minimumValue: -10
minimumValue: -50
maximumValue: 50
realStepSize: 1
realValue: -5
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -223,14 +223,14 @@ Flickable {
id: headZOffset
width: 112
label: "Z Offset"
minimumValue: -10
minimumValue: -50
maximumValue: 50
realStepSize: 1
decimals: 1
suffix: " cm"
realValue: -5
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -319,11 +319,12 @@ Flickable {
width: 112
suffix: " cm"
label: "Y Offset"
minimumValue: -10
minimumValue: -30
maximumValue: 30
realStepSize: 1
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -335,12 +336,13 @@ Flickable {
width: 112
label: "Z Offset"
suffix: " cm"
minimumValue: -10
minimumValue: -30
maximumValue: 30
realStepSize: 1
decimals: 1
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -574,7 +576,7 @@ Flickable {
colorScheme: hifi.colorSchemes.dark
realValue: 33.0
onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -592,7 +594,7 @@ Flickable {
colorScheme: hifi.colorSchemes.dark
realValue: 48
onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -771,7 +773,7 @@ Flickable {
realStepSize: 1.0
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
onRealValueChanged: {
calibrationTimer.interval = realValue * 1000;
openVrConfiguration.countDown = realValue;
numberAnimation.duration = calibrationTimer.interval;
@ -977,6 +979,13 @@ Flickable {
var configurationType = settings["trackerConfiguration"];
displayTrackerConfiguration(configurationType);
// default offset for user wearing puck on the center of their forehead.
headYOffset.realValue = 4; // (cm), puck is above the head joint.
headZOffset.realValue = 8; // (cm), puck is in front of the head joint.
// defaults for user wearing the pucks on the backs of their palms.
handYOffset.realValue = 8; // (cm), puck is past the the hand joint. (set this to zero if puck is on the wrist)
handZOffset.realValue = -4; // (cm), puck is on above hand joint.
var HmdHead = settings["HMDHead"];
var viveController = settings["handController"];

View file

@ -61,7 +61,7 @@ Item {
RalewaySemiBold {
text: Account.loggedIn ? qsTr("Log out") : qsTr("Log in")
horizontalAlignment: Text.AlignRight
anchors.right: parent.right
Layout.alignment: Qt.AlignRight
font.pixelSize: 20
color: "#afafaf"
}
@ -71,7 +71,7 @@ Item {
height: Account.loggedIn ? parent.height/2 - parent.spacing/2 : 0
text: Account.loggedIn ? "[" + tabletRoot.usernameShort + "]" : ""
horizontalAlignment: Text.AlignRight
anchors.right: parent.right
Layout.alignment: Qt.AlignRight
font.pixelSize: 20
color: "#afafaf"
}
@ -115,9 +115,9 @@ Item {
property int previousIndex: -1
Repeater {
id: pageRepeater
model: Math.ceil(tabletProxy.buttons.rowCount() / TabletEnums.ButtonsOnPage)
model: tabletProxy != null ? Math.ceil(tabletProxy.buttons.rowCount() / TabletEnums.ButtonsOnPage) : 0
onItemAdded: {
item.proxyModel.sourceModel = tabletProxy.buttons;
item.proxyModel.sourceModel = tabletProxy != null ? tabletProxy.buttons : null;
item.proxyModel.pageIndex = index;
}

View file

@ -87,7 +87,6 @@
#include <FramebufferCache.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/gl/GLBackend.h>
#include <InfoView.h>
#include <input-plugins/InputPlugin.h>
#include <controllers/UserInputMapper.h>
@ -122,8 +121,6 @@
#include <plugins/SteamClientPlugin.h>
#include <plugins/InputConfiguration.h>
#include <RecordingScriptingInterface.h>
#include <UpdateSceneTask.h>
#include <RenderViewTask.h>
#include <render/EngineStats.h>
#include <SecondaryCamera.h>
#include <ResourceCache.h>
@ -264,54 +261,7 @@ extern "C" {
#include "AndroidHelper.h"
#endif
enum ApplicationEvent {
// Execute a lambda function
Lambda = QEvent::User + 1,
// Trigger the next render
Render,
// Trigger the next idle
Idle,
};
class RenderEventHandler : public QObject {
using Parent = QObject;
Q_OBJECT
public:
RenderEventHandler() {
// Transfer to a new thread
moveToNewNamedThread(this, "RenderThread", [](QThread* renderThread) {
hifi::qt::addBlockingForbiddenThread("Render", renderThread);
qApp->_lastTimeRendered.start();
}, std::bind(&RenderEventHandler::initialize, this), QThread::HighestPriority);
}
private:
void initialize() {
setObjectName("Render");
PROFILE_SET_THREAD_NAME("Render");
setCrashAnnotation("render_thread_id", std::to_string((size_t)QThread::currentThreadId()));
}
void render() {
if (qApp->shouldPaint()) {
qApp->paintGL();
}
}
bool event(QEvent* event) override {
switch ((int)event->type()) {
case ApplicationEvent::Render:
render();
qApp->_pendingRenderEvent.store(false);
return true;
default:
break;
}
return Parent::event(event);
}
};
#include "graphics/RenderEventHandler.h"
Q_LOGGING_CATEGORY(trace_app_input_mouse, "trace.app.input.mouse")
@ -374,8 +324,6 @@ static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SI
static const int ENTITY_SERVER_ADDED_TIMEOUT = 5000;
static const int ENTITY_SERVER_CONNECTION_TIMEOUT = 5000;
static const uint32_t INVALID_FRAME = UINT32_MAX;
static const float INITIAL_QUERY_RADIUS = 10.0f; // priority radius for entities before physics enabled
static const QString DESKTOP_LOCATION = QStandardPaths::writableLocation(QStandardPaths::DesktopLocation);
@ -927,7 +875,9 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
#endif
DependencyManager::set<DiscoverabilityManager>();
DependencyManager::set<SceneScriptingInterface>();
#if !defined(DISABLE_QML)
DependencyManager::set<OffscreenUi>();
#endif
DependencyManager::set<Midi>();
DependencyManager::set<PathUtils>();
DependencyManager::set<InterfaceDynamicFactory>();
@ -1000,6 +950,14 @@ const bool DEFAULT_PREFER_AVATAR_FINGER_OVER_STYLUS = false;
const QString DEFAULT_CURSOR_NAME = "DEFAULT";
const bool DEFAULT_MINI_TABLET_ENABLED = true;
QSharedPointer<OffscreenUi> getOffscreenUI() {
#if !defined(DISABLE_QML)
return DependencyManager::get<OffscreenUi>();
#else
return nullptr;
#endif
}
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bool runningMarkerExisted) :
QApplication(argc, argv),
_window(new MainWindow(desktop())),
@ -1604,7 +1562,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
auto userInputMapper = DependencyManager::get<UserInputMapper>();
connect(userInputMapper.data(), &UserInputMapper::actionEvent, [this](int action, float state) {
using namespace controller;
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
{
auto actionEnum = static_cast<Action>(action);
@ -1743,7 +1700,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return qApp->getMyAvatar()->getCharacterController()->onGround() ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_NAV_FOCUSED, []() -> float {
return DependencyManager::get<OffscreenUi>()->navigationFocused() ? 1 : 0;
auto offscreenUi = getOffscreenUI();
return offscreenUi ? (offscreenUi->navigationFocused() ? 1 : 0) : 0;
});
_applicationStateDevice->setInputVariant(STATE_PLATFORM_WINDOWS, []() -> float {
#if defined(Q_OS_WIN)
@ -1809,9 +1767,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// Now that we've loaded the menu and thus switched to the previous display plugin
// we can unlock the desktop repositioning code, since all the positions will be
// relative to the desktop size for this plugin
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto offscreenUi = getOffscreenUI();
connect(offscreenUi.data(), &OffscreenUi::desktopReady, []() {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto offscreenUi = getOffscreenUI();
auto desktop = offscreenUi->getDesktop();
if (desktop) {
desktop->setProperty("repositionLocked", false);
@ -2050,7 +2008,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
auto displayPlugin = qApp->getActiveDisplayPlugin();
properties["render_rate"] = _renderLoopCounter.rate();
properties["render_rate"] = getRenderLoopRate();
properties["target_render_rate"] = getTargetRenderFrameRate();
properties["present_rate"] = displayPlugin->presentRate();
properties["new_frame_present_rate"] = displayPlugin->newFramePresentRate();
@ -2362,7 +2320,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
DependencyManager::get<Keyboard>()->createKeyboard();
_pendingIdleEvent = false;
_pendingRenderEvent = false;
_graphicsEngine.startup();
qCDebug(interfaceapp) << "Metaverse session ID is" << uuidStringWithoutCurlyBraces(accountManager->getSessionID());
@ -2372,6 +2330,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(&AndroidHelper::instance(), &AndroidHelper::enterForeground, this, &Application::enterForeground);
AndroidHelper::instance().notifyLoadComplete();
#else
#if !defined(DISABLE_QML)
// Do not show login dialog if requested not to on the command line
const QString HIFI_NO_LOGIN_COMMAND_LINE_KEY = "--no-login-suggestion";
int index = arguments().indexOf(HIFI_NO_LOGIN_COMMAND_LINE_KEY);
@ -2396,6 +2355,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
checkLoginTimer->start();
}
#endif
#endif
}
void Application::updateVerboseLogging() {
@ -2546,7 +2506,9 @@ void Application::onAboutToQuit() {
DependencyManager::get<CloseEventSender>()->startThread();
// Hide Running Scripts dialog so that it gets destroyed in an orderly manner; prevents warnings at shutdown.
DependencyManager::get<OffscreenUi>()->hide("RunningScripts");
#if !defined(DISABLE_QML)
getOffscreenUI()->hide("RunningScripts");
#endif
_aboutToQuit = true;
@ -2558,6 +2520,8 @@ void Application::cleanupBeforeQuit() {
QString webengineRemoteDebugging = QProcessEnvironment::systemEnvironment().value("QTWEBENGINE_REMOTE_DEBUGGING", "false");
qCDebug(interfaceapp) << "QTWEBENGINE_REMOTE_DEBUGGING =" << webengineRemoteDebugging;
DependencyManager::prepareToExit();
if (tracing::enabled()) {
auto tracer = DependencyManager::get<tracing::Tracer>();
tracer->stopTracing();
@ -2626,11 +2590,6 @@ void Application::cleanupBeforeQuit() {
// Cleanup all overlays after the scripts, as scripts might add more
_overlays.cleanupAllOverlays();
// The cleanup process enqueues the transactions but does not process them. Calling this here will force the actual
// removal of the items.
// See https://highfidelity.fogbugz.com/f/cases/5328
_main3DScene->enqueueFrame(); // flush all the transactions
_main3DScene->processTransactionQueue(); // process and apply deletions
// first stop all timers directly or by invokeMethod
// depending on what thread they run in
@ -2645,7 +2604,6 @@ void Application::cleanupBeforeQuit() {
}
_window->saveGeometry();
_gpuContext->shutdown();
// Destroy third party processes after scripts have finished using them.
#ifdef HAVE_DDE
@ -2670,6 +2628,7 @@ void Application::cleanupBeforeQuit() {
// destroy Audio so it and its threads have a chance to go down safely
// this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "stop");
DependencyManager::destroy<AudioClient>();
DependencyManager::destroy<AudioInjectorManager>();
DependencyManager::destroy<AudioScriptingInterface>();
@ -2702,10 +2661,9 @@ Application::~Application() {
_shapeManager.collectGarbage();
assert(_shapeManager.getNumShapes() == 0);
// shutdown render engine
_main3DScene = nullptr;
_renderEngine = nullptr;
// shutdown graphics engine
_graphicsEngine.shutdown();
_gameWorkload.shutdown();
DependencyManager::destroy<Preferences>();
@ -2761,10 +2719,8 @@ Application::~Application() {
// quit the thread used by the closure event sender
closeEventSender->thread()->quit();
// Can't log to file passed this point, FileLogger about to be deleted
// Can't log to file past this point, FileLogger about to be deleted
qInstallMessageHandler(LogHandler::verboseMessageHandler);
_renderEventHandler->deleteLater();
}
void Application::initializeGL() {
@ -2784,10 +2740,10 @@ void Application::initializeGL() {
_glWidget->windowHandle()->setFormat(getDefaultOpenGLSurfaceFormat());
// When loading QtWebEngineWidgets, it creates a global share context on startup.
// We have to account for this possibility by checking here for an existing
// We have to account for this possibility by checking here for an existing
// global share context
auto globalShareContext = qt_gl_global_share_context();
#if !defined(DISABLE_QML)
// Build a shared canvas / context for the Chromium processes
if (!globalShareContext) {
@ -2854,26 +2810,13 @@ void Application::initializeGL() {
#endif
_renderEventHandler = new RenderEventHandler();
// Build an offscreen GL context for the main thread.
_glWidget->makeCurrent();
glClearColor(0.2f, 0.2f, 0.2f, 1);
glClear(GL_COLOR_BUFFER_BIT);
_glWidget->swapBuffers();
// Create the GPU backend
// Requires the window context, because that's what's used in the actual rendering
// and the GPU backend will make things like the VAO which cannot be shared across
// contexts
_glWidget->makeCurrent();
gpu::Context::init<gpu::gl::GLBackend>();
_glWidget->makeCurrent();
_gpuContext = std::make_shared<gpu::Context>();
DependencyManager::get<TextureCache>()->setGPUContext(_gpuContext);
_graphicsEngine.initializeGPU(_glWidget);
}
static const QString SPLASH_SKYBOX{ "{\"ProceduralEntity\":{ \"version\":2, \"shaderUrl\":\"qrc:///shaders/splashSkybox.frag\" } }" };
@ -2887,7 +2830,7 @@ void Application::initializeDisplayPlugins() {
// Once time initialization code
DisplayPluginPointer targetDisplayPlugin;
foreach(auto displayPlugin, displayPlugins) {
displayPlugin->setContext(_gpuContext);
displayPlugin->setContext(_graphicsEngine.getGPUContext());
if (displayPlugin->getName() == lastActiveDisplayPluginName) {
targetDisplayPlugin = displayPlugin;
}
@ -2957,24 +2900,15 @@ void Application::initializeDisplayPlugins() {
void Application::initializeRenderEngine() {
// FIXME: on low end systems os the shaders take up to 1 minute to compile, so we pause the deadlock watchdog thread.
DeadlockWatchdogThread::withPause([&] {
// Set up the render engine
render::CullFunctor cullFunctor = LODManager::shouldRender;
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
#ifndef Q_OS_ANDROID
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor, !DISABLE_DEFERRED);
#endif
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, !DISABLE_DEFERRED, render::ItemKey::TAG_BITS_0, render::ItemKey::TAG_BITS_0);
_renderEngine->load();
_renderEngine->registerScene(_main3DScene);
// Now that OpenGL is initialized, we are sure we have a valid context and can create the various pipeline shaders with success.
DependencyManager::get<GeometryCache>()->initializeShapePipelines();
_graphicsEngine.initializeRender(DISABLE_DEFERRED);
DependencyManager::get<Keyboard>()->registerKeyboardHighlighting();
});
}
extern void setupPreferences();
#if !defined(DISABLE_QML)
static void addDisplayPluginToMenu(const DisplayPluginPointer& displayPlugin, int index, bool active = false);
#endif
void Application::initializeUi() {
AddressBarDialog::registerType();
@ -3024,12 +2958,13 @@ void Application::initializeUi() {
tabletScriptingInterface->getTablet(SYSTEM_TABLET);
}
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto offscreenUi = getOffscreenUI();
connect(offscreenUi.data(), &hifi::qml::OffscreenSurface::rootContextCreated,
this, &Application::onDesktopRootContextCreated);
connect(offscreenUi.data(), &hifi::qml::OffscreenSurface::rootItemCreated,
this, &Application::onDesktopRootItemCreated);
#if !defined(DISABLE_QML)
offscreenUi->setProxyWindow(_window->windowHandle());
// OffscreenUi is a subclass of OffscreenQmlSurface specifically designed to
// support the window management and scripting proxies for VR use
@ -3039,9 +2974,13 @@ void Application::initializeUi() {
// FIXME either expose so that dialogs can set this themselves or
// do better detection in the offscreen UI of what has focus
offscreenUi->setNavigationFocused(false);
#else
_window->setMenuBar(new Menu());
#endif
setupPreferences();
#if !defined(DISABLE_QML)
_glWidget->installEventFilter(offscreenUi.data());
offscreenUi->setMouseTranslator([=](const QPointF& pt) {
QPointF result = pt;
@ -3054,6 +2993,7 @@ void Application::initializeUi() {
return result.toPoint();
});
offscreenUi->resume();
#endif
connect(_window, &MainWindow::windowGeometryChanged, [this](const QRect& r){
resizeGL();
if (_touchscreenVirtualPadDevice) {
@ -3092,6 +3032,7 @@ void Application::initializeUi() {
}
});
#if !defined(DISABLE_QML)
// Pre-create a couple of Web3D overlays to speed up tablet UI
auto offscreenSurfaceCache = DependencyManager::get<OffscreenQmlSurfaceCache>();
offscreenSurfaceCache->setOnRootContextCreated([&](const QString& rootObject, QQmlContext* surfaceContext) {
@ -3105,9 +3046,11 @@ void Application::initializeUi() {
offscreenSurfaceCache->reserve(TabletScriptingInterface::QML, 1);
offscreenSurfaceCache->reserve(Web3DOverlay::QML, 2);
#endif
flushMenuUpdates();
#if !defined(DISABLE_QML)
// Now that the menu is instantiated, ensure the display plugin menu is properly updated
{
auto displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
@ -3126,6 +3069,7 @@ void Application::initializeUi() {
auto parent = getPrimaryMenu()->getMenu(MenuOption::OutputMenu);
parent->addSeparator();
}
#endif
// The display plugins are created before the menu now, so we need to do this here to hide the menu bar
// now that it exists
@ -3157,7 +3101,7 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
surfaceContext->setContextProperty("Recording", DependencyManager::get<RecordingScriptingInterface>().data());
surfaceContext->setContextProperty("Preferences", DependencyManager::get<Preferences>().data());
surfaceContext->setContextProperty("AddressManager", DependencyManager::get<AddressManager>().data());
surfaceContext->setContextProperty("FrameTimings", &_frameTimingsScriptingInterface);
surfaceContext->setContextProperty("FrameTimings", &_graphicsEngine._frameTimingsScriptingInterface);
surfaceContext->setContextProperty("Rates", new RatesScriptingInterface(this));
surfaceContext->setContextProperty("TREE_SCALE", TREE_SCALE);
@ -3206,7 +3150,7 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
surfaceContext->setContextProperty("LODManager", DependencyManager::get<LODManager>().data());
surfaceContext->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
surfaceContext->setContextProperty("Scene", DependencyManager::get<SceneScriptingInterface>().data());
surfaceContext->setContextProperty("Render", _renderEngine->getConfiguration().get());
surfaceContext->setContextProperty("Render", _graphicsEngine.getRenderEngine()->getConfiguration().get());
surfaceContext->setContextProperty("Workload", _gameWorkload._engine->getConfiguration().get());
surfaceContext->setContextProperty("Reticle", getApplicationCompositor().getReticleInterface());
surfaceContext->setContextProperty("Snapshot", DependencyManager::get<Snapshot>().data());
@ -3230,12 +3174,12 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
Stats::show();
AnimStats::show();
auto surfaceContext = DependencyManager::get<OffscreenUi>()->getSurfaceContext();
auto surfaceContext = getOffscreenUI()->getSurfaceContext();
surfaceContext->setContextProperty("Stats", Stats::getInstance());
surfaceContext->setContextProperty("AnimStats", AnimStats::getInstance());
#if !defined(Q_OS_ANDROID)
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto offscreenUi = getOffscreenUI();
auto qml = PathUtils::qmlUrl("AvatarInputsBar.qml");
offscreenUi->show(qml, "AvatarInputsBar");
#endif
@ -3418,7 +3362,7 @@ void Application::setPreferredCursor(const QString& cursorName) {
void Application::setSettingConstrainToolbarPosition(bool setting) {
_constrainToolbarPosition.set(setting);
DependencyManager::get<OffscreenUi>()->setConstrainToolbarToCenterX(setting);
getOffscreenUI()->setConstrainToolbarToCenterX(setting);
}
void Application::setMiniTabletEnabled(bool enabled) {
@ -3494,7 +3438,7 @@ void Application::resizeGL() {
auto renderResolutionScale = getRenderResolutionScale();
if (displayPlugin->getRenderResolutionScale() != renderResolutionScale) {
auto renderConfig = _renderEngine->getConfiguration();
auto renderConfig = _graphicsEngine.getRenderEngine()->getConfiguration();
assert(renderConfig);
auto mainView = renderConfig->getConfig("RenderMainView.RenderDeferredTask");
assert(mainView);
@ -3512,7 +3456,9 @@ void Application::resizeGL() {
_myCamera.loadViewFrustum(_viewFrustum);
}
DependencyManager::get<OffscreenUi>()->resize(fromGlm(displayPlugin->getRecommendedUiSize()));
#if !defined(DISABLE_QML)
getOffscreenUI()->resize(fromGlm(displayPlugin->getRecommendedUiSize()));
#endif
}
void Application::handleSandboxStatus(QNetworkReply* reply) {
@ -3723,8 +3669,8 @@ void Application::onPresent(quint32 frameCount) {
postEvent(this, new QEvent((QEvent::Type)ApplicationEvent::Idle), Qt::HighEventPriority);
}
expected = false;
if (_renderEventHandler && !isAboutToQuit() && _pendingRenderEvent.compare_exchange_strong(expected, true)) {
postEvent(_renderEventHandler, new QEvent((QEvent::Type)ApplicationEvent::Render));
if (_graphicsEngine.checkPendingRenderEvent() && !isAboutToQuit()) {
postEvent(_graphicsEngine._renderEventHandler, new QEvent((QEvent::Type)ApplicationEvent::Render));
}
}
@ -3912,10 +3858,12 @@ bool Application::eventFilter(QObject* object, QEvent* event) {
}
if (event->type() == QEvent::ShortcutOverride) {
if (DependencyManager::get<OffscreenUi>()->shouldSwallowShortcut(event)) {
#if !defined(DISABLE_QML)
if (getOffscreenUI()->shouldSwallowShortcut(event)) {
event->accept();
return true;
}
#endif
// Filter out captured keys before they're used for shortcut actions.
if (_controllerScriptingInterface->isKeyCaptured(static_cast<QKeyEvent*>(event))) {
@ -3998,7 +3946,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_X:
if (isShifted && isMeta) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto offscreenUi = getOffscreenUI();
offscreenUi->togglePinned();
//offscreenUi->getSurfaceContext()->engine()->clearComponentCache();
//OffscreenUi::information("Debugging", "Component cache cleared");
@ -4014,7 +3962,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_B:
if (isMeta) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto offscreenUi = getOffscreenUI();
offscreenUi->load("Browser.qml");
} else if (isOption) {
controller::InputRecorder* inputRecorder = controller::InputRecorder::getInstance();
@ -4036,7 +3984,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_R:
if (isMeta && !event->isAutoRepeat()) {
DependencyManager::get<ScriptEngines>()->reloadAllScripts();
DependencyManager::get<OffscreenUi>()->clearCache();
getOffscreenUI()->clearCache();
}
break;
@ -4233,9 +4181,13 @@ void Application::mouseMoveEvent(QMouseEvent* event) {
return; // bail
}
auto offscreenUi = DependencyManager::get<OffscreenUi>();
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto eventPosition = compositor.getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
QPointF transformedPos = offscreenUi ? offscreenUi->mapToVirtualScreen(eventPosition) : QPointF();
#else
QPointF transformedPos;
#endif
auto button = event->button();
auto buttons = event->buttons();
// Determine if the ReticleClick Action is 1 and if so, fake include the LeftMouseButton
@ -4273,7 +4225,8 @@ void Application::mousePressEvent(QMouseEvent* event) {
// Inhibit the menu if the user is using alt-mouse dragging
_altPressed = false;
auto offscreenUi = DependencyManager::get<OffscreenUi>();
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
// If we get a mouse press event it means it wasn't consumed by the offscreen UI,
// hence, we should defocus all of the offscreen UI windows, in order to allow
// keyboard shortcuts not to be swallowed by them. In particular, WebEngineViews
@ -4282,6 +4235,9 @@ void Application::mousePressEvent(QMouseEvent* event) {
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
#else
QPointF transformedPos;
#endif
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
@ -4318,9 +4274,13 @@ void Application::mousePressEvent(QMouseEvent* event) {
}
void Application::mouseDoublePressEvent(QMouseEvent* event) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
#else
QPointF transformedPos;
#endif
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
@ -4341,9 +4301,13 @@ void Application::mouseDoublePressEvent(QMouseEvent* event) {
void Application::mouseReleaseEvent(QMouseEvent* event) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition);
#else
QPointF transformedPos;
#endif
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
@ -4494,39 +4458,6 @@ bool Application::acceptSnapshot(const QString& urlString) {
return true;
}
static uint32_t _renderedFrameIndex { INVALID_FRAME };
bool Application::shouldPaint() const {
if (_aboutToQuit || _window->isMinimized()) {
return false;
}
auto displayPlugin = getActiveDisplayPlugin();
#ifdef DEBUG_PAINT_DELAY
static uint64_t paintDelaySamples{ 0 };
static uint64_t paintDelayUsecs{ 0 };
paintDelayUsecs += displayPlugin->getPaintDelayUsecs();
static const int PAINT_DELAY_THROTTLE = 1000;
if (++paintDelaySamples % PAINT_DELAY_THROTTLE == 0) {
qCDebug(interfaceapp).nospace() <<
"Paint delay (" << paintDelaySamples << " samples): " <<
(float)paintDelaySamples / paintDelayUsecs << "us";
}
#endif
// Throttle if requested
if (displayPlugin->isThrottled() && (_lastTimeRendered.elapsed() < THROTTLED_SIM_FRAME_PERIOD_MS)) {
return false;
}
// Sync up the _renderedFrameIndex
_renderedFrameIndex = displayPlugin->presentCount();
return true;
}
#ifdef Q_OS_WIN
#include <Windows.h>
#include <TCHAR.h>
@ -4739,7 +4670,8 @@ void Application::idle() {
// Update the deadlock watchdog
updateHeartbeat();
auto offscreenUi = DependencyManager::get<OffscreenUi>();
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
// These tasks need to be done on our first idle, because we don't want the showing of
// overlay subwindows to do a showDesktop() until after the first time through
@ -4748,6 +4680,7 @@ void Application::idle() {
firstIdle = false;
connect(offscreenUi.data(), &OffscreenUi::showDesktop, this, &Application::showDesktop);
}
#endif
#ifdef Q_OS_WIN
// If tracing is enabled then monitor the CPU in a separate thread
@ -4764,6 +4697,7 @@ void Application::idle() {
#endif
auto displayPlugin = getActiveDisplayPlugin();
#if !defined(DISABLE_QML)
if (displayPlugin) {
auto uiSize = displayPlugin->getRecommendedUiSize();
// Bit of a hack since there's no device pixel ratio change event I can find.
@ -4772,30 +4706,18 @@ void Application::idle() {
offscreenUi->resize(fromGlm(uiSize));
}
}
#endif
if (displayPlugin) {
PROFILE_COUNTER_IF_CHANGED(app, "present", float, displayPlugin->presentRate());
}
PROFILE_COUNTER_IF_CHANGED(app, "renderLoopRate", float, _renderLoopCounter.rate());
PROFILE_COUNTER_IF_CHANGED(app, "currentDownloads", uint32_t, ResourceCache::getLoadingRequestCount());
PROFILE_COUNTER_IF_CHANGED(app, "renderLoopRate", float, getRenderLoopRate());
PROFILE_COUNTER_IF_CHANGED(app, "currentDownloads", uint32_t, ResourceCache::getLoadingRequests().length());
PROFILE_COUNTER_IF_CHANGED(app, "pendingDownloads", uint32_t, ResourceCache::getPendingRequestCount());
PROFILE_COUNTER_IF_CHANGED(app, "currentProcessing", int, DependencyManager::get<StatTracker>()->getStat("Processing").toInt());
PROFILE_COUNTER_IF_CHANGED(app, "pendingProcessing", int, DependencyManager::get<StatTracker>()->getStat("PendingProcessing").toInt());
auto renderConfig = _renderEngine->getConfiguration();
PROFILE_COUNTER_IF_CHANGED(render, "gpuTime", float, (float)_gpuContext->getFrameTimerGPUAverage());
auto opaqueRangeTimer = renderConfig->getConfig("OpaqueRangeTimer");
auto linearDepth = renderConfig->getConfig("LinearDepth");
auto surfaceGeometry = renderConfig->getConfig("SurfaceGeometry");
auto renderDeferred = renderConfig->getConfig("RenderDeferred");
auto toneAndPostRangeTimer = renderConfig->getConfig("ToneAndPostRangeTimer");
PROFILE_COUNTER(render_detail, "gpuTimes", {
{ "OpaqueRangeTimer", opaqueRangeTimer ? opaqueRangeTimer->property("gpuRunTime") : 0 },
{ "LinearDepth", linearDepth ? linearDepth->property("gpuRunTime") : 0 },
{ "SurfaceGeometry", surfaceGeometry ? surfaceGeometry->property("gpuRunTime") : 0 },
{ "RenderDeferred", renderDeferred ? renderDeferred->property("gpuRunTime") : 0 },
{ "ToneAndPostRangeTimer", toneAndPostRangeTimer ? toneAndPostRangeTimer->property("gpuRunTime") : 0 }
});
auto renderConfig = _graphicsEngine.getRenderEngine()->getConfiguration();
PROFILE_COUNTER_IF_CHANGED(render, "gpuTime", float, (float)_graphicsEngine.getGPUContext()->getFrameTimerGPUAverage());
PROFILE_RANGE(app, __FUNCTION__);
@ -4806,6 +4728,7 @@ void Application::idle() {
float secondsSinceLastUpdate = (float)_lastTimeUpdated.nsecsElapsed() / NSECS_PER_MSEC / MSECS_PER_SECOND;
_lastTimeUpdated.start();
#if !defined(DISABLE_QML)
// If the offscreen Ui has something active that is NOT the root, then assume it has keyboard focus.
if (offscreenUi && offscreenUi->getWindow()) {
auto activeFocusItem = offscreenUi->getWindow()->activeFocusItem();
@ -4817,9 +4740,11 @@ void Application::idle() {
_keyboardDeviceHasFocus = true;
}
}
#endif
checkChangeCursor();
#if !defined(DISABLE_QML)
auto stats = Stats::getInstance();
if (stats) {
stats->updateStats();
@ -4828,6 +4753,7 @@ void Application::idle() {
if (animStats) {
animStats->updateStats();
}
#endif
// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing
@ -5163,10 +5089,9 @@ QVector<EntityItemID> Application::pasteEntities(float x, float y, float z) {
void Application::init() {
// Make sure Login state is up to date
#if !defined(DISABLE_QML)
DependencyManager::get<DialogsManager>()->toggleLoginDialog();
if (!DISABLE_DEFERRED) {
DependencyManager::get<DeferredLightingEffect>()->init();
}
#endif
DependencyManager::get<AvatarManager>()->init();
_timerStart.start();
@ -5235,7 +5160,7 @@ void Application::init() {
}
}, Qt::QueuedConnection);
_gameWorkload.startup(getEntities()->getWorkloadSpace(), _main3DScene, _entitySimulation);
_gameWorkload.startup(getEntities()->getWorkloadSpace(), _graphicsEngine.getRenderScene(), _entitySimulation);
_entitySimulation->setWorkloadSpace(getEntities()->getWorkloadSpace());
}
@ -5269,7 +5194,7 @@ void Application::updateLOD(float deltaTime) const {
// adjust it unless we were asked to disable this feature, or if we're currently in throttleRendering mode
if (!isThrottleRendering()) {
float presentTime = getActiveDisplayPlugin()->getAveragePresentTime();
float engineRunTime = (float)(_renderEngine->getConfiguration().get()->getCPURunTime());
float engineRunTime = (float)(_graphicsEngine.getRenderEngine()->getConfiguration().get()->getCPURunTime());
float gpuTime = getGPUContext()->getFrameTimerGPUAverage();
float batchTime = getGPUContext()->getFrameTimerBatchAverage();
auto lodManager = DependencyManager::get<LODManager>();
@ -5670,7 +5595,7 @@ void Application::updateSecondaryCameraViewFrustum() {
// camera should be.
// Code based on SecondaryCameraJob
auto renderConfig = _renderEngine->getConfiguration();
auto renderConfig = _graphicsEngine.getRenderEngine()->getConfiguration();
assert(renderConfig);
auto camera = dynamic_cast<SecondaryCameraJobConfig*>(renderConfig->getConfig("SecondaryCamera"));
@ -5739,7 +5664,7 @@ void Application::updateSecondaryCameraViewFrustum() {
static bool domainLoadingInProgress = false;
void Application::update(float deltaTime) {
PROFILE_RANGE_EX(app, __FUNCTION__, 0xffff0000, (uint64_t)_renderFrameCount + 1);
PROFILE_RANGE_EX(app, __FUNCTION__, 0xffff0000, (uint64_t)_graphicsEngine._renderFrameCount + 1);
if (_aboutToQuit) {
return;
@ -6155,7 +6080,7 @@ void Application::update(float deltaTime) {
// TODO: Fix this by modeling the way the secondary camera works on how the main camera works
// ie. Use a camera object stored in the game logic and informs the Engine on where the secondary
// camera should be.
updateSecondaryCameraViewFrustum();
// updateSecondaryCameraViewFrustum();
}
quint64 now = usecTimestampNow();
@ -6231,13 +6156,6 @@ void Application::update(float deltaTime) {
updateRenderArgs(deltaTime);
// HACK
// load the view frustum
// FIXME: This preDisplayRender call is temporary until we create a separate render::scene for the mirror rendering.
// Then we can move this logic into the Avatar::simulate call.
myAvatar->preDisplaySide(&_appRenderArgs._renderArgs);
{
PerformanceTimer perfTimer("AnimDebugDraw");
AnimDebugDraw::getInstance().update();
@ -6248,10 +6166,15 @@ void Application::update(float deltaTime) {
PerformanceTimer perfTimer("enqueueFrame");
getMain3DScene()->enqueueFrame();
}
// If the display plugin is inactive then the frames won't be processed so process them here.
if (!getActiveDisplayPlugin()->isActive()) {
getMain3DScene()->processTransactionQueue();
}
}
void Application::updateRenderArgs(float deltaTime) {
editRenderArgs([this, deltaTime](AppRenderArgs& appRenderArgs) {
_graphicsEngine.editRenderArgs([this, deltaTime](AppRenderArgs& appRenderArgs) {
PerformanceTimer perfTimer("editRenderArgs");
appRenderArgs._headPose = getHMDSensorPose();
@ -6280,7 +6203,7 @@ void Application::updateRenderArgs(float deltaTime) {
_viewFrustum.setProjection(adjustedProjection);
_viewFrustum.calculate();
}
appRenderArgs._renderArgs = RenderArgs(_gpuContext, lodManager->getOctreeSizeScale(),
appRenderArgs._renderArgs = RenderArgs(_graphicsEngine.getGPUContext(), lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(), lodManager->getLODAngleHalfTan(), RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::MONO, RenderArgs::RENDER_DEBUG_NONE);
appRenderArgs._renderArgs._scene = getMain3DScene();
@ -6365,6 +6288,13 @@ void Application::updateRenderArgs(float deltaTime) {
QMutexLocker viewLocker(&_viewMutex);
appRenderArgs._renderArgs.setViewFrustum(_displayViewFrustum);
}
// HACK
// load the view frustum
// FIXME: This preDisplayRender call is temporary until we create a separate render::scene for the mirror rendering.
// Then we can move this logic into the Avatar::simulate call.
myAvatar->preDisplaySide(&appRenderArgs._renderArgs);
});
}
@ -6574,11 +6504,16 @@ void Application::resetSensors(bool andReload) {
}
void Application::hmdVisibleChanged(bool visible) {
// TODO
// calling start and stop will change audio input and ouput to default audio devices.
// we need to add a pause/unpause functionality to AudioClient for this to work properly
#if 0
if (visible) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "start", Qt::QueuedConnection);
} else {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "stop", Qt::QueuedConnection);
}
#endif
}
void Application::updateWindowTitle() const {
@ -6702,8 +6637,9 @@ void Application::nodeActivated(SharedNodePointer node) {
if (node->getType() == NodeType::AssetServer) {
// asset server just connected - check if we have the asset browser showing
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto assetDialog = offscreenUi->getRootItem()->findChild<QQuickItem*>("AssetServer");
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto assetDialog = offscreenUi ? offscreenUi->getRootItem()->findChild<QQuickItem*>("AssetServer") : nullptr;
if (assetDialog) {
auto nodeList = DependencyManager::get<NodeList>();
@ -6716,6 +6652,7 @@ void Application::nodeActivated(SharedNodePointer node) {
assetDialog->setVisible(false);
}
}
#endif
}
// If we get a new EntityServer activated, reset lastQueried time
@ -6773,13 +6710,15 @@ void Application::nodeKilled(SharedNodePointer node) {
} else if (node->getType() == NodeType::AssetServer) {
// asset server going away - check if we have the asset browser showing
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto assetDialog = offscreenUi->getRootItem()->findChild<QQuickItem*>("AssetServer");
#if !defined(DISABLE_QML)
auto offscreenUi = getOffscreenUI();
auto assetDialog = offscreenUi ? offscreenUi->getRootItem()->findChild<QQuickItem*>("AssetServer") : nullptr;
if (assetDialog) {
// call reload on the shown asset browser dialog
QMetaObject::invokeMethod(assetDialog, "clear");
}
#endif
}
}
@ -6886,8 +6825,10 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
qScriptRegisterMetaType(scriptEngine.data(), RayToOverlayIntersectionResultToScriptValue,
RayToOverlayIntersectionResultFromScriptValue);
scriptEngine->registerGlobalObject("OffscreenFlags", DependencyManager::get<OffscreenUi>()->getFlags());
#if !defined(DISABLE_QML)
scriptEngine->registerGlobalObject("OffscreenFlags", getOffscreenUI()->getFlags());
scriptEngine->registerGlobalObject("Desktop", DependencyManager::get<DesktopScriptingInterface>().data());
#endif
qScriptRegisterMetaType(scriptEngine.data(), wrapperToScriptValue<ToolbarProxy>, wrapperFromScriptValue<ToolbarProxy>);
qScriptRegisterMetaType(scriptEngine.data(),
@ -6913,14 +6854,16 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
bool clientScript = scriptEngine->isClientScript();
scriptEngine->registerFunction("OverlayWindow", clientScript ? QmlWindowClass::constructor : QmlWindowClass::restricted_constructor);
#if !defined(Q_OS_ANDROID)
#if !defined(Q_OS_ANDROID) && !defined(DISABLE_QML)
scriptEngine->registerFunction("OverlayWebWindow", clientScript ? QmlWebWindowClass::constructor : QmlWebWindowClass::restricted_constructor);
#endif
scriptEngine->registerFunction("QmlFragment", clientScript ? QmlFragmentClass::constructor : QmlFragmentClass::restricted_constructor);
scriptEngine->registerGlobalObject("Menu", MenuScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("DesktopPreviewProvider", DependencyManager::get<DesktopPreviewProvider>().data());
#if !defined(DISABLE_QML)
scriptEngine->registerGlobalObject("Stats", Stats::getInstance());
#endif
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("Snapshot", DependencyManager::get<Snapshot>().data());
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
@ -6961,7 +6904,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
scriptEngine->registerFunction("HMD", "getHUDLookAtPosition3D", HMDScriptingInterface::getHUDLookAtPosition3D, 0);
scriptEngine->registerGlobalObject("Scene", DependencyManager::get<SceneScriptingInterface>().data());
scriptEngine->registerGlobalObject("Render", _renderEngine->getConfiguration().get());
scriptEngine->registerGlobalObject("Render", _graphicsEngine.getRenderEngine()->getConfiguration().get());
scriptEngine->registerGlobalObject("Workload", _gameWorkload._engine->getConfiguration().get());
GraphicsScriptingInterface::registerMetaTypes(scriptEngine.data());
@ -7116,7 +7059,7 @@ bool Application::askToSetAvatarUrl(const QString& url) {
qCDebug(interfaceapp) << "Declined to agree to avatar license";
}
//auto offscreenUi = DependencyManager::get<OffscreenUi>();
//auto offscreenUi = getOffscreenUI();
});
} else {
setAvatar(url, modelName);
@ -7314,7 +7257,9 @@ void Application::showDialog(const QUrl& widgetUrl, const QUrl& tabletUrl, const
toggleTabletUI(true);
}
} else {
DependencyManager::get<OffscreenUi>()->show(widgetUrl, name);
#if !defined(DISABLE_QML)
getOffscreenUI()->show(widgetUrl, name);
#endif
}
}
@ -7339,10 +7284,10 @@ void Application::showAssetServerWidget(QString filePath) {
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
auto hmd = DependencyManager::get<HMDScriptingInterface>();
if (tablet->getToolbarMode()) {
DependencyManager::get<OffscreenUi>()->show(url, "AssetServer", startUpload);
getOffscreenUI()->show(url, "AssetServer", startUpload);
} else {
if (!hmd->getShouldShowTablet() && !isHMDMode()) {
DependencyManager::get<OffscreenUi>()->show(url, "AssetServer", startUpload);
getOffscreenUI()->show(url, "AssetServer", startUpload);
} else {
static const QUrl url("hifi/dialogs/TabletAssetServer.qml");
if (!tablet->isPathLoaded(url)) {
@ -7697,7 +7642,7 @@ void Application::addAssetToWorldInfo(QString modelName, QString infoText) {
if (!_addAssetToWorldErrorTimer.isActive()) {
if (!_addAssetToWorldMessageBox) {
_addAssetToWorldMessageBox = DependencyManager::get<OffscreenUi>()->createMessageBox(OffscreenUi::ICON_INFORMATION,
_addAssetToWorldMessageBox = getOffscreenUI()->createMessageBox(OffscreenUi::ICON_INFORMATION,
"Downloading Model", "", QMessageBox::NoButton, QMessageBox::NoButton);
connect(_addAssetToWorldMessageBox, SIGNAL(destroyed()), this, SLOT(onAssetToWorldMessageBoxClosed()));
}
@ -7780,7 +7725,7 @@ void Application::addAssetToWorldError(QString modelName, QString errorText) {
addAssetToWorldInfoClear(modelName);
if (!_addAssetToWorldMessageBox) {
_addAssetToWorldMessageBox = DependencyManager::get<OffscreenUi>()->createMessageBox(OffscreenUi::ICON_INFORMATION,
_addAssetToWorldMessageBox = getOffscreenUI()->createMessageBox(OffscreenUi::ICON_INFORMATION,
"Downloading Model", "", QMessageBox::NoButton, QMessageBox::NoButton);
connect(_addAssetToWorldMessageBox, SIGNAL(destroyed()), this, SLOT(onAssetToWorldMessageBoxClosed()));
}
@ -8274,6 +8219,8 @@ DisplayPluginPointer Application::getActiveDisplayPlugin() const {
return _displayPlugin;
}
#if !defined(DISABLE_QML)
static const char* EXCLUSION_GROUP_KEY = "exclusionGroup";
static void addDisplayPluginToMenu(const DisplayPluginPointer& displayPlugin, int index, bool active) {
@ -8314,6 +8261,7 @@ static void addDisplayPluginToMenu(const DisplayPluginPointer& displayPlugin, in
action->setProperty(EXCLUSION_GROUP_KEY, QVariant::fromValue(displayPluginGroup));
Q_ASSERT(menu->menuItemExists(MenuOption::OutputMenu, name));
}
#endif
void Application::updateDisplayMode() {
// Unsafe to call this method from anything but the main thread
@ -8358,8 +8306,8 @@ void Application::setDisplayPlugin(DisplayPluginPointer newDisplayPlugin) {
// instead emit a signal that the display plugin is changing and let
// the desktop lock itself. Reduces coupling between the UI and display
// plugins
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto desktop = offscreenUi->getDesktop();
auto offscreenUi = getOffscreenUI();
auto desktop = offscreenUi ? offscreenUi->getDesktop() : nullptr;
auto menu = Menu::getInstance();
// Make the switch atomic from the perspective of other threads
@ -8405,7 +8353,9 @@ void Application::setDisplayPlugin(DisplayPluginPointer newDisplayPlugin) {
}
}
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
if (offscreenUi) {
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
}
getApplicationCompositor().setDisplayPlugin(newDisplayPlugin);
_displayPlugin = newDisplayPlugin;
connect(_displayPlugin.get(), &DisplayPlugin::presented, this, &Application::onPresent, Qt::DirectConnection);

View file

@ -70,11 +70,11 @@
#include "ui/overlays/Overlays.h"
#include "workload/GameWorkload.h"
#include "graphics/GraphicsEngine.h"
#include <procedural/ProceduralSkybox.h>
#include <graphics/Skybox.h>
#include <ModelScriptingInterface.h>
#include "FrameTimingsScriptingInterface.h"
#include "Sound.h"
@ -153,7 +153,6 @@ public:
void updateSecondaryCameraViewFrustum();
void updateCamera(RenderArgs& renderArgs, float deltaTime);
void paintGL();
void resizeGL();
bool event(QEvent* event) override;
@ -203,8 +202,8 @@ public:
Overlays& getOverlays() { return _overlays; }
size_t getRenderFrameCount() const { return _renderFrameCount; }
float getRenderLoopRate() const { return _renderLoopCounter.rate(); }
size_t getRenderFrameCount() const { return _graphicsEngine.getRenderFrameCount(); }
float getRenderLoopRate() const { return _graphicsEngine.getRenderLoopRate(); }
float getNumCollisionObjects() const;
float getTargetRenderFrameRate() const; // frames/second
@ -275,10 +274,10 @@ public:
void setMaxOctreePacketsPerSecond(int maxOctreePPS);
int getMaxOctreePacketsPerSecond() const;
render::ScenePointer getMain3DScene() override { return _main3DScene; }
const render::ScenePointer& getMain3DScene() const { return _main3DScene; }
render::EnginePointer getRenderEngine() override { return _renderEngine; }
gpu::ContextPointer getGPUContext() const { return _gpuContext; }
render::ScenePointer getMain3DScene() override { return _graphicsEngine.getRenderScene(); }
render::EnginePointer getRenderEngine() override { return _graphicsEngine.getRenderEngine(); }
gpu::ContextPointer getGPUContext() const { return _graphicsEngine.getGPUContext(); }
const GameWorkload& getGameWorkload() const { return _gameWorkload; }
@ -515,7 +514,6 @@ private:
bool handleFileOpenEvent(QFileOpenEvent* event);
void cleanupBeforeQuit();
bool shouldPaint() const;
void idle();
void update(float deltaTime);
@ -535,8 +533,6 @@ private:
void initializeAcceptedFiles();
void runRenderFrame(RenderArgs* renderArgs/*, Camera& whichCamera, bool selfAvatarOnly = false*/);
bool importJSONFromURL(const QString& urlString);
bool importSVOFromURL(const QString& urlString);
bool importFromZIP(const QString& filePath);
@ -586,18 +582,12 @@ private:
bool _activatingDisplayPlugin { false };
uint32_t _renderFrameCount { 0 };
// Frame Rate Measurement
RateCounter<500> _renderLoopCounter;
RateCounter<500> _gameLoopCounter;
FrameTimingsScriptingInterface _frameTimingsScriptingInterface;
QTimer _minimizedWindowTimer;
QElapsedTimer _timerStart;
QElapsedTimer _lastTimeUpdated;
QElapsedTimer _lastTimeRendered;
int _minimumGPUTextureMemSizeStabilityCount { 30 };
@ -683,29 +673,9 @@ private:
quint64 _lastFaceTrackerUpdate;
render::ScenePointer _main3DScene{ new render::Scene(glm::vec3(-0.5f * (float)TREE_SCALE), (float)TREE_SCALE) };
render::EnginePointer _renderEngine{ new render::RenderEngine() };
gpu::ContextPointer _gpuContext; // initialized during window creation
GameWorkload _gameWorkload;
mutable QMutex _renderArgsMutex{ QMutex::Recursive };
struct AppRenderArgs {
render::Args _renderArgs;
glm::mat4 _eyeToWorld;
glm::mat4 _view;
glm::mat4 _eyeOffsets[2];
glm::mat4 _eyeProjections[2];
glm::mat4 _headPose;
glm::mat4 _sensorToWorld;
float _sensorToWorldScale { 1.0f };
bool _isStereo{ false };
};
AppRenderArgs _appRenderArgs;
using RenderArgsEditor = std::function <void (AppRenderArgs&)>;
void editRenderArgs(RenderArgsEditor editor);
GraphicsEngine _graphicsEngine;
void updateRenderArgs(float deltaTime);
@ -751,8 +721,6 @@ private:
bool _keyboardDeviceHasFocus { true };
QString _returnFromFullScreenMirrorTo;
ConnectionMonitor _connectionMonitor;
QTimer _addAssetToWorldResizeTimer;
@ -786,12 +754,8 @@ private:
QUrl _avatarOverrideUrl;
bool _saveAvatarOverrideUrl { false };
QObject* _renderEventHandler{ nullptr };
friend class RenderEventHandler;
std::atomic<bool> _pendingIdleEvent { true };
std::atomic<bool> _pendingRenderEvent { true };
bool quitWhenFinished { false };

View file

@ -19,218 +19,207 @@
#include "Util.h"
// Statically provided display and input plugins
extern DisplayPluginList getDisplayPlugins();
void Application::editRenderArgs(RenderArgsEditor editor) {
QMutexLocker renderLocker(&_renderArgsMutex);
editor(_appRenderArgs);
}
void Application::paintGL() {
// Some plugins process message events, allowing paintGL to be called reentrantly.
_renderFrameCount++;
_lastTimeRendered.start();
auto lastPaintBegin = usecTimestampNow();
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
PerformanceTimer perfTimer("paintGL");
if (nullptr == _displayPlugin) {
return;
}
DisplayPluginPointer displayPlugin;
{
PROFILE_RANGE(render, "/getActiveDisplayPlugin");
displayPlugin = getActiveDisplayPlugin();
}
{
PROFILE_RANGE(render, "/pluginBeginFrameRender");
// If a display plugin loses it's underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
QMetaObject::invokeMethod(this, "updateDisplayMode");
return;
}
}
RenderArgs renderArgs;
glm::mat4 HMDSensorPose;
glm::mat4 eyeToWorld;
glm::mat4 sensorToWorld;
bool isStereo;
glm::mat4 stereoEyeOffsets[2];
glm::mat4 stereoEyeProjections[2];
{
QMutexLocker viewLocker(&_renderArgsMutex);
renderArgs = _appRenderArgs._renderArgs;
// don't render if there is no context.
if (!_appRenderArgs._renderArgs._context) {
return;
}
HMDSensorPose = _appRenderArgs._headPose;
eyeToWorld = _appRenderArgs._eyeToWorld;
sensorToWorld = _appRenderArgs._sensorToWorld;
isStereo = _appRenderArgs._isStereo;
for_each_eye([&](Eye eye) {
stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
});
}
{
PROFILE_RANGE(render, "/gpuContextReset");
_gpuContext->beginFrame(_appRenderArgs._view, HMDSensorPose);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch("Application_render::gpuContextReset", _gpuContext, [&](gpu::Batch& batch) {
batch.resetStages();
});
}
{
PROFILE_RANGE(render, "/renderOverlay");
PerformanceTimer perfTimer("renderOverlay");
// NOTE: There is no batch associated with this renderArgs
// the ApplicationOverlay class assumes it's viewport is setup to be the device size
renderArgs._viewport = glm::ivec4(0, 0, getDeviceSize());
_applicationOverlay.renderOverlay(&renderArgs);
}
{
PROFILE_RANGE(render, "/updateCompositor");
getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
}
gpu::FramebufferPointer finalFramebuffer;
QSize finalFramebufferSize;
{
PROFILE_RANGE(render, "/getOutputFramebuffer");
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
finalFramebufferSize = framebufferCache->getFrameBufferSize();
// Final framebuffer that will be handed to the display-plugin
finalFramebuffer = framebufferCache->getFramebuffer();
}
{
if (isStereo) {
renderArgs._context->enableStereo(true);
renderArgs._context->setStereoProjections(stereoEyeProjections);
renderArgs._context->setStereoViews(stereoEyeOffsets);
}
renderArgs._hudOperator = displayPlugin->getHUDOperator();
renderArgs._hudTexture = _applicationOverlay.getOverlayTexture();
renderArgs._blitFramebuffer = finalFramebuffer;
runRenderFrame(&renderArgs);
}
auto frame = _gpuContext->endFrame();
frame->frameIndex = _renderFrameCount;
frame->framebuffer = finalFramebuffer;
frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
auto frameBufferCache = DependencyManager::get<FramebufferCache>();
if (frameBufferCache) {
frameBufferCache->releaseFramebuffer(framebuffer);
}
};
// deliver final scene rendering commands to the display plugin
{
PROFILE_RANGE(render, "/pluginOutput");
PerformanceTimer perfTimer("pluginOutput");
_renderLoopCounter.increment();
displayPlugin->submitFrame(frame);
}
// Reset the framebuffer and stereo state
renderArgs._blitFramebuffer.reset();
renderArgs._context->enableStereo(false);
{
auto stats = Stats::getInstance();
if (stats) {
stats->setRenderDetails(renderArgs._details);
}
}
uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
_frameTimingsScriptingInterface.addValue(lastPaintDuration);
}
//void Application::paintGL() {
// // Some plugins process message events, allowing paintGL to be called reentrantly.
//
// _renderFrameCount++;
// // SG: Moved into the RenderEventHandler
// //_lastTimeRendered.start();
//
// auto lastPaintBegin = usecTimestampNow();
// PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
// PerformanceTimer perfTimer("paintGL");
//
// if (nullptr == _displayPlugin) {
// return;
// }
//
// DisplayPluginPointer displayPlugin;
// {
// PROFILE_RANGE(render, "/getActiveDisplayPlugin");
// displayPlugin = getActiveDisplayPlugin();
// }
//
// {
// PROFILE_RANGE(render, "/pluginBeginFrameRender");
// // If a display plugin loses it's underlying support, it
// // needs to be able to signal us to not use it
// if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
// QMetaObject::invokeMethod(this, "updateDisplayMode");
// return;
// }
// }
//
// RenderArgs renderArgs;
// glm::mat4 HMDSensorPose;
// glm::mat4 eyeToWorld;
// glm::mat4 sensorToWorld;
//
// bool isStereo;
// glm::mat4 stereoEyeOffsets[2];
// glm::mat4 stereoEyeProjections[2];
//
// {
// QMutexLocker viewLocker(&_renderArgsMutex);
// renderArgs = _appRenderArgs._renderArgs;
//
// // don't render if there is no context.
// if (!_appRenderArgs._renderArgs._context) {
// return;
// }
//
// HMDSensorPose = _appRenderArgs._headPose;
// eyeToWorld = _appRenderArgs._eyeToWorld;
// sensorToWorld = _appRenderArgs._sensorToWorld;
// isStereo = _appRenderArgs._isStereo;
// for_each_eye([&](Eye eye) {
// stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
// stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
// });
// }
//
// {
// PROFILE_RANGE(render, "/gpuContextReset");
// _graphicsEngine.getGPUContext()->beginFrame(_appRenderArgs._view, HMDSensorPose);
// // Reset the gpu::Context Stages
// // Back to the default framebuffer;
// gpu::doInBatch("Application_render::gpuContextReset", _graphicsEngine.getGPUContext(), [&](gpu::Batch& batch) {
// batch.resetStages();
// });
// }
//
//
// {
// PROFILE_RANGE(render, "/renderOverlay");
// PerformanceTimer perfTimer("renderOverlay");
// // NOTE: There is no batch associated with this renderArgs
// // the ApplicationOverlay class assumes it's viewport is setup to be the device size
// renderArgs._viewport = glm::ivec4(0, 0, getDeviceSize() * getRenderResolutionScale());
// _applicationOverlay.renderOverlay(&renderArgs);
// }
//
// {
// PROFILE_RANGE(render, "/updateCompositor");
// getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
// }
//
// gpu::FramebufferPointer finalFramebuffer;
// QSize finalFramebufferSize;
// {
// PROFILE_RANGE(render, "/getOutputFramebuffer");
// // Primary rendering pass
// auto framebufferCache = DependencyManager::get<FramebufferCache>();
// finalFramebufferSize = framebufferCache->getFrameBufferSize();
// // Final framebuffer that will be handled to the display-plugin
// finalFramebuffer = framebufferCache->getFramebuffer();
// }
//
// {
// if (isStereo) {
// renderArgs._context->enableStereo(true);
// renderArgs._context->setStereoProjections(stereoEyeProjections);
// renderArgs._context->setStereoViews(stereoEyeOffsets);
// }
//
// renderArgs._hudOperator = displayPlugin->getHUDOperator();
// renderArgs._hudTexture = _applicationOverlay.getOverlayTexture();
// renderArgs._blitFramebuffer = finalFramebuffer;
// _graphicsEngine.render_runRenderFrame(&renderArgs);
// }
//
// auto frame = _graphicsEngine.getGPUContext()->endFrame();
// frame->frameIndex = _renderFrameCount;
// frame->framebuffer = finalFramebuffer;
// frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
// auto frameBufferCache = DependencyManager::get<FramebufferCache>();
// if (frameBufferCache) {
// frameBufferCache->releaseFramebuffer(framebuffer);
// }
// };
// // deliver final scene rendering commands to the display plugin
// {
// PROFILE_RANGE(render, "/pluginOutput");
// PerformanceTimer perfTimer("pluginOutput");
// _renderLoopCounter.increment();
// displayPlugin->submitFrame(frame);
// }
//
// // Reset the framebuffer and stereo state
// renderArgs._blitFramebuffer.reset();
// renderArgs._context->enableStereo(false);
//
// {
// Stats::getInstance()->setRenderDetails(renderArgs._details);
// }
//
// uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
// _frameTimingsScriptingInterface.addValue(lastPaintDuration);
//}
// WorldBox Render Data & rendering functions
class WorldBoxRenderData {
public:
typedef render::Payload<WorldBoxRenderData> Payload;
typedef Payload::DataPointer Pointer;
int _val = 0;
static render::ItemID _item; // unique WorldBoxRenderData
};
render::ItemID WorldBoxRenderData::_item{ render::Item::INVALID_ITEM_ID };
namespace render {
template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape().withTagBits(ItemKey::TAG_BITS_0 | ItemKey::TAG_BITS_1); }
template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
if (Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
PerformanceTimer perfTimer("worldBox");
auto& batch = *args->_batch;
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
renderWorldBox(args, batch);
}
}
}
void Application::runRenderFrame(RenderArgs* renderArgs) {
PROFILE_RANGE(render, __FUNCTION__);
PerformanceTimer perfTimer("display");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::runRenderFrame()");
// The pending changes collecting the changes here
render::Transaction transaction;
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
// render models...
PerformanceTimer perfTimer("entities");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::runRenderFrame() ... entities...");
RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE;
renderArgs->_debugFlags = renderDebugFlags;
}
// Make sure the WorldBox is in the scene
// For the record, this one RenderItem is the first one we created and added to the scene.
// We could move that code elsewhere but you know...
if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);
WorldBoxRenderData::_item = _main3DScene->allocateID();
transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
_main3DScene->enqueueTransaction(transaction);
}
{
PerformanceTimer perfTimer("EngineRun");
_renderEngine->getRenderContext()->args = renderArgs;
_renderEngine->run();
}
}
//
//class WorldBoxRenderData {
//public:
// typedef render::Payload<WorldBoxRenderData> Payload;
// typedef Payload::DataPointer Pointer;
//
// int _val = 0;
// static render::ItemID _item; // unique WorldBoxRenderData
//};
//
//render::ItemID WorldBoxRenderData::_item{ render::Item::INVALID_ITEM_ID };
//
//namespace render {
// template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape().withTagBits(ItemKey::TAG_BITS_0 | ItemKey::TAG_BITS_1); }
// template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
// template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
// if (Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
// PerformanceTimer perfTimer("worldBox");
//
// auto& batch = *args->_batch;
// DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
// renderWorldBox(args, batch);
// }
// }
//}
//
//void Application::runRenderFrame(RenderArgs* renderArgs) {
// PROFILE_RANGE(render, __FUNCTION__);
// PerformanceTimer perfTimer("display");
// PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::runRenderFrame()");
//
// // The pending changes collecting the changes here
// render::Transaction transaction;
//
// if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
// // render models...
// PerformanceTimer perfTimer("entities");
// PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
// "Application::runRenderFrame() ... entities...");
//
// RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE;
//
// renderArgs->_debugFlags = renderDebugFlags;
// }
//
// // Make sure the WorldBox is in the scene
// // For the record, this one RenderItem is the first one we created and added to the scene.
// // We could move that code elsewhere but you know...
// if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
// auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
// auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);
//
// WorldBoxRenderData::_item = _main3DScene->allocateID();
//
// transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
// _main3DScene->enqueueTransaction(transaction);
// }
//
// {
// PerformanceTimer perfTimer("EngineRun");
// _renderEngine->getRenderContext()->args = renderArgs;
// _renderEngine->run();
// }
//}

View file

@ -247,25 +247,35 @@ QVariantMap AvatarBookmarks::getAvatarDataToBookmark() {
bookmark.insert(ENTRY_AVATAR_URL, avatarUrl);
bookmark.insert(ENTRY_AVATAR_SCALE, avatarScale);
QScriptEngine scriptEngine;
QVariantList wearableEntities;
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
auto avatarEntities = myAvatar->getAvatarEntityData();
for (auto entityID : avatarEntities.keys()) {
auto entity = entityTree->findEntityByID(entityID);
if (!entity || !isWearableEntity(entity)) {
continue;
if (entityTree) {
QScriptEngine scriptEngine;
auto avatarEntities = myAvatar->getAvatarEntityData();
for (auto entityID : avatarEntities.keys()) {
auto entity = entityTree->findEntityByID(entityID);
if (!entity || !isWearableEntity(entity)) {
continue;
}
QVariantMap avatarEntityData;
EncodeBitstreamParams params;
auto desiredProperties = entity->getEntityProperties(params);
desiredProperties += PROP_LOCAL_POSITION;
desiredProperties += PROP_LOCAL_ROTATION;
desiredProperties -= PROP_JOINT_ROTATIONS_SET;
desiredProperties -= PROP_JOINT_ROTATIONS;
desiredProperties -= PROP_JOINT_TRANSLATIONS_SET;
desiredProperties -= PROP_JOINT_TRANSLATIONS;
EntityItemProperties entityProperties = entity->getProperties(desiredProperties);
QScriptValue scriptProperties = EntityItemPropertiesToScriptValue(&scriptEngine, entityProperties);
avatarEntityData["properties"] = scriptProperties.toVariant();
wearableEntities.append(QVariant(avatarEntityData));
}
QVariantMap avatarEntityData;
EncodeBitstreamParams params;
auto desiredProperties = entity->getEntityProperties(params);
desiredProperties += PROP_LOCAL_POSITION;
desiredProperties += PROP_LOCAL_ROTATION;
EntityItemProperties entityProperties = entity->getProperties(desiredProperties);
QScriptValue scriptProperties = EntityItemPropertiesToScriptValue(&scriptEngine, entityProperties);
avatarEntityData["properties"] = scriptProperties.toVariant();
wearableEntities.append(QVariant(avatarEntityData));
}
bookmark.insert(ENTRY_AVATAR_ENTITIES, wearableEntities);
return bookmark;

View file

@ -48,7 +48,9 @@ void ConnectionMonitor::init() {
emit setRedirectErrorState(REDIRECT_HIFI_ADDRESS, "", 5);
} else {
qDebug() << "ConnectionMonitor: Showing connection failure window";
#if !defined(DISABLE_QML)
DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(true);
#endif
}
});
}
@ -59,8 +61,10 @@ void ConnectionMonitor::startTimer() {
void ConnectionMonitor::stopTimer() {
_timer.stop();
#if !defined(DISABLE_QML)
bool enableInterstitial = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
if (!enableInterstitial) {
DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(false);
}
#endif
}

View file

@ -364,8 +364,6 @@ Menu::Menu() {
qApp->setHmdTabletBecomesToolbarSetting(action->isChecked());
});
addCheckableActionToQMenuAndActionHash(uiOptionsMenu, MenuOption::Use3DKeyboard, 0, true);
// Developer > Render >>>
MenuWrapper* renderOptionsMenu = developerMenu->addMenu("Render");

View file

@ -117,7 +117,7 @@ namespace MenuOption {
const QString FrameTimer = "Show Timer";
const QString FullscreenMirror = "Mirror";
const QString Help = "Help...";
const QString HomeLocation = "Home";
const QString HomeLocation = "Home ";
const QString IncreaseAvatarSize = "Increase Avatar Size";
const QString IndependentMode = "Independent Mode";
const QString ActionMotorControl = "Enable Default Motor Control";
@ -213,7 +213,6 @@ namespace MenuOption {
const QString TurnWithHead = "Turn using Head";
const QString UseAudioForMouth = "Use Audio for Mouth";
const QString UseCamera = "Use Camera";
const QString Use3DKeyboard = "Use 3D Keyboard";
const QString VelocityFilter = "Velocity Filter";
const QString VisibleToEveryone = "Everyone";
const QString VisibleToFriends = "Friends";

View file

@ -17,6 +17,7 @@
#include <QTemporaryDir>
#include <FSTReader.h>
#include <FBXSerializer.h>
#include <OffscreenUi.h>
#include "ModelSelector.h"
@ -108,7 +109,7 @@ bool ModelPackager::loadModel() {
qCDebug(interfaceapp) << "Reading FBX file : " << _fbxInfo.filePath();
QByteArray fbxContents = fbx.readAll();
_hfmModel.reset(readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath()));
_hfmModel = FBXSerializer().read(fbxContents, QVariantHash(), _fbxInfo.filePath());
// make sure we have some basic mappings
populateBasicMapping(_mapping, _fbxInfo.filePath(), *_hfmModel);

View file

@ -45,7 +45,7 @@ private:
QString _scriptDir;
QVariantHash _mapping;
std::unique_ptr<hfm::Model> _hfmModel;
std::shared_ptr<hfm::Model> _hfmModel;
QStringList _textures;
QStringList _scripts;
};

View file

@ -14,7 +14,7 @@
#include <QDialog>
#include <FBXReader.h>
#include <hfm/HFM.h>
#include <FSTReader.h>
#include "ui/ModelsBrowser.h"

View file

@ -19,7 +19,6 @@
#include <QStandardPaths>
static const QString AVATAR_HEAD_AND_BODY_STRING = "Avatar Body with Head";
static const QString AVATAR_ATTACHEMENT_STRING = "Avatar Attachment";
static const QString ENTITY_MODEL_STRING = "Entity Model";
ModelSelector::ModelSelector() {

View file

@ -171,7 +171,7 @@ void SecondaryCameraJobConfig::setOrientation(glm::quat orient) {
}
void SecondaryCameraJobConfig::enableSecondaryCameraRenderConfigs(bool enabled) {
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->setEnabled(enabled);
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>("SecondaryCameraJob")->setEnabled(enabled);
setEnabled(enabled);
}
@ -187,11 +187,13 @@ public:
void run(const render::RenderContextPointer& renderContext, const RenderArgsPointer& cachedArgs) {
auto args = renderContext->args;
if (cachedArgs) {
args->_blitFramebuffer = cachedArgs->_blitFramebuffer;
args->_viewport = cachedArgs->_viewport;
args->popViewFrustum();
args->_displayMode = cachedArgs->_displayMode;
args->_renderMode = cachedArgs->_renderMode;
}
args->popViewFrustum();
gpu::doInBatch("EndSecondaryCameraFrame::run", args->_context, [&](gpu::Batch& batch) {
batch.restoreContextStereo();

View file

@ -36,114 +36,6 @@
using namespace std;
void renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show center of world
static const glm::vec3 RED(1.0f, 0.0f, 0.0f);
static const glm::vec3 GREEN(0.0f, 1.0f, 0.0f);
static const glm::vec3 BLUE(0.0f, 0.0f, 1.0f);
static const glm::vec3 GREY(0.5f, 0.5f, 0.5f);
static const glm::vec4 GREY4(0.5f, 0.5f, 0.5f, 1.0f);
static const glm::vec4 DASHED_RED(1.0f, 0.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_GREEN(0.0f, 1.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_BLUE(0.0f, 0.0f, 1.0f, 1.0f);
static const float DASH_LENGTH = 1.0f;
static const float GAP_LENGTH = 1.0f;
auto transform = Transform{};
static std::array<int, 18> geometryIds;
static std::once_flag initGeometryIds;
std::call_once(initGeometryIds, [&] {
for (size_t i = 0; i < geometryIds.size(); ++i) {
geometryIds[i] = geometryCache->allocateID();
}
});
batch.setModelTransform(transform);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED, geometryIds[0]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(-HALF_TREE_SCALE, 0.0f, 0.0f), DASHED_RED,
DASH_LENGTH, GAP_LENGTH, geometryIds[1]);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN, geometryIds[2]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -HALF_TREE_SCALE, 0.0f), DASHED_GREEN,
DASH_LENGTH, GAP_LENGTH, geometryIds[3]);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE, geometryIds[4]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, -HALF_TREE_SCALE), DASHED_BLUE,
DASH_LENGTH, GAP_LENGTH, geometryIds[5]);
// X center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[6]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[7]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[8]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[9]);
// Z center boundaries
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[10]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY,
geometryIds[11]);
geometryCache->renderLine(batch, glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[12]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[13]);
// Center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[14]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY,
geometryIds[15]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[16]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[17]);
geometryCache->renderWireCubeInstance(args, batch, GREY4);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;
transform = Transform().setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREEN);
transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, BLUE);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREY);
}
// Do some basic timing tests and report the results
void runTimingTests() {
// How long does it take to make a call to get the time?

View file

@ -15,14 +15,9 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <gpu/Batch.h>
#include <render/Forward.h>
class ShapeEntityItem;
class ShapeInfo;
void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
void runTimingTests();
void runUnitTests();

View file

@ -139,7 +139,7 @@ MyAvatar::MyAvatar(QThread* thread) :
_flyingHMDSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "flyingHMD", _flyingPrefHMD),
_avatarEntityCountSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "avatarEntityData" << "size", 0)
{
_clientTraitsHandler = std::unique_ptr<ClientTraitsHandler>(new ClientTraitsHandler(this));
_clientTraitsHandler.reset(new ClientTraitsHandler(this));
// give the pointer to our head to inherited _headData variable from AvatarData
_headData = new MyHead(this);
@ -807,46 +807,6 @@ void MyAvatar::simulate(float deltaTime) {
// before we perform rig animations and IK.
updateSensorToWorldMatrix();
// if we detect the hand controller is at rest, i.e. lying on the table, or the hand is too far away from the hmd
// disable the associated hand controller input.
{
// NOTE: all poses are in sensor space.
auto leftHandIter = _controllerPoseMap.find(controller::Action::LEFT_HAND);
if (leftHandIter != _controllerPoseMap.end() && leftHandIter->second.isValid()) {
_leftHandAtRestDetector.update(leftHandIter->second.getTranslation(), leftHandIter->second.getRotation());
if (_leftHandAtRestDetector.isAtRest()) {
leftHandIter->second.valid = false;
}
} else {
_leftHandAtRestDetector.invalidate();
}
auto rightHandIter = _controllerPoseMap.find(controller::Action::RIGHT_HAND);
if (rightHandIter != _controllerPoseMap.end() && rightHandIter->second.isValid()) {
_rightHandAtRestDetector.update(rightHandIter->second.getTranslation(), rightHandIter->second.getRotation());
if (_rightHandAtRestDetector.isAtRest()) {
rightHandIter->second.valid = false;
}
} else {
_rightHandAtRestDetector.invalidate();
}
auto headIter = _controllerPoseMap.find(controller::Action::HEAD);
// The 99th percentile man has a spine to fingertip to height ratio of 0.45. Lets increase that by about 10% to 0.5
// then measure the distance the center of the eyes to the finger tips. To come up with this ratio.
// From "The Measure of Man and Woman: Human Factors in Design, Revised Edition" by Alvin R. Tilley, Henry Dreyfuss Associates
const float MAX_HEAD_TO_HAND_DISTANCE_RATIO = 0.52f;
float maxHeadHandDistance = getUserHeight() * MAX_HEAD_TO_HAND_DISTANCE_RATIO;
if (glm::length(headIter->second.getTranslation() - leftHandIter->second.getTranslation()) > maxHeadHandDistance) {
leftHandIter->second.valid = false;
}
if (glm::length(headIter->second.getTranslation() - rightHandIter->second.getTranslation()) > maxHeadHandDistance) {
rightHandIter->second.valid = false;
}
}
{
PerformanceTimer perfTimer("skeleton");

View file

@ -0,0 +1,301 @@
//
// GraphicsEngine.cpp
//
// Created by Sam Gateau on 29/6/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GraphicsEngine.h"
#include <shared/GlobalAppProperties.h>
#include "WorldBox.h"
#include "LODManager.h"
#include <GeometryCache.h>
#include <TextureCache.h>
#include <FramebufferCache.h>
#include <UpdateSceneTask.h>
#include <RenderViewTask.h>
#include <SecondaryCamera.h>
#include "RenderEventHandler.h"
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/gl/GLBackend.h>
#include <display-plugins/DisplayPlugin.h>
#include <display-plugins/CompositorHelper.h>
#include <QMetaObject>
#include "ui/Stats.h"
#include "Application.h"
// Default constructor: all members are set up via in-class initializers (see GraphicsEngine.h);
// real initialization happens later in initializeGPU() / initializeRender().
GraphicsEngine::GraphicsEngine() {
}
// Destructor is intentionally empty: teardown is done explicitly in shutdown(),
// which must run while the GL context and render thread are still valid.
GraphicsEngine::~GraphicsEngine() {
}
// Creates the render-thread event handler and brings up the gpu::Context on the
// window's GL context. Must be called before initializeRender().
void GraphicsEngine::initializeGPU(GLWidget* glwidget) {
// The handler polls shouldPaint() and invokes render_performFrame() on its own thread.
_renderEventHandler = new RenderEventHandler(
[this]() { return this->shouldPaint(); },
[this]() { this->render_performFrame(); }
);
// Requires the window context, because that's what's used in the actual rendering
// and the GPU backend will make things like the VAO which cannot be shared across
// contexts
glwidget->makeCurrent();
gpu::Context::init<gpu::gl::GLBackend>();
// makeCurrent() is called again after backend init — presumably init can disturb
// the current context binding; confirm before removing.
glwidget->makeCurrent();
_gpuContext = std::make_shared<gpu::Context>();
// Give the texture cache access to the context so it can upload to the GPU.
DependencyManager::get<TextureCache>()->setGPUContext(_gpuContext);
}
// Builds the render engine's job graph (scene update, secondary camera, main view),
// loads its configuration, and attaches the scene. Requires a valid GL context
// (see the shape-pipeline note below), so call after initializeGPU().
// @param disableDeferred when true the view tasks are created with deferred rendering disabled.
void GraphicsEngine::initializeRender(bool disableDeferred) {
// Set up the render engine
render::CullFunctor cullFunctor = LODManager::shouldRender;
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
// Secondary camera is not built on Android.
#ifndef Q_OS_ANDROID
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor, !disableDeferred);
#endif
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, !disableDeferred, render::ItemKey::TAG_BITS_0, render::ItemKey::TAG_BITS_0);
_renderEngine->load();
_renderEngine->registerScene(_renderScene);
// Now that OpenGL is initialized, we are sure we have a valid context and can create the various pipeline shaders with success.
DependencyManager::get<GeometryCache>()->initializeShapePipelines();
}
// Releases the render thread: resumeThread() clears the handler's pending-render
// flag so the first Render event will be processed.
void GraphicsEngine::startup() {
static_cast<RenderEventHandler*>(_renderEventHandler)->resumeThread();
}
// Tears down the render stack. Order matters: flush scene transactions first,
// then shut the GPU context down, then drop the scene/engine, and finally
// schedule the event handler for deletion on its own thread.
void GraphicsEngine::shutdown() {
// The cleanup process enqueues the transactions but does not process them. Calling this here will force the actual
// removal of the items.
// See https://highfidelity.fogbugz.com/f/cases/5328
_renderScene->enqueueFrame(); // flush all the transactions
_renderScene->processTransactionQueue(); // process and apply deletions
_gpuContext->shutdown();
// shutdown render engine
_renderScene = nullptr;
_renderEngine = nullptr;
// deleteLater() (not delete): the handler lives on the render thread's event loop.
_renderEventHandler->deleteLater();
}
// Runs one frame of the render engine with the supplied args, lazily inserting
// the WorldBox render item into the scene on first use.
// Render-thread only (render_ prefix).
void GraphicsEngine::render_runRenderFrame(RenderArgs* renderArgs) {
PROFILE_RANGE(render, __FUNCTION__);
PerformanceTimer perfTimer("render");
// Make sure the WorldBox is in the scene
// For the record, this one RenderItem is the first one we created and added to the scene.
// We could move that code elsewhere but you know...
if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
render::Transaction transaction;
auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);
WorldBoxRenderData::_item = _renderScene->allocateID();
transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
_renderScene->enqueueTransaction(transaction);
}
{
// Hand the frame's args to the engine and execute the whole job graph.
_renderEngine->getRenderContext()->args = renderArgs;
_renderEngine->run();
}
}
// Target frame rate used when the display plugin asks to be throttled (e.g. HMD idle).
static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;
// Returns whether the render thread should produce a frame right now.
// Returns false only when the active display plugin is throttled and the last
// rendered frame is more recent than the throttled frame period.
bool GraphicsEngine::shouldPaint() const {
auto displayPlugin = qApp->getActiveDisplayPlugin();
#ifdef DEBUG_PAINT_DELAY
static uint64_t paintDelaySamples{ 0 };
static uint64_t paintDelayUsecs{ 0 };
paintDelayUsecs += displayPlugin->getPaintDelayUsecs();
static const int PAINT_DELAY_THROTTLE = 1000;
if (++paintDelaySamples % PAINT_DELAY_THROTTLE == 0) {
qCDebug(interfaceapp).nospace() <<
"Paint delay (" << paintDelaySamples << " samples): " <<
(float)paintDelaySamples / paintDelayUsecs << "us";
}
#endif
// Throttle if requested
//if (displayPlugin->isThrottled() && (_graphicsEngine._renderEventHandler->_lastTimeRendered.elapsed() < THROTTLED_SIM_FRAME_PERIOD_MS)) {
if ( displayPlugin->isThrottled() &&
(static_cast<RenderEventHandler*>(_renderEventHandler)->_lastTimeRendered.elapsed() < THROTTLED_SIM_FRAME_PERIOD_MS)) {
return false;
}
return true;
}
// Atomically claims the "render event pending" slot.
// Returns true exactly when no render event was pending and this call flipped
// the flag to true (so the caller may post one Render event without duplicates).
bool GraphicsEngine::checkPendingRenderEvent() {
    if (!_renderEventHandler) {
        return false;
    }
    auto handler = static_cast<RenderEventHandler*>(_renderEventHandler);
    bool wasClear = false;
    return handler->_pendingRenderEvent.compare_exchange_strong(wasClear, true);
}
// Produces and submits one complete frame: snapshots the app-provided render args
// under the mutex, resets the gpu context, renders the overlay, runs the main
// render frame into the final framebuffer, and hands the resulting gpu frame to
// the display plugin. Render-thread only (render_ prefix).
void GraphicsEngine::render_performFrame() {
// Some plugins process message events, allowing paintGL to be called reentrantly.
_renderFrameCount++;
auto lastPaintBegin = usecTimestampNow();
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
PerformanceTimer perfTimer("paintGL");
DisplayPluginPointer displayPlugin;
{
PROFILE_RANGE(render, "/getActiveDisplayPlugin");
displayPlugin = qApp->getActiveDisplayPlugin();
}
{
PROFILE_RANGE(render, "/pluginBeginFrameRender");
// If a display plugin loses its underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
QMetaObject::invokeMethod(qApp, "updateDisplayMode");
return;
}
}
// Local copies of the frame inputs, filled under the lock below so the app
// thread can keep editing _appRenderArgs while we render.
RenderArgs renderArgs;
glm::mat4 HMDSensorPose;
glm::mat4 eyeToWorld;
glm::mat4 sensorToWorld;
bool isStereo;
glm::mat4 stereoEyeOffsets[2];
glm::mat4 stereoEyeProjections[2];
{
QMutexLocker viewLocker(&_renderArgsMutex);
renderArgs = _appRenderArgs._renderArgs;
// don't render if there is no context.
if (!_appRenderArgs._renderArgs._context) {
return;
}
HMDSensorPose = _appRenderArgs._headPose;
eyeToWorld = _appRenderArgs._eyeToWorld;
sensorToWorld = _appRenderArgs._sensorToWorld;
isStereo = _appRenderArgs._isStereo;
for_each_eye([&](Eye eye) {
stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
});
}
{
PROFILE_RANGE(render, "/gpuContextReset");
getGPUContext()->beginFrame(_appRenderArgs._view, HMDSensorPose);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch("Application_render::gpuContextReset", getGPUContext(), [&](gpu::Batch& batch) {
batch.resetStages();
});
}
{
PROFILE_RANGE(render, "/renderOverlay");
PerformanceTimer perfTimer("renderOverlay");
// NOTE: There is no batch associated with this renderArgs
// the ApplicationOverlay class assumes its viewport is setup to be the device size
renderArgs._viewport = glm::ivec4(0, 0, qApp->getDeviceSize());
qApp->getApplicationOverlay().renderOverlay(&renderArgs);
}
{
PROFILE_RANGE(render, "/updateCompositor");
qApp->getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
}
gpu::FramebufferPointer finalFramebuffer;
QSize finalFramebufferSize;
{
PROFILE_RANGE(render, "/getOutputFramebuffer");
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
finalFramebufferSize = framebufferCache->getFrameBufferSize();
// Final framebuffer that will be handled to the display-plugin
finalFramebuffer = framebufferCache->getFramebuffer();
}
{
if (isStereo) {
renderArgs._context->enableStereo(true);
renderArgs._context->setStereoProjections(stereoEyeProjections);
renderArgs._context->setStereoViews(stereoEyeOffsets);
}
renderArgs._hudOperator = displayPlugin->getHUDOperator();
renderArgs._hudTexture = qApp->getApplicationOverlay().getOverlayTexture();
renderArgs._blitFramebuffer = finalFramebuffer;
render_runRenderFrame(&renderArgs);
}
auto frame = getGPUContext()->endFrame();
frame->frameIndex = _renderFrameCount;
frame->framebuffer = finalFramebuffer;
// The recycler returns the framebuffer to the cache once the plugin is done with it.
frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
auto frameBufferCache = DependencyManager::get<FramebufferCache>();
if (frameBufferCache) {
frameBufferCache->releaseFramebuffer(framebuffer);
}
};
// deliver final scene rendering commands to the display plugin
{
PROFILE_RANGE(render, "/pluginOutput");
PerformanceTimer perfTimer("pluginOutput");
_renderLoopCounter.increment();
displayPlugin->submitFrame(frame);
}
// Reset the framebuffer and stereo state
renderArgs._blitFramebuffer.reset();
renderArgs._context->enableStereo(false);
{
auto stats = Stats::getInstance();
if (stats) {
stats->setRenderDetails(renderArgs._details);
}
}
// Record how long this frame took for the scripting-facing timing interface.
uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
_frameTimingsScriptingInterface.addValue(lastPaintDuration);
}
// Lets the application thread mutate the shared AppRenderArgs under the same
// mutex the render thread takes in render_performFrame(), so edits never race a frame.
void GraphicsEngine::editRenderArgs(RenderArgsEditor editor) {
QMutexLocker renderLocker(&_renderArgsMutex);
editor(_appRenderArgs);
}

View file

@ -0,0 +1,90 @@
//
// GraphicsEngine.h
//
// Created by Sam Gateau on 29/6/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GraphicsEngine_h
#define hifi_GraphicsEngine_h
#include <gl/OffscreenGLCanvas.h>
#include <gl/GLWidget.h>
#include <qmutex.h>
#include <render/Engine.h>
#include <OctreeConstants.h>
#include <shared/RateCounter.h>
#include "FrameTimingsScriptingInterface.h"
// Per-frame inputs handed from the application thread to the render thread.
// Written via GraphicsEngine::editRenderArgs() and snapshotted (under a mutex)
// by render_performFrame().
struct AppRenderArgs {
render::Args _renderArgs;
glm::mat4 _eyeToWorld;
glm::mat4 _view;
glm::mat4 _eyeOffsets[2];     // per-eye offsets, indexed by Eye
glm::mat4 _eyeProjections[2]; // per-eye projections, indexed by Eye
glm::mat4 _headPose;          // HMD sensor pose passed to gpu::Context::beginFrame()
glm::mat4 _sensorToWorld;
float _sensorToWorldScale{ 1.0f };
bool _isStereo{ false };
};
using RenderArgsEditor = std::function <void(AppRenderArgs&)>;
// Owns the render scene, render engine, gpu context and the dedicated render
// thread (RenderEventHandler). Lifecycle: initializeGPU() -> initializeRender()
// -> startup(); shutdown() tears everything down explicitly.
class GraphicsEngine {
public:
GraphicsEngine();
~GraphicsEngine();
void initializeGPU(GLWidget*);
void initializeRender(bool disableDeferred);
void startup();
void shutdown();
render::ScenePointer getRenderScene() const { return _renderScene; }
render::EnginePointer getRenderEngine() const { return _renderEngine; }
gpu::ContextPointer getGPUContext() const { return _gpuContext; }
// Same as the one in application
bool shouldPaint() const;
// Atomically claims the pending-render flag; true means the caller may post a Render event.
bool checkPendingRenderEvent();
size_t getRenderFrameCount() const { return _renderFrameCount; }
float getRenderLoopRate() const { return _renderLoopCounter.rate(); }
// Feed Graphics Engine with new frame configuration
void editRenderArgs(RenderArgsEditor editor);
private:
// Thread specific calls (render_ prefix: render-thread only)
void render_performFrame();
void render_runRenderFrame(RenderArgs* renderArgs);
protected:
// Recursive: render_performFrame() holds it while editRenderArgs() may be re-entered.
mutable QMutex _renderArgsMutex{ QMutex::Recursive };
AppRenderArgs _appRenderArgs;
RateCounter<500> _renderLoopCounter;
uint32_t _renderFrameCount{ 0 };
render::ScenePointer _renderScene{ new render::Scene(glm::vec3(-0.5f * (float)TREE_SCALE), (float)TREE_SCALE) };
render::EnginePointer _renderEngine{ new render::RenderEngine() };
gpu::ContextPointer _gpuContext; // initialized during window creation
// Stored as QObject*; always a RenderEventHandler (see static_casts in the .cpp).
QObject* _renderEventHandler{ nullptr };
friend class RenderEventHandler;
FrameTimingsScriptingInterface _frameTimingsScriptingInterface;
friend class Application;
};
#endif // hifi_GraphicsEngine_h

View file

@ -0,0 +1,58 @@
//
// RenderEventHandler.cpp
//
// Created by Bradley Austin Davis on 29/6/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderEventHandler.h"
#include "Application.h"
#include <shared/GlobalAppProperties.h>
#include <shared/QtHelpers.h>
#include "CrashHandler.h"
// Stores the two callbacks and immediately migrates this object to a new,
// highest-priority "RenderThread"; initialize() runs as the thread's startup hook.
RenderEventHandler::RenderEventHandler(CheckCall checkCall, RenderCall renderCall) :
_checkCall(checkCall),
_renderCall(renderCall)
{
// Transfer to a new thread
moveToNewNamedThread(this, "RenderThread", [this](QThread* renderThread) {
// Forbid blocking calls from the render thread and start the frame timer.
hifi::qt::addBlockingForbiddenThread("Render", renderThread);
_lastTimeRendered.start();
}, std::bind(&RenderEventHandler::initialize, this), QThread::HighestPriority);
}
// Runs on the render thread at startup: names the thread for profiling and
// tags crash reports with its id.
void RenderEventHandler::initialize() {
setObjectName("Render");
PROFILE_SET_THREAD_NAME("Render");
setCrashAnnotation("render_thread_id", std::to_string((size_t)QThread::currentThreadId()));
}
// Clears the pending-render flag (it starts true) so the next Render event is accepted.
void RenderEventHandler::resumeThread() {
_pendingRenderEvent = false;
}
// Renders one frame if the check callback (GraphicsEngine::shouldPaint) allows it,
// updating the last-rendered timestamp used for throttling.
void RenderEventHandler::render() {
if (_checkCall()) {
_lastTimeRendered.start();
_renderCall();
}
}
// Qt event hook: consumes ApplicationEvent::Render events posted by the app,
// renders, then clears the pending flag so another Render event may be posted.
// All other events fall through to QObject.
bool RenderEventHandler::event(QEvent* event) {
switch ((int)event->type()) {
case ApplicationEvent::Render:
render();
_pendingRenderEvent.store(false);
return true;
default:
break;
}
return Parent::event(event);
}

View file

@ -0,0 +1,52 @@
//
// RenderEventHandler.h
//
// Created by Bradley Austin Davis on 29/6/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RenderEventHandler_h
#define hifi_RenderEventHandler_h
#include <QEvent>
#include <QElapsedTimer>
#include "gl/OffscreenGLCanvas.h"
// Custom Qt event types used by the application, allocated above QEvent::User.
enum ApplicationEvent {
// Execute a lambda function
Lambda = QEvent::User + 1,
// Trigger the next render
Render,
// Trigger the next idle
Idle,
};
// QObject living on a dedicated render thread; reacts to ApplicationEvent::Render
// by invoking the render callback when the check callback allows it.
class RenderEventHandler : public QObject {
using Parent = QObject;
Q_OBJECT
public:
using CheckCall = std::function <bool()>;
using RenderCall = std::function <void()>;
CheckCall _checkCall;   // gate: should a frame be produced now?
RenderCall _renderCall; // produces one frame
RenderEventHandler(CheckCall checkCall, RenderCall renderCall);
// Timestamp of the last rendered frame; read by GraphicsEngine::shouldPaint for throttling.
QElapsedTimer _lastTimeRendered;
// Starts true so no Render event is handled until resumeThread() is called.
std::atomic<bool> _pendingRenderEvent{ true };
void resumeThread();
private:
void initialize();
void render();
bool event(QEvent* event) override;
};
#endif // hifi_RenderEventHandler_h

View file

@ -0,0 +1,138 @@
//
// WorldBox.cpp
//
// Created by Sam Gateau on 01/07/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "WorldBox.h"
#include "OctreeConstants.h"
render::ItemID WorldBoxRenderData::_item{ render::Item::INVALID_ITEM_ID };
// render::Item payload specializations for the WorldBox: an opaque shape tagged
// for both main and secondary views, with an empty bound, drawn only when the
// WorldAxes menu option is checked.
namespace render {
template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape().withTagBits(ItemKey::TAG_BITS_0 | ItemKey::TAG_BITS_1); }
template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
if (Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
PerformanceTimer perfTimer("worldBox");
auto& batch = *args->_batch;
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
WorldBoxRenderData::renderWorldBox(args, batch);
}
}
}
// Draws the world-axes debug visualization into the given batch: solid/dashed
// axis lines out to +/-HALF_TREE_SCALE, grey boundary rectangles on each center
// plane, a wire cube, and small spheres as 1-meter markers.
// Assumes the batch already has a simple program bound (see payloadRender above).
void WorldBoxRenderData::renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show center of world
static const glm::vec3 RED(1.0f, 0.0f, 0.0f);
static const glm::vec3 GREEN(0.0f, 1.0f, 0.0f);
static const glm::vec3 BLUE(0.0f, 0.0f, 1.0f);
static const glm::vec3 GREY(0.5f, 0.5f, 0.5f);
static const glm::vec4 GREY4(0.5f, 0.5f, 0.5f, 1.0f);
static const glm::vec4 DASHED_RED(1.0f, 0.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_GREEN(0.0f, 1.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_BLUE(0.0f, 0.0f, 1.0f, 1.0f);
static const float DASH_LENGTH = 1.0f;
static const float GAP_LENGTH = 1.0f;
auto transform = Transform{};
// One geometry-cache id per line primitive below, allocated once per process.
static std::array<int, 18> geometryIds;
static std::once_flag initGeometryIds;
std::call_once(initGeometryIds, [&] {
for (size_t i = 0; i < geometryIds.size(); ++i) {
geometryIds[i] = geometryCache->allocateID();
}
});
batch.setModelTransform(transform);
// Axes: solid in the positive direction, dashed in the negative direction.
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED, geometryIds[0]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(-HALF_TREE_SCALE, 0.0f, 0.0f), DASHED_RED,
DASH_LENGTH, GAP_LENGTH, geometryIds[1]);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN, geometryIds[2]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -HALF_TREE_SCALE, 0.0f), DASHED_GREEN,
DASH_LENGTH, GAP_LENGTH, geometryIds[3]);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE, geometryIds[4]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, -HALF_TREE_SCALE), DASHED_BLUE,
DASH_LENGTH, GAP_LENGTH, geometryIds[5]);
// X center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[6]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[7]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[8]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[9]);
// Z center boundaries
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[10]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY,
geometryIds[11]);
geometryCache->renderLine(batch, glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[12]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[13]);
// Center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[14]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY,
geometryIds[15]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[16]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[17]);
geometryCache->renderWireCubeInstance(args, batch, GREY4);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;
transform = Transform().setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREEN);
transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, BLUE);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREY);
}

View file

@ -0,0 +1,43 @@
//
// WorldBox.h
//
// Created by Sam Gateau on 01/07/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_WorldBox_h
#define hifi_WorldBox_h

#include <PerfStat.h>
#include <gpu/Batch.h>
#include <render/Forward.h>
#include <render/Item.h>
#include <GeometryCache.h>

#include "Menu.h"

// Render-payload data object for the world box (the world-axis / boundary
// debug visualization). Instances are handed to the render scene as a
// render::Payload; the template specializations declared in the render
// namespace below tell the scene how to key, bound, and draw the item.
class WorldBoxRenderData {
public:
    typedef render::Payload<WorldBoxRenderData> Payload;
    typedef Payload::DataPointer Pointer;

    int _val = 0; // NOTE(review): purpose not evident from this header — confirm against the .cpp
    static render::ItemID _item; // unique WorldBoxRenderData

    // Issues the batch commands that draw the world box (axis lines, boundary
    // lines, and marker spheres) for the given render args.
    static void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
};

namespace render {
    // render::Payload hooks so the scene can query and draw a WorldBoxRenderData item.
    template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff);
    template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff);
    template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args);
}

#endif // hifi_WorldBox_h

View file

@ -149,7 +149,7 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
uint32_t numIndices = (uint32_t)meshPart.triangleIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
for (uint32_t j = 0; j < numIndices; j += TRIANGLE_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.triangleIndices[j]];
@ -170,7 +170,7 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
numIndices = (uint32_t)meshPart.quadIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % QUAD_STRIDE == 0);
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
for (uint32_t j = 0; j < numIndices; j += QUAD_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.quadIndices[j]];
@ -305,7 +305,7 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
auto numIndices = meshPart.triangleIndices.count();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices% TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
auto indexItr = meshPart.triangleIndices.cbegin();
while (indexItr != meshPart.triangleIndices.cend()) {

View file

@ -32,3 +32,7 @@ void KeyboardScriptingInterface::setPassword(bool password) {
void KeyboardScriptingInterface::loadKeyboardFile(const QString& keyboardFile) {
DependencyManager::get<Keyboard>()->loadKeyboardFile(keyboardFile);
}
bool KeyboardScriptingInterface::getUse3DKeyboard() {
return DependencyManager::get<Keyboard>()->getUse3DKeyboard();
}

View file

@ -30,6 +30,7 @@ class KeyboardScriptingInterface : public QObject, public Dependency {
Q_OBJECT
Q_PROPERTY(bool raised READ isRaised WRITE setRaised)
Q_PROPERTY(bool password READ isPassword WRITE setPassword)
Q_PROPERTY(bool use3DKeyboard READ getUse3DKeyboard);
public:
Q_INVOKABLE void loadKeyboardFile(const QString& string);
@ -39,5 +40,7 @@ private:
bool isPassword();
void setPassword(bool password);
bool getUse3DKeyboard();
};
#endif

View file

@ -55,8 +55,6 @@ ApplicationOverlay::~ApplicationOverlay() {
// Renders the overlays either to a texture or to the screen
void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
PROFILE_RANGE(render, __FUNCTION__);
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");
buildFramebufferObject();
if (!_overlayFramebuffer) {
@ -83,7 +81,9 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
// Now render the overlay components together into a single texture
renderDomainConnectionStatusBorder(renderArgs); // renders the connected domain line
renderOverlays(renderArgs); // renders Scripts Overlay and AudioScope
#if !defined(DISABLE_QML)
renderQmlUi(renderArgs); // renders a unit quad with the QML UI texture, and the text overlays from scripts
#endif
});
renderArgs->_batch = nullptr; // so future users of renderArgs don't try to use our batch

View file

@ -60,7 +60,7 @@ static const float MALLET_TOUCH_Y_OFFSET = 0.050f;
static const float MALLET_Y_OFFSET = 0.160f;
static const glm::quat MALLET_ROTATION_OFFSET{0.70710678f, 0.0f, -0.70710678f, 0.0f};
static const glm::vec3 MALLET_MODEL_DIMENSIONS{0.03f, MALLET_LENGTH, 0.03f};
static const glm::vec3 MALLET_MODEL_DIMENSIONS{0.01f, MALLET_LENGTH, 0.01f};
static const glm::vec3 MALLET_POSITION_OFFSET{0.0f, -MALLET_Y_OFFSET / 2.0f, 0.0f};
static const glm::vec3 MALLET_TIP_OFFSET{0.0f, MALLET_LENGTH - MALLET_TOUCH_Y_OFFSET, 0.0f};
@ -241,6 +241,17 @@ void Keyboard::registerKeyboardHighlighting() {
selection->enableListToScene(KEY_PRESSED_HIGHLIGHT);
}
// Thread-safe read of the "use 3D keyboard" setting: the value is fetched
// under the read side of _use3DKeyboardLock.
bool Keyboard::getUse3DKeyboard() const {
    return _use3DKeyboardLock.resultWithReadLock<bool>([this] {
        return _use3DKeyboard.get();
    });
}
void Keyboard::setUse3DKeyboard(bool use) {
_use3DKeyboardLock.withWriteLock([&] {
_use3DKeyboard.set(use);
});
}
void Keyboard::createKeyboard() {
auto pointerManager = DependencyManager::get<PointerManager>();

View file

@ -23,6 +23,7 @@
#include <Sound.h>
#include <AudioInjector.h>
#include <shared/ReadWriteLockable.h>
#include <SettingHandle.h>
#include "ui/overlays/Overlay.h"
@ -97,6 +98,9 @@ public:
bool isPassword() const;
void setPassword(bool password);
bool getUse3DKeyboard() const;
void setUse3DKeyboard(bool use);
void loadKeyboardFile(const QString& keyboardFile);
QVector<OverlayID> getKeysID();
@ -143,6 +147,9 @@ private:
SharedSoundPointer _keySound { nullptr };
std::shared_ptr<QTimer> _layerSwitchTimer { std::make_shared<QTimer>() };
mutable ReadWriteLockable _use3DKeyboardLock;
Setting::Handle<bool> _use3DKeyboard { "use3DKeyboard", true };
QString _typedCharacters;
TextDisplay _textDisplay;
Anchor _anchor;

View file

@ -74,6 +74,7 @@ void OverlayConductor::centerUI() {
}
void OverlayConductor::update(float dt) {
#if !defined(DISABLE_QML)
auto offscreenUi = DependencyManager::get<OffscreenUi>();
if (!offscreenUi) {
return;
@ -115,4 +116,5 @@ void OverlayConductor::update(float dt) {
if (shouldRecenter && !_suppressedByHead) {
centerUI();
}
#endif
}

View file

@ -25,8 +25,10 @@ private:
bool headOutsideOverlay() const;
bool updateAvatarIsAtRest();
#if !defined(DISABLE_QML)
bool _suppressedByHead { false };
bool _hmdMode { false };
#endif
// used by updateAvatarIsAtRest
uint64_t _desiredAtRestTimer { 0 };

View file

@ -24,6 +24,7 @@
#include "Snapshot.h"
#include "SnapshotAnimated.h"
#include "UserActivityLogger.h"
#include "ui/Keyboard.h"
void setupPreferences() {
auto preferences = DependencyManager::get<Preferences>();
@ -119,6 +120,12 @@ void setupPreferences() {
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Use reticle cursor instead of arrow", getter, setter));
}
{
auto getter = []()->bool { return DependencyManager::get<Keyboard>()->getUse3DKeyboard(); };
auto setter = [](bool value) { DependencyManager::get<Keyboard>()->setUse3DKeyboard(value); };
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Use Virtual Keyboard", getter, setter));
}
{
auto getter = []()->bool { return qApp->getMiniTabletEnabled(); };
auto setter = [](bool value) { qApp->setMiniTabletEnabled(value); };

View file

@ -232,11 +232,15 @@ OverlayID Overlays::addOverlay(const QString& type, const QVariant& properties)
*/
if (type == ImageOverlay::TYPE) {
#if !defined(DISABLE_QML)
thisOverlay = Overlay::Pointer(new ImageOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
#endif
} else if (type == Image3DOverlay::TYPE || type == "billboard") { // "billboard" for backwards compatibility
thisOverlay = Overlay::Pointer(new Image3DOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
} else if (type == TextOverlay::TYPE) {
#if !defined(DISABLE_QML)
thisOverlay = Overlay::Pointer(new TextOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
#endif
} else if (type == Text3DOverlay::TYPE) {
thisOverlay = Overlay::Pointer(new Text3DOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
} else if (type == Shape3DOverlay::TYPE) {

View file

@ -15,7 +15,7 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <FBXReader.h>
#include <FBXSerializer.h>
#include "AnimPose.h"
class AnimSkeleton {

View file

@ -71,7 +71,7 @@ void AnimationReader::run() {
// Parse the FBX directly from the QNetworkReply
HFMModel::Pointer hfmModel;
if (_url.path().toLower().endsWith(".fbx")) {
hfmModel.reset(readFBX(_data, QVariantHash(), _url.path()));
hfmModel = FBXSerializer().read(_data, QVariantHash(), _url.path());
} else {
QString errorStr("usupported format");
emit onError(299, errorStr);

View file

@ -17,7 +17,7 @@
#include <QtScript/QScriptValue>
#include <DependencyManager.h>
#include <FBXReader.h>
#include <FBXSerializer.h>
#include <ResourceCache.h>
class Animation;

View file

@ -15,7 +15,7 @@
#include <QObject>
#include <QScriptable>
#include <FBXReader.h>
#include <FBXSerializer.h>
class QScriptEngine;

View file

@ -302,7 +302,6 @@ void AudioClient::customDeleter() {
#if defined(Q_OS_ANDROID)
_shouldRestartInputSetup = false;
#endif
stop();
deleteLater();
}

View file

@ -51,7 +51,7 @@
#include <xmmintrin.h>
// convert float to int using round-to-nearest
FORCEINLINE static int32_t floatToInt(float x) {
return _mm_cvt_ss2si(_mm_load_ss(&x));
return _mm_cvt_ss2si(_mm_set_ss(x));
}
#else
@ -150,7 +150,7 @@ static const int IEEE754_EXPN_BIAS = 127;
//
// Peak detection and -log2(x) for float input (mono)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
// x > 2^LOG2_HEADROOM returns 0
//
FORCEINLINE static int32_t peaklog2(float* input) {
@ -161,12 +161,12 @@ FORCEINLINE static int32_t peaklog2(float* input) {
uint32_t peak = u & IEEE754_FABS_MASK;
// split into e and x - 1.0
int e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << IEEE754_EXPN_BITS) & 0x7fffffff;
// saturate
if (e > 31) {
return 0x7fffffff;
// saturate when e > 31 or e < 0
if ((uint32_t)e > 31) {
return 0x7fffffff & ~(e >> 31);
}
int k = x >> (31 - LOG2_TABBITS);
@ -186,7 +186,7 @@ FORCEINLINE static int32_t peaklog2(float* input) {
//
// Peak detection and -log2(x) for float input (stereo)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
// x > 2^LOG2_HEADROOM returns 0
//
FORCEINLINE static int32_t peaklog2(float* input0, float* input1) {
@ -200,12 +200,12 @@ FORCEINLINE static int32_t peaklog2(float* input0, float* input1) {
uint32_t peak = MAX(u0, u1);
// split into e and x - 1.0
int e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << IEEE754_EXPN_BITS) & 0x7fffffff;
// saturate
if (e > 31) {
return 0x7fffffff;
// saturate when e > 31 or e < 0
if ((uint32_t)e > 31) {
return 0x7fffffff & ~(e >> 31);
}
int k = x >> (31 - LOG2_TABBITS);
@ -225,7 +225,7 @@ FORCEINLINE static int32_t peaklog2(float* input0, float* input1) {
//
// Peak detection and -log2(x) for float input (quad)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
// x > 2^LOG2_HEADROOM returns 0
//
FORCEINLINE static int32_t peaklog2(float* input0, float* input1, float* input2, float* input3) {
@ -243,12 +243,12 @@ FORCEINLINE static int32_t peaklog2(float* input0, float* input1, float* input2,
uint32_t peak = MAX(MAX(u0, u1), MAX(u2, u3));
// split into e and x - 1.0
int e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << IEEE754_EXPN_BITS) & 0x7fffffff;
// saturate
if (e > 31) {
return 0x7fffffff;
// saturate when e > 31 or e < 0
if ((uint32_t)e > 31) {
return 0x7fffffff & ~(e >> 31);
}
int k = x >> (31 - LOG2_TABBITS);

View file

@ -447,9 +447,9 @@ AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const A
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit to 4 octaves
const int pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = SAMPLE_RATE / pitch;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto audioData = sound->getAudioData();
auto numChannels = audioData->getNumChannels();
@ -499,9 +499,9 @@ AudioInjectorPointer AudioInjector::playSound(AudioDataPointer audioData, const
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit to 4 octaves
const int pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = SAMPLE_RATE / pitch;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();

View file

@ -2103,8 +2103,9 @@ void AvatarData::setJointMappingsFromNetworkReply() {
// before we process this update, make sure that the skeleton model URL hasn't changed
// since we made the FST request
if (networkReply->url() != _skeletonModelURL) {
if (networkReply->error() != QNetworkReply::NoError || networkReply->url() != _skeletonModelURL) {
qCDebug(avatars) << "Refusing to set joint mappings for FST URL that does not match the current URL";
networkReply->deleteLater();
return;
}

View file

@ -1490,7 +1490,7 @@ protected:
bool _isClientAvatar { false };
// null unless MyAvatar or ScriptableAvatar sending traits data to mixer
std::unique_ptr<ClientTraitsHandler> _clientTraitsHandler;
std::unique_ptr<ClientTraitsHandler, LaterDeleter> _clientTraitsHandler;
template <typename T, typename F>
T readLockWithNamedJointIndex(const QString& name, const T& defaultValue, F f) const {

View file

@ -22,7 +22,7 @@ ClientTraitsHandler::ClientTraitsHandler(AvatarData* owningAvatar) :
_owningAvatar(owningAvatar)
{
auto nodeList = DependencyManager::get<NodeList>();
QObject::connect(nodeList.data(), &NodeList::nodeAdded, [this](SharedNodePointer addedNode){
QObject::connect(nodeList.data(), &NodeList::nodeAdded, this, [this](SharedNodePointer addedNode) {
if (addedNode->getType() == NodeType::AvatarMixer) {
resetForNewMixer();
}

View file

@ -27,7 +27,7 @@
#include <PathUtils.h>
#include <FBXReader.h>
#include <FBXSerializer.h>
#include <FBXWriter.h>
#include "ModelBakingLoggingCategory.h"
@ -187,10 +187,10 @@ void FBXBaker::importScene() {
return;
}
FBXReader reader;
FBXSerializer fbxSerializer;
qCDebug(model_baking) << "Parsing" << _modelURL;
_rootNode = reader._rootNode = reader.parseFBX(&fbxFile);
_rootNode = fbxSerializer._rootNode = fbxSerializer.parseFBX(&fbxFile);
#ifdef HIFI_DUMP_FBX
{
@ -206,8 +206,8 @@ void FBXBaker::importScene() {
}
#endif
_hfmModel = reader.extractHFMModel({}, _modelURL.toString());
_textureContentMap = reader._textureContent;
_hfmModel = fbxSerializer.extractHFMModel({}, _modelURL.toString());
_textureContentMap = fbxSerializer._textureContent;
}
void FBXBaker::rewriteAndBakeSceneModels() {
@ -232,7 +232,7 @@ void FBXBaker::rewriteAndBakeSceneModels() {
if (objectChild.name == "Geometry") {
// TODO Pull this out of _hfmModel instead so we don't have to reprocess it
auto extractedMesh = FBXReader::extractMesh(objectChild, meshIndex, false);
auto extractedMesh = FBXSerializer::extractMesh(objectChild, meshIndex, false);
// Callback to get MaterialID
GetMaterialIDCallback materialIDcallback = [&extractedMesh](int partIndex) {

View file

@ -13,7 +13,6 @@
#include <PathUtils.h>
#include <FBXReader.h>
#include <FBXWriter.h>
#ifdef _WIN32

View file

@ -14,7 +14,7 @@
#include <PathUtils.h>
#include <NetworkAccessManager.h>
#include "OBJReader.h"
#include "OBJSerializer.h"
#include "FBXWriter.h"
const double UNIT_SCALE_FACTOR = 100.0;
@ -143,9 +143,10 @@ void OBJBaker::bakeOBJ() {
QByteArray objData = objFile.readAll();
bool combineParts = true; // set true so that OBJReader reads material info from material library
OBJReader reader;
auto geometry = reader.readOBJ(objData, QVariantHash(), combineParts, _modelURL);
OBJSerializer serializer;
QVariantHash mapping;
mapping["combineParts"] = true; // set true so that OBJSerializer reads material info from material library
auto geometry = serializer.read(objData, mapping, _modelURL);
// Write OBJ Data as FBX tree nodes
createFBXNodeTree(_rootNode, *geometry);
@ -219,7 +220,7 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
FBXNode materialNode;
materialNode.name = MATERIAL_NODE_NAME;
if (hfmModel.materials.size() == 1) {
// case when no material information is provided, OBJReader considers it as a single default material
// case when no material information is provided, OBJSerializer considers it as a single default material
for (auto& materialID : hfmModel.materials.keys()) {
setMaterialNodeProperties(materialNode, materialID, hfmModel);
}

View file

@ -14,8 +14,6 @@ void main(void) {
ivec2 texCoord = ivec2(floor(varTexCoord0 * vec2(textureData.textureSize)));
texCoord.x /= 2;
int row = int(floor(gl_FragCoord.y));
if (row % 2 > 0) {
texCoord.x += (textureData.textureSize.x / 2);
}
texCoord.x += int(row % 2 > 0) * (textureData.textureSize.x / 2);
outFragColor = vec4(pow(texelFetch(colorMap, texCoord, 0).rgb, vec3(2.2)), 1.0);
}

View file

@ -534,18 +534,26 @@ void OpenGLDisplayPlugin::updateFrameData() {
}
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> OpenGLDisplayPlugin::getHUDOperator() {
return [this](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (_hudPipeline && hudTexture) {
auto hudPipeline = _hudPipeline;
auto hudMirrorPipeline = _mirrorHUDPipeline;
auto hudStereo = isStereo();
auto hudCompositeFramebufferSize = _compositeFramebuffer->getSize();
std::array<glm::ivec4, 2> hudEyeViewports;
for_each_eye([&](Eye eye) {
hudEyeViewports[eye] = eyeViewport(eye);
});
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.enableStereo(false);
batch.setPipeline(mirror ? _mirrorHUDPipeline : _hudPipeline);
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
batch.setResourceTexture(0, hudTexture);
if (isStereo()) {
if (hudStereo) {
for_each_eye([&](Eye eye) {
batch.setViewportTransform(eyeViewport(eye));
batch.setViewportTransform(hudEyeViewports[eye]);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), _compositeFramebuffer->getSize()));
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
}

View file

@ -9,7 +9,7 @@ layout(location=0) out vec4 outFragColor;
float sRGBFloatToLinear(float value) {
const float SRGB_ELBOW = 0.04045;
return (value <= SRGB_ELBOW) ? value / 12.92 : pow((value + 0.055) / 1.055, 2.4);
return mix(pow((value + 0.055) / 1.055, 2.4), value / 12.92, float(value <= SRGB_ELBOW));
}
vec3 colorToLinearRGB(vec3 srgb) {

View file

@ -420,18 +420,26 @@ void HmdDisplayPlugin::HUDRenderer::updatePipeline() {
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::HUDRenderer::render(HmdDisplayPlugin& plugin) {
updatePipeline();
return [this](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (pipeline && hudTexture) {
batch.setPipeline(pipeline);
batch.setInputFormat(format);
gpu::BufferView posView(vertices, VERTEX_OFFSET, vertices->getSize(), VERTEX_STRIDE, format->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(vertices, TEXTURE_OFFSET, vertices->getSize(), VERTEX_STRIDE, format->getAttributes().at(gpu::Stream::TEXCOORD)._element);
auto hudPipeline = pipeline;
auto hudFormat = format;
auto hudVertices = vertices;
auto hudIndices = indices;
auto hudUniformBuffer = uniformsBuffer;
auto hudUniforms = uniforms;
auto hudIndexCount = indexCount;
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.setPipeline(hudPipeline);
batch.setInputFormat(hudFormat);
gpu::BufferView posView(hudVertices, VERTEX_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(hudVertices, TEXTURE_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
batch.setInputBuffer(gpu::Stream::POSITION, posView);
batch.setInputBuffer(gpu::Stream::TEXCOORD, uvView);
batch.setIndexBuffer(gpu::UINT16, indices, 0);
uniformsBuffer->setSubData(0, uniforms);
batch.setUniformBuffer(0, uniformsBuffer);
batch.setIndexBuffer(gpu::UINT16, hudIndices, 0);
hudUniformBuffer->setSubData(0, hudUniforms);
batch.setUniformBuffer(0, hudUniformBuffer);
auto compositorHelper = DependencyManager::get<CompositorHelper>();
glm::mat4 modelTransform = compositorHelper->getUiTransform();
@ -441,7 +449,7 @@ std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDis
batch.setModelTransform(modelTransform);
batch.setResourceTexture(0, hudTexture);
batch.drawIndexed(gpu::TRIANGLES, indexCount);
batch.drawIndexed(gpu::TRIANGLES, hudIndexCount);
}
};
}

View file

@ -126,7 +126,7 @@ void MaterialEntityRenderer::doRender(RenderArgs* args) {
batch.setModelTransform(renderTransform);
if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {
drawMaterial->setTextureTransforms(textureTransform);
drawMaterial->setTextureTransforms(textureTransform, MaterialMappingMode::UV, true);
// bind the material
RenderPipelines::bindMaterial(drawMaterial, batch, args->_enableTexturing);

View file

@ -421,7 +421,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
uint32_t numIndices = (uint32_t)meshPart.triangleIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
for (uint32_t j = 0; j < numIndices; j += TRIANGLE_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.triangleIndices[j]];
@ -442,7 +442,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
numIndices = (uint32_t)meshPart.quadIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % QUAD_STRIDE == 0);
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
for (uint32_t j = 0; j < numIndices; j += QUAD_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.quadIndices[j]];
@ -595,7 +595,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
if (partItr->_topology == graphics::Mesh::TRIANGLES) {
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
auto indexItr = indices.cbegin<const gpu::BufferView::Index>() + partItr->_startIndex;
auto indexEnd = indexItr + numIndices;
@ -652,7 +652,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
if (partItr->_topology == graphics::Mesh::TRIANGLES) {
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices% TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer
auto indexItr = indices.cbegin<const gpu::BufferView::Index>() + partItr->_startIndex;
auto indexEnd = indexItr + numIndices;

View file

@ -80,10 +80,11 @@ float interpolate3Points(float y1, float y2, float y3, float u) {
halfSlope = (y3 - y1) / 2.0f;
float slope12 = y2 - y1;
float slope23 = y3 - y2;
if (abs(halfSlope) > abs(slope12)) {
halfSlope = slope12;
} else if (abs(halfSlope) > abs(slope23)) {
halfSlope = slope23;
{
float check = float(abs(halfSlope) > abs(slope12));
halfSlope = mix(halfSlope, slope12, check);
halfSlope = mix(halfSlope, slope23, (1.0 - check) * float(abs(halfSlope) > abs(slope23)));
}
}

View file

@ -385,6 +385,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_MAPPING_SCALE, materialMappingScale);
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_MAPPING_ROT, materialMappingRot);
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_DATA, materialData);
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_REPEAT, materialRepeat);
CHECK_PROPERTY_CHANGE(PROP_VISIBLE_IN_SECONDARY_CAMERA, isVisibleInSecondaryCamera);
CHECK_PROPERTY_CHANGE(PROP_PARTICLE_SPIN, particleSpin);
CHECK_PROPERTY_CHANGE(PROP_SPIN_SPREAD, spinSpread);
@ -754,7 +755,8 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* Otherwise the property value is parsed as an unsigned integer, specifying the mesh index to modify. Invalid values are
* parsed to <code>0</code>.
* @property {string} materialMappingMode="uv" - How the material is mapped to the entity. Either <code>"uv"</code> or
* <code>"projected"</code>. <em>Currently, only <code>"uv"</code> is supported.
* <code>"projected"</code>. In "uv" mode, the material will be evaluated within the UV space of the mesh it is applied to. In
* "projected" mode, the 3D transform of the Material Entity will be used to evaluate the texture coordinates for the material.
* @property {Vec2} materialMappingPos=0,0 - Offset position in UV-space of the top left of the material, range
* <code>{ x: 0, y: 0 }</code> &ndash; <code>{ x: 1, y: 1 }</code>.
* @property {Vec2} materialMappingScale=1,1 - How much to scale the material within the parent's UV-space.
@ -762,6 +764,8 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* @property {string} materialData="" - Used to store {@link MaterialResource} data as a JSON string. You can use
* <code>JSON.parse()</code> to parse the string into a JavaScript object which you can manipulate the properties of, and
* use <code>JSON.stringify()</code> to convert the object into a string to put in the property.
* @property {boolean} materialRepeat=true - If true, the material will repeat. If false, fragments outside of texCoord 0 - 1 will be discarded.
* Works in both "uv" and "projected" modes.
* @example <caption>Color a sphere using a Material entity.</caption>
* var entityID = Entities.addEntity({
* type: "Sphere",
@ -1485,6 +1489,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_MAPPING_SCALE, materialMappingScale);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_MAPPING_ROT, materialMappingRot);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_DATA, materialData);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_REPEAT, materialRepeat);
}
/**jsdoc
@ -1666,6 +1671,7 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialMappingScale, vec2, setMaterialMappingScale);
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialMappingRot, float, setMaterialMappingRot);
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialData, QString, setMaterialData);
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialRepeat, bool, setMaterialRepeat);
COPY_PROPERTY_FROM_QSCRIPTVALUE(isVisibleInSecondaryCamera, bool, setIsVisibleInSecondaryCamera);
COPY_PROPERTY_FROM_QSCRIPTVALUE(particleSpin, float, setParticleSpin);
COPY_PROPERTY_FROM_QSCRIPTVALUE(spinSpread, float, setSpinSpread);
@ -2061,6 +2067,7 @@ void EntityItemProperties::entityPropertyFlagsFromScriptValue(const QScriptValue
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_MAPPING_SCALE, MaterialMappingScale, materialMappingScale, vec2);
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_MAPPING_ROT, MaterialMappingRot, materialMappingRot, float);
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_DATA, MaterialData, materialData, QString);
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_REPEAT, MaterialRepeat, materialRepeat, bool);
ADD_PROPERTY_TO_MAP(PROP_VISIBLE_IN_SECONDARY_CAMERA, IsVisibleInSecondaryCamera, isVisibleInSecondaryCamera, bool);
@ -2511,6 +2518,7 @@ OctreeElement::AppendState EntityItemProperties::encodeEntityEditPacket(PacketTy
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_SCALE, properties.getMaterialMappingScale());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_ROT, properties.getMaterialMappingRot());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_DATA, properties.getMaterialData());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_REPEAT, properties.getMaterialRepeat());
}
APPEND_ENTITY_PROPERTY(PROP_NAME, properties.getName());
@ -2898,6 +2906,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_MAPPING_SCALE, vec2, setMaterialMappingScale);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_MAPPING_ROT, float, setMaterialMappingRot);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_DATA, QString, setMaterialData);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_REPEAT, bool, setMaterialRepeat);
}
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_NAME, QString, setName);
@ -3137,6 +3146,7 @@ void EntityItemProperties::markAllChanged() {
_materialMappingScaleChanged = true;
_materialMappingRotChanged = true;
_materialDataChanged = true;
_materialRepeatChanged = true;
// Certifiable Properties
_itemNameChanged = true;
@ -3587,6 +3597,9 @@ QList<QString> EntityItemProperties::listChangedProperties() {
if (materialDataChanged()) {
out += "materialData";
}
if (materialRepeatChanged()) {
out += "materialRepeat";
}
if (isVisibleInSecondaryCameraChanged()) {
out += "isVisibleInSecondaryCamera";
}

View file

@ -241,6 +241,7 @@ public:
DEFINE_PROPERTY_REF(PROP_MATERIAL_MAPPING_SCALE, MaterialMappingScale, materialMappingScale, glm::vec2, glm::vec2(1.0f));
DEFINE_PROPERTY_REF(PROP_MATERIAL_MAPPING_ROT, MaterialMappingRot, materialMappingRot, float, 0);
DEFINE_PROPERTY_REF(PROP_MATERIAL_DATA, MaterialData, materialData, QString, "");
DEFINE_PROPERTY_REF(PROP_MATERIAL_REPEAT, MaterialRepeat, materialRepeat, bool, true);
DEFINE_PROPERTY(PROP_VISIBLE_IN_SECONDARY_CAMERA, IsVisibleInSecondaryCamera, isVisibleInSecondaryCamera, bool, ENTITY_ITEM_DEFAULT_VISIBLE_IN_SECONDARY_CAMERA);

View file

@ -175,6 +175,7 @@ QDebug& operator<<(QDebug& dbg, const EntityPropertyFlags& f) {
result = f.getHasProperty(PROP_MATERIAL_MAPPING_SCALE) ? result + "materialMappingScale " : result;
result = f.getHasProperty(PROP_MATERIAL_MAPPING_ROT) ? result + "materialMappingRot " : result;
result = f.getHasProperty(PROP_MATERIAL_DATA) ? result + "materialData " : result;
result = f.getHasProperty(PROP_MATERIAL_REPEAT) ? result + "materialRepeat " : result;
result = f.getHasProperty(PROP_VISIBLE_IN_SECONDARY_CAMERA) ? result + "visibleInSecondaryCamera " : result;
result = f.getHasProperty(PROP_PARTICLE_SPIN) ? result + "particleSpin " : result;
result = f.getHasProperty(PROP_SPIN_START) ? result + "spinStart " : result;

View file

@ -275,6 +275,8 @@ enum EntityPropertyList {
PROP_GRAB_EQUIPPABLE_INDICATOR_SCALE,
PROP_GRAB_EQUIPPABLE_INDICATOR_OFFSET,
PROP_MATERIAL_REPEAT,
////////////////////////////////////////////////////////////////////////////////////////////////////
// ATTENTION: add new properties to end of list just ABOVE this line
PROP_AFTER_LAST_ITEM,

View file

@ -486,6 +486,8 @@ QUuid EntityScriptingInterface::addEntity(const EntityItemProperties& properties
propertiesWithSimID.setLastEditedBy(sessionID);
propertiesWithSimID.setActionData(QByteArray());
bool scalesWithParent = propertiesWithSimID.getScalesWithParent();
propertiesWithSimID = convertPropertiesFromScriptSemantics(propertiesWithSimID, scalesWithParent);
@ -830,6 +832,8 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
properties.setClientOnly(entity->getClientOnly());
properties.setOwningAvatarID(entity->getOwningAvatarID());
properties.setActionData(entity->getDynamicData());
// make sure the properties has a type, so that the encode can know which properties to include
properties.setType(entity->getType());

View file

@ -41,6 +41,7 @@ EntityItemProperties MaterialEntityItem::getProperties(const EntityPropertyFlags
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialMappingScale, getMaterialMappingScale);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialMappingRot, getMaterialMappingRot);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialData, getMaterialData);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialRepeat, getMaterialRepeat);
return properties;
}
@ -55,6 +56,7 @@ bool MaterialEntityItem::setProperties(const EntityItemProperties& properties) {
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialMappingScale, setMaterialMappingScale);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialMappingRot, setMaterialMappingRot);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialData, setMaterialData);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialRepeat, setMaterialRepeat);
if (somethingChanged) {
bool wantDebug = false;
@ -85,6 +87,7 @@ int MaterialEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* da
READ_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_SCALE, glm::vec2, setMaterialMappingScale);
READ_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_ROT, float, setMaterialMappingRot);
READ_ENTITY_PROPERTY(PROP_MATERIAL_DATA, QString, setMaterialData);
READ_ENTITY_PROPERTY(PROP_MATERIAL_REPEAT, bool, setMaterialRepeat);
return bytesRead;
}
@ -99,6 +102,7 @@ EntityPropertyFlags MaterialEntityItem::getEntityProperties(EncodeBitstreamParam
requestedProperties += PROP_MATERIAL_MAPPING_SCALE;
requestedProperties += PROP_MATERIAL_MAPPING_ROT;
requestedProperties += PROP_MATERIAL_DATA;
requestedProperties += PROP_MATERIAL_REPEAT;
return requestedProperties;
}
@ -119,6 +123,7 @@ void MaterialEntityItem::appendSubclassData(OctreePacketData* packetData, Encode
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_SCALE, getMaterialMappingScale());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_ROT, getMaterialMappingRot());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_DATA, getMaterialData());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_REPEAT, getMaterialRepeat());
}
void MaterialEntityItem::debugDump() const {
@ -128,6 +133,7 @@ void MaterialEntityItem::debugDump() const {
qCDebug(entities) << " material url:" << _materialURL;
qCDebug(entities) << " current material name:" << _currentMaterialName.c_str();
qCDebug(entities) << " material mapping mode:" << _materialMappingMode;
qCDebug(entities) << " material repeat:" << _materialRepeat;
qCDebug(entities) << " priority:" << _priority;
qCDebug(entities) << " parent material name:" << _parentMaterialName;
qCDebug(entities) << " material mapping pos:" << _materialMappingPos;
@ -140,7 +146,12 @@ void MaterialEntityItem::debugDump() const {
}
void MaterialEntityItem::setUnscaledDimensions(const glm::vec3& value) {
EntityItem::setUnscaledDimensions(ENTITY_ITEM_DEFAULT_DIMENSIONS);
_desiredDimensions = value;
if (_materialMappingMode == MaterialMappingMode::UV) {
EntityItem::setUnscaledDimensions(ENTITY_ITEM_DEFAULT_DIMENSIONS);
} else if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
EntityItem::setUnscaledDimensions(value);
}
}
std::shared_ptr<NetworkMaterial> MaterialEntityItem::getMaterial() const {
@ -208,6 +219,23 @@ void MaterialEntityItem::setMaterialData(const QString& materialData) {
}
}
void MaterialEntityItem::setMaterialMappingMode(MaterialMappingMode mode) {
if (_materialMappingMode != mode) {
removeMaterial();
_materialMappingMode = mode;
setUnscaledDimensions(_desiredDimensions);
applyMaterial();
}
}
void MaterialEntityItem::setMaterialRepeat(bool repeat) {
if (_materialRepeat != repeat) {
removeMaterial();
_materialRepeat = repeat;
applyMaterial();
}
}
void MaterialEntityItem::setMaterialMappingPos(const glm::vec2& materialMappingPos) {
if (_materialMappingPos != materialMappingPos) {
removeMaterial();
@ -256,6 +284,22 @@ void MaterialEntityItem::setParentID(const QUuid& parentID) {
}
}
void MaterialEntityItem::locationChanged(bool tellPhysics) {
EntityItem::locationChanged();
if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
removeMaterial();
applyMaterial();
}
}
void MaterialEntityItem::dimensionsChanged() {
EntityItem::dimensionsChanged();
if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
removeMaterial();
applyMaterial();
}
}
void MaterialEntityItem::removeMaterial() {
graphics::MaterialPointer material = getMaterial();
if (!material) {
@ -289,11 +333,19 @@ void MaterialEntityItem::applyMaterial() {
if (!material || parentID.isNull()) {
return;
}
Transform textureTransform;
textureTransform.setTranslation(glm::vec3(_materialMappingPos, 0.0f));
textureTransform.setRotation(glm::vec3(0.0f, 0.0f, glm::radians(_materialMappingRot)));
textureTransform.setScale(glm::vec3(_materialMappingScale, 1.0f));
material->setTextureTransforms(textureTransform);
if (_materialMappingMode == MaterialMappingMode::UV) {
textureTransform.setTranslation(glm::vec3(_materialMappingPos, 0.0f));
textureTransform.setRotation(glm::vec3(0.0f, 0.0f, glm::radians(_materialMappingRot)));
textureTransform.setScale(glm::vec3(_materialMappingScale, 1.0f));
} else if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
textureTransform = getTransform();
textureTransform.postScale(getUnscaledDimensions());
// Pass the inverse transform here so we don't need to compute it in the shaders
textureTransform.evalFromRawMatrix(textureTransform.getInverseMatrix());
}
material->setTextureTransforms(textureTransform, _materialMappingMode, _materialRepeat);
graphics::MaterialLayer materialLayer = graphics::MaterialLayer(material, getPriority());

View file

@ -58,7 +58,10 @@ public:
void setCurrentMaterialName(const std::string& currentMaterialName);
MaterialMappingMode getMaterialMappingMode() const { return _materialMappingMode; }
void setMaterialMappingMode(MaterialMappingMode mode) { _materialMappingMode = mode; }
void setMaterialMappingMode(MaterialMappingMode mode);
bool getMaterialRepeat() const { return _materialRepeat; }
void setMaterialRepeat(bool repeat);
quint16 getPriority() const { return _priority; }
void setPriority(quint16 priority);
@ -80,6 +83,9 @@ public:
void setParentID(const QUuid& parentID) override;
void locationChanged(bool tellPhysics) override;
void dimensionsChanged() override;
void applyMaterial();
void removeMaterial();
@ -104,8 +110,10 @@ private:
// emissiveMap, albedoMap (set opacityMap = albedoMap for transparency), metallicMap or specularMap, roughnessMap or glossMap,
// normalMap or bumpMap, occlusionMap, lightmapMap (broken, FIXME), scatteringMap (only works if normal mapped)
QString _materialURL;
// Type of material. "uv" or "projected". NOT YET IMPLEMENTED, only UV is used
// Type of material. "uv" or "projected".
MaterialMappingMode _materialMappingMode { UV };
bool _materialRepeat { true };
glm::vec3 _desiredDimensions;
// Priority for this material when applying it to its parent. Only the highest priority material will be used. Materials with the same priority are (essentially) randomly sorted.
// Base materials that come with models always have priority 0.
quint16 _priority { 0 };

View file

@ -33,7 +33,7 @@ using NormalType = glm::vec3;
#define FBX_NORMAL_ELEMENT gpu::Element::VEC3F_XYZ
#endif
// See comment in FBXReader::parseFBX().
// See comment in FBXSerializer::parseFBX().
static const int FBX_HEADER_BYTES_BEFORE_VERSION = 23;
static const QByteArray FBX_BINARY_PROLOG("Kaydara FBX Binary ");
static const QByteArray FBX_BINARY_PROLOG2("\0\x1a\0", 3);

View file

@ -1,6 +1,6 @@
//
// FBXReader.cpp
// interface/src/renderer
// FBXSerializer.cpp
// libraries/fbx/src
//
// Created by Andrzej Kapolka on 9/18/13.
// Copyright 2013 High Fidelity, Inc.
@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "FBXReader.h"
#include "FBXSerializer.h"
#include <iostream>
#include <QBuffer>
@ -36,7 +36,7 @@
#include <hfm/ModelFormatLogging.h>
// TOOL: Uncomment the following line to enable the filtering of all the unkwnon fields of a node so we can break point easily while loading a model with problems...
//#define DEBUG_FBXREADER
//#define DEBUG_FBXSERIALIZER
using namespace std;
@ -254,13 +254,13 @@ HFMBlendshape extractBlendshape(const FBXNode& object) {
HFMBlendshape blendshape;
foreach (const FBXNode& data, object.children) {
if (data.name == "Indexes") {
blendshape.indices = FBXReader::getIntVector(data);
blendshape.indices = FBXSerializer::getIntVector(data);
} else if (data.name == "Vertices") {
blendshape.vertices = FBXReader::createVec3Vector(FBXReader::getDoubleVector(data));
blendshape.vertices = FBXSerializer::createVec3Vector(FBXSerializer::getDoubleVector(data));
} else if (data.name == "Normals") {
blendshape.normals = FBXReader::createVec3Vector(FBXReader::getDoubleVector(data));
blendshape.normals = FBXSerializer::createVec3Vector(FBXSerializer::getDoubleVector(data));
}
}
return blendshape;
@ -384,7 +384,7 @@ HFMLight extractLight(const FBXNode& object) {
if (propname == "Intensity") {
light.intensity = 0.01f * property.properties.at(valIndex).value<float>();
} else if (propname == "Color") {
light.color = FBXReader::getVec3(property.properties, valIndex);
light.color = FBXSerializer::getVec3(property.properties, valIndex);
}
}
}
@ -392,7 +392,7 @@ HFMLight extractLight(const FBXNode& object) {
|| subobject.name == "TypeFlags") {
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
QString type = object.properties.at(0).toString();
type = object.properties.at(1).toString();
@ -441,7 +441,7 @@ QMap<QString, glm::quat> getJointRotationOffsets(const QVariantHash& mapping) {
return jointRotationOffsets;
}
HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString& url) {
HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QString& url) {
const FBXNode& node = _rootNode;
QMap<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
@ -512,7 +512,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
QMultiHash<QString, WeightedIndex> blendshapeChannelIndices;
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
int unknown = 0;
#endif
HFMModel* hfmModelPtr = new HFMModel;
@ -760,7 +760,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
extractBlendshape(subobject) };
blendshapes.append(blendshape);
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else if (subobject.name == "TypeFlags") {
QString attributetype = subobject.properties.at(0).toString();
if (!attributetype.empty()) {
@ -886,7 +886,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
tex.scaling.z = 1.0f;
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString propName = v;
unknown++;
@ -895,7 +895,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
if (subobject.name == "Type") {
} else if (subobject.name == "Version") {
@ -1068,7 +1068,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString propname = subobject.name.data();
int unknown = 0;
@ -1085,7 +1085,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
} else if (object.name == "NodeAttribute") {
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
std::vector<QString> properties;
foreach(const QVariant& v, object.properties) {
properties.push_back(v.toString());
@ -1148,7 +1148,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
animationCurves.insert(getID(object.properties), curve);
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString objectname = object.name.data();
if ( objectname == "Pose"
@ -1239,7 +1239,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString objectname = child.name.data();
if ( objectname == "Pose"
@ -1833,17 +1833,11 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
return hfmModelPtr;
}
HFMModel* readFBX(const QByteArray& data, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
HFMModel::Pointer FBXSerializer::read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url) {
QBuffer buffer(const_cast<QByteArray*>(&data));
buffer.open(QIODevice::ReadOnly);
return readFBX(&buffer, mapping, url, loadLightmaps, lightmapLevel);
}
HFMModel* readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
FBXReader reader;
reader._rootNode = FBXReader::parseFBX(device);
reader._loadLightmaps = loadLightmaps;
reader._lightmapLevel = lightmapLevel;
_rootNode = parseFBX(&buffer);
return reader.extractHFMModel(mapping, url);
return HFMModel::Pointer(extractHFMModel(mapping, url.toString()));
}

View file

@ -1,6 +1,6 @@
//
// FBXReader.h
// interface/src/renderer
// FBXSerializer.h
// libraries/fbx/src
//
// Created by Andrzej Kapolka on 9/18/13.
// Copyright 2013 High Fidelity, Inc.
@ -9,8 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_FBXReader_h
#define hifi_FBXReader_h
#ifndef hifi_FBXSerializer_h
#define hifi_FBXSerializer_h
#include <QtGlobal>
#include <QMetaType>
@ -27,7 +27,7 @@
#include <Transform.h>
#include "FBX.h"
#include <hfm/HFM.h>
#include <hfm/HFMSerializer.h>
#include <graphics/Geometry.h>
#include <graphics/Material.h>
@ -35,14 +35,6 @@
class QIODevice;
class FBXNode;
/// Reads HFMModel from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
HFMModel* readFBX(const QByteArray& data, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
/// Reads HFMModel from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
HFMModel* readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
class TextureParam {
public:
glm::vec2 UVTranslation;
@ -102,9 +94,12 @@ public:
class ExtractedMesh;
class FBXReader {
class FBXSerializer : public HFMSerializer {
public:
HFMModel* _hfmModel;
/// Reads HFMModel from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
HFMModel::Pointer read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url = QUrl()) override;
FBXNode _rootNode;
static FBXNode parseFBX(QIODevice* device);
@ -147,9 +142,9 @@ public:
void consolidateHFMMaterials(const QVariantHash& mapping);
bool _loadLightmaps = true;
float _lightmapOffset = 0.0f;
float _lightmapLevel;
bool _loadLightmaps { true };
float _lightmapOffset { 0.0f };
float _lightmapLevel { 1.0f };
QMultiMap<QString, QString> _connectionParentMap;
QMultiMap<QString, QString> _connectionChildMap;
@ -166,4 +161,4 @@ public:
static QVector<double> getDoubleVector(const FBXNode& node);
};
#endif // hifi_FBXReader_h
#endif // hifi_FBXSerializer_h

View file

@ -1,5 +1,5 @@
//
// FBXReader_Material.cpp
// FBXSerializer_Material.cpp
// interface/src/fbx
//
// Created by Sam Gateau on 8/27/2015.
@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "FBXReader.h"
#include "FBXSerializer.h"
#include <iostream>
#include <memory>
@ -27,7 +27,7 @@
#include <hfm/ModelFormatLogging.h>
HFMTexture FBXReader::getTexture(const QString& textureID) {
HFMTexture FBXSerializer::getTexture(const QString& textureID) {
HFMTexture texture;
const QByteArray& filepath = _textureFilepaths.value(textureID);
texture.content = _textureContent.value(filepath);
@ -69,7 +69,7 @@ HFMTexture FBXReader::getTexture(const QString& textureID) {
return texture;
}
void FBXReader::consolidateHFMMaterials(const QVariantHash& mapping) {
void FBXSerializer::consolidateHFMMaterials(const QVariantHash& mapping) {
QString materialMapString = mapping.value("materialMap").toString();
QJsonDocument materialMapDocument = QJsonDocument::fromJson(materialMapString.toUtf8());

View file

@ -1,5 +1,5 @@
//
// FBXReader_Mesh.cpp
// FBXSerializer_Mesh.cpp
// interface/src/fbx
//
// Created by Sam Gateau on 8/27/2015.
@ -33,7 +33,7 @@
#include <LogHandler.h>
#include <hfm/ModelFormatLogging.h>
#include "FBXReader.h"
#include "FBXSerializer.h"
#include <memory>
@ -191,7 +191,7 @@ void appendIndex(MeshData& data, QVector<int>& indices, int index, bool deduplic
}
}
ExtractedMesh FBXReader::extractMesh(const FBXNode& object, unsigned int& meshIndex, bool deduplicate) {
ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& meshIndex, bool deduplicate) {
MeshData data;
data.extracted.mesh.meshIndex = meshIndex++;
@ -254,7 +254,7 @@ ExtractedMesh FBXReader::extractMesh(const FBXNode& object, unsigned int& meshIn
data.colorsByVertex = true;
}
#if defined(FBXREADER_KILL_BLACK_COLOR_ATTRIBUTE)
#if defined(FBXSERIALIZER_KILL_BLACK_COLOR_ATTRIBUTE)
// Potential feature where we decide to kill the color attribute is to dark?
// Tested with the model:
// https://hifi-public.s3.amazonaws.com/ryan/gardenLight2.fbx
@ -281,7 +281,7 @@ ExtractedMesh FBXReader::extractMesh(const FBXNode& object, unsigned int& meshIn
} else if (subdata.name == "Name") {
attrib.name = subdata.properties.at(0).toString();
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
int unknown = 0;
QString subname = subdata.name.data();
@ -307,7 +307,7 @@ ExtractedMesh FBXReader::extractMesh(const FBXNode& object, unsigned int& meshIn
} else if (subdata.name == "Name") {
attrib.name = subdata.properties.at(0).toString();
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
int unknown = 0;
QString subname = subdata.name.data();
@ -557,7 +557,7 @@ ExtractedMesh FBXReader::extractMesh(const FBXNode& object, unsigned int& meshIn
return data.extracted;
}
glm::vec3 FBXReader::normalizeDirForPacking(const glm::vec3& dir) {
glm::vec3 FBXSerializer::normalizeDirForPacking(const glm::vec3& dir) {
auto maxCoord = glm::max(fabsf(dir.x), glm::max(fabsf(dir.y), fabsf(dir.z)));
if (maxCoord > 1e-6f) {
return dir / maxCoord;
@ -565,7 +565,7 @@ glm::vec3 FBXReader::normalizeDirForPacking(const glm::vec3& dir) {
return dir;
}
void FBXReader::buildModelMesh(HFMMesh& extractedMesh, const QString& url) {
void FBXSerializer::buildModelMesh(HFMMesh& extractedMesh, const QString& url) {
unsigned int totalSourceIndices = 0;
foreach(const HFMMeshPart& part, extractedMesh.parts) {
totalSourceIndices += (part.quadTrianglesIndices.size() + part.triangleIndices.size());

View file

@ -1,5 +1,5 @@
//
// FBXReader_Node.cpp
// FBXSerializer_Node.cpp
// interface/src/fbx
//
// Created by Sam Gateau on 8/27/2015.
@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "FBXReader.h"
#include "FBXSerializer.h"
#include <iostream>
#include <QtCore/QBuffer>
@ -345,7 +345,7 @@ FBXNode parseTextFBXNode(Tokenizer& tokenizer) {
return node;
}
FBXNode FBXReader::parseFBX(QIODevice* device) {
FBXNode FBXSerializer::parseFBX(QIODevice* device) {
PROFILE_RANGE_EX(resource_parse, __FUNCTION__, 0xff0000ff, device);
// verify the prolog
if (device->peek(FBX_BINARY_PROLOG.size()) != FBX_BINARY_PROLOG) {
@ -398,12 +398,12 @@ FBXNode FBXReader::parseFBX(QIODevice* device) {
}
glm::vec3 FBXReader::getVec3(const QVariantList& properties, int index) {
glm::vec3 FBXSerializer::getVec3(const QVariantList& properties, int index) {
return glm::vec3(properties.at(index).value<double>(), properties.at(index + 1).value<double>(),
properties.at(index + 2).value<double>());
}
QVector<glm::vec4> FBXReader::createVec4Vector(const QVector<double>& doubleVector) {
QVector<glm::vec4> FBXSerializer::createVec4Vector(const QVector<double>& doubleVector) {
QVector<glm::vec4> values;
for (const double* it = doubleVector.constData(), *end = it + ((doubleVector.size() / 4) * 4); it != end; ) {
float x = *it++;
@ -416,7 +416,7 @@ QVector<glm::vec4> FBXReader::createVec4Vector(const QVector<double>& doubleVect
}
QVector<glm::vec4> FBXReader::createVec4VectorRGBA(const QVector<double>& doubleVector, glm::vec4& average) {
QVector<glm::vec4> FBXSerializer::createVec4VectorRGBA(const QVector<double>& doubleVector, glm::vec4& average) {
QVector<glm::vec4> values;
for (const double* it = doubleVector.constData(), *end = it + ((doubleVector.size() / 4) * 4); it != end; ) {
float x = *it++;
@ -433,7 +433,7 @@ QVector<glm::vec4> FBXReader::createVec4VectorRGBA(const QVector<double>& double
return values;
}
QVector<glm::vec3> FBXReader::createVec3Vector(const QVector<double>& doubleVector) {
QVector<glm::vec3> FBXSerializer::createVec3Vector(const QVector<double>& doubleVector) {
QVector<glm::vec3> values;
for (const double* it = doubleVector.constData(), *end = it + ((doubleVector.size() / 3) * 3); it != end; ) {
float x = *it++;
@ -444,7 +444,7 @@ QVector<glm::vec3> FBXReader::createVec3Vector(const QVector<double>& doubleVect
return values;
}
QVector<glm::vec2> FBXReader::createVec2Vector(const QVector<double>& doubleVector) {
QVector<glm::vec2> FBXSerializer::createVec2Vector(const QVector<double>& doubleVector) {
QVector<glm::vec2> values;
for (const double* it = doubleVector.constData(), *end = it + ((doubleVector.size() / 2) * 2); it != end; ) {
float s = *it++;
@ -454,14 +454,14 @@ QVector<glm::vec2> FBXReader::createVec2Vector(const QVector<double>& doubleVect
return values;
}
glm::mat4 FBXReader::createMat4(const QVector<double>& doubleVector) {
glm::mat4 FBXSerializer::createMat4(const QVector<double>& doubleVector) {
return glm::mat4(doubleVector.at(0), doubleVector.at(1), doubleVector.at(2), doubleVector.at(3),
doubleVector.at(4), doubleVector.at(5), doubleVector.at(6), doubleVector.at(7),
doubleVector.at(8), doubleVector.at(9), doubleVector.at(10), doubleVector.at(11),
doubleVector.at(12), doubleVector.at(13), doubleVector.at(14), doubleVector.at(15));
}
QVector<int> FBXReader::getIntVector(const FBXNode& node) {
QVector<int> FBXSerializer::getIntVector(const FBXNode& node) {
foreach (const FBXNode& child, node.children) {
if (child.name == "a") {
return getIntVector(child);
@ -480,7 +480,7 @@ QVector<int> FBXReader::getIntVector(const FBXNode& node) {
return vector;
}
QVector<float> FBXReader::getFloatVector(const FBXNode& node) {
QVector<float> FBXSerializer::getFloatVector(const FBXNode& node) {
foreach (const FBXNode& child, node.children) {
if (child.name == "a") {
return getFloatVector(child);
@ -499,7 +499,7 @@ QVector<float> FBXReader::getFloatVector(const FBXNode& node) {
return vector;
}
QVector<double> FBXReader::getDoubleVector(const FBXNode& node) {
QVector<double> FBXSerializer::getDoubleVector(const FBXNode& node) {
foreach (const FBXNode& child, node.children) {
if (child.name == "a") {
return getDoubleVector(child);

View file

@ -1,5 +1,5 @@
//
// GLTFReader.cpp
// GLTFSerializer.cpp
// libraries/fbx/src
//
// Created by Luis Cuenca on 8/30/17.
@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GLTFReader.h"
#include "GLTFSerializer.h"
#include <QtCore/QBuffer>
#include <QtCore/QIODevice>
@ -33,14 +33,14 @@
#include <ResourceManager.h>
#include <PathUtils.h>
#include "FBXReader.h"
#include "FBXSerializer.h"
GLTFReader::GLTFReader() {
GLTFSerializer::GLTFSerializer() {
}
bool GLTFReader::getStringVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getStringVal(const QJsonObject& object, const QString& fieldname,
QString& value, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isString());
if (_defined) {
@ -50,7 +50,7 @@ bool GLTFReader::getStringVal(const QJsonObject& object, const QString& fieldnam
return _defined;
}
bool GLTFReader::getBoolVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getBoolVal(const QJsonObject& object, const QString& fieldname,
bool& value, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isBool());
if (_defined) {
@ -60,7 +60,7 @@ bool GLTFReader::getBoolVal(const QJsonObject& object, const QString& fieldname,
return _defined;
}
bool GLTFReader::getIntVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getIntVal(const QJsonObject& object, const QString& fieldname,
int& value, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && !object[fieldname].isNull());
if (_defined) {
@ -70,7 +70,7 @@ bool GLTFReader::getIntVal(const QJsonObject& object, const QString& fieldname,
return _defined;
}
bool GLTFReader::getDoubleVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getDoubleVal(const QJsonObject& object, const QString& fieldname,
double& value, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isDouble());
if (_defined) {
@ -79,7 +79,7 @@ bool GLTFReader::getDoubleVal(const QJsonObject& object, const QString& fieldnam
defined.insert(fieldname, _defined);
return _defined;
}
bool GLTFReader::getObjectVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getObjectVal(const QJsonObject& object, const QString& fieldname,
QJsonObject& value, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isObject());
if (_defined) {
@ -89,7 +89,7 @@ bool GLTFReader::getObjectVal(const QJsonObject& object, const QString& fieldnam
return _defined;
}
bool GLTFReader::getIntArrayVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getIntArrayVal(const QJsonObject& object, const QString& fieldname,
QVector<int>& values, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isArray());
if (_defined) {
@ -104,7 +104,7 @@ bool GLTFReader::getIntArrayVal(const QJsonObject& object, const QString& fieldn
return _defined;
}
bool GLTFReader::getDoubleArrayVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getDoubleArrayVal(const QJsonObject& object, const QString& fieldname,
QVector<double>& values, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isArray());
if (_defined) {
@ -119,7 +119,7 @@ bool GLTFReader::getDoubleArrayVal(const QJsonObject& object, const QString& fie
return _defined;
}
bool GLTFReader::getObjectArrayVal(const QJsonObject& object, const QString& fieldname,
bool GLTFSerializer::getObjectArrayVal(const QJsonObject& object, const QString& fieldname,
QJsonArray& objects, QMap<QString, bool>& defined) {
bool _defined = (object.contains(fieldname) && object[fieldname].isArray());
if (_defined) {
@ -129,7 +129,7 @@ bool GLTFReader::getObjectArrayVal(const QJsonObject& object, const QString& fie
return _defined;
}
int GLTFReader::getMeshPrimitiveRenderingMode(const QString& type)
int GLTFSerializer::getMeshPrimitiveRenderingMode(const QString& type)
{
if (type == "POINTS") {
return GLTFMeshPrimitivesRenderingMode::POINTS;
@ -155,7 +155,7 @@ int GLTFReader::getMeshPrimitiveRenderingMode(const QString& type)
return GLTFMeshPrimitivesRenderingMode::TRIANGLES;
}
int GLTFReader::getAccessorType(const QString& type)
int GLTFSerializer::getAccessorType(const QString& type)
{
if (type == "SCALAR") {
return GLTFAccessorType::SCALAR;
@ -181,7 +181,7 @@ int GLTFReader::getAccessorType(const QString& type)
return GLTFAccessorType::SCALAR;
}
int GLTFReader::getMaterialAlphaMode(const QString& type)
int GLTFSerializer::getMaterialAlphaMode(const QString& type)
{
if (type == "OPAQUE") {
return GLTFMaterialAlphaMode::OPAQUE;
@ -195,7 +195,7 @@ int GLTFReader::getMaterialAlphaMode(const QString& type)
return GLTFMaterialAlphaMode::OPAQUE;
}
int GLTFReader::getCameraType(const QString& type)
int GLTFSerializer::getCameraType(const QString& type)
{
if (type == "orthographic") {
return GLTFCameraTypes::ORTHOGRAPHIC;
@ -206,7 +206,7 @@ int GLTFReader::getCameraType(const QString& type)
return GLTFCameraTypes::PERSPECTIVE;
}
int GLTFReader::getImageMimeType(const QString& mime)
int GLTFSerializer::getImageMimeType(const QString& mime)
{
if (mime == "image/jpeg") {
return GLTFImageMimetype::JPEG;
@ -217,7 +217,7 @@ int GLTFReader::getImageMimeType(const QString& mime)
return GLTFImageMimetype::JPEG;
}
int GLTFReader::getAnimationSamplerInterpolation(const QString& interpolation)
int GLTFSerializer::getAnimationSamplerInterpolation(const QString& interpolation)
{
if (interpolation == "LINEAR") {
return GLTFAnimationSamplerInterpolation::LINEAR;
@ -225,7 +225,7 @@ int GLTFReader::getAnimationSamplerInterpolation(const QString& interpolation)
return GLTFAnimationSamplerInterpolation::LINEAR;
}
bool GLTFReader::setAsset(const QJsonObject& object) {
bool GLTFSerializer::setAsset(const QJsonObject& object) {
QJsonObject jsAsset;
bool isAssetDefined = getObjectVal(object, "asset", jsAsset, _file.defined);
if (isAssetDefined) {
@ -239,7 +239,7 @@ bool GLTFReader::setAsset(const QJsonObject& object) {
return isAssetDefined;
}
bool GLTFReader::addAccessor(const QJsonObject& object) {
bool GLTFSerializer::addAccessor(const QJsonObject& object) {
GLTFAccessor accessor;
getIntVal(object, "bufferView", accessor.bufferView, accessor.defined);
@ -259,7 +259,7 @@ bool GLTFReader::addAccessor(const QJsonObject& object) {
return true;
}
bool GLTFReader::addAnimation(const QJsonObject& object) {
bool GLTFSerializer::addAnimation(const QJsonObject& object) {
GLTFAnimation animation;
QJsonArray channels;
@ -297,7 +297,7 @@ bool GLTFReader::addAnimation(const QJsonObject& object) {
return true;
}
bool GLTFReader::addBufferView(const QJsonObject& object) {
bool GLTFSerializer::addBufferView(const QJsonObject& object) {
GLTFBufferView bufferview;
getIntVal(object, "buffer", bufferview.buffer, bufferview.defined);
@ -310,7 +310,7 @@ bool GLTFReader::addBufferView(const QJsonObject& object) {
return true;
}
bool GLTFReader::addBuffer(const QJsonObject& object) {
bool GLTFSerializer::addBuffer(const QJsonObject& object) {
GLTFBuffer buffer;
getIntVal(object, "byteLength", buffer.byteLength, buffer.defined);
@ -324,7 +324,7 @@ bool GLTFReader::addBuffer(const QJsonObject& object) {
return true;
}
bool GLTFReader::addCamera(const QJsonObject& object) {
bool GLTFSerializer::addCamera(const QJsonObject& object) {
GLTFCamera camera;
QJsonObject jsPerspective;
@ -352,7 +352,7 @@ bool GLTFReader::addCamera(const QJsonObject& object) {
return true;
}
bool GLTFReader::addImage(const QJsonObject& object) {
bool GLTFSerializer::addImage(const QJsonObject& object) {
GLTFImage image;
QString mime;
@ -367,7 +367,7 @@ bool GLTFReader::addImage(const QJsonObject& object) {
return true;
}
bool GLTFReader::getIndexFromObject(const QJsonObject& object, const QString& field,
bool GLTFSerializer::getIndexFromObject(const QJsonObject& object, const QString& field,
int& outidx, QMap<QString, bool>& defined) {
QJsonObject subobject;
if (getObjectVal(object, field, subobject, defined)) {
@ -377,7 +377,7 @@ bool GLTFReader::getIndexFromObject(const QJsonObject& object, const QString& fi
return false;
}
bool GLTFReader::addMaterial(const QJsonObject& object) {
bool GLTFSerializer::addMaterial(const QJsonObject& object) {
GLTFMaterial material;
getStringVal(object, "name", material.name, material.defined);
@ -413,7 +413,7 @@ bool GLTFReader::addMaterial(const QJsonObject& object) {
return true;
}
bool GLTFReader::addMesh(const QJsonObject& object) {
bool GLTFSerializer::addMesh(const QJsonObject& object) {
GLTFMesh mesh;
getStringVal(object, "name", mesh.name, mesh.defined);
@ -467,7 +467,7 @@ bool GLTFReader::addMesh(const QJsonObject& object) {
return true;
}
bool GLTFReader::addNode(const QJsonObject& object) {
bool GLTFSerializer::addNode(const QJsonObject& object) {
GLTFNode node;
getStringVal(object, "name", node.name, node.defined);
@ -487,7 +487,7 @@ bool GLTFReader::addNode(const QJsonObject& object) {
return true;
}
bool GLTFReader::addSampler(const QJsonObject& object) {
bool GLTFSerializer::addSampler(const QJsonObject& object) {
GLTFSampler sampler;
getIntVal(object, "magFilter", sampler.magFilter, sampler.defined);
@ -501,7 +501,7 @@ bool GLTFReader::addSampler(const QJsonObject& object) {
}
bool GLTFReader::addScene(const QJsonObject& object) {
bool GLTFSerializer::addScene(const QJsonObject& object) {
GLTFScene scene;
getStringVal(object, "name", scene.name, scene.defined);
@ -511,7 +511,7 @@ bool GLTFReader::addScene(const QJsonObject& object) {
return true;
}
bool GLTFReader::addSkin(const QJsonObject& object) {
bool GLTFSerializer::addSkin(const QJsonObject& object) {
GLTFSkin skin;
getIntVal(object, "inverseBindMatrices", skin.inverseBindMatrices, skin.defined);
@ -523,7 +523,7 @@ bool GLTFReader::addSkin(const QJsonObject& object) {
return true;
}
bool GLTFReader::addTexture(const QJsonObject& object) {
bool GLTFSerializer::addTexture(const QJsonObject& object) {
GLTFTexture texture;
getIntVal(object, "sampler", texture.sampler, texture.defined);
getIntVal(object, "source", texture.source, texture.defined);
@ -533,7 +533,7 @@ bool GLTFReader::addTexture(const QJsonObject& object) {
return true;
}
bool GLTFReader::parseGLTF(const QByteArray& data) {
bool GLTFSerializer::parseGLTF(const QByteArray& data) {
PROFILE_RANGE_EX(resource_parse, __FUNCTION__, 0xffff0000, nullptr);
QJsonDocument d = QJsonDocument::fromJson(data);
@ -664,7 +664,7 @@ bool GLTFReader::parseGLTF(const QByteArray& data) {
return true;
}
glm::mat4 GLTFReader::getModelTransform(const GLTFNode& node) {
glm::mat4 GLTFSerializer::getModelTransform(const GLTFNode& node) {
glm::mat4 tmat = glm::mat4(1.0);
if (node.defined["matrix"] && node.matrix.size() == 16) {
@ -697,7 +697,7 @@ glm::mat4 GLTFReader::getModelTransform(const GLTFNode& node) {
return tmat;
}
bool GLTFReader::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
//Build dependencies
QVector<QVector<int>> nodeDependencies(_file.nodes.size());
@ -899,7 +899,7 @@ bool GLTFReader::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
}
mesh.meshIndex = hfmModel.meshes.size();
FBXReader::buildModelMesh(mesh, url.toString());
FBXSerializer::buildModelMesh(mesh, url.toString());
}
}
@ -910,13 +910,12 @@ bool GLTFReader::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
return true;
}
HFMModel* GLTFReader::readGLTF(QByteArray& data, const QVariantHash& mapping,
const QUrl& url, bool loadLightmaps, float lightmapLevel) {
HFMModel::Pointer GLTFSerializer::read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url) {
_url = url;
// Normalize url for local files
QUrl normalizeUrl = DependencyManager::get<ResourceManager>()->normalizeURL(url);
QUrl normalizeUrl = DependencyManager::get<ResourceManager>()->normalizeURL(_url);
if (normalizeUrl.scheme().isEmpty() || (normalizeUrl.scheme() == "file")) {
QString localFileName = PathUtils::expandToLocalDataAbsolutePath(normalizeUrl).toLocalFile();
_url = QUrl(QFileInfo(localFileName).absoluteFilePath());
@ -924,17 +923,17 @@ HFMModel* GLTFReader::readGLTF(QByteArray& data, const QVariantHash& mapping,
parseGLTF(data);
//_file.dump();
HFMModel* hfmModelPtr = new HFMModel();
auto hfmModelPtr = std::make_shared<HFMModel>();
HFMModel& hfmModel = *hfmModelPtr;
buildGeometry(hfmModel, url);
buildGeometry(hfmModel, _url);
//hfmDebugDump(data);
return hfmModelPtr;
}
bool GLTFReader::readBinary(const QString& url, QByteArray& outdata) {
bool GLTFSerializer::readBinary(const QString& url, QByteArray& outdata) {
QUrl binaryUrl = _url.resolved(url);
bool success;
@ -943,7 +942,7 @@ bool GLTFReader::readBinary(const QString& url, QByteArray& outdata) {
return success;
}
bool GLTFReader::doesResourceExist(const QString& url) {
bool GLTFSerializer::doesResourceExist(const QString& url) {
if (_url.isEmpty()) {
return false;
}
@ -951,9 +950,9 @@ bool GLTFReader::doesResourceExist(const QString& url) {
return DependencyManager::get<ResourceManager>()->resourceExists(candidateUrl);
}
std::tuple<bool, QByteArray> GLTFReader::requestData(QUrl& url) {
std::tuple<bool, QByteArray> GLTFSerializer::requestData(QUrl& url) {
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
nullptr, url, true, -1, "GLTFReader::requestData");
nullptr, url, true, -1, "GLTFSerializer::requestData");
if (!request) {
return std::make_tuple(false, QByteArray());
@ -972,7 +971,7 @@ std::tuple<bool, QByteArray> GLTFReader::requestData(QUrl& url) {
}
QNetworkReply* GLTFReader::request(QUrl& url, bool isTest) {
QNetworkReply* GLTFSerializer::request(QUrl& url, bool isTest) {
if (!qApp) {
return nullptr;
}
@ -996,7 +995,7 @@ QNetworkReply* GLTFReader::request(QUrl& url, bool isTest) {
return netReply; // trying to sync later on.
}
HFMTexture GLTFReader::getHFMTexture(const GLTFTexture& texture) {
HFMTexture GLTFSerializer::getHFMTexture(const GLTFTexture& texture) {
HFMTexture fbxtex = HFMTexture();
fbxtex.texcoordSet = 0;
@ -1011,7 +1010,7 @@ HFMTexture GLTFReader::getHFMTexture(const GLTFTexture& texture) {
return fbxtex;
}
void GLTFReader::setHFMMaterial(HFMMaterial& fbxmat, const GLTFMaterial& material) {
void GLTFSerializer::setHFMMaterial(HFMMaterial& fbxmat, const GLTFMaterial& material) {
if (material.defined["name"]) {
@ -1074,7 +1073,7 @@ void GLTFReader::setHFMMaterial(HFMMaterial& fbxmat, const GLTFMaterial& materia
}
template<typename T, typename L>
bool GLTFReader::readArray(const QByteArray& bin, int byteOffset, int count,
bool GLTFSerializer::readArray(const QByteArray& bin, int byteOffset, int count,
QVector<L>& outarray, int accessorType) {
QDataStream blobstream(bin);
@ -1131,7 +1130,7 @@ bool GLTFReader::readArray(const QByteArray& bin, int byteOffset, int count,
return true;
}
template<typename T>
bool GLTFReader::addArrayOfType(const QByteArray& bin, int byteOffset, int count,
bool GLTFSerializer::addArrayOfType(const QByteArray& bin, int byteOffset, int count,
QVector<T>& outarray, int accessorType, int componentType) {
switch (componentType) {
@ -1155,7 +1154,7 @@ bool GLTFReader::addArrayOfType(const QByteArray& bin, int byteOffset, int count
return false;
}
void GLTFReader::retriangulate(const QVector<int>& inIndices, const QVector<glm::vec3>& in_vertices,
void GLTFSerializer::retriangulate(const QVector<int>& inIndices, const QVector<glm::vec3>& in_vertices,
const QVector<glm::vec3>& in_normals, QVector<int>& outIndices,
QVector<glm::vec3>& out_vertices, QVector<glm::vec3>& out_normals) {
for (int i = 0; i < inIndices.size(); i = i + 3) {
@ -1178,7 +1177,7 @@ void GLTFReader::retriangulate(const QVector<int>& inIndices, const QVector<glm:
}
}
void GLTFReader::hfmDebugDump(const HFMModel& hfmModel) {
void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
qCDebug(modelformat) << "---------------- hfmModel ----------------";
qCDebug(modelformat) << " hasSkeletonJoints =" << hfmModel.hasSkeletonJoints;
qCDebug(modelformat) << " offset =" << hfmModel.offset;

View file

@ -1,5 +1,5 @@
//
// GLTFReader.h
// GLTFSerializer.h
// libraries/fbx/src
//
// Created by Luis Cuenca on 8/30/17.
@ -9,13 +9,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GLTFReader_h
#define hifi_GLTFReader_h
#ifndef hifi_GLTFSerializer_h
#define hifi_GLTFSerializer_h
#include <memory.h>
#include <QtNetwork/QNetworkReply>
#include <hfm/ModelFormatLogging.h>
#include "FBXReader.h"
#include <hfm/HFMSerializer.h>
#include "FBXSerializer.h"
struct GLTFAsset {
@ -699,12 +700,11 @@ struct GLTFFile {
}
};
class GLTFReader : public QObject {
class GLTFSerializer : public QObject, public HFMSerializer {
Q_OBJECT
public:
GLTFReader();
HFMModel* readGLTF(QByteArray& data, const QVariantHash& mapping,
const QUrl& url, bool loadLightmaps = true, float lightmapLevel = 1.0f);
GLTFSerializer();
HFMModel::Pointer read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url = QUrl()) override;
private:
GLTFFile _file;
QUrl _url;
@ -780,4 +780,4 @@ private:
void hfmDebugDump(const HFMModel& hfmModel);
};
#endif // hifi_GLTFReader_h
#endif // hifi_GLTFSerializer_h

View file

@ -1,5 +1,5 @@
//
// OBJReader.cpp
// OBJSerializer.cpp
// libraries/fbx/src/
//
// Created by Seth Alves on 3/7/15.
@ -12,7 +12,7 @@
// http://www.scratchapixel.com/old/lessons/3d-advanced-lessons/obj-file-format/obj-file-format/
// http://paulbourke.net/dataformats/obj/
#include "OBJReader.h"
#include "OBJSerializer.h"
#include <ctype.h> // .obj files are not locale-specific. The C/ASCII charset applies.
#include <sstream>
@ -27,7 +27,7 @@
#include <NetworkAccessManager.h>
#include <ResourceManager.h>
#include "FBXReader.h"
#include "FBXSerializer.h"
#include <hfm/ModelFormatLogging.h>
#include <shared/PlatformHacks.h>
@ -238,7 +238,7 @@ void OBJFace::addFrom(const OBJFace* face, int index) { // add using data from f
}
}
bool OBJReader::isValidTexture(const QByteArray &filename) {
bool OBJSerializer::isValidTexture(const QByteArray &filename) {
if (_url.isEmpty()) {
return false;
}
@ -247,7 +247,7 @@ bool OBJReader::isValidTexture(const QByteArray &filename) {
return DependencyManager::get<ResourceManager>()->resourceExists(candidateUrl);
}
void OBJReader::parseMaterialLibrary(QIODevice* device) {
void OBJSerializer::parseMaterialLibrary(QIODevice* device) {
OBJTokenizer tokenizer(device);
QString matName = SMART_DEFAULT_MATERIAL_NAME;
OBJMaterial& currentMaterial = materials[matName];
@ -255,7 +255,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
switch (tokenizer.nextToken()) {
case OBJTokenizer::COMMENT_TOKEN:
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader MTLLIB comment:" << tokenizer.getComment();
qCDebug(modelformat) << "OBJSerializer MTLLIB comment:" << tokenizer.getComment();
#endif
break;
case OBJTokenizer::DATUM_TOKEN:
@ -264,7 +264,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
materials[matName] = currentMaterial;
#ifdef WANT_DEBUG
qCDebug(modelformat) <<
"OBJ Reader Last material illumination model:" << currentMaterial.illuminationModel <<
"OBJSerializer Last material illumination model:" << currentMaterial.illuminationModel <<
" shininess:" << currentMaterial.shininess <<
" opacity:" << currentMaterial.opacity <<
" diffuse color:" << currentMaterial.diffuseColor <<
@ -287,7 +287,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
matName = tokenizer.getDatum();
currentMaterial = materials[matName];
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader Starting new material definition " << matName;
qCDebug(modelformat) << "OBJSerializer Starting new material definition " << matName;
#endif
currentMaterial.diffuseTextureFilename = "";
currentMaterial.emissiveTextureFilename = "";
@ -299,7 +299,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
currentMaterial.shininess = tokenizer.getFloat();
} else if (token == "Ni") {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader Ignoring material Ni " << tokenizer.getFloat();
qCDebug(modelformat) << "OBJSerializer Ignoring material Ni " << tokenizer.getFloat();
#else
tokenizer.getFloat();
#endif
@ -311,13 +311,13 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
currentMaterial.illuminationModel = tokenizer.getFloat();
} else if (token == "Tf") {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader Ignoring material Tf " << tokenizer.getVec3();
qCDebug(modelformat) << "OBJSerializer Ignoring material Tf " << tokenizer.getVec3();
#else
tokenizer.getVec3();
#endif
} else if (token == "Ka") {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader Ignoring material Ka " << tokenizer.getVec3();;
qCDebug(modelformat) << "OBJSerializer Ignoring material Ka " << tokenizer.getVec3();;
#else
tokenizer.getVec3();
#endif
@ -334,7 +334,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
parseTextureLine(textureLine, filename, textureOptions);
if (filename.endsWith(".tga")) {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader WARNING: currently ignoring tga texture " << filename << " in " << _url;
qCDebug(modelformat) << "OBJSerializer WARNING: currently ignoring tga texture " << filename << " in " << _url;
#endif
break;
}
@ -354,7 +354,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
}
}
void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& filename, OBJMaterialTextureOptions& textureOptions) {
void OBJSerializer::parseTextureLine(const QByteArray& textureLine, QByteArray& filename, OBJMaterialTextureOptions& textureOptions) {
// Texture options reference http://paulbourke.net/dataformats/mtl/
// and https://wikivisually.com/wiki/Material_Template_Library
@ -368,7 +368,7 @@ void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& file
if (option == "-blendu" || option == "-blendv") {
#ifdef WANT_DEBUG
const std::string& onoff = parser[i++];
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << onoff.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << onoff.c_str();
#endif
} else if (option == "-bm") {
const std::string& bm = parser[i++];
@ -377,22 +377,22 @@ void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& file
#ifdef WANT_DEBUG
const std::string& boost = parser[i++];
float boostFloat = std::stof(boost);
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << boost.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << boost.c_str();
#endif
} else if (option == "-cc") {
#ifdef WANT_DEBUG
const std::string& onoff = parser[i++];
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << onoff.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << onoff.c_str();
#endif
} else if (option == "-clamp") {
#ifdef WANT_DEBUG
const std::string& onoff = parser[i++];
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << onoff.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << onoff.c_str();
#endif
} else if (option == "-imfchan") {
#ifdef WANT_DEBUG
const std::string& imfchan = parser[i++];
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << imfchan.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << imfchan.c_str();
#endif
} else if (option == "-mm") {
if (i + 1 < parser.size()) {
@ -401,7 +401,7 @@ void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& file
const std::string& mmGain = parser[i++];
float mmBaseFloat = std::stof(mmBase);
float mmGainFloat = std::stof(mmGain);
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << mmBase.c_str() << mmGain.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << mmBase.c_str() << mmGain.c_str();
#endif
}
} else if (option == "-o" || option == "-s" || option == "-t") {
@ -413,23 +413,23 @@ void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& file
float uFloat = std::stof(u);
float vFloat = std::stof(v);
float wFloat = std::stof(w);
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << u.c_str() << v.c_str() << w.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << u.c_str() << v.c_str() << w.c_str();
#endif
}
} else if (option == "-texres") {
#ifdef WANT_DEBUG
const std::string& texres = parser[i++];
float texresFloat = std::stof(texres);
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << texres.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << texres.c_str();
#endif
} else if (option == "-type") {
#ifdef WANT_DEBUG
const std::string& type = parser[i++];
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring texture option" << option.c_str() << type.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring texture option" << option.c_str() << type.c_str();
#endif
} else if (option[0] == '-') {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader WARNING: Ignoring unsupported texture option" << option.c_str();
qCDebug(modelformat) << "OBJSerializer WARNING: Ignoring unsupported texture option" << option.c_str();
#endif
}
} else { // assume filename at end when no more options
@ -444,7 +444,7 @@ void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& file
std::tuple<bool, QByteArray> requestData(QUrl& url) {
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
nullptr, url, true, -1, "(OBJReader) requestData");
nullptr, url, true, -1, "(OBJSerializer) requestData");
if (!request) {
return std::make_tuple(false, QByteArray());
@ -488,7 +488,7 @@ QNetworkReply* request(QUrl& url, bool isTest) {
}
bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mapping, HFMModel& hfmModel,
bool OBJSerializer::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mapping, HFMModel& hfmModel,
float& scaleGuess, bool combineParts) {
FaceGroup faces;
HFMMesh& mesh = hfmModel.meshes[0];
@ -557,7 +557,7 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
currentMaterialName = nextName;
}
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader new current material:" << currentMaterialName;
qCDebug(modelformat) << "OBJSerializer new current material:" << currentMaterialName;
#endif
}
} else if (token == "v") {
@ -652,12 +652,12 @@ done:
}
HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mapping, bool combineParts, const QUrl& url) {
HFMModel::Pointer OBJSerializer::read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url) {
PROFILE_RANGE_EX(resource_parse, __FUNCTION__, 0xffff0000, nullptr);
QBuffer buffer { &data };
QBuffer buffer { const_cast<QByteArray*>(&data) };
buffer.open(QIODevice::ReadOnly);
auto hfmModelPtr { std::make_shared<HFMModel>() };
auto hfmModelPtr = std::make_shared<HFMModel>();
HFMModel& hfmModel { *hfmModelPtr };
OBJTokenizer tokenizer { &buffer };
float scaleGuess = 1.0f;
@ -665,6 +665,7 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
bool needsMaterialLibrary = false;
_url = url;
bool combineParts = mapping.value("combineParts").toBool();
hfmModel.meshExtents.reset();
hfmModel.meshes.append(HFMMesh());
@ -720,7 +721,7 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
QString groupMaterialName = face.materialName;
if (groupMaterialName.isEmpty() && specifiesUV) {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
qCDebug(modelformat) << "OBJSerializer WARNING: " << url
<< " needs a texture that isn't specified. Using default mechanism.";
#endif
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
@ -822,11 +823,11 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
}
// Build the single mesh.
FBXReader::buildModelMesh(mesh, url.toString());
FBXSerializer::buildModelMesh(mesh, _url.toString());
// hfmDebugDump(hfmModel);
} catch(const std::exception& e) {
qCDebug(modelformat) << "OBJ reader fail: " << e.what();
qCDebug(modelformat) << "OBJSerializer fail: " << e.what();
}
QString queryPart = _url.query();
@ -838,14 +839,14 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
}
// Some .obj files use the convention that a group with uv coordinates that doesn't define a material, should use
// a texture with the same basename as the .obj file.
if (preDefinedMaterial.userSpecifiesUV && !url.isEmpty()) {
QString filename = url.fileName();
if (preDefinedMaterial.userSpecifiesUV && !_url.isEmpty()) {
QString filename = _url.fileName();
int extIndex = filename.lastIndexOf('.'); // by construction, this does not fail
QString basename = filename.remove(extIndex + 1, sizeof("obj"));
preDefinedMaterial.diffuseColor = glm::vec3(1.0f);
QVector<QByteArray> extensions = { "jpg", "jpeg", "png", "tga" };
QByteArray base = basename.toUtf8(), textName = "";
qCDebug(modelformat) << "OBJ Reader looking for default texture";
qCDebug(modelformat) << "OBJSerializer looking for default texture";
for (int i = 0; i < extensions.count(); i++) {
QByteArray candidateString = base + extensions[i];
if (isValidTexture(candidateString)) {
@ -856,7 +857,7 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
if (!textName.isEmpty()) {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader found a default texture: " << textName;
qCDebug(modelformat) << "OBJSerializer found a default texture: " << textName;
#endif
preDefinedMaterial.diffuseTextureFilename = textName;
}
@ -866,7 +867,7 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
foreach (QString libraryName, librariesSeen.keys()) {
// Throw away any path part of libraryName, and merge against original url.
QUrl libraryUrl = _url.resolved(QUrl(libraryName).fileName());
qCDebug(modelformat) << "OBJ Reader material library" << libraryName;
qCDebug(modelformat) << "OBJSerializer material library" << libraryName;
bool success;
QByteArray data;
std::tie<bool, QByteArray>(success, data) = requestData(libraryUrl);
@ -875,7 +876,7 @@ HFMModel::Pointer OBJReader::readOBJ(QByteArray& data, const QVariantHash& mappi
buffer.open(QIODevice::ReadOnly);
parseMaterialLibrary(&buffer);
} else {
qCDebug(modelformat) << "OBJ Reader WARNING:" << libraryName << "did not answer";
qCDebug(modelformat) << "OBJSerializer WARNING:" << libraryName << "did not answer";
}
}
}

View file

@ -1,6 +1,20 @@
//
// OBJSerializer.h
// libraries/fbx/src/
//
// Created by Seth Alves on 3/6/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OBJSerializer_h
#define hifi_OBJSerializer_h
#include <QtNetwork/QNetworkReply>
#include "FBXReader.h"
#include <hfm/HFMSerializer.h>
#include "FBXSerializer.h"
class OBJTokenizer {
public:
@ -75,7 +89,7 @@ public:
OBJMaterial() : shininess(0.0f), opacity(1.0f), diffuseColor(0.9f), specularColor(0.9f), emissiveColor(0.0f), illuminationModel(-1) {}
};
class OBJReader: public QObject { // QObject so we can make network requests.
class OBJSerializer: public QObject, public HFMSerializer { // QObject so we can make network requests.
Q_OBJECT
public:
typedef QVector<OBJFace> FaceGroup;
@ -86,8 +100,8 @@ public:
QVector<FaceGroup> faceGroups;
QString currentMaterialName;
QHash<QString, OBJMaterial> materials;
HFMModel::Pointer readOBJ(QByteArray& data, const QVariantHash& mapping, bool combineParts, const QUrl& url = QUrl());
HFMModel::Pointer read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url = QUrl()) override;
private:
QUrl _url;
@ -105,3 +119,5 @@ private:
// What are these utilities doing here? One is used by fbx loading code in VHACD Utils, and the other a general debugging utility.
void setMeshPartDefaults(HFMMeshPart& meshPart, QString materialID);
void hfmDebugDump(const HFMModel& hfmModel);
#endif // hifi_OBJSerializer_h

View file

@ -231,7 +231,8 @@ float snoise(vec2 v) {
// Other corners
vec2 i1;
i1 = (x0.x > x0.y) ? vec2(1.0, 0.0) : vec2(0.0, 1.0);
float check = float(x0.x > x0.y);
i1 = vec2(check, 1.0 - check);
vec4 x12 = x0.xyxy + C.xxzz;
x12.xy -= i1;

View file

@ -130,9 +130,11 @@ void Material::setTextureMap(MapChannel channel, const TextureMapPointer& textur
if (channel == MaterialKey::LIGHTMAP_MAP) {
// update the texcoord1 with lightmap
_schemaBuffer.edit<Schema>()._texcoordTransforms[1] = (textureMap ? textureMap->getTextureTransform().getMatrix() : glm::mat4());
_schemaBuffer.edit<Schema>()._lightmapParams = (textureMap ? glm::vec4(textureMap->getLightmapOffsetScale(), 0.0, 0.0) : glm::vec4(0.0, 1.0, 0.0, 0.0));
_schemaBuffer.edit<Schema>()._lightmapParams = (textureMap ? glm::vec2(textureMap->getLightmapOffsetScale()) : glm::vec2(0.0, 1.0));
}
_schemaBuffer.edit<Schema>()._materialParams = (textureMap ? glm::vec2(textureMap->getMappingMode(), textureMap->getRepeat()) : glm::vec2(MaterialMappingMode::UV, 1.0));
_schemaBuffer.edit<Schema>()._key = (uint32)_key._flags.to_ulong();
}
@ -211,13 +213,16 @@ bool Material::calculateMaterialInfo() const {
return _hasCalculatedTextureInfo;
}
void Material::setTextureTransforms(const Transform& transform) {
void Material::setTextureTransforms(const Transform& transform, MaterialMappingMode mode, bool repeat) {
for (auto &textureMapItem : _textureMaps) {
if (textureMapItem.second) {
textureMapItem.second->setTextureTransform(transform);
textureMapItem.second->setMappingMode(mode);
textureMapItem.second->setRepeat(repeat);
}
}
for (int i = 0; i < NUM_TEXCOORD_TRANSFORMS; i++) {
_schemaBuffer.edit<Schema>()._texcoordTransforms[i] = transform.getMatrix();
}
}
_schemaBuffer.edit<Schema>()._materialParams = glm::vec2(mode, repeat);
}

View file

@ -22,6 +22,8 @@
#include <gpu/Resource.h>
#include <gpu/TextureTable.h>
#include "MaterialMappingMode.h"
class Transform;
namespace graphics {
@ -330,7 +332,11 @@ public:
// Texture Coord Transform Array
glm::mat4 _texcoordTransforms[NUM_TEXCOORD_TRANSFORMS];
glm::vec4 _lightmapParams{ 0.0, 1.0, 0.0, 0.0 };
glm::vec2 _lightmapParams { 0.0, 1.0 };
// x: material mode (0 for UV, 1 for PROJECTED)
// y: 1 for texture repeat, 0 for discard outside of 0 - 1
glm::vec2 _materialParams { 0.0, 1.0 };
Schema() {}
};
@ -353,7 +359,7 @@ public:
size_t getTextureSize() const { calculateMaterialInfo(); return _textureSize; }
bool hasTextureInfo() const { return _hasCalculatedTextureInfo; }
void setTextureTransforms(const Transform& transform);
void setTextureTransforms(const Transform& transform, MaterialMappingMode mode, bool repeat);
const std::string& getName() const { return _name; }

View file

@ -19,20 +19,25 @@ const int MAX_TEXCOORDS = 2;
struct TexMapArray {
mat4 _texcoordTransforms0;
mat4 _texcoordTransforms1;
vec4 _lightmapParams;
vec2 _lightmapParams;
vec2 _materialParams;
};
<@func declareMaterialTexMapArrayBuffer()@>
<@func evalTexMapArrayTexcoord0(texMapArray, inTexcoord0, outTexcoord0)@>
<@func evalTexMapArrayTexcoord0(texMapArray, inTexcoord0, worldPosition, outTexcoord0)@>
{
<$outTexcoord0$> = (<$texMapArray$>._texcoordTransforms0 * vec4(<$inTexcoord0$>.st, 0.0, 1.0)).st;
<$outTexcoord0$> = mix(<$texMapArray$>._texcoordTransforms0 * vec4(<$inTexcoord0$>.st, 0.0, 1.0),
<$texMapArray$>._texcoordTransforms0 * <$worldPosition$> + vec4(0.5),
<$texMapArray$>._materialParams.x).st;
}
<@endfunc@>
<@func evalTexMapArrayTexcoord1(texMapArray, inTexcoord1, outTexcoord1)@>
<@func evalTexMapArrayTexcoord1(texMapArray, inTexcoord1, worldPosition, outTexcoord1)@>
{
<$outTexcoord1$> = (<$texMapArray$>._texcoordTransforms1 * vec4(<$inTexcoord1$>.st, 0.0, 1.0)).st;
<$outTexcoord1$> = mix(<$texMapArray$>._texcoordTransforms1 * vec4(<$inTexcoord1$>.st, 0.0, 1.0),
<$texMapArray$>._texcoordTransforms1 * <$worldPosition$> + vec4(0.5),
<$texMapArray$>._materialParams.x).st;
}
<@endfunc@>

View file

@ -151,29 +151,32 @@ float fetchScatteringMap(vec2 uv) {
<@func fetchMaterialTexturesCoord0(matKey, texcoord0, albedo, roughness, normal, metallic, emissive, scattering)@>
if (getTexMapArray()._materialParams.y != 1.0 && clamp(<$texcoord0$>, vec2(0.0), vec2(1.0)) != <$texcoord0$>) {
discard;
}
<@if albedo@>
vec4 <$albedo$> = (((<$matKey$> & (ALBEDO_MAP_BIT | OPACITY_MASK_MAP_BIT | OPACITY_TRANSLUCENT_MAP_BIT)) != 0) ? fetchAlbedoMap(<$texcoord0$>) : vec4(1.0));
vec4 <$albedo$> = mix(vec4(1.0), fetchAlbedoMap(<$texcoord0$>), float((<$matKey$> & (ALBEDO_MAP_BIT | OPACITY_MASK_MAP_BIT | OPACITY_TRANSLUCENT_MAP_BIT)) != 0));
<@endif@>
<@if roughness@>
float <$roughness$> = (((<$matKey$> & ROUGHNESS_MAP_BIT) != 0) ? fetchRoughnessMap(<$texcoord0$>) : 1.0);
float <$roughness$> = mix(1.0, fetchRoughnessMap(<$texcoord0$>), float((<$matKey$> & ROUGHNESS_MAP_BIT) != 0));
<@endif@>
<@if normal@>
vec3 <$normal$> = (((<$matKey$> & NORMAL_MAP_BIT) != 0) ? fetchNormalMap(<$texcoord0$>) : vec3(0.0, 1.0, 0.0));
vec3 <$normal$> = mix(vec3(0.0, 1.0, 0.0), fetchNormalMap(<$texcoord0$>), float((<$matKey$> & NORMAL_MAP_BIT) != 0));
<@endif@>
<@if metallic@>
float <$metallic$> = (((<$matKey$> & METALLIC_MAP_BIT) != 0) ? fetchMetallicMap(<$texcoord0$>) : 0.0);
float <$metallic$> = float((<$matKey$> & METALLIC_MAP_BIT) != 0) * fetchMetallicMap(<$texcoord0$>);
<@endif@>
<@if emissive@>
vec3 <$emissive$> = (((<$matKey$> & EMISSIVE_MAP_BIT) != 0) ? fetchEmissiveMap(<$texcoord0$>) : vec3(0.0));
vec3 <$emissive$> = float((<$matKey$> & EMISSIVE_MAP_BIT) != 0) * fetchEmissiveMap(<$texcoord0$>);
<@endif@>
<@if scattering@>
float <$scattering$> = (((<$matKey$> & SCATTERING_MAP_BIT) != 0) ? fetchScatteringMap(<$texcoord0$>) : 0.0);
float <$scattering$> = float((<$matKey$> & SCATTERING_MAP_BIT) != 0) * fetchScatteringMap(<$texcoord0$>);
<@endif@>
<@endfunc@>
<@func fetchMaterialTexturesCoord1(matKey, texcoord1, occlusion, lightmap)@>
<@if occlusion@>
float <$occlusion$> = (((<$matKey$> & OCCLUSION_MAP_BIT) != 0) ? fetchOcclusionMap(<$texcoord1$>) : 1.0);
float <$occlusion$> = mix(1.0, fetchOcclusionMap(<$texcoord1$>), float((<$matKey$> & OCCLUSION_MAP_BIT) != 0));
<@endif@>
<@if lightmap@>
vec3 <$lightmap$> = fetchLightmapMap(<$texcoord1$>);
@ -188,7 +191,7 @@ float fetchScatteringMap(vec2 uv) {
LAYOUT(binding=GRAPHICS_TEXTURE_MATERIAL_EMISSIVE_LIGHTMAP) uniform sampler2D emissiveMap;
vec3 fetchLightmapMap(vec2 uv) {
vec2 lightmapParams = getTexMapArray()._lightmapParams.xy;
vec2 lightmapParams = getTexMapArray()._lightmapParams;
return (vec3(lightmapParams.x) + lightmapParams.y * texture(emissiveMap, uv).rgb);
}
<@endfunc@>
@ -207,20 +210,19 @@ vec3 fetchLightmapMap(vec2 uv) {
<@func evalMaterialAlbedo(fetchedAlbedo, materialAlbedo, matKey, albedo)@>
{
<$albedo$>.xyz = (((<$matKey$> & ALBEDO_VAL_BIT) != 0) ? <$materialAlbedo$> : vec3(1.0));
if (((<$matKey$> & ALBEDO_MAP_BIT) != 0)) {
<$albedo$>.xyz *= <$fetchedAlbedo$>.xyz;
}
<$albedo$>.xyz = mix(vec3(1.0), <$materialAlbedo$>, float((<$matKey$> & ALBEDO_VAL_BIT) != 0));
<$albedo$>.xyz *= mix(vec3(1.0), <$fetchedAlbedo$>.xyz, float((<$matKey$> & ALBEDO_MAP_BIT) != 0));
}
<@endfunc@>
<@func evalMaterialOpacity(fetchedOpacity, materialOpacity, matKey, opacity)@>
{
const float OPACITY_MASK_THRESHOLD = 0.5;
<$opacity$> = (((<$matKey$> & (OPACITY_TRANSLUCENT_MAP_BIT | OPACITY_MASK_MAP_BIT)) != 0) ?
(((<$matKey$> & OPACITY_MASK_MAP_BIT) != 0) ? step(OPACITY_MASK_THRESHOLD, <$fetchedOpacity$>) : <$fetchedOpacity$>) :
1.0) * <$materialOpacity$>;
<$opacity$> = mix(1.0,
mix(<$fetchedOpacity$>,
step(OPACITY_MASK_THRESHOLD, <$fetchedOpacity$>),
float((<$matKey$> & OPACITY_MASK_MAP_BIT) != 0)),
float((<$matKey$> & (OPACITY_TRANSLUCENT_MAP_BIT | OPACITY_MASK_MAP_BIT)) != 0)) * <$materialOpacity$>;
}
<@endfunc@>
@ -241,19 +243,19 @@ vec3 fetchLightmapMap(vec2 uv) {
<@func evalMaterialRoughness(fetchedRoughness, materialRoughness, matKey, roughness)@>
{
<$roughness$> = (((<$matKey$> & ROUGHNESS_MAP_BIT) != 0) ? <$fetchedRoughness$> : <$materialRoughness$>);
<$roughness$> = mix(<$materialRoughness$>, <$fetchedRoughness$>, float((<$matKey$> & ROUGHNESS_MAP_BIT) != 0));
}
<@endfunc@>
<@func evalMaterialMetallic(fetchedMetallic, materialMetallic, matKey, metallic)@>
{
<$metallic$> = (((<$matKey$> & METALLIC_MAP_BIT) != 0) ? <$fetchedMetallic$> : <$materialMetallic$>);
<$metallic$> = mix(<$materialMetallic$>, <$fetchedMetallic$>, float((<$matKey$> & METALLIC_MAP_BIT) != 0));
}
<@endfunc@>
<@func evalMaterialEmissive(fetchedEmissive, materialEmissive, matKey, emissive)@>
{
<$emissive$> = (((<$matKey$> & EMISSIVE_MAP_BIT) != 0) ? <$fetchedEmissive$> : <$materialEmissive$>);
<$emissive$> = mix(<$materialEmissive$>, <$fetchedEmissive$>, float((<$matKey$> & EMISSIVE_MAP_BIT) != 0));
}
<@endfunc@>
@ -265,7 +267,7 @@ vec3 fetchLightmapMap(vec2 uv) {
<@func evalMaterialScattering(fetchedScattering, materialScattering, matKey, scattering)@>
{
<$scattering$> = (((<$matKey$> & SCATTERING_MAP_BIT) != 0) ? <$fetchedScattering$> : <$materialScattering$>);
<$scattering$> = mix(<$materialScattering$>, <$fetchedScattering$>, float((<$matKey$> & SCATTERING_MAP_BIT) != 0));
}
<@endfunc@>

View file

@ -223,8 +223,8 @@ void SunSkyStage::setSunDirection(const Vec3& direction) {
}
}
// THe sun declinaison calculus is taken from https://en.wikipedia.org/wiki/Position_of_the_Sun
double evalSunDeclinaison(double dayNumber) {
// The sun declination calculus is taken from https://en.wikipedia.org/wiki/Position_of_the_Sun
double evalSunDeclination(double dayNumber) {
return -(23.0 + 44.0/60.0)*cos(glm::radians((360.0/365.0)*(dayNumber + 10.0)));
}
@ -235,8 +235,8 @@ void SunSkyStage::updateGraphicsObject() const {
float sunLongitude = _earthSunModel.getLongitude() + (MAX_LONGITUDE * signedNormalizedDayTime);
_earthSunModel.setSunLongitude(sunLongitude);
// And update the sunLAtitude as the declinaison depending of the time of the year
_earthSunModel.setSunLatitude(evalSunDeclinaison(_yearTime));
// And update the sunLatitude as the declination depending on the time of year
_earthSunModel.setSunLatitude(evalSunDeclination(_yearTime));
if (isSunModelEnabled()) {
Vec3d sunLightDir = -_earthSunModel.getSurfaceSunDir();

View file

@ -14,6 +14,7 @@
#include "gpu/Texture.h"
#include "Transform.h"
#include "MaterialMappingMode.h"
namespace graphics {
@ -30,6 +31,12 @@ public:
void setTextureTransform(const Transform& texcoordTransform);
const Transform& getTextureTransform() const { return _texcoordTransform; }
void setMappingMode(MaterialMappingMode mode) { _mappingMode = mode; }
MaterialMappingMode getMappingMode() const { return _mappingMode; }
void setRepeat(bool repeat) { _repeat = repeat; }
bool getRepeat() const { return _repeat; }
void setUseAlphaChannel(bool useAlpha) { _useAlphaChannel = useAlpha; }
bool useAlphaChannel() const { return _useAlphaChannel; }
@ -41,6 +48,8 @@ protected:
Transform _texcoordTransform;
glm::vec2 _lightmapOffsetScale{ 0.0f, 1.0f };
MaterialMappingMode _mappingMode { MaterialMappingMode::UV };
bool _repeat { true };
bool _useAlphaChannel{ false };
};

View file

@ -30,11 +30,9 @@ void main(void) {
vec3 color = skybox.color.rgb;
// blend is only set if there is a cubemap
if (skybox.color.a > 0.0) {
color = texture(cubeMap, coord).rgb;
if (skybox.color.a < 1.0) {
color *= skybox.color.rgb;
}
}
float check = float(skybox.color.a > 0.0);
color = mix(color, texture(cubeMap, coord).rgb, check);
color *= mix(vec3(1.0), skybox.color.rgb, check * float(skybox.color.a < 1.0));
_fragColor = vec4(color, 0.0);
}

View file

@ -0,0 +1,29 @@
//
//  HFMSerializer.h
//  libraries/hfm/src/hfm
//
//  Created by Sabrina Shanman on 2018/11/07.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_HFMSerializer_h
#define hifi_HFMSerializer_h

#include <shared/HifiTypes.h>

#include "HFM.h"

namespace hfm {

// Abstract interface for model serializers (FBX, OBJ, GLTF, ...).
// An implementation parses raw model bytes into an hfm::Model.
class Serializer {
public:
    // Polymorphic base: virtual destructor so deleting a derived
    // serializer through a Serializer* is well-defined.
    virtual ~Serializer() = default;

    // Parse `data` into a Model.
    // mapping: serializer-specific options (e.g. "combineParts")
    // url: source location of the data, for error reporting and
    //      resolving relative resources
    virtual Model::Pointer read(const hifi::ByteArray& data, const hifi::VariantHash& mapping, const hifi::URL& url = hifi::URL()) = 0;
};

}  // namespace hfm

using HFMSerializer = hfm::Serializer;

#endif // hifi_HFMSerializer_h

View file

@ -12,9 +12,9 @@
#include "ModelCache.h"
#include <Finally.h>
#include <FSTReader.h>
#include "FBXReader.h"
#include "OBJReader.h"
#include "GLTFReader.h"
#include "FBXSerializer.h"
#include "OBJSerializer.h"
#include "GLTFSerializer.h"
#include <gpu/Batch.h>
#include <gpu/Stream.h>
@ -193,24 +193,26 @@ void GeometryReader::run() {
HFMModel::Pointer hfmModel;
QVariantHash serializerMapping = _mapping;
serializerMapping["combineParts"] = _combineParts;
if (_url.path().toLower().endsWith(".fbx")) {
hfmModel.reset(readFBX(_data, _mapping, _url.path()));
hfmModel = FBXSerializer().read(_data, serializerMapping, _url);
if (hfmModel->meshes.size() == 0 && hfmModel->joints.size() == 0) {
throw QString("empty geometry, possibly due to an unsupported FBX version");
}
} else if (_url.path().toLower().endsWith(".obj")) {
hfmModel = OBJReader().readOBJ(_data, _mapping, _combineParts, _url);
hfmModel = OBJSerializer().read(_data, serializerMapping, _url);
} else if (_url.path().toLower().endsWith(".obj.gz")) {
QByteArray uncompressedData;
if (gunzip(_data, uncompressedData)){
hfmModel = OBJReader().readOBJ(uncompressedData, _mapping, _combineParts, _url);
hfmModel = OBJSerializer().read(uncompressedData, serializerMapping, _url);
} else {
throw QString("failed to decompress .obj.gz");
}
} else if (_url.path().toLower().endsWith(".gltf")) {
std::shared_ptr<GLTFReader> glreader = std::make_shared<GLTFReader>();
hfmModel.reset(glreader->readGLTF(_data, _mapping, _url));
hfmModel = GLTFSerializer().read(_data, serializerMapping, _url);
if (hfmModel->meshes.size() == 0 && hfmModel->joints.size() == 0) {
throw QString("empty geometry, possibly due to an unsupported GLTF version");
}

View file

@ -18,7 +18,7 @@
#include <graphics/Material.h>
#include <graphics/Asset.h>
#include "FBXReader.h"
#include "FBXSerializer.h"
#include "TextureCache.h"
// Alias instead of derive to avoid copying

View file

@ -33,7 +33,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::EntityEdit:
case PacketType::EntityData:
case PacketType::EntityPhysics:
return static_cast<PacketVersion>(EntityVersion::FixedLightSerialization);
return static_cast<PacketVersion>(EntityVersion::MaterialRepeat);
case PacketType::EntityQuery:
return static_cast<PacketVersion>(EntityQueryPacketVersion::ConicalFrustums);
case PacketType::AvatarIdentity:

View file

@ -244,7 +244,8 @@ enum class EntityVersion : PacketVersion {
BloomEffect,
GrabProperties,
ScriptGlmVectors,
FixedLightSerialization
FixedLightSerialization,
MaterialRepeat
};
enum class EntityScriptCallMethodVersion : PacketVersion {

View file

@ -389,6 +389,10 @@ void OffscreenSurface::finishQmlLoad(QQmlComponent* qmlComponent,
}
// Allow child windows to be destroyed from JS
QQmlEngine::setObjectOwnership(newObject, QQmlEngine::JavaScriptOwnership);
// add object to the manual deletion list
_sharedObject->addToDeletionList(newObject);
newObject->setParent(parent);
newItem->setParentItem(parent);
} else {

View file

@ -15,6 +15,7 @@
#include <QtQml/QQmlEngine>
#include <QtGui/QOpenGLContext>
#include <QPointer>
#include <NumericalConstants.h>
#include <shared/NsightHelpers.h>
@ -96,6 +97,15 @@ SharedObject::~SharedObject() {
}
#endif
// already-deleted objects will be reset to null by QPointer, so it should be safe to just iterate here
for (auto qmlObject : _deletionList) {
if (qmlObject) {
// manually delete not-deleted-yet qml items
QQmlEngine::setObjectOwnership(qmlObject, QQmlEngine::CppOwnership);
delete qmlObject;
}
}
if (_rootItem) {
delete _rootItem;
_rootItem = nullptr;
@ -412,6 +422,11 @@ bool SharedObject::fetchTexture(TextureAndFence& textureAndFence) {
return true;
}
void hifi::qml::impl::SharedObject::addToDeletionList(QObject * object)
{
_deletionList.append(QPointer<QObject>(object));
}
void SharedObject::setProxyWindow(QWindow* window) {
#ifndef DISABLE_QML
_proxyWindow = window;

View file

@ -66,7 +66,7 @@ public:
void resume();
bool isPaused() const;
bool fetchTexture(TextureAndFence& textureAndFence);
void addToDeletionList(QObject* object);
private:
bool event(QEvent* e) override;
@ -91,6 +91,8 @@ private:
void onAboutToQuit();
void updateTextureAndFence(const TextureAndFence& newTextureAndFence);
QList<QPointer<QObject>> _deletionList;
// Texture management
TextureAndFence _latestTextureAndFence{ 0, 0 };
QQuickItem* _item{ nullptr };

View file

@ -180,9 +180,7 @@ void Deck::processFrames() {
#ifdef WANT_RECORDING_DEBUG
qCDebug(recordingLog) << "Setting timer for next processing " << nextInterval;
#endif
_timer.singleShot(nextInterval, [this] {
processFrames();
});
_timer.singleShot(nextInterval, this, &Deck::processFrames);
}
void Deck::removeClip(const ClipConstPointer& clip) {

Some files were not shown because too many files have changed in this diff Show more