Merge branch 'master' into audio-mixer-volume-control

Ken Cooke, 2019-03-23 16:15:29 -07:00 (committed by GitHub)
commit b5e8176b0f
219 changed files with 6867 additions and 3352 deletions


@ -0,0 +1,39 @@
#version 320 es
precision highp float;
precision highp sampler2D;
layout(location = 0) in vec4 vTexCoordLR;
layout(location = 0) out vec4 FragColorL;
layout(location = 1) out vec4 FragColorR;
uniform sampler2D sampler;
// https://software.intel.com/en-us/node/503873
// sRGB ====> Linear
vec3 color_sRGBToLinear(vec3 srgb) {
return mix(pow((srgb + vec3(0.055)) / vec3(1.055), vec3(2.4)), srgb / vec3(12.92), vec3(lessThanEqual(srgb, vec3(0.04045))));
}
vec4 color_sRGBAToLinear(vec4 srgba) {
return vec4(color_sRGBToLinear(srgba.xyz), srgba.w);
}
// Linear ====> sRGB
vec3 color_LinearTosRGB(vec3 lrgb) {
return mix(vec3(1.055) * pow(vec3(lrgb), vec3(0.41666)) - vec3(0.055), vec3(lrgb) * vec3(12.92), vec3(lessThan(lrgb, vec3(0.0031308))));
}
vec4 color_LinearTosRGBA(vec4 lrgba) {
return vec4(color_LinearTosRGB(lrgba.xyz), lrgba.w);
}
// FIXME switch to texelFetch for getting from the source texture?
void main() {
//FragColorL = color_LinearTosRGBA(texture(sampler, vTexCoordLR.xy));
//FragColorR = color_LinearTosRGBA(texture(sampler, vTexCoordLR.zw));
FragColorL = texture(sampler, vTexCoordLR.xy);
FragColorR = texture(sampler, vTexCoordLR.zw);
}


@ -0,0 +1,21 @@
#version 320 es
layout(location = 0) out vec4 vTexCoordLR;
void main(void) {
const float depth = 0.0;
const vec4 UNIT_QUAD[4] = vec4[4](
vec4(-1.0, -1.0, depth, 1.0),
vec4(1.0, -1.0, depth, 1.0),
vec4(-1.0, 1.0, depth, 1.0),
vec4(1.0, 1.0, depth, 1.0)
);
vec4 pos = UNIT_QUAD[gl_VertexID];
gl_Position = pos;
vTexCoordLR.xy = pos.xy;
vTexCoordLR.xy += 1.0;
vTexCoordLR.y *= 0.5;
vTexCoordLR.x *= 0.25;
vTexCoordLR.zw = vTexCoordLR.xy;
vTexCoordLR.z += 0.5;
}
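
The texture-coordinate arithmetic above maps the full-screen quad onto a side-by-side stereo source: x lands in [0, 0.5] for the left eye and is shifted by 0.5 for the right, while y spans [0, 1]. A minimal stand-alone C++ sketch (not part of this commit; it only reproduces the shader math so the ranges can be checked):

// Reproduces the vertex-shader mapping: pos.xy in clip space [-1, 1] becomes
// UVs in the left half [0, 0.5] x [0, 1]; the right eye adds 0.5 to u.
#include <cstdio>

int main() {
    const float corners[4][2] = { {-1.f, -1.f}, {1.f, -1.f}, {-1.f, 1.f}, {1.f, 1.f} };
    for (const auto& c : corners) {
        float u = (c[0] + 1.f) * 0.25f; // x: [-1, 1] -> [0, 0.5]
        float v = (c[1] + 1.f) * 0.5f;  // y: [-1, 1] -> [0, 1]
        printf("L(%.3f, %.3f)  R(%.3f, %.3f)\n", u, v, u + 0.5f, v);
    }
    return 0;
}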


@ -7,6 +7,7 @@
//
package io.highfidelity.oculus;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
@ -24,7 +25,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
private static final String TAG = OculusMobileActivity.class.getSimpleName();
static { System.loadLibrary("oculusMobile"); }
private native void nativeOnCreate();
private native void nativeOnCreate(AssetManager assetManager);
private native static void nativeOnResume();
private native static void nativeOnPause();
private native static void nativeOnSurfaceChanged(Surface s);
@ -53,7 +54,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
mView = new SurfaceView(this);
mView.getHolder().addCallback(this);
nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnCreate();
}
@ -81,7 +82,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
Log.w(TAG, "QQQ onResume");
super.onResume();
//Reconnect the global reference back to handler
nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnResume();
nativeOnResume();
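
The Java changes above now pass the activity's AssetManager into native code. The native side is not part of this hunk, so the following is only a sketch of what nativeOnCreate(AssetManager) could look like, assuming the standard NDK conversion helper:

// Hypothetical native counterpart for OculusMobileActivity.nativeOnCreate(AssetManager);
// the real implementation lives elsewhere in the oculusMobile library.
#include <jni.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>

extern "C" JNIEXPORT void JNICALL
Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject, jobject assetManager) {
    // Convert the Java AssetManager into an NDK handle so native code can read
    // bundled assets (for example, the shaders added by this commit).
    AAssetManager* manager = AAssetManager_fromJava(env, assetManager);
    (void)manager; // a real implementation would store this for later asset loading
}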


@ -52,6 +52,8 @@
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
#include <hfm/ModelFormatRegistry.h>
#include "entities/AssignmentParentFinder.h"
#include "AssignmentDynamicFactory.h"
#include "RecordingScriptingInterface.h"
@ -99,6 +101,9 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();
DependencyManager::set<ModelFormatRegistry>();
DependencyManager::set<ModelCache>();
// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();
@ -819,6 +824,9 @@ void Agent::aboutToFinish() {
DependencyManager::get<ResourceManager>()->cleanup();
DependencyManager::destroy<ModelFormatRegistry>();
DependencyManager::destroy<ModelCache>();
DependencyManager::destroy<PluginManager>();
// cleanup the AudioInjectorManager (and any still running injectors)


@ -1,7 +1,7 @@
macro(TARGET_PYTHON)
if (NOT HIFI_PYTHON_EXEC)
# Find the python interpreter
if (CAME_VERSION VERSION_LESS 3.12)
if (CMAKE_VERSION VERSION_LESS 3.12)
# this logic is deprecated in CMake after 3.12
# FIXME eventually we should make 3.12 the min cmake verion and just use the Python3 find_package path
set(Python_ADDITIONAL_VERSIONS 3)


@ -379,9 +379,9 @@ Item {
Component.onCompleted: {
// with the link.
if (completeProfileBody.withOculus) {
termsText.text = qsTr("By signing up, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By signing up, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
} else {
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}
}


@ -395,7 +395,7 @@ Item {
text: signUpBody.termsContainerText
Component.onCompleted: {
// with the link.
termsText.text = qsTr("By signing up, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By signing up, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}


@ -218,7 +218,7 @@ Item {
text: usernameCollisionBody.termsContainerText
Component.onCompleted: {
// with the link.
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
termsText.text = qsTr("By creating this user profile, you agree to <a href='https://www.highfidelity.com/termsofservice'>High Fidelity's Terms of Service</a>")
}
}


@ -232,6 +232,10 @@ Item {
text: "Audio Codec: " + root.audioCodec + " Noise Gate: " +
root.audioNoiseGate;
}
StatText {
visible: root.expanded;
text: "Injectors (Local/NonLocal): " + root.audioInjectors.x + "/" + root.audioInjectors.y;
}
StatText {
visible: root.expanded;
text: "Entity Servers In: " + root.entityPacketsInKbps + " kbps";


@ -28,7 +28,7 @@ TabletModalWindow {
id: mouse;
anchors.fill: parent
}
function click(button) {
clickedButton = button;
selected(button);


@ -40,6 +40,7 @@ Item {
property bool isConcurrency: action === 'concurrency';
property bool isAnnouncement: action === 'announcement';
property bool isStacked: !isConcurrency && drillDownToPlace;
property bool has3DHTML: PlatformInfo.has3DHTML();
property int textPadding: 10;
@ -298,7 +299,7 @@ Item {
StateImage {
id: actionIcon;
visible: !isAnnouncement;
visible: !isAnnouncement && has3DHTML;
imageURL: "../../images/info-icon-2-state.svg";
size: 30;
buttonState: messageArea.containsMouse ? 1 : 0;
@ -315,7 +316,7 @@ Item {
}
MouseArea {
id: messageArea;
visible: !isAnnouncement;
visible: !isAnnouncement && has3DHTML;
width: parent.width;
height: messageHeight;
anchors.top: lobby.bottom;


@ -54,7 +54,7 @@ Column {
'require_online=true',
'protocol=' + encodeURIComponent(Window.protocolSignature())
];
endpoint: '/api/v1/user_stories?' + options.join('&');
endpoint: '/api/v1/user_stories?' + options.join('&') + (PlatformInfo.isStandalone() ? '&standalone_optimized=true' : '')
itemsPerPage: 4;
processPage: function (data) {
return data.user_stories.map(makeModelData);


@ -46,6 +46,8 @@ Item {
property string placeName: ""
property string profilePicBorderColor: (connectionStatus == "connection" ? hifi.colors.indigoAccent : (connectionStatus == "friend" ? hifi.colors.greenHighlight : "transparent"))
property alias avImage: avatarImage
property bool has3DHTML: PlatformInfo.has3DHTML();
Item {
id: avatarImage
visible: profileUrl !== "" && userName !== "";
@ -94,10 +96,12 @@ Item {
enabled: (selected && activeTab == "nearbyTab") || isMyCard;
hoverEnabled: enabled
onClicked: {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
}
}
onEntered: infoHoverImage.visible = true;
onEntered: infoHoverImage.visible = has3DHTML;
onExited: infoHoverImage.visible = false;
}
}
@ -352,7 +356,7 @@ Item {
}
StateImage {
id: nameCardConnectionInfoImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && has3DHTML
imageURL: "../../images/info-icon-2-state.svg" // PLACEHOLDER!!!
size: 32;
buttonState: 0;
@ -364,8 +368,10 @@ Item {
enabled: selected
hoverEnabled: true
onClicked: {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = Account.metaverseServerURL + "/users/" + userName;
userInfoViewer.visible = true;
}
}
onEntered: {
nameCardConnectionInfoImage.buttonState = 1;
@ -376,8 +382,7 @@ Item {
}
FiraSansRegular {
id: nameCardConnectionInfoText
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
width: parent.width
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && PlatformInfo.has3DHTML()
height: displayNameTextPixelSize
size: displayNameTextPixelSize - 4
anchors.left: nameCardConnectionInfoImage.right
@ -391,9 +396,10 @@ Item {
id: nameCardRemoveConnectionImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
text: hifi.glyphs.close
size: 28;
size: 24;
x: 120
anchors.verticalCenter: nameCardConnectionInfoImage.verticalCenter
anchors.left: has3DHTML ? nameCardConnectionInfoText.right + 10 : avatarImage.right
}
MouseArea {
anchors.fill:nameCardRemoveConnectionImage


@ -1261,6 +1261,14 @@ Rectangle {
case 'refreshConnections':
refreshConnections();
break;
case 'connectionRemoved':
for (var i=0; i<connectionsUserModel.count; ++i) {
if (connectionsUserModel.get(i).userName === message.params) {
connectionsUserModel.remove(i);
break;
}
}
break;
case 'avatarDisconnected':
var sessionID = message.params[0];
delete ignored[sessionID];


@ -87,22 +87,11 @@ Rectangle {
console.log("Failed to get Marketplace Categories", result.data.message);
} else {
categoriesModel.clear();
categoriesModel.append({
id: -1,
name: "Everything"
});
categoriesModel.append({
id: -1,
name: "Stand-alone Optimized"
});
categoriesModel.append({
id: -1,
name: "Stand-alone Compatible"
});
result.data.items.forEach(function(category) {
result.data.categories.forEach(function(category) {
categoriesModel.append({
id: category.id,
name: category.name
name: category.name,
count: category.count
});
});
}
@ -359,9 +348,11 @@ Rectangle {
}
onAccepted: {
root.searchString = searchField.text;
getMarketplaceItems();
searchField.forceActiveFocus();
if (root.searchString !== searchField.text) {
root.searchString = searchField.text;
getMarketplaceItems();
searchField.forceActiveFocus();
}
}
onActiveFocusChanged: {
@ -397,12 +388,12 @@ Rectangle {
Rectangle {
anchors {
left: parent.left;
bottom: parent.bottom;
top: parent.top;
topMargin: 100;
left: parent.left
bottom: parent.bottom
top: parent.top
topMargin: 100
}
width: parent.width/3
width: parent.width*2/3
color: hifi.colors.white
@ -436,20 +427,49 @@ Rectangle {
border.color: hifi.colors.blueHighlight
border.width: 0
RalewayRegular {
RalewaySemiBold {
id: categoriesItemText
anchors.leftMargin: 15
anchors.fill:parent
anchors.top:parent.top
anchors.bottom: parent.bottom
anchors.left: categoryItemCount.right
elide: Text.ElideRight
text: model.name
color: categoriesItemDelegate.ListView.isCurrentItem ? hifi.colors.blueHighlight : hifi.colors.baseGray
horizontalAlignment: Text.AlignLeft
verticalAlignment: Text.AlignVCenter
size: 14
}
Rectangle {
id: categoryItemCount
anchors {
top: parent.top
bottom: parent.bottom
topMargin: 7
bottomMargin: 7
leftMargin: 10
rightMargin: 10
left: parent.left
}
width: childrenRect.width
color: hifi.colors.faintGray
radius: height/2
RalewaySemiBold {
anchors.top: parent.top
anchors.bottom: parent.bottom
width: 50
text: model.count
color: hifi.colors.lightGrayText
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
size: 16
}
}
}
MouseArea {
anchors.fill: parent
z: 10
@ -486,9 +506,9 @@ Rectangle {
parent: categoriesListView.parent
anchors {
top: categoriesListView.top;
bottom: categoriesListView.bottom;
left: categoriesListView.right;
top: categoriesListView.top
bottom: categoriesListView.bottom
left: categoriesListView.right
}
contentItem.opacity: 1


@ -49,7 +49,7 @@ Item {
property string wornEntityID;
property string updatedItemId;
property string upgradeTitle;
property bool updateAvailable: root.updateItemId && root.updateItemId !== "";
property bool updateAvailable: root.updateItemId !== "";
property bool valid;
property bool standaloneOptimized;
property bool standaloneIncompatible;


@ -523,9 +523,9 @@ Rectangle {
item.cardBackVisible = false;
item.isInstalled = root.installedApps.indexOf(item.id) > -1;
item.wornEntityID = '';
item.upgrade_id = item.upgrade_id ? item.upgrade_id : "";
});
sendToScript({ method: 'purchases_updateWearables' });
return data.assets;
}
}
@ -545,7 +545,7 @@ Rectangle {
delegate: PurchasedItem {
itemName: title;
itemId: id;
updateItemId: model.upgrade_id ? model.upgrade_id : "";
updateItemId: model.upgrade_id
itemPreviewImageUrl: preview;
itemHref: download_url;
certificateId: certificate_id;


@ -32,6 +32,7 @@ Rectangle {
property string initialActiveViewAfterStatus5: "walletInventory";
property bool keyboardRaised: false;
property bool isPassword: false;
property bool has3DHTML: PlatformInfo.has3DHTML();
anchors.fill: (typeof parent === undefined) ? undefined : parent;
@ -335,8 +336,10 @@ Rectangle {
Connections {
onSendSignalToWallet: {
if (msg.method === 'transactionHistory_usernameLinkClicked') {
userInfoViewer.url = msg.usernameLink;
userInfoViewer.visible = true;
if (has3DHTML) {
userInfoViewer.url = msg.usernameLink;
userInfoViewer.visible = true;
}
} else {
sendToScript(msg);
}


@ -24,6 +24,8 @@ Item {
HifiConstants { id: hifi; }
id: root;
property bool has3DHTML: PlatformInfo.has3DHTML();
onVisibleChanged: {
if (visible) {
@ -333,7 +335,9 @@ Item {
onLinkActivated: {
if (link.indexOf("users/") !== -1) {
sendSignalToWallet({method: 'transactionHistory_usernameLinkClicked', usernameLink: link});
if (has3DHTML) {
sendSignalToWallet({method: 'transactionHistory_usernameLinkClicked', usernameLink: link});
}
} else {
sendSignalToWallet({method: 'transactionHistory_linkClicked', itemId: model.marketplace_item});
}


@ -35,6 +35,7 @@ StackView {
property int cardWidth: 212;
property int cardHeight: 152;
property var tablet: null;
property bool has3DHTML: PlatformInfo.has3DHTML();
RootHttpRequest { id: http; }
signal sendToScript(var message);
@ -75,8 +76,10 @@ StackView {
}
function goCard(targetString, standaloneOptimized) {
if (0 !== targetString.indexOf('hifi://')) {
var card = tabletWebView.createObject();
card.url = addressBarDialog.metaverseServerUrl + targetString;
if(has3DHTML) {
var card = tabletWebView.createObject();
card.url = addressBarDialog.metaverseServerUrl + targetString;
}
card.parentStackItem = root;
root.push(card);
return;


@ -117,7 +117,6 @@ Rectangle {
if (loader.item.hasOwnProperty("gotoPreviousApp")) {
loader.item.gotoPreviousApp = true;
}
screenChanged("Web", url)
});
}


@ -1985,6 +1985,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return nullptr;
});
EntityTree::setEmitScriptEventOperator([this](const QUuid& id, const QVariant& message) {
auto entities = getEntities();
if (auto entity = entities->renderableForEntityId(id)) {
entity->emitScriptEvent(message);
}
});
EntityTree::setTextSizeOperator([this](const QUuid& id, const QString& text) {
auto entities = getEntities();
if (auto entity = entities->renderableForEntityId(id)) {
@ -2342,6 +2349,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
return viewFrustum.getPosition();
});
DependencyManager::get<UsersScriptingInterface>()->setKickConfirmationOperator([this] (const QUuid& nodeID) { userKickConfirmation(nodeID); });
render::entities::WebEntityRenderer::setAcquireWebSurfaceOperator([this](const QString& url, bool htmlContent, QSharedPointer<OffscreenQmlSurface>& webSurface, bool& cachedWebSurface) {
bool isTablet = url == TabletScriptingInterface::QML;
if (htmlContent) {
@ -2704,9 +2713,7 @@ void Application::cleanupBeforeQuit() {
DependencyManager::destroy<OffscreenQmlSurfaceCache>();
if (_snapshotSoundInjector != nullptr) {
_snapshotSoundInjector->stop();
}
_snapshotSoundInjector = nullptr;
// destroy Audio so it and its threads have a chance to go down safely
// this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
@ -3043,6 +3050,9 @@ void Application::initializeUi() {
QUrl{ "hifi/commerce/wallet/Wallet.qml" },
QUrl{ "hifi/commerce/wallet/WalletHome.qml" },
QUrl{ "hifi/tablet/TabletAddressDialog.qml" },
QUrl{ "hifi/Card.qml" },
QUrl{ "hifi/Pal.qml" },
QUrl{ "hifi/NameCard.qml" },
}, platformInfoCallback);
QmlContextCallback ttsCallback = [](QQmlContext* context) {
@ -3287,6 +3297,40 @@ void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
#endif
}
void Application::userKickConfirmation(const QUuid& nodeID) {
auto avatarHashMap = DependencyManager::get<AvatarHashMap>();
auto avatar = avatarHashMap->getAvatarBySessionID(nodeID);
QString userName;
if (avatar) {
userName = avatar->getSessionDisplayName();
} else {
userName = nodeID.toString();
}
QString kickMessage = "Do you wish to kick " + userName + " from your domain";
ModalDialogListener* dlg = OffscreenUi::asyncQuestion("Kick User", kickMessage,
QMessageBox::Yes | QMessageBox::No);
if (dlg->getDialogItem()) {
QObject::connect(dlg, &ModalDialogListener::response, this, [=] (QVariant answer) {
QObject::disconnect(dlg, &ModalDialogListener::response, this, nullptr);
bool yes = (static_cast<QMessageBox::StandardButton>(answer.toInt()) == QMessageBox::Yes);
// ask the NodeList to kick the user with the given session ID
if (yes) {
DependencyManager::get<NodeList>()->kickNodeBySessionID(nodeID);
}
DependencyManager::get<UsersScriptingInterface>()->setWaitForKickResponse(false);
});
DependencyManager::get<UsersScriptingInterface>()->setWaitForKickResponse(true);
}
}
void Application::setupQmlSurface(QQmlContext* surfaceContext, bool setAdditionalContextProperties) {
surfaceContext->setContextProperty("Users", DependencyManager::get<UsersScriptingInterface>().data());
surfaceContext->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
@ -4225,10 +4269,9 @@ void Application::keyPressEvent(QKeyEvent* event) {
Setting::Handle<bool> notificationSoundSnapshot{ MenuOption::NotificationSoundsSnapshot, true };
if (notificationSounds.get() && notificationSoundSnapshot.get()) {
if (_snapshotSoundInjector) {
_snapshotSoundInjector->setOptions(options);
_snapshotSoundInjector->restart();
DependencyManager::get<AudioInjectorManager>()->setOptionsAndRestart(_snapshotSoundInjector, options);
} else {
_snapshotSoundInjector = AudioInjector::playSound(_snapshotSound, options);
_snapshotSoundInjector = DependencyManager::get<AudioInjectorManager>()->playSound(_snapshotSound, options);
}
}
takeSnapshot(true);
@ -5772,6 +5815,7 @@ void Application::reloadResourceCaches() {
queryOctree(NodeType::EntityServer, PacketType::EntityQuery);
getMyAvatar()->prepareAvatarEntityDataForReload();
// Clear the entities and their renderables
getEntities()->clear();
@ -6947,9 +6991,6 @@ void Application::updateWindowTitle() const {
}
void Application::clearDomainOctreeDetails(bool clearAll) {
// before we delete all entities get MyAvatar's AvatarEntityData ready
getMyAvatar()->prepareAvatarEntityDataForReload();
// if we're about to quit, we really don't need to do the rest of these things...
if (_aboutToQuit) {
return;


@ -593,6 +593,7 @@ private:
void toggleTabletUI(bool shouldOpen = false) const;
static void setupQmlSurface(QQmlContext* surfaceContext, bool setAdditionalContextProperties);
void userKickConfirmation(const QUuid& nodeID);
MainWindow* _window;
QElapsedTimer& _sessionRunTimer;


@ -149,6 +149,9 @@ void AvatarBookmarks::removeBookmark(const QString& bookmarkName) {
emit bookmarkDeleted(bookmarkName);
}
void AvatarBookmarks::deleteBookmark() {
}
void AvatarBookmarks::updateAvatarEntities(const QVariantList &avatarEntities) {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto currentAvatarEntities = myAvatar->getAvatarEntityData();


@ -76,6 +76,9 @@ protected:
void readFromFile() override;
QVariantMap getAvatarDataToBookmark();
protected slots:
void deleteBookmark() override;
private:
const QString AVATARBOOKMARKS_FILENAME = "avatarbookmarks.json";
const QString ENTRY_AVATAR_URL = "avatarUrl";


@ -51,13 +51,10 @@ protected:
bool _isMenuSorted;
protected slots:
/**jsdoc
* @function AvatarBookmarks.deleteBookmark
*/
/**jsdoc
* @function LocationBookmarks.deleteBookmark
*/
void deleteBookmark();
virtual void deleteBookmark();
private:
static bool sortOrder(QAction* a, QAction* b);


@ -629,8 +629,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
// but most avatars are roughly the same size, so let's not be so fancy yet.
const float AVATAR_STRETCH_FACTOR = 1.0f;
_collisionInjectors.remove_if(
[](const AudioInjectorPointer& injector) { return !injector || injector->isFinished(); });
_collisionInjectors.remove_if([](const AudioInjectorPointer& injector) { return !injector; });
static const int MAX_INJECTOR_COUNT = 3;
if (_collisionInjectors.size() < MAX_INJECTOR_COUNT) {
@ -640,7 +639,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
options.volume = energyFactorOfFull;
options.pitch = 1.0f / AVATAR_STRETCH_FACTOR;
auto injector = AudioInjector::playSoundAndDelete(collisionSound, options);
auto injector = DependencyManager::get<AudioInjectorManager>()->playSound(collisionSound, options, true);
_collisionInjectors.emplace_back(injector);
}
}


@ -24,7 +24,7 @@
#include <SimpleMovingAverage.h>
#include <shared/RateCounter.h>
#include <avatars-renderer/ScriptAvatar.h>
#include <AudioInjector.h>
#include <AudioInjectorManager.h>
#include <workload/Space.h>
#include <EntitySimulation.h> // for SetOfEntities
@ -239,7 +239,7 @@ private:
std::shared_ptr<MyAvatar> _myAvatar;
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
std::list<AudioInjectorPointer> _collisionInjectors;
std::list<QWeakPointer<AudioInjector>> _collisionInjectors;
RateCounter<> _myAvatarSendRate;
int _numAvatarsUpdated { 0 };


@ -87,7 +87,7 @@ void MarketplaceItemUploader::doGetCategories() {
if (error == QNetworkReply::NoError) {
auto doc = QJsonDocument::fromJson(reply->readAll());
auto extractCategoryID = [&doc]() -> std::pair<bool, int> {
auto items = doc.object()["data"].toObject()["items"];
auto items = doc.object()["data"].toObject()["categories"];
if (!items.isArray()) {
qWarning() << "Categories parse error: data.items is not an array";
return { false, 0 };


@ -1570,7 +1570,7 @@ void MyAvatar::handleChangedAvatarEntityData() {
entityTree->withWriteLock([&] {
EntityItemPointer entity = entityTree->addEntity(id, properties);
if (entity) {
packetSender->queueEditEntityMessage(PacketType::EntityAdd, entityTree, id, properties);
packetSender->queueEditAvatarEntityMessage(entityTree, id);
}
});
}
@ -3450,6 +3450,44 @@ float MyAvatar::getGravity() {
return _characterController.getGravity();
}
void MyAvatar::setSessionUUID(const QUuid& sessionUUID) {
QUuid oldSessionID = getSessionUUID();
Avatar::setSessionUUID(sessionUUID);
QUuid newSessionID = getSessionUUID();
if (DependencyManager::get<NodeList>()->getSessionUUID().isNull()) {
// we don't actually have a connection to a domain right now
// so there is no need to queue AvatarEntity messages --> bail early
return;
}
if (newSessionID != oldSessionID) {
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (entityTree) {
QList<QUuid> avatarEntityIDs;
_avatarEntitiesLock.withReadLock([&] {
avatarEntityIDs = _packedAvatarEntityData.keys();
});
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
entityTree->withWriteLock([&] {
for (const auto& entityID : avatarEntityIDs) {
auto entity = entityTree->findEntityByID(entityID);
if (!entity) {
continue;
}
entity->setOwningAvatarID(newSessionID);
// NOTE: each attached AvatarEntity should already have the correct updated parentID
// via magic in SpatiallyNestable, but when an AvatarEntity IS parented to MyAvatar
// we need to update the "packedAvatarEntityData" we send to the avatar-mixer
// so that others will get the updated state.
if (entity->getParentID() == newSessionID) {
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
}
}
});
}
}
}
void MyAvatar::increaseSize() {
float minScale = getDomainMinScale();
float maxScale = getDomainMaxScale();
@ -5495,14 +5533,14 @@ void MyAvatar::initFlowFromFST() {
}
}
void MyAvatar::sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const {
void MyAvatar::sendPacket(const QUuid& entityID) const {
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (entityTree) {
entityTree->withWriteLock([&] {
// force an update packet
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
packetSender->queueEditEntityMessage(PacketType::EntityEdit, entityTree, entityID, properties);
packetSender->queueEditAvatarEntityMessage(entityTree, entityID);
});
}
}


@ -1122,6 +1122,7 @@ public:
float getUserEyeHeight() const;
virtual SpatialParentTree* getParentTree() const override;
virtual glm::vec3 scaleForChildren() const override { return glm::vec3(getSensorToWorldScale()); }
const QUuid& getSelfID() const { return AVATAR_SELF_ID; }
@ -1213,6 +1214,12 @@ public:
public slots:
/**jsdoc
* @function MyAvatar.setSessionUUID
* @param {Uuid} sessionUUID
*/
virtual void setSessionUUID(const QUuid& sessionUUID) override;
/**jsdoc
* Increase the avatar's scale by five percent, up to a minimum scale of <code>1000</code>.
* @function MyAvatar.increaseSize
@ -1912,7 +1919,7 @@ private:
bool didTeleport();
bool getIsAway() const { return _isAway; }
void setAway(bool value);
void sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const override;
void sendPacket(const QUuid& entityID) const override;
std::mutex _pinnedJointsMutex;
std::vector<int> _pinnedJoints;


@ -66,7 +66,7 @@ void TTSScriptingInterface::updateLastSoundAudioInjector() {
if (_lastSoundAudioInjector) {
AudioInjectorOptions options;
options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
_lastSoundAudioInjector->setOptions(options);
DependencyManager::get<AudioInjectorManager>()->setOptions(_lastSoundAudioInjector, options);
_lastSoundAudioInjectorUpdateTimer.start(INJECTOR_INTERVAL_MS);
}
}
@ -143,7 +143,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
options.position = DependencyManager::get<AvatarManager>()->getMyAvatarPosition();
if (_lastSoundAudioInjector) {
_lastSoundAudioInjector->stop();
DependencyManager::get<AudioInjectorManager>()->stop(_lastSoundAudioInjector);
_lastSoundAudioInjectorUpdateTimer.stop();
}
@ -151,7 +151,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
uint32_t numSamples = (uint32_t)_lastSoundByteArray.size() / sizeof(AudioData::AudioSample);
auto samples = reinterpret_cast<AudioData::AudioSample*>(_lastSoundByteArray.data());
auto newAudioData = AudioData::make(numSamples, numChannels, samples);
_lastSoundAudioInjector = AudioInjector::playSoundAndDelete(newAudioData, options);
_lastSoundAudioInjector = DependencyManager::get<AudioInjectorManager>()->playSound(newAudioData, options, true);
_lastSoundAudioInjectorUpdateTimer.start(INJECTOR_INTERVAL_MS);
#else
@ -161,7 +161,7 @@ void TTSScriptingInterface::speakText(const QString& textToSpeak) {
void TTSScriptingInterface::stopLastSpeech() {
if (_lastSoundAudioInjector) {
_lastSoundAudioInjector->stop();
_lastSoundAudioInjector = NULL;
DependencyManager::get<AudioInjectorManager>()->stop(_lastSoundAudioInjector);
_lastSoundAudioInjector = nullptr;
}
}
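
Throughout this commit, call sites stop driving AudioInjector objects directly (AudioInjector::playSound / playSoundAndDelete, setOptions, stop) and instead go through the AudioInjectorManager dependency. A minimal usage sketch of the new pattern, assuming only the manager methods that appear in this diff (playSound, setOptions, setOptionsAndRestart, stop); the wrapper function itself is illustrative:

// Manager-mediated injector lifecycle, as used by the call sites changed above.
#include <glm/glm.hpp>
#include <AudioInjectorManager.h>
#include <DependencyManager.h>

AudioInjectorPointer playClipAt(const SharedSoundPointer& sound, const glm::vec3& position) {
    AudioInjectorOptions options;
    options.position = position;
    options.volume = 0.5f;

    auto injectorManager = DependencyManager::get<AudioInjectorManager>();
    // The third argument asks the manager to delete the injector when it finishes,
    // mirroring the old AudioInjector::playSoundAndDelete() behavior.
    AudioInjectorPointer injector = injectorManager->playSound(sound, options, true);

    // Later updates and teardown also go through the manager rather than the injector:
    //   injectorManager->setOptions(injector, options);
    //   injectorManager->setOptionsAndRestart(injector, options);
    //   injectorManager->stop(injector);
    return injector;
}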


@ -199,13 +199,3 @@ void TestScriptingInterface::setOtherAvatarsReplicaCount(int count) {
int TestScriptingInterface::getOtherAvatarsReplicaCount() {
return qApp->getOtherAvatarsReplicaCount();
}
QString TestScriptingInterface::getOperatingSystemType() {
#ifdef Q_OS_WIN
return "WINDOWS";
#elif defined Q_OS_MAC
return "MACOS";
#else
return "UNKNOWN";
#endif
}


@ -163,13 +163,6 @@ public slots:
*/
Q_INVOKABLE int getOtherAvatarsReplicaCount();
/**jsdoc
* Returns the Operating Sytem type
* @function Test.getOperatingSystemType
* @returns {string} "WINDOWS", "MACOS" or "UNKNOWN"
*/
QString getOperatingSystemType();
private:
bool waitForCondition(qint64 maxWaitMs, std::function<bool()> condition);
QString _testResultsLocation;


@ -29,7 +29,7 @@
#include <PathUtils.h>
#include <ResourceManager.h>
#include <SoundCache.h>
#include <AudioInjector.h>
#include <AudioInjectorManager.h>
#include <RegisteredMetaTypes.h>
#include <ui/TabletScriptingInterface.h>
@ -537,7 +537,7 @@ void Keyboard::handleTriggerBegin(const QUuid& id, const PointerEvent& event) {
audioOptions.position = keyWorldPosition;
audioOptions.volume = 0.05f;
AudioInjector::playSoundAndDelete(_keySound, audioOptions);
DependencyManager::get<AudioInjectorManager>()->playSound(_keySound, audioOptions, true);
int scanCode = key.getScanCode(_capsEnabled);
QString keyString = key.getKeyString(_capsEnabled);


@ -19,9 +19,9 @@
#include <QtCore/QObject>
#include <QTimer>
#include <QHash>
#include <QUuid>
#include <DependencyManager.h>
#include <Sound.h>
#include <AudioInjector.h>
#include <shared/ReadWriteLockable.h>
#include <SettingHandle.h>


@ -266,6 +266,11 @@ void Stats::updateStats(bool force) {
}
STAT_UPDATE(audioCodec, audioClient->getSelectedAudioFormat());
STAT_UPDATE(audioNoiseGate, audioClient->getNoiseGateOpen() ? "Open" : "Closed");
{
int localInjectors = audioClient->getNumLocalInjectors();
size_t nonLocalInjectors = DependencyManager::get<AudioInjectorManager>()->getNumInjectors();
STAT_UPDATE(audioInjectors, QVector2D(localInjectors, nonLocalInjectors));
}
STAT_UPDATE(entityPacketsInKbps, octreeServerCount ? totalEntityKbps / octreeServerCount : -1);


@ -87,6 +87,7 @@ private: \
* @property {number} audioPacketLoss - <em>Read-only.</em>
* @property {string} audioCodec - <em>Read-only.</em>
* @property {string} audioNoiseGate - <em>Read-only.</em>
* @property {Vec2} audioInjectors - <em>Read-only.</em>
* @property {number} entityPacketsInKbps - <em>Read-only.</em>
*
* @property {number} downloads - <em>Read-only.</em>
@ -243,6 +244,7 @@ class Stats : public QQuickItem {
STATS_PROPERTY(int, audioPacketLoss, 0)
STATS_PROPERTY(QString, audioCodec, QString())
STATS_PROPERTY(QString, audioNoiseGate, QString())
STATS_PROPERTY(QVector2D, audioInjectors, QVector2D());
STATS_PROPERTY(int, entityPacketsInKbps, 0)
STATS_PROPERTY(int, downloads, 0)
@ -692,6 +694,13 @@ signals:
*/
void audioNoiseGateChanged();
/**jsdoc
* Triggered when the value of the <code>audioInjectors</code> property changes.
* @function Stats.audioInjectorsChanged
* @returns {Signal}
*/
void audioInjectorsChanged();
/**jsdoc
* Triggered when the value of the <code>entityPacketsInKbps</code> property changes.
* @function Stats.entityPacketsInKbpsChanged


@ -124,41 +124,45 @@ void AnimClip::copyFromNetworkAnim() {
_anim.resize(animFrameCount);
// find the size scale factor for translation in the animation.
const int avatarHipsParentIndex = avatarSkeleton->getParentIndex(avatarSkeleton->nameToJointIndex("Hips"));
const int animHipsParentIndex = animSkeleton.getParentIndex(animSkeleton.nameToJointIndex("Hips"));
const AnimPose& avatarHipsAbsoluteDefaultPose = avatarSkeleton->getAbsoluteDefaultPose(avatarSkeleton->nameToJointIndex("Hips"));
const AnimPose& animHipsAbsoluteDefaultPose = animSkeleton.getAbsoluteDefaultPose(animSkeleton.nameToJointIndex("Hips"));
// the get the units and the heights for the animation and the avatar
const float avatarUnitScale = extractScale(avatarSkeleton->getGeometryOffset()).y;
const float animationUnitScale = extractScale(animModel.offset).y;
const float avatarHeightInMeters = avatarUnitScale * avatarHipsAbsoluteDefaultPose.trans().y;
const float animHeightInMeters = animationUnitScale * animHipsAbsoluteDefaultPose.trans().y;
// get the parent scales for the avatar and the animation
float avatarHipsParentScale = 1.0f;
if (avatarHipsParentIndex >= 0) {
const AnimPose& avatarHipsParentAbsoluteDefaultPose = avatarSkeleton->getAbsoluteDefaultPose(avatarHipsParentIndex);
avatarHipsParentScale = avatarHipsParentAbsoluteDefaultPose.scale().y;
}
float animHipsParentScale = 1.0f;
if (animHipsParentIndex >= 0) {
const AnimPose& animationHipsParentAbsoluteDefaultPose = animSkeleton.getAbsoluteDefaultPose(animHipsParentIndex);
animHipsParentScale = animationHipsParentAbsoluteDefaultPose.scale().y;
}
const float EPSILON = 0.0001f;
float boneLengthScale = 1.0f;
// compute the ratios for the units, the heights in meters, and the parent scales
if ((fabsf(animHeightInMeters) > EPSILON) && (animationUnitScale > EPSILON) && (animHipsParentScale > EPSILON)) {
const float avatarToAnimationHeightRatio = avatarHeightInMeters / animHeightInMeters;
const float unitsRatio = 1.0f / (avatarUnitScale / animationUnitScale);
const float parentScaleRatio = 1.0f / (avatarHipsParentScale / animHipsParentScale);
const int avatarHipsIndex = avatarSkeleton->nameToJointIndex("Hips");
const int animHipsIndex = animSkeleton.nameToJointIndex("Hips");
if (avatarHipsIndex != -1 && animHipsIndex != -1) {
const int avatarHipsParentIndex = avatarSkeleton->getParentIndex(avatarHipsIndex);
const int animHipsParentIndex = animSkeleton.getParentIndex(animHipsIndex);
boneLengthScale = avatarToAnimationHeightRatio * unitsRatio * parentScaleRatio;
const AnimPose& avatarHipsAbsoluteDefaultPose = avatarSkeleton->getAbsoluteDefaultPose(avatarHipsIndex);
const AnimPose& animHipsAbsoluteDefaultPose = animSkeleton.getAbsoluteDefaultPose(animHipsIndex);
// the get the units and the heights for the animation and the avatar
const float avatarUnitScale = extractScale(avatarSkeleton->getGeometryOffset()).y;
const float animationUnitScale = extractScale(animModel.offset).y;
const float avatarHeightInMeters = avatarUnitScale * avatarHipsAbsoluteDefaultPose.trans().y;
const float animHeightInMeters = animationUnitScale * animHipsAbsoluteDefaultPose.trans().y;
// get the parent scales for the avatar and the animation
float avatarHipsParentScale = 1.0f;
if (avatarHipsParentIndex != -1) {
const AnimPose& avatarHipsParentAbsoluteDefaultPose = avatarSkeleton->getAbsoluteDefaultPose(avatarHipsParentIndex);
avatarHipsParentScale = avatarHipsParentAbsoluteDefaultPose.scale().y;
}
float animHipsParentScale = 1.0f;
if (animHipsParentIndex != -1) {
const AnimPose& animationHipsParentAbsoluteDefaultPose = animSkeleton.getAbsoluteDefaultPose(animHipsParentIndex);
animHipsParentScale = animationHipsParentAbsoluteDefaultPose.scale().y;
}
const float EPSILON = 0.0001f;
// compute the ratios for the units, the heights in meters, and the parent scales
if ((fabsf(animHeightInMeters) > EPSILON) && (animationUnitScale > EPSILON) && (animHipsParentScale > EPSILON)) {
const float avatarToAnimationHeightRatio = avatarHeightInMeters / animHeightInMeters;
const float unitsRatio = 1.0f / (avatarUnitScale / animationUnitScale);
const float parentScaleRatio = 1.0f / (avatarHipsParentScale / animHipsParentScale);
boneLengthScale = avatarToAnimationHeightRatio * unitsRatio * parentScaleRatio;
}
}
for (int frame = 0; frame < animFrameCount; frame++) {
const HFMAnimationFrame& animFrame = animModel.animationFrames[frame];
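
The rewritten block above only computes a bone-length scale when both the avatar and the animation actually have a "Hips" joint, and it combines three ratios: relative height in meters, relative unit scale, and relative parent scale. Worked through with purely hypothetical numbers (an avatar authored in meters, an animation authored in centimeters):

// Illustrative numbers only; nothing here comes from the commit.
float exampleBoneLengthScale() {
    const float avatarUnitScale = 1.0f;      // avatar geometry-offset scale (meters)
    const float animationUnitScale = 0.01f;  // animation offset scale (centimeters)
    const float avatarHipsY = 1.0f;          // avatar hips default height, in avatar units
    const float animHipsY = 100.0f;          // animation hips default height, in animation units
    const float avatarHipsParentScale = 1.0f;
    const float animHipsParentScale = 1.0f;

    const float avatarHeightInMeters = avatarUnitScale * avatarHipsY; // 1.0
    const float animHeightInMeters = animationUnitScale * animHipsY;  // 1.0

    const float avatarToAnimationHeightRatio = avatarHeightInMeters / animHeightInMeters; // 1.0
    const float unitsRatio = 1.0f / (avatarUnitScale / animationUnitScale);               // 0.01
    const float parentScaleRatio = 1.0f / (avatarHipsParentScale / animHipsParentScale);  // 1.0

    // Translations authored in the animation's centimeter units shrink by 100x so they
    // line up with the avatar's meter-scale skeleton.
    return avatarToAnimationHeightRatio * unitsRatio * parentScaleRatio; // 0.01
}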


@ -1354,28 +1354,30 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
for (const AudioInjectorPointer& injector : _activeLocalAudioInjectors) {
// the lock guarantees that injectorBuffer, if found, is invariant
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
auto injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
auto options = injector->getOptions();
static const int HRTF_DATASET_INDEX = 1;
int numChannels = injector->isAmbisonic() ? AudioConstants::AMBISONIC : (injector->isStereo() ? AudioConstants::STEREO : AudioConstants::MONO);
int numChannels = options.ambisonic ? AudioConstants::AMBISONIC : (options.stereo ? AudioConstants::STEREO : AudioConstants::MONO);
size_t bytesToRead = numChannels * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
// get one frame from the injector
memset(_localScratchBuffer, 0, bytesToRead);
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
bool isSystemSound = !injector->isPositionSet() && !injector->isAmbisonic();
bool isSystemSound = !options.positionSet && !options.ambisonic;
float gain = injector->getVolume() * (isSystemSound ? _systemInjectorGain : _localInjectorGain);
float gain = options.volume * (isSystemSound ? _systemInjectorGain : _localInjectorGain);
if (injector->isAmbisonic()) {
if (options.ambisonic) {
if (injector->isPositionSet()) {
if (options.positionSet) {
// distance attenuation
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
glm::vec3 relativePosition = options.position - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
gain = gainForSource(distance, gain);
}
@ -1384,7 +1386,7 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// Calculate the soundfield orientation relative to the listener.
// Injector orientation can be used to align a recording to our world coordinates.
//
glm::quat relativeOrientation = injector->getOrientation() * glm::inverse(_orientationGetter());
glm::quat relativeOrientation = options.orientation * glm::inverse(_orientationGetter());
// convert from Y-up (OpenGL) to Z-up (Ambisonic) coordinate system
float qw = relativeOrientation.w;
@ -1396,12 +1398,12 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
injector->getLocalFOA().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
qw, qx, qy, qz, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else if (injector->isStereo()) {
} else if (options.stereo) {
if (injector->isPositionSet()) {
if (options.positionSet) {
// distance attenuation
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
glm::vec3 relativePosition = options.position - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
gain = gainForSource(distance, gain);
}
@ -1414,10 +1416,10 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
} else { // injector is mono
if (injector->isPositionSet()) {
if (options.positionSet) {
// distance attenuation
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
glm::vec3 relativePosition = options.position - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
gain = gainForSource(distance, gain);
@ -1439,21 +1441,21 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
} else {
qCDebug(audioclient) << "injector has no more data, marking finished for removal";
//qCDebug(audioclient) << "injector has no more data, marking finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
} else {
qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
//qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
}
for (const AudioInjectorPointer& injector : injectorsToRemove) {
qCDebug(audioclient) << "removing injector";
//qCDebug(audioclient) << "removing injector";
_activeLocalAudioInjectors.removeOne(injector);
}
@ -1573,15 +1575,13 @@ bool AudioClient::setIsStereoInput(bool isStereoInput) {
}
bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
auto injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
// local injectors are on the AudioInjectorsThread, so we must guard access
Lock lock(_injectorsMutex);
if (!_activeLocalAudioInjectors.contains(injector)) {
qCDebug(audioclient) << "adding new injector";
//qCDebug(audioclient) << "adding new injector";
_activeLocalAudioInjectors.append(injector);
// move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop())
injectorBuffer->setParent(nullptr);
// update the flag
_localInjectorsAvailable.exchange(true, std::memory_order_release);
@ -1597,6 +1597,11 @@ bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
}
}
int AudioClient::getNumLocalInjectors() {
Lock lock(_injectorsMutex);
return _activeLocalAudioInjectors.size();
}
void AudioClient::outputFormatChanged() {
_outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * OUTPUT_CHANNEL_COUNT * _outputFormat.sampleRate()) /
_desiredOutputFormat.sampleRate();


@ -181,6 +181,8 @@ public:
bool isHeadsetPluggedIn() { return _isHeadsetPluggedIn; }
#endif
int getNumLocalInjectors();
public slots:
void start();
void stop();


@ -24,9 +24,10 @@
#include "AudioRingBuffer.h"
#include "AudioLogging.h"
#include "SoundCache.h"
#include "AudioSRC.h"
#include "AudioHelpers.h"
int metaType = qRegisterMetaType<AudioInjectorPointer>("AudioInjectorPointer");
AbstractAudioInterface* AudioInjector::_localAudioInterface{ nullptr };
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
@ -51,26 +52,30 @@ AudioInjector::AudioInjector(AudioDataPointer audioData, const AudioInjectorOpti
{
}
AudioInjector::~AudioInjector() {
deleteLocalBuffer();
}
AudioInjector::~AudioInjector() {}
bool AudioInjector::stateHas(AudioInjectorState state) const {
return (_state & state) == state;
return resultWithReadLock<bool>([&] {
return (_state & state) == state;
});
}
void AudioInjector::setOptions(const AudioInjectorOptions& options) {
// since options.stereo is computed from the audio stream,
// we need to copy it from existing options just in case.
bool currentlyStereo = _options.stereo;
bool currentlyAmbisonic = _options.ambisonic;
_options = options;
_options.stereo = currentlyStereo;
_options.ambisonic = currentlyAmbisonic;
withWriteLock([&] {
bool currentlyStereo = _options.stereo;
bool currentlyAmbisonic = _options.ambisonic;
_options = options;
_options.stereo = currentlyStereo;
_options.ambisonic = currentlyAmbisonic;
});
}
void AudioInjector::finishNetworkInjection() {
_state |= AudioInjectorState::NetworkInjectionFinished;
withWriteLock([&] {
_state |= AudioInjectorState::NetworkInjectionFinished;
});
// if we are already finished with local
// injection, then we are finished
@ -80,35 +85,31 @@ void AudioInjector::finishNetworkInjection() {
}
void AudioInjector::finishLocalInjection() {
_state |= AudioInjectorState::LocalInjectionFinished;
if(_options.localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "finishLocalInjection");
return;
}
bool localOnly = false;
withWriteLock([&] {
_state |= AudioInjectorState::LocalInjectionFinished;
localOnly = _options.localOnly;
});
if(localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
finish();
}
}
void AudioInjector::finish() {
_state |= AudioInjectorState::Finished;
withWriteLock([&] {
_state |= AudioInjectorState::Finished;
});
emit finished();
deleteLocalBuffer();
_localBuffer = nullptr;
}
void AudioInjector::restart() {
// grab the AudioInjectorManager
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
if (thread() != QThread::currentThread()) {
QMetaObject::invokeMethod(this, "restart");
if (!_options.localOnly) {
// notify the AudioInjectorManager to wake up in case it's waiting for new injectors
injectorManager->notifyInjectorReadyCondition();
}
return;
}
// reset the current send offset to zero
_currentSendOffset = 0;
@ -121,19 +122,23 @@ void AudioInjector::restart() {
// check our state to decide if we need extra handling for the restart request
if (stateHas(AudioInjectorState::Finished)) {
if (!inject(&AudioInjectorManager::restartFinishedInjector)) {
if (!inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::restart failed to thread injector";
}
}
}
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&)) {
_state = AudioInjectorState::NotFinished;
AudioInjectorOptions options;
withWriteLock([&] {
_state = AudioInjectorState::NotFinished;
options = _options;
});
int byteOffset = 0;
if (_options.secondOffset > 0.0f) {
int numChannels = _options.ambisonic ? 4 : (_options.stereo ? 2 : 1);
byteOffset = (int)(AudioConstants::SAMPLE_RATE * _options.secondOffset * numChannels);
if (options.secondOffset > 0.0f) {
int numChannels = options.ambisonic ? 4 : (options.stereo ? 2 : 1);
byteOffset = (int)(AudioConstants::SAMPLE_RATE * options.secondOffset * numChannels);
byteOffset *= AudioConstants::SAMPLE_SIZE;
}
_currentSendOffset = byteOffset;
@ -143,7 +148,7 @@ bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInj
}
bool success = true;
if (!_options.localOnly) {
if (!options.localOnly) {
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
if (!(*injectorManager.*injection)(sharedFromThis())) {
success = false;
@ -158,7 +163,8 @@ bool AudioInjector::injectLocally() {
if (_localAudioInterface) {
if (_audioData->getNumBytes() > 0) {
_localBuffer = new AudioInjectorLocalBuffer(_audioData);
_localBuffer = QSharedPointer<AudioInjectorLocalBuffer>(new AudioInjectorLocalBuffer(_audioData), &AudioInjectorLocalBuffer::deleteLater);
_localBuffer->moveToThread(thread());
_localBuffer->open(QIODevice::ReadOnly);
_localBuffer->setShouldLoop(_options.loop);
@ -181,14 +187,6 @@ bool AudioInjector::injectLocally() {
return success;
}
void AudioInjector::deleteLocalBuffer() {
if (_localBuffer) {
_localBuffer->stop();
_localBuffer->deleteLater();
_localBuffer = nullptr;
}
}
const uchar MAX_INJECTOR_VOLUME = packFloatGainToByte(1.0f);
static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;
@ -220,6 +218,10 @@ int64_t AudioInjector::injectNextFrame() {
static int volumeOptionOffset = -1;
static int audioDataOffset = -1;
AudioInjectorOptions options = resultWithReadLock<AudioInjectorOptions>([&] {
return _options;
});
if (!_currentPacket) {
if (_currentSendOffset < 0 ||
_currentSendOffset >= (int)_audioData->getNumBytes()) {
@ -253,7 +255,7 @@ int64_t AudioInjector::injectNextFrame() {
audioPacketStream << QUuid::createUuid();
// pack the stereo/mono type of the stream
audioPacketStream << _options.stereo;
audioPacketStream << options.stereo;
// pack the flag for loopback, if requested
loopbackOptionOffset = _currentPacket->pos();
@ -262,15 +264,16 @@ int64_t AudioInjector::injectNextFrame() {
// pack the position for injected audio
positionOptionOffset = _currentPacket->pos();
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
sizeof(_options.position));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.position),
sizeof(options.position));
// pack our orientation for injected audio
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.orientation),
sizeof(_options.orientation));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.orientation),
sizeof(options.orientation));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&options.position),
sizeof(options.position));
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
sizeof(_options.position));
glm::vec3 boxCorner = glm::vec3(0);
audioPacketStream.writeRawData(reinterpret_cast<const char*>(&boxCorner),
sizeof(glm::vec3));
@ -283,7 +286,7 @@ int64_t AudioInjector::injectNextFrame() {
volumeOptionOffset = _currentPacket->pos();
quint8 volume = MAX_INJECTOR_VOLUME;
audioPacketStream << volume;
audioPacketStream << _options.ignorePenumbra;
audioPacketStream << options.ignorePenumbra;
audioDataOffset = _currentPacket->pos();
@ -313,10 +316,10 @@ int64_t AudioInjector::injectNextFrame() {
_currentPacket->writePrimitive((uchar)(_localAudioInterface && _localAudioInterface->shouldLoopbackInjectors()));
_currentPacket->seek(positionOptionOffset);
_currentPacket->writePrimitive(_options.position);
_currentPacket->writePrimitive(_options.orientation);
_currentPacket->writePrimitive(options.position);
_currentPacket->writePrimitive(options.orientation);
quint8 volume = packFloatGainToByte(_options.volume);
quint8 volume = packFloatGainToByte(options.volume);
_currentPacket->seek(volumeOptionOffset);
_currentPacket->writePrimitive(volume);
@ -326,8 +329,8 @@ int64_t AudioInjector::injectNextFrame() {
// Might be a reasonable place to do the encode step here.
QByteArray decodedAudio;
int totalBytesLeftToCopy = (_options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
if (!_options.loop) {
int totalBytesLeftToCopy = (options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
if (!options.loop) {
// If we aren't looping, let's make sure we don't read past the end
int bytesLeftToRead = _audioData->getNumBytes() - _currentSendOffset;
totalBytesLeftToCopy = std::min(totalBytesLeftToCopy, bytesLeftToRead);
@ -342,14 +345,16 @@ int64_t AudioInjector::injectNextFrame() {
auto samplesOut = reinterpret_cast<AudioSample*>(decodedAudio.data());
// Copy and Measure the loudness of this frame
_loudness = 0.0f;
for (int i = 0; i < samplesLeftToCopy; ++i) {
auto index = (currentSample + i) % _audioData->getNumSamples();
auto sample = samples[index];
samplesOut[i] = sample;
_loudness += abs(sample) / (AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
}
_loudness /= (float)samplesLeftToCopy;
withWriteLock([&] {
_loudness = 0.0f;
for (int i = 0; i < samplesLeftToCopy; ++i) {
auto index = (currentSample + i) % _audioData->getNumSamples();
auto sample = samples[index];
samplesOut[i] = sample;
_loudness += abs(sample) / (AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
}
_loudness /= (float)samplesLeftToCopy;
});
_currentSendOffset = (_currentSendOffset + totalBytesLeftToCopy) %
_audioData->getNumBytes();
@ -371,7 +376,7 @@ int64_t AudioInjector::injectNextFrame() {
_outgoingSequenceNumber++;
}
if (_currentSendOffset == 0 && !_options.loop) {
if (_currentSendOffset == 0 && !options.loop) {
finishNetworkInjection();
return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
}
@ -391,134 +396,10 @@ int64_t AudioInjector::injectNextFrame() {
// If we are falling behind by more frames than our threshold, let's skip the frames ahead
qCDebug(audio) << this << "injectNextFrame() skipping ahead, fell behind by " << (currentFrameBasedOnElapsedTime - _nextFrame) << " frames";
_nextFrame = currentFrameBasedOnElapsedTime;
_currentSendOffset = _nextFrame * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL * (_options.stereo ? 2 : 1) % _audioData->getNumBytes();
_currentSendOffset = _nextFrame * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL * (options.stereo ? 2 : 1) % _audioData->getNumBytes();
}
int64_t playNextFrameAt = ++_nextFrame * AudioConstants::NETWORK_FRAME_USECS;
return std::max(INT64_C(0), playNextFrameAt - currentTime);
}
void AudioInjector::stop() {
// trigger a call on the injector's thread to change state to finished
QMetaObject::invokeMethod(this, "finish");
}
void AudioInjector::triggerDeleteAfterFinish() {
// make sure this fires on the AudioInjector thread
if (thread() != QThread::currentThread()) {
QMetaObject::invokeMethod(this, "triggerDeleteAfterFinish", Qt::QueuedConnection);
return;
}
if (stateHas(AudioInjectorState::Finished)) {
stop();
} else {
_state |= AudioInjectorState::PendingDelete;
}
}
AudioInjectorPointer AudioInjector::playSoundAndDelete(SharedSoundPointer sound, const AudioInjectorOptions& options) {
AudioInjectorPointer injector = playSound(sound, options);
if (injector) {
injector->_state |= AudioInjectorState::PendingDelete;
}
return injector;
}
AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const AudioInjectorOptions& options) {
if (!sound || !sound->isReady()) {
return AudioInjectorPointer();
}
if (options.pitch == 1.0f) {
AudioInjectorPointer injector = AudioInjectorPointer::create(sound, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread injector";
}
return injector;
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto audioData = sound->getAudioData();
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
AudioInjectorPointer injector = AudioInjectorPointer::create(newAudioData, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread pitch-shifted injector";
}
return injector;
}
}
AudioInjectorPointer AudioInjector::playSoundAndDelete(AudioDataPointer audioData, const AudioInjectorOptions& options) {
AudioInjectorPointer injector = playSound(audioData, options);
if (injector) {
injector->_state |= AudioInjectorState::PendingDelete;
}
return injector;
}
AudioInjectorPointer AudioInjector::playSound(AudioDataPointer audioData, const AudioInjectorOptions& options) {
if (options.pitch == 1.0f) {
AudioInjectorPointer injector = AudioInjectorPointer::create(audioData, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread pitch-shifted injector";
}
return injector;
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
return AudioInjector::playSound(newAudioData, options);
}
}


@ -19,6 +19,8 @@
#include <QtCore/QSharedPointer>
#include <QtCore/QThread>
#include <shared/ReadWriteLockable.h>
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
@ -49,7 +51,7 @@ AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs)
// In order to make scripting cleaner for the AudioInjector, the script now holds on to the AudioInjector object
// until it dies.
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector> {
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector>, public ReadWriteLockable {
Q_OBJECT
public:
AudioInjector(SharedSoundPointer sound, const AudioInjectorOptions& injectorOptions);
@ -61,40 +63,34 @@ public:
int getCurrentSendOffset() const { return _currentSendOffset; }
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
AudioInjectorLocalBuffer* getLocalBuffer() const { return _localBuffer; }
QSharedPointer<AudioInjectorLocalBuffer> getLocalBuffer() const { return _localBuffer; }
AudioHRTF& getLocalHRTF() { return _localHRTF; }
AudioFOA& getLocalFOA() { return _localFOA; }
bool isLocalOnly() const { return _options.localOnly; }
float getVolume() const { return _options.volume; }
bool isPositionSet() const { return _options.positionSet; }
glm::vec3 getPosition() const { return _options.position; }
glm::quat getOrientation() const { return _options.orientation; }
bool isStereo() const { return _options.stereo; }
bool isAmbisonic() const { return _options.ambisonic; }
float getLoudness() const { return resultWithReadLock<float>([&] { return _loudness; }); }
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
bool isLocalOnly() const { return resultWithReadLock<bool>([&] { return _options.localOnly; }); }
float getVolume() const { return resultWithReadLock<float>([&] { return _options.volume; }); }
bool isPositionSet() const { return resultWithReadLock<bool>([&] { return _options.positionSet; }); }
glm::vec3 getPosition() const { return resultWithReadLock<glm::vec3>([&] { return _options.position; }); }
glm::quat getOrientation() const { return resultWithReadLock<glm::quat>([&] { return _options.orientation; }); }
bool isStereo() const { return resultWithReadLock<bool>([&] { return _options.stereo; }); }
bool isAmbisonic() const { return resultWithReadLock<bool>([&] { return _options.ambisonic; }); }
AudioInjectorOptions getOptions() const { return resultWithReadLock<AudioInjectorOptions>([&] { return _options; }); }
void setOptions(const AudioInjectorOptions& options);
bool stateHas(AudioInjectorState state) const;
static void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }
static AudioInjectorPointer playSoundAndDelete(SharedSoundPointer sound, const AudioInjectorOptions& options);
static AudioInjectorPointer playSound(SharedSoundPointer sound, const AudioInjectorOptions& options);
static AudioInjectorPointer playSoundAndDelete(AudioDataPointer audioData, const AudioInjectorOptions& options);
static AudioInjectorPointer playSound(AudioDataPointer audioData, const AudioInjectorOptions& options);
void restart();
void finish();
void finishNetworkInjection();
public slots:
void restart();
void stop();
void triggerDeleteAfterFinish();
const AudioInjectorOptions& getOptions() const { return _options; }
void setOptions(const AudioInjectorOptions& options);
float getLoudness() const { return _loudness; }
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
void finish();
void finishLocalInjection();
void finishNetworkInjection();
signals:
void finished();
@ -104,7 +100,6 @@ private:
int64_t injectNextFrame();
bool inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&));
bool injectLocally();
void deleteLocalBuffer();
static AbstractAudioInterface* _localAudioInterface;
@ -116,7 +111,7 @@ private:
float _loudness { 0.0f };
int _currentSendOffset { 0 };
std::unique_ptr<NLPacket> _currentPacket { nullptr };
AudioInjectorLocalBuffer* _localBuffer { nullptr };
QSharedPointer<AudioInjectorLocalBuffer> _localBuffer { nullptr };
int64_t _nextFrame { 0 };
std::unique_ptr<QElapsedTimer> _frameTimer { nullptr };
@ -128,4 +123,6 @@ private:
friend class AudioInjectorManager;
};
Q_DECLARE_METATYPE(AudioInjectorPointer)
#endif // hifi_AudioInjector_h

View file

@ -16,6 +16,10 @@ AudioInjectorLocalBuffer::AudioInjectorLocalBuffer(AudioDataPointer audioData) :
{
}
AudioInjectorLocalBuffer::~AudioInjectorLocalBuffer() {
stop();
}
void AudioInjectorLocalBuffer::stop() {
_isStopped = true;
@ -30,9 +34,8 @@ bool AudioInjectorLocalBuffer::seek(qint64 pos) {
}
}
qint64 AudioInjectorLocalBuffer::readData(char* data, qint64 maxSize) {
if (!_isStopped) {
if (!_isStopped && _audioData) {
// first copy to the end of the raw audio
int bytesToEnd = (int)_audioData->getNumBytes() - _currentOffset;

View file

@ -22,6 +22,7 @@ class AudioInjectorLocalBuffer : public QIODevice {
Q_OBJECT
public:
AudioInjectorLocalBuffer(AudioDataPointer audioData);
~AudioInjectorLocalBuffer();
void stop();

View file

@ -14,11 +14,14 @@
#include <QtCore/QCoreApplication>
#include <SharedUtil.h>
#include <shared/QtHelpers.h>
#include "AudioConstants.h"
#include "AudioInjector.h"
#include "AudioLogging.h"
#include "AudioSRC.h"
AudioInjectorManager::~AudioInjectorManager() {
_shouldStop = true;
@ -30,7 +33,7 @@ AudioInjectorManager::~AudioInjectorManager() {
auto& timePointerPair = _injectors.top();
// ask it to stop and be deleted
timePointerPair.second->stop();
timePointerPair.second->finish();
_injectors.pop();
}
@ -46,6 +49,8 @@ AudioInjectorManager::~AudioInjectorManager() {
_thread->quit();
_thread->wait();
}
moveToThread(qApp->thread());
}
void AudioInjectorManager::createThread() {
@ -55,6 +60,8 @@ void AudioInjectorManager::createThread() {
// when the thread is started, have it call our run to handle injection of audio
connect(_thread, &QThread::started, this, &AudioInjectorManager::run, Qt::DirectConnection);
moveToThread(_thread);
// start the thread
_thread->start();
}
@ -141,36 +148,7 @@ bool AudioInjectorManager::wouldExceedLimits() { // Should be called inside of a
bool AudioInjectorManager::threadInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
}
// guard the injectors vector with a mutex
Lock lock(_injectorsMutex);
if (wouldExceedLimits()) {
return false;
} else {
if (!_thread) {
createThread();
}
// move the injector to the QThread
injector->moveToThread(_thread);
// add the injector to the queue with a send timestamp of now
_injectors.emplace(usecTimestampNow(), injector);
// notify our wait condition so we can inject two frames for this injector immediately
_injectorReady.notify_one();
return true;
}
}
bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
}
@ -188,3 +166,192 @@ bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& i
}
return true;
}
AudioInjectorPointer AudioInjectorManager::playSound(const SharedSoundPointer& sound, const AudioInjectorOptions& options, bool setPendingDelete) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return nullptr;
}
AudioInjectorPointer injector = nullptr;
if (sound && sound->isReady()) {
if (options.pitch == 1.0f) {
injector = QSharedPointer<AudioInjector>(new AudioInjector(sound, options), &AudioInjector::deleteLater);
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto audioData = sound->getAudioData();
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
injector = QSharedPointer<AudioInjector>(new AudioInjector(newAudioData, options), &AudioInjector::deleteLater);
}
}
if (!injector) {
return nullptr;
}
if (setPendingDelete) {
injector->_state |= AudioInjectorState::PendingDelete;
}
injector->moveToThread(_thread);
injector->inject(&AudioInjectorManager::threadInjector);
return injector;
}
AudioInjectorPointer AudioInjectorManager::playSound(const AudioDataPointer& audioData, const AudioInjectorOptions& options, bool setPendingDelete) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return nullptr;
}
AudioInjectorPointer injector = nullptr;
if (options.pitch == 1.0f) {
injector = QSharedPointer<AudioInjector>(new AudioInjector(audioData, options), &AudioInjector::deleteLater);
} else {
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);
auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
AudioSRC resampler(standardRate, resampledRate, numChannels);
// create a resampled buffer that is guaranteed to be large enough
const int maxOutputFrames = resampler.getMaxOutput(numFrames);
const int maxOutputSize = maxOutputFrames * numChannels * sizeof(AudioSample);
QByteArray resampledBuffer(maxOutputSize, '\0');
auto bufferPtr = reinterpret_cast<AudioSample*>(resampledBuffer.data());
resampler.render(audioData->data(), bufferPtr, numFrames);
int numSamples = maxOutputFrames * numChannels;
auto newAudioData = AudioData::make(numSamples, numChannels, bufferPtr);
injector = QSharedPointer<AudioInjector>(new AudioInjector(newAudioData, options), &AudioInjector::deleteLater);
}
if (!injector) {
return nullptr;
}
if (setPendingDelete) {
injector->_state |= AudioInjectorState::PendingDelete;
}
injector->moveToThread(_thread);
injector->inject(&AudioInjectorManager::threadInjector);
return injector;
}
void AudioInjectorManager::setOptionsAndRestart(const AudioInjectorPointer& injector, const AudioInjectorOptions& options) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "setOptionsAndRestart", Q_ARG(const AudioInjectorPointer&, injector), Q_ARG(const AudioInjectorOptions&, options));
_injectorReady.notify_one();
return;
}
injector->setOptions(options);
injector->restart();
}
void AudioInjectorManager::restart(const AudioInjectorPointer& injector) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "restart", Q_ARG(const AudioInjectorPointer&, injector));
_injectorReady.notify_one();
return;
}
injector->restart();
}
void AudioInjectorManager::setOptions(const AudioInjectorPointer& injector, const AudioInjectorOptions& options) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "setOptions", Q_ARG(const AudioInjectorPointer&, injector), Q_ARG(const AudioInjectorOptions&, options));
_injectorReady.notify_one();
return;
}
injector->setOptions(options);
}
AudioInjectorOptions AudioInjectorManager::getOptions(const AudioInjectorPointer& injector) {
if (!injector) {
return AudioInjectorOptions();
}
return injector->getOptions();
}
float AudioInjectorManager::getLoudness(const AudioInjectorPointer& injector) {
if (!injector) {
return 0.0f;
}
return injector->getLoudness();
}
bool AudioInjectorManager::isPlaying(const AudioInjectorPointer& injector) {
if (!injector) {
return false;
}
return injector->isPlaying();
}
void AudioInjectorManager::stop(const AudioInjectorPointer& injector) {
if (!injector) {
return;
}
if (QThread::currentThread() != _thread) {
QMetaObject::invokeMethod(this, "stop", Q_ARG(const AudioInjectorPointer&, injector));
_injectorReady.notify_one();
return;
}
injector->finish();
}
size_t AudioInjectorManager::getNumInjectors() {
Lock lock(_injectorsMutex);
return _injectors.size();
}
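For orientation, a hypothetical caller of the manager-centric API added above might look like the sketch below. It assumes AudioInjectorManager is obtained through DependencyManager (as the SINGLETON_DEPENDENCY macro in its header suggests), that the in-tree headers are available, and that the sound passed in is already loaded; the playChime() helper and its option values are invented for illustration, not code from this commit.

// Hypothetical usage sketch of the slot-based AudioInjectorManager API (in-tree code,
// relying on the engine's own headers rather than being a standalone program).
#include "AudioInjectorManager.h"   // assumed in-tree include path
#include <DependencyManager.h>

AudioInjectorPointer playChime(const SharedSoundPointer& sound) {
    auto injectorManager = DependencyManager::get<AudioInjectorManager>();

    AudioInjectorOptions options;
    options.position = glm::vec3(0.0f, 1.0f, 0.0f);
    options.volume = 0.5f;
    options.localOnly = true;

    // playSound() builds the injector, moves it to the manager's thread and queues it;
    // it returns nullptr if the manager is shutting down or the sound is not ready.
    return injectorManager->playSound(sound, options);
}

A caller could later pass the returned pointer back to the manager's stop(), restart(), or setOptions() slots from any thread, since those slots re-dispatch themselves onto the manager's thread via QMetaObject::invokeMethod.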

View file

@ -30,8 +30,27 @@ class AudioInjectorManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
~AudioInjectorManager();
AudioInjectorPointer playSound(const SharedSoundPointer& sound, const AudioInjectorOptions& options, bool setPendingDelete = false);
AudioInjectorPointer playSound(const AudioDataPointer& audioData, const AudioInjectorOptions& options, bool setPendingDelete = false);
size_t getNumInjectors();
public slots:
void setOptionsAndRestart(const AudioInjectorPointer& injector, const AudioInjectorOptions& options);
void restart(const AudioInjectorPointer& injector);
void setOptions(const AudioInjectorPointer& injector, const AudioInjectorOptions& options);
AudioInjectorOptions getOptions(const AudioInjectorPointer& injector);
float getLoudness(const AudioInjectorPointer& injector);
bool isPlaying(const AudioInjectorPointer& injector);
void stop(const AudioInjectorPointer& injector);
private slots:
void run();
private:
using TimeInjectorPointerPair = std::pair<uint64_t, AudioInjectorPointer>;
@ -49,11 +68,10 @@ private:
using Lock = std::unique_lock<Mutex>;
bool threadInjector(const AudioInjectorPointer& injector);
bool restartFinishedInjector(const AudioInjectorPointer& injector);
void notifyInjectorReadyCondition() { _injectorReady.notify_one(); }
bool wouldExceedLimits();
AudioInjectorManager() {};
AudioInjectorManager() { createThread(); }
AudioInjectorManager(const AudioInjectorManager&) = delete;
AudioInjectorManager& operator=(const AudioInjectorManager&) = delete;

View file

@ -43,7 +43,6 @@
using namespace std;
const int NUM_BODY_CONE_SIDES = 9;
const float CHAT_MESSAGE_SCALE = 0.0015f;
const float CHAT_MESSAGE_HEIGHT = 0.1f;
const float DISPLAYNAME_FADE_TIME = 0.5f;
@ -377,7 +376,7 @@ bool Avatar::applyGrabChanges() {
const EntityItemPointer& entity = std::dynamic_pointer_cast<EntityItem>(target);
if (entity && entity->getEntityHostType() == entity::HostType::AVATAR && entity->getSimulationOwner().getID() == getID()) {
EntityItemProperties properties = entity->getProperties();
sendPacket(entity->getID(), properties);
sendPacket(entity->getID());
}
}
} else {
@ -1661,60 +1660,6 @@ int Avatar::parseDataFromBuffer(const QByteArray& buffer) {
return bytesRead;
}
int Avatar::_jointConesID = GeometryCache::UNKNOWN_ID;
// render a makeshift cone section that serves as a body part connecting joint spheres
void Avatar::renderJointConnectingCone(gpu::Batch& batch, glm::vec3 position1, glm::vec3 position2,
float radius1, float radius2, const glm::vec4& color) {
auto geometryCache = DependencyManager::get<GeometryCache>();
if (_jointConesID == GeometryCache::UNKNOWN_ID) {
_jointConesID = geometryCache->allocateID();
}
glm::vec3 axis = position2 - position1;
float length = glm::length(axis);
if (length > 0.0f) {
axis /= length;
glm::vec3 perpSin = glm::vec3(1.0f, 0.0f, 0.0f);
glm::vec3 perpCos = glm::normalize(glm::cross(axis, perpSin));
perpSin = glm::cross(perpCos, axis);
float angleb = 0.0f;
QVector<glm::vec3> points;
for (int i = 0; i < NUM_BODY_CONE_SIDES; i ++) {
// the rectangles that comprise the sides of the cone section are
// referenced by "a" and "b" in one dimension, and "1", and "2" in the other dimension.
float anglea = angleb;
angleb = ((float)(i+1) / (float)NUM_BODY_CONE_SIDES) * TWO_PI;
float sa = sinf(anglea);
float sb = sinf(angleb);
float ca = cosf(anglea);
float cb = cosf(angleb);
glm::vec3 p1a = position1 + perpSin * sa * radius1 + perpCos * ca * radius1;
glm::vec3 p1b = position1 + perpSin * sb * radius1 + perpCos * cb * radius1;
glm::vec3 p2a = position2 + perpSin * sa * radius2 + perpCos * ca * radius2;
glm::vec3 p2b = position2 + perpSin * sb * radius2 + perpCos * cb * radius2;
points << p1a << p1b << p2a << p1b << p2a << p2b;
}
PROFILE_RANGE_BATCH(batch, __FUNCTION__);
// TODO: this is really inefficient constantly recreating these vertex buffers. It would be
// better if the avatars cached these buffers for each of the joints they are rendering
geometryCache->updateVertices(_jointConesID, points, color);
geometryCache->renderVertices(batch, gpu::TRIANGLES, _jointConesID);
}
}
float Avatar::getSkeletonHeight() const {
Extents extents = _skeletonModel->getBindExtents();
return extents.maximum.y - extents.minimum.y;

View file

@ -296,9 +296,6 @@ public:
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
static void renderJointConnectingCone(gpu::Batch& batch, glm::vec3 position1, glm::vec3 position2,
float radius1, float radius2, const glm::vec4& color);
/**jsdoc
* Set the offset applied to the current avatar. The offset adjusts the position that the avatar is rendered. For example,
* with an offset of <code>{ x: 0, y: 0.1, z: 0 }</code>, your avatar will appear to be raised off the ground slightly.
@ -608,7 +605,7 @@ protected:
// protected methods...
bool isLookingAtMe(AvatarSharedPointer avatar) const;
virtual void sendPacket(const QUuid& entityID, const EntityItemProperties& properties) const { }
virtual void sendPacket(const QUuid& entityID) const { }
bool applyGrabChanges();
void relayJointDataToChildren();
@ -665,8 +662,6 @@ protected:
AvatarTransit _transit;
std::mutex _transitLock;
static int _jointConesID;
int _voiceSphereID;
float _displayNameTargetAlpha { 1.0f };

View file

@ -338,24 +338,20 @@ void SkeletonModel::computeBoundingShape() {
void SkeletonModel::renderBoundingCollisionShapes(RenderArgs* args, gpu::Batch& batch, float scale, float alpha) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// draw a blue sphere at the capsule top point
glm::vec3 topPoint = _translation + getRotation() * (scale * (_boundingCapsuleLocalOffset + (0.5f * _boundingCapsuleHeight) * Vectors::UNIT_Y));
glm::vec3 topPoint = _translation + _rotation * (scale * (_boundingCapsuleLocalOffset + (0.5f * _boundingCapsuleHeight) * Vectors::UNIT_Y));
batch.setModelTransform(Transform().setTranslation(topPoint).postScale(scale * _boundingCapsuleRadius));
geometryCache->renderSolidSphereInstance(args, batch, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
// draw a yellow sphere at the capsule bottom point
glm::vec3 bottomPoint = topPoint - glm::vec3(0.0f, scale * _boundingCapsuleHeight, 0.0f);
glm::vec3 axis = topPoint - bottomPoint;
glm::vec3 bottomPoint = topPoint - _rotation * glm::vec3(0.0f, scale * _boundingCapsuleHeight, 0.0f);
batch.setModelTransform(Transform().setTranslation(bottomPoint).postScale(scale * _boundingCapsuleRadius));
geometryCache->renderSolidSphereInstance(args, batch, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
// draw a green cylinder between the two points
glm::vec3 origin(0.0f);
batch.setModelTransform(Transform().setTranslation(bottomPoint));
geometryCache->bindSimpleProgram(batch);
Avatar::renderJointConnectingCone(batch, origin, axis, scale * _boundingCapsuleRadius, scale * _boundingCapsuleRadius,
glm::vec4(0.6f, 0.8f, 0.6f, alpha));
float capsuleDiameter = 2.0f * _boundingCapsuleRadius;
glm::vec3 cylinderDimensions = glm::vec3(capsuleDiameter, _boundingCapsuleHeight, capsuleDiameter);
batch.setModelTransform(Transform().setScale(scale * cylinderDimensions).setRotation(_rotation).setTranslation(0.5f * (topPoint + bottomPoint)));
geometryCache->renderSolidShapeInstance(args, batch, GeometryCache::Shape::Cylinder, glm::vec4(0.6f, 0.8f, 0.6f, alpha));
}
bool SkeletonModel::hasSkeleton() {

View file

@ -1,8 +1,6 @@
set(TARGET_NAME baking)
setup_hifi_library(Concurrent)
link_hifi_libraries(shared graphics networking ktx image fbx)
link_hifi_libraries(shared shaders graphics networking material-networking graphics-scripting ktx image fbx model-baker task)
include_hifi_library_headers(gpu)
include_hifi_library_headers(hfm)
target_draco()

View file

@ -52,7 +52,7 @@ protected:
void handleErrors(const QStringList& errors);
// List of baked output files. For instance, for an FBX this would
// include the .fbx and all of its texture files.
// include the .fbx, a .fst pointing to the fbx, and all of the fbx texture files.
std::vector<QString> _outputFiles;
QStringList _errorList;

View file

@ -33,29 +33,19 @@
#include "ModelBakingLoggingCategory.h"
#include "TextureBaker.h"
#ifdef HIFI_DUMP_FBX
#include "FBXToJSON.h"
#endif
void FBXBaker::bake() {
qDebug() << "FBXBaker" << _modelURL << "bake starting";
// setup the output folder for the results of this bake
setupOutputFolder();
if (shouldStop()) {
return;
FBXBaker::FBXBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
ModelBaker(inputModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, hasBeenBaked) {
if (hasBeenBaked) {
// Look for the original model file one directory higher. Perhaps this is an oven output directory.
QUrl originalRelativePath = QUrl("../original/" + inputModelURL.fileName().replace(BAKED_FBX_EXTENSION, FBX_EXTENSION));
QUrl newInputModelURL = inputModelURL.adjusted(QUrl::RemoveFilename).resolved(originalRelativePath);
_modelURL = newInputModelURL;
}
connect(this, &FBXBaker::sourceCopyReadyToLoad, this, &FBXBaker::bakeSourceCopy);
// make a local copy of the FBX file
loadSourceFBX();
}
void FBXBaker::bakeSourceCopy() {
// load the scene from the FBX file
importScene();
void FBXBaker::bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
_hfmModel = hfmModel;
if (shouldStop()) {
return;
@ -68,222 +58,100 @@ void FBXBaker::bakeSourceCopy() {
return;
}
rewriteAndBakeSceneModels();
rewriteAndBakeSceneModels(hfmModel->meshes, dracoMeshes, dracoMaterialLists);
}
if (shouldStop()) {
void FBXBaker::replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList) {
// Compress mesh information and store in dracoMeshNode
FBXNode dracoMeshNode;
bool success = buildDracoMeshNode(dracoMeshNode, dracoMeshBytes, dracoMaterialList);
if (!success) {
return;
}
// check if we're already done with textures (in case we had none to re-write)
checkIfTexturesFinished();
}
void FBXBaker::setupOutputFolder() {
// make sure there isn't already an output directory using the same name
if (QDir(_bakedOutputDir).exists()) {
qWarning() << "Output path" << _bakedOutputDir << "already exists. Continuing.";
} else {
qCDebug(model_baking) << "Creating FBX output folder" << _bakedOutputDir;
meshNode.children.push_back(dracoMeshNode);
// attempt to make the output folder
if (!QDir().mkpath(_bakedOutputDir)) {
handleError("Failed to create FBX output folder " + _bakedOutputDir);
return;
}
// attempt to make the output folder
if (!QDir().mkpath(_originalOutputDir)) {
handleError("Failed to create FBX output folder " + _originalOutputDir);
return;
}
}
}
static const std::vector<QString> nodeNamesToDelete {
// Node data that is packed into the draco mesh
"Vertices",
"PolygonVertexIndex",
"LayerElementNormal",
"LayerElementColor",
"LayerElementUV",
"LayerElementMaterial",
"LayerElementTexture",
void FBXBaker::loadSourceFBX() {
// check if the FBX is local or first needs to be downloaded
if (_modelURL.isLocalFile()) {
// load up the local file
QFile localFBX { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localFBX.exists()) {
//QMessageBox::warning(this, "Could not find " + _fbxURL.toString(), "");
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localFBX.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localFBX.copy(_originalModelFilePath);
// emit our signal to start the import of the FBX source copy
emit sourceCopyReadyToLoad();
} else {
// remote file, kick off a download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &FBXBaker::handleFBXNetworkReply);
}
}
void FBXBaker::handleFBXNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original FBX to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this FBX stating that a duplicate of the original FBX could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// emit our signal to start the import of the FBX source copy
emit sourceCopyReadyToLoad();
} else {
// add an error to our list stating that the FBX could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
}
void FBXBaker::importScene() {
qDebug() << "file path: " << _originalModelFilePath.toLocal8Bit().data() << QDir(_originalModelFilePath).exists();
QFile fbxFile(_originalModelFilePath);
if (!fbxFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalModelFilePath + " for reading");
return;
}
FBXSerializer fbxSerializer;
qCDebug(model_baking) << "Parsing" << _modelURL;
_rootNode = fbxSerializer._rootNode = fbxSerializer.parseFBX(&fbxFile);
#ifdef HIFI_DUMP_FBX
{
FBXToJSON fbxToJSON;
fbxToJSON << _rootNode;
QFileInfo modelFile(_originalModelFilePath);
QString outFilename(_bakedOutputDir + "/" + modelFile.completeBaseName() + "_FBX.json");
QFile jsonFile(outFilename);
if (jsonFile.open(QIODevice::WriteOnly)) {
jsonFile.write(fbxToJSON.str().c_str(), fbxToJSON.str().length());
jsonFile.close();
}
}
#endif
_hfmModel = fbxSerializer.extractHFMModel({}, _modelURL.toString());
_textureContentMap = fbxSerializer._textureContent;
}
void FBXBaker::rewriteAndBakeSceneModels() {
unsigned int meshIndex = 0;
bool hasDeformers { false };
for (FBXNode& rootChild : _rootNode.children) {
if (rootChild.name == "Objects") {
for (FBXNode& objectChild : rootChild.children) {
if (objectChild.name == "Deformer") {
hasDeformers = true;
break;
}
// Node data that we don't support
"Edges",
"LayerElementTangent",
"LayerElementBinormal",
"LayerElementSmoothing"
};
auto& children = meshNode.children;
auto it = children.begin();
while (it != children.end()) {
auto begin = nodeNamesToDelete.begin();
auto end = nodeNamesToDelete.end();
if (find(begin, end, it->name) != end) {
it = children.erase(it);
} else {
++it;
}
}
if (hasDeformers) {
break;
}
}
}
void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
std::vector<int> meshIndexToRuntimeOrder;
auto meshCount = (int)meshes.size();
meshIndexToRuntimeOrder.resize(meshCount);
for (int i = 0; i < meshCount; i++) {
meshIndexToRuntimeOrder[meshes[i].meshIndex] = i;
}
// The meshIndex represents the order in which the meshes are loaded from the FBX file
// We replicate this order by iterating over the meshes in the same way that FBXSerializer does
int meshIndex = 0;
for (FBXNode& rootChild : _rootNode.children) {
if (rootChild.name == "Objects") {
for (FBXNode& objectChild : rootChild.children) {
if (objectChild.name == "Geometry") {
// TODO Pull this out of _hfmModel instead so we don't have to reprocess it
auto extractedMesh = FBXSerializer::extractMesh(objectChild, meshIndex, false);
// Callback to get MaterialID
GetMaterialIDCallback materialIDcallback = [&extractedMesh](int partIndex) {
return extractedMesh.partMaterialTextures[partIndex].first;
};
// Compress mesh information and store in dracoMeshNode
FBXNode dracoMeshNode;
bool success = compressMesh(extractedMesh.mesh, hasDeformers, dracoMeshNode, materialIDcallback);
// if bake fails - return, if there were errors and continue, if there were warnings.
if (!success) {
if (hasErrors()) {
return;
} else if (hasWarnings()) {
continue;
}
} else {
objectChild.children.push_back(dracoMeshNode);
static const std::vector<QString> nodeNamesToDelete {
// Node data that is packed into the draco mesh
"Vertices",
"PolygonVertexIndex",
"LayerElementNormal",
"LayerElementColor",
"LayerElementUV",
"LayerElementMaterial",
"LayerElementTexture",
// Node data that we don't support
"Edges",
"LayerElementTangent",
"LayerElementBinormal",
"LayerElementSmoothing"
};
auto& children = objectChild.children;
auto it = children.begin();
while (it != children.end()) {
auto begin = nodeNamesToDelete.begin();
auto end = nodeNamesToDelete.end();
if (find(begin, end, it->name) != end) {
it = children.erase(it);
} else {
++it;
for (FBXNode& object : rootChild.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
int meshNum = meshIndexToRuntimeOrder[meshIndex];
replaceMeshNodeWithDraco(object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
meshIndex++;
}
} else if (object.name == "Model") {
for (FBXNode& modelChild : object.children) {
if (modelChild.name == "Properties60" || modelChild.name == "Properties70") {
// This is a properties node
// Remove the geometric transform because that has been applied directly to the vertices in FBXSerializer
static const QVariant GEOMETRIC_TRANSLATION = hifi::ByteArray("GeometricTranslation");
static const QVariant GEOMETRIC_ROTATION = hifi::ByteArray("GeometricRotation");
static const QVariant GEOMETRIC_SCALING = hifi::ByteArray("GeometricScaling");
for (int i = 0; i < modelChild.children.size(); i++) {
const auto& prop = modelChild.children[i];
const auto& propertyName = prop.properties.at(0);
if (propertyName == GEOMETRIC_TRANSLATION ||
propertyName == GEOMETRIC_ROTATION ||
propertyName == GEOMETRIC_SCALING) {
modelChild.children.removeAt(i);
--i;
}
}
} else if (modelChild.name == "Vertices") {
// This model is also a mesh
int meshNum = meshIndexToRuntimeOrder[meshIndex];
replaceMeshNodeWithDraco(object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
meshIndex++;
}
}
} // Geometry Object
}
} // foreach root child
if (hasErrors()) {
return;
}
}
}
}
}

View file

@ -31,31 +31,18 @@ using TextureBakerThreadGetter = std::function<QThread*()>;
class FBXBaker : public ModelBaker {
Q_OBJECT
public:
using ModelBaker::ModelBaker;
FBXBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
public slots:
virtual void bake() override;
signals:
void sourceCopyReadyToLoad();
private slots:
void bakeSourceCopy();
void handleFBXNetworkReply();
protected:
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;
private:
void setupOutputFolder();
void loadSourceFBX();
void importScene();
void embedTextureMetaData();
void rewriteAndBakeSceneModels();
void rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
void rewriteAndBakeSceneTextures();
void replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
HFMModel* _hfmModel;
QHash<QString, int> _textureNameMatchCount;
QHash<QUrl, QString> _remappedTexturePaths;
hfm::Model::Pointer _hfmModel;
bool _pendingErrorEmission { false };
};

View file

@ -11,9 +11,11 @@
#include "JSBaker.h"
#include <PathUtils.h>
#include <QtNetwork/QNetworkReply>
#include "Baker.h"
#include <NetworkAccessManager.h>
#include <SharedUtil.h>
#include <PathUtils.h>
const int ASCII_CHARACTERS_UPPER_LIMIT = 126;
@ -21,25 +23,79 @@ JSBaker::JSBaker(const QUrl& jsURL, const QString& bakedOutputDir) :
_jsURL(jsURL),
_bakedOutputDir(bakedOutputDir)
{
}
void JSBaker::bake() {
qCDebug(js_baking) << "JS Baker " << _jsURL << "bake starting";
// Import file to start baking
QFile jsFile(_jsURL.toLocalFile());
if (!jsFile.open(QIODevice::ReadOnly | QIODevice::Text)) {
handleError("Error opening " + _jsURL.fileName() + " for reading");
return;
}
// once our script is loaded, kick off the processing
connect(this, &JSBaker::originalScriptLoaded, this, &JSBaker::processScript);
if (_originalScript.isEmpty()) {
// first load the script (either locally or remotely)
loadScript();
} else {
// we already have a script passed to us, use that
processScript();
}
}
void JSBaker::loadScript() {
// check if the script is local or first needs to be downloaded
if (_jsURL.isLocalFile()) {
// load up the local file
QFile localScript(_jsURL.toLocalFile());
if (!localScript.open(QIODevice::ReadOnly | QIODevice::Text)) {
handleError("Error opening " + _jsURL.fileName() + " for reading");
return;
}
_originalScript = localScript.readAll();
emit originalScriptLoaded();
} else {
// remote file, kick off a download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_jsURL);
qCDebug(js_baking) << "Downloading" << _jsURL;
// kickoff the download, wait for slot to tell us it is done
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &JSBaker::handleScriptNetworkReply);
}
}
void JSBaker::handleScriptNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(js_baking) << "Downloaded script" << _jsURL;
// store the original script so it can be passed along for the bake
_originalScript = requestReply->readAll();
emit originalScriptLoaded();
} else {
// add an error to our list stating that this script could not be downloaded
handleError("Error downloading " + _jsURL.toString() + " - " + requestReply->errorString());
}
}
void JSBaker::processScript() {
// Read file into an array
QByteArray inputJS = jsFile.readAll();
QByteArray outputJS;
// Call baking on inputJS and store result in outputJS
bool success = bakeJS(inputJS, outputJS);
bool success = bakeJS(_originalScript, outputJS);
if (!success) {
qCDebug(js_baking) << "Bake Failed";
handleError("Unterminated multi-line comment");

View file

@ -25,11 +25,24 @@ public:
JSBaker(const QUrl& jsURL, const QString& bakedOutputDir);
static bool bakeJS(const QByteArray& inputFile, QByteArray& outputFile);
QString getJSPath() const { return _jsURL.toDisplayString(); }
QString getBakedJSFilePath() const { return _bakedJSFilePath; }
public slots:
virtual void bake() override;
signals:
void originalScriptLoaded();
private slots:
void processScript();
private:
void loadScript();
void handleScriptNetworkReply();
QUrl _jsURL;
QByteArray _originalScript;
QString _bakedOutputDir;
QString _bakedJSFilePath;

View file

@ -0,0 +1,247 @@
//
// MaterialBaker.cpp
// libraries/baking/src
//
// Created by Sam Gondelman on 2/26/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MaterialBaker.h"
#include <unordered_map>
#include "QJsonObject"
#include "QJsonDocument"
#include "MaterialBakingLoggingCategory.h"
#include <SharedUtil.h>
#include <PathUtils.h>
#include <graphics-scripting/GraphicsScriptingInterface.h>
std::function<QThread*()> MaterialBaker::_getNextOvenWorkerThreadOperator;
static int materialNum = 0;
namespace std {
template <>
struct hash<graphics::Material::MapChannel> {
size_t operator()(const graphics::Material::MapChannel& a) const {
return std::hash<size_t>()((size_t)a);
}
};
};
MaterialBaker::MaterialBaker(const QString& materialData, bool isURL, const QString& bakedOutputDir, const QUrl& destinationPath) :
_materialData(materialData),
_isURL(isURL),
_bakedOutputDir(bakedOutputDir),
_textureOutputDir(bakedOutputDir + "/materialTextures/" + QString::number(materialNum++)),
_destinationPath(destinationPath)
{
}
void MaterialBaker::bake() {
qDebug(material_baking) << "Material Baker" << _materialData << "bake starting";
// once our material is loaded, kick off the processing
connect(this, &MaterialBaker::originalMaterialLoaded, this, &MaterialBaker::processMaterial);
if (!_materialResource) {
// first load the material (either locally or remotely)
loadMaterial();
} else {
// we already have a material passed to us, use that
if (_materialResource->isLoaded()) {
processMaterial();
} else {
connect(_materialResource.data(), &Resource::finished, this, &MaterialBaker::originalMaterialLoaded);
}
}
}
void MaterialBaker::loadMaterial() {
if (!_isURL) {
qCDebug(material_baking) << "Loading local material" << _materialData;
_materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource());
// TODO: add baseURL to allow these to reference relative files next to them
_materialResource->parsedMaterials = NetworkMaterialResource::parseJSONMaterials(QJsonDocument::fromJson(_materialData.toUtf8()), QUrl());
} else {
qCDebug(material_baking) << "Downloading material" << _materialData;
_materialResource = MaterialCache::instance().getMaterial(_materialData);
}
if (_materialResource) {
if (_materialResource->isLoaded()) {
emit originalMaterialLoaded();
} else {
connect(_materialResource.data(), &Resource::finished, this, &MaterialBaker::originalMaterialLoaded);
}
} else {
handleError("Error loading " + _materialData);
}
}
void MaterialBaker::processMaterial() {
if (!_materialResource || _materialResource->parsedMaterials.networkMaterials.size() == 0) {
handleError("Error processing " + _materialData);
return;
}
if (QDir(_textureOutputDir).exists()) {
qWarning() << "Output path" << _textureOutputDir << "already exists. Continuing.";
} else {
qCDebug(material_baking) << "Creating materialTextures output folder" << _textureOutputDir;
if (!QDir().mkpath(_textureOutputDir)) {
handleError("Failed to create materialTextures output folder " + _textureOutputDir);
}
}
for (auto networkMaterial : _materialResource->parsedMaterials.networkMaterials) {
if (networkMaterial.second) {
auto textureMaps = networkMaterial.second->getTextureMaps();
for (auto textureMap : textureMaps) {
if (textureMap.second && textureMap.second->getTextureSource()) {
graphics::Material::MapChannel mapChannel = textureMap.first;
auto texture = textureMap.second->getTextureSource();
QUrl url = texture->getUrl();
QString cleanURL = url.adjusted(QUrl::RemoveQuery | QUrl::RemoveFragment).toDisplayString();
auto idx = cleanURL.lastIndexOf('.');
auto extension = idx >= 0 ? cleanURL.mid(idx + 1).toLower() : "";
if (QImageReader::supportedImageFormats().contains(extension.toLatin1())) {
QUrl textureURL = url.adjusted(QUrl::RemoveQuery | QUrl::RemoveFragment);
// FIXME: this isn't properly handling bumpMaps or glossMaps
static std::unordered_map<graphics::Material::MapChannel, image::TextureUsage::Type> MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP;
if (MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP.empty()) {
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::EMISSIVE_MAP] = image::TextureUsage::EMISSIVE_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::ALBEDO_MAP] = image::TextureUsage::ALBEDO_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::METALLIC_MAP] = image::TextureUsage::METALLIC_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::ROUGHNESS_MAP] = image::TextureUsage::ROUGHNESS_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::NORMAL_MAP] = image::TextureUsage::NORMAL_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::OCCLUSION_MAP] = image::TextureUsage::OCCLUSION_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::LIGHTMAP_MAP] = image::TextureUsage::LIGHTMAP_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::SCATTERING_MAP] = image::TextureUsage::SCATTERING_TEXTURE;
}
auto it = MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP.find(mapChannel);
if (it == MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP.end()) {
handleError("Unknown map channel");
return;
}
QPair<QUrl, image::TextureUsage::Type> textureKey(textureURL, it->second);
if (!_textureBakers.contains(textureKey)) {
auto baseTextureFileName = _textureFileNamer.createBaseTextureFileName(textureURL.fileName(), it->second);
QSharedPointer<TextureBaker> textureBaker {
new TextureBaker(textureURL, it->second, _textureOutputDir, "", baseTextureFileName),
&TextureBaker::deleteLater
};
textureBaker->setMapChannel(mapChannel);
connect(textureBaker.data(), &TextureBaker::finished, this, &MaterialBaker::handleFinishedTextureBaker);
_textureBakers.insert(textureKey, textureBaker);
textureBaker->moveToThread(_getNextOvenWorkerThreadOperator ? _getNextOvenWorkerThreadOperator() : thread());
QMetaObject::invokeMethod(textureBaker.data(), "bake");
}
_materialsNeedingRewrite.insert(textureKey, networkMaterial.second);
} else {
qCDebug(material_baking) << "Texture extension not supported: " << extension;
}
}
}
}
}
if (_textureBakers.empty()) {
outputMaterial();
}
}
void MaterialBaker::handleFinishedTextureBaker() {
auto baker = qobject_cast<TextureBaker*>(sender());
if (baker) {
QPair<QUrl, image::TextureUsage::Type> textureKey = { baker->getTextureURL(), baker->getTextureType() };
if (!baker->hasErrors()) {
// this TextureBaker is done and everything went according to plan
qCDebug(material_baking) << "Re-writing texture references to" << baker->getTextureURL();
auto newURL = QUrl(_textureOutputDir).resolved(baker->getMetaTextureFileName());
auto relativeURL = QDir(_bakedOutputDir).relativeFilePath(newURL.toString());
// Replace the old texture URLs
for (auto networkMaterial : _materialsNeedingRewrite.values(textureKey)) {
networkMaterial->getTextureMap(baker->getMapChannel())->getTextureSource()->setUrl(_destinationPath.resolved(relativeURL));
}
} else {
// this texture failed to bake - this doesn't fail the entire bake but we need to add the errors from
// the texture to our warnings
_warningList << baker->getWarnings();
}
_materialsNeedingRewrite.remove(textureKey);
_textureBakers.remove(textureKey);
if (_textureBakers.empty()) {
outputMaterial();
}
} else {
handleWarning("Unidentified baker finished and signaled to material baker to handle texture. Material: " + _materialData);
}
}
void MaterialBaker::outputMaterial() {
if (_materialResource) {
QJsonObject json;
if (_materialResource->parsedMaterials.networkMaterials.size() == 1) {
auto networkMaterial = _materialResource->parsedMaterials.networkMaterials.begin();
auto scriptableMaterial = scriptable::ScriptableMaterial(networkMaterial->second);
QVariant materialVariant = scriptable::scriptableMaterialToScriptValue(&_scriptEngine, scriptableMaterial).toVariant();
json.insert("materials", QJsonDocument::fromVariant(materialVariant).object());
} else {
QJsonArray materialArray;
for (auto networkMaterial : _materialResource->parsedMaterials.networkMaterials) {
auto scriptableMaterial = scriptable::ScriptableMaterial(networkMaterial.second);
QVariant materialVariant = scriptable::scriptableMaterialToScriptValue(&_scriptEngine, scriptableMaterial).toVariant();
materialArray.append(QJsonDocument::fromVariant(materialVariant).object());
}
json.insert("materials", materialArray);
}
QByteArray outputMaterial = QJsonDocument(json).toJson(QJsonDocument::Compact);
if (_isURL) {
auto fileName = QUrl(_materialData).fileName();
auto baseName = fileName.left(fileName.lastIndexOf('.'));
auto bakedFilename = baseName + BAKED_MATERIAL_EXTENSION;
_bakedMaterialData = _bakedOutputDir + "/" + bakedFilename;
QFile bakedFile;
bakedFile.setFileName(_bakedMaterialData);
if (!bakedFile.open(QIODevice::WriteOnly)) {
handleError("Error opening " + _bakedMaterialData + " for writing");
return;
}
bakedFile.write(outputMaterial);
// Export successful
_outputFiles.push_back(_bakedMaterialData);
qCDebug(material_baking) << "Exported" << _materialData << "to" << _bakedMaterialData;
} else {
_bakedMaterialData = QString(outputMaterial);
qCDebug(material_baking) << "Converted" << _materialData << "to" << _bakedMaterialData;
}
}
// emit signal to indicate the material baking is finished
emit finished();
}

View file

@ -0,0 +1,67 @@
//
// MaterialBaker.h
// libraries/baking/src
//
// Created by Sam Gondelman on 2/26/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MaterialBaker_h
#define hifi_MaterialBaker_h
#include "Baker.h"
#include "TextureBaker.h"
#include "baking/TextureFileNamer.h"
#include <material-networking/MaterialCache.h>
static const QString BAKED_MATERIAL_EXTENSION = ".baked.json";
class MaterialBaker : public Baker {
Q_OBJECT
public:
MaterialBaker(const QString& materialData, bool isURL, const QString& bakedOutputDir, const QUrl& destinationPath);
QString getMaterialData() const { return _materialData; }
bool isURL() const { return _isURL; }
QString getBakedMaterialData() const { return _bakedMaterialData; }
static void setNextOvenWorkerThreadOperator(std::function<QThread*()> getNextOvenWorkerThreadOperator) { _getNextOvenWorkerThreadOperator = getNextOvenWorkerThreadOperator; }
public slots:
virtual void bake() override;
signals:
void originalMaterialLoaded();
private slots:
void processMaterial();
void outputMaterial();
void handleFinishedTextureBaker();
private:
void loadMaterial();
QString _materialData;
bool _isURL;
NetworkMaterialResourcePointer _materialResource;
QHash<QPair<QUrl, image::TextureUsage::Type>, QSharedPointer<TextureBaker>> _textureBakers;
QMultiHash<QPair<QUrl, image::TextureUsage::Type>, std::shared_ptr<NetworkMaterial>> _materialsNeedingRewrite;
QString _bakedOutputDir;
QString _textureOutputDir;
QString _bakedMaterialData;
QUrl _destinationPath;
QScriptEngine _scriptEngine;
static std::function<QThread*()> _getNextOvenWorkerThreadOperator;
TextureFileNamer _textureFileNamer;
};
#endif // !hifi_MaterialBaker_h

View file

@ -0,0 +1,14 @@
//
// MaterialBakingLoggingCategory.cpp
// libraries/baking/src
//
// Created by Sam Gondelman on 2/26/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MaterialBakingLoggingCategory.h"
Q_LOGGING_CATEGORY(material_baking, "hifi.material-baking");

View file

@ -0,0 +1,19 @@
//
// MaterialBakingLoggingCategory.h
// libraries/baking/src
//
// Created by Sam Gondelman on 2/26/2019
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MaterialBakingLoggingCategory_h
#define hifi_MaterialBakingLoggingCategory_h
#include <QtCore/QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(material_baking)
#endif // hifi_MaterialBakingLoggingCategory_h

View file

@ -12,8 +12,17 @@
#include "ModelBaker.h"
#include <PathUtils.h>
#include <NetworkAccessManager.h>
#include <DependencyManager.h>
#include <hfm/ModelFormatRegistry.h>
#include <FBXSerializer.h>
#include <model-baker/Baker.h>
#include <model-baker/PrepareJointsTask.h>
#include <FBXWriter.h>
#include <FSTReader.h>
#ifdef _WIN32
#pragma warning( push )
@ -31,37 +40,275 @@
#pragma warning( pop )
#endif
#include "baking/BakerLibrary.h"
ModelBaker::ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory) :
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
_modelURL(inputModelURL),
_bakedOutputDir(bakedOutputDirectory),
_originalOutputDir(originalOutputDirectory),
_textureThreadGetter(inputTextureThreadGetter)
_textureThreadGetter(inputTextureThreadGetter),
_hasBeenBaked(hasBeenBaked)
{
auto tempDir = PathUtils::generateTemporaryDir();
auto bakedFilename = _modelURL.fileName();
if (!hasBeenBaked) {
bakedFilename = bakedFilename.left(bakedFilename.lastIndexOf('.'));
bakedFilename += BAKED_FBX_EXTENSION;
}
_bakedModelURL = _bakedOutputDir + "/" + bakedFilename;
}
if (tempDir.isEmpty()) {
handleError("Failed to create a temporary directory.");
void ModelBaker::setOutputURLSuffix(const QUrl& outputURLSuffix) {
_outputURLSuffix = outputURLSuffix;
}
void ModelBaker::setMappingURL(const QUrl& mappingURL) {
_mappingURL = mappingURL;
}
void ModelBaker::setMapping(const hifi::VariantHash& mapping) {
_mapping = mapping;
}
QUrl ModelBaker::getFullOutputMappingURL() const {
QUrl appendedURL = _outputMappingURL;
appendedURL.setFragment(_outputURLSuffix.fragment());
appendedURL.setQuery(_outputURLSuffix.query());
appendedURL.setUserInfo(_outputURLSuffix.userInfo());
return appendedURL;
}
void ModelBaker::bake() {
qDebug() << "ModelBaker" << _modelURL << "bake starting";
// Setup the output folders for the results of this bake
initializeOutputDirs();
if (shouldStop()) {
return;
}
_modelTempDir = tempDir;
_originalModelFilePath = _modelTempDir.filePath(_modelURL.fileName());
qDebug() << "Made temporary dir " << _modelTempDir;
qDebug() << "Origin file path: " << _originalModelFilePath;
connect(this, &ModelBaker::modelLoaded, this, &ModelBaker::bakeSourceCopy);
// make a local copy of the model
saveSourceModel();
}
ModelBaker::~ModelBaker() {
if (_modelTempDir.exists()) {
if (!_modelTempDir.remove(_originalModelFilePath)) {
qCWarning(model_baking) << "Failed to remove temporary copy of fbx file:" << _originalModelFilePath;
void ModelBaker::initializeOutputDirs() {
// Attempt to make the output folders
// Warn if there is an output directory using the same name, unless we know a parent FST baker created them already
if (QDir(_bakedOutputDir).exists()) {
if (_mappingURL.isEmpty()) {
qWarning() << "Output path" << _bakedOutputDir << "already exists. Continuing.";
}
if (!_modelTempDir.rmdir(".")) {
qCWarning(model_baking) << "Failed to remove temporary directory:" << _modelTempDir;
} else {
qCDebug(model_baking) << "Creating baked output folder" << _bakedOutputDir;
if (!QDir().mkpath(_bakedOutputDir)) {
handleError("Failed to create baked output folder " + _bakedOutputDir);
return;
}
}
QDir originalOutputDir { _originalOutputDir };
if (originalOutputDir.exists()) {
if (_mappingURL.isEmpty()) {
qWarning() << "Output path" << _originalOutputDir << "already exists. Continuing.";
}
} else {
qCDebug(model_baking) << "Creating original output folder" << _originalOutputDir;
if (!QDir().mkpath(_originalOutputDir)) {
handleError("Failed to create original output folder " + _originalOutputDir);
return;
}
}
if (originalOutputDir.isReadable()) {
// The output directory is available. Use that to write/read the original model file
_originalOutputModelPath = originalOutputDir.filePath(_modelURL.fileName());
} else {
handleError("Unable to write to original output folder " + _originalOutputDir);
}
}
void ModelBaker::saveSourceModel() {
// check if the FBX is local or first needs to be downloaded
if (_modelURL.isLocalFile()) {
// load up the local file
QFile localModelURL { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalOutputModelPath;
if (!localModelURL.exists()) {
//QMessageBox::warning(this, "Could not find " + _modelURL.toString(), "");
handleError("Could not find " + _modelURL.toString());
return;
}
localModelURL.copy(_originalOutputModelPath);
// emit our signal to start the import of the model source copy
emit modelLoaded();
} else {
// remote file, kick off a download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &ModelBaker::handleModelNetworkReply);
}
}
void ModelBaker::handleModelNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalOutputModelPath);
qDebug(model_baking) << "Writing copy of original model file to" << _originalOutputModelPath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this model stating that a duplicate of the original model could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalOutputModelPath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
// emit our signal to start the import of the model source copy
emit modelLoaded();
} else {
// add an error to our list stating that the model could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
}
void ModelBaker::bakeSourceCopy() {
QFile modelFile(_originalOutputModelPath);
if (!modelFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalOutputModelPath + " for reading");
return;
}
hifi::ByteArray modelData = modelFile.readAll();
hfm::Model::Pointer bakedModel;
std::vector<hifi::ByteArray> dracoMeshes;
std::vector<std::vector<hifi::ByteArray>> dracoMaterialLists; // Material order for per-mesh material lookup used by dracoMeshes
{
auto serializer = DependencyManager::get<ModelFormatRegistry>()->getSerializerForMediaType(modelData, _modelURL, "");
if (!serializer) {
handleError("Could not recognize file type of model file " + _originalOutputModelPath);
return;
}
hifi::VariantHash serializerMapping = _mapping;
serializerMapping["combineParts"] = true; // set true so that OBJSerializer reads material info from material library
serializerMapping["deduplicateIndices"] = true; // Draco compression also deduplicates, but we might as well shave it off to save on some earlier processing (currently FBXSerializer only)
hfm::Model::Pointer loadedModel = serializer->read(modelData, serializerMapping, _modelURL);
// Temporarily support copying the pre-parsed node from FBXSerializer, for better performance in FBXBaker
// TODO: Pure HFM baking
std::shared_ptr<FBXSerializer> fbxSerializer = std::dynamic_pointer_cast<FBXSerializer>(serializer);
if (fbxSerializer) {
qCDebug(model_baking) << "Parsing" << _modelURL;
_rootNode = fbxSerializer->_rootNode;
}
baker::Baker baker(loadedModel, serializerMapping, _mappingURL);
auto config = baker.getConfiguration();
// Enable compressed draco mesh generation
config->getJobConfig("BuildDracoMesh")->setEnabled(true);
// Do not permit potentially lossy modification of joint data meant for runtime
((PrepareJointsConfig*)config->getJobConfig("PrepareJoints"))->passthrough = true;
// The resources parsed from this job will not be used for now
// TODO: Proper full baking of all materials for a model
config->getJobConfig("ParseMaterialMapping")->setEnabled(false);
// Begin hfm baking
baker.run();
bakedModel = baker.getHFMModel();
dracoMeshes = baker.getDracoMeshes();
dracoMaterialLists = baker.getDracoMaterialLists();
}
// Populate _textureContentMap with path to content mappings, for quick lookup by URL
for (auto materialIt = bakedModel->materials.cbegin(); materialIt != bakedModel->materials.cend(); materialIt++) {
static const auto addTexture = [](QHash<hifi::ByteArray, hifi::ByteArray>& textureContentMap, const hfm::Texture& texture) {
if (!textureContentMap.contains(texture.filename)) {
// Content may be empty, unless the data is inlined
textureContentMap[texture.filename] = texture.content;
}
};
const hfm::Material& material = *materialIt;
addTexture(_textureContentMap, material.normalTexture);
addTexture(_textureContentMap, material.albedoTexture);
addTexture(_textureContentMap, material.opacityTexture);
addTexture(_textureContentMap, material.glossTexture);
addTexture(_textureContentMap, material.roughnessTexture);
addTexture(_textureContentMap, material.specularTexture);
addTexture(_textureContentMap, material.metallicTexture);
addTexture(_textureContentMap, material.emissiveTexture);
addTexture(_textureContentMap, material.occlusionTexture);
addTexture(_textureContentMap, material.scatteringTexture);
addTexture(_textureContentMap, material.lightmapTexture);
}
// Do format-specific baking
bakeProcessedSource(bakedModel, dracoMeshes, dracoMaterialLists);
if (shouldStop()) {
return;
}
// Output FST file, copying over input mappings if available
QString outputFSTFilename = !_mappingURL.isEmpty() ? _mappingURL.fileName() : _modelURL.fileName();
auto extensionStart = outputFSTFilename.indexOf(".");
if (extensionStart != -1) {
outputFSTFilename.resize(extensionStart);
}
outputFSTFilename += ".baked.fst";
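// e.g. a hypothetical mapping "model.fst" (or model URL "model.fbx") becomes "model.baked.fst"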
QString outputFSTURL = _bakedOutputDir + "/" + outputFSTFilename;
auto outputMapping = _mapping;
outputMapping[FST_VERSION_FIELD] = FST_VERSION;
outputMapping[FILENAME_FIELD] = _bakedModelURL.fileName();
// All textures will be found in the same directory as the model
outputMapping[TEXDIR_FIELD] = ".";
hifi::ByteArray fstOut = FSTReader::writeMapping(outputMapping);
QFile fstOutputFile { outputFSTURL };
if (!fstOutputFile.open(QIODevice::WriteOnly)) {
handleError("Failed to open file '" + outputFSTURL + "' for writing");
return;
}
if (fstOutputFile.write(fstOut) == -1) {
handleError("Failed to write to file '" + outputFSTURL + "'");
return;
}
_outputFiles.push_back(outputFSTURL);
_outputMappingURL = outputFSTURL;
// check if we're already done with textures (in case we had none to re-write)
checkIfTexturesFinished();
}
void ModelBaker::abort() {
@ -74,176 +321,36 @@ void ModelBaker::abort() {
}
}
bool ModelBaker::compressMesh(HFMMesh& mesh, bool hasDeformers, FBXNode& dracoMeshNode, GetMaterialIDCallback materialIDCallback) {
if (mesh.wasCompressed) {
handleError("Cannot re-bake a file that contains compressed mesh");
bool ModelBaker::buildDracoMeshNode(FBXNode& dracoMeshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList) {
if (dracoMeshBytes.isEmpty()) {
handleError("Failed to finalize the baking of a draco Geometry node");
return false;
}
Q_ASSERT(mesh.normals.size() == 0 || mesh.normals.size() == mesh.vertices.size());
Q_ASSERT(mesh.colors.size() == 0 || mesh.colors.size() == mesh.vertices.size());
Q_ASSERT(mesh.texCoords.size() == 0 || mesh.texCoords.size() == mesh.vertices.size());
int64_t numTriangles{ 0 };
for (auto& part : mesh.parts) {
if ((part.quadTrianglesIndices.size() % 3) != 0 || (part.triangleIndices.size() % 3) != 0) {
handleWarning("Found a mesh part with invalid index data, skipping");
continue;
}
numTriangles += part.quadTrianglesIndices.size() / 3;
numTriangles += part.triangleIndices.size() / 3;
}
if (numTriangles == 0) {
return false;
}
draco::TriangleSoupMeshBuilder meshBuilder;
meshBuilder.Start(numTriangles);
bool hasNormals{ mesh.normals.size() > 0 };
bool hasColors{ mesh.colors.size() > 0 };
bool hasTexCoords{ mesh.texCoords.size() > 0 };
bool hasTexCoords1{ mesh.texCoords1.size() > 0 };
bool hasPerFaceMaterials = (materialIDCallback) ? (mesh.parts.size() > 1 || materialIDCallback(0) != 0 ) : true;
bool needsOriginalIndices{ hasDeformers };
int normalsAttributeID { -1 };
int colorsAttributeID { -1 };
int texCoordsAttributeID { -1 };
int texCoords1AttributeID { -1 };
int faceMaterialAttributeID { -1 };
int originalIndexAttributeID { -1 };
const int positionAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::POSITION,
3, draco::DT_FLOAT32);
if (needsOriginalIndices) {
originalIndexAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_ORIGINAL_INDEX,
1, draco::DT_INT32);
}
if (hasNormals) {
normalsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::NORMAL,
3, draco::DT_FLOAT32);
}
if (hasColors) {
colorsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::COLOR,
3, draco::DT_FLOAT32);
}
if (hasTexCoords) {
texCoordsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::TEX_COORD,
2, draco::DT_FLOAT32);
}
if (hasTexCoords1) {
texCoords1AttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_TEX_COORD_1,
2, draco::DT_FLOAT32);
}
if (hasPerFaceMaterials) {
faceMaterialAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_MATERIAL_ID,
1, draco::DT_UINT16);
}
auto partIndex = 0;
draco::FaceIndex face;
uint16_t materialID;
for (auto& part : mesh.parts) {
materialID = (materialIDCallback) ? materialIDCallback(partIndex) : partIndex;
auto addFace = [&](QVector<int>& indices, int index, draco::FaceIndex face) {
int32_t idx0 = indices[index];
int32_t idx1 = indices[index + 1];
int32_t idx2 = indices[index + 2];
if (hasPerFaceMaterials) {
meshBuilder.SetPerFaceAttributeValueForFace(faceMaterialAttributeID, face, &materialID);
}
meshBuilder.SetAttributeValuesForFace(positionAttributeID, face,
&mesh.vertices[idx0], &mesh.vertices[idx1],
&mesh.vertices[idx2]);
if (needsOriginalIndices) {
meshBuilder.SetAttributeValuesForFace(originalIndexAttributeID, face,
&mesh.originalIndices[idx0],
&mesh.originalIndices[idx1],
&mesh.originalIndices[idx2]);
}
if (hasNormals) {
meshBuilder.SetAttributeValuesForFace(normalsAttributeID, face,
&mesh.normals[idx0], &mesh.normals[idx1],
&mesh.normals[idx2]);
}
if (hasColors) {
meshBuilder.SetAttributeValuesForFace(colorsAttributeID, face,
&mesh.colors[idx0], &mesh.colors[idx1],
&mesh.colors[idx2]);
}
if (hasTexCoords) {
meshBuilder.SetAttributeValuesForFace(texCoordsAttributeID, face,
&mesh.texCoords[idx0], &mesh.texCoords[idx1],
&mesh.texCoords[idx2]);
}
if (hasTexCoords1) {
meshBuilder.SetAttributeValuesForFace(texCoords1AttributeID, face,
&mesh.texCoords1[idx0], &mesh.texCoords1[idx1],
&mesh.texCoords1[idx2]);
}
};
for (int i = 0; (i + 2) < part.quadTrianglesIndices.size(); i += 3) {
addFace(part.quadTrianglesIndices, i, face++);
}
for (int i = 0; (i + 2) < part.triangleIndices.size(); i += 3) {
addFace(part.triangleIndices, i, face++);
}
partIndex++;
}
auto dracoMesh = meshBuilder.Finalize();
if (!dracoMesh) {
handleWarning("Failed to finalize the baking of a draco Geometry node");
return false;
}
// we need to modify unique attribute IDs for custom attributes
// so the attributes are easily retrievable on the other side
if (hasPerFaceMaterials) {
dracoMesh->attribute(faceMaterialAttributeID)->set_unique_id(DRACO_ATTRIBUTE_MATERIAL_ID);
}
if (hasTexCoords1) {
dracoMesh->attribute(texCoords1AttributeID)->set_unique_id(DRACO_ATTRIBUTE_TEX_COORD_1);
}
if (needsOriginalIndices) {
dracoMesh->attribute(originalIndexAttributeID)->set_unique_id(DRACO_ATTRIBUTE_ORIGINAL_INDEX);
}
draco::Encoder encoder;
encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, 14);
encoder.SetAttributeQuantization(draco::GeometryAttribute::TEX_COORD, 12);
encoder.SetAttributeQuantization(draco::GeometryAttribute::NORMAL, 10);
encoder.SetSpeedOptions(0, 5);
draco::EncoderBuffer buffer;
encoder.EncodeMeshToBuffer(*dracoMesh, &buffer);
FBXNode dracoNode;
dracoNode.name = "DracoMesh";
auto value = QVariant::fromValue(QByteArray(buffer.data(), (int)buffer.size()));
dracoNode.properties.append(value);
dracoNode.properties.append(QVariant::fromValue(dracoMeshBytes));
// Additional draco mesh node information
{
FBXNode fbxVersionNode;
fbxVersionNode.name = "FBXDracoMeshVersion";
fbxVersionNode.properties.append(FBX_DRACO_MESH_VERSION);
dracoNode.children.append(fbxVersionNode);
FBXNode dracoVersionNode;
dracoVersionNode.name = "DracoMeshVersion";
dracoVersionNode.properties.append(DRACO_MESH_VERSION);
dracoNode.children.append(dracoVersionNode);
FBXNode materialListNode;
materialListNode.name = "MaterialList";
for (const hifi::ByteArray& materialID : dracoMaterialList) {
materialListNode.properties.append(materialID);
}
dracoNode.children.append(materialListNode);
}
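// dracoNode now carries the compressed mesh bytes as its property, plus child nodes for the FBX Draco mesh version, the Draco mesh version, and the material list that maps mesh parts back to materials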
dracoMeshNode = dracoNode;
// Mesh compression successful, return true
return true;
}
@ -274,45 +381,42 @@ QString ModelBaker::compressTexture(QString modelTextureFileName, image::Texture
if (!modelTextureFileInfo.filePath().isEmpty()) {
textureContent = _textureContentMap.value(modelTextureFileName.toLocal8Bit());
}
auto urlToTexture = getTextureURL(modelTextureFileInfo, modelTextureFileName, !textureContent.isNull());
auto urlToTexture = getTextureURL(modelTextureFileInfo, !textureContent.isNull());
QString baseTextureFileName;
if (_remappedTexturePaths.contains(urlToTexture)) {
baseTextureFileName = _remappedTexturePaths[urlToTexture];
} else {
TextureKey textureKey { urlToTexture, textureType };
auto bakingTextureIt = _bakingTextures.find(textureKey);
if (bakingTextureIt == _bakingTextures.cend()) {
// construct the new baked texture file name and file path
// ensuring that the baked texture will have a unique name
// even if there was another texture with the same name at a different path
baseTextureFileName = createBaseTextureFileName(modelTextureFileInfo);
_remappedTexturePaths[urlToTexture] = baseTextureFileName;
}
QString baseTextureFileName = _textureFileNamer.createBaseTextureFileName(modelTextureFileInfo, textureType);
qCDebug(model_baking).noquote() << "Re-mapping" << modelTextureFileName
<< "to" << baseTextureFileName;
QString bakedTextureFilePath {
_bakedOutputDir + "/" + baseTextureFileName + BAKED_META_TEXTURE_SUFFIX
};
textureChild = baseTextureFileName + BAKED_META_TEXTURE_SUFFIX;
if (!_bakingTextures.contains(urlToTexture)) {
_outputFiles.push_back(bakedTextureFilePath);
// bake this texture asynchronously
bakeTexture(urlToTexture, textureType, _bakedOutputDir, baseTextureFileName, textureContent);
bakeTexture(textureKey, _bakedOutputDir, baseTextureFileName, textureContent);
} else {
// Fetch existing texture meta name
textureChild = (*bakingTextureIt)->getBaseFilename() + BAKED_META_TEXTURE_SUFFIX;
}
}
qCDebug(model_baking).noquote() << "Re-mapping" << modelTextureFileName
<< "to" << textureChild;
return textureChild;
}
void ModelBaker::bakeTexture(const QUrl& textureURL, image::TextureUsage::Type textureType,
const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent) {
void ModelBaker::bakeTexture(const TextureKey& textureKey, const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent) {
// start a bake for this texture and add it to our list to keep track of
QSharedPointer<TextureBaker> bakingTexture{
new TextureBaker(textureURL, textureType, outputDir, "../", bakedFilename, textureContent),
new TextureBaker(textureKey.first, textureKey.second, outputDir, "../", bakedFilename, textureContent),
&TextureBaker::deleteLater
};
@ -321,7 +425,7 @@ void ModelBaker::bakeTexture(const QUrl& textureURL, image::TextureUsage::Type t
connect(bakingTexture.data(), &TextureBaker::aborted, this, &ModelBaker::handleAbortedTexture);
// keep a shared pointer to the baking texture
_bakingTextures.insert(textureURL, bakingTexture);
_bakingTextures.insert(textureKey, bakingTexture);
// start baking the texture on one of our available worker threads
bakingTexture->moveToThread(_textureThreadGetter());
@ -373,7 +477,7 @@ void ModelBaker::handleBakedTexture() {
// now that this texture has been baked and handled, we can remove that TextureBaker from our hash
_bakingTextures.remove(bakedTexture->getTextureURL());
_bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
checkIfTexturesFinished();
} else {
@ -384,7 +488,7 @@ void ModelBaker::handleBakedTexture() {
_pendingErrorEmission = true;
// now that this texture has been baked, even though it failed, we can remove that TextureBaker from our list
_bakingTextures.remove(bakedTexture->getTextureURL());
_bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
// abort any other ongoing texture bakes since we know we'll end up failing
for (auto& bakingTexture : _bakingTextures) {
@ -397,7 +501,7 @@ void ModelBaker::handleBakedTexture() {
// we have errors to attend to, so we don't do extra processing for this texture
// but we do need to remove that TextureBaker from our list
// and then check if we're done with all textures
_bakingTextures.remove(bakedTexture->getTextureURL());
_bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
checkIfTexturesFinished();
}
@ -411,7 +515,7 @@ void ModelBaker::handleAbortedTexture() {
qDebug() << "Texture aborted: " << bakedTexture->getTextureURL();
if (bakedTexture) {
_bakingTextures.remove(bakedTexture->getTextureURL());
_bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
}
// since a texture we were baking aborted, our status is also aborted
@ -425,14 +529,11 @@ void ModelBaker::handleAbortedTexture() {
checkIfTexturesFinished();
}
QUrl ModelBaker::getTextureURL(const QFileInfo& textureFileInfo, QString relativeFileName, bool isEmbedded) {
QUrl ModelBaker::getTextureURL(const QFileInfo& textureFileInfo, bool isEmbedded) {
QUrl urlToTexture;
// use QFileInfo to easily split up the existing texture filename into its components
auto apparentRelativePath = QFileInfo(relativeFileName.replace("\\", "/"));
if (isEmbedded) {
urlToTexture = _modelURL.toString() + "/" + apparentRelativePath.filePath();
urlToTexture = _modelURL.toString() + "/" + textureFileInfo.filePath();
} else {
if (textureFileInfo.exists() && textureFileInfo.isFile()) {
// set the texture URL to the local texture that we have confirmed exists
@ -442,14 +543,14 @@ QUrl ModelBaker::getTextureURL(const QFileInfo& textureFileInfo, QString relativ
// this is a relative file path which will require different handling
// depending on the location of the original model
if (_modelURL.isLocalFile() && apparentRelativePath.exists() && apparentRelativePath.isFile()) {
if (_modelURL.isLocalFile() && textureFileInfo.exists() && textureFileInfo.isFile()) {
// the absolute path we ran into for the texture in the model exists on this machine
// so use that file
urlToTexture = QUrl::fromLocalFile(apparentRelativePath.absoluteFilePath());
urlToTexture = QUrl::fromLocalFile(textureFileInfo.absoluteFilePath());
} else {
// we didn't find the texture on this machine at the absolute path
// so assume that it is right beside the model to match the behaviour of interface
urlToTexture = _modelURL.resolved(apparentRelativePath.fileName());
urlToTexture = _modelURL.resolved(textureFileInfo.fileName());
}
}
}
@ -494,25 +595,6 @@ void ModelBaker::checkIfTexturesFinished() {
}
}
QString ModelBaker::createBaseTextureFileName(const QFileInfo& textureFileInfo) {
// first make sure we have a unique base name for this texture
// in case another texture referenced by this model has the same base name
auto& nameMatches = _textureNameMatchCount[textureFileInfo.baseName()];
QString baseTextureFileName{ textureFileInfo.completeBaseName() };
if (nameMatches > 0) {
// there are already nameMatches textures with this name
// append a dash and that number to our baked texture file name so that it is unique
baseTextureFileName += "-" + QString::number(nameMatches);
}
// increment the number of name matches
++nameMatches;
return baseTextureFileName;
}
void ModelBaker::setWasAborted(bool wasAborted) {
if (wasAborted != _wasAborted.load()) {
Baker::setWasAborted(wasAborted);
@ -588,31 +670,25 @@ void ModelBaker::embedTextureMetaData() {
}
void ModelBaker::exportScene() {
// save the relative path to this FBX inside our passed output folder
auto fileName = _modelURL.fileName();
auto baseName = fileName.left(fileName.lastIndexOf('.'));
auto bakedFilename = baseName + BAKED_FBX_EXTENSION;
_bakedModelFilePath = _bakedOutputDir + "/" + bakedFilename;
auto fbxData = FBXWriter::encodeFBX(_rootNode);
QFile bakedFile(_bakedModelFilePath);
QString bakedModelURL = _bakedModelURL.toString();
QFile bakedFile(bakedModelURL);
if (!bakedFile.open(QIODevice::WriteOnly)) {
handleError("Error opening " + _bakedModelFilePath + " for writing");
handleError("Error opening " + bakedModelURL + " for writing");
return;
}
bakedFile.write(fbxData);
_outputFiles.push_back(_bakedModelFilePath);
_outputFiles.push_back(bakedModelURL);
#ifdef HIFI_DUMP_FBX
{
FBXToJSON fbxToJSON;
fbxToJSON << _rootNode;
QFileInfo modelFile(_bakedModelFilePath);
QFileInfo modelFile(_bakedModelURL.toString());
QString outFilename(modelFile.dir().absolutePath() + "/" + modelFile.completeBaseName() + "_FBX.json");
QFile jsonFile(outFilename);
if (jsonFile.open(QIODevice::WriteOnly)) {
@ -622,5 +698,5 @@ void ModelBaker::exportScene() {
}
#endif
qCDebug(model_baking) << "Exported" << _modelURL << "with re-written paths to" << _bakedModelFilePath;
qCDebug(model_baking) << "Exported" << _modelURL << "with re-written paths to" << bakedModelURL;
}

View file

@ -19,6 +19,7 @@
#include "Baker.h"
#include "TextureBaker.h"
#include "baking/TextureFileNamer.h"
#include "ModelBakingLoggingCategory.h"
@ -30,57 +31,84 @@
using TextureBakerThreadGetter = std::function<QThread*()>;
using GetMaterialIDCallback = std::function <int(int)>;
static const QString BAKED_FBX_EXTENSION = ".baked.fbx";
static const QString FST_EXTENSION { ".fst" };
static const QString BAKED_FST_EXTENSION { ".baked.fst" };
static const QString FBX_EXTENSION { ".fbx" };
static const QString BAKED_FBX_EXTENSION { ".baked.fbx" };
static const QString OBJ_EXTENSION { ".obj" };
static const QString GLTF_EXTENSION { ".gltf" };
class ModelBaker : public Baker {
Q_OBJECT
public:
ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "");
virtual ~ModelBaker();
using TextureKey = QPair<QUrl, image::TextureUsage::Type>;
bool compressMesh(HFMMesh& mesh, bool hasDeformers, FBXNode& dracoMeshNode, GetMaterialIDCallback materialIDCallback = nullptr);
ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
void setOutputURLSuffix(const QUrl& urlSuffix);
void setMappingURL(const QUrl& mappingURL);
void setMapping(const hifi::VariantHash& mapping);
void initializeOutputDirs();
bool buildDracoMeshNode(FBXNode& dracoMeshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
QString compressTexture(QString textureFileName, image::TextureUsage::Type = image::TextureUsage::Type::DEFAULT_TEXTURE);
virtual void setWasAborted(bool wasAborted) override;
QUrl getModelURL() const { return _modelURL; }
QString getBakedModelFilePath() const { return _bakedModelFilePath; }
virtual QUrl getFullOutputMappingURL() const;
QUrl getBakedModelURL() const { return _bakedModelURL; }
signals:
void modelLoaded();
public slots:
virtual void bake() override;
virtual void abort() override;
protected:
void saveSourceModel();
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) = 0;
void checkIfTexturesFinished();
void texturesFinished();
void embedTextureMetaData();
void exportScene();
FBXNode _rootNode;
QHash<QByteArray, QByteArray> _textureContentMap;
QUrl _modelURL;
QUrl _outputURLSuffix;
QUrl _mappingURL;
hifi::VariantHash _mapping;
QString _bakedOutputDir;
QString _originalOutputDir;
QString _bakedModelFilePath;
QDir _modelTempDir;
QString _originalModelFilePath;
TextureBakerThreadGetter _textureThreadGetter;
QString _originalOutputModelPath;
QString _outputMappingURL;
QUrl _bakedModelURL;
protected slots:
void handleModelNetworkReply();
virtual void bakeSourceCopy();
private slots:
void handleBakedTexture();
void handleAbortedTexture();
private:
QString createBaseTextureFileName(const QFileInfo & textureFileInfo);
QUrl getTextureURL(const QFileInfo& textureFileInfo, QString relativeFileName, bool isEmbedded = false);
void bakeTexture(const QUrl & textureURL, image::TextureUsage::Type textureType, const QDir & outputDir,
const QString & bakedFilename, const QByteArray & textureContent);
QUrl getTextureURL(const QFileInfo& textureFileInfo, bool isEmbedded = false);
void bakeTexture(const TextureKey& textureKey, const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent);
QString texturePathRelativeToModel(QUrl modelURL, QUrl textureURL);
TextureBakerThreadGetter _textureThreadGetter;
QMultiHash<QUrl, QSharedPointer<TextureBaker>> _bakingTextures;
QMultiHash<TextureKey, QSharedPointer<TextureBaker>> _bakingTextures;
QHash<QString, int> _textureNameMatchCount;
QHash<QUrl, QString> _remappedTexturePaths;
bool _pendingErrorEmission{ false };
bool _pendingErrorEmission { false };
bool _hasBeenBaked { false };
TextureFileNamer _textureFileNamer;
};
#endif // hifi_ModelBaker_h

View file

@ -35,157 +35,51 @@ const QByteArray CONNECTIONS_NODE_PROPERTY = "OO";
const QByteArray CONNECTIONS_NODE_PROPERTY_1 = "OP";
const QByteArray MESH = "Mesh";
void OBJBaker::bake() {
qDebug() << "OBJBaker" << _modelURL << "bake starting";
// trigger bakeOBJ once OBJ is loaded
connect(this, &OBJBaker::OBJLoaded, this, &OBJBaker::bakeOBJ);
// make a local copy of the OBJ
loadOBJ();
}
void OBJBaker::loadOBJ() {
if (!QDir().mkpath(_bakedOutputDir)) {
handleError("Failed to create baked OBJ output folder " + _bakedOutputDir);
return;
}
if (!QDir().mkpath(_originalOutputDir)) {
handleError("Failed to create original OBJ output folder " + _originalOutputDir);
return;
}
// check if the OBJ is local or it needs to be downloaded
if (_modelURL.isLocalFile()) {
// loading the local OBJ
QFile localOBJ { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localOBJ.exists()) {
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localOBJ.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localOBJ.copy(_originalModelFilePath);
// local OBJ is loaded, emit signal to trigger its baking
emit OBJLoaded();
} else {
// OBJ is remote, start download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &OBJBaker::handleOBJNetworkReply);
}
}
void OBJBaker::handleOBJNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original obj to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this obj stating that a duplicate of the original obj could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// remote OBJ is loaded, emit signal to trigger its baking
emit OBJLoaded();
} else {
// add an error to our list stating that the OBJ could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
}
void OBJBaker::bakeOBJ() {
// Read the OBJ file
QFile objFile(_originalModelFilePath);
if (!objFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalModelFilePath + " for reading");
return;
}
QByteArray objData = objFile.readAll();
OBJSerializer serializer;
QVariantHash mapping;
mapping["combineParts"] = true; // set true so that OBJSerializer reads material info from material library
auto geometry = serializer.read(objData, mapping, _modelURL);
void OBJBaker::bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
// Write OBJ Data as FBX tree nodes
createFBXNodeTree(_rootNode, *geometry);
checkIfTexturesFinished();
createFBXNodeTree(_rootNode, hfmModel, dracoMeshes[0]);
}
void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& hfmModel, const hifi::ByteArray& dracoMesh) {
// Make all generated nodes children of rootNode
rootNode.children = { FBXNode(), FBXNode(), FBXNode() };
FBXNode& globalSettingsNode = rootNode.children[0];
FBXNode& objectNode = rootNode.children[1];
FBXNode& connectionsNode = rootNode.children[2];
// Generating FBX Header Node
FBXNode headerNode;
headerNode.name = FBX_HEADER_EXTENSION;
// Generating global settings node
// Required for Unit Scale Factor
FBXNode globalSettingsNode;
globalSettingsNode.name = GLOBAL_SETTINGS_NODE_NAME;
// Setting the tree hierarchy: GlobalSettings -> Properties70 -> P -> Properties
FBXNode properties70Node;
properties70Node.name = PROPERTIES70_NODE_NAME;
FBXNode pNode;
{
pNode.name = P_NODE_NAME;
pNode.properties.append({
"UnitScaleFactor", "double", "Number", "",
UNIT_SCALE_FACTOR
});
globalSettingsNode.children.push_back(FBXNode());
FBXNode& properties70Node = globalSettingsNode.children.back();
properties70Node.name = PROPERTIES70_NODE_NAME;
FBXNode pNode;
{
pNode.name = P_NODE_NAME;
pNode.properties.append({
"UnitScaleFactor", "double", "Number", "",
UNIT_SCALE_FACTOR
});
}
properties70Node.children = { pNode };
}
properties70Node.children = { pNode };
globalSettingsNode.children = { properties70Node };
// Generating Object node
FBXNode objectNode;
objectNode.name = OBJECTS_NODE_NAME;
objectNode.children = { FBXNode(), FBXNode() };
FBXNode& geometryNode = objectNode.children[0];
FBXNode& modelNode = objectNode.children[1];
// Generating Object node's child - Geometry node
FBXNode geometryNode;
geometryNode.name = GEOMETRY_NODE_NAME;
NodeID geometryID;
{
@ -196,15 +90,8 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
MESH
};
}
// Compress the mesh information and store in dracoNode
bool hasDeformers = false; // No concept of deformers for an OBJ
FBXNode dracoNode;
compressMesh(hfmModel.meshes[0], hasDeformers, dracoNode);
geometryNode.children.append(dracoNode);
// Generating Object node's child - Model node
FBXNode modelNode;
modelNode.name = MODEL_NODE_NAME;
NodeID modelID;
{
@ -212,16 +99,14 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
modelNode.properties = { modelID, MODEL_NODE_NAME, MESH };
}
objectNode.children = { geometryNode, modelNode };
// Generating Objects node's child - Material node
auto& meshParts = hfmModel.meshes[0].parts;
auto& meshParts = hfmModel->meshes[0].parts;
for (auto& meshPart : meshParts) {
FBXNode materialNode;
materialNode.name = MATERIAL_NODE_NAME;
if (hfmModel.materials.size() == 1) {
if (hfmModel->materials.size() == 1) {
// case when no material information is provided, OBJSerializer considers it as a single default material
for (auto& materialID : hfmModel.materials.keys()) {
for (auto& materialID : hfmModel->materials.keys()) {
setMaterialNodeProperties(materialNode, materialID, hfmModel);
}
} else {
@ -231,12 +116,28 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
objectNode.children.append(materialNode);
}
// Store the draco node containing the compressed mesh information, along with the per-meshPart material IDs the draco node references
// Because we redefine the material IDs when initializing the material nodes above, we pass those IDs in as the material list
// The nth mesh part gets the nth material
if (!dracoMesh.isEmpty()) {
std::vector<hifi::ByteArray> newMaterialList;
newMaterialList.reserve(_materialIDs.size());
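// Convert each numeric material node ID into its decimal string form, e.g. a hypothetical ID 2 becomes the byte array "2"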
for (auto materialID : _materialIDs) {
newMaterialList.push_back(hifi::ByteArray(std::to_string((int)materialID).c_str()));
}
FBXNode dracoNode;
buildDracoMeshNode(dracoNode, dracoMesh, newMaterialList);
geometryNode.children.append(dracoNode);
} else {
handleWarning("Baked mesh for OBJ model '" + _modelURL.toString() + "' is empty");
}
// Generating Texture Node
// iterate through mesh parts and process the associated textures
auto size = meshParts.size();
for (int i = 0; i < size; i++) {
QString material = meshParts[i].materialID;
HFMMaterial currentMaterial = hfmModel.materials[material];
HFMMaterial currentMaterial = hfmModel->materials[material];
if (!currentMaterial.albedoTexture.filename.isEmpty() || !currentMaterial.specularTexture.filename.isEmpty()) {
auto textureID = nextNodeID();
_mapTextureMaterial.emplace_back(textureID, i);
@ -281,14 +182,15 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
}
// Generating Connections node
FBXNode connectionsNode;
connectionsNode.name = CONNECTIONS_NODE_NAME;
// connect Geometry to Model
FBXNode cNode;
cNode.name = C_NODE_NAME;
cNode.properties = { CONNECTIONS_NODE_PROPERTY, geometryID, modelID };
connectionsNode.children = { cNode };
// connect Geometry to Model
{
FBXNode cNode;
cNode.name = C_NODE_NAME;
cNode.properties = { CONNECTIONS_NODE_PROPERTY, geometryID, modelID };
connectionsNode.children.push_back(cNode);
}
// connect all materials to model
for (auto& materialID : _materialIDs) {
@ -320,18 +222,15 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
};
connectionsNode.children.append(cDiffuseNode);
}
// Make all generated nodes children of rootNode
rootNode.children = { globalSettingsNode, objectNode, connectionsNode };
}
// Set properties for material nodes
void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, HFMModel& hfmModel) {
void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel) {
auto materialID = nextNodeID();
_materialIDs.push_back(materialID);
materialNode.properties = { materialID, material, MESH };
HFMMaterial currentMaterial = hfmModel.materials[material];
HFMMaterial currentMaterial = hfmModel->materials[material];
// Setting the hierarchy: Material -> Properties70 -> P -> Properties
FBXNode properties70Node;

View file

@ -27,20 +27,12 @@ class OBJBaker : public ModelBaker {
public:
using ModelBaker::ModelBaker;
public slots:
virtual void bake() override;
signals:
void OBJLoaded();
private slots:
void bakeOBJ();
void handleOBJNetworkReply();
protected:
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;
private:
void loadOBJ();
void createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel);
void setMaterialNodeProperties(FBXNode& materialNode, QString material, HFMModel& hfmModel);
void createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& hfmModel, const hifi::ByteArray& dracoMesh);
void setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel);
NodeID nextNodeID() { return _nodeID++; }

View file

@ -47,6 +47,14 @@ TextureBaker::TextureBaker(const QUrl& textureURL, image::TextureUsage::Type tex
auto originalFilename = textureURL.fileName();
_baseFilename = originalFilename.left(originalFilename.lastIndexOf('.'));
}
auto textureFilename = _textureURL.fileName();
QString originalExtension;
int extensionStart = textureFilename.indexOf(".");
if (extensionStart != -1) {
originalExtension = textureFilename.mid(extensionStart);
}
_originalCopyFilePath = _outputDirectory.absoluteFilePath(_baseFilename + originalExtension);
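// e.g. a hypothetical "wood.png" keeps its ".png" suffix, so the original copy lands at "<outputDirectory>/<baseFilename>.png"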
}
void TextureBaker::bake() {
@ -128,7 +136,9 @@ void TextureBaker::processTexture() {
TextureMeta meta;
auto originalCopyFilePath = _outputDirectory.absoluteFilePath(_textureURL.fileName());
QString originalCopyFilePath = _originalCopyFilePath.toString();
// Copy the original file into the baked output directory if it doesn't exist yet
{
QFile file { originalCopyFilePath };
if (!file.open(QIODevice::WriteOnly) || file.write(_originalTexture) == -1) {
@ -138,9 +148,10 @@ void TextureBaker::processTexture() {
// IMPORTANT: _originalTexture is empty past this point
_originalTexture.clear();
_outputFiles.push_back(originalCopyFilePath);
meta.original = _metaTexturePathPrefix + _textureURL.fileName();
meta.original = _metaTexturePathPrefix + _originalCopyFilePath.fileName();
}
// Load the copy of the original file from the baked output directory. New images will be created using the original as the source data.
auto buffer = std::static_pointer_cast<QIODevice>(std::make_shared<QFile>(originalCopyFilePath));
if (!buffer->open(QIODevice::ReadOnly)) {
handleError("Could not open original file at " + originalCopyFilePath);

View file

@ -22,6 +22,8 @@
#include "Baker.h"
#include <material-networking/MaterialCache.h>
extern const QString BAKED_TEXTURE_KTX_EXT;
extern const QString BAKED_META_TEXTURE_SUFFIX;
@ -37,12 +39,18 @@ public:
QUrl getTextureURL() const { return _textureURL; }
QString getBaseFilename() const { return _baseFilename; }
QString getMetaTextureFileName() const { return _metaTextureFileName; }
virtual void setWasAborted(bool wasAborted) override;
static void setCompressionEnabled(bool enabled) { _compressionEnabled = enabled; }
void setMapChannel(graphics::Material::MapChannel mapChannel) { _mapChannel = mapChannel; }
graphics::Material::MapChannel getMapChannel() const { return _mapChannel; }
image::TextureUsage::Type getTextureType() const { return _textureType; }
public slots:
virtual void bake() override;
virtual void abort() override;
@ -60,11 +68,14 @@ private:
QUrl _textureURL;
QByteArray _originalTexture;
image::TextureUsage::Type _textureType;
graphics::Material::MapChannel _mapChannel;
bool _mapChannelSet { false };
QString _baseFilename;
QDir _outputDirectory;
QString _metaTextureFileName;
QString _metaTexturePathPrefix;
QUrl _originalCopyFilePath;
std::atomic<bool> _abortProcessing { false };

View file

@ -0,0 +1,83 @@
//
// BakerLibrary.cpp
// libraries/baking/src/baking
//
// Created by Sabrina Shanman on 2019/02/14.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "BakerLibrary.h"
#include "FSTBaker.h"
#include "../FBXBaker.h"
#include "../OBJBaker.h"
// Check if the file pointed to by this URL is a bakeable model, by comparing extensions
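// e.g. a hypothetical "http://example.com/chair.obj?v=2" is returned as "http://example.com/chair.obj"; an unrecognized extension yields an empty QUrl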
QUrl getBakeableModelURL(const QUrl& url) {
static const std::vector<QString> extensionsToBake = {
FST_EXTENSION,
BAKED_FST_EXTENSION,
FBX_EXTENSION,
BAKED_FBX_EXTENSION,
OBJ_EXTENSION,
GLTF_EXTENSION
};
QUrl cleanURL = url.adjusted(QUrl::RemoveQuery | QUrl::RemoveFragment);
QString cleanURLString = cleanURL.fileName();
for (auto& extension : extensionsToBake) {
if (cleanURLString.endsWith(extension, Qt::CaseInsensitive)) {
return cleanURL;
}
}
qWarning() << "Unknown model type: " << url.fileName();
return QUrl();
}
bool isModelBaked(const QUrl& bakeableModelURL) {
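// e.g. a hypothetical "chair.baked.fbx" trims to "chair.baked" and returns true, while "chair.fbx" trims to "chair" and returns false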
auto modelString = bakeableModelURL.toString();
auto beforeModelExtension = modelString;
beforeModelExtension.resize(modelString.lastIndexOf('.'));
return beforeModelExtension.endsWith(".baked");
}
std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& contentOutputPath) {
auto filename = bakeableModelURL.fileName();
// Output in a sub-folder with the name of the model, potentially suffixed by a number to make it unique
auto baseName = filename.left(filename.lastIndexOf('.')).left(filename.lastIndexOf(".baked"));
auto subDirName = "/" + baseName;
int i = 1;
while (QDir(contentOutputPath + subDirName).exists()) {
subDirName = "/" + baseName + "-" + QString::number(i++);
}
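// e.g. a hypothetical "chair.fbx" is output under "<contentOutputPath>/chair"; if that folder already exists, "/chair-1", then "/chair-2", and so on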
QString bakedOutputDirectory = contentOutputPath + subDirName + "/baked";
QString originalOutputDirectory = contentOutputPath + subDirName + "/original";
return getModelBakerWithOutputDirectories(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory);
}
std::unique_ptr<ModelBaker> getModelBakerWithOutputDirectories(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& bakedOutputDirectory, const QString& originalOutputDirectory) {
auto filename = bakeableModelURL.fileName();
std::unique_ptr<ModelBaker> baker;
if (filename.endsWith(FST_EXTENSION, Qt::CaseInsensitive)) {
baker = std::make_unique<FSTBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, filename.endsWith(BAKED_FST_EXTENSION, Qt::CaseInsensitive));
} else if (filename.endsWith(FBX_EXTENSION, Qt::CaseInsensitive)) {
baker = std::make_unique<FBXBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, filename.endsWith(BAKED_FBX_EXTENSION, Qt::CaseInsensitive));
} else if (filename.endsWith(OBJ_EXTENSION, Qt::CaseInsensitive)) {
baker = std::make_unique<OBJBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory);
//} else if (filename.endsWith(GLTF_EXTENSION, Qt::CaseInsensitive)) {
//baker = std::make_unique<GLTFBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory);
} else {
qDebug() << "Could not create ModelBaker for url" << bakeableModelURL;
}
return baker;
}

View file

@ -0,0 +1,31 @@
//
// BakerLibrary.h
// libraries/baking/src/baking
//
// Created by Sabrina Shanman on 2019/02/14.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_BakerLibrary_h
#define hifi_BakerLibrary_h
#include <QUrl>
#include "../ModelBaker.h"
// Returns either the given model URL if valid, or an empty URL
QUrl getBakeableModelURL(const QUrl& url);
bool isModelBaked(const QUrl& bakeableModelURL);
// Assuming the URL is valid, gets the appropriate baker for the given URL, and creates the base directory where the baker's output will later be stored
// Returns an empty pointer if a baker could not be created
std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& contentOutputPath);
// Similar to getModelBaker, but gives control over where the output folders will be
std::unique_ptr<ModelBaker> getModelBakerWithOutputDirectories(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& bakedOutputDirectory, const QString& originalOutputDirectory);
#endif // hifi_BakerLibrary_h

View file

@ -0,0 +1,128 @@
//
// FSTBaker.cpp
// libraries/baking/src/baking
//
// Created by Sabrina Shanman on 2019/03/06.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "FSTBaker.h"
#include <PathUtils.h>
#include <NetworkAccessManager.h>
#include "BakerLibrary.h"
#include <FSTReader.h>
FSTBaker::FSTBaker(const QUrl& inputMappingURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
ModelBaker(inputMappingURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, hasBeenBaked) {
if (hasBeenBaked) {
// Look for the original model file one directory higher. Perhaps this is an oven output directory.
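// e.g. a hypothetical "<output>/baked/avatar.baked.fst" is remapped to "<output>/original/avatar.fst"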
QUrl originalRelativePath = QUrl("../original/" + inputMappingURL.fileName().replace(BAKED_FST_EXTENSION, FST_EXTENSION));
QUrl newInputMappingURL = inputMappingURL.adjusted(QUrl::RemoveFilename).resolved(originalRelativePath);
_modelURL = newInputMappingURL;
}
_mappingURL = _modelURL;
{
// Unused, but defined for consistency
auto bakedFilename = _modelURL.fileName();
bakedFilename.replace(FST_EXTENSION, BAKED_FST_EXTENSION);
_bakedModelURL = _bakedOutputDir + "/" + bakedFilename;
}
}
QUrl FSTBaker::getFullOutputMappingURL() const {
if (_modelBaker) {
return _modelBaker->getFullOutputMappingURL();
}
return QUrl();
}
void FSTBaker::bakeSourceCopy() {
if (shouldStop()) {
return;
}
QFile fstFile(_originalOutputModelPath);
if (!fstFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalOutputModelPath + " for reading");
return;
}
hifi::ByteArray fstData = fstFile.readAll();
_mapping = FSTReader::readMapping(fstData);
auto filenameField = _mapping[FILENAME_FIELD].toString();
if (filenameField.isEmpty()) {
handleError("The '" + FILENAME_FIELD + "' property in the FST file '" + _originalOutputModelPath + "' could not be found");
return;
}
auto modelURL = _mappingURL.adjusted(QUrl::RemoveFilename).resolved(filenameField);
auto bakeableModelURL = getBakeableModelURL(modelURL);
if (bakeableModelURL.isEmpty()) {
handleError("The '" + FILENAME_FIELD + "' property in the FST file '" + _originalOutputModelPath + "' could not be resolved to a valid bakeable model url");
return;
}
auto baker = getModelBakerWithOutputDirectories(bakeableModelURL, _textureThreadGetter, _bakedOutputDir, _originalOutputDir);
_modelBaker = std::unique_ptr<ModelBaker>(dynamic_cast<ModelBaker*>(baker.release()));
if (!_modelBaker) {
handleError("The model url '" + bakeableModelURL.toString() + "' from the FST file '" + _originalOutputModelPath + "' (property: '" + FILENAME_FIELD + "') could not be used to initialize a valid model baker");
return;
}
if (dynamic_cast<FSTBaker*>(_modelBaker.get())) {
// Could be interesting, but for now let's just prevent infinite FST loops in the most straightforward way possible
handleError("The FST file '" + _originalOutputModelPath + "' (property: '" + FILENAME_FIELD + "') references another FST file. FST chaining is not supported.");
return;
}
_modelBaker->setMappingURL(_mappingURL);
_modelBaker->setMapping(_mapping);
// Hold on to the old url userinfo/query/fragment data so ModelBaker::getFullOutputMappingURL retains that data from the original model URL
_modelBaker->setOutputURLSuffix(modelURL);
connect(_modelBaker.get(), &ModelBaker::aborted, this, &FSTBaker::handleModelBakerAborted);
connect(_modelBaker.get(), &ModelBaker::finished, this, &FSTBaker::handleModelBakerFinished);
// FSTBaker can't do much while waiting for the ModelBaker to finish, so start the bake on this thread.
_modelBaker->bake();
}
void FSTBaker::handleModelBakerEnded() {
for (auto& warning : _modelBaker->getWarnings()) {
_warningList.push_back(warning);
}
for (auto& error : _modelBaker->getErrors()) {
_errorList.push_back(error);
}
// Get the output files, including but not limited to the FST file and the baked model file
for (auto& outputFile : _modelBaker->getOutputFiles()) {
_outputFiles.push_back(outputFile);
}
}
void FSTBaker::handleModelBakerAborted() {
handleModelBakerEnded();
if (!wasAborted()) {
setWasAborted(true);
}
}
void FSTBaker::handleModelBakerFinished() {
handleModelBakerEnded();
setIsFinished(true);
}
void FSTBaker::abort() {
ModelBaker::abort();
if (_modelBaker) {
_modelBaker->abort();
}
}

View file

@ -0,0 +1,45 @@
//
// FSTBaker.h
// libraries/baking/src/baking
//
// Created by Sabrina Shanman on 2019/03/06.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_FSTBaker_h
#define hifi_FSTBaker_h
#include "../ModelBaker.h"
class FSTBaker : public ModelBaker {
Q_OBJECT
public:
FSTBaker(const QUrl& inputMappingURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
virtual QUrl getFullOutputMappingURL() const override;
signals:
void fstLoaded();
public slots:
virtual void abort() override;
protected:
std::unique_ptr<ModelBaker> _modelBaker;
protected slots:
virtual void bakeSourceCopy() override;
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override {};
void handleModelBakerAborted();
void handleModelBakerFinished();
private:
void handleModelBakerEnded();
};
#endif // hifi_FSTBaker_h

View file

@ -0,0 +1,34 @@
//
// TextureFileNamer.cpp
// libraries/baking/src/baking
//
// Created by Sabrina Shanman on 2019/03/14.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TextureFileNamer.h"
QString TextureFileNamer::createBaseTextureFileName(const QFileInfo& textureFileInfo, const image::TextureUsage::Type textureType) {
// If two textures have the same URL but are used differently, we need to process them separately
QString addMapChannel = QString::fromStdString("_" + std::to_string(textureType));
QString baseTextureFileName{ textureFileInfo.baseName() + addMapChannel };
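// e.g. a hypothetical "wood.jpg" used with usage type 0 starts out as "wood_0"; a later texture with the same base name and type gets "-1" appended below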
// first make sure we have a unique base name for this texture
// in case another texture referenced by this model has the same base name
auto& nameMatches = _textureNameMatchCount[baseTextureFileName];
if (nameMatches > 0) {
// there are already nameMatches textures with this name
// append a dash and that number to our baked texture file name so that it is unique
baseTextureFileName += "-" + QString::number(nameMatches);
}
// increment the number of name matches
++nameMatches;
return baseTextureFileName;
}

View file

@ -0,0 +1,30 @@
//
// TextureFileNamer.h
// libraries/baking/src/baking
//
// Created by Sabrina Shanman on 2019/03/14.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_TextureFileNamer_h
#define hifi_TextureFileNamer_h
#include <QtCore/QFileInfo>
#include <QHash>
#include <image/Image.h>
class TextureFileNamer {
public:
TextureFileNamer() {}
QString createBaseTextureFileName(const QFileInfo& textureFileInfo, const image::TextureUsage::Type textureType);
protected:
QHash<QString, int> _textureNameMatchCount;
};
#endif // hifi_TextureFileNamer_h

View file

@ -109,7 +109,7 @@ bool Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
return Parent::internalActivate();
}
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra(const gpu::FramebufferPointer& compositeFramebuffer) {
#if defined(Q_OS_ANDROID)
auto& virtualPadManager = VirtualPad::Manager::instance();
if(virtualPadManager.getLeftVirtualPad()->isShown()) {
@ -121,7 +121,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setFramebuffer(compositeFramebuffer);
batch.resetViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_cursorPipeline);
@ -140,7 +140,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
});
}
#endif
Parent::compositeExtra();
Parent::compositeExtra(compositeFramebuffer);
}
static const uint32_t MIN_THROTTLE_CHECK_FRAMES = 60;

View file

@ -33,7 +33,7 @@ public:
virtual bool isThrottled() const override;
virtual void compositeExtra() override;
virtual void compositeExtra(const gpu::FramebufferPointer&) override;
virtual void pluginUpdate() override {};

View file

@ -379,14 +379,6 @@ void OpenGLDisplayPlugin::customizeContext() {
scissorState->setDepthTest(gpu::State::DepthTest(false));
scissorState->setScissorEnable(true);
{
#ifdef Q_OS_ANDROID
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureGammaLinearToSRGB);
#else
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
#endif
_simplePipeline = gpu::Pipeline::create(program, scissorState);
}
{
#ifdef Q_OS_ANDROID
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureGammaLinearToSRGB);
@ -396,29 +388,59 @@ void OpenGLDisplayPlugin::customizeContext() {
_presentPipeline = gpu::Pipeline::create(program, scissorState);
}
// HUD operator
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
_hudPipeline = gpu::Pipeline::create(program, blendState);
}
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
_mirrorHUDPipeline = gpu::Pipeline::create(program, blendState);
gpu::PipelinePointer hudPipeline;
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
hudPipeline = gpu::Pipeline::create(program, blendState);
}
gpu::PipelinePointer hudMirrorPipeline;
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
hudMirrorPipeline = gpu::Pipeline::create(program, blendState);
}
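// Unlike the removed getHUDOperator(), the operator below re-evaluates stereo state, the composite framebuffer size and the per-eye viewports each time it runs, using the framebuffer passed in by the caller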
_hudOperator = [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, const gpu::FramebufferPointer& compositeFramebuffer, bool mirror) {
auto hudStereo = isStereo();
auto hudCompositeFramebufferSize = compositeFramebuffer->getSize();
std::array<glm::ivec4, 2> hudEyeViewports;
for_each_eye([&](Eye eye) {
hudEyeViewports[eye] = eyeViewport(eye);
});
if (hudPipeline && hudTexture) {
batch.enableStereo(false);
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
batch.setResourceTexture(0, hudTexture);
if (hudStereo) {
for_each_eye([&](Eye eye) {
batch.setViewportTransform(hudEyeViewports[eye]);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
}
};
}
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTransformedTexture);
_cursorPipeline = gpu::Pipeline::create(program, blendState);
}
}
updateCompositeFramebuffer();
}
void OpenGLDisplayPlugin::uncustomizeContext() {
_presentPipeline.reset();
_cursorPipeline.reset();
_hudPipeline.reset();
_mirrorHUDPipeline.reset();
_compositeFramebuffer.reset();
_hudOperator = DEFAULT_HUD_OPERATOR;
withPresentThreadLock([&] {
_currentFrame.reset();
_lastFrame = nullptr;
@ -510,24 +532,16 @@ void OpenGLDisplayPlugin::captureFrame(const std::string& filename) const {
});
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor) {
renderFromTexture(batch, texture, viewport, scissor, nullptr);
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& copyFbo /*=gpu::FramebufferPointer()*/) {
auto fbo = gpu::FramebufferPointer();
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& destFbo, const gpu::FramebufferPointer& copyFbo /*=gpu::FramebufferPointer()*/) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(fbo);
batch.setFramebuffer(destFbo);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
batch.setStateScissorRect(scissor);
batch.setViewportTransform(viewport);
batch.setResourceTexture(0, texture);
#ifndef USE_GLES
batch.setPipeline(_presentPipeline);
#else
batch.setPipeline(_simplePipeline);
#endif
batch.draw(gpu::TRIANGLE_STRIP, 4);
if (copyFbo) {
gpu::Vec4i copyFboRect(0, 0, copyFbo->getWidth(), copyFbo->getHeight());
@ -553,7 +567,7 @@ void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::Textur
batch.setViewportTransform(copyFboRect);
batch.setStateScissorRect(copyFboRect);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, {0.0f, 0.0f, 0.0f, 1.0f});
batch.blit(fbo, sourceRect, copyFbo, copyRect);
batch.blit(destFbo, sourceRect, copyFbo, copyRect);
}
}
@ -581,41 +595,14 @@ void OpenGLDisplayPlugin::updateFrameData() {
});
}
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> OpenGLDisplayPlugin::getHUDOperator() {
auto hudPipeline = _hudPipeline;
auto hudMirrorPipeline = _mirrorHUDPipeline;
auto hudStereo = isStereo();
auto hudCompositeFramebufferSize = _compositeFramebuffer->getSize();
std::array<glm::ivec4, 2> hudEyeViewports;
for_each_eye([&](Eye eye) {
hudEyeViewports[eye] = eyeViewport(eye);
});
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.enableStereo(false);
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
batch.setResourceTexture(0, hudTexture);
if (hudStereo) {
for_each_eye([&](Eye eye) {
batch.setViewportTransform(hudEyeViewports[eye]);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
}
};
}
void OpenGLDisplayPlugin::compositePointer() {
void OpenGLDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compositeFramebuffer) {
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
auto cursorTransform = DependencyManager::get<CompositorHelper>()->getReticleTransform(glm::mat4());
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setProjectionTransform(mat4());
batch.setFramebuffer(_compositeFramebuffer);
batch.setFramebuffer(compositeFramebuffer);
batch.setPipeline(_cursorPipeline);
batch.setResourceTexture(0, cursorData.texture);
batch.resetViewTransform();
@ -626,34 +613,13 @@ void OpenGLDisplayPlugin::compositePointer() {
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), _compositeFramebuffer->getSize()));
batch.setViewportTransform(ivec4(uvec2(0), compositeFramebuffer->getSize()));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
});
}
void OpenGLDisplayPlugin::compositeScene() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setViewportTransform(ivec4(uvec2(), _compositeFramebuffer->getSize()));
batch.setStateScissorRect(ivec4(uvec2(), _compositeFramebuffer->getSize()));
batch.resetViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_simplePipeline);
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}
void OpenGLDisplayPlugin::compositeLayers() {
updateCompositeFramebuffer();
{
PROFILE_RANGE_EX(render_detail, "compositeScene", 0xff0077ff, (uint64_t)presentCount())
compositeScene();
}
void OpenGLDisplayPlugin::compositeLayers(const gpu::FramebufferPointer& compositeFramebuffer) {
#ifdef HIFI_ENABLE_NSIGHT_DEBUG
if (false) // do not draw the HUD if running nsight debug
#endif
@ -667,23 +633,35 @@ void OpenGLDisplayPlugin::compositeLayers() {
{
PROFILE_RANGE_EX(render_detail, "compositeExtra", 0xff0077ff, (uint64_t)presentCount())
compositeExtra();
compositeExtra(compositeFramebuffer);
}
// Draw the pointer last so it's on top of everything
auto compositorHelper = DependencyManager::get<CompositorHelper>();
if (compositorHelper->getReticleVisible()) {
PROFILE_RANGE_EX(render_detail, "compositePointer", 0xff0077ff, (uint64_t)presentCount())
compositePointer();
compositePointer(compositeFramebuffer);
}
}
void OpenGLDisplayPlugin::internalPresent() {
void OpenGLDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
render([&](gpu::Batch& batch) {
// Note: _displayTexture must currently be the same size as the display.
uvec2 dims = _displayTexture ? uvec2(_displayTexture->getDimensions()) : getSurfacePixels();
auto viewport = ivec4(uvec2(0), dims);
renderFromTexture(batch, _displayTexture ? _displayTexture : _compositeFramebuffer->getRenderBuffer(0), viewport, viewport);
gpu::TexturePointer finalTexture;
if (_displayTexture) {
finalTexture = _displayTexture;
} else if (compositeFramebuffer) {
finalTexture = compositeFramebuffer->getRenderBuffer(0);
} else {
qCWarning(displayPlugins) << "No valid texture for output";
}
if (finalTexture) {
renderFromTexture(batch, finalTexture, viewport, viewport);
}
});
swapBuffers();
_presentRate.increment();
@ -700,7 +678,7 @@ void OpenGLDisplayPlugin::present() {
}
incrementPresentCount();
if (_currentFrame) {
if (_currentFrame && _currentFrame->framebuffer) {
auto correction = getViewCorrection();
getGLBackend()->setCameraCorrection(correction, _prevRenderView);
_prevRenderView = correction * _currentFrame->view;
@ -720,18 +698,18 @@ void OpenGLDisplayPlugin::present() {
// Write all layers to a local framebuffer
{
PROFILE_RANGE_EX(render, "composite", 0xff00ffff, frameId)
compositeLayers();
compositeLayers(_currentFrame->framebuffer);
}
// Take the composite framebuffer and send it to the output device
{
PROFILE_RANGE_EX(render, "internalPresent", 0xff00ffff, frameId)
internalPresent();
internalPresent(_currentFrame->framebuffer);
}
gpu::Backend::freeGPUMemSize.set(gpu::gl::getFreeDedicatedMemory());
} else if (alwaysPresent()) {
internalPresent();
internalPresent(nullptr);
}
_movingAveragePresent.addSample((float)(usecTimestampNow() - startPresent));
}
@ -788,7 +766,12 @@ bool OpenGLDisplayPlugin::setDisplayTexture(const QString& name) {
}
QImage OpenGLDisplayPlugin::getScreenshot(float aspectRatio) const {
auto size = _compositeFramebuffer->getSize();
if (!_currentFrame || !_currentFrame->framebuffer) {
return QImage();
}
auto compositeFramebuffer = _currentFrame->framebuffer;
auto size = compositeFramebuffer->getSize();
if (isHmd()) {
size.x /= 2;
}
@ -806,7 +789,7 @@ QImage OpenGLDisplayPlugin::getScreenshot(float aspectRatio) const {
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
QImage screenshot(bestSize.x, bestSize.y, QImage::Format_ARGB32);
withOtherThreadContext([&] {
glBackend->downloadFramebuffer(_compositeFramebuffer, ivec4(corner, bestSize), screenshot);
glBackend->downloadFramebuffer(compositeFramebuffer, ivec4(corner, bestSize), screenshot);
});
return screenshot.mirrored(false, true);
}
@ -858,7 +841,7 @@ bool OpenGLDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
}
ivec4 OpenGLDisplayPlugin::eyeViewport(Eye eye) const {
uvec2 vpSize = _compositeFramebuffer->getSize();
auto vpSize = glm::uvec2(getRecommendedRenderSize());
vpSize.x /= 2;
uvec2 vpPos;
if (eye == Eye::Right) {
@ -891,14 +874,6 @@ void OpenGLDisplayPlugin::render(std::function<void(gpu::Batch& batch)> f) {
OpenGLDisplayPlugin::~OpenGLDisplayPlugin() {
}
void OpenGLDisplayPlugin::updateCompositeFramebuffer() {
auto renderSize = glm::uvec2(getRecommendedRenderSize());
if (!_compositeFramebuffer || _compositeFramebuffer->getSize() != renderSize) {
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_RGBA_32, renderSize.x, renderSize.y));
// _compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_SRGBA_32, renderSize.x, renderSize.y));
}
}
void OpenGLDisplayPlugin::copyTextureToQuickFramebuffer(NetworkTexturePointer networkTexture, QOpenGLFramebufferObject* target, GLsync* fenceSync) {
#if !defined(USE_GLES)
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();

View file

@ -94,14 +94,10 @@ protected:
// is not populated
virtual bool alwaysPresent() const { return false; }
void updateCompositeFramebuffer();
virtual QThread::Priority getPresentPriority() { return QThread::HighPriority; }
virtual void compositeLayers();
virtual void compositeScene();
virtual std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator();
virtual void compositePointer();
virtual void compositeExtra() {};
virtual void compositeLayers(const gpu::FramebufferPointer&);
virtual void compositePointer(const gpu::FramebufferPointer&);
virtual void compositeExtra(const gpu::FramebufferPointer&) {};
// These functions must only be called on the presentation thread
virtual void customizeContext();
@ -116,10 +112,10 @@ protected:
virtual void deactivateSession() {}
// Plugin specific functionality to send the composed scene to the output window or device
virtual void internalPresent();
virtual void internalPresent(const gpu::FramebufferPointer&);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& fbo);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& destFbo = nullptr, const gpu::FramebufferPointer& copyFbo = nullptr);
virtual void updateFrameData();
virtual glm::mat4 getViewCorrection() { return glm::mat4(); }
@ -142,14 +138,8 @@ protected:
gpu::FramePointer _currentFrame;
gpu::Frame* _lastFrame { nullptr };
mat4 _prevRenderView;
gpu::FramebufferPointer _compositeFramebuffer;
gpu::PipelinePointer _hudPipeline;
gpu::PipelinePointer _mirrorHUDPipeline;
gpu::ShaderPointer _mirrorHUDPS;
gpu::PipelinePointer _simplePipeline;
gpu::PipelinePointer _presentPipeline;
gpu::PipelinePointer _cursorPipeline;
gpu::TexturePointer _displayTexture{};
gpu::TexturePointer _displayTexture;
float _compositeHUDAlpha { 1.0f };
struct CursorData {
@ -185,5 +175,9 @@ protected:
// be serialized through this mutex
mutable Mutex _presentMutex;
float _hudAlpha{ 1.0f };
private:
gpu::PipelinePointer _presentPipeline;
};
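With this change the composite framebuffer belongs to the frame rather than the plugin, so the composite and present virtuals receive it as a parameter. A minimal sketch of a derived plugin using the new signatures follows; the class name and body are illustrative, and only the virtuals declared in this header are assumed.

// Sketch only: a hypothetical subclass exercising the framebuffer-passing virtuals.
class ExampleDisplayPlugin : public OpenGLDisplayPlugin {
    using Parent = OpenGLDisplayPlugin;
protected:
    void compositeExtra(const gpu::FramebufferPointer& compositeFramebuffer) override {
        // Draw plugin-specific overlays into the frame's own framebuffer,
        // which is now passed in rather than read from a member.
        render([&](gpu::Batch& batch) {
            batch.enableStereo(false);
            batch.setFramebuffer(compositeFramebuffer);
            // ... extra draw calls ...
        });
    }
    void internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) override {
        // Defer to the base implementation, which samples the composite
        // framebuffer (or _displayTexture) and blits it to the output surface.
        Parent::internalPresent(compositeFramebuffer);
    }
};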

View file

@ -24,7 +24,7 @@ public:
protected:
void updatePresentPose() override;
void hmdPresent() override {}
void hmdPresent(const gpu::FramebufferPointer&) override {}
bool isHmdMounted() const override { return true; }
bool internalActivate() override;
private:

View file

@ -114,20 +114,23 @@ void HmdDisplayPlugin::internalDeactivate() {
void HmdDisplayPlugin::customizeContext() {
Parent::customizeContext();
_hudRenderer.build();
_hudOperator = _hudRenderer.build();
}
void HmdDisplayPlugin::uncustomizeContext() {
// This stops the weirdness where if the preview was disabled, on switching back to 2D,
// the vsync was stuck in the disabled state. No idea why that happens though.
_disablePreview = false;
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(_compositeFramebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
});
_hudRenderer = HUDRenderer();
if (_currentFrame && _currentFrame->framebuffer) {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(_currentFrame->framebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
});
}
_hudRenderer = {};
_previewTexture.reset();
Parent::uncustomizeContext();
}
@ -174,11 +177,11 @@ float HmdDisplayPlugin::getLeftCenterPixel() const {
return leftCenterPixel;
}
void HmdDisplayPlugin::internalPresent() {
void HmdDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
// Composite together the scene, hud and mouse cursor
hmdPresent();
hmdPresent(compositeFramebuffer);
if (_displayTexture) {
// Note: _displayTexture must currently be the same size as the display.
@ -260,7 +263,7 @@ void HmdDisplayPlugin::internalPresent() {
viewport.z *= 2;
}
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor, fbo);
renderFromTexture(batch, compositeFramebuffer->getRenderBuffer(0), viewport, scissor, nullptr, fbo);
});
swapBuffers();
@ -345,7 +348,7 @@ glm::mat4 HmdDisplayPlugin::getViewCorrection() {
}
}
void HmdDisplayPlugin::HUDRenderer::build() {
DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
vertices = std::make_shared<gpu::Buffer>();
indices = std::make_shared<gpu::Buffer>();
@ -380,7 +383,7 @@ void HmdDisplayPlugin::HUDRenderer::build() {
indexCount = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRANGLE;
// Compute indices order
std::vector<GLushort> indices;
std::vector<GLushort> indexData;
for (int i = 0; i < stacks - 1; i++) {
for (int j = 0; j < slices - 1; j++) {
GLushort bottomLeftIndex = i * slices + j;
@ -388,24 +391,21 @@ void HmdDisplayPlugin::HUDRenderer::build() {
GLushort topLeftIndex = bottomLeftIndex + slices;
GLushort topRightIndex = topLeftIndex + 1;
// FIXME make a z-order curve for better vertex cache locality
indices.push_back(topLeftIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(topRightIndex);
indexData.push_back(topLeftIndex);
indexData.push_back(bottomLeftIndex);
indexData.push_back(topRightIndex);
indices.push_back(topRightIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(bottomRightIndex);
indexData.push_back(topRightIndex);
indexData.push_back(bottomLeftIndex);
indexData.push_back(bottomRightIndex);
}
}
this->indices->append(indices);
indices->append(indexData);
format = std::make_shared<gpu::Stream::Format>(); // 1 for everyone
format->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
format->setAttribute(gpu::Stream::TEXCOORD, gpu::Stream::TEXCOORD, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
uniformsBuffer = std::make_shared<gpu::Buffer>(sizeof(Uniforms), nullptr);
updatePipeline();
}
void HmdDisplayPlugin::HUDRenderer::updatePipeline() {
if (!pipeline) {
auto program = gpu::Shader::createProgram(shader::render_utils::program::hmd_ui);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
@ -416,10 +416,6 @@ void HmdDisplayPlugin::HUDRenderer::updatePipeline() {
pipeline = gpu::Pipeline::create(program, state);
}
}
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::HUDRenderer::render(HmdDisplayPlugin& plugin) {
updatePipeline();
auto hudPipeline = pipeline;
auto hudFormat = format;
@ -428,9 +424,9 @@ std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDis
auto hudUniformBuffer = uniformsBuffer;
auto hudUniforms = uniforms;
auto hudIndexCount = indexCount;
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.setPipeline(hudPipeline);
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, const gpu::FramebufferPointer&, const bool mirror) {
if (pipeline && hudTexture) {
batch.setPipeline(pipeline);
batch.setInputFormat(hudFormat);
gpu::BufferView posView(hudVertices, VERTEX_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::POSITION)._element);
@ -454,7 +450,7 @@ std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDis
};
}
void HmdDisplayPlugin::compositePointer() {
void HmdDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compositeFramebuffer) {
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
auto compositorHelper = DependencyManager::get<CompositorHelper>();
@ -463,7 +459,7 @@ void HmdDisplayPlugin::compositePointer() {
render([&](gpu::Batch& batch) {
// FIXME use standard gpu stereo rendering for this.
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setFramebuffer(compositeFramebuffer);
batch.setPipeline(_cursorPipeline);
batch.setResourceTexture(0, cursorData.texture);
batch.resetViewTransform();
@ -478,10 +474,6 @@ void HmdDisplayPlugin::compositePointer() {
});
}
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::getHUDOperator() {
return _hudRenderer.render(*this);
}
HmdDisplayPlugin::~HmdDisplayPlugin() {
}
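The operator returned by HUDRenderer::build() now takes the target framebuffer in addition to the batch, HUD texture, and mirror flag. A hedged sketch of a call site follows; hudTexture and compositeFramebuffer are caller-side placeholders, not names taken from this diff.

// Sketch only: invoking the stored HUD operator with the new signature.
render([&](gpu::Batch& batch) {
    batch.setFramebuffer(compositeFramebuffer);
    if (_hudOperator) {
        _hudOperator(batch, hudTexture, compositeFramebuffer, /* mirror */ false);
    }
});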

View file

@ -53,16 +53,15 @@ signals:
void hmdVisibleChanged(bool visible);
protected:
virtual void hmdPresent() = 0;
virtual void hmdPresent(const gpu::FramebufferPointer&) = 0;
virtual bool isHmdMounted() const = 0;
virtual void postPreview() {};
virtual void updatePresentPose();
bool internalActivate() override;
void internalDeactivate() override;
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator() override;
void compositePointer() override;
void internalPresent() override;
void compositePointer(const gpu::FramebufferPointer&) override;
void internalPresent(const gpu::FramebufferPointer&) override;
void customizeContext() override;
void uncustomizeContext() override;
void updateFrameData() override;
@ -120,8 +119,6 @@ private:
static const size_t TEXTURE_OFFSET { offsetof(Vertex, uv) };
static const int VERTEX_STRIDE { sizeof(Vertex) };
void build();
void updatePipeline();
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> render(HmdDisplayPlugin& plugin);
HUDOperator build();
} _hudRenderer;
};

View file

@ -37,13 +37,13 @@ glm::uvec2 InterleavedStereoDisplayPlugin::getRecommendedRenderSize() const {
return result;
}
void InterleavedStereoDisplayPlugin::internalPresent() {
void InterleavedStereoDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(gpu::FramebufferPointer());
batch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.setResourceTexture(0, compositeFramebuffer->getRenderBuffer(0));
batch.setPipeline(_interleavedPresentPipeline);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});

View file

@ -21,7 +21,7 @@ protected:
// initialize OpenGL context settings needed by the plugin
void customizeContext() override;
void uncustomizeContext() override;
void internalPresent() override;
void internalPresent(const gpu::FramebufferPointer&) override;
private:
static const QString NAME;

View file

@ -1105,7 +1105,7 @@ void EntityTreeRenderer::playEntityCollisionSound(const EntityItemPointer& entit
options.volume = volume;
options.pitch = 1.0f / stretchFactor;
AudioInjector::playSoundAndDelete(collisionSound, options);
DependencyManager::get<AudioInjectorManager>()->playSound(collisionSound, options, true);
}
void EntityTreeRenderer::entityCollisionWithEntity(const EntityItemID& idA, const EntityItemID& idB,

View file

@ -16,7 +16,7 @@
#include <QtCore/QStack>
#include <QtGui/QMouseEvent>
#include <AbstractAudioInterface.h>
#include <AudioInjectorManager.h>
#include <EntityScriptingInterface.h> // for RayToEntityIntersectionResult
#include <EntityTree.h>
#include <PointerEvent.h>

View file

@ -40,6 +40,7 @@ public:
virtual bool wantsKeyboardFocus() const { return false; }
virtual void setProxyWindow(QWindow* proxyWindow) {}
virtual QObject* getEventHandler() { return nullptr; }
virtual void emitScriptEvent(const QVariant& message) {}
const EntityItemPointer& getEntity() const { return _entity; }
const ItemID& getRenderItemID() const { return _renderItemID; }

View file

@ -1034,7 +1034,7 @@ void RenderableModelEntityItem::copyAnimationJointDataToModel() {
});
if (changed) {
locationChanged(false, true);
locationChanged(true, true);
}
}

View file

@ -46,12 +46,7 @@ PolyLineEntityRenderer::PolyLineEntityRenderer(const EntityItemPointer& entity)
void PolyLineEntityRenderer::buildPipeline() {
// FIXME: opaque pipeline
gpu::ShaderPointer program;
if (DISABLE_DEFERRED) {
program = gpu::Shader::createProgram(shader::entities_renderer::program::paintStroke_forward);
} else {
program = gpu::Shader::createProgram(shader::entities_renderer::program::paintStroke);
}
gpu::ShaderPointer program = gpu::Shader::createProgram(DISABLE_DEFERRED ? shader::entities_renderer::program::paintStroke_forward : shader::entities_renderer::program::paintStroke);
{
gpu::StatePointer state = gpu::StatePointer(new gpu::State());

View file

@ -19,8 +19,6 @@
#include "RenderPipelines.h"
#include <DisableDeferred.h>
//#define SHAPE_ENTITY_USE_FADE_EFFECT
#ifdef SHAPE_ENTITY_USE_FADE_EFFECT
#include <FadeEffect.h>
@ -277,16 +275,10 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
} else if (!useMaterialPipeline(materials)) {
// FIXME, support instanced multi-shape rendering using multidraw indirect
outColor.a *= _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
render::ShapePipelinePointer pipeline;
if (_renderLayer == RenderLayer::WORLD && !DISABLE_DEFERRED) {
pipeline = outColor.a < 1.0f ? geometryCache->getTransparentShapePipeline() : geometryCache->getOpaqueShapePipeline();
} else {
pipeline = outColor.a < 1.0f ? geometryCache->getForwardTransparentShapePipeline() : geometryCache->getForwardOpaqueShapePipeline();
}
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || _primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, pipeline);
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, pipeline);
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
}
} else {
if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {

View file

@ -19,7 +19,7 @@
#include "GLMHelpers.h"
#include <DisableDeferred.h>
#include "DeferredLightingEffect.h"
using namespace render;
using namespace render::entities;
@ -162,7 +162,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec4 backgroundColor;
Transform modelTransform;
glm::vec3 dimensions;
bool forwardRendered;
bool layered;
withReadLock([&] {
modelTransform = _renderTransform;
dimensions = _dimensions;
@ -172,7 +172,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
textColor = EntityRenderer::calculatePulseColor(textColor, _pulseProperties, _created);
backgroundColor = glm::vec4(_backgroundColor, fadeRatio * _backgroundAlpha);
backgroundColor = EntityRenderer::calculatePulseColor(backgroundColor, _pulseProperties, _created);
forwardRendered = _renderLayer != RenderLayer::WORLD || DISABLE_DEFERRED;
layered = _renderLayer != RenderLayer::WORLD;
});
// Render background
@ -184,6 +184,11 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
// FIXME: we need to find a better way of rendering text so we don't have to do this
if (layered) {
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(args, batch);
}
auto transformToTopLeft = modelTransform;
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), _billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.postTranslate(dimensions * glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
@ -192,7 +197,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
if (backgroundColor.a > 0.0f) {
batch.setModelTransform(transformToTopLeft);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->bindSimpleProgram(batch, false, backgroundColor.a < 1.0f, false, false, false, true, forwardRendered);
geometryCache->bindSimpleProgram(batch, false, backgroundColor.a < 1.0f, false, false, false, true, layered);
geometryCache->renderQuad(batch, minCorner, maxCorner, backgroundColor, _geometryID);
}
@ -203,7 +208,11 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
batch.setModelTransform(transformToTopLeft);
glm::vec2 bounds = glm::vec2(dimensions.x - (_leftMargin + _rightMargin), dimensions.y - (_topMargin + _bottomMargin));
_textRenderer->draw(batch, _leftMargin / scale, -_topMargin / scale, _text, textColor, bounds / scale, forwardRendered);
_textRenderer->draw(batch, _leftMargin / scale, -_topMargin / scale, _text, textColor, bounds / scale, layered);
}
if (layered) {
DependencyManager::get<DeferredLightingEffect>()->unsetKeyLightBatch(batch);
}
}

View file

@ -106,7 +106,7 @@ private:
static std::function<void(QSharedPointer<OffscreenQmlSurface>&, bool&, std::vector<QMetaObject::Connection>&)> _releaseWebSurfaceOperator;
public slots:
void emitScriptEvent(const QVariant& scriptMessage);
void emitScriptEvent(const QVariant& scriptMessage) override;
signals:
void scriptEventReceived(const QVariant& message);

View file

@ -39,9 +39,7 @@ void EntityEditPacketSender::adjustEditPacketForClockSkew(PacketType type, QByte
}
}
void EntityEditPacketSender::queueEditAvatarEntityMessage(EntityTreePointer entityTree,
EntityItemID entityItemID,
const EntityItemProperties& properties) {
void EntityEditPacketSender::queueEditAvatarEntityMessage(EntityTreePointer entityTree, EntityItemID entityItemID) {
assert(_myAvatar);
if (!entityTree) {
qCDebug(entities) << "EntityEditPacketSender::queueEditAvatarEntityMessage null entityTree.";
@ -54,11 +52,6 @@ void EntityEditPacketSender::queueEditAvatarEntityMessage(EntityTreePointer enti
}
entity->setLastBroadcast(usecTimestampNow());
// serialize ALL properties in an "AvatarEntity" packet
// rather than just the ones being edited.
EntityItemProperties entityProperties = entity->getProperties();
entityProperties.merge(properties);
OctreePacketData packetData(false, AvatarTraits::MAXIMUM_TRAIT_SIZE);
EncodeBitstreamParams params;
EntityTreeElementExtraEncodeDataPointer extra { nullptr };
@ -82,7 +75,7 @@ void EntityEditPacketSender::queueEditEntityMessage(PacketType type,
qCWarning(entities) << "Suppressing entity edit message: cannot send avatar entity edit with no myAvatar";
} else if (properties.getOwningAvatarID() == _myAvatar->getID()) {
// this is an avatar-based entity --> update our avatar-data rather than sending to the entity-server
queueEditAvatarEntityMessage(entityTree, entityItemID, properties);
queueEditAvatarEntityMessage(entityTree, entityItemID);
} else {
qCWarning(entities) << "Suppressing entity edit message: cannot send avatar entity edit for another avatar";
}

View file

@ -50,8 +50,8 @@ public slots:
void processEntityEditNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
private:
void queueEditAvatarEntityMessage(EntityTreePointer entityTree,
EntityItemID entityItemID, const EntityItemProperties& properties);
friend class MyAvatar;
void queueEditAvatarEntityMessage(EntityTreePointer entityTree, EntityItemID entityItemID);
private:
std::mutex _mutex;

View file

@ -511,8 +511,6 @@ public:
virtual void setProxyWindow(QWindow* proxyWindow) {}
virtual QObject* getEventHandler() { return nullptr; }
virtual void emitScriptEvent(const QVariant& message) {}
QUuid getLastEditedBy() const { return _lastEditedBy; }
void setLastEditedBy(QUuid value) { _lastEditedBy = value; }

View file

@ -1646,11 +1646,9 @@ bool EntityScriptingInterface::actionWorker(const QUuid& entityID,
auto nodeList = DependencyManager::get<NodeList>();
const QUuid myNodeID = nodeList->getSessionUUID();
EntityItemProperties properties;
EntityItemPointer entity;
bool doTransmit = false;
_entityTree->withWriteLock([this, &entity, entityID, myNodeID, &doTransmit, actor, &properties] {
_entityTree->withWriteLock([this, &entity, entityID, myNodeID, &doTransmit, actor] {
EntitySimulationPointer simulation = _entityTree->getSimulation();
entity = _entityTree->findEntityByEntityItemID(entityID);
if (!entity) {
@ -1669,16 +1667,12 @@ bool EntityScriptingInterface::actionWorker(const QUuid& entityID,
doTransmit = actor(simulation, entity);
_entityTree->entityChanged(entity);
if (doTransmit) {
properties.setEntityHostType(entity->getEntityHostType());
properties.setOwningAvatarID(entity->getOwningAvatarID());
}
});
// transmit the change
if (doTransmit) {
_entityTree->withReadLock([&] {
properties = entity->getProperties();
EntityItemProperties properties = _entityTree->resultWithReadLock<EntityItemProperties>([&] {
return entity->getProperties();
});
properties.setActionDataDirty();
@ -2202,14 +2196,7 @@ bool EntityScriptingInterface::wantsHandControllerPointerEvents(const QUuid& id)
}
void EntityScriptingInterface::emitScriptEvent(const EntityItemID& entityID, const QVariant& message) {
if (_entityTree) {
_entityTree->withReadLock([&] {
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(EntityItemID(entityID));
if (entity) {
entity->emitScriptEvent(message);
}
});
}
EntityTree::emitScriptEvent(entityID, message);
}
// TODO move this someplace that makes more sense...

View file

@ -1529,7 +1529,6 @@ public slots:
* @function Entities.emitScriptEvent
* @param {Uuid} entityID - The ID of the {@link Entities.EntityType|Web} entity.
* @param {string} message - The message to send.
* @todo <em>This function is currently not implemented.</em>
*/
Q_INVOKABLE void emitScriptEvent(const EntityItemID& entityID, const QVariant& message);

View file

@ -2978,6 +2978,7 @@ QStringList EntityTree::getJointNames(const QUuid& entityID) const {
std::function<QObject*(const QUuid&)> EntityTree::_getEntityObjectOperator = nullptr;
std::function<QSizeF(const QUuid&, const QString&)> EntityTree::_textSizeOperator = nullptr;
std::function<bool()> EntityTree::_areEntityClicksCapturedOperator = nullptr;
std::function<void(const QUuid&, const QVariant&)> EntityTree::_emitScriptEventOperator = nullptr;
QObject* EntityTree::getEntityObject(const QUuid& id) {
if (_getEntityObjectOperator) {
@ -3000,6 +3001,12 @@ bool EntityTree::areEntityClicksCaptured() {
return false;
}
void EntityTree::emitScriptEvent(const QUuid& id, const QVariant& message) {
if (_emitScriptEventOperator) {
_emitScriptEventOperator(id, message);
}
}
void EntityTree::updateEntityQueryAACubeWorker(SpatiallyNestablePointer object, EntityEditPacketSender* packetSender,
MovingEntitiesOperator& moveOperator, bool force, bool tellServer) {
// if the queryBox has changed, tell the entity-server

View file

@ -272,6 +272,9 @@ public:
static void setEntityClicksCapturedOperator(std::function<bool()> areEntityClicksCapturedOperator) { _areEntityClicksCapturedOperator = areEntityClicksCapturedOperator; }
static bool areEntityClicksCaptured();
static void setEmitScriptEventOperator(std::function<void(const QUuid&, const QVariant&)> emitScriptEventOperator) { _emitScriptEventOperator = emitScriptEventOperator; }
static void emitScriptEvent(const QUuid& id, const QVariant& message);
std::map<QString, QString> getNamedPaths() const { return _namedPaths; }
void updateEntityQueryAACube(SpatiallyNestablePointer object, EntityEditPacketSender* packetSender,
@ -383,6 +386,7 @@ private:
static std::function<QObject*(const QUuid&)> _getEntityObjectOperator;
static std::function<QSizeF(const QUuid&, const QString&)> _textSizeOperator;
static std::function<bool()> _areEntityClicksCapturedOperator;
static std::function<void(const QUuid&, const QVariant&)> _emitScriptEventOperator;
std::vector<int32_t> _staleProxies;
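The new operator follows the same pattern as the existing _getEntityObjectOperator and _textSizeOperator statics: the application registers a callback once, and EntityTree::emitScriptEvent() forwards to it. A hedged sketch of the registration side follows; the registration site, the lookup helper, and the variable names are assumptions, and only the two statics declared above come from this diff.

// Sketch only: wiring the operator at application startup (lookup helper is hypothetical).
EntityTree::setEmitScriptEventOperator([](const QUuid& id, const QVariant& message) {
    if (auto renderable = findRenderableEntity(id)) {   // hypothetical lookup
        renderable->emitScriptEvent(message);
    }
});

// Later, any caller can route through the static entry point:
EntityTree::emitScriptEvent(entityID, QVariant(QStringLiteral("hello from script")));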

View file

@ -13,27 +13,26 @@
#define hifi_FBX_h_
#include <QMetaType>
#include <QVarLengthArray>
#include <QVariant>
#include <QVector>
#include <glm/glm.hpp>
#include <shared/HifiTypes.h>
// See comment in FBXSerializer::parseFBX().
static const int FBX_HEADER_BYTES_BEFORE_VERSION = 23;
static const QByteArray FBX_BINARY_PROLOG("Kaydara FBX Binary ");
static const QByteArray FBX_BINARY_PROLOG2("\0\x1a\0", 3);
static const hifi::ByteArray FBX_BINARY_PROLOG("Kaydara FBX Binary ");
static const hifi::ByteArray FBX_BINARY_PROLOG2("\0\x1a\0", 3);
static const quint32 FBX_VERSION_2015 = 7400;
static const quint32 FBX_VERSION_2016 = 7500;
static const int DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES = 1000;
static const int DRACO_ATTRIBUTE_MATERIAL_ID = DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES;
static const int DRACO_ATTRIBUTE_TEX_COORD_1 = DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES + 1;
static const int DRACO_ATTRIBUTE_ORIGINAL_INDEX = DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES + 2;
static const int32_t FBX_PROPERTY_UNCOMPRESSED_FLAG = 0;
static const int32_t FBX_PROPERTY_COMPRESSED_FLAG = 1;
// The version of the FBX node containing the draco mesh. See also: DRACO_MESH_VERSION in HFM.h
static const int FBX_DRACO_MESH_VERSION = 2;
class FBXNode;
using FBXNodeList = QList<FBXNode>;
@ -41,7 +40,7 @@ using FBXNodeList = QList<FBXNode>;
/// A node within an FBX document.
class FBXNode {
public:
QByteArray name;
hifi::ByteArray name;
QVariantList properties;
FBXNodeList children;
};
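The QByteArray, QVariantHash, and QUrl renames in this and the following files are mechanical if shared/HifiTypes.h (included above but not shown in this diff) simply aliases the Qt types into the hifi namespace. The aliases below are an assumption, written out only to make the rename easy to follow.

// Assumed shape of shared/HifiTypes.h (not shown in this diff); sketch only.
#include <QByteArray>
#include <QUrl>
#include <QVariant>   // declares QVariantHash

namespace hifi {
    using ByteArray = QByteArray;
    using VariantHash = QVariantHash;
    using URL = QUrl;
}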

View file

@ -178,7 +178,7 @@ public:
void printNode(const FBXNode& node, int indentLevel) {
int indentLength = 2;
QByteArray spaces(indentLevel * indentLength, ' ');
hifi::ByteArray spaces(indentLevel * indentLength, ' ');
QDebug nodeDebug = qDebug(modelformat);
nodeDebug.nospace() << spaces.data() << node.name.data() << ": ";
@ -308,7 +308,7 @@ public:
};
bool checkMaterialsHaveTextures(const QHash<QString, HFMMaterial>& materials,
const QHash<QString, QByteArray>& textureFilenames, const QMultiMap<QString, QString>& _connectionChildMap) {
const QHash<QString, hifi::ByteArray>& textureFilenames, const QMultiMap<QString, QString>& _connectionChildMap) {
foreach (const QString& materialID, materials.keys()) {
foreach (const QString& childID, _connectionChildMap.values(materialID)) {
if (textureFilenames.contains(childID)) {
@ -375,7 +375,7 @@ HFMLight extractLight(const FBXNode& object) {
return light;
}
QByteArray fileOnUrl(const QByteArray& filepath, const QString& url) {
hifi::ByteArray fileOnUrl(const hifi::ByteArray& filepath, const QString& url) {
// in order to match the behaviour when loading models from remote URLs
// we assume that all external textures are right beside the loaded model
// ignoring any relative paths or absolute paths inside of models
@ -383,8 +383,10 @@ QByteArray fileOnUrl(const QByteArray& filepath, const QString& url) {
return filepath.mid(filepath.lastIndexOf('/') + 1);
}
HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QString& url) {
HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const QString& url) {
const FBXNode& node = _rootNode;
bool deduplicateIndices = mapping["deduplicateIndices"].toBool();
QMap<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
QHash<QString, int> meshIDsToMeshIndices;
@ -406,11 +408,11 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
std::map<QString, HFMLight> lights;
QVariantHash blendshapeMappings = mapping.value("bs").toHash();
hifi::VariantHash blendshapeMappings = mapping.value("bs").toHash();
QMultiHash<QByteArray, WeightedIndex> blendshapeIndices;
QMultiHash<hifi::ByteArray, WeightedIndex> blendshapeIndices;
for (int i = 0;; i++) {
QByteArray blendshapeName = FACESHIFT_BLENDSHAPES[i];
hifi::ByteArray blendshapeName = FACESHIFT_BLENDSHAPES[i];
if (blendshapeName.isEmpty()) {
break;
}
@ -455,7 +457,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
} else if (subobject.name == "Properties70") {
foreach (const FBXNode& subsubobject, subobject.children) {
static const QVariant APPLICATION_NAME = QVariant(QByteArray("Original|ApplicationName"));
static const QVariant APPLICATION_NAME = QVariant(hifi::ByteArray("Original|ApplicationName"));
if (subsubobject.name == "P" && subsubobject.properties.size() >= 5 &&
subsubobject.properties.at(0) == APPLICATION_NAME) {
hfmModel.applicationName = subsubobject.properties.at(4).toString();
@ -472,9 +474,9 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
int index = 4;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == propertyName) {
static const QVariant UNIT_SCALE_FACTOR = QByteArray("UnitScaleFactor");
static const QVariant AMBIENT_COLOR = QByteArray("AmbientColor");
static const QVariant UP_AXIS = QByteArray("UpAxis");
static const QVariant UNIT_SCALE_FACTOR = hifi::ByteArray("UnitScaleFactor");
static const QVariant AMBIENT_COLOR = hifi::ByteArray("AmbientColor");
static const QVariant UP_AXIS = hifi::ByteArray("UpAxis");
const auto& subpropName = subobject.properties.at(0);
if (subpropName == UNIT_SCALE_FACTOR) {
unitScaleFactor = subobject.properties.at(index).toFloat();
@ -499,7 +501,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
foreach (const FBXNode& object, child.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
meshes.insert(getID(object.properties), extractMesh(object, meshIndex));
meshes.insert(getID(object.properties), extractMesh(object, meshIndex, deduplicateIndices));
} else { // object.properties.at(2) == "Shape"
ExtractedBlendshape extracted = { getID(object.properties), extractBlendshape(object) };
blendshapes.append(extracted);
@ -540,7 +542,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
QVector<ExtractedBlendshape> blendshapes;
foreach (const FBXNode& subobject, object.children) {
bool properties = false;
QByteArray propertyName;
hifi::ByteArray propertyName;
int index;
if (subobject.name == "Properties60") {
properties = true;
@ -553,27 +555,27 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
index = 4;
}
if (properties) {
static const QVariant ROTATION_ORDER = QByteArray("RotationOrder");
static const QVariant GEOMETRIC_TRANSLATION = QByteArray("GeometricTranslation");
static const QVariant GEOMETRIC_ROTATION = QByteArray("GeometricRotation");
static const QVariant GEOMETRIC_SCALING = QByteArray("GeometricScaling");
static const QVariant LCL_TRANSLATION = QByteArray("Lcl Translation");
static const QVariant LCL_ROTATION = QByteArray("Lcl Rotation");
static const QVariant LCL_SCALING = QByteArray("Lcl Scaling");
static const QVariant ROTATION_MAX = QByteArray("RotationMax");
static const QVariant ROTATION_MAX_X = QByteArray("RotationMaxX");
static const QVariant ROTATION_MAX_Y = QByteArray("RotationMaxY");
static const QVariant ROTATION_MAX_Z = QByteArray("RotationMaxZ");
static const QVariant ROTATION_MIN = QByteArray("RotationMin");
static const QVariant ROTATION_MIN_X = QByteArray("RotationMinX");
static const QVariant ROTATION_MIN_Y = QByteArray("RotationMinY");
static const QVariant ROTATION_MIN_Z = QByteArray("RotationMinZ");
static const QVariant ROTATION_OFFSET = QByteArray("RotationOffset");
static const QVariant ROTATION_PIVOT = QByteArray("RotationPivot");
static const QVariant SCALING_OFFSET = QByteArray("ScalingOffset");
static const QVariant SCALING_PIVOT = QByteArray("ScalingPivot");
static const QVariant PRE_ROTATION = QByteArray("PreRotation");
static const QVariant POST_ROTATION = QByteArray("PostRotation");
static const QVariant ROTATION_ORDER = hifi::ByteArray("RotationOrder");
static const QVariant GEOMETRIC_TRANSLATION = hifi::ByteArray("GeometricTranslation");
static const QVariant GEOMETRIC_ROTATION = hifi::ByteArray("GeometricRotation");
static const QVariant GEOMETRIC_SCALING = hifi::ByteArray("GeometricScaling");
static const QVariant LCL_TRANSLATION = hifi::ByteArray("Lcl Translation");
static const QVariant LCL_ROTATION = hifi::ByteArray("Lcl Rotation");
static const QVariant LCL_SCALING = hifi::ByteArray("Lcl Scaling");
static const QVariant ROTATION_MAX = hifi::ByteArray("RotationMax");
static const QVariant ROTATION_MAX_X = hifi::ByteArray("RotationMaxX");
static const QVariant ROTATION_MAX_Y = hifi::ByteArray("RotationMaxY");
static const QVariant ROTATION_MAX_Z = hifi::ByteArray("RotationMaxZ");
static const QVariant ROTATION_MIN = hifi::ByteArray("RotationMin");
static const QVariant ROTATION_MIN_X = hifi::ByteArray("RotationMinX");
static const QVariant ROTATION_MIN_Y = hifi::ByteArray("RotationMinY");
static const QVariant ROTATION_MIN_Z = hifi::ByteArray("RotationMinZ");
static const QVariant ROTATION_OFFSET = hifi::ByteArray("RotationOffset");
static const QVariant ROTATION_PIVOT = hifi::ByteArray("RotationPivot");
static const QVariant SCALING_OFFSET = hifi::ByteArray("ScalingOffset");
static const QVariant SCALING_PIVOT = hifi::ByteArray("ScalingPivot");
static const QVariant PRE_ROTATION = hifi::ByteArray("PreRotation");
static const QVariant POST_ROTATION = hifi::ByteArray("PostRotation");
foreach(const FBXNode& property, subobject.children) {
const auto& childProperty = property.properties.at(0);
if (property.name == propertyName) {
@ -643,10 +645,10 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
}
}
} else if (subobject.name == "Vertices") {
} else if (subobject.name == "Vertices" || subobject.name == "DracoMesh") {
// it's a mesh as well as a model
mesh = &meshes[getID(object.properties)];
*mesh = extractMesh(object, meshIndex);
*mesh = extractMesh(object, meshIndex, deduplicateIndices);
} else if (subobject.name == "Shape") {
ExtractedBlendshape blendshape = { subobject.properties.at(0).toString(),
@ -713,8 +715,8 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
const int MODEL_UV_SCALING_MIN_SIZE = 2;
const int CROPPING_MIN_SIZE = 4;
if (subobject.name == "RelativeFilename" && subobject.properties.length() >= RELATIVE_FILENAME_MIN_SIZE) {
QByteArray filename = subobject.properties.at(0).toByteArray();
QByteArray filepath = filename.replace('\\', '/');
hifi::ByteArray filename = subobject.properties.at(0).toByteArray();
hifi::ByteArray filepath = filename.replace('\\', '/');
filename = fileOnUrl(filepath, url);
_textureFilepaths.insert(getID(object.properties), filepath);
_textureFilenames.insert(getID(object.properties), filename);
@ -743,17 +745,17 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
subobject.properties.at(2).value<int>(),
subobject.properties.at(3).value<int>()));
} else if (subobject.name == "Properties70") {
QByteArray propertyName;
hifi::ByteArray propertyName;
int index;
propertyName = "P";
index = 4;
foreach (const FBXNode& property, subobject.children) {
static const QVariant UV_SET = QByteArray("UVSet");
static const QVariant CURRENT_TEXTURE_BLEND_MODE = QByteArray("CurrentTextureBlendMode");
static const QVariant USE_MATERIAL = QByteArray("UseMaterial");
static const QVariant TRANSLATION = QByteArray("Translation");
static const QVariant ROTATION = QByteArray("Rotation");
static const QVariant SCALING = QByteArray("Scaling");
static const QVariant UV_SET = hifi::ByteArray("UVSet");
static const QVariant CURRENT_TEXTURE_BLEND_MODE = hifi::ByteArray("CurrentTextureBlendMode");
static const QVariant USE_MATERIAL = hifi::ByteArray("UseMaterial");
static const QVariant TRANSLATION = hifi::ByteArray("Translation");
static const QVariant ROTATION = hifi::ByteArray("Rotation");
static const QVariant SCALING = hifi::ByteArray("Scaling");
if (property.name == propertyName) {
QString v = property.properties.at(0).toString();
if (property.properties.at(0) == UV_SET) {
@ -807,8 +809,8 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
_textureParams.insert(getID(object.properties), tex);
}
} else if (object.name == "Video") {
QByteArray filepath;
QByteArray content;
hifi::ByteArray filepath;
hifi::ByteArray content;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
filepath = subobject.properties.at(0).toByteArray();
@ -828,7 +830,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
foreach (const FBXNode& subobject, object.children) {
bool properties = false;
QByteArray propertyName;
hifi::ByteArray propertyName;
int index;
if (subobject.name == "Properties60") {
properties = true;
@ -845,31 +847,31 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
if (properties) {
std::vector<std::string> unknowns;
static const QVariant DIFFUSE_COLOR = QByteArray("DiffuseColor");
static const QVariant DIFFUSE_FACTOR = QByteArray("DiffuseFactor");
static const QVariant DIFFUSE = QByteArray("Diffuse");
static const QVariant SPECULAR_COLOR = QByteArray("SpecularColor");
static const QVariant SPECULAR_FACTOR = QByteArray("SpecularFactor");
static const QVariant SPECULAR = QByteArray("Specular");
static const QVariant EMISSIVE_COLOR = QByteArray("EmissiveColor");
static const QVariant EMISSIVE_FACTOR = QByteArray("EmissiveFactor");
static const QVariant EMISSIVE = QByteArray("Emissive");
static const QVariant AMBIENT_FACTOR = QByteArray("AmbientFactor");
static const QVariant SHININESS = QByteArray("Shininess");
static const QVariant OPACITY = QByteArray("Opacity");
static const QVariant MAYA_USE_NORMAL_MAP = QByteArray("Maya|use_normal_map");
static const QVariant MAYA_BASE_COLOR = QByteArray("Maya|base_color");
static const QVariant MAYA_USE_COLOR_MAP = QByteArray("Maya|use_color_map");
static const QVariant MAYA_ROUGHNESS = QByteArray("Maya|roughness");
static const QVariant MAYA_USE_ROUGHNESS_MAP = QByteArray("Maya|use_roughness_map");
static const QVariant MAYA_METALLIC = QByteArray("Maya|metallic");
static const QVariant MAYA_USE_METALLIC_MAP = QByteArray("Maya|use_metallic_map");
static const QVariant MAYA_EMISSIVE = QByteArray("Maya|emissive");
static const QVariant MAYA_EMISSIVE_INTENSITY = QByteArray("Maya|emissive_intensity");
static const QVariant MAYA_USE_EMISSIVE_MAP = QByteArray("Maya|use_emissive_map");
static const QVariant MAYA_USE_AO_MAP = QByteArray("Maya|use_ao_map");
static const QVariant MAYA_UV_SCALE = QByteArray("Maya|uv_scale");
static const QVariant MAYA_UV_OFFSET = QByteArray("Maya|uv_offset");
static const QVariant DIFFUSE_COLOR = hifi::ByteArray("DiffuseColor");
static const QVariant DIFFUSE_FACTOR = hifi::ByteArray("DiffuseFactor");
static const QVariant DIFFUSE = hifi::ByteArray("Diffuse");
static const QVariant SPECULAR_COLOR = hifi::ByteArray("SpecularColor");
static const QVariant SPECULAR_FACTOR = hifi::ByteArray("SpecularFactor");
static const QVariant SPECULAR = hifi::ByteArray("Specular");
static const QVariant EMISSIVE_COLOR = hifi::ByteArray("EmissiveColor");
static const QVariant EMISSIVE_FACTOR = hifi::ByteArray("EmissiveFactor");
static const QVariant EMISSIVE = hifi::ByteArray("Emissive");
static const QVariant AMBIENT_FACTOR = hifi::ByteArray("AmbientFactor");
static const QVariant SHININESS = hifi::ByteArray("Shininess");
static const QVariant OPACITY = hifi::ByteArray("Opacity");
static const QVariant MAYA_USE_NORMAL_MAP = hifi::ByteArray("Maya|use_normal_map");
static const QVariant MAYA_BASE_COLOR = hifi::ByteArray("Maya|base_color");
static const QVariant MAYA_USE_COLOR_MAP = hifi::ByteArray("Maya|use_color_map");
static const QVariant MAYA_ROUGHNESS = hifi::ByteArray("Maya|roughness");
static const QVariant MAYA_USE_ROUGHNESS_MAP = hifi::ByteArray("Maya|use_roughness_map");
static const QVariant MAYA_METALLIC = hifi::ByteArray("Maya|metallic");
static const QVariant MAYA_USE_METALLIC_MAP = hifi::ByteArray("Maya|use_metallic_map");
static const QVariant MAYA_EMISSIVE = hifi::ByteArray("Maya|emissive");
static const QVariant MAYA_EMISSIVE_INTENSITY = hifi::ByteArray("Maya|emissive_intensity");
static const QVariant MAYA_USE_EMISSIVE_MAP = hifi::ByteArray("Maya|use_emissive_map");
static const QVariant MAYA_USE_AO_MAP = hifi::ByteArray("Maya|use_ao_map");
static const QVariant MAYA_UV_SCALE = hifi::ByteArray("Maya|uv_scale");
static const QVariant MAYA_UV_OFFSET = hifi::ByteArray("Maya|uv_offset");
static const int MAYA_UV_OFFSET_PROPERTY_LENGTH = 6;
static const int MAYA_UV_SCALE_PROPERTY_LENGTH = 6;
@ -1050,7 +1052,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
} else if (object.properties.last() == "BlendShapeChannel") {
QByteArray name = object.properties.at(1).toByteArray();
hifi::ByteArray name = object.properties.at(1).toByteArray();
name = name.left(name.indexOf('\0'));
if (!blendshapeIndices.contains(name)) {
@ -1087,8 +1089,8 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
#endif
}
} else if (child.name == "Connections") {
static const QVariant OO = QByteArray("OO");
static const QVariant OP = QByteArray("OP");
static const QVariant OO = hifi::ByteArray("OO");
static const QVariant OP = hifi::ByteArray("OP");
foreach (const FBXNode& connection, child.children) {
if (connection.name == "C" || connection.name == "Connect") {
if (connection.properties.at(0) == OO) {
@ -1107,7 +1109,7 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
} else if (connection.properties.at(0) == OP) {
int counter = 0;
QByteArray type = connection.properties.at(3).toByteArray().toLower();
hifi::ByteArray type = connection.properties.at(3).toByteArray().toLower();
if (type.contains("DiffuseFactor")) {
diffuseFactorTextures.insert(getID(connection.properties, 2), getID(connection.properties, 1));
} else if ((type.contains("diffuse") && !type.contains("tex_global_diffuse"))) {
@ -1404,9 +1406,9 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
// look for textures, material properties
// allocate the Part material library
// NOTE: extracted.partMaterialTextures is empty for FBX_DRACO_MESH_VERSION >= 2. In that case, the mesh part's materialID string is already defined.
int materialIndex = 0;
int textureIndex = 0;
bool generateTangents = false;
QList<QString> children = _connectionChildMap.values(modelID);
for (int i = children.size() - 1; i >= 0; i--) {
@ -1419,12 +1421,10 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
if (extracted.partMaterialTextures.at(j).first == materialIndex) {
HFMMeshPart& part = extracted.mesh.parts[j];
part.materialID = material.materialID;
generateTangents |= material.needTangentSpace();
}
}
materialIndex++;
} else if (_textureFilenames.contains(childID)) {
// NOTE (Sabrina 2019/01/11): getTextures now takes in the materialID as a second parameter, because FBX material nodes can sometimes have uv transform information (ex: "Maya|uv_scale")
// I'm leaving the second parameter blank right now as this code may never be used.
@ -1694,11 +1694,13 @@ std::unique_ptr<hfm::Serializer::Factory> FBXSerializer::getFactory() const {
return std::make_unique<hfm::Serializer::SimpleFactory<FBXSerializer>>();
}
HFMModel::Pointer FBXSerializer::read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url) {
QBuffer buffer(const_cast<QByteArray*>(&data));
HFMModel::Pointer FBXSerializer::read(const hifi::ByteArray& data, const hifi::VariantHash& mapping, const hifi::URL& url) {
QBuffer buffer(const_cast<hifi::ByteArray*>(&data));
buffer.open(QIODevice::ReadOnly);
_rootNode = parseFBX(&buffer);
// FBXSerializer's mapping parameter supports the bool "deduplicateIndices," which is passed into FBXSerializer::extractMesh as "deduplicate"
return HFMModel::Pointer(extractHFMModel(mapping, url.toString()));
}
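The mapping hash now carries a "deduplicateIndices" flag that read() forwards to extractMesh(). A hedged call-site sketch follows; the data buffer and URL are placeholders, and only the read() signature and the mapping key used above are taken from this diff.

// Sketch only: asking the serializer to merge duplicate vertices while reading.
hifi::ByteArray data;                     // FBX file contents (placeholder)
hifi::VariantHash mapping;
mapping["deduplicateIndices"] = true;     // forwarded to extractMesh() as "deduplicate"

FBXSerializer serializer;
HFMModel::Pointer model = serializer.read(data, mapping, hifi::URL("file:///path/to/model.fbx"));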

View file

@ -15,9 +15,6 @@
#include <QtGlobal>
#include <QMetaType>
#include <QSet>
#include <QUrl>
#include <QVarLengthArray>
#include <QVariant>
#include <QVector>
#include <glm/glm.hpp>
@ -25,6 +22,7 @@
#include <Extents.h>
#include <Transform.h>
#include <shared/HifiTypes.h>
#include "FBX.h"
#include <hfm/HFMSerializer.h>
@ -114,25 +112,25 @@ public:
HFMModel* _hfmModel;
/// Reads HFMModel from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
HFMModel::Pointer read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url = QUrl()) override;
HFMModel::Pointer read(const hifi::ByteArray& data, const hifi::VariantHash& mapping, const hifi::URL& url = hifi::URL()) override;
FBXNode _rootNode;
static FBXNode parseFBX(QIODevice* device);
HFMModel* extractHFMModel(const QVariantHash& mapping, const QString& url);
HFMModel* extractHFMModel(const hifi::VariantHash& mapping, const QString& url);
static ExtractedMesh extractMesh(const FBXNode& object, unsigned int& meshIndex, bool deduplicate = true);
static ExtractedMesh extractMesh(const FBXNode& object, unsigned int& meshIndex, bool deduplicate);
QHash<QString, ExtractedMesh> meshes;
HFMTexture getTexture(const QString& textureID, const QString& materialID);
QHash<QString, QString> _textureNames;
// Hashes the original RelativeFilename of textures
QHash<QString, QByteArray> _textureFilepaths;
QHash<QString, hifi::ByteArray> _textureFilepaths;
// Hashes the place to look for textures, in case they are not inlined
QHash<QString, QByteArray> _textureFilenames;
QHash<QString, hifi::ByteArray> _textureFilenames;
// Hashes texture content by filepath, in case they are inlined
QHash<QByteArray, QByteArray> _textureContent;
QHash<hifi::ByteArray, hifi::ByteArray> _textureContent;
QHash<QString, TextureParam> _textureParams;

View file

@ -15,7 +15,6 @@
#include <memory>
#include <QBuffer>
#include <QDataStream>
#include <QIODevice>
#include <QStringList>
#include <QTextStream>
@ -29,7 +28,7 @@
HFMTexture FBXSerializer::getTexture(const QString& textureID, const QString& materialID) {
HFMTexture texture;
const QByteArray& filepath = _textureFilepaths.value(textureID);
const hifi::ByteArray& filepath = _textureFilepaths.value(textureID);
texture.content = _textureContent.value(filepath);
if (texture.content.isEmpty()) { // the content is not inlined

Some files were not shown because too many files have changed in this diff.