Integrate HFM Asset Engine (aka model prep step) into Oven

Add 'deduplicateIndices' parameter to FBXSerializer and make deduplicate a required parameter for extractMesh

Add draco mesh version and FBX draco mesh node version constants

Support generating/saving draco meshes from FBX Model nodes
sabrina-shanman 2019-02-21 15:36:31 -08:00
parent 82382fe9a1
commit 1576125c42
22 changed files with 744 additions and 532 deletions
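For illustration, a minimal sketch of how a loader opts into the new 'deduplicateIndices' mapping flag (based on the GeometryReader and VHACDUtil hunks further down; the helper name and surrounding setup are assumptions, not part of the diff):

// Sketch (assumed helper): reading a model with index deduplication enabled.
// FBXSerializer::extractHFMModel reads mapping["deduplicateIndices"] and forwards
// it to FBXSerializer::extractMesh as the now-required "deduplicate" argument;
// leaving the flag unset means no deduplication.
#include <FBXSerializer.h>

hfm::Model::Pointer readModelDeduplicated(const hifi::ByteArray& data, const QUrl& url) {
    hifi::VariantHash mapping;
    mapping["deduplicateIndices"] = true; // runtime loaders (GeometryReader, vhacd-util) set this
    return FBXSerializer().read(data, mapping, url);
}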

View file

@ -1,7 +1,7 @@
set(TARGET_NAME baking)
setup_hifi_library(Concurrent)
link_hifi_libraries(shared shaders graphics networking material-networking graphics-scripting ktx image fbx)
link_hifi_libraries(shared shaders graphics networking material-networking graphics-scripting ktx image fbx model-baker task)
include_hifi_library_headers(gpu)
include_hifi_library_headers(hfm)

View file

@ -37,24 +37,9 @@
#include "FBXToJSON.h"
#endif
void FBXBaker::bake() {
qDebug() << "FBXBaker" << _modelURL << "bake starting";
// Setup the output folders for the results of this bake
initializeOutputDirs();
if (shouldStop()) {
return;
}
connect(this, &FBXBaker::sourceCopyReadyToLoad, this, &FBXBaker::bakeSourceCopy);
// make a local copy of the FBX file
loadSourceFBX();
}
void FBXBaker::bakeSourceCopy() {
// load the scene from the FBX file
void FBXBaker::bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
_hfmModel = hfmModel;
// Load the root node from the FBX file
importScene();
if (shouldStop()) {
@ -68,94 +53,7 @@ void FBXBaker::bakeSourceCopy() {
return;
}
rewriteAndBakeSceneModels();
if (shouldStop()) {
return;
}
// check if we're already done with textures (in case we had none to re-write)
checkIfTexturesFinished();
}
void FBXBaker::loadSourceFBX() {
// check if the FBX is local or first needs to be downloaded
if (_modelURL.isLocalFile()) {
// load up the local file
QFile localFBX { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localFBX.exists()) {
//QMessageBox::warning(this, "Could not find " + _fbxURL.toString(), "");
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localFBX.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localFBX.copy(_originalModelFilePath);
// emit our signal to start the import of the FBX source copy
emit sourceCopyReadyToLoad();
} else {
// remote file, kick off a download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &FBXBaker::handleFBXNetworkReply);
}
}
void FBXBaker::handleFBXNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original FBX to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this FBX stating that a duplicate of the original FBX could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// emit our signal to start the import of the FBX source copy
emit sourceCopyReadyToLoad();
} else {
// add an error to our list stating that the FBX could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
rewriteAndBakeSceneModels(hfmModel->meshes, dracoMeshes, dracoMaterialLists);
}
void FBXBaker::importScene() {
@ -167,10 +65,8 @@ void FBXBaker::importScene() {
return;
}
FBXSerializer fbxSerializer;
qCDebug(model_baking) << "Parsing" << _modelURL;
_rootNode = fbxSerializer._rootNode = fbxSerializer.parseFBX(&fbxFile);
_rootNode = FBXSerializer().parseFBX(&fbxFile);
#ifdef HIFI_DUMP_FBX
{
@ -185,85 +81,113 @@ void FBXBaker::importScene() {
}
}
#endif
_hfmModel = fbxSerializer.extractHFMModel({}, _modelURL.toString());
_textureContentMap = fbxSerializer._textureContent;
}
void FBXBaker::rewriteAndBakeSceneModels() {
unsigned int meshIndex = 0;
bool hasDeformers { false };
for (FBXNode& rootChild : _rootNode.children) {
if (rootChild.name == "Objects") {
for (FBXNode& objectChild : rootChild.children) {
if (objectChild.name == "Deformer") {
hasDeformers = true;
break;
}
void FBXBaker::replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList) {
// Compress mesh information and store in dracoMeshNode
FBXNode dracoMeshNode;
bool success = buildDracoMeshNode(dracoMeshNode, dracoMeshBytes, dracoMaterialList);
if (!success) {
return;
} else {
meshNode.children.push_back(dracoMeshNode);
static const std::vector<QString> nodeNamesToDelete {
// Node data that is packed into the draco mesh
"Vertices",
"PolygonVertexIndex",
"LayerElementNormal",
"LayerElementColor",
"LayerElementUV",
"LayerElementMaterial",
"LayerElementTexture",
// Node data that we don't support
"Edges",
"LayerElementTangent",
"LayerElementBinormal",
"LayerElementSmoothing"
};
auto& children = meshNode.children;
auto it = children.begin();
while (it != children.end()) {
auto begin = nodeNamesToDelete.begin();
auto end = nodeNamesToDelete.end();
if (find(begin, end, it->name) != end) {
it = children.erase(it);
} else {
++it;
}
}
if (hasDeformers) {
break;
}
}
}
void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
std::vector<int> meshIndexToRuntimeOrder;
auto meshCount = (int)meshes.size();
meshIndexToRuntimeOrder.resize(meshCount);
for (int i = 0; i < meshCount; i++) {
meshIndexToRuntimeOrder[meshes[i].meshIndex] = i;
}
// The meshIndex represents the order in which the meshes are loaded from the FBX file
// We replicate this order by iterating over the meshes in the same way that FBXSerializer does
int meshIndex = 0;
for (FBXNode& rootChild : _rootNode.children) {
if (rootChild.name == "Objects") {
for (FBXNode& objectChild : rootChild.children) {
if (objectChild.name == "Geometry") {
for (FBXNode& object : rootChild.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
int meshNum = meshIndexToRuntimeOrder[meshIndex];
replaceMeshNodeWithDraco(object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
meshIndex++;
}
} else if (object.name == "Model") {
for (FBXNode& modelChild : object.children) {
bool properties = false;
hifi::ByteArray propertyName;
int index;
if (modelChild.name == "Properties60") {
properties = true;
propertyName = "Property";
index = 3;
// TODO Pull this out of _hfmModel instead so we don't have to reprocess it
auto extractedMesh = FBXSerializer::extractMesh(objectChild, meshIndex, false);
// Callback to get MaterialID
GetMaterialIDCallback materialIDcallback = [&extractedMesh](int partIndex) {
return extractedMesh.partMaterialTextures[partIndex].first;
};
// Compress mesh information and store in dracoMeshNode
FBXNode dracoMeshNode;
bool success = compressMesh(extractedMesh.mesh, hasDeformers, dracoMeshNode, materialIDcallback);
// if the bake fails: return if there were errors, and continue if there were only warnings.
if (!success) {
if (hasErrors()) {
return;
} else if (hasWarnings()) {
continue;
} else if (modelChild.name == "Properties70") {
properties = true;
propertyName = "P";
index = 4;
}
} else {
objectChild.children.push_back(dracoMeshNode);
static const std::vector<QString> nodeNamesToDelete {
// Node data that is packed into the draco mesh
"Vertices",
"PolygonVertexIndex",
"LayerElementNormal",
"LayerElementColor",
"LayerElementUV",
"LayerElementMaterial",
"LayerElementTexture",
// Node data that we don't support
"Edges",
"LayerElementTangent",
"LayerElementBinormal",
"LayerElementSmoothing"
};
auto& children = objectChild.children;
auto it = children.begin();
while (it != children.end()) {
auto begin = nodeNamesToDelete.begin();
auto end = nodeNamesToDelete.end();
if (find(begin, end, it->name) != end) {
it = children.erase(it);
} else {
++it;
if (properties) {
// This is a properties node
// Remove the geometric transform because that has been applied directly to the vertices in FBXSerializer
static const QVariant GEOMETRIC_TRANSLATION = hifi::ByteArray("GeometricTranslation");
static const QVariant GEOMETRIC_ROTATION = hifi::ByteArray("GeometricRotation");
static const QVariant GEOMETRIC_SCALING = hifi::ByteArray("GeometricScaling");
for (int i = 0; i < modelChild.children.size(); i++) {
const auto& prop = modelChild.children[i];
const auto& propertyName = prop.properties.at(0);
if (propertyName == GEOMETRIC_TRANSLATION ||
propertyName == GEOMETRIC_ROTATION ||
propertyName == GEOMETRIC_SCALING) {
modelChild.children.removeAt(i);
--i;
}
}
} else if (modelChild.name == "Vertices") {
// This model is also a mesh
int meshNum = meshIndexToRuntimeOrder[meshIndex];
replaceMeshNodeWithDraco(object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
meshIndex++;
}
}
} // Geometry Object
}
} // foreach root child
if (hasErrors()) {
return;
}
}
}
}
}
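For illustration, a minimal sketch of the index remapping that rewriteAndBakeSceneModels relies on (the standalone helper below is an assumption; the real code builds the table inline):

// Each hfm::Mesh remembers the meshIndex it was assigned while FBXSerializer walked
// the FBX tree, so inverting that gives "FBX traversal order -> position in
// hfmModel->meshes", which is also the order of dracoMeshes and dracoMaterialLists.
std::vector<int> meshIndexToRuntimeOrder(const QVector<hfm::Mesh>& meshes) {
    std::vector<int> table(meshes.size());
    for (int i = 0; i < (int)meshes.size(); i++) {
        table[meshes[i].meshIndex] = i;
    }
    return table;
}
// While re-walking the FBX "Objects" node, the nth mesh node encountered is
// replaced using dracoMeshes[table[n]] and dracoMaterialLists[table[n]].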

View file

@ -33,25 +33,16 @@ class FBXBaker : public ModelBaker {
public:
using ModelBaker::ModelBaker;
public slots:
virtual void bake() override;
signals:
void sourceCopyReadyToLoad();
private slots:
void bakeSourceCopy();
void handleFBXNetworkReply();
protected:
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;
private:
void loadSourceFBX();
void importScene();
void embedTextureMetaData();
void rewriteAndBakeSceneModels();
void rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
void rewriteAndBakeSceneTextures();
void replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
HFMModel* _hfmModel;
hfm::Model::Pointer _hfmModel;
QHash<QString, int> _textureNameMatchCount;
QHash<QUrl, QString> _remappedTexturePaths;

View file

@ -12,6 +12,13 @@
#include "ModelBaker.h"
#include <PathUtils.h>
#include <NetworkAccessManager.h>
#include <DependencyManager.h>
#include <hfm/ModelFormatRegistry.h>
#include <model-baker/Baker.h>
#include <model-baker/PrepareJointsTask.h>
#include <FBXWriter.h>
@ -31,6 +38,8 @@
#pragma warning( pop )
#endif
#include "baking/BakerLibrary.h"
ModelBaker::ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
_modelURL(inputModelURL),
@ -65,6 +74,22 @@ ModelBaker::~ModelBaker() {
}
}
void ModelBaker::bake() {
qDebug() << "ModelBaker" << _modelURL << "bake starting";
// Setup the output folders for the results of this bake
initializeOutputDirs();
if (shouldStop()) {
return;
}
connect(this, &ModelBaker::modelLoaded, this, &ModelBaker::bakeSourceCopy);
// make a local copy of the model
saveSourceModel();
}
void ModelBaker::initializeOutputDirs() {
// Attempt to make the output folders
// Warn if there is an output directory using the same name
@ -88,6 +113,166 @@ void ModelBaker::initializeOutputDirs() {
}
}
void ModelBaker::saveSourceModel() {
// check if the model is local or first needs to be downloaded
if (_modelURL.isLocalFile()) {
// load up the local file
QFile localModelURL { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localModelURL.exists()) {
//QMessageBox::warning(this, "Could not find " + _fbxURL.toString(), "");
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localModelURL.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localModelURL.copy(_originalModelFilePath);
// emit our signal to start the import of the model source copy
emit modelLoaded();
} else {
// remote file, kick off a download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &ModelBaker::handleModelNetworkReply);
}
}
void ModelBaker::handleModelNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original model file to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this model stating that a duplicate of the original model could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// emit our signal to start the import of the model source copy
emit modelLoaded();
} else {
// add an error to our list stating that the model could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
}
// TODO: Remove after testing
#include <model-baker/BuildDracoMeshTask.h>
void ModelBaker::bakeSourceCopy() {
QFile modelFile(_originalModelFilePath);
if (!modelFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalModelFilePath + " for reading");
return;
}
hifi::ByteArray modelData = modelFile.readAll();
hfm::Model::Pointer bakedModel;
std::vector<hifi::ByteArray> dracoMeshes;
std::vector<std::vector<hifi::ByteArray>> dracoMaterialLists; // Material order for per-mesh material lookup used by dracoMeshes
{
auto serializer = DependencyManager::get<ModelFormatRegistry>()->getSerializerForMediaType(modelData, _modelURL, "");
if (!serializer) {
handleError("Could not recognize file type of model file " + _originalModelFilePath);
return;
}
hifi::VariantHash mapping;
mapping["combineParts"] = true; // set true so that OBJSerializer reads material info from material library
hfm::Model::Pointer loadedModel = serializer->read(modelData, mapping, _modelURL);
baker::Baker baker(loadedModel, mapping);
auto config = baker.getConfiguration();
// Enable compressed draco mesh generation
config->getJobConfig("BuildDracoMesh")->setEnabled(true);
// Do not permit potentially lossy modification of joint data meant for runtime
((PrepareJointsConfig*)config->getJobConfig("PrepareJoints"))->passthrough = true;
// TODO: Remove after testing
{
auto* dracoConfig = ((BuildDracoMeshConfig*)config->getJobConfig("BuildDracoMesh"));
dracoConfig->encodeSpeed = 10;
dracoConfig->decodeSpeed = -1;
}
// Begin hfm baking
baker.run();
bakedModel = baker.getHFMModel();
dracoMeshes = baker.getDracoMeshes();
dracoMaterialLists = baker.getDracoMaterialLists();
}
// Populate _textureContentMap with path to content mappings, for quick lookup by URL
for (auto materialIt = bakedModel->materials.cbegin(); materialIt != bakedModel->materials.cend(); materialIt++) {
static const auto addTexture = [](QHash<hifi::ByteArray, hifi::ByteArray>& textureContentMap, const hfm::Texture& texture) {
if (!textureContentMap.contains(texture.filename)) {
// Content may be empty, unless the data is inlined
textureContentMap[texture.filename] = texture.content;
}
};
const hfm::Material& material = *materialIt;
addTexture(_textureContentMap, material.normalTexture);
addTexture(_textureContentMap, material.albedoTexture);
addTexture(_textureContentMap, material.opacityTexture);
addTexture(_textureContentMap, material.glossTexture);
addTexture(_textureContentMap, material.roughnessTexture);
addTexture(_textureContentMap, material.specularTexture);
addTexture(_textureContentMap, material.metallicTexture);
addTexture(_textureContentMap, material.emissiveTexture);
addTexture(_textureContentMap, material.occlusionTexture);
addTexture(_textureContentMap, material.scatteringTexture);
addTexture(_textureContentMap, material.lightmapTexture);
}
// Do format-specific baking
bakeProcessedSource(bakedModel, dracoMeshes, dracoMaterialLists);
if (shouldStop()) {
return;
}
// check if we're already done with textures (in case we had none to re-write)
checkIfTexturesFinished();
}
void ModelBaker::abort() {
Baker::abort();
@ -98,176 +283,36 @@ void ModelBaker::abort() {
}
}
bool ModelBaker::compressMesh(HFMMesh& mesh, bool hasDeformers, FBXNode& dracoMeshNode, GetMaterialIDCallback materialIDCallback) {
if (mesh.wasCompressed) {
handleError("Cannot re-bake a file that contains compressed mesh");
bool ModelBaker::buildDracoMeshNode(FBXNode& dracoMeshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList) {
if (dracoMeshBytes.isEmpty()) {
handleError("Failed to finalize the baking of a draco Geometry node");
return false;
}
Q_ASSERT(mesh.normals.size() == 0 || mesh.normals.size() == mesh.vertices.size());
Q_ASSERT(mesh.colors.size() == 0 || mesh.colors.size() == mesh.vertices.size());
Q_ASSERT(mesh.texCoords.size() == 0 || mesh.texCoords.size() == mesh.vertices.size());
int64_t numTriangles{ 0 };
for (auto& part : mesh.parts) {
if ((part.quadTrianglesIndices.size() % 3) != 0 || (part.triangleIndices.size() % 3) != 0) {
handleWarning("Found a mesh part with invalid index data, skipping");
continue;
}
numTriangles += part.quadTrianglesIndices.size() / 3;
numTriangles += part.triangleIndices.size() / 3;
}
if (numTriangles == 0) {
return false;
}
draco::TriangleSoupMeshBuilder meshBuilder;
meshBuilder.Start(numTriangles);
bool hasNormals{ mesh.normals.size() > 0 };
bool hasColors{ mesh.colors.size() > 0 };
bool hasTexCoords{ mesh.texCoords.size() > 0 };
bool hasTexCoords1{ mesh.texCoords1.size() > 0 };
bool hasPerFaceMaterials = (materialIDCallback) ? (mesh.parts.size() > 1 || materialIDCallback(0) != 0 ) : true;
bool needsOriginalIndices{ hasDeformers };
int normalsAttributeID { -1 };
int colorsAttributeID { -1 };
int texCoordsAttributeID { -1 };
int texCoords1AttributeID { -1 };
int faceMaterialAttributeID { -1 };
int originalIndexAttributeID { -1 };
const int positionAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::POSITION,
3, draco::DT_FLOAT32);
if (needsOriginalIndices) {
originalIndexAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_ORIGINAL_INDEX,
1, draco::DT_INT32);
}
if (hasNormals) {
normalsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::NORMAL,
3, draco::DT_FLOAT32);
}
if (hasColors) {
colorsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::COLOR,
3, draco::DT_FLOAT32);
}
if (hasTexCoords) {
texCoordsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::TEX_COORD,
2, draco::DT_FLOAT32);
}
if (hasTexCoords1) {
texCoords1AttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_TEX_COORD_1,
2, draco::DT_FLOAT32);
}
if (hasPerFaceMaterials) {
faceMaterialAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_MATERIAL_ID,
1, draco::DT_UINT16);
}
auto partIndex = 0;
draco::FaceIndex face;
uint16_t materialID;
for (auto& part : mesh.parts) {
materialID = (materialIDCallback) ? materialIDCallback(partIndex) : partIndex;
auto addFace = [&](QVector<int>& indices, int index, draco::FaceIndex face) {
int32_t idx0 = indices[index];
int32_t idx1 = indices[index + 1];
int32_t idx2 = indices[index + 2];
if (hasPerFaceMaterials) {
meshBuilder.SetPerFaceAttributeValueForFace(faceMaterialAttributeID, face, &materialID);
}
meshBuilder.SetAttributeValuesForFace(positionAttributeID, face,
&mesh.vertices[idx0], &mesh.vertices[idx1],
&mesh.vertices[idx2]);
if (needsOriginalIndices) {
meshBuilder.SetAttributeValuesForFace(originalIndexAttributeID, face,
&mesh.originalIndices[idx0],
&mesh.originalIndices[idx1],
&mesh.originalIndices[idx2]);
}
if (hasNormals) {
meshBuilder.SetAttributeValuesForFace(normalsAttributeID, face,
&mesh.normals[idx0], &mesh.normals[idx1],
&mesh.normals[idx2]);
}
if (hasColors) {
meshBuilder.SetAttributeValuesForFace(colorsAttributeID, face,
&mesh.colors[idx0], &mesh.colors[idx1],
&mesh.colors[idx2]);
}
if (hasTexCoords) {
meshBuilder.SetAttributeValuesForFace(texCoordsAttributeID, face,
&mesh.texCoords[idx0], &mesh.texCoords[idx1],
&mesh.texCoords[idx2]);
}
if (hasTexCoords1) {
meshBuilder.SetAttributeValuesForFace(texCoords1AttributeID, face,
&mesh.texCoords1[idx0], &mesh.texCoords1[idx1],
&mesh.texCoords1[idx2]);
}
};
for (int i = 0; (i + 2) < part.quadTrianglesIndices.size(); i += 3) {
addFace(part.quadTrianglesIndices, i, face++);
}
for (int i = 0; (i + 2) < part.triangleIndices.size(); i += 3) {
addFace(part.triangleIndices, i, face++);
}
partIndex++;
}
auto dracoMesh = meshBuilder.Finalize();
if (!dracoMesh) {
handleWarning("Failed to finalize the baking of a draco Geometry node");
return false;
}
// we need to modify unique attribute IDs for custom attributes
// so the attributes are easily retrievable on the other side
if (hasPerFaceMaterials) {
dracoMesh->attribute(faceMaterialAttributeID)->set_unique_id(DRACO_ATTRIBUTE_MATERIAL_ID);
}
if (hasTexCoords1) {
dracoMesh->attribute(texCoords1AttributeID)->set_unique_id(DRACO_ATTRIBUTE_TEX_COORD_1);
}
if (needsOriginalIndices) {
dracoMesh->attribute(originalIndexAttributeID)->set_unique_id(DRACO_ATTRIBUTE_ORIGINAL_INDEX);
}
draco::Encoder encoder;
encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, 14);
encoder.SetAttributeQuantization(draco::GeometryAttribute::TEX_COORD, 12);
encoder.SetAttributeQuantization(draco::GeometryAttribute::NORMAL, 10);
encoder.SetSpeedOptions(0, 5);
draco::EncoderBuffer buffer;
encoder.EncodeMeshToBuffer(*dracoMesh, &buffer);
FBXNode dracoNode;
dracoNode.name = "DracoMesh";
auto value = QVariant::fromValue(QByteArray(buffer.data(), (int)buffer.size()));
dracoNode.properties.append(value);
dracoNode.properties.append(QVariant::fromValue(dracoMeshBytes));
// Additional draco mesh node information
{
FBXNode fbxVersionNode;
fbxVersionNode.name = "FBXDracoMeshVersion";
fbxVersionNode.properties.append(FBX_DRACO_MESH_VERSION);
dracoNode.children.append(fbxVersionNode);
FBXNode dracoVersionNode;
dracoVersionNode.name = "DracoMeshVersion";
dracoVersionNode.properties.append(DRACO_MESH_VERSION);
dracoNode.children.append(dracoVersionNode);
FBXNode materialListNode;
materialListNode.name = "MaterialList";
for (const hifi::ByteArray& materialID : dracoMaterialList) {
materialListNode.properties.append(materialID);
}
dracoNode.children.append(materialListNode);
}
dracoMeshNode = dracoNode;
// Mesh compression successful; return true
return true;
}
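For reference, a sketch of what buildDracoMeshNode produces and how a caller attaches it (mirrors FBXBaker::replaceMeshNodeWithDraco above; the variables are assumed to be in scope):

FBXNode dracoNode;
if (buildDracoMeshNode(dracoNode, dracoMeshBytes, dracoMaterialList)) {
    meshNode.children.push_back(dracoNode);
}
// Resulting node layout (values illustrative):
//   DracoMesh              properties: [<encoded draco byte array>]
//     FBXDracoMeshVersion  properties: [FBX_DRACO_MESH_VERSION]  (2)
//     DracoMeshVersion     properties: [DRACO_MESH_VERSION]      (2)
//     MaterialList         properties: [materialID, ...] in per-face material-index order
// FBXSerializer::extractMesh checks FBXDracoMeshVersion to decide whether each
// mesh part's materialID can be taken directly from MaterialList (version >= 2).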

View file

@ -47,17 +47,23 @@ public:
void initializeOutputDirs();
bool compressMesh(HFMMesh& mesh, bool hasDeformers, FBXNode& dracoMeshNode, GetMaterialIDCallback materialIDCallback = nullptr);
bool buildDracoMeshNode(FBXNode& dracoMeshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
QString compressTexture(QString textureFileName, image::TextureUsage::Type = image::TextureUsage::Type::DEFAULT_TEXTURE);
virtual void setWasAborted(bool wasAborted) override;
QUrl getModelURL() const { return _modelURL; }
QString getBakedModelFilePath() const { return _bakedModelFilePath; }
signals:
void modelLoaded();
public slots:
virtual void bake() override;
virtual void abort() override;
protected:
void saveSourceModel();
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) = 0;
void checkIfTexturesFinished();
void texturesFinished();
void embedTextureMetaData();
@ -72,6 +78,10 @@ protected:
QDir _modelTempDir;
QString _originalModelFilePath;
protected slots:
void handleModelNetworkReply();
virtual void bakeSourceCopy();
private slots:
void handleBakedTexture();
void handleAbortedTexture();

View file

@ -35,150 +35,51 @@ const QByteArray CONNECTIONS_NODE_PROPERTY = "OO";
const QByteArray CONNECTIONS_NODE_PROPERTY_1 = "OP";
const QByteArray MESH = "Mesh";
void OBJBaker::bake() {
qDebug() << "OBJBaker" << _modelURL << "bake starting";
// Setup the output folders for the results of this bake
initializeOutputDirs();
// trigger bakeOBJ once OBJ is loaded
connect(this, &OBJBaker::OBJLoaded, this, &OBJBaker::bakeOBJ);
// make a local copy of the OBJ
loadOBJ();
}
void OBJBaker::loadOBJ() {
// check if the OBJ is local or it needs to be downloaded
if (_modelURL.isLocalFile()) {
// loading the local OBJ
QFile localOBJ { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localOBJ.exists()) {
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localOBJ.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localOBJ.copy(_originalModelFilePath);
// local OBJ is loaded emit signal to trigger its baking
emit OBJLoaded();
} else {
// OBJ is remote, start download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &OBJBaker::handleOBJNetworkReply);
}
}
void OBJBaker::handleOBJNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original obj to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this obj stating that a duplicate of the original obj could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// remote OBJ is loaded emit signal to trigger its baking
emit OBJLoaded();
} else {
// add an error to our list stating that the OBJ could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
}
void OBJBaker::bakeOBJ() {
// Read the OBJ file
QFile objFile(_originalModelFilePath);
if (!objFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalModelFilePath + " for reading");
return;
}
QByteArray objData = objFile.readAll();
OBJSerializer serializer;
QVariantHash mapping;
mapping["combineParts"] = true; // set true so that OBJSerializer reads material info from material library
auto geometry = serializer.read(objData, mapping, _modelURL);
void OBJBaker::bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
// Write OBJ Data as FBX tree nodes
createFBXNodeTree(_rootNode, *geometry);
checkIfTexturesFinished();
createFBXNodeTree(_rootNode, hfmModel, dracoMeshes[0]);
}
void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& hfmModel, const hifi::ByteArray& dracoMesh) {
// Make all generated nodes children of rootNode
rootNode.children = { FBXNode(), FBXNode(), FBXNode() };
FBXNode& globalSettingsNode = rootNode.children[0];
FBXNode& objectNode = rootNode.children[1];
FBXNode& connectionsNode = rootNode.children[2];
// Generating FBX Header Node
FBXNode headerNode;
headerNode.name = FBX_HEADER_EXTENSION;
// Generating global settings node
// Required for Unit Scale Factor
FBXNode globalSettingsNode;
globalSettingsNode.name = GLOBAL_SETTINGS_NODE_NAME;
// Setting the tree hierarchy: GlobalSettings -> Properties70 -> P -> Properties
FBXNode properties70Node;
properties70Node.name = PROPERTIES70_NODE_NAME;
FBXNode pNode;
{
pNode.name = P_NODE_NAME;
pNode.properties.append({
"UnitScaleFactor", "double", "Number", "",
UNIT_SCALE_FACTOR
});
globalSettingsNode.children.push_back(FBXNode());
FBXNode& properties70Node = globalSettingsNode.children.back();
properties70Node.name = PROPERTIES70_NODE_NAME;
FBXNode pNode;
{
pNode.name = P_NODE_NAME;
pNode.properties.append({
"UnitScaleFactor", "double", "Number", "",
UNIT_SCALE_FACTOR
});
}
properties70Node.children = { pNode };
}
properties70Node.children = { pNode };
globalSettingsNode.children = { properties70Node };
// Generating Object node
FBXNode objectNode;
objectNode.name = OBJECTS_NODE_NAME;
objectNode.children = { FBXNode(), FBXNode() };
FBXNode& geometryNode = objectNode.children[0];
FBXNode& modelNode = objectNode.children[1];
// Generating Object node's child - Geometry node
FBXNode geometryNode;
// Generating Object node's child - Geometry node
geometryNode.name = GEOMETRY_NODE_NAME;
NodeID geometryID;
{
@ -189,15 +90,8 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
MESH
};
}
// Compress the mesh information and store in dracoNode
bool hasDeformers = false; // No concept of deformers for an OBJ
FBXNode dracoNode;
compressMesh(hfmModel.meshes[0], hasDeformers, dracoNode);
geometryNode.children.append(dracoNode);
// Generating Object node's child - Model node
FBXNode modelNode;
modelNode.name = MODEL_NODE_NAME;
NodeID modelID;
{
@ -205,16 +99,14 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
modelNode.properties = { modelID, MODEL_NODE_NAME, MESH };
}
objectNode.children = { geometryNode, modelNode };
// Generating Objects node's child - Material node
auto& meshParts = hfmModel.meshes[0].parts;
auto& meshParts = hfmModel->meshes[0].parts;
for (auto& meshPart : meshParts) {
FBXNode materialNode;
materialNode.name = MATERIAL_NODE_NAME;
if (hfmModel.materials.size() == 1) {
if (hfmModel->materials.size() == 1) {
// case when no material information is provided, OBJSerializer considers it as a single default material
for (auto& materialID : hfmModel.materials.keys()) {
for (auto& materialID : hfmModel->materials.keys()) {
setMaterialNodeProperties(materialNode, materialID, hfmModel);
}
} else {
@ -224,12 +116,26 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
objectNode.children.append(materialNode);
}
// Store the draco node containing the compressed mesh information, along with the per-meshPart material IDs the draco node references
// Because we redefine the material IDs when initializing the material nodes above, we pass that in for the material list
// The nth mesh part gets the nth material
{
std::vector<hifi::ByteArray> newMaterialList;
newMaterialList.reserve(_materialIDs.size());
for (auto materialID : _materialIDs) {
newMaterialList.push_back(hifi::ByteArray(std::to_string((int)materialID).c_str()));
}
FBXNode dracoNode;
buildDracoMeshNode(dracoNode, dracoMesh, newMaterialList);
geometryNode.children.append(dracoNode);
}
// Generating Texture Node
// iterate through mesh parts and process the associated textures
auto size = meshParts.size();
for (int i = 0; i < size; i++) {
QString material = meshParts[i].materialID;
HFMMaterial currentMaterial = hfmModel.materials[material];
HFMMaterial currentMaterial = hfmModel->materials[material];
if (!currentMaterial.albedoTexture.filename.isEmpty() || !currentMaterial.specularTexture.filename.isEmpty()) {
auto textureID = nextNodeID();
_mapTextureMaterial.emplace_back(textureID, i);
@ -274,14 +180,15 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
}
// Generating Connections node
FBXNode connectionsNode;
connectionsNode.name = CONNECTIONS_NODE_NAME;
// connect Geometry to Model
FBXNode cNode;
cNode.name = C_NODE_NAME;
cNode.properties = { CONNECTIONS_NODE_PROPERTY, geometryID, modelID };
connectionsNode.children = { cNode };
// connect Geometry to Model
{
FBXNode cNode;
cNode.name = C_NODE_NAME;
cNode.properties = { CONNECTIONS_NODE_PROPERTY, geometryID, modelID };
connectionsNode.children.push_back(cNode);
}
// connect all materials to model
for (auto& materialID : _materialIDs) {
@ -313,18 +220,15 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
};
connectionsNode.children.append(cDiffuseNode);
}
// Make all generated nodes children of rootNode
rootNode.children = { globalSettingsNode, objectNode, connectionsNode };
}
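For orientation, the rough shape of the tree createFBXNodeTree now assembles (a sketch; names refer to the *_NODE_NAME constants used above):

// rootNode
//   [0] GLOBAL_SETTINGS_NODE_NAME -> PROPERTIES70_NODE_NAME -> P_NODE_NAME ("UnitScaleFactor")
//   [1] OBJECTS_NODE_NAME
//         GEOMETRY_NODE_NAME   (gains the DracoMesh child built by buildDracoMeshNode)
//         MODEL_NODE_NAME
//         MATERIAL_NODE_NAME   (one per mesh part), plus texture nodes
//   [2] CONNECTIONS_NODE_NAME  C_NODE_NAME entries connecting the geometry,
//                              materials, and textures to the model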
// Set properties for material nodes
void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, HFMModel& hfmModel) {
void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel) {
auto materialID = nextNodeID();
_materialIDs.push_back(materialID);
materialNode.properties = { materialID, material, MESH };
HFMMaterial currentMaterial = hfmModel.materials[material];
HFMMaterial currentMaterial = hfmModel->materials[material];
// Setting the hierarchy: Material -> Properties70 -> P -> Properties
FBXNode properties70Node;

View file

@ -27,20 +27,12 @@ class OBJBaker : public ModelBaker {
public:
using ModelBaker::ModelBaker;
public slots:
virtual void bake() override;
signals:
void OBJLoaded();
private slots:
void bakeOBJ();
void handleOBJNetworkReply();
protected:
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;
private:
void loadOBJ();
void createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel);
void setMaterialNodeProperties(FBXNode& materialNode, QString material, HFMModel& hfmModel);
void createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& hfmModel, const hifi::ByteArray& dracoMesh);
void setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel);
NodeID nextNodeID() { return _nodeID++; }

View file

@ -69,4 +69,4 @@ std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, TextureB
}
return baker;
}
}

View file

@ -30,6 +30,9 @@ static const quint32 FBX_VERSION_2016 = 7500;
static const int32_t FBX_PROPERTY_UNCOMPRESSED_FLAG = 0;
static const int32_t FBX_PROPERTY_COMPRESSED_FLAG = 1;
// The version of the FBX node containing the draco mesh. See also: DRACO_MESH_VERSION in HFM.h
static const int FBX_DRACO_MESH_VERSION = 2;
class FBXNode;
using FBXNodeList = QList<FBXNode>;

View file

@ -386,6 +386,8 @@ hifi::ByteArray fileOnUrl(const hifi::ByteArray& filepath, const QString& url) {
HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const QString& url) {
const FBXNode& node = _rootNode;
bool deduplicateIndices = mapping["deduplicateIndices"].toBool();
QMap<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
QHash<QString, int> meshIDsToMeshIndices;
@ -487,7 +489,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
foreach (const FBXNode& object, child.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
meshes.insert(getID(object.properties), extractMesh(object, meshIndex));
meshes.insert(getID(object.properties), extractMesh(object, meshIndex, deduplicateIndices));
} else { // object.properties.at(2) == "Shape"
ExtractedBlendshape extracted = { getID(object.properties), extractBlendshape(object) };
blendshapes.append(extracted);
@ -631,10 +633,10 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
}
}
}
} else if (subobject.name == "Vertices") {
} else if (subobject.name == "Vertices" || subobject.name == "DracoMesh") {
// it's a mesh as well as a model
mesh = &meshes[getID(object.properties)];
*mesh = extractMesh(object, meshIndex);
*mesh = extractMesh(object, meshIndex, deduplicateIndices);
} else if (subobject.name == "Shape") {
ExtractedBlendshape blendshape = { subobject.properties.at(0).toString(),
@ -1386,9 +1388,9 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
// look for textures, material properties
// allocate the Part material library
// NOTE: extracted.partMaterialTextures is empty for FBX_DRACO_MESH_VERSION >= 2. In that case, the mesh part's materialID string is already defined.
int materialIndex = 0;
int textureIndex = 0;
bool generateTangents = false;
QList<QString> children = _connectionChildMap.values(modelID);
for (int i = children.size() - 1; i >= 0; i--) {
@ -1401,12 +1403,10 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
if (extracted.partMaterialTextures.at(j).first == materialIndex) {
HFMMeshPart& part = extracted.mesh.parts[j];
part.materialID = material.materialID;
generateTangents |= material.needTangentSpace();
}
}
materialIndex++;
} else if (_textureFilenames.contains(childID)) {
// NOTE (Sabrina 2019/01/11): getTextures now takes in the materialID as a second parameter, because FBX material nodes can sometimes have uv transform information (ex: "Maya|uv_scale")
// I'm leaving the second parameter blank right now as this code may never be used.
@ -1684,5 +1684,7 @@ HFMModel::Pointer FBXSerializer::read(const hifi::ByteArray& data, const hifi::V
_rootNode = parseFBX(&buffer);
// FBXSerializer's mapping parameter supports the bool "deduplicateIndices," which is passed into FBXSerializer::extractMesh as "deduplicate"
return HFMModel::Pointer(extractHFMModel(mapping, url.toString()));
}

View file

@ -119,7 +119,7 @@ public:
HFMModel* extractHFMModel(const hifi::VariantHash& mapping, const QString& url);
static ExtractedMesh extractMesh(const FBXNode& object, unsigned int& meshIndex, bool deduplicate = true);
static ExtractedMesh extractMesh(const FBXNode& object, unsigned int& meshIndex, bool deduplicate);
QHash<QString, ExtractedMesh> meshes;
HFMTexture getTexture(const QString& textureID, const QString& materialID);

View file

@ -345,6 +345,22 @@ ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& me
isDracoMesh = true;
data.extracted.mesh.wasCompressed = true;
// Check for additional metadata
unsigned int dracoMeshNodeVersion = 1;
std::vector<QString> dracoMaterialList;
for (const auto& dracoChild : child.children) {
if (dracoChild.name == "FBXDracoMeshVersion") {
if (!dracoChild.children.isEmpty()) {
dracoMeshNodeVersion = dracoChild.properties[0].toUInt();
}
} else if (dracoChild.name == "MaterialList") {
dracoMaterialList.reserve(dracoChild.properties.size());
for (const auto& materialID : dracoChild.properties) {
dracoMaterialList.push_back(materialID.toString());
}
}
}
// load the draco mesh from the FBX and create a draco::Mesh
draco::Decoder decoder;
draco::DecoderBuffer decodedBuffer;
@ -462,8 +478,20 @@ ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& me
// grab or setup the HFMMeshPart for the part this face belongs to
int& partIndexPlusOne = materialTextureParts[materialTexture];
if (partIndexPlusOne == 0) {
data.extracted.partMaterialTextures.append(materialTexture);
data.extracted.mesh.parts.resize(data.extracted.mesh.parts.size() + 1);
HFMMeshPart& part = data.extracted.mesh.parts.back();
// Figure out what material this part is
if (dracoMeshNodeVersion >= 2) {
// Define the materialID now
if (materialID < dracoMaterialList.size()) {
part.materialID = dracoMaterialList[materialID];
}
} else {
// Define the materialID later, based on the order of first appearance of the materials in the _connectionChildMap
data.extracted.partMaterialTextures.append(materialTexture);
}
partIndexPlusOne = data.extracted.mesh.parts.size();
}
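To summarize the two material-resolution paths above (a sketch of the logic, not additional code):

// FBXDracoMeshVersion >= 2: the face's material index (the draco
//   DRACO_ATTRIBUTE_MATERIAL_ID value) indexes the DracoMesh node's MaterialList,
//   so part.materialID is assigned immediately and partMaterialTextures stays empty.
// Older draco nodes / uncompressed meshes: partMaterialTextures records the
//   material index, and part.materialID is filled in later from the order of the
//   materials in _connectionChildMap (see the extractHFMModel hunk above).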

View file

@ -53,6 +53,9 @@ using ColorType = glm::vec3;
const int MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048;
// The version of the Draco mesh binary data itself. See also: FBX_DRACO_MESH_VERSION in FBX.h
static const int DRACO_MESH_VERSION = 2;
static const int DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES = 1000;
static const int DRACO_ATTRIBUTE_MATERIAL_ID = DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES;
static const int DRACO_ATTRIBUTE_TEX_COORD_1 = DRACO_BEGIN_CUSTOM_HIFI_ATTRIBUTES + 1;

View file

@ -19,6 +19,7 @@
#include "CalculateBlendshapeNormalsTask.h"
#include "CalculateBlendshapeTangentsTask.h"
#include "PrepareJointsTask.h"
#include "BuildDracoMeshTask.h"
namespace baker {
@ -117,7 +118,7 @@ namespace baker {
class BakerEngineBuilder {
public:
using Input = VaryingSet2<hfm::Model::Pointer, hifi::VariantHash>;
using Output = VaryingSet2<hfm::Model::Pointer, MaterialMapping>;
using Output = VaryingSet4<hfm::Model::Pointer, MaterialMapping, std::vector<hifi::ByteArray>, std::vector<std::vector<hifi::ByteArray>>>;
using JobModel = Task::ModelIO<BakerEngineBuilder, Input, Output>;
void build(JobModel& model, const Varying& input, Varying& output) {
const auto& hfmModelIn = input.getN<Input>(0);
@ -156,6 +157,14 @@ namespace baker {
// Parse material mapping
const auto materialMapping = model.addJob<ParseMaterialMappingTask>("ParseMaterialMapping", mapping);
// Build Draco meshes
// NOTE: This task is disabled by default and must be enabled through configuration
// TODO: Tangent support (Needs changes to FBXSerializer_Mesh as well)
const auto buildDracoMeshInputs = BuildDracoMeshTask::Input(meshesIn, normalsPerMesh, tangentsPerMesh).asVarying();
const auto buildDracoMeshOutputs = model.addJob<BuildDracoMeshTask>("BuildDracoMesh", buildDracoMeshInputs);
const auto dracoMeshes = buildDracoMeshOutputs.getN<BuildDracoMeshTask::Output>(0);
const auto materialList = buildDracoMeshOutputs.getN<BuildDracoMeshTask::Output>(1);
// Combine the outputs into a new hfm::Model
const auto buildBlendshapesInputs = BuildBlendshapesTask::Input(blendshapesPerMeshIn, normalsPerBlendshapePerMesh, tangentsPerBlendshapePerMesh).asVarying();
const auto blendshapesPerMeshOut = model.addJob<BuildBlendshapesTask>("BuildBlendshapes", buildBlendshapesInputs);
@ -164,7 +173,7 @@ namespace baker {
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut, jointsOut, jointRotationOffsets, jointIndices).asVarying();
const auto hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);
output = Output(hfmModelOut, materialMapping);
output = Output(hfmModelOut, materialMapping, dracoMeshes, materialList);
}
};
@ -174,6 +183,10 @@ namespace baker {
_engine->feedInput<BakerEngineBuilder::Input>(1, mapping);
}
std::shared_ptr<TaskConfig> Baker::getConfiguration() {
return _engine->getConfiguration();
}
void Baker::run() {
_engine->run();
}
@ -185,4 +198,12 @@ namespace baker {
MaterialMapping Baker::getMaterialMapping() const {
return _engine->getOutput().get<BakerEngineBuilder::Output>().get1();
}
const std::vector<hifi::ByteArray>& Baker::getDracoMeshes() const {
return _engine->getOutput().get<BakerEngineBuilder::Output>().get2();
}
std::vector<std::vector<hifi::ByteArray>> Baker::getDracoMaterialLists() const {
return _engine->getOutput().get<BakerEngineBuilder::Output>().get3();
}
};
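A minimal usage sketch of the new Baker outputs (condensed from ModelBaker::bakeSourceCopy above; loadedModel and mapping are assumed to be prepared by the caller):

baker::Baker baker(loadedModel, mapping);
// Draco generation is disabled by default and must be switched on explicitly
baker.getConfiguration()->getJobConfig("BuildDracoMesh")->setEnabled(true);
baker.run();

hfm::Model::Pointer bakedModel = baker.getHFMModel();
const std::vector<hifi::ByteArray>& dracoMeshes = baker.getDracoMeshes();
std::vector<std::vector<hifi::ByteArray>> dracoMaterialLists = baker.getDracoMaterialLists();
// dracoMeshes[i] is the encoded mesh for the ith runtime mesh, and
// dracoMaterialLists[i] lists the material IDs its per-face material indices refer to.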

View file

@ -24,11 +24,16 @@ namespace baker {
public:
Baker(const hfm::Model::Pointer& hfmModel, const hifi::VariantHash& mapping);
std::shared_ptr<TaskConfig> getConfiguration();
void run();
// Outputs, available after run() is called
hfm::Model::Pointer getHFMModel() const;
MaterialMapping getMaterialMapping() const;
const std::vector<hifi::ByteArray>& getDracoMeshes() const;
// This is a ByteArray and not a std::string because the character sequence can contain the null character (particularly for FBX materials)
std::vector<std::vector<hifi::ByteArray>> getDracoMaterialLists() const;
protected:
EnginePointer _engine;

View file

@ -0,0 +1,233 @@
//
// BuildDracoMeshTask.cpp
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/02/20.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "BuildDracoMeshTask.h"
// Fix build warnings due to draco headers not casting size_t
#ifdef _WIN32
#pragma warning( push )
#pragma warning( disable : 4267 )
#endif
#include <draco/compression/encode.h>
#include <draco/mesh/triangle_soup_mesh_builder.h>
#ifdef _WIN32
#pragma warning( pop )
#endif
#include "ModelBakerLogging.h"
#include "ModelMath.h"
std::vector<hifi::ByteArray> createMaterialList(const hfm::Mesh& mesh) {
std::vector<hifi::ByteArray> materialList;
for (const auto& meshPart : mesh.parts) {
auto materialID = QVariant(meshPart.materialID).toByteArray();
const auto materialIt = std::find(materialList.cbegin(), materialList.cend(), materialID);
if (materialIt == materialList.cend()) {
materialList.push_back(materialID);
}
}
return materialList;
}
std::unique_ptr<draco::Mesh> createDracoMesh(const hfm::Mesh& mesh, const std::vector<glm::vec3>& normals, const std::vector<glm::vec3>& tangents, const std::vector<hifi::ByteArray>& materialList) {
Q_ASSERT(normals.size() == 0 || normals.size() == mesh.vertices.size());
Q_ASSERT(mesh.colors.size() == 0 || mesh.colors.size() == mesh.vertices.size());
Q_ASSERT(mesh.texCoords.size() == 0 || mesh.texCoords.size() == mesh.vertices.size());
int64_t numTriangles{ 0 };
for (auto& part : mesh.parts) {
int extraQuadTriangleIndices = part.quadTrianglesIndices.size() % 3;
int extraTriangleIndices = part.triangleIndices.size() % 3;
if (extraQuadTriangleIndices != 0 || extraTriangleIndices != 0) {
qCWarning(model_baker) << "Found a mesh part with indices not divisible by three. Some indices will be discarded during Draco mesh creation.";
}
numTriangles += (part.quadTrianglesIndices.size() - extraQuadTriangleIndices) / 3;
numTriangles += (part.triangleIndices.size() - extraTriangleIndices) / 3;
}
if (numTriangles == 0) {
return std::unique_ptr<draco::Mesh>();
}
draco::TriangleSoupMeshBuilder meshBuilder;
meshBuilder.Start(numTriangles);
bool hasNormals{ normals.size() > 0 };
bool hasColors{ mesh.colors.size() > 0 };
bool hasTexCoords{ mesh.texCoords.size() > 0 };
bool hasTexCoords1{ mesh.texCoords1.size() > 0 };
bool hasPerFaceMaterials{ mesh.parts.size() > 1 };
bool needsOriginalIndices{ (!mesh.clusterIndices.empty() || !mesh.blendshapes.empty()) && mesh.originalIndices.size() > 0 };
int normalsAttributeID { -1 };
int colorsAttributeID { -1 };
int texCoordsAttributeID { -1 };
int texCoords1AttributeID { -1 };
int faceMaterialAttributeID { -1 };
int originalIndexAttributeID { -1 };
const int positionAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::POSITION,
3, draco::DT_FLOAT32);
if (needsOriginalIndices) {
originalIndexAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_ORIGINAL_INDEX,
1, draco::DT_INT32);
}
if (hasNormals) {
normalsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::NORMAL,
3, draco::DT_FLOAT32);
}
if (hasColors) {
colorsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::COLOR,
3, draco::DT_FLOAT32);
}
if (hasTexCoords) {
texCoordsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::TEX_COORD,
2, draco::DT_FLOAT32);
}
if (hasTexCoords1) {
texCoords1AttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_TEX_COORD_1,
2, draco::DT_FLOAT32);
}
if (hasPerFaceMaterials) {
faceMaterialAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_MATERIAL_ID,
1, draco::DT_UINT16);
}
auto partIndex = 0;
draco::FaceIndex face;
uint16_t materialID;
for (auto& part : mesh.parts) {
auto materialIt = std::find(materialList.cbegin(), materialList.cend(), QVariant(part.materialID).toByteArray());
materialID = (uint16_t)(materialIt - materialList.cbegin());
auto addFace = [&](const QVector<int>& indices, int index, draco::FaceIndex face) {
int32_t idx0 = indices[index];
int32_t idx1 = indices[index + 1];
int32_t idx2 = indices[index + 2];
if (hasPerFaceMaterials) {
meshBuilder.SetPerFaceAttributeValueForFace(faceMaterialAttributeID, face, &materialID);
}
meshBuilder.SetAttributeValuesForFace(positionAttributeID, face,
&mesh.vertices[idx0], &mesh.vertices[idx1],
&mesh.vertices[idx2]);
if (needsOriginalIndices) {
meshBuilder.SetAttributeValuesForFace(originalIndexAttributeID, face,
&mesh.originalIndices[idx0],
&mesh.originalIndices[idx1],
&mesh.originalIndices[idx2]);
}
if (hasNormals) {
meshBuilder.SetAttributeValuesForFace(normalsAttributeID, face,
&normals[idx0], &normals[idx1],
&normals[idx2]);
}
if (hasColors) {
meshBuilder.SetAttributeValuesForFace(colorsAttributeID, face,
&mesh.colors[idx0], &mesh.colors[idx1],
&mesh.colors[idx2]);
}
if (hasTexCoords) {
meshBuilder.SetAttributeValuesForFace(texCoordsAttributeID, face,
&mesh.texCoords[idx0], &mesh.texCoords[idx1],
&mesh.texCoords[idx2]);
}
if (hasTexCoords1) {
meshBuilder.SetAttributeValuesForFace(texCoords1AttributeID, face,
&mesh.texCoords1[idx0], &mesh.texCoords1[idx1],
&mesh.texCoords1[idx2]);
}
};
for (int i = 0; (i + 2) < part.quadTrianglesIndices.size(); i += 3) {
addFace(part.quadTrianglesIndices, i, face++);
}
for (int i = 0; (i + 2) < part.triangleIndices.size(); i += 3) {
addFace(part.triangleIndices, i, face++);
}
partIndex++;
}
auto dracoMesh = meshBuilder.Finalize();
if (!dracoMesh) {
qCWarning(model_baker) << "Failed to finalize the baking of a draco Geometry node";
return std::unique_ptr<draco::Mesh>();
}
// we need to modify unique attribute IDs for custom attributes
// so the attributes are easily retrievable on the other side
if (hasPerFaceMaterials) {
dracoMesh->attribute(faceMaterialAttributeID)->set_unique_id(DRACO_ATTRIBUTE_MATERIAL_ID);
}
if (hasTexCoords1) {
dracoMesh->attribute(texCoords1AttributeID)->set_unique_id(DRACO_ATTRIBUTE_TEX_COORD_1);
}
if (needsOriginalIndices) {
dracoMesh->attribute(originalIndexAttributeID)->set_unique_id(DRACO_ATTRIBUTE_ORIGINAL_INDEX);
}
return dracoMesh;
}
void BuildDracoMeshTask::configure(const Config& config) {
// Nothing to configure yet
}
void BuildDracoMeshTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& meshes = input.get0();
const auto& normalsPerMesh = input.get1();
const auto& tangentsPerMesh = input.get2();
auto& dracoBytesPerMesh = output.edit0();
auto& materialLists = output.edit1();
dracoBytesPerMesh.reserve(meshes.size());
materialLists.reserve(meshes.size());
for (size_t i = 0; i < meshes.size(); i++) {
const auto& mesh = meshes[i];
const auto& normals = baker::safeGet(normalsPerMesh, i);
const auto& tangents = baker::safeGet(tangentsPerMesh, i);
dracoBytesPerMesh.emplace_back();
auto& dracoBytes = dracoBytesPerMesh.back();
materialLists.push_back(createMaterialList(mesh));
const auto& materialList = materialLists.back();
auto dracoMesh = createDracoMesh(mesh, normals, tangents, materialList);
if (dracoMesh) {
draco::Encoder encoder;
encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, 14);
encoder.SetAttributeQuantization(draco::GeometryAttribute::TEX_COORD, 12);
encoder.SetAttributeQuantization(draco::GeometryAttribute::NORMAL, 10);
encoder.SetSpeedOptions(0, 5);
draco::EncoderBuffer buffer;
encoder.EncodeMeshToBuffer(*dracoMesh, &buffer);
dracoBytes = hifi::ByteArray(buffer.data(), (int)buffer.size());
}
}
}
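To make the material bookkeeping concrete (illustrative values, not from the diff): createMaterialList keeps each materialID once, in order of first appearance, and createDracoMesh stores each face's index into that list as DRACO_ATTRIBUTE_MATERIAL_ID.

// parts[0].materialID = "phong1"   -> faces written with material index 0
// parts[1].materialID = "lambert2" -> faces written with material index 1
// parts[2].materialID = "phong1"   -> material already listed, index 0 again
//
// createMaterialList(mesh) == { "phong1", "lambert2" }
// (this is the list later written into the DracoMesh node's MaterialList child)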

View file

@ -0,0 +1,39 @@
//
// BuildDracoMeshTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/02/20.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_BuildDracoMeshTask_h
#define hifi_BuildDracoMeshTask_h
#include <hfm/HFM.h>
#include <shared/HifiTypes.h>
#include "Engine.h"
#include "BakerTypes.h"
// BuildDracoMeshTask is disabled by default
class BuildDracoMeshConfig : public baker::JobConfig {
Q_OBJECT
public:
BuildDracoMeshConfig() : baker::JobConfig(false) {}
};
class BuildDracoMeshTask {
public:
using Config = BuildDracoMeshConfig;
using Input = baker::VaryingSet3<std::vector<hfm::Mesh>, baker::NormalsPerMesh, baker::TangentsPerMesh>;
using Output = baker::VaryingSet2<std::vector<hifi::ByteArray>, std::vector<std::vector<hifi::ByteArray>>>;
using JobModel = baker::Job::ModelIO<BuildDracoMeshTask, Input, Output, Config>;
void configure(const Config& config);
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_BuildDracoMeshTask_h
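Usage, as wired up in Baker.cpp in this commit:

const auto buildDracoMeshInputs = BuildDracoMeshTask::Input(meshesIn, normalsPerMesh, tangentsPerMesh).asVarying();
const auto buildDracoMeshOutputs = model.addJob<BuildDracoMeshTask>("BuildDracoMesh", buildDracoMeshInputs);
const auto dracoMeshes = buildDracoMeshOutputs.getN<BuildDracoMeshTask::Output>(0);
const auto materialLists = buildDracoMeshOutputs.getN<BuildDracoMeshTask::Output>(1);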

View file

@ -18,7 +18,7 @@
#include "Engine.h"
// The property "passthrough", when enabled, will let the input joints flow to the output unmodified, unlike the disabled property, which discards the data
class PrepareJointsTaskConfig : public baker::JobConfig {
class PrepareJointsConfig : public baker::JobConfig {
Q_OBJECT
Q_PROPERTY(bool passthrough MEMBER passthrough)
public:
@ -27,7 +27,7 @@ public:
class PrepareJointsTask {
public:
using Config = PrepareJointsTaskConfig;
using Config = PrepareJointsConfig;
using Input = baker::VaryingSet2<std::vector<hfm::Joint>, hifi::VariantHash /*mapping*/>;
using Output = baker::VaryingSet3<std::vector<hfm::Joint>, QMap<int, glm::quat> /*jointRotationOffsets*/, QHash<QString, int> /*jointIndices*/>;
using JobModel = baker::Job::ModelIO<PrepareJointsTask, Input, Output, Config>;

View file

@ -246,6 +246,7 @@ void GeometryReader::run() {
HFMModel::Pointer hfmModel;
QVariantHash serializerMapping = _mapping;
serializerMapping["combineParts"] = _combineParts;
serializerMapping["deduplicateIndices"] = true;
if (_url.path().toLower().endsWith(".gz")) {
QByteArray uncompressedData;

View file

@ -2,7 +2,7 @@ set(TARGET_NAME oven)
setup_hifi_project(Widgets Gui Concurrent)
link_hifi_libraries(shared shaders image gpu ktx fbx hfm baking graphics networking material-networking)
link_hifi_libraries(shared shaders image gpu ktx fbx hfm baking graphics networking material-networking model-baker task)
setup_memory_debugger()

View file

@ -22,6 +22,9 @@
#include <ResourceRequestObserver.h>
#include <ResourceCache.h>
#include <material-networking/TextureCache.h>
#include <hfm/ModelFormatRegistry.h>
#include <FBXSerializer.h>
#include <OBJSerializer.h>
#include "MaterialBaker.h"
@ -43,6 +46,12 @@ Oven::Oven() {
MaterialBaker::setNextOvenWorkerThreadOperator([] {
return Oven::instance().getNextWorkerThread();
});
{
auto modelFormatRegistry = DependencyManager::set<ModelFormatRegistry>();
modelFormatRegistry->addFormat(FBXSerializer());
modelFormatRegistry->addFormat(OBJSerializer());
}
}
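The formats registered here are what ModelBaker::bakeSourceCopy later looks up (a sketch reusing the calls shown earlier; modelData, mapping, and _modelURL are assumed):

auto serializer = DependencyManager::get<ModelFormatRegistry>()->getSerializerForMediaType(modelData, _modelURL, "");
if (serializer) {
    hfm::Model::Pointer loadedModel = serializer->read(modelData, mapping, _modelURL);
}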
Oven::~Oven() {

View file

@ -44,10 +44,12 @@ bool vhacd::VHACDUtil::loadFBX(const QString filename, HFMModel& result) {
try {
hifi::ByteArray fbxContents = fbx.readAll();
HFMModel::Pointer hfmModel;
hifi::VariantHash mapping;
mapping["deduplicateIndices"] = true;
if (filename.toLower().endsWith(".obj")) {
hfmModel = OBJSerializer().read(fbxContents, QVariantHash(), filename);
hfmModel = OBJSerializer().read(fbxContents, mapping, filename);
} else if (filename.toLower().endsWith(".fbx")) {
hfmModel = FBXSerializer().read(fbxContents, QVariantHash(), filename);
hfmModel = FBXSerializer().read(fbxContents, mapping, filename);
} else {
qWarning() << "file has unknown extension" << filename;
return false;