Merge pull request #16194 from sabrina-shanman/instancing_shape
(DEV-557) Introduce hfm::Shape
Commit 092a7e96f9
25 changed files with 144 additions and 102 deletions
@@ -144,10 +144,10 @@ void ScriptableAvatar::update(float deltatime) {
     }
     _animationDetails.currentFrame = currentFrame;

-    const QVector<HFMJoint>& modelJoints = _bind->getHFMModel().joints;
+    const std::vector<HFMJoint>& modelJoints = _bind->getHFMModel().joints;
     QStringList animationJointNames = _animation->getJointNames();

-    const int nJoints = modelJoints.size();
+    const auto nJoints = (int)modelJoints.size();
     if (_jointData.size() != nJoints) {
         _jointData.resize(nJoints);
     }

@@ -80,7 +80,7 @@ QVariantHash ModelPropertiesDialog::getMapping() const {

     // update the joint indices
     QVariantHash jointIndices;
-    for (int i = 0; i < _hfmModel.joints.size(); i++) {
+    for (size_t i = 0; i < _hfmModel.joints.size(); i++) {
         jointIndices.insert(_hfmModel.joints.at(i).name, QString::number(i));
     }
     mapping.insert(JOINT_INDEX_FIELD, jointIndices);

@@ -99,12 +99,12 @@ void AvatarDoctor::startDiagnosing() {
     }

     // RIG
-    if (avatarModel.joints.isEmpty()) {
+    if (avatarModel.joints.empty()) {
         addError("Avatar has no rig.", "no-rig");
     } else {
         auto jointNames = avatarModel.getJointNames();

-        if (avatarModel.joints.length() > NETWORKED_JOINTS_LIMIT) {
+        if (avatarModel.joints.size() > NETWORKED_JOINTS_LIMIT) {
             addError(tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), "maximum-bone-limit");
         }
         // Avatar does not have Hips bone mapped

@@ -248,9 +248,9 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
         shapeInfo.setParams(type, dimensions, resource->getURL().toString());
     } else if (type >= SHAPE_TYPE_SIMPLE_HULL && type <= SHAPE_TYPE_STATIC_MESH) {
         const HFMModel& hfmModel = resource->getHFMModel();
-        int numHFMMeshes = hfmModel.meshes.size();
+        uint32_t numHFMMeshes = (uint32_t)hfmModel.meshes.size();
         int totalNumVertices = 0;
-        for (int i = 0; i < numHFMMeshes; i++) {
+        for (uint32_t i = 0; i < numHFMMeshes; i++) {
             const HFMMesh& mesh = hfmModel.meshes.at(i);
             totalNumVertices += mesh.vertices.size();
         }
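Note: the CollisionPick hunk above shows a pattern that recurs throughout this commit. The hfm containers move to std::vector, whose size() returns size_t, so loop counters become uint32_t (or size_t) with an explicit cast instead of int. A minimal, self-contained sketch of that idiom, using hypothetical stand-in types rather than code from this commit:

#include <cstdint>
#include <vector>

struct Mesh { std::vector<float> vertices; };  // stand-in for hfm::Mesh

// Count vertices the way the updated loops do: cast size() once, then
// keep the counter unsigned so the comparison stays sign-correct.
size_t countVertices(const std::vector<Mesh>& meshes) {
    uint32_t numMeshes = (uint32_t)meshes.size();
    size_t total = 0;
    for (uint32_t i = 0; i < numMeshes; i++) {
        total += meshes[i].vertices.size();
    }
    return total;
}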
@@ -312,7 +312,7 @@ void SkeletonModel::computeBoundingShape() {
    }

    const HFMModel& hfmModel = getHFMModel();
-   if (hfmModel.joints.isEmpty() || _rig.indexOfJoint("Hips") == -1) {
+   if (hfmModel.joints.empty() || _rig.indexOfJoint("Hips") == -1) {
        // rootJointIndex == -1 if the avatar model has no skeleton
        return;
    }

@@ -90,11 +90,11 @@ void FBXBaker::replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dra
    }
}

-void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
+void FBXBaker::rewriteAndBakeSceneModels(const std::vector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
    std::vector<int> meshIndexToRuntimeOrder;
-   auto meshCount = (int)meshes.size();
+   auto meshCount = (uint32_t)meshes.size();
    meshIndexToRuntimeOrder.resize(meshCount);
-   for (int i = 0; i < meshCount; i++) {
+   for (uint32_t i = 0; i < meshCount; i++) {
        meshIndexToRuntimeOrder[meshes[i].meshIndex] = i;
    }

@@ -33,7 +33,7 @@ protected:
    virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;

private:
-   void rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
+   void rewriteAndBakeSceneModels(const std::vector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
    void replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
};

@@ -258,7 +258,7 @@ void MaterialBaker::addTexture(const QString& materialName, image::TextureUsage:
    }
};

-void MaterialBaker::setMaterials(const QHash<QString, hfm::Material>& materials, const QString& baseURL) {
+void MaterialBaker::setMaterials(const std::vector<hfm::Material>& materials, const QString& baseURL) {
    _materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(), [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
    for (auto& material : materials) {
        _materialResource->parsedMaterials.names.push_back(material.name.toStdString());

@@ -32,7 +32,7 @@ public:
    bool isURL() const { return _isURL; }
    QString getBakedMaterialData() const { return _bakedMaterialData; }

-   void setMaterials(const QHash<QString, hfm::Material>& materials, const QString& baseURL);
+   void setMaterials(const std::vector<hfm::Material>& materials, const QString& baseURL);
    void setMaterials(const NetworkMaterialResourcePointer& materialResource);

    NetworkMaterialResourcePointer getNetworkMaterialResource() const { return _materialResource; }

@@ -259,7 +259,7 @@ void ModelBaker::bakeSourceCopy() {
        return;
    }

-   if (!_hfmModel->materials.isEmpty()) {
+   if (!_hfmModel->materials.empty()) {
        _materialBaker = QSharedPointer<MaterialBaker>(
            new MaterialBaker(_modelURL.fileName(), true, _bakedOutputDir),
            &MaterialBaker::deleteLater

@@ -106,11 +106,16 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& h
            materialNode.name = MATERIAL_NODE_NAME;
            if (hfmModel->materials.size() == 1) {
                // case when no material information is provided, OBJSerializer considers it as a single default material
-               for (auto& materialID : hfmModel->materials.keys()) {
-                   setMaterialNodeProperties(materialNode, materialID, hfmModel);
+               for (auto& material : hfmModel->materials) {
+                   setMaterialNodeProperties(materialNode, material.name, material, hfmModel);
                }
            } else {
-               setMaterialNodeProperties(materialNode, meshPart.materialID, hfmModel);
+               for (auto& material : hfmModel->materials) {
+                   if (material.name == meshPart.materialID) {
+                       setMaterialNodeProperties(materialNode, meshPart.materialID, material, hfmModel);
+                       break;
+                   }
+               }
            }

            objectNode.children.append(materialNode);
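With materials now stored as a std::vector<hfm::Material> rather than a QHash keyed by name, the OBJBaker hunk above resolves meshPart.materialID with a linear scan. A short sketch of the same lookup written with std::find_if; this is illustrative only, with simplified stand-in types, not code from this commit:

#include <algorithm>
#include <string>
#include <vector>

struct Material { std::string name; };  // trimmed stand-in for hfm::Material

// Return the first material whose name matches, or nullptr if none does.
const Material* findMaterialByName(const std::vector<Material>& materials,
                                   const std::string& materialID) {
    auto it = std::find_if(materials.begin(), materials.end(),
                           [&](const Material& m) { return m.name == materialID; });
    return it != materials.end() ? &*it : nullptr;
}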
@@ -153,12 +158,10 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& h
    }

// Set properties for material nodes
-void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel) {
+void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, const QString& materialName, const hfm::Material& material, const hfm::Model::Pointer& hfmModel) {
    auto materialID = nextNodeID();
    _materialIDs.push_back(materialID);
-   materialNode.properties = { materialID, material, MESH };
+   materialNode.properties = { materialID, materialName, MESH };

-   HFMMaterial currentMaterial = hfmModel->materials[material];
-
    // Setting the hierarchy: Material -> Properties70 -> P -> Properties
    FBXNode properties70Node;

@@ -170,7 +173,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
        pNodeDiffuseColor.name = P_NODE_NAME;
        pNodeDiffuseColor.properties.append({
            "DiffuseColor", "Color", "", "A",
-           currentMaterial.diffuseColor[0], currentMaterial.diffuseColor[1], currentMaterial.diffuseColor[2]
+           material.diffuseColor[0], material.diffuseColor[1], material.diffuseColor[2]
        });
    }
    properties70Node.children.append(pNodeDiffuseColor);

@@ -181,7 +184,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
        pNodeSpecularColor.name = P_NODE_NAME;
        pNodeSpecularColor.properties.append({
            "SpecularColor", "Color", "", "A",
-           currentMaterial.specularColor[0], currentMaterial.specularColor[1], currentMaterial.specularColor[2]
+           material.specularColor[0], material.specularColor[1], material.specularColor[2]
        });
    }
    properties70Node.children.append(pNodeSpecularColor);

@@ -192,7 +195,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
        pNodeShininess.name = P_NODE_NAME;
        pNodeShininess.properties.append({
            "Shininess", "Number", "", "A",
-           currentMaterial.shininess
+           material.shininess
        });
    }
    properties70Node.children.append(pNodeShininess);

@@ -203,7 +206,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
        pNodeOpacity.name = P_NODE_NAME;
        pNodeOpacity.properties.append({
            "Opacity", "Number", "", "A",
-           currentMaterial.opacity
+           material.opacity
        });
    }
    properties70Node.children.append(pNodeOpacity);

@@ -28,7 +28,7 @@ protected:

private:
    void createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& hfmModel, const hifi::ByteArray& dracoMesh);
-   void setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel);
+   void setMaterialNodeProperties(FBXNode& materialNode, const QString& materialName, const hfm::Material& material, const hfm::Model::Pointer& hfmModel);
    NodeID nextNodeID() { return _nodeID++; }

    NodeID _nodeID { 0 };

@@ -473,11 +473,11 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
        // compute meshPart local transforms
        QVector<glm::mat4> localTransforms;
        const HFMModel& hfmModel = model->getHFMModel();
-       int numHFMMeshes = hfmModel.meshes.size();
+       uint32_t numHFMMeshes = (uint32_t)hfmModel.meshes.size();
        int totalNumVertices = 0;
        glm::vec3 dimensions = getScaledDimensions();
        glm::mat4 invRegistraionOffset = glm::translate(dimensions * (getRegistrationPoint() - ENTITY_ITEM_DEFAULT_REGISTRATION_POINT));
-       for (int i = 0; i < numHFMMeshes; i++) {
+       for (uint32_t i = 0; i < numHFMMeshes; i++) {
            const HFMMesh& mesh = hfmModel.meshes.at(i);
            if (mesh.clusters.size() > 0) {
                const HFMCluster& cluster = mesh.clusters.at(0);

@@ -490,7 +490,7 @@ void ParticleEffectEntityRenderer::fetchGeometryResource() {
void ParticleEffectEntityRenderer::computeTriangles(const hfm::Model& hfmModel) {
    PROFILE_RANGE(render, __FUNCTION__);

-   int numberOfMeshes = hfmModel.meshes.size();
+   uint32_t numberOfMeshes = (uint32_t)hfmModel.meshes.size();

    _hasComputedTriangles = true;
    _triangleInfo.triangles.clear();

@@ -500,11 +500,11 @@ void ParticleEffectEntityRenderer::computeTriangles(const hfm::Model& hfmModel)
    float minArea = FLT_MAX;
    AABox bounds;

-   for (int i = 0; i < numberOfMeshes; i++) {
+   for (uint32_t i = 0; i < numberOfMeshes; i++) {
        const HFMMesh& mesh = hfmModel.meshes.at(i);

-       const int numberOfParts = mesh.parts.size();
-       for (int j = 0; j < numberOfParts; j++) {
+       const uint32_t numberOfParts = (uint32_t)mesh.parts.size();
+       for (uint32_t j = 0; j < numberOfParts; j++) {
            const HFMMeshPart& part = mesh.parts.at(j);

            const int INDICES_PER_TRIANGLE = 3;

@@ -1288,7 +1288,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
            const FBXModel& fbxModel = fbxModels[modelID];
            HFMJoint joint;
            joint.parentIndex = fbxModel.parentIndex;
-           int jointIndex = hfmModel.joints.size();
+           uint32_t jointIndex = (uint32_t)hfmModel.joints.size();

            joint.translation = fbxModel.translation; // these are usually in centimeters
            joint.preTransform = fbxModel.preTransform;

@@ -1357,11 +1357,11 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
                }

            }
-           hfmModel.joints.append(joint);
+           hfmModel.joints.push_back(joint);
        }

        // NOTE: shapeVertices are in joint-frame
-       hfmModel.shapeVertices.resize(std::max(1, hfmModel.joints.size()) );
+       hfmModel.shapeVertices.resize(std::max((size_t)1, hfmModel.joints.size()) );

        hfmModel.bindExtents.reset();
        hfmModel.meshExtents.reset();

@@ -1400,7 +1400,10 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
        }
    }
#endif
-   hfmModel.materials = _hfmMaterials;
+
+   for (auto materialIt = _hfmMaterials.cbegin(); materialIt != _hfmMaterials.cend(); ++materialIt) {
+       hfmModel.materials.push_back(materialIt.value());
+   }

    // see if any materials have texture children
    bool materialsHaveTextures = checkMaterialsHaveTextures(_hfmMaterials, _textureFilenames, _connectionChildMap);

@@ -1610,7 +1613,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
                // transform cluster vertices to joint-frame and save for later
                glm::mat4 meshToJoint = glm::inverse(joint.bindTransform) * modelTransform;
                ShapeVertices& points = hfmModel.shapeVertices.at(jointIndex);
-               foreach (const glm::vec3& vertex, extracted.mesh.vertices) {
+               for (const glm::vec3& vertex : extracted.mesh.vertices) {
                    const glm::mat4 vertexTransform = meshToJoint * glm::translate(vertex);
                    points.push_back(extractTranslation(vertexTransform));
                }

@@ -1624,8 +1627,8 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
            }
        }

-       hfmModel.meshes.append(extracted.mesh);
-       int meshIndex = hfmModel.meshes.size() - 1;
+       hfmModel.meshes.push_back(extracted.mesh);
+       uint32_t meshIndex = (uint32_t)hfmModel.meshes.size() - 1;
        meshIDsToMeshIndices.insert(it.key(), meshIndex);
    }

@@ -500,7 +500,7 @@ ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& me
                data.extracted.partMaterialTextures.append(materialTexture);
            }

-           partIndexPlusOne = data.extracted.mesh.parts.size();
+           partIndexPlusOne = (int)data.extracted.mesh.parts.size();
        }

        // give the mesh part this index

@@ -535,7 +535,7 @@ ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& me
            if (partIndex == 0) {
                data.extracted.partMaterialTextures.append(materialTexture);
                data.extracted.mesh.parts.resize(data.extracted.mesh.parts.size() + 1);
-               partIndex = data.extracted.mesh.parts.size();
+               partIndex = (int)data.extracted.mesh.parts.size();
            }
            HFMMeshPart& part = data.extracted.mesh.parts[partIndex - 1];

@@ -77,7 +77,7 @@ FST* FST::createFSTFromModel(const QString& fstPath, const QString& modelFilePat
    mapping.insert(JOINT_FIELD, joints);

    QVariantHash jointIndices;
-   for (int i = 0; i < hfmModel.joints.size(); i++) {
+   for (size_t i = 0; i < (size_t)hfmModel.joints.size(); i++) {
        jointIndices.insert(hfmModel.joints.at(i).name, QString::number(i));
    }
    mapping.insert(JOINT_INDEX_FIELD, jointIndices);

@@ -1002,8 +1002,8 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::VariantHash&

    for (int i = 0; i < materialIDs.size(); ++i) {
        QString& matid = materialIDs[i];
-       hfmModel.materials[matid] = HFMMaterial();
-       HFMMaterial& hfmMaterial = hfmModel.materials[matid];
+       hfmModel.materials.emplace_back();
+       HFMMaterial& hfmMaterial = hfmModel.materials.back();
        hfmMaterial._material = std::make_shared<graphics::Material>();
        hfmMaterial.name = hfmMaterial.materialID = matid;
        setHFMMaterial(hfmMaterial, _file.materials[i]);

@@ -1018,7 +1018,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::VariantHash&

        if (node.defined["mesh"]) {

-           hfmModel.meshes.append(HFMMesh());
+           hfmModel.meshes.push_back(HFMMesh());
            HFMMesh& mesh = hfmModel.meshes[hfmModel.meshes.size() - 1];
            if (!hfmModel.hasSkeletonJoints) {
                HFMCluster cluster;

@@ -1613,7 +1613,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::VariantHash&
            hfmModel.meshExtents.minimum -= glm::vec3(EPSILON, EPSILON, EPSILON);
            hfmModel.meshExtents.maximum += glm::vec3(EPSILON, EPSILON, EPSILON);

-           mesh.meshIndex = hfmModel.meshes.size();
+           mesh.meshIndex = (int)hfmModel.meshes.size();
        }
        ++nodecount;
    }

@@ -2036,11 +2036,11 @@ void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
    qCDebug(modelformat) << " meshExtents.size() = " << hfmModel.meshExtents.size();

    qCDebug(modelformat) << " jointIndices.size() =" << hfmModel.jointIndices.size();
-   qCDebug(modelformat) << " joints.count() =" << hfmModel.joints.count();
+   qCDebug(modelformat) << " joints.count() =" << hfmModel.joints.size();
    qCDebug(modelformat) << "---------------- Meshes ----------------";
-   qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.count();
+   qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.size();
    qCDebug(modelformat) << " blendshapeChannelNames = " << hfmModel.blendshapeChannelNames;
-   foreach(HFMMesh mesh, hfmModel.meshes) {
+   for (const HFMMesh& mesh : hfmModel.meshes) {
        qCDebug(modelformat) << "\n";
        qCDebug(modelformat) << " meshpointer =" << mesh._mesh.get();
        qCDebug(modelformat) << " meshindex =" << mesh.meshIndex;

@@ -2054,7 +2054,7 @@ void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
        qCDebug(modelformat) << " clusterIndices.count() =" << mesh.clusterIndices.count();
        qCDebug(modelformat) << " clusterWeights.count() =" << mesh.clusterWeights.count();
        qCDebug(modelformat) << " modelTransform =" << mesh.modelTransform;
-       qCDebug(modelformat) << " parts.count() =" << mesh.parts.count();
+       qCDebug(modelformat) << " parts.count() =" << mesh.parts.size();
        qCDebug(modelformat) << "---------------- Meshes (blendshapes)--------";
        foreach(HFMBlendshape bshape, mesh.blendshapes) {
            qCDebug(modelformat) << "\n";

@@ -492,8 +492,8 @@ bool OBJSerializer::parseOBJGroup(OBJTokenizer& tokenizer, const hifi::VariantHa
                                  float& scaleGuess, bool combineParts) {
    FaceGroup faces;
    HFMMesh& mesh = hfmModel.meshes[0];
-   mesh.parts.append(HFMMeshPart());
-   HFMMeshPart& meshPart = mesh.parts.last();
+   mesh.parts.push_back(HFMMeshPart());
+   HFMMeshPart& meshPart = mesh.parts.back();
    bool sawG = false;
    bool result = true;
    int originalFaceCountForDebugging = 0;

@@ -501,7 +501,7 @@ bool OBJSerializer::parseOBJGroup(OBJTokenizer& tokenizer, const hifi::VariantHa
    bool anyVertexColor { false };
    int vertexCount { 0 };

-   setMeshPartDefaults(meshPart, QString("dontknow") + QString::number(mesh.parts.count()));
+   setMeshPartDefaults(meshPart, QString("dontknow") + QString::number(mesh.parts.size()));

    while (true) {
        int tokenType = tokenizer.nextToken();

@@ -676,7 +676,7 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
    _url = url;
    bool combineParts = mapping.value("combineParts").toBool();
    hfmModel.meshExtents.reset();
-   hfmModel.meshes.append(HFMMesh());
+   hfmModel.meshes.push_back(HFMMesh());

    try {
        // call parseOBJGroup as long as it's returning true. Each successful call will

@@ -706,8 +706,8 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
        mesh.clusters.append(cluster);

        QMap<QString, int> materialMeshIdMap;
-       QVector<HFMMeshPart> hfmMeshParts;
-       for (int i = 0, meshPartCount = 0; i < mesh.parts.count(); i++, meshPartCount++) {
+       std::vector<HFMMeshPart> hfmMeshParts;
+       for (uint32_t i = 0, meshPartCount = 0; i < (uint32_t)mesh.parts.size(); i++, meshPartCount++) {
            HFMMeshPart& meshPart = mesh.parts[i];
            FaceGroup faceGroup = faceGroups[meshPartCount];
            bool specifiesUV = false;

@@ -718,8 +718,8 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
                // Create a new HFMMesh for this material mapping.
                materialMeshIdMap.insert(face.materialName, materialMeshIdMap.count());

-               hfmMeshParts.append(HFMMeshPart());
-               HFMMeshPart& meshPartNew = hfmMeshParts.last();
+               hfmMeshParts.push_back(HFMMeshPart());
+               HFMMeshPart& meshPartNew = hfmMeshParts.back();
                meshPartNew.quadIndices = QVector<int>(meshPart.quadIndices); // Copy over quad indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
                meshPartNew.quadTrianglesIndices = QVector<int>(meshPart.quadTrianglesIndices); // Copy over quad triangulated indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
                meshPartNew.triangleIndices = QVector<int>(meshPart.triangleIndices); // Copy over triangle indices.

@@ -752,11 +752,11 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
        }

        // clean up old mesh parts.
-       int unmodifiedMeshPartCount = mesh.parts.count();
+       auto unmodifiedMeshPartCount = (uint32_t)mesh.parts.size();
        mesh.parts.clear();
-       mesh.parts = QVector<HFMMeshPart>(hfmMeshParts);
+       mesh.parts = hfmMeshParts;

-       for (int i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
+       for (uint32_t i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
            FaceGroup faceGroup = faceGroups[meshPartCount];

            // Now that each mesh has been created with its own unique material mappings, fill them with data (vertex data is duplicated, face data is not).

@@ -892,11 +892,12 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
            continue;
        }

-       HFMMaterial& hfmMaterial = hfmModel.materials[materialID] = HFMMaterial(objMaterial.diffuseColor,
+       hfmModel.materials.emplace_back(objMaterial.diffuseColor,
                                        objMaterial.specularColor,
                                        objMaterial.emissiveColor,
                                        objMaterial.shininess,
                                        objMaterial.opacity);
+       HFMMaterial& hfmMaterial = hfmModel.materials.back();

        hfmMaterial.name = materialID;
        hfmMaterial.materialID = materialID;

@@ -1003,7 +1004,7 @@ void hfmDebugDump(const HFMModel& hfmModel) {
    qCDebug(modelformat) << "---------------- hfmModel ----------------";
    qCDebug(modelformat) << " hasSkeletonJoints =" << hfmModel.hasSkeletonJoints;
    qCDebug(modelformat) << " offset =" << hfmModel.offset;
-   qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.count();
+   qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.size();
    foreach (HFMMesh mesh, hfmModel.meshes) {
        qCDebug(modelformat) << " vertices.count() =" << mesh.vertices.count();
        qCDebug(modelformat) << " colors.count() =" << mesh.colors.count();

@@ -1021,7 +1022,7 @@ void hfmDebugDump(const HFMModel& hfmModel) {
        qCDebug(modelformat) << " clusterWeights.count() =" << mesh.clusterWeights.count();
        qCDebug(modelformat) << " meshExtents =" << mesh.meshExtents;
        qCDebug(modelformat) << " modelTransform =" << mesh.modelTransform;
-       qCDebug(modelformat) << " parts.count() =" << mesh.parts.count();
+       qCDebug(modelformat) << " parts.count() =" << mesh.parts.size();
        foreach (HFMMeshPart meshPart, mesh.parts) {
            qCDebug(modelformat) << " quadIndices.count() =" << meshPart.quadIndices.count();
            qCDebug(modelformat) << " triangleIndices.count() =" << meshPart.triangleIndices.count();

@@ -1046,7 +1047,7 @@ void hfmDebugDump(const HFMModel& hfmModel) {
    }

    qCDebug(modelformat) << " jointIndices =" << hfmModel.jointIndices;
-   qCDebug(modelformat) << " joints.count() =" << hfmModel.joints.count();
+   qCDebug(modelformat) << " joints.count() =" << hfmModel.joints.size();

    foreach (HFMJoint joint, hfmModel.joints) {

@@ -76,7 +76,7 @@ QStringList HFMModel::getJointNames() const {
}

bool HFMModel::hasBlendedMeshes() const {
-   if (!meshes.isEmpty()) {
+   if (!meshes.empty()) {
        foreach (const HFMMesh& mesh, meshes) {
            if (!mesh.blendshapes.isEmpty()) {
                return true;

@@ -166,11 +166,11 @@ void HFMModel::computeKdops() {
        glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
        glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3)
    };
-   if (joints.size() != (int)shapeVertices.size()) {
+   if (joints.size() != shapeVertices.size()) {
        return;
    }
    // now that all joints have been scanned compute a k-Dop bounding volume of mesh
-   for (int i = 0; i < joints.size(); ++i) {
+   for (size_t i = 0; i < joints.size(); ++i) {
        HFMJoint& joint = joints[i];

        // NOTE: points are in joint-frame

@@ -228,7 +228,7 @@ public:
class Mesh {
public:

-   QVector<MeshPart> parts;
+   std::vector<MeshPart> parts;

    QVector<glm::vec3> vertices;
    QVector<glm::vec3> normals;

@@ -287,6 +287,36 @@ public:
    bool shouldInitCollisions() const { return _collisionsConfig.size() > 0; }
};

+class TransformNode {
+   uint32_t parent { 0 };
+   Transform transform;
+};
+
+// Formerly contained in hfm::Mesh
+class Deformer {
+   std::vector<uint16_t> indices;
+   std::vector<uint16_t> weights;
+};
+
+class DynamicTransform {
+   std::vector<uint32_t> deformers;
+   std::vector<Cluster> clusters; // affect the deformer of the same index
+   std::vector<uint32_t> blendshapes;
+   // There are also the meshExtents and modelTransform, which for now are left in hfm::Mesh
+};
+
+// The lightweight model part description.
+class Shape {
+public:
+   const static uint32_t UNDEFINED_KEY { (uint32_t)-1 };
+
+   uint32_t mesh { UNDEFINED_KEY };
+   uint32_t meshPart { UNDEFINED_KEY };
+   uint32_t material { UNDEFINED_KEY };
+   uint32_t transform { UNDEFINED_KEY }; // The static transform node when not taking into account rigging/skinning
+   uint32_t dynamicTransform { UNDEFINED_KEY };
+};
+
/// The runtime model format.
class Model {
public:

@@ -297,15 +327,20 @@ public:
    QString author;
    QString applicationName; ///< the name of the application that generated the model

-   QVector<Joint> joints;
+   std::vector<Shape> shapes;
+
+   std::vector<Mesh> meshes;
+   std::vector<Material> materials;
+   std::vector<Deformer> deformers;
+
+   std::vector<TransformNode> transforms;
+   std::vector<DynamicTransform> dynamicTransforms;
+
+   std::vector<Joint> joints;
    QHash<QString, int> jointIndices; ///< 1-based, so as to more easily detect missing indices
    bool hasSkeletonJoints;

-   QVector<Mesh> meshes;
    QVector<QString> scripts;

-   QHash<QString, Material> materials;
-
    glm::mat4 offset; // This includes offset, rotation, and scale as specified by the FST file

    glm::vec3 neckPivot;
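The classes added above are the core of this change: an hfm::Shape is just a bundle of indices into the Model's flat vectors, with UNDEFINED_KEY marking references that are absent. A minimal sketch of how a consumer might walk shapes and resolve those indices; the types here are hypothetical, trimmed-down stand-ins, not code from this commit:

#include <cstdint>
#include <vector>

struct Mesh {};      // stand-ins for hfm::Mesh / hfm::Material
struct Material {};

struct Shape {
    static constexpr uint32_t UNDEFINED_KEY = (uint32_t)-1;
    uint32_t mesh { UNDEFINED_KEY };
    uint32_t meshPart { UNDEFINED_KEY };
    uint32_t material { UNDEFINED_KEY };
};

struct Model {
    std::vector<Shape> shapes;
    std::vector<Mesh> meshes;
    std::vector<Material> materials;
};

// Resolve each shape's indices against the parallel vectors, skipping
// any reference that was left undefined.
void forEachShape(const Model& model) {
    for (const Shape& shape : model.shapes) {
        if (shape.mesh == Shape::UNDEFINED_KEY) {
            continue;  // shape without geometry
        }
        const Mesh& mesh = model.meshes[shape.mesh];
        const Material* material = (shape.material != Shape::UNDEFINED_KEY)
                ? &model.materials[shape.material]
                : nullptr;
        (void)mesh;
        (void)material;  // a real consumer would draw or bake here
    }
}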
@@ -32,15 +32,15 @@ namespace baker {

        void run(const BakeContextPointer& context, const Input& input, Output& output) {
            const auto& hfmModelIn = input;
-           output.edit0() = hfmModelIn->meshes.toStdVector();
+           output.edit0() = hfmModelIn->meshes;
            output.edit1() = hfmModelIn->originalURL;
            output.edit2() = hfmModelIn->meshIndicesToModelNames;
            auto& blendshapesPerMesh = output.edit3();
            blendshapesPerMesh.reserve(hfmModelIn->meshes.size());
-           for (int i = 0; i < hfmModelIn->meshes.size(); i++) {
+           for (size_t i = 0; i < hfmModelIn->meshes.size(); i++) {
                blendshapesPerMesh.push_back(hfmModelIn->meshes[i].blendshapes.toStdVector());
            }
-           output.edit4() = hfmModelIn->joints.toStdVector();
+           output.edit4() = hfmModelIn->joints;
        }
    };

@@ -107,8 +107,8 @@ namespace baker {

        void run(const BakeContextPointer& context, const Input& input, Output& output) {
            auto hfmModelOut = input.get0();
-           hfmModelOut->meshes = QVector<hfm::Mesh>::fromStdVector(input.get1());
-           hfmModelOut->joints = QVector<hfm::Joint>::fromStdVector(input.get2());
+           hfmModelOut->meshes = input.get1();
+           hfmModelOut->joints = input.get2();
            hfmModelOut->jointRotationOffsets = input.get3();
            hfmModelOut->jointIndices = input.get4();
            hfmModelOut->flowData = input.get5();

@@ -734,7 +734,7 @@ bool Model::replaceScriptableModelMeshPart(scriptable::ScriptableModelBasePointe
        for (int partID = 0; partID < numParts; partID++) {
            HFMMeshPart part;
            part.triangleIndices = buffer_helpers::bufferToVector<int>(mesh._mesh->getIndexBuffer(), "part.triangleIndices");
-           mesh.parts << part;
+           mesh.parts.push_back(part);
        }
        {
            foreach (const glm::vec3& vertex, mesh.vertices) {

@@ -745,7 +745,7 @@ bool Model::replaceScriptableModelMeshPart(scriptable::ScriptableModelBasePointe
                mesh.meshExtents.maximum = glm::max(mesh.meshExtents.maximum, transformedVertex);
            }
        }
-       hfmModel.meshes << mesh;
+       hfmModel.meshes.push_back(mesh);
    }
    calculateTriangleSets(hfmModel);
}

@@ -762,9 +762,9 @@ scriptable::ScriptableModelBase Model::getScriptableModel() {
    }

    const HFMModel& hfmModel = getHFMModel();
-   int numberOfMeshes = hfmModel.meshes.size();
+   uint32_t numberOfMeshes = (uint32_t)hfmModel.meshes.size();
    int shapeID = 0;
-   for (int i = 0; i < numberOfMeshes; i++) {
+   for (uint32_t i = 0; i < numberOfMeshes; i++) {
        const HFMMesh& hfmMesh = hfmModel.meshes.at(i);
        if (auto mesh = hfmMesh._mesh) {
            result.append(mesh);

@@ -795,20 +795,20 @@ scriptable::ScriptableModelBase Model::getScriptableModel() {
void Model::calculateTriangleSets(const HFMModel& hfmModel) {
    PROFILE_RANGE(render, __FUNCTION__);

-   int numberOfMeshes = hfmModel.meshes.size();
+   uint32_t numberOfMeshes = (uint32_t)hfmModel.meshes.size();

    _triangleSetsValid = true;
    _modelSpaceMeshTriangleSets.clear();
    _modelSpaceMeshTriangleSets.resize(numberOfMeshes);

-   for (int i = 0; i < numberOfMeshes; i++) {
+   for (uint32_t i = 0; i < numberOfMeshes; i++) {
        const HFMMesh& mesh = hfmModel.meshes.at(i);

-       const int numberOfParts = mesh.parts.size();
+       const uint32_t numberOfParts = (uint32_t)mesh.parts.size();
        auto& meshTriangleSets = _modelSpaceMeshTriangleSets[i];
        meshTriangleSets.resize(numberOfParts);

-       for (int j = 0; j < numberOfParts; j++) {
+       for (uint32_t j = 0; j < numberOfParts; j++) {
            const HFMMeshPart& part = mesh.parts.at(j);

            auto& partTriangleSet = meshTriangleSets[j];

@@ -154,7 +154,7 @@ void vhacd::VHACDUtil::fattenMesh(const HFMMesh& mesh, const glm::mat4& modelOff
        newMeshPart.triangleIndices << index0 << index3 << index1;
        newMeshPart.triangleIndices << index1 << index3 << index2;
        newMeshPart.triangleIndices << index2 << index3 << index0;
-       result.parts.append(newMeshPart);
+       result.parts.push_back(newMeshPart);
    }
}

@@ -259,8 +259,8 @@ void vhacd::VHACDUtil::getConvexResults(VHACD::IVHACD* convexifier, HFMMesh& res
        VHACD::IVHACD::ConvexHull hull;
        convexifier->GetConvexHull(j, hull);

-       resultMesh.parts.append(HFMMeshPart());
-       HFMMeshPart& resultMeshPart = resultMesh.parts.last();
+       resultMesh.parts.push_back(HFMMeshPart());
+       HFMMeshPart& resultMeshPart = resultMesh.parts.back();

        int hullIndexStart = resultMesh.vertices.size();
        resultMesh.vertices.reserve(hullIndexStart + hull.m_nPoints);

@@ -300,8 +300,8 @@ bool vhacd::VHACDUtil::computeVHACD(HFMModel& hfmModel,
    }

    // count the mesh-parts
-   int numParts = 0;
-   foreach (const HFMMesh& mesh, hfmModel.meshes) {
+   size_t numParts = 0;
+   for (const HFMMesh& mesh : hfmModel.meshes) {
        numParts += mesh.parts.size();
    }
    if (_verbose) {

@@ -311,8 +311,8 @@ bool vhacd::VHACDUtil::computeVHACD(HFMModel& hfmModel,
    VHACD::IVHACD * convexifier = VHACD::CreateVHACD();

    result.meshExtents.reset();
-   result.meshes.append(HFMMesh());
-   HFMMesh &resultMesh = result.meshes.last();
+   result.meshes.push_back(HFMMesh());
+   HFMMesh &resultMesh = result.meshes.back();

    const uint32_t POINT_STRIDE = 3;
    const uint32_t TRIANGLE_STRIDE = 3;

@@ -387,7 +387,7 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
    }

    if (verbose) {
-       int totalHulls = result.meshes[0].parts.size();
+       auto totalHulls = result.meshes[0].parts.size();
        qDebug() << "output file =" << outputFilename;
        qDebug() << "vertices =" << totalVertices;
        qDebug() << "triangles =" << totalTriangles;

@@ -402,7 +402,7 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
        HFMMesh result;

        // count the mesh-parts
-       unsigned int meshCount = 0;
+       size_t meshCount = 0;
        foreach (const HFMMesh& mesh, fbx.meshes) {
            meshCount += mesh.parts.size();
        }

@@ -412,7 +412,7 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
            vUtil.fattenMesh(mesh, fbx.offset, result);
        }

-       newFbx.meshes.append(result);
+       newFbx.meshes.push_back(result);
        writeOBJ(outputFilename, newFbx, outputCentimeters);
    }
}