Fix build warnings/errors

sabrina-shanman 2019-09-13 15:39:29 -07:00
parent b14593202c
commit a166b41216
11 changed files with 35 additions and 32 deletions
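
Note: the warnings addressed below are mostly implicit conversions and signed/unsigned comparisons: container sizes (size_t, or Qt's signed int sizes) assigned to int counters and compared against signed loop indices. A minimal sketch of the pattern, not taken from this repository (the Mesh type and countVertices function are hypothetical stand-ins), showing the warning and the explicit cast-to-uint32_t fix used throughout the commit:

// Minimal sketch, assuming a hypothetical Mesh type; not part of this commit.
#include <cstdint>
#include <vector>

struct Mesh { std::vector<float> vertices; };

size_t countVertices(const std::vector<Mesh>& meshes) {
    // Before: int numMeshes = meshes.size();              // narrows size_t to int
    //         for (int i = 0; i < meshes.size(); i++) {}  // warns: -Wsign-compare
    // After: keep the index unsigned and make the narrowing explicit.
    uint32_t numMeshes = (uint32_t)meshes.size();
    size_t total = 0;
    for (uint32_t i = 0; i < numMeshes; i++) {
        total += meshes[i].vertices.size();
    }
    return total;
}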

View file

@@ -248,9 +248,9 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
 shapeInfo.setParams(type, dimensions, resource->getURL().toString());
 } else if (type >= SHAPE_TYPE_SIMPLE_HULL && type <= SHAPE_TYPE_STATIC_MESH) {
 const HFMModel& hfmModel = resource->getHFMModel();
-int numHFMMeshes = hfmModel.meshes.size();
+uint32_t numHFMMeshes = (uint32_t)hfmModel.meshes.size();
 int totalNumVertices = 0;
-for (int i = 0; i < numHFMMeshes; i++) {
+for (uint32_t i = 0; i < numHFMMeshes; i++) {
 const HFMMesh& mesh = hfmModel.meshes.at(i);
 totalNumVertices += mesh.vertices.size();
 }

View file

@@ -90,11 +90,11 @@ void FBXBaker::replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dra
 }
 }
-void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
+void FBXBaker::rewriteAndBakeSceneModels(const std::vector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
 std::vector<int> meshIndexToRuntimeOrder;
-auto meshCount = (int)meshes.size();
+auto meshCount = (uint32_t)meshes.size();
 meshIndexToRuntimeOrder.resize(meshCount);
-for (int i = 0; i < meshCount; i++) {
+for (uint32_t i = 0; i < meshCount; i++) {
 meshIndexToRuntimeOrder[meshes[i].meshIndex] = i;
 }

View file

@@ -33,7 +33,7 @@ protected:
 virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;
 private:
-void rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
+void rewriteAndBakeSceneModels(const std::vector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
 void replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
 };

View file

@@ -106,11 +106,16 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& h
 materialNode.name = MATERIAL_NODE_NAME;
 if (hfmModel->materials.size() == 1) {
 // case when no material information is provided, OBJSerializer considers it as a single default material
-for (auto& materialID : hfmModel->materials.keys()) {
-setMaterialNodeProperties(materialNode, materialID, hfmModel);
+for (auto& material : hfmModel->materials) {
+setMaterialNodeProperties(materialNode, material.name, material, hfmModel);
 }
 } else {
-setMaterialNodeProperties(materialNode, meshPart.materialID, hfmModel);
+for (auto& material : hfmModel->materials) {
+if (material.name == meshPart.materialID) {
+setMaterialNodeProperties(materialNode, meshPart.materialID, material, hfmModel);
+break;
+}
+}
 }
 objectNode.children.append(materialNode);
@@ -153,12 +158,10 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& h
 }
 // Set properties for material nodes
-void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel) {
+void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, const QString& materialName, const hfm::Material& material, const hfm::Model::Pointer& hfmModel) {
 auto materialID = nextNodeID();
 _materialIDs.push_back(materialID);
-materialNode.properties = { materialID, material, MESH };
-HFMMaterial currentMaterial = hfmModel->materials[material];
+materialNode.properties = { materialID, materialName, MESH };
 // Setting the hierarchy: Material -> Properties70 -> P -> Properties
 FBXNode properties70Node;
@@ -170,7 +173,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
 pNodeDiffuseColor.name = P_NODE_NAME;
 pNodeDiffuseColor.properties.append({
 "DiffuseColor", "Color", "", "A",
-currentMaterial.diffuseColor[0], currentMaterial.diffuseColor[1], currentMaterial.diffuseColor[2]
+material.diffuseColor[0], material.diffuseColor[1], material.diffuseColor[2]
 });
 }
 properties70Node.children.append(pNodeDiffuseColor);
@@ -181,7 +184,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
 pNodeSpecularColor.name = P_NODE_NAME;
 pNodeSpecularColor.properties.append({
 "SpecularColor", "Color", "", "A",
-currentMaterial.specularColor[0], currentMaterial.specularColor[1], currentMaterial.specularColor[2]
+material.specularColor[0], material.specularColor[1], material.specularColor[2]
 });
 }
 properties70Node.children.append(pNodeSpecularColor);
@@ -192,7 +195,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
 pNodeShininess.name = P_NODE_NAME;
 pNodeShininess.properties.append({
 "Shininess", "Number", "", "A",
-currentMaterial.shininess
+material.shininess
 });
 }
 properties70Node.children.append(pNodeShininess);
@@ -203,7 +206,7 @@ void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material
 pNodeOpacity.name = P_NODE_NAME;
 pNodeOpacity.properties.append({
 "Opacity", "Number", "", "A",
-currentMaterial.opacity
+material.opacity
 });
 }
 properties70Node.children.append(pNodeOpacity);

View file

@@ -28,7 +28,7 @@ protected:
 private:
 void createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& hfmModel, const hifi::ByteArray& dracoMesh);
-void setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel);
+void setMaterialNodeProperties(FBXNode& materialNode, const QString& materialName, const hfm::Material& material, const hfm::Model::Pointer& hfmModel);
 NodeID nextNodeID() { return _nodeID++; }
 NodeID _nodeID { 0 };

View file

@@ -473,11 +473,11 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
 // compute meshPart local transforms
 QVector<glm::mat4> localTransforms;
 const HFMModel& hfmModel = model->getHFMModel();
-int numHFMMeshes = hfmModel.meshes.size();
+uint32_t numHFMMeshes = (uint32_t)hfmModel.meshes.size();
 int totalNumVertices = 0;
 glm::vec3 dimensions = getScaledDimensions();
 glm::mat4 invRegistraionOffset = glm::translate(dimensions * (getRegistrationPoint() - ENTITY_ITEM_DEFAULT_REGISTRATION_POINT));
-for (int i = 0; i < numHFMMeshes; i++) {
+for (uint32_t i = 0; i < numHFMMeshes; i++) {
 const HFMMesh& mesh = hfmModel.meshes.at(i);
 if (mesh.clusters.size() > 0) {
 const HFMCluster& cluster = mesh.clusters.at(0);

View file

@@ -490,7 +490,7 @@ void ParticleEffectEntityRenderer::fetchGeometryResource() {
 void ParticleEffectEntityRenderer::computeTriangles(const hfm::Model& hfmModel) {
 PROFILE_RANGE(render, __FUNCTION__);
-int numberOfMeshes = hfmModel.meshes.size();
+uint32_t numberOfMeshes = (uint32_t)hfmModel.meshes.size();
 _hasComputedTriangles = true;
 _triangleInfo.triangles.clear();
@@ -500,11 +500,11 @@ void ParticleEffectEntityRenderer::computeTriangles(const hfm::Model& hfmModel)
 float minArea = FLT_MAX;
 AABox bounds;
-for (int i = 0; i < numberOfMeshes; i++) {
+for (uint32_t i = 0; i < numberOfMeshes; i++) {
 const HFMMesh& mesh = hfmModel.meshes.at(i);
-const int numberOfParts = mesh.parts.size();
-for (int j = 0; j < numberOfParts; j++) {
+const uint32_t numberOfParts = (uint32_t)mesh.parts.size();
+for (uint32_t j = 0; j < numberOfParts; j++) {
 const HFMMeshPart& part = mesh.parts.at(j);
 const int INDICES_PER_TRIANGLE = 3;

View file

@@ -1288,7 +1288,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
 const FBXModel& fbxModel = fbxModels[modelID];
 HFMJoint joint;
 joint.parentIndex = fbxModel.parentIndex;
-int jointIndex = hfmModel.joints.size();
+uint32_t jointIndex = (uint32_t)hfmModel.joints.size();
 joint.translation = fbxModel.translation; // these are usually in centimeters
 joint.preTransform = fbxModel.preTransform;
@@ -1613,7 +1613,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
 // transform cluster vertices to joint-frame and save for later
 glm::mat4 meshToJoint = glm::inverse(joint.bindTransform) * modelTransform;
 ShapeVertices& points = hfmModel.shapeVertices.at(jointIndex);
-foreach (const glm::vec3& vertex, extracted.mesh.vertices) {
+for (const glm::vec3& vertex : extracted.mesh.vertices) {
 const glm::mat4 vertexTransform = meshToJoint * glm::translate(vertex);
 points.push_back(extractTranslation(vertexTransform));
 }
@@ -1628,7 +1628,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
 }
 hfmModel.meshes.push_back(extracted.mesh);
-int meshIndex = hfmModel.meshes.size() - 1;
+uint32_t meshIndex = (uint32_t)hfmModel.meshes.size() - 1;
 meshIDsToMeshIndices.insert(it.key(), meshIndex);
 }

View file

@@ -500,7 +500,7 @@ ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& me
 data.extracted.partMaterialTextures.append(materialTexture);
 }
-partIndexPlusOne = data.extracted.mesh.parts.size();
+partIndexPlusOne = (int)data.extracted.mesh.parts.size();
 }
 // give the mesh part this index
@@ -535,7 +535,7 @@ ExtractedMesh FBXSerializer::extractMesh(const FBXNode& object, unsigned int& me
 if (partIndex == 0) {
 data.extracted.partMaterialTextures.append(materialTexture);
 data.extracted.mesh.parts.resize(data.extracted.mesh.parts.size() + 1);
-partIndex = data.extracted.mesh.parts.size();
+partIndex = (int)data.extracted.mesh.parts.size();
 }
 HFMMeshPart& part = data.extracted.mesh.parts[partIndex - 1];

View file

@@ -752,11 +752,11 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
 }
 // clean up old mesh parts.
-int unmodifiedMeshPartCount = mesh.parts.size();
+auto unmodifiedMeshPartCount = (uint32_t)mesh.parts.size();
 mesh.parts.clear();
 mesh.parts = hfmMeshParts;
-for (int i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
+for (uint32_t i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
 FaceGroup faceGroup = faceGroups[meshPartCount];
 // Now that each mesh has been created with its own unique material mappings, fill them with data (vertex data is duplicated, face data is not).

View file

@@ -804,7 +804,7 @@ void Model::calculateTriangleSets(const HFMModel& hfmModel) {
 for (uint32_t i = 0; i < numberOfMeshes; i++) {
 const HFMMesh& mesh = hfmModel.meshes.at(i);
-const uint32_t numberOfParts = mesh.parts.size();
+const uint32_t numberOfParts = (uint32_t)mesh.parts.size();
 auto& meshTriangleSets = _modelSpaceMeshTriangleSets[i];
 meshTriangleSets.resize(numberOfParts);