Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-04-07 10:02:24 +02:00)

Commit c4db6c78d8 (parent df5684c437)
std::vector-ize meshes/mesh parts in hfm::Model

Converts HFMModel::meshes and hfm::Mesh::parts from QVector to std::vector and updates the call sites accordingly: append() becomes push_back(), last() becomes back(), count() becomes size(), isEmpty() becomes empty(), operator<< appends become push_back(), and the toStdVector()/fromStdVector() conversions in the baker tasks are dropped.

7 changed files with 31 additions and 31 deletions
@@ -1624,7 +1624,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
             }
         }
 
-        hfmModel.meshes.append(extracted.mesh);
+        hfmModel.meshes.push_back(extracted.mesh);
         int meshIndex = hfmModel.meshes.size() - 1;
         meshIDsToMeshIndices.insert(it.key(), meshIndex);
     }
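
The FBX path shows the basic substitution this commit makes everywhere: QVector::append() becomes std::vector::push_back(), and the freshly appended mesh is still addressed by index size() - 1. A minimal self-contained sketch of that pattern, using placeholder types rather than the real hfm::Mesh and the serializer's QHash of mesh IDs:

    // Sketch only: Mesh and the std::map stand in for hfm::Mesh and the
    // serializer's mesh-ID hash; not the actual FBXSerializer code.
    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    struct Mesh { int vertexCount = 0; };   // stand-in for hfm::Mesh

    int main() {
        std::vector<Mesh> meshes;                       // was QVector<Mesh>
        std::map<std::string, int> meshIDsToMeshIndices;

        // QVector::append(x) becomes std::vector::push_back(x).
        Mesh extracted;
        meshes.push_back(extracted);

        // The index of the element just appended is size() - 1, same as before.
        int meshIndex = (int)meshes.size() - 1;
        meshIDsToMeshIndices.insert({"mesh-id-0", meshIndex});

        std::printf("stored mesh at index %d\n", meshIndex);
        return 0;
    }
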
@@ -1018,7 +1018,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::VariantHash&
 
         if (node.defined["mesh"]) {
 
-            hfmModel.meshes.append(HFMMesh());
+            hfmModel.meshes.push_back(HFMMesh());
             HFMMesh& mesh = hfmModel.meshes[hfmModel.meshes.size() - 1];
             if (!hfmModel.hasSkeletonJoints) {
                 HFMCluster cluster;
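
The GLTF path then takes a reference to the element it just pushed via meshes[meshes.size() - 1]; std::vector::back() names the same element. A small sketch of that, with a placeholder Mesh type (not the real serializer), including the usual caveat that later growth of the vector can invalidate the reference:

    // Sketch only: placeholder Mesh, not GLTFSerializer::buildGeometry.
    #include <cassert>
    #include <vector>

    struct Mesh { bool hasCluster = false; };

    int main() {
        std::vector<Mesh> meshes;
        meshes.push_back(Mesh());

        // meshes[meshes.size() - 1] and meshes.back() name the same element.
        Mesh& mesh = meshes.back();
        mesh.hasCluster = true;
        assert(meshes[meshes.size() - 1].hasCluster);

        // Caveat: a later push_back may reallocate and invalidate this
        // reference, so take it after the element has been added, as here.
        return 0;
    }
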
@@ -2038,9 +2038,9 @@ void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
     qCDebug(modelformat) << " jointIndices.size() =" << hfmModel.jointIndices.size();
     qCDebug(modelformat) << " joints.count() =" << hfmModel.joints.count();
     qCDebug(modelformat) << "---------------- Meshes ----------------";
-    qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.count();
+    qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.size();
     qCDebug(modelformat) << " blendshapeChannelNames = " << hfmModel.blendshapeChannelNames;
-    foreach(HFMMesh mesh, hfmModel.meshes) {
+    for (const HFMMesh& mesh : hfmModel.meshes) {
         qCDebug(modelformat) << "\n";
         qCDebug(modelformat) << " meshpointer =" << mesh._mesh.get();
         qCDebug(modelformat) << " meshindex =" << mesh.meshIndex;
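
Hand in hand with the container change, the debug dump's Qt foreach over whole HFMMesh copies becomes a standard range-for over const references. A short sketch of the difference, with a placeholder Mesh type:

    // Sketch only: placeholder Mesh, not the GLTF debug dump.
    #include <cstdio>
    #include <vector>

    struct Mesh { int meshIndex = 0; };

    int main() {
        std::vector<Mesh> meshes(3);

        // Old style (Qt): foreach(HFMMesh mesh, hfmModel.meshes) { ... }
        // copies each element and depends on the Q_FOREACH macro.

        // New style: standard range-for, no copies, works on any standard container.
        for (const Mesh& mesh : meshes) {
            std::printf("meshindex = %d\n", mesh.meshIndex);
        }
        return 0;
    }
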
@@ -2054,7 +2054,7 @@ void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
         qCDebug(modelformat) << " clusterIndices.count() =" << mesh.clusterIndices.count();
         qCDebug(modelformat) << " clusterWeights.count() =" << mesh.clusterWeights.count();
         qCDebug(modelformat) << " modelTransform =" << mesh.modelTransform;
-        qCDebug(modelformat) << " parts.count() =" << mesh.parts.count();
+        qCDebug(modelformat) << " parts.count() =" << mesh.parts.size();
         qCDebug(modelformat) << "---------------- Meshes (blendshapes)--------";
         foreach(HFMBlendshape bshape, mesh.blendshapes) {
             qCDebug(modelformat) << "\n";
@@ -492,8 +492,8 @@ bool OBJSerializer::parseOBJGroup(OBJTokenizer& tokenizer, const hifi::VariantHa
                                   float& scaleGuess, bool combineParts) {
     FaceGroup faces;
     HFMMesh& mesh = hfmModel.meshes[0];
-    mesh.parts.append(HFMMeshPart());
-    HFMMeshPart& meshPart = mesh.parts.last();
+    mesh.parts.push_back(HFMMeshPart());
+    HFMMeshPart& meshPart = mesh.parts.back();
     bool sawG = false;
     bool result = true;
     int originalFaceCountForDebugging = 0;
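
The OBJ parser uses the append()/last() pair to create a part and then fill it in; with std::vector that becomes push_back()/back(). A sketch under the same placeholder-type assumption (emplace_back() is mentioned only as an equivalent alternative, not something this diff uses):

    // Sketch only: placeholder MeshPart, not OBJSerializer::parseOBJGroup.
    #include <string>
    #include <vector>

    struct MeshPart { std::string materialID; };

    int main() {
        std::vector<MeshPart> parts;

        // QVector: parts.append(HFMMeshPart()); ... parts.last()
        // std::vector equivalent:
        parts.push_back(MeshPart());
        MeshPart& meshPart = parts.back();
        meshPart.materialID = "dontknow";

        // emplace_back() constructs in place (and returns a reference only
        // since C++17), so back() remains the portable way to reach it.
        parts.emplace_back();
        parts.back().materialID = "dontknow2";
        return 0;
    }
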
@@ -501,7 +501,7 @@ bool OBJSerializer::parseOBJGroup(OBJTokenizer& tokenizer, const hifi::VariantHa
     bool anyVertexColor { false };
     int vertexCount { 0 };
 
-    setMeshPartDefaults(meshPart, QString("dontknow") + QString::number(mesh.parts.count()));
+    setMeshPartDefaults(meshPart, QString("dontknow") + QString::number(mesh.parts.size()));
 
     while (true) {
         int tokenType = tokenizer.nextToken();
@@ -676,7 +676,7 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
     _url = url;
     bool combineParts = mapping.value("combineParts").toBool();
     hfmModel.meshExtents.reset();
-    hfmModel.meshes.append(HFMMesh());
+    hfmModel.meshes.push_back(HFMMesh());
 
     try {
         // call parseOBJGroup as long as it's returning true. Each successful call will
@@ -706,8 +706,8 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
             mesh.clusters.append(cluster);
 
             QMap<QString, int> materialMeshIdMap;
-            QVector<HFMMeshPart> hfmMeshParts;
-            for (int i = 0, meshPartCount = 0; i < mesh.parts.count(); i++, meshPartCount++) {
+            std::vector<HFMMeshPart> hfmMeshParts;
+            for (uint32_t i = 0, meshPartCount = 0; i < (uint32_t)mesh.parts.size(); i++, meshPartCount++) {
                 HFMMeshPart& meshPart = mesh.parts[i];
                 FaceGroup faceGroup = faceGroups[meshPartCount];
                 bool specifiesUV = false;
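
Here the local QVector<HFMMeshPart> becomes a std::vector, and the loop counter switches from int to uint32_t with an explicit cast of the bound, since QVector::count() returns int while std::vector::size() returns an unsigned size_type. A minimal sketch with placeholder types:

    // Sketch only: placeholder MeshPart, not the OBJ material-combining loop.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct MeshPart { int triangleCount = 0; };

    int main() {
        std::vector<MeshPart> parts(4);
        std::vector<MeshPart> rebuiltParts;   // was QVector<HFMMeshPart>

        // Unsigned counter plus an explicit cast of size() avoids
        // signed/unsigned comparison warnings.
        for (uint32_t i = 0; i < (uint32_t)parts.size(); i++) {
            rebuiltParts.push_back(parts[i]);
        }
        std::printf("copied %zu parts\n", rebuiltParts.size());
        return 0;
    }
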
@@ -718,8 +718,8 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
                     // Create a new HFMMesh for this material mapping.
                     materialMeshIdMap.insert(face.materialName, materialMeshIdMap.count());
 
-                    hfmMeshParts.append(HFMMeshPart());
-                    HFMMeshPart& meshPartNew = hfmMeshParts.last();
+                    hfmMeshParts.push_back(HFMMeshPart());
+                    HFMMeshPart& meshPartNew = hfmMeshParts.back();
                     meshPartNew.quadIndices = QVector<int>(meshPart.quadIndices); // Copy over quad indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
                     meshPartNew.quadTrianglesIndices = QVector<int>(meshPart.quadTrianglesIndices); // Copy over quad triangulated indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
                     meshPartNew.triangleIndices = QVector<int>(meshPart.triangleIndices); // Copy over triangle indices.
@@ -752,9 +752,9 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
             }
 
             // clean up old mesh parts.
-            int unmodifiedMeshPartCount = mesh.parts.count();
+            int unmodifiedMeshPartCount = mesh.parts.size();
             mesh.parts.clear();
-            mesh.parts = QVector<HFMMeshPart>(hfmMeshParts);
+            mesh.parts = hfmMeshParts;
 
             for (int i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
                 FaceGroup faceGroup = faceGroups[meshPartCount];
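
Once both containers are std::vector, the rebuilt parts can simply be copy-assigned back; the old code had to construct a fresh QVector from hfmMeshParts. A sketch with placeholder types (the std::move variant in the comment is an option, not what the diff does):

    // Sketch only: placeholder Mesh/MeshPart, not OBJSerializer::read.
    #include <cassert>
    #include <utility>
    #include <vector>

    struct MeshPart {};
    struct Mesh { std::vector<MeshPart> parts; };

    int main() {
        Mesh mesh;
        mesh.parts.resize(2);
        std::vector<MeshPart> rebuiltParts(5);

        int unmodifiedMeshPartCount = (int)mesh.parts.size();  // was parts.count()
        mesh.parts.clear();
        mesh.parts = rebuiltParts;             // plain copy assignment
        assert(mesh.parts.size() == 5);
        assert(unmodifiedMeshPartCount == 2);

        // If rebuiltParts is not needed afterwards, a move would avoid the copy:
        // mesh.parts = std::move(rebuiltParts);
        return 0;
    }
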
@@ -1003,7 +1003,7 @@ void hfmDebugDump(const HFMModel& hfmModel) {
     qCDebug(modelformat) << "---------------- hfmModel ----------------";
     qCDebug(modelformat) << " hasSkeletonJoints =" << hfmModel.hasSkeletonJoints;
     qCDebug(modelformat) << " offset =" << hfmModel.offset;
-    qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.count();
+    qCDebug(modelformat) << " meshes.count() =" << hfmModel.meshes.size();
     foreach (HFMMesh mesh, hfmModel.meshes) {
         qCDebug(modelformat) << " vertices.count() =" << mesh.vertices.count();
         qCDebug(modelformat) << " colors.count() =" << mesh.colors.count();
@@ -1021,7 +1021,7 @@ void hfmDebugDump(const HFMModel& hfmModel) {
         qCDebug(modelformat) << " clusterWeights.count() =" << mesh.clusterWeights.count();
         qCDebug(modelformat) << " meshExtents =" << mesh.meshExtents;
         qCDebug(modelformat) << " modelTransform =" << mesh.modelTransform;
-        qCDebug(modelformat) << " parts.count() =" << mesh.parts.count();
+        qCDebug(modelformat) << " parts.count() =" << mesh.parts.size();
         foreach (HFMMeshPart meshPart, mesh.parts) {
             qCDebug(modelformat) << " quadIndices.count() =" << meshPart.quadIndices.count();
             qCDebug(modelformat) << " triangleIndices.count() =" << meshPart.triangleIndices.count();
@@ -76,7 +76,7 @@ QStringList HFMModel::getJointNames() const {
 }
 
 bool HFMModel::hasBlendedMeshes() const {
-    if (!meshes.isEmpty()) {
+    if (!meshes.empty()) {
         foreach (const HFMMesh& mesh, meshes) {
             if (!mesh.blendshapes.isEmpty()) {
                 return true;
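
Qt containers spell emptiness isEmpty(); the standard containers spell it empty(), which is the only change hasBlendedMeshes() needs. A self-contained sketch of the same shape of function; blendshapes stays a Qt container in the real code, so a std::vector stands in here only to keep the sketch compilable on its own:

    // Sketch only: placeholder Mesh, not HFMModel::hasBlendedMeshes.
    #include <vector>

    struct Mesh { std::vector<int> blendshapes; };

    static bool hasBlendedMeshes(const std::vector<Mesh>& meshes) {
        if (!meshes.empty()) {                  // was meshes.isEmpty()
            for (const Mesh& mesh : meshes) {
                if (!mesh.blendshapes.empty()) {
                    return true;
                }
            }
        }
        return false;
    }

    int main() {
        std::vector<Mesh> meshes(1);
        meshes[0].blendshapes.push_back(7);
        return hasBlendedMeshes(meshes) ? 0 : 1;
    }
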
@@ -228,7 +228,7 @@ public:
 class Mesh {
 public:
 
-    QVector<MeshPart> parts;
+    std::vector<MeshPart> parts;
 
     QVector<glm::vec3> vertices;
     QVector<glm::vec3> normals;
@@ -314,11 +314,11 @@ public:
 
     std::vector<Shape> shapes;
 
+    std::vector<Mesh> meshes;
     QVector<Joint> joints;
     QHash<QString, int> jointIndices; ///< 1-based, so as to more easily detect missing indices
     bool hasSkeletonJoints;
 
-    QVector<Mesh> meshes;
     QVector<QString> scripts;
 
     QHash<QString, Material> materials;
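
These are the header changes the rest of the diff follows from: hfm::Mesh::parts and HFMModel::meshes become std::vector members while their neighbours stay Qt containers. A stripped-down sketch of that mixed layout with placeholder types (the real classes carry many more fields):

    // Sketch only: minimal stand-ins for hfm::Mesh and HFMModel.
    #include <cstdio>
    #include <vector>

    struct MeshPart {};

    struct Mesh {
        std::vector<MeshPart> parts;   // was QVector<MeshPart>
        // vertices, normals, ... stay QVector<glm::vec3> in the real header.
    };

    struct Model {
        std::vector<Mesh> meshes;      // was QVector<Mesh>
        // joints, scripts, materials, ... stay Qt containers in the real header.
    };

    int main() {
        Model model;
        model.meshes.push_back(Mesh());
        model.meshes[0].parts.push_back(MeshPart());
        std::printf("meshes=%zu parts=%zu\n",
                    model.meshes.size(), model.meshes[0].parts.size());
        return 0;
    }
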
@@ -32,7 +32,7 @@ namespace baker {
 
         void run(const BakeContextPointer& context, const Input& input, Output& output) {
             const auto& hfmModelIn = input;
-            output.edit0() = hfmModelIn->meshes.toStdVector();
+            output.edit0() = hfmModelIn->meshes;
             output.edit1() = hfmModelIn->originalURL;
             output.edit2() = hfmModelIn->meshIndicesToModelNames;
             auto& blendshapesPerMesh = output.edit3();
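
With HFMModel::meshes already a std::vector, the baker task no longer needs QVector::toStdVector() on the way out (nor fromStdVector() on the way back in, in the next hunk); a plain assignment between two std::vectors does the job. A sketch with placeholder structs standing in for the baker's input/output slots:

    // Sketch only: placeholder types, not the baker's Input/Output machinery.
    #include <cassert>
    #include <vector>

    struct Mesh {};

    struct ModelIn    { std::vector<Mesh> meshes; };   // was QVector<Mesh>
    struct TaskOutput { std::vector<Mesh> meshes; };   // baker tasks already used std::vector

    int main() {
        ModelIn hfmModelIn;
        hfmModelIn.meshes.resize(3);

        TaskOutput output;
        // Before: output.meshes = hfmModelIn.meshes.toStdVector();  (QVector -> std::vector copy)
        // After:  a direct copy between two std::vectors.
        output.meshes = hfmModelIn.meshes;
        assert(output.meshes.size() == 3);
        return 0;
    }
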
@@ -107,7 +107,7 @@ namespace baker {
 
         void run(const BakeContextPointer& context, const Input& input, Output& output) {
             auto hfmModelOut = input.get0();
-            hfmModelOut->meshes = QVector<hfm::Mesh>::fromStdVector(input.get1());
+            hfmModelOut->meshes = input.get1();
             hfmModelOut->joints = QVector<hfm::Joint>::fromStdVector(input.get2());
             hfmModelOut->jointRotationOffsets = input.get3();
             hfmModelOut->jointIndices = input.get4();
@@ -734,7 +734,7 @@ bool Model::replaceScriptableModelMeshPart(scriptable::ScriptableModelBasePointe
         for (int partID = 0; partID < numParts; partID++) {
             HFMMeshPart part;
             part.triangleIndices = buffer_helpers::bufferToVector<int>(mesh._mesh->getIndexBuffer(), "part.triangleIndices");
-            mesh.parts << part;
+            mesh.parts.push_back(part);
         }
         {
             foreach (const glm::vec3& vertex, mesh.vertices) {
@@ -745,7 +745,7 @@ bool Model::replaceScriptableModelMeshPart(scriptable::ScriptableModelBasePointe
                 mesh.meshExtents.maximum = glm::max(mesh.meshExtents.maximum, transformedVertex);
             }
         }
-        hfmModel.meshes << mesh;
+        hfmModel.meshes.push_back(mesh);
     }
     calculateTriangleSets(hfmModel);
 }
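
QVector and QList overload operator<< as an append, which std::vector does not provide, so the two `<<` call sites in Model become push_back(). A sketch with placeholder types, not the real Model::replaceScriptableModelMeshPart:

    // Sketch only: placeholder Model/Mesh/MeshPart types.
    #include <vector>

    struct MeshPart { std::vector<int> triangleIndices; };
    struct Mesh { std::vector<MeshPart> parts; };
    struct Model { std::vector<Mesh> meshes; };

    int main() {
        Model hfmModel;
        Mesh mesh;

        MeshPart part;
        part.triangleIndices = {0, 1, 2};
        // Before: mesh.parts << part;      (QVector::operator<<)
        mesh.parts.push_back(part);

        // Before: hfmModel.meshes << mesh; (QVector::operator<<)
        hfmModel.meshes.push_back(mesh);
        return (int)hfmModel.meshes.size() - 1;   // 0 on success
    }
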
@@ -762,9 +762,9 @@ scriptable::ScriptableModelBase Model::getScriptableModel() {
     }
 
     const HFMModel& hfmModel = getHFMModel();
-    int numberOfMeshes = hfmModel.meshes.size();
+    uint32_t numberOfMeshes = (uint32_t)hfmModel.meshes.size();
     int shapeID = 0;
-    for (int i = 0; i < numberOfMeshes; i++) {
+    for (uint32_t i = 0; i < numberOfMeshes; i++) {
         const HFMMesh& hfmMesh = hfmModel.meshes.at(i);
         if (auto mesh = hfmMesh._mesh) {
             result.append(mesh);
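
The remaining Model.cpp hunks change the loop counters themselves: size() yields an unsigned size_type, and the code narrows it explicitly to uint32_t so counters and bounds share one unsigned type instead of mixing int with size_t. A sketch of the nested mesh/part loops with placeholder types (the cast on parts.size() is added here for symmetry; the diff relies on implicit narrowing for that one):

    // Sketch only: placeholder Mesh/MeshPart, not Model::calculateTriangleSets.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct MeshPart {};
    struct Mesh { std::vector<MeshPart> parts; };

    int main() {
        std::vector<Mesh> meshes(2);
        meshes[0].parts.resize(3);
        meshes[1].parts.resize(1);

        uint32_t numberOfMeshes = (uint32_t)meshes.size();      // was int ... .size()
        for (uint32_t i = 0; i < numberOfMeshes; i++) {
            const uint32_t numberOfParts = (uint32_t)meshes[i].parts.size();
            for (uint32_t j = 0; j < numberOfParts; j++) {
                std::printf("mesh %u part %u\n", i, j);
            }
        }
        return 0;
    }
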
@@ -795,20 +795,20 @@ scriptable::ScriptableModelBase Model::getScriptableModel() {
 void Model::calculateTriangleSets(const HFMModel& hfmModel) {
     PROFILE_RANGE(render, __FUNCTION__);
 
-    int numberOfMeshes = hfmModel.meshes.size();
+    uint32_t numberOfMeshes = (uint32_t)hfmModel.meshes.size();
 
     _triangleSetsValid = true;
     _modelSpaceMeshTriangleSets.clear();
     _modelSpaceMeshTriangleSets.resize(numberOfMeshes);
 
-    for (int i = 0; i < numberOfMeshes; i++) {
+    for (uint32_t i = 0; i < numberOfMeshes; i++) {
         const HFMMesh& mesh = hfmModel.meshes.at(i);
 
-        const int numberOfParts = mesh.parts.size();
+        const uint32_t numberOfParts = mesh.parts.size();
         auto& meshTriangleSets = _modelSpaceMeshTriangleSets[i];
         meshTriangleSets.resize(numberOfParts);
 
-        for (int j = 0; j < numberOfParts; j++) {
+        for (uint32_t j = 0; j < numberOfParts; j++) {
             const HFMMeshPart& part = mesh.parts.at(j);
 
             auto& partTriangleSet = meshTriangleSets[j];