Mirror of https://github.com/overte-org/overte.git (synced 2025-08-08 07:57:30 +02:00)

Commit 104ed758b9: re-order nodes, cluster update
Parent: f458427067
1 changed file with 57 additions and 83 deletions
@@ -753,97 +753,73 @@ void GLTFSerializer::getSkinInverseBindMatrices(std::vector<std::vector<float>>&
 
 bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
     int numNodes = _file.nodes.size();
-    bool skinnedModel = !_file.skins.isEmpty();
 
 
     // Build dependencies
-    QVector<QVector<int>> nodeDependencies(numNodes);
     QVector<int> parents;
-    QVector<int> nodesToSort = _file.scenes[_file.scene].nodes;
+    QVector<int> sortedNodes;
     parents.fill(-1, numNodes);
-    nodesToSort.reserve(numNodes);
+    sortedNodes.reserve(numNodes);
     int nodecount = 0;
     foreach(auto &node, _file.nodes) {
         foreach(int child, node.children) {
-            nodeDependencies[child].push_back(nodecount);
             parents[child] = nodecount;
         }
-        if (!nodesToSort.contains(nodecount)) {
-            nodesToSort.push_back(nodecount);
-        }
+        sortedNodes.push_back(nodecount);
         nodecount++;
     }
 
-
+    // Build transforms
     nodecount = 0;
     foreach(auto &node, _file.nodes) {
         // collect node transform
         _file.nodes[nodecount].transforms.push_back(getModelTransform(node));
-        if (nodeDependencies[nodecount].size() == 1) {
-            int parentidx = nodeDependencies[nodecount][0];
-            while (true) { // iterate parents
-                // collect parents transforms
-                _file.nodes[nodecount].transforms.push_back(getModelTransform(_file.nodes[parentidx]));
-                if (nodeDependencies[parentidx].size() == 1) {
-                    parentidx = nodeDependencies[parentidx][0];
-                } else break;
-            }
+        int parentIndex = parents[nodecount];
+        while (parentIndex != -1) {
+            const auto& parentNode = _file.nodes[parentIndex];
+            // collect transforms for a node's parents, grandparents, etc.
+            _file.nodes[nodecount].transforms.push_back(getModelTransform(parentNode));
+            parentIndex = parents[parentIndex];
         }
         nodecount++;
     }
 
 
-    // initialize order in which nodes will be parsed
-    std::vector<int> nodeQueue;
-    QVector<int> originalToNewNodeIndexMap;
+    // since parent indices must exist in the sorted list before any of their children, sortedNodes might not be initialized in the correct order
+    // therefore we need to re-initialize the order in which nodes will be parsed
     QVector<bool> hasBeenSorted;
-    originalToNewNodeIndexMap.fill(-1, numNodes);
     hasBeenSorted.fill(false, numNodes);
-    nodeQueue = _file.scenes[_file.scene].nodes.toStdVector();
-    for (int sceneNodeCount = 0; sceneNodeCount < _file.scenes[_file.scene].nodes.size(); sceneNodeCount++) {
-        int sceneNode = nodeQueue[sceneNodeCount];
-        originalToNewNodeIndexMap[sceneNode] = sceneNodeCount;
-        nodesToSort[nodesToSort.indexOf(sceneNode)] = nodesToSort.back();
-        nodesToSort.pop_back();
-        hasBeenSorted[sceneNode] = true;
-        for (int child : _file.nodes[sceneNode].children.toStdVector()) {
-            nodesToSort[nodesToSort.indexOf(child)] = nodesToSort.back();
-            nodesToSort.pop_back();
-        }
-        for (int child : _file.nodes[sceneNode].children) {
-            originalToNewNodeIndexMap[child] = nodeQueue.size();
-            nodeQueue.push_back(child);
-            hasBeenSorted[child] = true;
+    int i = 0; // initial index
+    while (i < numNodes) {
+        int currentNode = sortedNodes[i];
+        int parentIndex = parents[currentNode];
+        if (parentIndex == -1 || hasBeenSorted[parentIndex]) {
+            hasBeenSorted[currentNode] = true;
+            i++;
+        } else {
+            int j = i + 1; // index of node to be sorted
+            while (j < numNodes) {
+                int nextNode = sortedNodes[j];
+                parentIndex = parents[nextNode];
+                if (parentIndex == -1 || hasBeenSorted[parentIndex]) {
+                    // swap with currentNode
+                    hasBeenSorted[nextNode] = true;
+                    sortedNodes[i] = nextNode;
+                    sortedNodes[j] = currentNode;
 
-            if (!_file.nodes[child].children.isEmpty() && nodeQueue.size() < numNodes) {
-                int newSize = nodesToSort.size();
-                while (!nodesToSort.isEmpty()) {
-                    int i = 0;
-
-                    while (i < nodesToSort.size()) {
-                        int nodeIndex = nodesToSort[i];
-                        int parentIndex = parents[nodeIndex];
-                        newSize = nodesToSort.size();
-
-                        if ((parentIndex == -1 || hasBeenSorted[parentIndex])) {
-                            originalToNewNodeIndexMap[nodeIndex] = nodeQueue.size();
-                            nodeQueue.push_back(nodeIndex);
-                            hasBeenSorted[nodeIndex] = true;
-                            // copy back and pop
-                            nodesToSort[i] = nodesToSort.back();
-                            nodesToSort.pop_back();
-                        } else { // skip
                     i++;
-                        }
-                    }
-                    // if the end of nodesToSort is reached without removing any nodes, break to move onto the next child
-                    if (newSize == nodesToSort.size() && i == nodesToSort.size()) {
                     break;
                 }
+                j++;
             }
         }
     }
 
 
+    // Build map from original to new indices
+    QVector<int> originalToNewNodeIndexMap;
+    originalToNewNodeIndexMap.fill(-1, numNodes);
+    for (int i = 0; i < numNodes; i++) {
+        originalToNewNodeIndexMap[sortedNodes[i]] = i;
     }
 
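
Note on the hunk above: the new while-loop is an in-place topological sort over the parent array. A node keeps its slot once its parent has already been emitted; otherwise the first later node whose parent was emitted is swapped forward. A minimal self-contained sketch of the same idea (hypothetical names, plain std::vector instead of QVector, not the engine's actual code):

    #include <vector>

    // Re-order node indices so that every parent precedes its children.
    // parents[i] is the parent of node i, or -1 for a root. Assumes the
    // node graph is a forest (no cycles), which the glTF spec guarantees.
    std::vector<int> sortByParent(const std::vector<int>& parents) {
        int numNodes = (int)parents.size();
        std::vector<int> sorted(numNodes);
        for (int k = 0; k < numNodes; k++) {
            sorted[k] = k; // start in file order
        }
        std::vector<bool> done(numNodes, false);
        int i = 0;
        while (i < numNodes) {
            int current = sorted[i];
            int parent = parents[current];
            if (parent == -1 || done[parent]) {
                done[current] = true; // parent already emitted, keep in place
                i++;
            } else {
                // find the next node whose parent was emitted, swap it forward
                for (int j = i + 1; j < numNodes; j++) {
                    int next = sorted[j];
                    if (parents[next] == -1 || done[parents[next]]) {
                        done[next] = true;
                        sorted[i] = next;
                        sorted[j] = current;
                        i++;
                        break;
                    }
                }
            }
        }
        return sorted;
    }

For example, parents = {1, -1} (node 0 is a child of node 1) yields {1, 0}: the root moves ahead of its child, which is exactly what the per-joint loops later in the file rely on.
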
@@ -851,9 +827,8 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
     HFMJoint joint;
     joint.distanceToParent = 0;
     hfmModel.jointIndices["x"] = numNodes;
-    hfmModel.hasSkeletonJoints = false;
 
-    for (int nodeIndex : nodeQueue) {
+    for (int nodeIndex : sortedNodes) {
         auto& node = _file.nodes[nodeIndex];
 
         joint.parentIndex = -1;
@@ -873,22 +848,24 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
 
     // Build skeleton
+    hfmModel.hasSkeletonJoints = false;
     std::vector<glm::mat4> jointInverseBindTransforms;
     jointInverseBindTransforms.resize(numNodes);
     if (!_file.skins.isEmpty()) {
+        hfmModel.hasSkeletonJoints = true;
         std::vector<std::vector<float>> inverseBindValues;
         getSkinInverseBindMatrices(inverseBindValues);
 
         for (int jointIndex = 0; jointIndex < numNodes; jointIndex++) {
-            int nodeIndex = nodeQueue[jointIndex];
+            int nodeIndex = sortedNodes[jointIndex];
             auto joint = hfmModel.joints[jointIndex];
 
-            hfmModel.hasSkeletonJoints = true;
             for (int s = 0; s < _file.skins.size(); s++) {
                 const auto& skin = _file.skins[s];
                 int matrixIndex = skin.joints.indexOf(nodeIndex);
                 joint.isSkeletonJoint = skin.joints.contains(nodeIndex);
 
+                // build inverse bind matrices
                 if (joint.isSkeletonJoint) {
                     std::vector<float>& value = inverseBindValues[s];
                     int matrixCount = 16 * matrixIndex;
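
For context on `int matrixCount = 16 * matrixIndex;` above: a glTF skin stores all of its inverse bind matrices in one flat float array, 16 column-major floats per joint, so the multiplication is simply the offset of this joint's 4x4 matrix. A hedged sketch of the unpacking with glm (helper name is hypothetical, not from the file):

    #include <glm/glm.hpp>
    #include <glm/gtc/type_ptr.hpp>
    #include <vector>

    // Read one joint's 4x4 inverse bind matrix out of the flat float
    // array of a glTF skin (16 floats per joint, column-major).
    glm::mat4 unpackInverseBindMatrix(const std::vector<float>& values, int matrixIndex) {
        int offset = 16 * matrixIndex;
        // glm::make_mat4 consumes 16 consecutive floats in column-major
        // order, which matches the glTF accessor layout.
        return glm::make_mat4(values.data() + offset);
    }

The inverse bind matrix maps a vertex from model space into the joint's bind-pose space, which is why each skinned cluster built below carries one.
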
@@ -928,7 +905,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
     // Build meshes
     nodecount = 0;
-    for (int nodeIndex : nodeQueue) {
+    for (int nodeIndex : sortedNodes) {
         auto& node = _file.nodes[nodeIndex];
 
         if (node.defined["mesh"]) {
@@ -942,8 +919,8 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                 cluster.inverseBindMatrix = glm::mat4();
                 cluster.inverseBindTransform = Transform(cluster.inverseBindMatrix);
                 mesh.clusters.append(cluster);
-            } else {
-                for (int j : nodeQueue) {
+            } else { // skinned model
+                for (int j = 0; j < numNodes; j++) {
                     HFMCluster cluster;
                     cluster.jointIndex = j;
                     cluster.inverseBindMatrix = jointInverseBindTransforms[j];
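
The loop rewrite above also changes meaning: `cluster.jointIndex` is now the joint's position in the re-ordered list rather than whatever order `nodeQueue` happened to hold. The overall pattern, sketched with a hypothetical stand-in for HFMCluster (not the engine's real type; a root cluster is appended in the next hunk):

    #include <glm/glm.hpp>
    #include <vector>

    struct Cluster { // hypothetical stand-in for HFMCluster
        int jointIndex;
        glm::mat4 inverseBindMatrix;
    };

    // One cluster per re-ordered joint of a skinned mesh; an unskinned
    // mesh instead gets a single cluster with an identity matrix.
    std::vector<Cluster> buildClusters(const std::vector<glm::mat4>& inverseBinds) {
        std::vector<Cluster> clusters;
        for (int j = 0; j < (int)inverseBinds.size(); j++) {
            clusters.push_back({ j, inverseBinds[j] });
        }
        return clusters;
    }
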
@@ -952,10 +929,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                 }
             }
             HFMCluster root;
-            root.jointIndex = nodeQueue[0];
-            if (root.jointIndex == -1) {
-                root.jointIndex = 0;
-            }
+            root.jointIndex = sortedNodes[0];
             root.inverseBindMatrix = jointInverseBindTransforms[root.jointIndex];
             root.inverseBindTransform = Transform(root.inverseBindMatrix);
             mesh.clusters.append(root);
@@ -1055,6 +1029,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                     qWarning(modelformat) << "There was a problem reading glTF TANGENT data for model " << _url;
                     continue;
                 }
+                // tangents can be a vec3 or a vec4 which includes a w component (of -1 or 1)
                 int stride = (accessor.type == GLTFAccessorType::VEC4) ? 4 : 3;
                 for (int n = 0; n < tangents.size() - 3; n += stride) {
                     float tanW = stride == 4 ? tangents[n + 3] : 1;
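
The added comment documents a real glTF quirk: TANGENT accessors come as VEC3 or VEC4, where the optional fourth component w is a handedness sign of -1 or 1 that orients the bitangent. A small sketch of walking both layouts in a flat float array (function name hypothetical):

    #include <vector>

    void readTangents(const std::vector<float>& tangents, bool isVec4) {
        int stride = isVec4 ? 4 : 3;
        for (int n = 0; n + stride <= (int)tangents.size(); n += stride) {
            float x = tangents[n];
            float y = tangents[n + 1];
            float z = tangents[n + 2];
            // w defaults to 1 (right-handed) when the accessor is VEC3
            float w = isVec4 ? tangents[n + 3] : 1.0f;
            // per the glTF spec: bitangent = w * cross(normal, tangent)
            (void)x; (void)y; (void)z; (void)w;
        }
    }
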
@@ -1123,7 +1098,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                 }
             }
 
-            // adapted from FBXSerializer.cpp
+            // Build weights (adapted from FBXSerializer.cpp)
             if (hfmModel.hasSkeletonJoints) {
                 int numClusterIndices = clusterJoints.size();
                 const int WEIGHTS_PER_VERTEX = 4;
@@ -1133,7 +1108,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                 mesh.clusterWeights.fill(0, numClusterIndices);
 
                 for (int c = 0; c < clusterJoints.size(); c++) {
-                    mesh.clusterIndices[c] = _file.skins[node.skin].joints[clusterJoints[c]];
+                    mesh.clusterIndices[c] = originalToNewNodeIndexMap[_file.skins[node.skin].joints[clusterJoints[c]]];
                 }
 
                 // normalize and compress to 16-bits
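
This one-line change is the substantive fix of the hunk: `skin.joints[...]` yields an original glTF node index, but the joints were re-ordered during parsing, so every per-vertex cluster index must be translated through `originalToNewNodeIndexMap`. A standalone sketch of the remapping (hypothetical function, plain std::vector):

    #include <vector>

    // skinJoints:    skin.joints, one original node index per skin joint
    // clusterJoints: per-vertex joint slots, as indices into skinJoints
    // originalToNew: original node index -> re-ordered joint index
    std::vector<int> remapClusterIndices(const std::vector<int>& skinJoints,
                                         const std::vector<int>& clusterJoints,
                                         const std::vector<int>& originalToNew) {
        std::vector<int> clusterIndices(clusterJoints.size());
        for (size_t c = 0; c < clusterJoints.size(); c++) {
            clusterIndices[c] = originalToNew[skinJoints[clusterJoints[c]]];
        }
        return clusterIndices;
    }

Without the remap, skinned vertices would keep weighting against stale pre-sort joint indices.
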
@@ -1172,7 +1147,6 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
                 mesh.meshIndex = hfmModel.meshes.size();
             }
-
         }
         nodecount++;
     }