Mirror of https://github.com/overte-org/overte.git (synced 2025-08-08)

Merge pull request #15332 from raveenajain/gltf_brainstem_fix

Case 22060: fix model

Commit bbab0b59b7
2 changed files with 78 additions and 94 deletions
GLTFSerializer.cpp:

@@ -751,107 +751,95 @@ void GLTFSerializer::getSkinInverseBindMatrices(std::vector<std::vector<float>>&
     }
 }
 
-void GLTFSerializer::getNodeQueueByDepthFirstChildren(std::vector<int>& children, int stride, std::vector<int>& result) {
-    int startingIndex = 0;
-    int finalIndex = (int)children.size();
-    if (stride == -1) {
-        startingIndex = (int)children.size() - 1;
-        finalIndex = -1;
-    }
-    for (int index = startingIndex; index != finalIndex; index += stride) {
-        int c = children[index];
-        result.push_back(c);
-        std::vector<int> nested = _file.nodes[c].children.toStdVector();
-        if (nested.size() != 0) {
-            std::sort(nested.begin(), nested.end());
-            for (int r : nested) {
-                if (result.end() == std::find(result.begin(), result.end(), r)) {
-                    getNodeQueueByDepthFirstChildren(nested, stride, result);
-                }
-            }
-        }
-    }
-}
-
 bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
     int numNodes = _file.nodes.size();
 
-    //Build dependencies
-    QVector<QVector<int>> nodeDependencies(numNodes);
+    // Build dependencies
+    QVector<int> parents;
+    QVector<int> sortedNodes;
+    parents.fill(-1, numNodes);
+    sortedNodes.reserve(numNodes);
     int nodecount = 0;
     foreach(auto &node, _file.nodes) {
-        //nodes_transforms.push_back(getModelTransform(node));
-        foreach(int child, node.children) nodeDependencies[child].push_back(nodecount);
+        foreach(int child, node.children) {
+            parents[child] = nodecount;
+        }
+        sortedNodes.push_back(nodecount);
         nodecount++;
     }
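Note: the old code tracked a full dependency list per node (QVector<QVector<int>>), but a glTF node can be listed as the child of at most one other node, so a flat parent array is enough. A minimal standalone sketch of the invariant the new code relies on; the Node struct and buildParents helper are hypothetical stand-ins for the serializer's _file.nodes, not real API:

    #include <QVector>

    struct Node { QVector<int> children; };   // stand-in for GLTFNode

    QVector<int> buildParents(const QVector<Node>& nodes) {
        QVector<int> parents;
        parents.fill(-1, nodes.size());        // -1 marks a root node
        for (int i = 0; i < nodes.size(); i++) {
            for (int child : nodes[i].children) {
                parents[child] = i;            // safe: each node has at most one parent
            }
        }
        return parents;
    }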
 
 
+    // Build transforms
     nodecount = 0;
     foreach(auto &node, _file.nodes) {
         // collect node transform
         _file.nodes[nodecount].transforms.push_back(getModelTransform(node));
-        if (nodeDependencies[nodecount].size() == 1) {
-            int parentidx = nodeDependencies[nodecount][0];
-            while (true) { // iterate parents
-                // collect parents transforms
-                _file.nodes[nodecount].transforms.push_back(getModelTransform(_file.nodes[parentidx]));
-                if (nodeDependencies[parentidx].size() == 1) {
-                    parentidx = nodeDependencies[parentidx][0];
-                } else break;
-            }
+        int parentIndex = parents[nodecount];
+        while (parentIndex != -1) {
+            const auto& parentNode = _file.nodes[parentIndex];
+            // collect transforms for a node's parents, grandparents, etc.
+            _file.nodes[nodecount].transforms.push_back(getModelTransform(parentNode));
+            parentIndex = parents[parentIndex];
         }
 
         nodecount++;
     }
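Note: after this loop, node.transforms holds the local matrix of the node itself first, then its parent's, grandparent's, and so on up to the root. Assuming that ordering and that each entry is a local matrix, a consumer would compose the chain into one global transform by multiplying each ancestor in on the left; a sketch only (globalFromChain is a hypothetical helper, not part of the serializer):

    #include <glm/glm.hpp>
    #include <vector>

    glm::mat4 globalFromChain(const std::vector<glm::mat4>& transforms) {
        glm::mat4 global(1.0f);                  // identity
        for (const glm::mat4& local : transforms) {
            global = local * global;             // ends up as root * ... * parent * node
        }
        return global;
    }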
 
 
+    // since parent indices must exist in the sorted list before any of their children, sortedNodes might not be initialized in the correct order
+    // therefore we need to re-initialize the order in which nodes will be parsed
+    QVector<bool> hasBeenSorted;
+    hasBeenSorted.fill(false, numNodes);
+    int i = 0;  // initial index
+    while (i < numNodes) {
+        int currentNode = sortedNodes[i];
+        int parentIndex = parents[currentNode];
+        if (parentIndex == -1 || hasBeenSorted[parentIndex]) {
+            hasBeenSorted[currentNode] = true;
+            i++;
+        } else {
+            int j = i + 1;  // index of node to be sorted
+            while (j < numNodes) {
+                int nextNode = sortedNodes[j];
+                parentIndex = parents[nextNode];
+                if (parentIndex == -1 || hasBeenSorted[parentIndex]) {
+                    // swap with currentNode
+                    hasBeenSorted[nextNode] = true;
+                    sortedNodes[i] = nextNode;
+                    sortedNodes[j] = currentNode;
+                    i++;
+                    currentNode = sortedNodes[i];
+                }
+                j++;
+            }
+        }
+    }
 
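Note: the added pass is a selection-style topological sort over the node forest: it scans ahead for the next node whose parent is already placed and swaps it into position, so every parent ends up ahead of its children. A self-contained copy with a worked example; sortParentsFirst is a hypothetical name, but the body mirrors the added code line for line:

    #include <QVector>
    #include <cassert>

    void sortParentsFirst(QVector<int>& sortedNodes, const QVector<int>& parents) {
        const int numNodes = sortedNodes.size();
        QVector<bool> hasBeenSorted;
        hasBeenSorted.fill(false, numNodes);
        int i = 0;
        while (i < numNodes) {
            int currentNode = sortedNodes[i];
            int parentIndex = parents[currentNode];
            if (parentIndex == -1 || hasBeenSorted[parentIndex]) {
                hasBeenSorted[currentNode] = true;   // parent already placed (or none)
                i++;
            } else {
                int j = i + 1;                       // look ahead for a ready node
                while (j < numNodes) {
                    int nextNode = sortedNodes[j];
                    parentIndex = parents[nextNode];
                    if (parentIndex == -1 || hasBeenSorted[parentIndex]) {
                        hasBeenSorted[nextNode] = true;
                        sortedNodes[i] = nextNode;   // swap the ready node into slot i
                        sortedNodes[j] = currentNode;
                        i++;
                        currentNode = sortedNodes[i];
                    }
                    j++;
                }
            }
        }
    }

    int main() {
        // node 0's parent is 2, node 1's parent is 0, node 2 is a root
        QVector<int> order{0, 1, 2};
        sortParentsFirst(order, QVector<int>{2, 0, -1});
        assert((order == QVector<int>{2, 0, 1}));    // parents now precede children
        return 0;
    }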
-    // initialize order in which nodes will be parsed
-    std::vector<int> nodeQueue;
-    nodeQueue.reserve(numNodes);
-    int rootNode = 0;
-    int finalNode = numNodes;
-    if (!_file.scenes[_file.scene].nodes.contains(0)) {
-        rootNode = numNodes - 1;
-        finalNode = -1;
-    }
-    bool rootAtStartOfList = rootNode < finalNode;
-    int nodeListStride = 1;
-    if (!rootAtStartOfList) { nodeListStride = -1; }
 
-    QVector<int> initialSceneNodes = _file.scenes[_file.scene].nodes;
-    std::sort(initialSceneNodes.begin(), initialSceneNodes.end());
-    int sceneRootNode = 0;
-    int sceneFinalNode = initialSceneNodes.size();
-    if (!rootAtStartOfList) {
-        sceneRootNode = initialSceneNodes.size() - 1;
-        sceneFinalNode = -1;
-    }
-    for (int index = sceneRootNode; index != sceneFinalNode; index += nodeListStride) {
-        int i = initialSceneNodes[index];
-        nodeQueue.push_back(i);
-        std::vector<int> children = _file.nodes[i].children.toStdVector();
-        std::sort(children.begin(), children.end());
-        getNodeQueueByDepthFirstChildren(children, nodeListStride, nodeQueue);
+    // Build map from original to new indices
+    QVector<int> originalToNewNodeIndexMap;
+    originalToNewNodeIndexMap.fill(-1, numNodes);
+    for (int i = 0; i < numNodes; i++) {
+        originalToNewNodeIndexMap[sortedNodes[i]] = i;
     }
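Note: joints (and, further down, the per-vertex cluster indices) are written out in sortedNodes order, while every index stored in the glTF file (node children, skin joints) still uses the original node numbering; the map translates original indices into sorted positions. It is simply the inverse permutation of sortedNodes, a property the following check (using the diff's own variables, assert from <cassert>) would confirm:

    for (int i = 0; i < numNodes; i++) {
        assert(originalToNewNodeIndexMap[sortedNodes[i]] == i);
    }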
 
 
     // Build joints
     HFMJoint joint;
     joint.distanceToParent = 0;
     hfmModel.jointIndices["x"] = numNodes;
-    hfmModel.hasSkeletonJoints = false;
 
-    for (int nodeIndex : nodeQueue) {
+    for (int nodeIndex : sortedNodes) {
         auto& node = _file.nodes[nodeIndex];
 
-        joint.parentIndex = -1;
-        if (!_file.scenes[_file.scene].nodes.contains(nodeIndex)) {
-            joint.parentIndex = std::distance(nodeQueue.begin(), std::find(nodeQueue.begin(), nodeQueue.end(), nodeDependencies[nodeIndex][0]));
+        joint.parentIndex = parents[nodeIndex];
+        if (joint.parentIndex != -1) {
+            joint.parentIndex = originalToNewNodeIndexMap[joint.parentIndex];
         }
         joint.transform = node.transforms.first();
         joint.translation = extractTranslation(joint.transform);
         joint.rotation = glmExtractRotation(joint.transform);
         glm::vec3 scale = extractScale(joint.transform);
         joint.postTransform = glm::scale(glm::mat4(), scale);
 
         joint.name = node.name;
         joint.isSkeletonJoint = false;
@@ -862,24 +850,25 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
     // Build skeleton
     std::vector<glm::mat4> jointInverseBindTransforms;
     jointInverseBindTransforms.resize(numNodes);
-    if (!_file.skins.isEmpty()) {
+    hfmModel.hasSkeletonJoints = !_file.skins.isEmpty();
+    if (hfmModel.hasSkeletonJoints) {
+        hfmModel.hasSkeletonJoints = true;
         std::vector<std::vector<float>> inverseBindValues;
         getSkinInverseBindMatrices(inverseBindValues);
 
-        int jointIndex = finalNode;
-        while (jointIndex != rootNode) {
-            rootAtStartOfList ? jointIndex-- : jointIndex++;
-            int jOffset = nodeQueue[jointIndex];
+        for (int jointIndex = 0; jointIndex < numNodes; jointIndex++) {
+            int nodeIndex = sortedNodes[jointIndex];
             auto joint = hfmModel.joints[jointIndex];
 
-            hfmModel.hasSkeletonJoints = true;
             for (int s = 0; s < _file.skins.size(); s++) {
-                auto skin = _file.skins[s];
-                joint.isSkeletonJoint = skin.joints.contains(jOffset);
+                const auto& skin = _file.skins[s];
+                int matrixIndex = skin.joints.indexOf(nodeIndex);
+                joint.isSkeletonJoint = skin.joints.contains(nodeIndex);
 
+                // build inverse bind matrices
                 if (joint.isSkeletonJoint) {
-                    std::vector<float> value = inverseBindValues[s];
-                    int matrixCount = 16 * skin.joints.indexOf(jOffset);
+                    std::vector<float>& value = inverseBindValues[s];
+                    int matrixCount = 16 * matrixIndex;
                     jointInverseBindTransforms[jointIndex] =
                         glm::mat4(value[matrixCount], value[matrixCount + 1], value[matrixCount + 2], value[matrixCount + 3],
                                   value[matrixCount + 4], value[matrixCount + 5], value[matrixCount + 6], value[matrixCount + 7],
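Note: glTF stores each inverse bind matrix as 16 floats in column-major order, and glm's 16-scalar mat4 constructor is also column-major, so the values can be passed through in file order without a transpose. Assuming a pointer to 16 contiguous floats, glm::make_mat4 from <glm/gtc/type_ptr.hpp> would do the same job more compactly; a sketch (inverseBindMatrixAt is a hypothetical helper, not serializer code):

    #include <glm/glm.hpp>
    #include <glm/gtc/type_ptr.hpp>
    #include <vector>

    glm::mat4 inverseBindMatrixAt(const std::vector<float>& value, int matrixIndex) {
        return glm::make_mat4(value.data() + 16 * matrixIndex);   // column-major on both sides
    }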
@@ -896,7 +885,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
     }
 
 
-    //Build materials
+    // Build materials
     QVector<QString> materialIDs;
     QString unknown = "Default";
     int ukcount = 0;
@@ -916,7 +905,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
     // Build meshes
     nodecount = 0;
-    for (int nodeIndex = rootNode; nodeIndex != finalNode; nodeIndex += nodeListStride) {
+    for (int nodeIndex : sortedNodes) {
         auto& node = _file.nodes[nodeIndex];
 
         if (node.defined["mesh"]) {
@@ -924,14 +913,14 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
             foreach(auto &primitive, _file.meshes[node.mesh].primitives) {
                 hfmModel.meshes.append(HFMMesh());
                 HFMMesh& mesh = hfmModel.meshes[hfmModel.meshes.size() - 1];
                 if (!hfmModel.hasSkeletonJoints) {
                     HFMCluster cluster;
                     cluster.jointIndex = nodecount;
                     cluster.inverseBindMatrix = glm::mat4();
                     cluster.inverseBindTransform = Transform(cluster.inverseBindMatrix);
                     mesh.clusters.append(cluster);
-                } else {
-                    for (int j = rootNode; j != finalNode; j += nodeListStride) {
+                } else { // skinned model
+                    for (int j = 0; j < numNodes; j++) {
                         HFMCluster cluster;
                         cluster.jointIndex = j;
                         cluster.inverseBindMatrix = jointInverseBindTransforms[j];
@@ -940,10 +929,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                     }
                 }
                 HFMCluster root;
-                root.jointIndex = rootNode;
-                if (root.jointIndex == -1) {
-                    root.jointIndex = 0;
-                }
+                root.jointIndex = 0;
                 root.inverseBindMatrix = jointInverseBindTransforms[root.jointIndex];
                 root.inverseBindTransform = Transform(root.inverseBindMatrix);
                 mesh.clusters.append(root);
@@ -1043,6 +1029,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                         qWarning(modelformat) << "There was a problem reading glTF TANGENT data for model " << _url;
                         continue;
                     }
+                    // tangents can be a vec3 or a vec4 which includes a w component (of -1 or 1)
                     int stride = (accessor.type == GLTFAccessorType::VEC4) ? 4 : 3;
                     for (int n = 0; n < tangents.size() - 3; n += stride) {
                         float tanW = stride == 4 ? tangents[n + 3] : 1;
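Note: glTF TANGENT accessors are VEC4, with w set to -1 or 1 to give the bitangent's handedness; the loop also tolerates VEC3 data by defaulting w to 1. A standalone sketch of the same stride handling (readTangents is a hypothetical helper; it uses an inclusive bound, n + stride <= size, whereas the strict n < tangents.size() - 3 comparison above appears to skip the final tangent when the data is VEC3):

    #include <glm/glm.hpp>
    #include <vector>

    void readTangents(const std::vector<float>& tangents, int stride /* 3 or 4 */,
                      std::vector<glm::vec4>& out) {
        for (int n = 0; n + stride <= (int)tangents.size(); n += stride) {
            float tanW = (stride == 4) ? tangents[n + 3] : 1.0f;   // w picks the bitangent direction
            out.emplace_back(tangents[n], tangents[n + 1], tangents[n + 2], tanW);
        }
    }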
@@ -1111,7 +1098,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                     }
                 }
 
-                // adapted from FBXSerializer.cpp
+                // Build weights (adapted from FBXSerializer.cpp)
                 if (hfmModel.hasSkeletonJoints) {
                     int numClusterIndices = clusterJoints.size();
                     const int WEIGHTS_PER_VERTEX = 4;
@@ -1121,7 +1108,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                     mesh.clusterWeights.fill(0, numClusterIndices);
 
                     for (int c = 0; c < clusterJoints.size(); c++) {
-                        mesh.clusterIndices[c] = _file.skins[node.skin].joints[clusterJoints[c]];
+                        mesh.clusterIndices[c] = originalToNewNodeIndexMap[_file.skins[node.skin].joints[clusterJoints[c]]];
                    }
 
                    // normalize and compress to 16-bits
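Note: this one-line change is the payoff of the reordering work above: skin.joints holds original glTF node indices, but the joint array was emitted in sortedNodes order, so each per-vertex joint reference has to go through originalToNewNodeIndexMap, exactly as joint.parentIndex did. A standalone sketch of that translation (remapClusterIndices and its parameter names are hypothetical):

    #include <QVector>

    QVector<int> remapClusterIndices(const QVector<int>& clusterJoints,  // per-vertex skin-joint slots
                                     const QVector<int>& skinJoints,     // slot -> original node index
                                     const QVector<int>& nodeToJoint) {  // original node -> sorted joint
        QVector<int> out;
        out.reserve(clusterJoints.size());
        for (int slot : clusterJoints) {
            out.append(nodeToJoint[skinJoints[slot]]);
        }
        return out;
    }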
@@ -1160,12 +1147,10 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
                 mesh.meshIndex = hfmModel.meshes.size();
             }
 
         }
         nodecount++;
     }
 
 
     return true;
 }
GLTFSerializer.h:

@@ -713,7 +713,6 @@ private:
 
     glm::mat4 getModelTransform(const GLTFNode& node);
     void getSkinInverseBindMatrices(std::vector<std::vector<float>>& inverseBindMatrixValues);
-    void getNodeQueueByDepthFirstChildren(std::vector<int>& children, int stride, std::vector<int>& result);
 
     bool buildGeometry(HFMModel& hfmModel, const hifi::URL& url);
     bool parseGLTF(const hifi::ByteArray& data);