Mirror of https://github.com/lubosz/overte.git (synced 2025-04-17 00:57:44 +02:00)
skinning, skeleton for models, avatar
parent 73cf51182e
commit 5480c9f5ca

2 changed files with 232 additions and 40 deletions
GLTFSerializer.cpp

@@ -711,19 +711,19 @@ glm::mat4 GLTFSerializer::getModelTransform(const GLTFNode& node) {
         node.matrix[12], node.matrix[13], node.matrix[14], node.matrix[15]);
     } else {
 
-        if (node.defined["rotation"] && node.rotation.size() == 4) {
-            //quat(x,y,z,w) to quat(w,x,y,z)
-            glm::quat rotquat = glm::quat(node.rotation[3], node.rotation[0], node.rotation[1], node.rotation[2]);
-            tmat = glm::mat4_cast(rotquat) * tmat;
-        }
-
         if (node.defined["scale"] && node.scale.size() == 3) {
             glm::vec3 scale = glm::vec3(node.scale[0], node.scale[1], node.scale[2]);
             glm::mat4 s = glm::mat4(1.0);
             s = glm::scale(s, scale);
             tmat = s * tmat;
         }
 
+        if (node.defined["rotation"] && node.rotation.size() == 4) {
+            //quat(x,y,z,w) to quat(w,x,y,z)
+            glm::quat rotquat = glm::quat(node.rotation[3], node.rotation[0], node.rotation[1], node.rotation[2]);
+            tmat = glm::mat4_cast(rotquat) * tmat;
+        }
+
         if (node.defined["translation"] && node.translation.size() == 3) {
             glm::vec3 trans = glm::vec3(node.translation[0], node.translation[1], node.translation[2]);
             glm::mat4 t = glm::mat4(1.0);
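This hunk moves the rotation block below the scale block, so reading the left-multiplications bottom-up, scale is applied first, then rotation, then translation. That matches the glTF 2.0 specification, which composes a node's local matrix from separate properties as M = T * R * S; the old order produced T * S * R. A minimal standalone sketch of the intended composition using glm (the helper name is hypothetical, not part of the patch):

    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>
    #include <glm/gtc/quaternion.hpp>

    // M = T * R * S: scale first, then rotate, then translate.
    glm::mat4 composeTRS(const glm::vec3& t, const glm::quat& r, const glm::vec3& s) {
        glm::mat4 T = glm::translate(glm::mat4(1.0f), t);
        glm::mat4 R = glm::mat4_cast(r);
        glm::mat4 S = glm::scale(glm::mat4(1.0f), s);
        return T * R * S;
    }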
@@ -734,15 +734,58 @@ glm::mat4 GLTFSerializer::getModelTransform(const GLTFNode& node) {
     return tmat;
 }
 
+std::vector<QVector<float>> GLTFSerializer::getSkinInverseBindMatrices() {
+    std::vector<QVector<float>> inverseBindMatrixValues;
+    for (auto& skin : _file.skins) {
+        GLTFAccessor& indicesAccessor = _file.accessors[skin.inverseBindMatrices];
+        GLTFBufferView& indicesBufferview = _file.bufferviews[indicesAccessor.bufferView];
+        GLTFBuffer& indicesBuffer = _file.buffers[indicesBufferview.buffer];
+        int accBoffset = indicesAccessor.defined["byteOffset"] ? indicesAccessor.byteOffset : 0;
+        QVector<float> matrices;
+        addArrayOfType(indicesBuffer.blob,
+                       indicesBufferview.byteOffset + accBoffset,
+                       indicesAccessor.count,
+                       matrices,
+                       indicesAccessor.type,
+                       indicesAccessor.componentType);
+        inverseBindMatrixValues.push_back(matrices);
+    }
+    return inverseBindMatrixValues;
+}
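getSkinInverseBindMatrices reads each skin's inverseBindMatrices accessor into one flat float vector: a MAT4 element, i.e. 16 column-major floats, per joint. glm::mat4's 16-scalar constructor also fills column by column, so the floats can be consumed directly. A sketch of the unpacking (hypothetical helper, mirroring the matrixCount arithmetic used further down in the patch):

    // The joint at skin slot `jointSlot` owns floats [16 * jointSlot, 16 * jointSlot + 16).
    glm::mat4 matrixAtSlot(const QVector<float>& v, int jointSlot) {
        const int base = 16 * jointSlot;
        return glm::mat4(v[base],      v[base + 1],  v[base + 2],  v[base + 3],
                         v[base + 4],  v[base + 5],  v[base + 6],  v[base + 7],
                         v[base + 8],  v[base + 9],  v[base + 10], v[base + 11],
                         v[base + 12], v[base + 13], v[base + 14], v[base + 15]);
    }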
+
+QVector<int> GLTFSerializer::nodeDFS(int n, std::vector<int>& children, bool order) {
+    QVector<int> result;
+    result.append(n);
+    int begin = 0;
+    int finish = children.size();
+    if (order) {
+        begin = children.size() - 1;
+        finish = -1;
+    }
+    int index = begin;
+    while (index != finish) {
+        int c = children[index];
+        std::vector<int> nested = _file.nodes[c].children.toStdVector();
+        if (nested.size() != 0) {
+            std::sort(nested.begin(), nested.end());
+            result.append(nodeDFS(c, nested, order));
+        } else {
+            result.append(c);
+        }
+        begin < finish ? index++ : index--;
+    }
+    return result;
+}
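nodeDFS flattens a subtree depth-first: it emits the node itself, then recurses into each child's subtree, sorting every child list first; the order flag only flips the direction in which sibling lists are walked. A worked example, obtained by tracing the code above:

    // Nodes: 0 -> children {1, 2}; 2 -> child {3}; 1 and 3 are leaves.
    // nodeDFS(0, {1, 2}, false) == {0, 1, 2, 3}   (siblings ascending)
    // nodeDFS(0, {1, 2}, true)  == {0, 2, 3, 1}   (siblings descending)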
+
 
 bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
+    int nodesSize = _file.nodes.size();
 
     //Build dependencies
-    QVector<QVector<int>> nodeDependencies(_file.nodes.size());
+    QVector<QVector<int>> nodeDependencies(nodesSize);
     int nodecount = 0;
     bool hasChildren = false;
     foreach(auto &node, _file.nodes) {
         //nodes_transforms.push_back(getModelTransform(node));
         hasChildren |= !node.children.isEmpty();
         foreach(int child, node.children) nodeDependencies[child].push_back(nodecount);
         nodecount++;
     }
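nodeDependencies inverts the scene graph's child lists: after this loop, entry i holds the indices of every node that names node i as a child, i.e. its parents. For example:

    // If node 0 has children {1, 2}, the loop yields
    //   nodeDependencies[1] == {0} and nodeDependencies[2] == {0};
    // the joint-building pass below reads nodeDependencies[nodeIndex][0]
    // as the (first) parent when resolving joint.parentIndex.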
@@ -764,26 +807,99 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
         nodecount++;
     }
 
-    HFMJoint joint;
-    joint.isSkeletonJoint = true;
-    joint.bindTransformFoundInCluster = false;
-    joint.distanceToParent = 0;
-    joint.parentIndex = -1;
-    hfmModel.joints.resize(_file.nodes.size());
-    hfmModel.jointIndices["x"] = _file.nodes.size();
-    int jointInd = 0;
-    for (auto& node : _file.nodes) {
-        int size = node.transforms.size();
-        if (hasChildren) { size--; }
-        joint.preTransform = glm::mat4(1);
-        for (int i = 0; i < size; i++) {
-            joint.preTransform = node.transforms[i] * joint.preTransform;
-        }
-        joint.name = node.name;
-        hfmModel.joints[jointInd] = joint;
-        jointInd++;
-    }
 
+    // initialize order in which nodes will be parsed
+    QVector<int> nodeQueue;
+    int start = 0;
+    int end = nodesSize;
+    if (!_file.scenes[_file.scene].nodes.contains(0)) {
+        end = -1;
+        start = nodesSize - 1;
+    }
+    QVector<int> init = _file.scenes[_file.scene].nodes;
+    std::sort(init.begin(), init.end());
+    int begin = 0;
+    int finish = init.size();
+    if (start > end) {
+        begin = init.size() - 1;
+        finish = -1;
+    }
+    int index = begin;
+    while (index != finish) {
+        int i = init[index];
+        std::vector<int> children = _file.nodes[i].children.toStdVector();
+        std::sort(children.begin(), children.end());
+        nodeQueue.append(nodeDFS(i, children, start > end));
+        begin < finish ? index++ : index--;
+    }
+
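Because nodeQueue is filled root-first by nodeDFS, every parent is queued before any of its descendants, so the parentIndex lookup in the next block (nodeQueue.indexOf of the node's first parent) always finds an already-queued joint. Tracing the same example scene:

    // Scene roots {0}; node 0 -> children {1, 2}; node 2 -> child {3}:
    //   nodeQueue == {0, 1, 2, 3}, start == 0, end == nodesSize (forward walk).
    // If node 0 is not a scene root, start/end flip to (nodesSize - 1, -1)
    // and the DFS walks sibling lists in descending order instead.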
+    // Build joints
+    HFMJoint joint;
+    joint.distanceToParent = 0;
+    hfmModel.jointIndices["x"] = nodesSize;
+    hfmModel.hasSkeletonJoints = false;
+
+    for (int nodeIndex : nodeQueue) {
+        auto& node = _file.nodes[nodeIndex];
+
+        joint.parentIndex = -1;
+        if (!_file.scenes[_file.scene].nodes.contains(nodeIndex)) {
+            joint.parentIndex = nodeQueue.indexOf(nodeDependencies[nodeIndex][0]);
+        }
+        joint.transform = node.transforms.first();
+        joint.postTransform = glm::mat4();
+        glm::vec3 scale = extractScale(joint.transform);
+        joint.postTransform[0][0] = scale.x;
+        joint.postTransform[1][1] = scale.y;
+        joint.postTransform[2][2] = scale.z;
+        joint.rotation = glmExtractRotation(joint.transform);
+        joint.translation = extractTranslation(joint.transform);
+
+        joint.name = node.name;
+        hfmModel.joints.push_back(joint);
+    }
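Each joint keeps the full local matrix in joint.transform and also a decomposition of it: rotation and translation are pulled out with the GLMHelpers routines, while scale is parked on postTransform's diagonal. The decomposition is only exact when the node transform carries no shear, which holds for matrices composed as T * R * S:

    // Assuming transform == T * R * S (no shear):
    //   joint.rotation      == R   (glmExtractRotation)
    //   joint.translation   == T   (extractTranslation)
    //   joint.postTransform == diag(S.x, S.y, S.z, 1)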
+
+    // Build skeleton
+    int matrixIndex = 0;
+    std::vector<QVector<float>> inverseBindValues = getSkinInverseBindMatrices();
+    std::vector<glm::mat4> jointInverseBindTransforms;
+    jointInverseBindTransforms.resize(nodesSize);
+
+    int jointIndex = end;
+    while (jointIndex != start) {
+        start < end ? jointIndex-- : jointIndex++;
+        int jOffset = nodeQueue[jointIndex];
+        auto joint = hfmModel.joints[jointIndex];
+
+        joint.isSkeletonJoint = false;
+        if (!_file.skins.isEmpty()) {
+            hfmModel.hasSkeletonJoints = true;
+            for (int s = 0; s < _file.skins.size(); s++) {
+                auto skin = _file.skins[s];
+                joint.isSkeletonJoint = skin.joints.contains(jOffset);
+
+                if (joint.isSkeletonJoint) {
+                    QVector<float> value = inverseBindValues[s];
+                    int matrixCount = 16 * skin.joints.indexOf(jOffset);
+                    jointInverseBindTransforms[jointIndex] =
+                        glm::mat4(value[matrixCount], value[matrixCount + 1], value[matrixCount + 2], value[matrixCount + 3],
+                                  value[matrixCount + 4], value[matrixCount + 5], value[matrixCount + 6], value[matrixCount + 7],
+                                  value[matrixCount + 8], value[matrixCount + 9], value[matrixCount + 10], value[matrixCount + 11],
+                                  value[matrixCount + 12], value[matrixCount + 13], value[matrixCount + 14], value[matrixCount + 15]);
+                    matrixIndex++;
+                } else {
+                    jointInverseBindTransforms[jointIndex] = glm::mat4();
+                }
+            }
+            glm::vec3 bindTranslation = extractTranslation(hfmModel.offset * glm::inverse(jointInverseBindTransforms[jointIndex]));
+            hfmModel.bindExtents.addPoint(bindTranslation);
+        }
+        hfmModel.joints[jointIndex] = joint;
+    }
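An inverse bind matrix maps bind-pose model space into a joint's local space, so inverting it recovers the joint's bind-pose placement in model space. That is what the bindTranslation computation above relies on:

    // inverseBind_j            : model (bind) space -> joint j space
    // inverse(inverseBind_j)   : joint j space -> model (bind) space
    // translation(offset * inverse(inverseBind_j)) == joint j's bind position,
    // which bindExtents accumulates into a bounding box of the bind skeleton.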
 
     //Build materials
     QVector<QString> materialIDs;
@@ -803,23 +919,34 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
     }
 
-    nodecount = 0;
     // Build meshes
-    foreach(auto &node, _file.nodes) {
+    nodecount = 0;
+    int nodeIndex = start;
+    while (nodeIndex != end) {
+        auto& node = _file.nodes[nodeIndex];
 
         if (node.defined["mesh"]) {
-            qCDebug(modelformat) << "node_transforms" << node.transforms;
             foreach(auto &primitive, _file.meshes[node.mesh].primitives) {
                 hfmModel.meshes.append(HFMMesh());
                 HFMMesh& mesh = hfmModel.meshes[hfmModel.meshes.size() - 1];
-                HFMCluster cluster;
-                cluster.jointIndex = nodecount;
-                cluster.inverseBindMatrix = glm::mat4(1, 0, 0, 0,
-                    0, 1, 0, 0,
-                    0, 0, 1, 0,
-                    0, 0, 0, 1);
-                mesh.clusters.append(cluster);
+                if (!hfmModel.hasSkeletonJoints) {
+                    HFMCluster cluster;
+                    cluster.jointIndex = nodecount;
+                    cluster.inverseBindMatrix = glm::mat4();
+                    cluster.inverseBindTransform = Transform(cluster.inverseBindMatrix);
+                    mesh.clusters.append(cluster);
+                } else {
+                    int j = start;
+                    while (j != end) {
+                        HFMCluster cluster;
+                        cluster.jointIndex = j;
+                        cluster.inverseBindMatrix = jointInverseBindTransforms[j];
+                        cluster.inverseBindTransform = Transform(cluster.inverseBindMatrix);
+                        mesh.clusters.append(cluster);
+                        start < end ? j++ : j--;
+                    }
+                }
 
                 HFMMeshPart part = HFMMeshPart();
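These clusters are what downstream skinning consumes: with no skeleton, a single identity cluster pins the mesh to its node; otherwise one cluster per queued joint carries that joint's inverse bind matrix. For reference, a self-contained sketch of the linear-blend skinning they feed (hypothetical function, not part of this patch):

    #include <array>
    #include <glm/glm.hpp>

    // Blend up to four (joint transform * inverse bind matrix) pairs by weight.
    glm::vec3 skinVertex(const glm::vec3& position,
                         const std::array<glm::mat4, 4>& jointTransforms,
                         const std::array<glm::mat4, 4>& inverseBinds,
                         const glm::vec4& weights) {
        glm::vec4 skinned(0.0f);
        for (int i = 0; i < 4; ++i) {
            skinned += weights[i] * (jointTransforms[i] * inverseBinds[i] * glm::vec4(position, 1.0f));
        }
        return glm::vec3(skinned);
    }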
@@ -848,6 +975,8 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                 }
 
                 QList<QString> keys = primitive.attributes.values.keys();
+                QVector<uint16_t> clusterJoints;
+                QVector<float> clusterWeights;
 
                 foreach(auto &key, keys) {
                     int accessorIdx = primitive.attributes.values[key];
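In glTF, JOINTS_0 and WEIGHTS_0 are per-vertex VEC4 attributes: four skin-local joint slots and four matching blend weights for each vertex, so the two accumulators declared here end up holding 4 * vertexCount entries each. Layout example:

    // Per vertex (VEC4 accessors):
    //   JOINTS_0  = {2, 5, 0, 0}          // indices into skin.joints (slots)
    //   WEIGHTS_0 = {0.7, 0.3, 0.0, 0.0}  // weights, nominally summing to 1.0
    // Unused slots conventionally carry weight 0, so their index value is inert.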
@@ -949,8 +1078,68 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                         for (int n = 0; n < texcoords.size(); n = n + 2) {
                             mesh.texCoords1.push_back(glm::vec2(texcoords[n], texcoords[n + 1]));
                         }
-                    }
+                    } else if (key == "JOINTS_0") {
+                        QVector<uint16_t> joints;
+                        success = addArrayOfType(buffer.blob,
+                                                 bufferview.byteOffset + accBoffset,
+                                                 accessor.count,
+                                                 joints,
+                                                 accessor.type,
+                                                 accessor.componentType);
+                        if (!success) {
+                            qWarning(modelformat) << "There was a problem reading glTF JOINTS_0 data for model " << _url;
+                            continue;
+                        }
+                        for (int n = 0; n < joints.size(); n++) {
+                            clusterJoints.push_back(joints[n]);
+                        }
+                    } else if (key == "WEIGHTS_0") {
+                        QVector<float> weights;
+                        success = addArrayOfType(buffer.blob,
+                                                 bufferview.byteOffset + accBoffset,
+                                                 accessor.count,
+                                                 weights,
+                                                 accessor.type,
+                                                 accessor.componentType);
+                        if (!success) {
+                            qWarning(modelformat) << "There was a problem reading glTF WEIGHTS_0 data for model " << _url;
+                            continue;
+                        }
+                        for (int n = 0; n < weights.size(); n++) {
+                            clusterWeights.push_back(weights[n]);
+                        }
+                    }
                 }
+
+                // adapted from FBXSerializer.cpp
+                if (hfmModel.hasSkeletonJoints) {
+                    int numClusterIndices = clusterJoints.size();
+                    const int WEIGHTS_PER_VERTEX = 4;
+                    const float ALMOST_HALF = 0.499f;
+                    int numVertices = mesh.vertices.size();
+                    mesh.clusterIndices.fill(0, numClusterIndices);
+                    mesh.clusterWeights.fill(0, numClusterIndices);
+
+                    for (int c = 0; c < clusterJoints.size(); c++) {
+                        mesh.clusterIndices[c] = _file.skins[node.skin].joints[clusterJoints[c]];
+                    }
+
+                    // normalize and compress to 16-bits
+                    for (int i = 0; i < numVertices; ++i) {
+                        int j = i * WEIGHTS_PER_VERTEX;
+
+                        float totalWeight = 0.0f;
+                        for (int k = j; k < j + WEIGHTS_PER_VERTEX; ++k) {
+                            totalWeight += clusterWeights[k];
+                        }
+                        if (totalWeight > 0.0f) {
+                            float weightScalingFactor = (float)(UINT16_MAX) / totalWeight;
+                            for (int k = j; k < j + WEIGHTS_PER_VERTEX; ++k) {
+                                mesh.clusterWeights[k] = (uint16_t)(weightScalingFactor * clusterWeights[k] + ALMOST_HALF);
+                            }
+                        }
+                    }
+                }
 
                 if (primitive.defined["material"]) {
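The normalization pass rescales each vertex's four weights so they sum to UINT16_MAX, and ALMOST_HALF turns the truncating integer cast into round-to-nearest. Worked through for one vertex:

    // clusterWeights {0.5, 0.25, 0.25, 0.0}:
    //   totalWeight         = 1.0
    //   weightScalingFactor = 65535.0f / 1.0f
    //   0.5  -> (uint16_t)(0.5  * 65535 + 0.499) == 32767
    //   0.25 -> (uint16_t)(0.25 * 65535 + 0.499) == 16384
    //   0.0  -> 0
    // Note JOINTS_0 values are skin-local; the loop above already remapped them
    // through _file.skins[node.skin].joints to global node indices.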
@@ -959,7 +1148,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
                 mesh.parts.push_back(part);
 
                 // populate the texture coordinates if they don't exist
-                if (mesh.texCoords.size() == 0) {
+                if (mesh.texCoords.size() == 0 && !hfmModel.hasSkeletonJoints) {
                     for (int i = 0; i < part.triangleIndices.size(); i++) mesh.texCoords.push_back(glm::vec2(0.0, 1.0));
                 }
                 mesh.meshExtents.reset();
@@ -973,6 +1162,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::URL& url) {
 
             }
             nodecount++;
+            start < end ? nodeIndex++ : nodeIndex--;
         }
 

GLTFSerializer.h
@@ -712,6 +712,8 @@ private:
     hifi::ByteArray _glbBinary;
 
     glm::mat4 getModelTransform(const GLTFNode& node);
+    std::vector<QVector<float>> getSkinInverseBindMatrices();
+    QVector<int> nodeDFS(int n, std::vector<int>& children, bool order);
 
     bool buildGeometry(HFMModel& hfmModel, const hifi::URL& url);
     bool parseGLTF(const hifi::ByteArray& data);