Mirror of https://github.com/overte-org/overte.git
Factoring the FBX model code out to a shared base class.

commit 7e174093e4 (parent b84a5679dd)
25 changed files with 239 additions and 741 deletions
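The shared base class itself (renderer/Model.h, added elsewhere in this commit) does not appear in the hunks below. As a reading aid, here is a rough, non-authoritative sketch of the interface implied by the call sites in this diff (setURL, isActive, init/reset/simulate/render, the set* transform helpers, and the maybeUpdate* hooks); every name and signature is inferred from usage, not copied from the real header.

    // Sketch only -- inferred from how FaceModel, SkeletonModel, Profile and
    // DataServerClient use the new base class in this commit; the actual
    // renderer/Model.h may differ.
    #include <QObject>
    #include <QSharedPointer>
    #include <QUrl>
    #include <QVector>
    #include <vector>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>
    #include "renderer/GeometryCache.h"

    class FBXJoint;

    class Model : public QObject {
        Q_OBJECT

    public:
        bool isActive() const;                     // geometry downloaded and ready to use
        void init();
        void reset();
        virtual void simulate(float deltaTime);    // updates joint states from the FBX geometry
        bool render(float alpha);                  // returns false until the geometry is loaded

        Q_INVOKABLE void setURL(const QUrl& url);  // invoked by name via QMetaObject::invokeMethod

        void setTranslation(const glm::vec3& translation);
        void setRotation(const glm::quat& rotation);
        void setScale(const glm::vec3& scale);
        void setOffset(const glm::vec3& offset);
        void setPupilDilation(float dilation);
        void setBlendshapeCoefficients(const std::vector<float>& coefficients);

        bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
        glm::vec4 computeAverageColor() const;

    protected:
        class JointState {
        public:
            glm::quat rotation;
            glm::mat4 transform;
        };

        // Hooks that subclasses override for special joints (FaceModel overrides both).
        virtual void maybeUpdateNeckRotation(const FBXJoint& joint, JointState& state);
        virtual void maybeUpdateEyeRotation(const FBXJoint& joint, JointState& state);

        QSharedPointer<NetworkGeometry> _geometry;
        QVector<JointState> _jointStates;
    };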
@@ -363,10 +363,10 @@ void Application::paintGL() {
     if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
         _myCamera.setTightness(100.0f);
         glm::vec3 targetPosition = _myAvatar.getUprightHeadPosition();
-        if (_myAvatar.getHead().getBlendFace().isActive()) {
+        if (_myAvatar.getHead().getFaceModel().isActive()) {
             // make sure we're aligned to the blend face eyes
             glm::vec3 leftEyePosition, rightEyePosition;
-            if (_myAvatar.getHead().getBlendFace().getEyePositions(leftEyePosition, rightEyePosition, true)) {
+            if (_myAvatar.getHead().getFaceModel().getEyePositions(leftEyePosition, rightEyePosition)) {
                 targetPosition = (leftEyePosition + rightEyePosition) * 0.5f;
             }
         }

@@ -1332,7 +1332,7 @@ void Application::processAvatarFaceVideoMessage(unsigned char* packetData, size_
     if (!avatar) {
         return;
     }
-    avatar->getHead().getFace().processVideoMessage(packetData, dataBytes);
+    avatar->getHead().getVideoFace().processVideoMessage(packetData, dataBytes);
 }

 void Application::checkBandwidthMeterClick() {
@@ -141,9 +141,8 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
         Avatar* avatar = (Avatar *) node->getLinkedData();

         if (avatar->getUUID() == userUUID) {
-            QMetaObject::invokeMethod(&avatar->getHead().getBlendFace(),
-                "setModelURL",
-                Q_ARG(QUrl, QUrl(valueList[0])));
+            QMetaObject::invokeMethod(&avatar->getHead().getFaceModel(),
+                "setURL", Q_ARG(QUrl, QUrl(valueList[0])));
         }
     }
 }

@@ -161,9 +160,8 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
         Avatar* avatar = (Avatar *) node->getLinkedData();

         if (avatar->getUUID() == userUUID) {
-            QMetaObject::invokeMethod(&avatar->getBody(),
-                "setSkeletonModelURL",
-                Q_ARG(QUrl, QUrl(valueList[0])));
+            QMetaObject::invokeMethod(&avatar->getSkeletonModel(), "setURL",
+                Q_ARG(QUrl, QUrl(valueList[0])));
         }
     }
 }
@@ -297,7 +297,7 @@ Menu::Menu() :
     addActionToQMenuAndActionHash(avatarOptionsMenu,
         MenuOption::FaceMode,
         0,
-        &appInstance->getAvatar()->getHead().getFace(),
+        &appInstance->getAvatar()->getHead().getVideoFace(),
         SLOT(cycleRenderMode()));

     addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::LookAtVectors, 0, true);
@@ -78,8 +78,8 @@ void Avatar::sendAvatarURLsMessage(const QUrl& voxelURL) {
 Avatar::Avatar(Node* owningNode) :
     AvatarData(owningNode),
     _head(this),
-    _body(this),
     _hand(this),
+    _skeletonModel(this),
     _ballSpringsInitialized(false),
     _bodyYawDelta(0.0f),
     _movedHandOffset(0.0f, 0.0f, 0.0f),

@@ -261,6 +261,7 @@ Avatar::~Avatar() {
 void Avatar::init() {
     _head.init();
     _hand.init();
+    _skeletonModel.init();
     _voxels.init();
     _initialized = true;
 }

@@ -415,7 +416,7 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
     _head.setPosition(_bodyBall[ BODY_BALL_HEAD_BASE ].position);
     _head.setSkinColor(glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]));
     _head.simulate(deltaTime, false);
-    _body.simulate(deltaTime);
+    _skeletonModel.simulate(deltaTime);
     _hand.simulate(deltaTime, false);

     // use speed and angular velocity to determine walking vs. standing

@@ -744,18 +745,18 @@ float Avatar::getBallRenderAlpha(int ball, bool lookingInMirror) const {

 void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {

-    if (_head.getFace().isFullFrame()) {
+    if (_head.getVideoFace().isFullFrame()) {
         // Render the full-frame video
         float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
         if (alpha > 0.0f) {
-            _head.getFace().render(1.0f);
+            _head.getVideoFace().render(1.0f);
         }
-    } else if (renderAvatarBalls || !(_voxels.getVoxelURL().isValid() || _body.isActive())) {
+    } else if (renderAvatarBalls || !(_voxels.getVoxelURL().isValid() || _skeletonModel.isActive())) {
         // Render the body as balls and cones
         glm::vec3 skinColor(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]);
         glm::vec3 darkSkinColor(DARK_SKIN_COLOR[0], DARK_SKIN_COLOR[1], DARK_SKIN_COLOR[2]);
-        if (_head.getBlendFace().isActive()) {
-            skinColor = glm::vec3(_head.getBlendFace().computeAverageColor());
+        if (_head.getFaceModel().isActive()) {
+            skinColor = glm::vec3(_head.getFaceModel().computeAverageColor());
             const float SKIN_DARKENING = 0.9f;
             darkSkinColor = skinColor * SKIN_DARKENING;
         }

@@ -780,7 +781,7 @@ void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
                 skinColor.g - _bodyBall[b].touchForce * 0.2f,
                 skinColor.b - _bodyBall[b].touchForce * 0.1f);

-            if (b == BODY_BALL_NECK_BASE && _head.getBlendFace().isActive()) {
+            if (b == BODY_BALL_NECK_BASE && _head.getFaceModel().isActive()) {
                 continue; // don't render the neck if we have a face model
             }

@@ -815,7 +816,7 @@ void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
         // Render the body's voxels and head
         float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
         if (alpha > 0.0f) {
-            if (!_body.render(alpha)) {
+            if (!_skeletonModel.render(alpha)) {
                 _voxels.render(false);
             }
             _head.render(alpha, false);
@@ -18,11 +18,11 @@
 #include "AvatarTouch.h"
 #include "AvatarVoxelSystem.h"
 #include "Balls.h"
-#include "Body.h"
 #include "Hand.h"
 #include "Head.h"
 #include "InterfaceConfig.h"
 #include "Skeleton.h"
+#include "SkeletonModel.h"
 #include "world.h"
 #include "devices/SerialInterface.h"
 #include "devices/Transmitter.h"

@@ -147,12 +147,12 @@ public:
     //getters
     bool isInitialized() const { return _initialized; }
     const Skeleton& getSkeleton() const { return _skeleton; }
+    SkeletonModel& getSkeletonModel() { return _skeletonModel; }
     float getHeadYawRate() const { return _head.yawRate; }
     const glm::vec3& getHeadJointPosition() const { return _skeleton.joint[ AVATAR_JOINT_HEAD_BASE ].position; }
     float getScale() const { return _scale; }
     const glm::vec3& getVelocity() const { return _velocity; }
     Head& getHead() { return _head; }
-    Body& getBody() { return _body; }
     Hand& getHand() { return _hand; }
     glm::quat getOrientation() const;
     glm::quat getWorldAlignedOrientation() const;

@@ -198,9 +198,9 @@ protected:
     };

     Head _head;
-    Body _body;
     Hand _hand;
     Skeleton _skeleton;
+    SkeletonModel _skeletonModel;
     bool _ballSpringsInitialized;
     float _bodyYawDelta;
     glm::vec3 _movedHandOffset;
@@ -1,430 +0,0 @@
-//
-// BlendFace.cpp
-// interface
-//
-// Created by Andrzej Kapolka on 9/16/13.
-// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
-//
-
-#include <QNetworkReply>
-
-#include <glm/gtx/transform.hpp>
-
-#include "Application.h"
-#include "BlendFace.h"
-#include "Head.h"
-
-using namespace fs;
-using namespace std;
-
-BlendFace::BlendFace(Head* owningHead) :
-    _owningHead(owningHead)
-{
-    // we may have been created in the network thread, but we live in the main thread
-    moveToThread(Application::getInstance()->thread());
-}
-
-BlendFace::~BlendFace() {
-    deleteGeometry();
-}
-
-ProgramObject BlendFace::_program;
-ProgramObject BlendFace::_skinProgram;
-int BlendFace::_clusterMatricesLocation;
-int BlendFace::_clusterIndicesLocation;
-int BlendFace::_clusterWeightsLocation;
-
-void BlendFace::init() {
-    if (!_program.isLinked()) {
-        switchToResourcesParentIfRequired();
-        _program.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/blendface.vert");
-        _program.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/blendface.frag");
-        _program.link();
-
-        _program.bind();
-        _program.setUniformValue("texture", 0);
-        _program.release();
-
-        _skinProgram.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/skin_blendface.vert");
-        _skinProgram.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/blendface.frag");
-        _skinProgram.link();
-
-        _skinProgram.bind();
-        _clusterMatricesLocation = _skinProgram.uniformLocation("clusterMatrices");
-        _clusterIndicesLocation = _skinProgram.attributeLocation("clusterIndices");
-        _clusterWeightsLocation = _skinProgram.attributeLocation("clusterWeights");
-        _skinProgram.setUniformValue("texture", 0);
-        _skinProgram.release();
-    }
-}
-
-void BlendFace::reset() {
-    _resetStates = true;
-}
-
-const glm::vec3 MODEL_TRANSLATION(0.0f, -60.0f, 40.0f); // temporary fudge factor
-const float MODEL_SCALE = 0.0006f;
-
-void BlendFace::simulate(float deltaTime) {
-    if (!isActive()) {
-        return;
-    }
-
-    // set up world vertices on first simulate after load
-    const FBXGeometry& geometry = _geometry->getFBXGeometry();
-    if (_meshStates.isEmpty()) {
-        QVector<glm::vec3> vertices;
-        foreach (const FBXJoint& joint, geometry.joints) {
-            JointState state;
-            state.rotation = joint.rotation;
-            _jointStates.append(state);
-        }
-        foreach (const FBXMesh& mesh, geometry.meshes) {
-            MeshState state;
-            state.clusterMatrices.resize(mesh.clusters.size());
-            if (mesh.springiness > 0.0f) {
-                state.worldSpaceVertices.resize(mesh.vertices.size());
-                state.vertexVelocities.resize(mesh.vertices.size());
-                state.worldSpaceNormals.resize(mesh.vertices.size());
-            }
-            _meshStates.append(state);
-        }
-        _resetStates = true;
-    }
-
-    const Skeleton& skeleton = static_cast<Avatar*>(_owningHead->_owningAvatar)->getSkeleton();
-    glm::quat orientation = skeleton.joint[AVATAR_JOINT_NECK_BASE].absoluteRotation;
-    glm::vec3 scale = glm::vec3(-1.0f, 1.0f, -1.0f) * _owningHead->getScale() * MODEL_SCALE;
-    glm::vec3 offset = MODEL_TRANSLATION - geometry.neckPivot;
-    glm::mat4 baseTransform = glm::translate(skeleton.joint[AVATAR_JOINT_NECK_BASE].position) * glm::mat4_cast(orientation) *
-        glm::scale(scale) * glm::translate(offset);
-
-    // update the world space transforms for all joints
-    for (int i = 0; i < _jointStates.size(); i++) {
-        JointState& state = _jointStates[i];
-        const FBXJoint& joint = geometry.joints.at(i);
-        if (joint.parentIndex == -1) {
-            state.transform = baseTransform * geometry.offset * joint.preRotation *
-                glm::mat4_cast(state.rotation) * joint.postRotation;
-
-        } else {
-            if (i == geometry.neckJointIndex) {
-                // get the rotation axes in joint space and use them to adjust the rotation
-                glm::mat3 axes = glm::mat3_cast(orientation);
-                glm::mat3 inverse = glm::inverse(glm::mat3(_jointStates[joint.parentIndex].transform *
-                    joint.preRotation * glm::mat4_cast(joint.rotation)));
-                state.rotation = glm::angleAxis(_owningHead->getRoll(), glm::normalize(inverse * axes[2])) *
-                    glm::angleAxis(_owningHead->getYaw(), glm::normalize(inverse * axes[1])) *
-                    glm::angleAxis(_owningHead->getPitch(), glm::normalize(inverse * axes[0])) * joint.rotation;
-
-            } else if (i == geometry.leftEyeJointIndex || i == geometry.rightEyeJointIndex) {
-                // likewise with the lookat position
-                glm::mat4 inverse = glm::inverse(_jointStates[joint.parentIndex].transform *
-                    joint.preRotation * glm::mat4_cast(joint.rotation));
-                glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getOrientation() * IDENTITY_FRONT, 0.0f));
-                glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
-                    _owningHead->getSaccade(), 1.0f));
-                state.rotation = rotationBetween(front, lookAt) * joint.rotation;
-            }
-            state.transform = _jointStates[joint.parentIndex].transform * joint.preRotation *
-                glm::mat4_cast(state.rotation) * joint.postRotation;
-        }
-    }
-
-    for (int i = 0; i < _meshStates.size(); i++) {
-        MeshState& state = _meshStates[i];
-        const FBXMesh& mesh = geometry.meshes.at(i);
-        for (int j = 0; j < mesh.clusters.size(); j++) {
-            const FBXCluster& cluster = mesh.clusters.at(j);
-            state.clusterMatrices[j] = _jointStates[cluster.jointIndex].transform * cluster.inverseBindMatrix;
-        }
-        int vertexCount = state.worldSpaceVertices.size();
-        if (vertexCount == 0) {
-            continue;
-        }
-        glm::vec3* destVertices = state.worldSpaceVertices.data();
-        glm::vec3* destVelocities = state.vertexVelocities.data();
-        glm::vec3* destNormals = state.worldSpaceNormals.data();
-
-        const glm::vec3* sourceVertices = mesh.vertices.constData();
-        if (!mesh.blendshapes.isEmpty()) {
-            _blendedVertices.resize(max(_blendedVertices.size(), vertexCount));
-            memcpy(_blendedVertices.data(), mesh.vertices.constData(), vertexCount * sizeof(glm::vec3));
-
-            // blend in each coefficient
-            const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
-            for (int j = 0; j < coefficients.size(); j++) {
-                float coefficient = coefficients[j];
-                if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
-                    continue;
-                }
-                const glm::vec3* vertex = mesh.blendshapes[j].vertices.constData();
-                for (const int* index = mesh.blendshapes[j].indices.constData(),
-                        *end = index + mesh.blendshapes[j].indices.size(); index != end; index++, vertex++) {
-                    _blendedVertices[*index] += *vertex * coefficient;
-                }
-            }
-            sourceVertices = _blendedVertices.constData();
-        }
-        glm::mat4 transform;
-        if (mesh.clusters.size() > 1) {
-            _blendedVertices.resize(max(_blendedVertices.size(), vertexCount));
-
-            // skin each vertex
-            const glm::vec4* clusterIndices = mesh.clusterIndices.constData();
-            const glm::vec4* clusterWeights = mesh.clusterWeights.constData();
-            for (int j = 0; j < vertexCount; j++) {
-                _blendedVertices[j] =
-                    glm::vec3(state.clusterMatrices[clusterIndices[j][0]] *
-                        glm::vec4(sourceVertices[j], 1.0f)) * clusterWeights[j][0] +
-                    glm::vec3(state.clusterMatrices[clusterIndices[j][1]] *
-                        glm::vec4(sourceVertices[j], 1.0f)) * clusterWeights[j][1] +
-                    glm::vec3(state.clusterMatrices[clusterIndices[j][2]] *
-                        glm::vec4(sourceVertices[j], 1.0f)) * clusterWeights[j][2] +
-                    glm::vec3(state.clusterMatrices[clusterIndices[j][3]] *
-                        glm::vec4(sourceVertices[j], 1.0f)) * clusterWeights[j][3];
-            }
-            sourceVertices = _blendedVertices.constData();
-
-        } else {
-            transform = state.clusterMatrices[0];
-        }
-        if (_resetStates) {
-            for (int j = 0; j < vertexCount; j++) {
-                destVertices[j] = glm::vec3(transform * glm::vec4(sourceVertices[j], 1.0f));
-                destVelocities[j] = glm::vec3();
-            }
-        } else {
-            const float SPRINGINESS_MULTIPLIER = 200.0f;
-            const float DAMPING = 5.0f;
-            for (int j = 0; j < vertexCount; j++) {
-                destVelocities[j] += ((glm::vec3(transform * glm::vec4(sourceVertices[j], 1.0f)) - destVertices[j]) *
-                    mesh.springiness * SPRINGINESS_MULTIPLIER - destVelocities[j] * DAMPING) * deltaTime;
-                destVertices[j] += destVelocities[j] * deltaTime;
-            }
-        }
-        for (int j = 0; j < vertexCount; j++) {
-            destNormals[j] = glm::vec3();
-
-            const glm::vec3& middle = destVertices[j];
-            for (QVarLengthArray<QPair<int, int>, 4>::const_iterator connection = mesh.vertexConnections.at(j).constBegin();
-                    connection != mesh.vertexConnections.at(j).constEnd(); connection++) {
-                destNormals[j] += glm::normalize(glm::cross(destVertices[connection->second] - middle,
-                    destVertices[connection->first] - middle));
-            }
-        }
-    }
-    _resetStates = false;
-}
-
-bool BlendFace::render(float alpha) {
-    if (_meshStates.isEmpty()) {
-        return false;
-    }
-
-    // set up blended buffer ids on first render after load/simulate
-    const FBXGeometry& geometry = _geometry->getFBXGeometry();
-    const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();
-    if (_blendedVertexBufferIDs.isEmpty()) {
-        foreach (const FBXMesh& mesh, geometry.meshes) {
-            GLuint id = 0;
-            if (!mesh.blendshapes.isEmpty() || mesh.springiness > 0.0f) {
-                glGenBuffers(1, &id);
-                glBindBuffer(GL_ARRAY_BUFFER, id);
-                glBufferData(GL_ARRAY_BUFFER, (mesh.vertices.size() + mesh.normals.size()) * sizeof(glm::vec3),
-                    NULL, GL_DYNAMIC_DRAW);
-                glBindBuffer(GL_ARRAY_BUFFER, 0);
-            }
-            _blendedVertexBufferIDs.append(id);
-
-            QVector<QSharedPointer<Texture> > dilated;
-            dilated.resize(mesh.parts.size());
-            _dilatedTextures.append(dilated);
-        }
-    }
-
-    glEnableClientState(GL_VERTEX_ARRAY);
-    glEnableClientState(GL_NORMAL_ARRAY);
-    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
-
-    glDisable(GL_COLOR_MATERIAL);
-
-    for (int i = 0; i < networkMeshes.size(); i++) {
-        const NetworkMesh& networkMesh = networkMeshes.at(i);
-        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, networkMesh.indexBufferID);
-
-        const FBXMesh& mesh = geometry.meshes.at(i);
-        int vertexCount = mesh.vertices.size();
-
-        glBindBuffer(GL_ARRAY_BUFFER, networkMesh.vertexBufferID);
-
-        const MeshState& state = _meshStates.at(i);
-        if (state.worldSpaceVertices.isEmpty()) {
-            if (state.clusterMatrices.size() > 1) {
-                _skinProgram.bind();
-                glUniformMatrix4fvARB(_clusterMatricesLocation, state.clusterMatrices.size(), false,
-                    (const float*)state.clusterMatrices.constData());
-                int offset = vertexCount * sizeof(glm::vec2) + (mesh.blendshapes.isEmpty() ?
-                    vertexCount * 2 * sizeof(glm::vec3) : 0);
-                _skinProgram.setAttributeBuffer(_clusterIndicesLocation, GL_FLOAT, offset, 4);
-                _skinProgram.setAttributeBuffer(_clusterWeightsLocation, GL_FLOAT,
-                    offset + vertexCount * sizeof(glm::vec4), 4);
-                _skinProgram.enableAttributeArray(_clusterIndicesLocation);
-                _skinProgram.enableAttributeArray(_clusterWeightsLocation);
-
-            } else {
-                glPushMatrix();
-                glMultMatrixf((const GLfloat*)&state.clusterMatrices[0]);
-                _program.bind();
-            }
-        } else {
-            _program.bind();
-        }
-
-        if (mesh.blendshapes.isEmpty() && mesh.springiness == 0.0f) {
-            glTexCoordPointer(2, GL_FLOAT, 0, (void*)(vertexCount * 2 * sizeof(glm::vec3)));
-
-        } else {
-            glTexCoordPointer(2, GL_FLOAT, 0, 0);
-            glBindBuffer(GL_ARRAY_BUFFER, _blendedVertexBufferIDs.at(i));
-
-            if (!state.worldSpaceVertices.isEmpty()) {
-                glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount * sizeof(glm::vec3), state.worldSpaceVertices.constData());
-                glBufferSubData(GL_ARRAY_BUFFER, vertexCount * sizeof(glm::vec3),
-                    vertexCount * sizeof(glm::vec3), state.worldSpaceNormals.constData());
-
-            } else {
-                _blendedVertices.resize(max(_blendedVertices.size(), vertexCount));
-                _blendedNormals.resize(_blendedVertices.size());
-                memcpy(_blendedVertices.data(), mesh.vertices.constData(), vertexCount * sizeof(glm::vec3));
-                memcpy(_blendedNormals.data(), mesh.normals.constData(), vertexCount * sizeof(glm::vec3));
-
-                // blend in each coefficient
-                const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
-                for (int j = 0; j < coefficients.size(); j++) {
-                    float coefficient = coefficients[j];
-                    if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
-                        continue;
-                    }
-                    const float NORMAL_COEFFICIENT_SCALE = 0.01f;
-                    float normalCoefficient = coefficient * NORMAL_COEFFICIENT_SCALE;
-                    const glm::vec3* vertex = mesh.blendshapes[j].vertices.constData();
-                    const glm::vec3* normal = mesh.blendshapes[j].normals.constData();
-                    for (const int* index = mesh.blendshapes[j].indices.constData(),
-                            *end = index + mesh.blendshapes[j].indices.size(); index != end; index++, vertex++, normal++) {
-                        _blendedVertices[*index] += *vertex * coefficient;
-                        _blendedNormals[*index] += *normal * normalCoefficient;
-                    }
-                }
-
-                glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount * sizeof(glm::vec3), _blendedVertices.constData());
-                glBufferSubData(GL_ARRAY_BUFFER, vertexCount * sizeof(glm::vec3),
-                    vertexCount * sizeof(glm::vec3), _blendedNormals.constData());
-            }
-        }
-        glVertexPointer(3, GL_FLOAT, 0, 0);
-        glNormalPointer(GL_FLOAT, 0, (void*)(vertexCount * sizeof(glm::vec3)));
-
-        qint64 offset = 0;
-        for (int j = 0; j < networkMesh.parts.size(); j++) {
-            const NetworkMeshPart& networkPart = networkMesh.parts.at(j);
-            const FBXMeshPart& part = mesh.parts.at(j);
-
-            // apply material properties
-            glm::vec4 diffuse = glm::vec4(part.diffuseColor, alpha);
-            glm::vec4 specular = glm::vec4(part.specularColor, alpha);
-            glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
-            glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
-            glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
-            glMaterialf(GL_FRONT, GL_SHININESS, part.shininess);
-
-            Texture* texture = networkPart.diffuseTexture.data();
-            if (mesh.isEye) {
-                if (texture != NULL) {
-                    texture = (_dilatedTextures[i][j] = static_cast<DilatableNetworkTexture*>(texture)->getDilatedTexture(
-                        _owningHead->getPupilDilation())).data();
-                }
-            }
-            glBindTexture(GL_TEXTURE_2D, texture == NULL ? Application::getInstance()->getTextureCache()->getWhiteTextureID() :
-                texture->getID());
-
-            glDrawRangeElementsEXT(GL_QUADS, 0, vertexCount - 1, part.quadIndices.size(), GL_UNSIGNED_INT, (void*)offset);
-            offset += part.quadIndices.size() * sizeof(int);
-            glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertexCount - 1, part.triangleIndices.size(),
-                GL_UNSIGNED_INT, (void*)offset);
-            offset += part.triangleIndices.size() * sizeof(int);
-        }
-
-        if (state.worldSpaceVertices.isEmpty()) {
-            if (state.clusterMatrices.size() > 1) {
-                _skinProgram.disableAttributeArray(_clusterIndicesLocation);
-                _skinProgram.disableAttributeArray(_clusterWeightsLocation);
-                _skinProgram.release();
-
-            } else {
-                glPopMatrix();
-                _program.release();
-            }
-        } else {
-            _program.release();
-        }
-    }
-
-    // deactivate vertex arrays after drawing
-    glDisableClientState(GL_NORMAL_ARRAY);
-    glDisableClientState(GL_VERTEX_ARRAY);
-    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
-
-    // bind with 0 to switch back to normal operation
-    glBindBuffer(GL_ARRAY_BUFFER, 0);
-    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
-    glBindTexture(GL_TEXTURE_2D, 0);
-
-    // restore all the default material settings
-    Application::getInstance()->setupWorldLight(*Application::getInstance()->getCamera());
-
-    return true;
-}
-
-bool BlendFace::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition, bool upright) const {
-    if (!isActive() || _jointStates.isEmpty()) {
-        return false;
-    }
-    const FBXGeometry& geometry = _geometry->getFBXGeometry();
-    if (geometry.leftEyeJointIndex != -1) {
-        const glm::mat4& transform = _jointStates[geometry.leftEyeJointIndex].transform;
-        firstEyePosition = glm::vec3(transform[3][0], transform[3][1], transform[3][2]);
-    }
-    if (geometry.rightEyeJointIndex != -1) {
-        const glm::mat4& transform = _jointStates[geometry.rightEyeJointIndex].transform;
-        secondEyePosition = glm::vec3(transform[3][0], transform[3][1], transform[3][2]);
-    }
-    return geometry.leftEyeJointIndex != -1 && geometry.rightEyeJointIndex != -1;
-}
-
-glm::vec4 BlendFace::computeAverageColor() const {
-    return _geometry ? _geometry->computeAverageColor() : glm::vec4(1.0f, 1.0f, 1.0f, 1.0f);
-}
-
-void BlendFace::setModelURL(const QUrl& url) {
-    // don't recreate the geometry if it's the same URL
-    if (_modelURL == url) {
-        return;
-    }
-    _modelURL = url;
-
-    // delete our local geometry and custom textures
-    deleteGeometry();
-    _dilatedTextures.clear();
-
-    _geometry = Application::getInstance()->getGeometryCache()->getGeometry(url);
-}
-
-void BlendFace::deleteGeometry() {
-    foreach (GLuint id, _blendedVertexBufferIDs) {
-        glDeleteBuffers(1, &id);
-    }
-    _blendedVertexBufferIDs.clear();
-    _jointStates.clear();
-    _meshStates.clear();
-}
@@ -1,92 +0,0 @@
-//
-// BlendFace.h
-// interface
-//
-// Created by Andrzej Kapolka on 9/16/13.
-// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
-//
-
-#ifndef __interface__BlendFace__
-#define __interface__BlendFace__
-
-#include <QObject>
-#include <QUrl>
-
-#include "InterfaceConfig.h"
-#include "renderer/GeometryCache.h"
-#include "renderer/ProgramObject.h"
-#include "renderer/TextureCache.h"
-
-class QNetworkReply;
-
-class Head;
-
-/// A face formed from a linear mix of blendshapes according to a set of coefficients.
-class BlendFace : public QObject {
-    Q_OBJECT
-
-public:
-
-    BlendFace(Head* owningHead);
-    ~BlendFace();
-
-    bool isActive() const { return _geometry && _geometry->isLoaded(); }
-
-    void init();
-    void reset();
-    void simulate(float deltaTime);
-    bool render(float alpha);
-
-    Q_INVOKABLE void setModelURL(const QUrl& url);
-    const QUrl& getModelURL() const { return _modelURL; }
-
-    /// Retrieve the positions of up to two eye meshes.
-    /// \param upright if true, retrieve the locations of the eyes in the upright position
-    /// \return whether or not both eye meshes were found
-    bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition, bool upright = false) const;
-
-    /// Returns the average color of all meshes in the geometry.
-    glm::vec4 computeAverageColor() const;
-
-private:
-
-    void deleteGeometry();
-
-    Head* _owningHead;
-
-    QUrl _modelURL;
-
-    QSharedPointer<NetworkGeometry> _geometry;
-
-    class JointState {
-    public:
-        glm::quat rotation;
-        glm::mat4 transform;
-    };
-
-    QVector<JointState> _jointStates;
-
-    class MeshState {
-    public:
-        QVector<glm::mat4> clusterMatrices;
-        QVector<glm::vec3> worldSpaceVertices;
-        QVector<glm::vec3> vertexVelocities;
-        QVector<glm::vec3> worldSpaceNormals;
-    };
-
-    QVector<MeshState> _meshStates;
-    QVector<GLuint> _blendedVertexBufferIDs;
-    QVector<QVector<QSharedPointer<Texture> > > _dilatedTextures;
-    bool _resetStates;
-
-    QVector<glm::vec3> _blendedVertices;
-    QVector<glm::vec3> _blendedNormals;
-
-    static ProgramObject _program;
-    static ProgramObject _skinProgram;
-    static int _clusterMatricesLocation;
-    static int _clusterIndicesLocation;
-    static int _clusterWeightsLocation;
-};
-
-#endif /* defined(__interface__BlendFace__) */
@@ -1,81 +0,0 @@
-//
-// Body.cpp
-// interface
-//
-// Created by Andrzej Kapolka on 10/17/13.
-// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
-//
-
-#include <glm/gtx/transform.hpp>
-
-#include "Application.h"
-#include "Body.h"
-
-Body::Body(Avatar* owningAvatar) : _owningAvatar(owningAvatar) {
-    // we may have been created in the network thread, but we live in the main thread
-    moveToThread(Application::getInstance()->thread());
-}
-
-void Body::simulate(float deltaTime) {
-    if (!isActive()) {
-        return;
-    }
-
-    // set up joint states on first simulate after load
-    const FBXGeometry& geometry = _skeletonGeometry->getFBXGeometry();
-    if (_jointStates.isEmpty()) {
-        foreach (const FBXJoint& joint, geometry.joints) {
-            JointState state;
-            state.rotation = joint.rotation;
-            _jointStates.append(state);
-        }
-    }
-
-    glm::quat orientation = _owningAvatar->getOrientation();
-    const float MODEL_SCALE = 0.05f;
-    glm::vec3 scale = glm::vec3(-1.0f, 1.0f, -1.0f) * _owningAvatar->getScale() * MODEL_SCALE;
-    glm::mat4 baseTransform = glm::translate(_owningAvatar->getPosition()) * glm::mat4_cast(orientation) * glm::scale(scale);
-
-    // update the world space transforms for all joints
-    for (int i = 0; i < _jointStates.size(); i++) {
-        JointState& state = _jointStates[i];
-        const FBXJoint& joint = geometry.joints.at(i);
-        if (joint.parentIndex == -1) {
-            state.transform = baseTransform * geometry.offset * joint.preRotation *
-                glm::mat4_cast(state.rotation) * joint.postRotation;
-
-        } else {
-            state.transform = _jointStates[joint.parentIndex].transform * joint.preRotation *
-                glm::mat4_cast(state.rotation) * joint.postRotation;
-        }
-    }
-}
-
-bool Body::render(float alpha) {
-    if (_jointStates.isEmpty()) {
-        return false;
-    }
-
-    glColor4f(1.0f, 1.0f, 1.0f, alpha);
-
-    for (int i = 0; i < _jointStates.size(); i++) {
-        const JointState& state = _jointStates[i];
-        glPushMatrix();
-        glMultMatrixf((const GLfloat*)&state.transform);
-
-        glutSolidSphere(0.2f, 10, 10);
-
-        glPopMatrix();
-    }
-
-    return true;
-}
-
-void Body::setSkeletonModelURL(const QUrl& url) {
-    // don't recreate the geometry if it's the same URL
-    if (_skeletonModelURL == url) {
-        return;
-    }
-    _skeletonModelURL = url;
-    _skeletonGeometry = Application::getInstance()->getGeometryCache()->getGeometry(url);
-}
@@ -1,50 +0,0 @@
-//
-// Body.h
-// interface
-//
-// Created by Andrzej Kapolka on 10/17/13.
-// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
-//
-
-#ifndef __interface__Body__
-#define __interface__Body__
-
-#include <QObject>
-#include <QUrl>
-
-#include "renderer/GeometryCache.h"
-
-/// An avatar body with an arbitrary skeleton.
-class Body : public QObject {
-    Q_OBJECT
-
-public:
-
-    Body(Avatar* owningAvatar);
-
-    bool isActive() const { return _skeletonGeometry && _skeletonGeometry->isLoaded(); }
-
-    void simulate(float deltaTime);
-    bool render(float alpha);
-
-    Q_INVOKABLE void setSkeletonModelURL(const QUrl& url);
-    const QUrl& getSkeletonModelURL() const { return _skeletonModelURL; }
-
-private:
-
-    Avatar* _owningAvatar;
-
-    QUrl _skeletonModelURL;
-
-    QSharedPointer<NetworkGeometry> _skeletonGeometry;
-
-    class JointState {
-    public:
-        glm::quat rotation;
-        glm::mat4 transform;
-    };
-
-    QVector<JointState> _jointStates;
-};
-
-#endif /* defined(__interface__Body__) */
interface/src/avatar/FaceModel.cpp (new file, 55 lines)

@@ -0,0 +1,55 @@
+//
+// FaceModel.cpp
+// interface
+//
+// Created by Andrzej Kapolka on 9/16/13.
+// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
+//
+
+#include "Avatar.h"
+#include "FaceModel.h"
+#include "Head.h"
+
+FaceModel::FaceModel(Head* owningHead) :
+    _owningHead(owningHead)
+{
+}
+
+void FaceModel::simulate(float deltaTime) {
+    if (!isActive()) {
+        return;
+    }
+
+    const Skeleton& skeleton = static_cast<Avatar*>(_owningHead->_owningAvatar)->getSkeleton();
+    setTranslation(skeleton.joint[AVATAR_JOINT_NECK_BASE].position);
+    setRotation(skeleton.joint[AVATAR_JOINT_NECK_BASE].absoluteRotation);
+    const float MODEL_SCALE = 0.0006f;
+    setScale(glm::vec3(-1.0f, 1.0f, -1.0f) * _owningHead->getScale() * MODEL_SCALE);
+    const glm::vec3 MODEL_TRANSLATION(0.0f, -60.0f, 40.0f); // temporary fudge factor
+    setOffset(MODEL_TRANSLATION - _geometry->getFBXGeometry().neckPivot);
+
+    setPupilDilation(_owningHead->getPupilDilation());
+    setBlendshapeCoefficients(_owningHead->getBlendshapeCoefficients());
+
+    Model::simulate(deltaTime);
+}
+
+void FaceModel::maybeUpdateNeckRotation(const FBXJoint& joint, JointState& state) {
+    // get the rotation axes in joint space and use them to adjust the rotation
+    glm::mat3 axes = glm::mat3_cast(getRotation());
+    glm::mat3 inverse = glm::inverse(glm::mat3(_jointStates[joint.parentIndex].transform *
+        joint.preRotation * glm::mat4_cast(joint.rotation)));
+    state.rotation = glm::angleAxis(_owningHead->getRoll(), glm::normalize(inverse * axes[2])) *
+        glm::angleAxis(_owningHead->getYaw(), glm::normalize(inverse * axes[1])) *
+        glm::angleAxis(_owningHead->getPitch(), glm::normalize(inverse * axes[0])) * joint.rotation;
+}
+
+void FaceModel::maybeUpdateEyeRotation(const FBXJoint& joint, JointState& state) {
+    // get the lookat position in joint space and use it to adjust the rotation
+    glm::mat4 inverse = glm::inverse(_jointStates[joint.parentIndex].transform *
+        joint.preRotation * glm::mat4_cast(joint.rotation));
+    glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getOrientation() * IDENTITY_FRONT, 0.0f));
+    glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
+        _owningHead->getSaccade(), 1.0f));
+    state.rotation = rotationBetween(front, lookAt) * joint.rotation;
+}

interface/src/avatar/FaceModel.h (new file, 39 lines)

@@ -0,0 +1,39 @@
+//
+// FaceModel.h
+// interface
+//
+// Created by Andrzej Kapolka on 9/16/13.
+// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
+//
+
+#ifndef __interface__FaceModel__
+#define __interface__FaceModel__
+
+#include "renderer/Model.h"
+
+class Head;
+
+/// A face formed from a linear mix of blendshapes according to a set of coefficients.
+class FaceModel : public Model {
+    Q_OBJECT
+
+public:
+
+    FaceModel(Head* owningHead);
+
+    void simulate(float deltaTime);
+
+protected:
+
+    /// Applies neck rotation based on head orientation.
+    virtual void maybeUpdateNeckRotation(const FBXJoint& joint, JointState& state);
+
+    /// Applies eye rotation based on lookat position.
+    virtual void maybeUpdateEyeRotation(const FBXJoint& joint, JointState& state);
+
+private:
+
+    Head* _owningHead;
+};
+
+#endif /* defined(__interface__FaceModel__) */
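FaceModel keeps only the head-specific pieces of the old BlendFace::simulate shown above: the neck and eye adjustments move into the maybeUpdateNeckRotation / maybeUpdateEyeRotation overrides, and the rest of the per-joint walk is expected to come from the shared Model base class. That base-class loop is not part of this diff; the following is a plausible reconstruction based on the deleted BlendFace code, where the method name updateJointStates and the _baseTransform member are assumptions, not names from the real header.

    // Assumed sketch of Model's joint update (not shown in this commit); the old
    // BlendFace::simulate above did the same walk with the head/eye cases hard-coded.
    void Model::updateJointStates(const FBXGeometry& geometry) {
        for (int i = 0; i < _jointStates.size(); i++) {
            JointState& state = _jointStates[i];
            const FBXJoint& joint = geometry.joints.at(i);
            if (i == geometry.neckJointIndex) {
                maybeUpdateNeckRotation(joint, state);   // FaceModel applies head yaw/pitch/roll here
            } else if (i == geometry.leftEyeJointIndex || i == geometry.rightEyeJointIndex) {
                maybeUpdateEyeRotation(joint, state);    // FaceModel points the eyes at the lookat position
            }
            glm::mat4 parentTransform = (joint.parentIndex == -1) ?
                _baseTransform * geometry.offset :       // presumably built from setTranslation/setRotation/setScale/setOffset
                _jointStates[joint.parentIndex].transform;
            state.transform = parentTransform * joint.preRotation *
                glm::mat4_cast(state.rotation) * joint.postRotation;
        }
    }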
@@ -83,8 +83,8 @@ Head::Head(Avatar* owningAvatar) :
     _mousePitch(0.f),
     _cameraYaw(_yaw),
     _isCameraMoving(false),
-    _face(this),
-    _blendFace(this)
+    _videoFace(this),
+    _faceModel(this)
 {
     if (USING_PHYSICAL_MOHAWK) {
         resetHairPhysics();

@@ -104,7 +104,7 @@ void Head::init() {
         _irisTexture = Application::getInstance()->getTextureCache()->getTexture(QUrl::fromLocalFile(IRIS_TEXTURE_FILENAME),
             true).staticCast<DilatableNetworkTexture>();
     }
-    _blendFace.init();
+    _faceModel.init();
 }

 void Head::reset() {

@@ -115,7 +115,7 @@ void Head::reset() {
         resetHairPhysics();
     }

-    _blendFace.reset();
+    _faceModel.reset();
 }

 void Head::resetHairPhysics() {

@@ -237,7 +237,7 @@ void Head::simulate(float deltaTime, bool isMine) {
         updateHairPhysics(deltaTime);
     }

-    _blendFace.simulate(deltaTime);
+    _faceModel.simulate(deltaTime);
 }

 void Head::calculateGeometry() {

@@ -285,7 +285,7 @@
 void Head::render(float alpha, bool isMine) {
     _renderAlpha = alpha;

-    if (!(_face.render(alpha) || _blendFace.render(alpha))) {
+    if (!(_videoFace.render(alpha) || _faceModel.render(alpha))) {
         calculateGeometry();

         glEnable(GL_DEPTH_TEST);

@@ -300,9 +300,9 @@ void Head::render(float alpha, bool isMine) {
         renderEyeBrows();
     }

-    if (_blendFace.isActive()) {
+    if (_faceModel.isActive()) {
         // the blend face may have custom eye meshes
-        _blendFace.getEyePositions(_leftEyePosition, _rightEyePosition);
+        _faceModel.getEyePositions(_leftEyePosition, _rightEyePosition);
     }

     if (_renderLookatVectors) {
@@ -18,9 +18,9 @@
 #include <VoxelConstants.h>

 #include "BendyLine.h"
-#include "BlendFace.h"
-#include "Face.h"
+#include "FaceModel.h"
 #include "InterfaceConfig.h"
+#include "VideoFace.h"
 #include "world.h"
 #include "devices/SerialInterface.h"
 #include "renderer/TextureCache.h"

@@ -76,8 +76,8 @@ public:

     glm::quat getEyeRotation(const glm::vec3& eyePosition) const;

-    Face& getFace() { return _face; }
-    BlendFace& getBlendFace() { return _blendFace; }
+    VideoFace& getVideoFace() { return _videoFace; }
+    FaceModel& getFaceModel() { return _faceModel; }

     const bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
     float getAverageLoudness() const { return _averageLoudness; }

@@ -132,8 +132,8 @@ private:
     float _mousePitch;
     float _cameraYaw;
     bool _isCameraMoving;
-    Face _face;
-    BlendFace _blendFace;
+    VideoFace _videoFace;
+    FaceModel _faceModel;

     QSharedPointer<Texture> _dilatedIrisTexture;


@@ -154,7 +154,7 @@ private:
     void resetHairPhysics();
     void updateHairPhysics(float deltaTime);

-    friend class BlendFace;
+    friend class FaceModel;
 };

 #endif
@@ -322,7 +322,7 @@ void MyAvatar::simulate(float deltaTime, Transmitter* transmitter) {
     _head.setScale(_scale);
     _head.setSkinColor(glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]));
     _head.simulate(deltaTime, true);
-    _body.simulate(deltaTime);
+    _skeletonModel.simulate(deltaTime);
     _hand.simulate(deltaTime, true);

     const float WALKING_SPEED_THRESHOLD = 0.2f;

@@ -391,7 +391,7 @@ void MyAvatar::updateFromGyrosAndOrWebcam(float pitchFromTouch, bool turnWithHea
         _head.setMousePitch(pitchFromTouch);
         _head.setPitch(pitchFromTouch);
     }
-    _head.getFace().clearFrame();
+    _head.getVideoFace().clearFrame();

     // restore rotation, lean to neutral positions
     const float RESTORE_RATE = 0.05f;

@@ -407,7 +407,7 @@ void MyAvatar::updateFromGyrosAndOrWebcam(float pitchFromTouch, bool turnWithHea
         estimatedPosition = webcam->getEstimatedPosition();

         // apply face data
-        _head.getFace().setFrameFromWebcam();
+        _head.getVideoFace().setFrameFromWebcam();

         // compute and store the joint rotations
         const JointVector& joints = webcam->getEstimatedJoints();

@@ -424,7 +424,7 @@ void MyAvatar::updateFromGyrosAndOrWebcam(float pitchFromTouch, bool turnWithHea
            }
        }
    } else {
-        _head.getFace().clearFrame();
+        _head.getVideoFace().clearFrame();
    }

    // Set the rotation of the avatar's head (as seen by others, not affecting view frustum)

@@ -605,18 +605,18 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
         return;
     }

-    if (_head.getFace().isFullFrame()) {
+    if (_head.getVideoFace().isFullFrame()) {
         // Render the full-frame video
         float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
         if (alpha > 0.0f) {
-            _head.getFace().render(1.0f);
+            _head.getVideoFace().render(1.0f);
         }
-    } else if (renderAvatarBalls || !(_voxels.getVoxelURL().isValid() || _body.isActive())) {
+    } else if (renderAvatarBalls || !(_voxels.getVoxelURL().isValid() || _skeletonModel.isActive())) {
         // Render the body as balls and cones
         glm::vec3 skinColor(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]);
         glm::vec3 darkSkinColor(DARK_SKIN_COLOR[0], DARK_SKIN_COLOR[1], DARK_SKIN_COLOR[2]);
-        if (_head.getBlendFace().isActive()) {
-            skinColor = glm::vec3(_head.getBlendFace().computeAverageColor());
+        if (_head.getFaceModel().isActive()) {
+            skinColor = glm::vec3(_head.getFaceModel().computeAverageColor());
             const float SKIN_DARKENING = 0.9f;
             darkSkinColor = skinColor * SKIN_DARKENING;
         }

@@ -650,7 +650,7 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
                     alpha);
             }

-            if (b == BODY_BALL_NECK_BASE && _head.getBlendFace().isActive()) {
+            if (b == BODY_BALL_NECK_BASE && _head.getFaceModel().isActive()) {
                 continue; // don't render the neck if we have a face model
             }

@@ -686,7 +686,7 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
         // Render the body's voxels and head
         float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
         if (alpha > 0.0f) {
-            if (!_body.render(alpha)) {
+            if (!_skeletonModel.render(alpha)) {
                 _voxels.render(false);
             }
             _head.render(alpha, true);
@@ -44,17 +44,15 @@ void Profile::setUUID(const QUuid& uuid) {
 void Profile::setFaceModelURL(const QUrl& faceModelURL) {
     _faceModelURL = faceModelURL;

-    QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getHead().getBlendFace(),
-        "setModelURL",
-        Q_ARG(QUrl, _faceModelURL));
+    QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getHead().getFaceModel(),
+        "setURL", Q_ARG(QUrl, _faceModelURL));
 }

 void Profile::setSkeletonModelURL(const QUrl& skeletonModelURL) {
     _skeletonModelURL = skeletonModelURL;

-    QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getBody(),
-        "setSkeletonModelURL",
-        Q_ARG(QUrl, _skeletonModelURL));
+    QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getSkeletonModel(),
+        "setURL", Q_ARG(QUrl, _skeletonModelURL));
 }

 void Profile::updateDomain(const QString& domain) {
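Both here and in DataServerClient.cpp above, the models are driven by name through QMetaObject::invokeMethod(target, "setURL", Q_ARG(QUrl, ...)). That only works if Model::setURL is a slot or Q_INVOKABLE (BlendFace::setModelURL and Body::setSkeletonModelURL were Q_INVOKABLE, so presumably setURL is too), and it lets the call be queued onto the target's own thread when the caller lives on another one. A minimal, hypothetical stand-alone illustration of the pattern, with invented names and assuming the file is main.cpp built with moc/AUTOMOC:

    #include <QCoreApplication>
    #include <QDebug>
    #include <QObject>
    #include <QUrl>

    class DemoModel : public QObject {
        Q_OBJECT
    public:
        // Q_INVOKABLE makes the method reachable by name through the meta-object system.
        Q_INVOKABLE void setURL(const QUrl& url) { qDebug() << "loading" << url; }
    };

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        DemoModel model;
        // With a cross-thread target this call would be queued automatically.
        QMetaObject::invokeMethod(&model, "setURL", Q_ARG(QUrl, QUrl("http://example.com/head.fbx")));
        return 0;
    }

    #include "main.moc" // required for the in-source Q_OBJECT class in this single-file example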
interface/src/avatar/SkeletonModel.cpp (new file, 28 lines)

@@ -0,0 +1,28 @@
+//
+// SkeletonModel.cpp
+// interface
+//
+// Created by Andrzej Kapolka on 10/17/13.
+// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
+//
+
+#include "Avatar.h"
+#include "SkeletonModel.h"
+
+SkeletonModel::SkeletonModel(Avatar* owningAvatar) :
+    _owningAvatar(owningAvatar)
+{
+}
+
+void SkeletonModel::simulate(float deltaTime) {
+    if (!isActive()) {
+        return;
+    }
+
+    setTranslation(_owningAvatar->getPosition());
+    setRotation(_owningAvatar->getOrientation());
+    const float MODEL_SCALE = 0.0006f;
+    setScale(glm::vec3(-1.0f, 1.0f, -1.0f) * _owningAvatar->getScale() * MODEL_SCALE);
+
+    Model::simulate(deltaTime);
+}

interface/src/avatar/SkeletonModel.h (new file, 31 lines)

@@ -0,0 +1,31 @@
+//
+// SkeletonModel.h
+// interface
+//
+// Created by Andrzej Kapolka on 10/17/13.
+// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
+//
+
+#ifndef __interface__SkeletonModel__
+#define __interface__SkeletonModel__
+
+#include "renderer/Model.h"
+
+class Avatar;
+
+/// A skeleton loaded from a model.
+class SkeletonModel : public Model {
+    Q_OBJECT
+
+public:
+
+    SkeletonModel(Avatar* owningAvatar);
+
+    void simulate(float deltaTime);
+
+private:
+
+    Avatar* _owningAvatar;
+};
+
+#endif /* defined(__interface__SkeletonModel__) */
@@ -1,5 +1,5 @@
 //
-// Face.cpp
+// VideoFace.cpp
 // interface
 //
 // Created by Andrzej Kapolka on 7/11/13.

@@ -16,26 +16,26 @@
 #include "Application.h"
 #include "Avatar.h"
 #include "Head.h"
-#include "Face.h"
+#include "VideoFace.h"
 #include "renderer/ProgramObject.h"

 using namespace cv;

-bool Face::_initialized = false;
-ProgramObject Face::_videoProgram;
-Face::Locations Face::_videoProgramLocations;
-ProgramObject Face::_texturedProgram;
-Face::Locations Face::_texturedProgramLocations;
-GLuint Face::_vboID;
-GLuint Face::_iboID;
+bool VideoFace::_initialized = false;
+ProgramObject VideoFace::_videoProgram;
+VideoFace::Locations VideoFace::_videoProgramLocations;
+ProgramObject VideoFace::_texturedProgram;
+VideoFace::Locations VideoFace::_texturedProgramLocations;
+GLuint VideoFace::_vboID;
+GLuint VideoFace::_iboID;

-Face::Face(Head* owningHead) : _owningHead(owningHead), _renderMode(MESH),
+VideoFace::VideoFace(Head* owningHead) : _owningHead(owningHead), _renderMode(MESH),
     _colorTextureID(0), _depthTextureID(0), _colorCodec(), _depthCodec(), _frameCount(0) {
     // we may have been created in the network thread, but we live in the main thread
     moveToThread(Application::getInstance()->thread());
 }

-Face::~Face() {
+VideoFace::~VideoFace() {
     if (_colorCodec.name != 0) {
         vpx_codec_destroy(&_colorCodec);


@@ -55,7 +55,7 @@ Face::~Face() {
     }
 }

-void Face::setFrameFromWebcam() {
+void VideoFace::setFrameFromWebcam() {
     Webcam* webcam = Application::getInstance()->getWebcam();
     if (webcam->isSending()) {
         _colorTextureID = webcam->getColorTextureID();

@@ -68,12 +68,12 @@ void Face::setFrameFromWebcam() {
     }
 }

-void Face::clearFrame() {
+void VideoFace::clearFrame() {
     _colorTextureID = 0;
     _depthTextureID = 0;
 }

-int Face::processVideoMessage(unsigned char* packetData, size_t dataBytes) {
+int VideoFace::processVideoMessage(unsigned char* packetData, size_t dataBytes) {
     unsigned char* packetPosition = packetData;

     int frameCount = *(uint32_t*)packetPosition;

@@ -243,7 +243,7 @@ int Face::processVideoMessage(unsigned char* packetData, size_t dataBytes) {
     return dataBytes;
 }

-bool Face::render(float alpha) {
+bool VideoFace::render(float alpha) {
     if (!isActive()) {
         return false;
     }

@@ -404,11 +404,11 @@ bool Face::render(float alpha) {
     return true;
 }

-void Face::cycleRenderMode() {
+void VideoFace::cycleRenderMode() {
     _renderMode = (RenderMode)((_renderMode + 1) % RENDER_MODE_COUNT);
 }

-void Face::setFrame(const cv::Mat& color, const cv::Mat& depth, float aspectRatio) {
+void VideoFace::setFrame(const cv::Mat& color, const cv::Mat& depth, float aspectRatio) {
     Size2f textureSize = _textureSize;
     if (!color.empty()) {
         bool generate = (_colorTextureID == 0);

@@ -457,7 +457,7 @@ void Face::setFrame(const cv::Mat& color, const cv::Mat& depth, float aspectRati
     _textureSize = textureSize;
 }

-void Face::destroyCodecs() {
+void VideoFace::destroyCodecs() {
     if (_colorCodec.name != 0) {
         vpx_codec_destroy(&_colorCodec);
         _colorCodec.name = 0;

@@ -468,7 +468,7 @@ void Face::destroyCodecs() {
     }
 }

-void Face::loadProgram(ProgramObject& program, const QString& suffix, const char* secondTextureUniform, Locations& locations) {
+void VideoFace::loadProgram(ProgramObject& program, const QString& suffix, const char* secondTextureUniform, Locations& locations) {
     program.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/face" + suffix + ".vert");
     program.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/face" + suffix + ".frag");
     program.link();
@@ -1,13 +1,13 @@
 //
-// Face.h
+// VideoFace.h
 // interface
 //
 // Created by Andrzej Kapolka on 7/11/13.
 // Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
 //

-#ifndef __interface__Face__
-#define __interface__Face__
+#ifndef __interface__VideoFace__
+#define __interface__VideoFace__

 #include <QObject>


@@ -22,13 +22,13 @@ class ProgramObject;

 const float FULL_FRAME_ASPECT = 0.0f;

-class Face : public QObject {
+class VideoFace : public QObject {
     Q_OBJECT

 public:

-    Face(Head* owningHead);
-    ~Face();
+    VideoFace(Head* owningHead);
+    ~VideoFace();

     bool isActive() const { return _colorTextureID != 0 || _depthTextureID != 0; }
     bool isFullFrame() const { return isActive() && _aspectRatio == FULL_FRAME_ASPECT; }

@@ -91,4 +91,4 @@ private:
     static GLuint _iboID;
 };

-#endif /* defined(__interface__Face__) */
+#endif /* defined(__interface__VideoFace__) */
@@ -19,7 +19,7 @@

 #include "Application.h"
 #include "Webcam.h"
-#include "avatar/Face.h"
+#include "avatar/VideoFace.h"

 using namespace cv;
 using namespace std;
@@ -420,7 +420,7 @@ const char* FACESHIFT_BLENDSHAPES[] = {
     ""
 };

-class Model {
+class FBXModel {
 public:
     QByteArray name;


@@ -432,10 +432,10 @@ public:
 };

 glm::mat4 getGlobalTransform(const QMultiHash<QString, QString>& parentMap,
-        const QHash<QString, Model>& models, QString nodeID) {
+        const QHash<QString, FBXModel>& models, QString nodeID) {
     glm::mat4 globalTransform;
     while (!nodeID.isNull()) {
-        const Model& model = models.value(nodeID);
+        const FBXModel& model = models.value(nodeID);
         globalTransform = model.preRotation * glm::mat4_cast(model.rotation) * model.postRotation * globalTransform;

         QList<QString> parentIDs = parentMap.values(nodeID);

@@ -484,7 +484,7 @@ public:
 };

 void appendModelIDs(const QString& parentID, const QMultiHash<QString, QString>& childMap,
-        QHash<QString, Model>& models, QVector<QString>& modelIDs) {
+        QHash<QString, FBXModel>& models, QVector<QString>& modelIDs) {
     if (models.contains(parentID)) {
         modelIDs.append(parentID);
     }

@@ -502,7 +502,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
     QVector<ExtractedBlendshape> blendshapes;
     QMultiHash<QString, QString> parentMap;
     QMultiHash<QString, QString> childMap;
-    QHash<QString, Model> models;
+    QHash<QString, FBXModel> models;
     QHash<QString, Cluster> clusters;
     QHash<QString, QByteArray> textureFilenames;
     QHash<QString, Material> materials;

@@ -692,7 +692,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                 glm::vec3 preRotation, rotation, postRotation;
                 glm::vec3 scale = glm::vec3(1.0f, 1.0f, 1.0f);
                 glm::vec3 scalePivot, rotationPivot;
-                Model model = { name };
+                FBXModel model = { name };
                 foreach (const FBXNode& subobject, object.children) {
                     if (subobject.name == "Properties60") {
                         foreach (const FBXNode& property, subobject.children) {

@@ -927,7 +927,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)

     // convert the models to joints
     foreach (const QString& modelID, modelIDs) {
-        const Model& model = models[modelID];
+        const FBXModel& model = models[modelID];
         FBXJoint joint;
         joint.parentIndex = model.parentIndex;
         joint.preRotation = model.preRotation;
@@ -14,6 +14,7 @@
 #include <QVector>

 #include <glm/glm.hpp>
+#include <glm/gtc/quaternion.hpp>

 class FBXNode;
