Merge branch 'master' of github.com:worklist/hifi into ds-admin

Stephen Birarda 2013-10-23 13:21:50 -07:00
commit aab710d785
33 changed files with 1450 additions and 707 deletions

View file

@ -8,7 +8,7 @@
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
const int MAX_CLUSTERS = 32;
const int MAX_CLUSTERS = 64;
const int INDICES_PER_VERTEX = 4;
uniform mat4 clusterMatrices[MAX_CLUSTERS];
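Raising MAX_CLUSTERS from 32 to 64 doubles the number of joints a skinned mesh can reference, which means the host application has to be able to upload up to 64 cluster matrices per draw call. A minimal host-side sketch (assumptions: the platform GL headers are already included and the program handle and function name are illustrative; only the "clusterMatrices" uniform name comes from the shader above):

#include <algorithm>
#include <vector>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

// Upload up to the new MAX_CLUSTERS (64) skinning matrices to the shader.
void uploadClusterMatrices(GLuint program, const std::vector<glm::mat4>& clusterMatrices) {
    if (clusterMatrices.empty()) {
        return;
    }
    const size_t MAX_CLUSTERS = 64; // must match the shader constant
    GLint location = glGetUniformLocation(program, "clusterMatrices");
    GLsizei count = (GLsizei)std::min(clusterMatrices.size(), MAX_CLUSTERS);
    glUniformMatrix4fv(location, count, GL_FALSE, glm::value_ptr(clusterMatrices[0]));
}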

View file

@ -382,6 +382,20 @@ void Application::paintGL() {
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
_myCamera.setTargetPosition(_myAvatar.getUprightHeadPosition());
_myCamera.setTargetRotation(_myAvatar.getHead().getCameraOrientation());
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_myCamera.setTightness(0.0f);
_myCamera.setDistance(0.3f);
glm::vec3 targetPosition = _myAvatar.getUprightHeadPosition();
if (_myAvatar.getHead().getFaceModel().isActive()) {
// make sure we're aligned to the blend face eyes
glm::vec3 leftEyePosition, rightEyePosition;
if (_myAvatar.getHead().getFaceModel().getEyePositions(leftEyePosition, rightEyePosition)) {
targetPosition = (leftEyePosition + rightEyePosition) * 0.5f;
}
}
_myCamera.setTargetPosition(targetPosition);
_myCamera.setTargetRotation(_myAvatar.getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PIf, 0.0f)));
}
// Update camera position
@ -438,13 +452,15 @@ void Application::paintGL() {
// set the bounds of rear mirror view
glViewport(_mirrorViewRect.x(), _glWidget->height() - _mirrorViewRect.y() - _mirrorViewRect.height(), _mirrorViewRect.width(), _mirrorViewRect.height());
glScissor(_mirrorViewRect.x(), _glWidget->height() - _mirrorViewRect.y() - _mirrorViewRect.height(), _mirrorViewRect.width(), _mirrorViewRect.height());
updateProjectionMatrix(_mirrorCamera);
bool updateViewFrustum = false;
updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
glEnable(GL_SCISSOR_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// render rear mirror view
glPushMatrix();
displaySide(_mirrorCamera);
bool selfAvatarOnly = true;
displaySide(_mirrorCamera, selfAvatarOnly);
glPopMatrix();
// render rear view tools if mouse is in the bounds
@ -456,7 +472,7 @@ void Application::paintGL() {
// reset Viewport and projection matrix
glViewport(0, 0, _glWidget->width(), _glWidget->height());
glDisable(GL_SCISSOR_TEST);
updateProjectionMatrix();
updateProjectionMatrix(_myCamera, updateViewFrustum);
}
displayOverlay();
@ -494,15 +510,16 @@ void Application::updateProjectionMatrix() {
updateProjectionMatrix(_myCamera);
}
void Application::updateProjectionMatrix(Camera& camera) {
void Application::updateProjectionMatrix(Camera& camera, bool updateViewFrustum) {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
// Tell our viewFrustum about this change, using the application camera
loadViewFrustum(camera, _viewFrustum);
float left, right, bottom, top, nearVal, farVal;
glm::vec4 nearClipPlane, farClipPlane;
// Tell our viewFrustum about this change, using the application camera
if (updateViewFrustum) {
loadViewFrustum(camera, _viewFrustum);
computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
// If we're in Display Frustum mode, then we want to use the slightly adjusted near/far clip values of the
@ -511,6 +528,11 @@ void Application::updateProjectionMatrix(Camera& camera) {
nearVal = _viewFrustumOffsetCamera.getNearClip();
farVal = _viewFrustumOffsetCamera.getFarClip();
}
} else {
ViewFrustum tempViewFrustum;
loadViewFrustum(camera, tempViewFrustum);
tempViewFrustum.computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
}
glFrustum(left, right, bottom, top, nearVal, farVal);
glMatrixMode(GL_MODELVIEW);
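Taken together, the paintGL and updateProjectionMatrix hunks above establish the pattern below for the rear-view mirror pass: the mirror builds its projection from a throwaway ViewFrustum (the new updateViewFrustum flag), so _viewFrustum keeps describing the main camera that culling and LOD decisions depend on, and the scene is drawn with selfAvatarOnly so only the local avatar appears. A condensed sketch paraphrasing the code above, not a new API:

// Rear-view mirror pass: temporary frustum, local avatar only.
bool updateViewFrustum = false;
updateProjectionMatrix(_mirrorCamera, updateViewFrustum); // fills a temporary ViewFrustum internally
displaySide(_mirrorCamera, true /* selfAvatarOnly */);

// Back to the main view: restore the projection without touching _viewFrustum again.
updateProjectionMatrix(_myCamera, updateViewFrustum);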
@ -885,7 +907,11 @@ void Application::keyPressEvent(QKeyEvent* event) {
updateProjectionMatrix();
break;
case Qt::Key_H:
if (isShifted) {
Menu::getInstance()->triggerOption(MenuOption::Mirror);
} else {
Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
}
break;
case Qt::Key_F:
if (isShifted) {
@ -1367,7 +1393,8 @@ void Application::processAvatarURLsMessage(unsigned char* packetData, size_t dat
QMetaObject::invokeMethod(avatar->getVoxels(), "setVoxelURL", Q_ARG(QUrl, voxelURL));
// use this timing to ask the data-server for an updated mesh for this avatar (if we have a UUID)
DataServerClient::getValueForKeyAndUUID(DataServerKey::FaceMeshURL, avatar->getUUID());
DataServerClient::getValuesForKeysAndUUID(QStringList() << DataServerKey::FaceMeshURL << DataServerKey::SkeletonURL,
avatar->getUUID());
}
void Application::processAvatarFaceVideoMessage(unsigned char* packetData, size_t dataBytes) {
@ -1375,7 +1402,7 @@ void Application::processAvatarFaceVideoMessage(unsigned char* packetData, size_
if (!avatar) {
return;
}
avatar->getHead().getFace().processVideoMessage(packetData, dataBytes);
avatar->getHead().getVideoFace().processVideoMessage(packetData, dataBytes);
}
void Application::checkBandwidthMeterClick() {
@ -1682,6 +1709,7 @@ void Application::init() {
if (!_profile.getUsername().isEmpty()) {
// we have a username for this avatar, ask the data-server for the mesh URL for this avatar
DataServerClient::getClientValueForKey(DataServerKey::FaceMeshURL);
DataServerClient::getClientValueForKey(DataServerKey::SkeletonURL);
}
// Set up VoxelSystem after loading preferences so we can get the desired max voxel count
@ -2125,7 +2153,12 @@ void Application::update(float deltaTime) {
}
if (!OculusManager::isConnected()) {
if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
_myCamera.setMode(CAMERA_MODE_MIRROR);
_myCamera.setModeShiftRate(100.0f);
}
} else if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) {
if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) {
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
_myCamera.setModeShiftRate(1.0f);
@ -2508,7 +2541,7 @@ void Application::computeOffAxisFrustum(float& left, float& right, float& bottom
_viewFrustum.computeOffAxisFrustum(left, right, bottom, top, near, far, nearClipPlane, farClipPlane);
}
void Application::displaySide(Camera& whichCamera) {
void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::displaySide()");
// transform by eye offset
@ -2540,7 +2573,7 @@ void Application::displaySide(Camera& whichCamera) {
// Setup 3D lights (after the camera transform, so that they are positioned in world space)
setupWorldLight(whichCamera);
if (Menu::getInstance()->isOptionChecked(MenuOption::Stars)) {
if (!selfAvatarOnly && Menu::getInstance()->isOptionChecked(MenuOption::Stars)) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... stars...");
if (!_stars.isStarsLoaded()) {
@ -2567,7 +2600,7 @@ void Application::displaySide(Camera& whichCamera) {
}
// draw the sky dome
if (Menu::getInstance()->isOptionChecked(MenuOption::Atmosphere)) {
if (!selfAvatarOnly && Menu::getInstance()->isOptionChecked(MenuOption::Atmosphere)) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... atmosphere...");
_environment.renderAtmospheres(whichCamera);
@ -2580,6 +2613,7 @@ void Application::displaySide(Camera& whichCamera) {
//renderLineToTouchedVoxel();
//renderThrustAtVoxel(_voxelThrust);
if (!selfAvatarOnly) {
// draw a red sphere
float sphereRadius = 0.25f;
glColor3f(1,0,0);
@ -2697,6 +2731,7 @@ void Application::displaySide(Camera& whichCamera) {
_sharedVoxelSystem.render(true);
glPopMatrix();
}
}
_myAvatar.renderScreenTint(SCREEN_TINT_BEFORE_AVATARS, whichCamera);
@ -2705,6 +2740,7 @@ void Application::displaySide(Camera& whichCamera) {
"Application::displaySide() ... Avatars...");
if (!selfAvatarOnly) {
// Render avatars of other nodes
NodeList* nodeList = NodeList::getInstance();
@ -2726,6 +2762,7 @@ void Application::displaySide(Camera& whichCamera) {
node->unlock();
}
}
// Render my own Avatar
if (whichCamera.getMode() == CAMERA_MODE_MIRROR && !_faceshift.isActive()) {

View file

@ -196,7 +196,7 @@ private slots:
private:
void resetCamerasOnResizeGL(Camera& camera, int width, int height);
void updateProjectionMatrix();
void updateProjectionMatrix(Camera& camera);
void updateProjectionMatrix(Camera& camera, bool updateViewFrustum = true);
static bool sendVoxelsOperation(VoxelNode* node, void* extraData);
static void processAvatarURLsMessage(unsigned char* packetData, size_t dataBytes);
@ -221,7 +221,7 @@ private:
void loadViewFrustum(Camera& camera, ViewFrustum& viewFrustum);
void displayOculus(Camera& whichCamera);
void displaySide(Camera& whichCamera);
void displaySide(Camera& whichCamera, bool selfAvatarOnly = false);
void displayOverlay();
void displayStats();
void renderViewFrustum(ViewFrustum& viewFrustum);

View file

@ -131,8 +131,8 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
if (keyList[i] == DataServerKey::FaceMeshURL) {
if (userUUID.isNull() || userUUID == Application::getInstance()->getProfile()->getUUID()) {
qDebug("Changing user's face model URL to %s\n", valueList[0].toLocal8Bit().constData());
Application::getInstance()->getProfile()->setFaceModelURL(QUrl(valueList[0]));
qDebug("Changing user's face model URL to %s\n", valueList[i].toLocal8Bit().constData());
Application::getInstance()->getProfile()->setFaceModelURL(QUrl(valueList[i]));
} else {
// mesh URL for a UUID, find avatar in our list
NodeList* nodeList = NodeList::getInstance();
@ -141,9 +141,27 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
Avatar* avatar = (Avatar *) node->getLinkedData();
if (avatar->getUUID() == userUUID) {
QMetaObject::invokeMethod(&avatar->getHead().getBlendFace(),
"setModelURL",
Q_ARG(QUrl, QUrl(valueList[0])));
QMetaObject::invokeMethod(&avatar->getHead().getFaceModel(),
"setURL", Q_ARG(QUrl, QUrl(valueList[i])));
}
}
}
}
} else if (keyList[i] == DataServerKey::SkeletonURL) {
if (userUUID.isNull() || userUUID == Application::getInstance()->getProfile()->getUUID()) {
qDebug("Changing user's skeleton URL to %s\n", valueList[i].toLocal8Bit().constData());
Application::getInstance()->getProfile()->setSkeletonModelURL(QUrl(valueList[i]));
} else {
// skeleton URL for a UUID, find avatar in our list
NodeList* nodeList = NodeList::getInstance();
for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
if (node->getLinkedData() != NULL && node->getType() == NODE_TYPE_AGENT) {
Avatar* avatar = (Avatar *) node->getLinkedData();
if (avatar->getUUID() == userUUID) {
QMetaObject::invokeMethod(&avatar->getSkeletonModel(), "setURL",
Q_ARG(QUrl, QUrl(valueList[i])));
}
}
}
@ -169,7 +187,7 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
} else if (keyList[i] == DataServerKey::UUID) {
// this is the user's UUID - set it on the profile
Application::getInstance()->getProfile()->setUUID(valueList[0]);
Application::getInstance()->getProfile()->setUUID(valueList[i]);
}
}
}
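The substance of this change is an indexing fix: keys and values arrive from the data-server as parallel lists, and the old code read valueList[0] no matter which key it was handling. A minimal sketch of the corrected pairing (handleKeyValue is a placeholder, not a function in the repo):

// Pair entry i of the key list with entry i of the value list.
for (int i = 0; i < keyList.size() && i < valueList.size(); i++) {
    handleKeyValue(keyList[i], valueList[i]); // FaceMeshURL, SkeletonURL, UUID, ...
}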

View file

@ -38,6 +38,7 @@ private:
namespace DataServerKey {
const QString Domain = "domain";
const QString FaceMeshURL = "mesh";
const QString SkeletonURL = "skeleton";
const QString Position = "position";
const QString UUID = "uuid";
}

View file

@ -203,7 +203,8 @@ Menu::Menu() :
appInstance,
SLOT(setFullscreen(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::FirstPerson, Qt::Key_P, true);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Mirror, Qt::Key_H);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Mirror, Qt::SHIFT | Qt::Key_H);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::FullscreenMirror, Qt::Key_H);
QMenu* avatarSizeMenu = viewMenu->addMenu("Avatar Size");
@ -289,8 +290,8 @@ Menu::Menu() :
addDisabledActionAndSeparator(cullingOptionsMenu, "Individual Option Settings");
addCheckableActionToQMenuAndActionHash(cullingOptionsMenu, MenuOption::DisableFastVoxelPipeline, 0,
false, appInstance->getVoxels(), SLOT(setDisableFastVoxelPipeline(bool)));
addCheckableActionToQMenuAndActionHash(cullingOptionsMenu, MenuOption::RemoveOutOfView);
addCheckableActionToQMenuAndActionHash(cullingOptionsMenu, MenuOption::DisableHideOutOfView);
addCheckableActionToQMenuAndActionHash(cullingOptionsMenu, MenuOption::RemoveOutOfView);
addCheckableActionToQMenuAndActionHash(cullingOptionsMenu, MenuOption::UseFullFrustumInHide);
addCheckableActionToQMenuAndActionHash(cullingOptionsMenu, MenuOption::DisableConstantCulling);
@ -309,7 +310,7 @@ Menu::Menu() :
addActionToQMenuAndActionHash(avatarOptionsMenu,
MenuOption::FaceMode,
0,
&appInstance->getAvatar()->getHead().getFace(),
&appInstance->getAvatar()->getHead().getVideoFace(),
SLOT(cycleRenderMode()));
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::LookAtVectors, 0, true);
@ -811,6 +812,11 @@ void Menu::editPreferences() {
faceURLEdit->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Face URL:", faceURLEdit);
QString skeletonURLString = applicationInstance->getProfile()->getSkeletonModelURL().toString();
QLineEdit* skeletonURLEdit = new QLineEdit(skeletonURLString);
skeletonURLEdit->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Skeleton URL:", skeletonURLEdit);
QSlider* pupilDilation = new QSlider(Qt::Horizontal);
pupilDilation->setValue(applicationInstance->getAvatar()->getHead().getPupilDilation() * pupilDilation->maximum());
form->addRow("Pupil Dilation:", pupilDilation);
@ -863,6 +869,17 @@ void Menu::editPreferences() {
faceModelURL.toString().toLocal8Bit().constData());
}
QUrl skeletonModelURL(skeletonURLEdit->text());
if (skeletonModelURL.toString() != skeletonURLString) {
// change the skeletonModelURL in the profile, it will also update this user's Body
applicationInstance->getProfile()->setSkeletonModelURL(skeletonModelURL);
// send the new skeleton model URL to the data-server (if we have a client UUID)
DataServerClient::putValueForKey(DataServerKey::SkeletonURL,
skeletonModelURL.toString().toLocal8Bit().constData());
}
QUrl avatarVoxelURL(avatarURL->text());
applicationInstance->getAvatar()->getVoxels()->setVoxelURL(avatarVoxelURL);

View file

@ -147,7 +147,7 @@ namespace MenuOption {
const QString DisableFastVoxelPipeline = "Disable Fast Voxel Pipeline";
const QString DisplayFrustum = "Display Frustum";
const QString DontRenderVoxels = "Don't call _voxels.render()";
const QString DontCallOpenGLForVoxels = "Don't call glDrawElements()/glDrawRangeElementsEXT() for Voxels";
const QString DontCallOpenGLForVoxels = "Don't call glDrawRangeElementsEXT() for Voxels";
const QString EchoAudio = "Echo Audio";
const QString ExportVoxels = "Export Voxels";
const QString HeadMouse = "Head Mouse";
@ -164,6 +164,7 @@ namespace MenuOption {
const QString FrameTimer = "Show Timer";
const QString FrustumRenderMode = "Render Mode";
const QString Fullscreen = "Fullscreen";
const QString FullscreenMirror = "Fullscreen Mirror";
const QString GlowMode = "Cycle Glow Mode";
const QString GoToDomain = "Go To Domain...";
const QString GoToLocation = "Go To Location...";
@ -196,7 +197,7 @@ namespace MenuOption {
const QString PipelineWarnings = "Show Render Pipeline Warnings";
const QString Preferences = "Preferences...";
const QString RandomizeVoxelColors = "Randomize Voxel TRUE Colors";
const QString RemoveOutOfView = "Remove Out of View Voxels";
const QString RemoveOutOfView = "Instead of Hide Remove Out of View Voxels";
const QString ResetAvatarSize = "Reset Avatar Size";
const QString ResetSwatchColors = "Reset Swatch Colors";
const QString RunTimingTests = "Run Timing Tests";

View file

@ -158,6 +158,66 @@ glm::quat safeMix(const glm::quat& q1, const glm::quat& q2, float proportion) {
return glm::normalize(glm::quat(s0 * q1.w + s1 * ow, s0 * q1.x + s1 * ox, s0 * q1.y + s1 * oy, s0 * q1.z + s1 * oz));
}
glm::vec3 extractTranslation(const glm::mat4& matrix) {
return glm::vec3(matrix[3][0], matrix[3][1], matrix[3][2]);
}
glm::quat extractRotation(const glm::mat4& matrix, bool assumeOrthogonal) {
// uses the iterative polar decomposition algorithm described by Ken Shoemake at
// http://www.cs.wisc.edu/graphics/Courses/838-s2002/Papers/polar-decomp.pdf
// code adapted from Clyde, https://github.com/threerings/clyde/blob/master/src/main/java/com/threerings/math/Matrix4f.java
// start with the contents of the upper 3x3 portion of the matrix
glm::mat3 upper = glm::mat3(matrix);
if (!assumeOrthogonal) {
for (int i = 0; i < 10; i++) {
// store the results of the previous iteration
glm::mat3 previous = upper;
// compute average of the matrix with its inverse transpose
float sd00 = previous[1][1] * previous[2][2] - previous[2][1] * previous[1][2];
float sd10 = previous[0][1] * previous[2][2] - previous[2][1] * previous[0][2];
float sd20 = previous[0][1] * previous[1][2] - previous[1][1] * previous[0][2];
float det = previous[0][0] * sd00 + previous[2][0] * sd20 - previous[1][0] * sd10;
if (fabs(det) == 0.0f) {
// determinant is zero; matrix is not invertible
break;
}
float hrdet = 0.5f / det;
upper[0][0] = +sd00 * hrdet + previous[0][0] * 0.5f;
upper[1][0] = -sd10 * hrdet + previous[1][0] * 0.5f;
upper[2][0] = +sd20 * hrdet + previous[2][0] * 0.5f;
upper[0][1] = -(previous[1][0] * previous[2][2] - previous[2][0] * previous[1][2]) * hrdet + previous[0][1] * 0.5f;
upper[1][1] = +(previous[0][0] * previous[2][2] - previous[2][0] * previous[0][2]) * hrdet + previous[1][1] * 0.5f;
upper[2][1] = -(previous[0][0] * previous[1][2] - previous[1][0] * previous[0][2]) * hrdet + previous[2][1] * 0.5f;
upper[0][2] = +(previous[1][0] * previous[2][1] - previous[2][0] * previous[1][1]) * hrdet + previous[0][2] * 0.5f;
upper[1][2] = -(previous[0][0] * previous[2][1] - previous[2][0] * previous[0][1]) * hrdet + previous[1][2] * 0.5f;
upper[2][2] = +(previous[0][0] * previous[1][1] - previous[1][0] * previous[0][1]) * hrdet + previous[2][2] * 0.5f;
// compute the difference; if it's small enough, we're done
glm::mat3 diff = upper - previous;
if (diff[0][0] * diff[0][0] + diff[1][0] * diff[1][0] + diff[2][0] * diff[2][0] + diff[0][1] * diff[0][1] +
diff[1][1] * diff[1][1] + diff[2][1] * diff[2][1] + diff[0][2] * diff[0][2] + diff[1][2] * diff[1][2] +
diff[2][2] * diff[2][2] < EPSILON) {
break;
}
}
}
// now that we have a nice orthogonal matrix, we can extract the rotation quaternion
// using the method described in http://en.wikipedia.org/wiki/Rotation_matrix#Conversions
float x2 = fabs(1.0f + upper[0][0] - upper[1][1] - upper[2][2]);
float y2 = fabs(1.0f - upper[0][0] + upper[1][1] - upper[2][2]);
float z2 = fabs(1.0f - upper[0][0] - upper[1][1] + upper[2][2]);
float w2 = fabs(1.0f + upper[0][0] + upper[1][1] + upper[2][2]);
return glm::normalize(glm::quat(0.5f * sqrtf(w2),
0.5f * sqrtf(x2) * (upper[1][2] >= upper[2][1] ? 1.0f : -1.0f),
0.5f * sqrtf(y2) * (upper[2][0] >= upper[0][2] ? 1.0f : -1.0f),
0.5f * sqrtf(z2) * (upper[0][1] >= upper[1][0] ? 1.0f : -1.0f)));
}
// Draw a 3D vector floating in space
void drawVector(glm::vec3 * vector) {
glDisable(GL_LIGHTING);
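A small round-trip sketch of the two helpers added above, assuming GLM and the declarations added to Util.h in the next file; this is an illustration, not code from the commit. Because the input matrix is already orthogonal, assumeOrthogonal is set to skip the polar-decomposition iteration:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/transform.hpp>
#include "Util.h" // declares extractTranslation() and extractRotation()

void decomposeExample() {
    glm::quat rotation = glm::angleAxis(30.0f, glm::vec3(0.0f, 1.0f, 0.0f)); // degrees in this GLM version
    glm::vec3 translation(1.0f, 2.0f, 3.0f);
    glm::mat4 transform = glm::translate(translation) * glm::mat4_cast(rotation);

    glm::vec3 recoveredTranslation = extractTranslation(transform); // == translation
    glm::quat recoveredRotation = extractRotation(transform, true); // matches rotation up to sign
}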

View file

@ -55,6 +55,10 @@ glm::vec3 safeEulerAngles(const glm::quat& q);
glm::quat safeMix(const glm::quat& q1, const glm::quat& q2, float alpha);
glm::vec3 extractTranslation(const glm::mat4& matrix);
glm::quat extractRotation(const glm::mat4& matrix, bool assumeOrthogonal = false);
double diffclock(timeval *clock1,timeval *clock2);
void renderGroundPlaneGrid(float size, float impact);

View file

@ -753,25 +753,54 @@ void VoxelSystem::checkForCulling() {
&& !isViewChanging()
)
) {
_lastViewCulling = start;
// When we call removeOutOfView(), we don't actually remove the voxels from the VBOs, but we do remove
// them from the tree; this makes our tree calculations faster and doesn't require us to fully rebuild the VBOs (which
// can be expensive).
if (!Menu::getInstance()->isOptionChecked(MenuOption::DisableHideOutOfView)) {
hideOutOfView();
// track how long it's been since we were last moving. If we have recently moved, then only use delta frustums; if
// it's been a long time since we last moved, then go ahead and do a full frustum cull.
if (isViewChanging()) {
_lastViewIsChanging = start;
}
if (Menu::getInstance()->isOptionChecked(MenuOption::RemoveOutOfView)) {
uint64_t sinceLastMoving = (start - _lastViewIsChanging) / 1000;
bool enoughTime = (sinceLastMoving >= std::max((float) _lastViewCullingElapsed, VIEW_CULLING_RATE_IN_MILLISECONDS));
// These "has changed" events will occur before we stop, so we need to remember them for when we have finally stopped
// moving for long enough (enoughTime)
if (hasViewChanged()) {
_hasRecentlyChanged = true;
}
// If we have recently changed, but it's been enough time since we last moved, then we will do a full frustum
// hide/show culling pass
bool forceFullFrustum = enoughTime && _hasRecentlyChanged;
// in hide mode, we only track the full frustum culls, because we don't care about the partials.
if (forceFullFrustum) {
_lastViewCulling = start;
_hasRecentlyChanged = false;
}
hideOutOfView(forceFullFrustum);
if (forceFullFrustum) {
uint64_t endViewCulling = usecTimestampNow();
_lastViewCullingElapsed = (endViewCulling - start) / 1000;
}
} else if (Menu::getInstance()->isOptionChecked(MenuOption::RemoveOutOfView)) {
_lastViewCulling = start;
removeOutOfView();
uint64_t endViewCulling = usecTimestampNow();
_lastViewCullingElapsed = (endViewCulling - start) / 1000;
}
// Once we call cleanupRemovedVoxels() we do need to rebuild our VBOs (if anything was actually removed). So,
// we should consider putting this someplace else... as this might be able to occur less frequently, and save us on
// VBO rebuilding. Possibly we should do this only if our actual VBO usage crosses some lower boundary.
cleanupRemovedVoxels();
uint64_t endViewCulling = usecTimestampNow();
_lastViewCullingElapsed = (endViewCulling - start) / 1000;
}
uint64_t sinceLastAudit = (start - _lastAudit) / 1000;
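The timing logic above is a throttle: remember that something changed while the view was moving, and run the expensive full-frustum hide/show pass only once the view has been stable for at least as long as the previous full pass took (and at least VIEW_CULLING_RATE_IN_MILLISECONDS). A self-contained sketch of that pattern, with illustrative names that are not from the repo:

#include <algorithm>
#include <cstdint>

class FullPassThrottle {
public:
    // Call whenever the view is observed to be changing.
    void viewChanged(uint64_t nowUsecs) { _lastChangeUsecs = nowUsecs; _workPending = true; }

    // A full pass is due when work is pending and the view has been stable for at
    // least max(duration of the last full pass, the configured minimum interval).
    bool fullPassDue(uint64_t nowUsecs, uint64_t minIntervalMsecs) const {
        uint64_t stableMsecs = (nowUsecs - _lastChangeUsecs) / 1000;
        return _workPending && stableMsecs >= std::max(_lastPassMsecs, minIntervalMsecs);
    }

    // Record how long the pass took so the next one waits at least that long.
    void fullPassCompleted(uint64_t startUsecs, uint64_t endUsecs) {
        _lastPassMsecs = (endUsecs - startUsecs) / 1000;
        _workPending = false;
    }

private:
    uint64_t _lastChangeUsecs = 0;
    uint64_t _lastPassMsecs = 0;
    bool _workPending = false;
};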
@ -1047,7 +1076,8 @@ void VoxelSystem::init() {
_callsToTreesToArrays = 0;
_setupNewVoxelsForDrawingLastFinished = 0;
_setupNewVoxelsForDrawingLastElapsed = 0;
_lastViewCullingElapsed = _lastViewCulling = _lastAudit = 0;
_lastViewCullingElapsed = _lastViewCulling = _lastAudit = _lastViewIsChanging = 0;
_hasRecentlyChanged = false;
_voxelsDirty = false;
_voxelsInWriteArrays = 0;
@ -1807,11 +1837,26 @@ public:
}
};
void VoxelSystem::hideOutOfView() {
void VoxelSystem::hideOutOfView(bool forceFullFrustum) {
bool showDebugDetails = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showDebugDetails, "hideOutOfView()", showDebugDetails);
bool widenFrustum = true;
bool wantDeltaFrustums = !Menu::getInstance()->isOptionChecked(MenuOption::UseFullFrustumInHide);
// When using "delta" view frustums and only hide/show items that are in the difference
// between the two view frustums. There are some potential problems with this mode.
//
// 1) This work well for rotating, but what about moving forward?
// In the move forward case, you'll get new voxel details, but those
// new voxels will be in the last view.
//
// 2) Also, voxels will arrive from the network that are OUTSIDE of the view
// frustum... these won't get hidden... and so we can't assume they are correctly
// hidden...
//
// Both these problems are solved by intermittently calling this with forceFullFrustum set
// to true. This will essentially clean up the improperly hidden or shown voxels.
//
bool wantDeltaFrustums = !forceFullFrustum && !Menu::getInstance()->isOptionChecked(MenuOption::UseFullFrustumInHide);
hideOutOfViewArgs args(this, this->_tree, _culledOnce, widenFrustum, wantDeltaFrustums);
const bool wantViewFrustumDebugging = false; // change to true for additional debugging
@ -1822,24 +1867,11 @@ void VoxelSystem::hideOutOfView() {
}
}
if (_culledOnce && args.lastViewFrustum.matches(args.thisViewFrustum)) {
if (!forceFullFrustum && _culledOnce && args.lastViewFrustum.matches(args.thisViewFrustum)) {
//printf("view frustum hasn't changed BAIL!!!\n");
return;
}
// Changed hideOutOfView() to support "delta" view frustums and only hide/show items that are in the difference
// between the two view frustums. There are some potential problems with this idea...
//
// 1) This might work well for rotating, but what about moving forward?
// in the move forward case, you'll get new voxel details, but those
// new voxels will be in the last view... does that work? This works
// ok for now because voxel server resends them and so they get redisplayed,
// but this will not work if we update the voxel server to send less data.
//
// 2) what about voxels coming in from the network that are OUTSIDE of the view
// frustum... they don't get hidden... and so we can't assume they are correctly
// hidden... we could solve this with checking in view on voxelUpdated...
//
_tree->recurseTreeWithOperation(hideOutOfViewOperation,(void*)&args);
_lastCulledViewFrustum = args.thisViewFrustum; // save last stable
_culledOnce = true;

View file

@ -85,7 +85,7 @@ public:
void killLocalVoxels();
virtual void removeOutOfView();
virtual void hideOutOfView();
virtual void hideOutOfView(bool forceFullFrustum = false);
bool hasViewChanged();
bool isViewChanging();
@ -228,8 +228,10 @@ private:
int _setupNewVoxelsForDrawingLastElapsed;
uint64_t _setupNewVoxelsForDrawingLastFinished;
uint64_t _lastViewCulling;
uint64_t _lastViewIsChanging;
uint64_t _lastAudit;
int _lastViewCullingElapsed;
bool _hasRecentlyChanged;
void initVoxelMemory();
void cleanupVoxelMemory();

View file

@ -79,6 +79,7 @@ Avatar::Avatar(Node* owningNode) :
AvatarData(owningNode),
_head(this),
_hand(this),
_skeletonModel(this),
_ballSpringsInitialized(false),
_bodyYawDelta(0.0f),
_movedHandOffset(0.0f, 0.0f, 0.0f),
@ -260,6 +261,7 @@ Avatar::~Avatar() {
void Avatar::init() {
_head.init();
_hand.init();
_skeletonModel.init();
_voxels.init();
_initialized = true;
}
@ -410,8 +412,13 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
}
}
_skeletonModel.simulate(deltaTime);
_head.setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll));
_head.setPosition(_bodyBall[ BODY_BALL_HEAD_BASE ].position);
glm::vec3 headPosition;
if (!_skeletonModel.getHeadPosition(headPosition)) {
headPosition = _bodyBall[BODY_BALL_HEAD_BASE].position;
}
_head.setPosition(headPosition);
_head.setSkinColor(glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]));
_head.simulate(deltaTime, false);
_hand.simulate(deltaTime, false);
@ -742,21 +749,16 @@ float Avatar::getBallRenderAlpha(int ball, bool lookingInMirror) const {
void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
if (_head.getFace().isFullFrame()) {
if (_head.getVideoFace().isFullFrame()) {
// Render the full-frame video
float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
if (alpha > 0.0f) {
_head.getFace().render(1.0f);
_head.getVideoFace().render(1.0f);
}
} else if (renderAvatarBalls || !_voxels.getVoxelURL().isValid()) {
} else if (renderAvatarBalls || !(_voxels.getVoxelURL().isValid() || _skeletonModel.isActive())) {
// Render the body as balls and cones
glm::vec3 skinColor(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]);
glm::vec3 darkSkinColor(DARK_SKIN_COLOR[0], DARK_SKIN_COLOR[1], DARK_SKIN_COLOR[2]);
if (_head.getBlendFace().isActive()) {
skinColor = glm::vec3(_head.getBlendFace().computeAverageColor());
const float SKIN_DARKENING = 0.9f;
darkSkinColor = skinColor * SKIN_DARKENING;
}
glm::vec3 skinColor, darkSkinColor;
getSkinColors(skinColor, darkSkinColor);
for (int b = 0; b < NUM_AVATAR_BODY_BALLS; b++) {
float alpha = getBallRenderAlpha(b, lookingInMirror);
@ -778,7 +780,7 @@ void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
skinColor.g - _bodyBall[b].touchForce * 0.2f,
skinColor.b - _bodyBall[b].touchForce * 0.1f);
if (b == BODY_BALL_NECK_BASE && _head.getBlendFace().isActive()) {
if (b == BODY_BALL_NECK_BASE && _head.getFaceModel().isActive()) {
continue; // don't render the neck if we have a face model
}
@ -813,13 +815,25 @@ void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
// Render the body's voxels and head
float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
if (alpha > 0.0f) {
if (!_skeletonModel.render(alpha)) {
_voxels.render(false);
}
_head.render(alpha, false);
}
}
_hand.render(lookingInMirror);
}
void Avatar::getSkinColors(glm::vec3& lighter, glm::vec3& darker) {
lighter = glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]);
darker = glm::vec3(DARK_SKIN_COLOR[0], DARK_SKIN_COLOR[1], DARK_SKIN_COLOR[2]);
if (_head.getFaceModel().isActive()) {
lighter = glm::vec3(_head.getFaceModel().computeAverageColor());
const float SKIN_DARKENING = 0.9f;
darker = lighter * SKIN_DARKENING;
}
}
void Avatar::getBodyBallTransform(AvatarJointID jointID, glm::vec3& position, glm::quat& rotation) const {
position = _bodyBall[jointID].position;
rotation = _bodyBall[jointID].rotation;

View file

@ -22,6 +22,7 @@
#include "Head.h"
#include "InterfaceConfig.h"
#include "Skeleton.h"
#include "SkeletonModel.h"
#include "world.h"
#include "devices/SerialInterface.h"
#include "devices/Transmitter.h"
@ -146,6 +147,7 @@ public:
//getters
bool isInitialized() const { return _initialized; }
const Skeleton& getSkeleton() const { return _skeleton; }
SkeletonModel& getSkeletonModel() { return _skeletonModel; }
float getHeadYawRate() const { return _head.yawRate; }
const glm::vec3& getHeadJointPosition() const { return _skeleton.joint[ AVATAR_JOINT_HEAD_BASE ].position; }
float getScale() const { return _scale; }
@ -156,6 +158,8 @@ public:
glm::quat getWorldAlignedOrientation() const;
AvatarVoxelSystem* getVoxels() { return &_voxels; }
void getSkinColors(glm::vec3& lighter, glm::vec3& darker);
// Get the position/rotation of a single body ball
void getBodyBallTransform(AvatarJointID jointID, glm::vec3& position, glm::quat& rotation) const;
@ -198,6 +202,7 @@ protected:
Head _head;
Hand _hand;
Skeleton _skeleton;
SkeletonModel _skeletonModel;
bool _ballSpringsInitialized;
float _bodyYawDelta;
glm::vec3 _movedHandOffset;

View file

@ -1,92 +0,0 @@
//
// BlendFace.h
// interface
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__BlendFace__
#define __interface__BlendFace__
#include <QObject>
#include <QUrl>
#include "InterfaceConfig.h"
#include "renderer/GeometryCache.h"
#include "renderer/ProgramObject.h"
#include "renderer/TextureCache.h"
class QNetworkReply;
class Head;
/// A face formed from a linear mix of blendshapes according to a set of coefficients.
class BlendFace : public QObject {
Q_OBJECT
public:
BlendFace(Head* owningHead);
~BlendFace();
bool isActive() const { return _geometry && _geometry->isLoaded(); }
void init();
void reset();
void simulate(float deltaTime);
bool render(float alpha);
Q_INVOKABLE void setModelURL(const QUrl& url);
const QUrl& getModelURL() const { return _modelURL; }
/// Retrieve the positions of up to two eye meshes.
/// \param upright if true, retrieve the locations of the eyes in the upright position
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition, bool upright = false) const;
/// Returns the average color of all meshes in the geometry.
glm::vec4 computeAverageColor() const;
private:
void deleteGeometry();
Head* _owningHead;
QUrl _modelURL;
QSharedPointer<NetworkGeometry> _geometry;
class JointState {
public:
glm::quat rotation;
glm::mat4 transform;
};
QVector<JointState> _jointStates;
class MeshState {
public:
QVector<glm::mat4> clusterMatrices;
QVector<glm::vec3> worldSpaceVertices;
QVector<glm::vec3> vertexVelocities;
QVector<glm::vec3> worldSpaceNormals;
};
QVector<MeshState> _meshStates;
QVector<GLuint> _blendedVertexBufferIDs;
QVector<QVector<QSharedPointer<Texture> > > _dilatedTextures;
bool _resetStates;
QVector<glm::vec3> _blendedVertices;
QVector<glm::vec3> _blendedNormals;
static ProgramObject _program;
static ProgramObject _skinProgram;
static int _clusterMatricesLocation;
static int _clusterIndicesLocation;
static int _clusterWeightsLocation;
};
#endif /* defined(__interface__BlendFace__) */

View file

@ -0,0 +1,66 @@
//
// FaceModel.cpp
// interface
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include <glm/gtx/transform.hpp>
#include "Avatar.h"
#include "FaceModel.h"
#include "Head.h"
FaceModel::FaceModel(Head* owningHead) :
_owningHead(owningHead)
{
}
void FaceModel::simulate(float deltaTime) {
if (!isActive()) {
return;
}
Avatar* owningAvatar = static_cast<Avatar*>(_owningHead->_owningAvatar);
glm::vec3 neckPosition;
if (!owningAvatar->getSkeletonModel().getNeckPosition(neckPosition)) {
neckPosition = owningAvatar->getSkeleton().joint[AVATAR_JOINT_NECK_BASE].position;
}
setTranslation(neckPosition);
glm::quat neckRotation;
if (!owningAvatar->getSkeletonModel().getNeckRotation(neckRotation)) {
neckRotation = owningAvatar->getSkeleton().joint[AVATAR_JOINT_NECK_BASE].absoluteRotation *
glm::angleAxis(180.0f, 0.0f, 1.0f, 0.0f);
}
setRotation(neckRotation);
const float MODEL_SCALE = 0.0006f;
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningHead->getScale() * MODEL_SCALE);
const glm::vec3 MODEL_TRANSLATION(0.0f, -60.0f, 40.0f); // temporary fudge factor
setOffset(MODEL_TRANSLATION - _geometry->getFBXGeometry().neckPivot);
setPupilDilation(_owningHead->getPupilDilation());
setBlendshapeCoefficients(_owningHead->getBlendshapeCoefficients());
Model::simulate(deltaTime);
}
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(_rotation);
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.transform *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation)));
state.rotation = glm::angleAxis(-_owningHead->getRoll(), glm::normalize(inverse * axes[2])) *
glm::angleAxis(_owningHead->getYaw(), glm::normalize(inverse * axes[1])) *
glm::angleAxis(-_owningHead->getPitch(), glm::normalize(inverse * axes[0])) * joint.rotation;
}
void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// likewise with the eye joints
glm::mat4 inverse = glm::inverse(parentState.transform *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getOrientation() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
_owningHead->getSaccade(), 1.0f));
state.rotation = rotationBetween(front, lookAt) * joint.rotation;
}

View file

@ -0,0 +1,36 @@
//
// FaceModel.h
// interface
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__FaceModel__
#define __interface__FaceModel__
#include "renderer/Model.h"
class Head;
/// A face formed from a linear mix of blendshapes according to a set of coefficients.
class FaceModel : public Model {
Q_OBJECT
public:
FaceModel(Head* owningHead);
void simulate(float deltaTime);
protected:
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
private:
Head* _owningHead;
};
#endif /* defined(__interface__FaceModel__) */

View file

@ -83,8 +83,8 @@ Head::Head(Avatar* owningAvatar) :
_mousePitch(0.f),
_cameraYaw(_yaw),
_isCameraMoving(false),
_face(this),
_blendFace(this)
_videoFace(this),
_faceModel(this)
{
if (USING_PHYSICAL_MOHAWK) {
resetHairPhysics();
@ -104,7 +104,7 @@ void Head::init() {
_irisTexture = Application::getInstance()->getTextureCache()->getTexture(QUrl::fromLocalFile(IRIS_TEXTURE_FILENAME),
true).staticCast<DilatableNetworkTexture>();
}
_blendFace.init();
_faceModel.init();
}
void Head::reset() {
@ -115,7 +115,7 @@ void Head::reset() {
resetHairPhysics();
}
_blendFace.reset();
_faceModel.reset();
}
void Head::resetHairPhysics() {
@ -237,7 +237,7 @@ void Head::simulate(float deltaTime, bool isMine) {
updateHairPhysics(deltaTime);
}
_blendFace.simulate(deltaTime);
_faceModel.simulate(deltaTime);
}
void Head::calculateGeometry() {
@ -285,7 +285,7 @@ void Head::calculateGeometry() {
void Head::render(float alpha, bool isMine) {
_renderAlpha = alpha;
if (!(_face.render(alpha) || _blendFace.render(alpha))) {
if (!(_videoFace.render(alpha) || _faceModel.render(alpha))) {
calculateGeometry();
glEnable(GL_DEPTH_TEST);
@ -300,9 +300,9 @@ void Head::render(float alpha, bool isMine) {
renderEyeBrows();
}
if (_blendFace.isActive()) {
if (_faceModel.isActive()) {
// the blend face may have custom eye meshes
_blendFace.getEyePositions(_leftEyePosition, _rightEyePosition);
_faceModel.getEyePositions(_leftEyePosition, _rightEyePosition);
}
if (_renderLookatVectors) {

View file

@ -18,9 +18,9 @@
#include <VoxelConstants.h>
#include "BendyLine.h"
#include "BlendFace.h"
#include "Face.h"
#include "FaceModel.h"
#include "InterfaceConfig.h"
#include "VideoFace.h"
#include "world.h"
#include "devices/SerialInterface.h"
#include "renderer/TextureCache.h"
@ -76,8 +76,8 @@ public:
glm::quat getEyeRotation(const glm::vec3& eyePosition) const;
Face& getFace() { return _face; }
BlendFace& getBlendFace() { return _blendFace; }
VideoFace& getVideoFace() { return _videoFace; }
FaceModel& getFaceModel() { return _faceModel; }
const bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
float getAverageLoudness() const { return _averageLoudness; }
@ -132,8 +132,8 @@ private:
float _mousePitch;
float _cameraYaw;
bool _isCameraMoving;
Face _face;
BlendFace _blendFace;
VideoFace _videoFace;
FaceModel _faceModel;
QSharedPointer<Texture> _dilatedIrisTexture;
@ -154,7 +154,7 @@ private:
void resetHairPhysics();
void updateHairPhysics(float deltaTime);
friend class BlendFace;
friend class FaceModel;
};
#endif

View file

@ -317,8 +317,13 @@ void MyAvatar::simulate(float deltaTime, Transmitter* transmitter) {
}
}
_skeletonModel.simulate(deltaTime);
_head.setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll));
_head.setPosition(_bodyBall[ BODY_BALL_HEAD_BASE ].position);
glm::vec3 headPosition;
if (!_skeletonModel.getHeadPosition(headPosition)) {
headPosition = _bodyBall[BODY_BALL_HEAD_BASE].position;
}
_head.setPosition(headPosition);
_head.setScale(_scale);
_head.setSkinColor(glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]));
_head.simulate(deltaTime, true);
@ -390,7 +395,7 @@ void MyAvatar::updateFromGyrosAndOrWebcam(float pitchFromTouch, bool turnWithHea
_head.setMousePitch(pitchFromTouch);
_head.setPitch(pitchFromTouch);
}
_head.getFace().clearFrame();
_head.getVideoFace().clearFrame();
// restore rotation, lean to neutral positions
const float RESTORE_RATE = 0.05f;
@ -406,7 +411,7 @@ void MyAvatar::updateFromGyrosAndOrWebcam(float pitchFromTouch, bool turnWithHea
estimatedPosition = webcam->getEstimatedPosition();
// apply face data
_head.getFace().setFrameFromWebcam();
_head.getVideoFace().setFrameFromWebcam();
// compute and store the joint rotations
const JointVector& joints = webcam->getEstimatedJoints();
@ -423,7 +428,7 @@ void MyAvatar::updateFromGyrosAndOrWebcam(float pitchFromTouch, bool turnWithHea
}
}
} else {
_head.getFace().clearFrame();
_head.getVideoFace().clearFrame();
}
// Set the rotation of the avatar's head (as seen by others, not affecting view frustum)
@ -604,21 +609,16 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
return;
}
if (_head.getFace().isFullFrame()) {
if (_head.getVideoFace().isFullFrame()) {
// Render the full-frame video
float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
if (alpha > 0.0f) {
_head.getFace().render(1.0f);
_head.getVideoFace().render(1.0f);
}
} else if (renderAvatarBalls || !_voxels.getVoxelURL().isValid()) {
} else if (renderAvatarBalls || !(_voxels.getVoxelURL().isValid() || _skeletonModel.isActive())) {
// Render the body as balls and cones
glm::vec3 skinColor(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]);
glm::vec3 darkSkinColor(DARK_SKIN_COLOR[0], DARK_SKIN_COLOR[1], DARK_SKIN_COLOR[2]);
if (_head.getBlendFace().isActive()) {
skinColor = glm::vec3(_head.getBlendFace().computeAverageColor());
const float SKIN_DARKENING = 0.9f;
darkSkinColor = skinColor * SKIN_DARKENING;
}
glm::vec3 skinColor, darkSkinColor;
getSkinColors(skinColor, darkSkinColor);
for (int b = 0; b < NUM_AVATAR_BODY_BALLS; b++) {
float alpha = getBallRenderAlpha(b, lookingInMirror);
@ -649,7 +649,7 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
alpha);
}
if (b == BODY_BALL_NECK_BASE && _head.getBlendFace().isActive()) {
if (b == BODY_BALL_NECK_BASE && _head.getFaceModel().isActive()) {
continue; // don't render the neck if we have a face model
}
@ -685,7 +685,9 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
// Render the body's voxels and head
float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);
if (alpha > 0.0f) {
if (!_skeletonModel.render(alpha)) {
_voxels.render(false);
}
_head.render(alpha, true);
}
}

View file

@ -44,9 +44,15 @@ void Profile::setUUID(const QUuid& uuid) {
void Profile::setFaceModelURL(const QUrl& faceModelURL) {
_faceModelURL = faceModelURL;
QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getHead().getBlendFace(),
"setModelURL",
Q_ARG(QUrl, _faceModelURL));
QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getHead().getFaceModel(),
"setURL", Q_ARG(QUrl, _faceModelURL));
}
void Profile::setSkeletonModelURL(const QUrl& skeletonModelURL) {
_skeletonModelURL = skeletonModelURL;
QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getSkeletonModel(),
"setURL", Q_ARG(QUrl, _skeletonModelURL));
}
void Profile::updateDomain(const QString& domain) {
@ -91,6 +97,7 @@ void Profile::saveData(QSettings* settings) {
settings->setValue("username", _username);
settings->setValue("UUID", _uuid);
settings->setValue("faceModelURL", _faceModelURL);
settings->setValue("skeletonModelURL", _skeletonModelURL);
settings->endGroup();
}
@ -101,6 +108,7 @@ void Profile::loadData(QSettings* settings) {
_username = settings->value("username").toString();
this->setUUID(settings->value("UUID").toUuid());
_faceModelURL = settings->value("faceModelURL").toUrl();
_skeletonModelURL = settings->value("skeletonModelURL").toUrl();
settings->endGroup();
}

View file

@ -29,6 +29,9 @@ public:
void setFaceModelURL(const QUrl& faceModelURL);
const QUrl& getFaceModelURL() const { return _faceModelURL; }
void setSkeletonModelURL(const QUrl& skeletonModelURL);
const QUrl& getSkeletonModelURL() const { return _skeletonModelURL; }
void updateDomain(const QString& domain);
void updatePosition(const glm::vec3 position);
@ -43,6 +46,7 @@ private:
QString _lastDomain;
glm::vec3 _lastPosition;
QUrl _faceModelURL;
QUrl _skeletonModelURL;
};
#endif /* defined(__hifi__Profile__) */

View file

@ -0,0 +1,96 @@
//
// SkeletonModel.cpp
// interface
//
// Created by Andrzej Kapolka on 10/17/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include <glm/gtx/transform.hpp>
#include "Avatar.h"
#include "SkeletonModel.h"
SkeletonModel::SkeletonModel(Avatar* owningAvatar) :
_owningAvatar(owningAvatar) {
}
void SkeletonModel::simulate(float deltaTime) {
if (!isActive()) {
return;
}
setTranslation(_owningAvatar->getPosition());
setRotation(_owningAvatar->getOrientation() * glm::angleAxis(180.0f, 0.0f, 1.0f, 0.0f));
const float MODEL_SCALE = 0.0006f;
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale() * MODEL_SCALE);
Model::simulate(deltaTime);
}
bool SkeletonModel::render(float alpha) {
if (_jointStates.isEmpty()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::vec3 skinColor, darkSkinColor;
_owningAvatar->getSkinColors(skinColor, darkSkinColor);
for (int i = 0; i < _jointStates.size(); i++) {
glPushMatrix();
glm::vec3 position;
getJointPosition(i, position);
glTranslatef(position.x, position.y, position.z);
glm::quat rotation;
getJointRotation(i, rotation);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::angle(rotation), axis.x, axis.y, axis.z);
glColor4f(skinColor.r, skinColor.g, skinColor.b, alpha);
const float BALL_RADIUS = 0.02f;
const int BALL_SUBDIVISIONS = 10;
glutSolidSphere(BALL_RADIUS * _owningAvatar->getScale(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS);
glPopMatrix();
int parentIndex = geometry.joints[i].parentIndex;
if (parentIndex == -1) {
continue;
}
glColor4f(darkSkinColor.r, darkSkinColor.g, darkSkinColor.b, alpha);
glm::vec3 parentPosition;
getJointPosition(parentIndex, parentPosition);
const float STICK_RADIUS = BALL_RADIUS * 0.5f;
Avatar::renderJointConnectingCone(parentPosition, position, STICK_RADIUS, STICK_RADIUS);
}
Model::render(alpha);
return true;
}
void SkeletonModel::updateJointState(int index) {
Model::updateJointState(index);
if (index == _geometry->getFBXGeometry().rootJointIndex) {
JointState& state = _jointStates[index];
state.transform[3][0] = _translation.x;
state.transform[3][1] = _translation.y;
state.transform[3][2] = _translation.z;
}
}
void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(_rotation);
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.transform *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation)));
state.rotation = glm::angleAxis(-_owningAvatar->getHead().getLeanSideways(), glm::normalize(inverse * axes[2])) *
glm::angleAxis(-_owningAvatar->getHead().getLeanForward(), glm::normalize(inverse * axes[0])) * joint.rotation;
}

View file

@ -0,0 +1,39 @@
//
// SkeletonModel.h
// interface
//
// Created by Andrzej Kapolka on 10/17/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__SkeletonModel__
#define __interface__SkeletonModel__
#include "renderer/Model.h"
class Avatar;
/// A skeleton loaded from a model.
class SkeletonModel : public Model {
Q_OBJECT
public:
SkeletonModel(Avatar* owningAvatar);
void simulate(float deltaTime);
bool render(float alpha);
protected:
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
virtual void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
private:
Avatar* _owningAvatar;
};
#endif /* defined(__interface__SkeletonModel__) */

View file

@ -1,5 +1,5 @@
//
// Face.cpp
// VideoFace.cpp
// interface
//
// Created by Andrzej Kapolka on 7/11/13.
@ -16,26 +16,26 @@
#include "Application.h"
#include "Avatar.h"
#include "Head.h"
#include "Face.h"
#include "VideoFace.h"
#include "renderer/ProgramObject.h"
using namespace cv;
bool Face::_initialized = false;
ProgramObject Face::_videoProgram;
Face::Locations Face::_videoProgramLocations;
ProgramObject Face::_texturedProgram;
Face::Locations Face::_texturedProgramLocations;
GLuint Face::_vboID;
GLuint Face::_iboID;
bool VideoFace::_initialized = false;
ProgramObject VideoFace::_videoProgram;
VideoFace::Locations VideoFace::_videoProgramLocations;
ProgramObject VideoFace::_texturedProgram;
VideoFace::Locations VideoFace::_texturedProgramLocations;
GLuint VideoFace::_vboID;
GLuint VideoFace::_iboID;
Face::Face(Head* owningHead) : _owningHead(owningHead), _renderMode(MESH),
VideoFace::VideoFace(Head* owningHead) : _owningHead(owningHead), _renderMode(MESH),
_colorTextureID(0), _depthTextureID(0), _colorCodec(), _depthCodec(), _frameCount(0) {
// we may have been created in the network thread, but we live in the main thread
moveToThread(Application::getInstance()->thread());
}
Face::~Face() {
VideoFace::~VideoFace() {
if (_colorCodec.name != 0) {
vpx_codec_destroy(&_colorCodec);
@ -55,7 +55,7 @@ Face::~Face() {
}
}
void Face::setFrameFromWebcam() {
void VideoFace::setFrameFromWebcam() {
Webcam* webcam = Application::getInstance()->getWebcam();
if (webcam->isSending()) {
_colorTextureID = webcam->getColorTextureID();
@ -68,12 +68,12 @@ void Face::setFrameFromWebcam() {
}
}
void Face::clearFrame() {
void VideoFace::clearFrame() {
_colorTextureID = 0;
_depthTextureID = 0;
}
int Face::processVideoMessage(unsigned char* packetData, size_t dataBytes) {
int VideoFace::processVideoMessage(unsigned char* packetData, size_t dataBytes) {
unsigned char* packetPosition = packetData;
int frameCount = *(uint32_t*)packetPosition;
@ -243,7 +243,7 @@ int Face::processVideoMessage(unsigned char* packetData, size_t dataBytes) {
return dataBytes;
}
bool Face::render(float alpha) {
bool VideoFace::render(float alpha) {
if (!isActive()) {
return false;
}
@ -404,11 +404,11 @@ bool Face::render(float alpha) {
return true;
}
void Face::cycleRenderMode() {
void VideoFace::cycleRenderMode() {
_renderMode = (RenderMode)((_renderMode + 1) % RENDER_MODE_COUNT);
}
void Face::setFrame(const cv::Mat& color, const cv::Mat& depth, float aspectRatio) {
void VideoFace::setFrame(const cv::Mat& color, const cv::Mat& depth, float aspectRatio) {
Size2f textureSize = _textureSize;
if (!color.empty()) {
bool generate = (_colorTextureID == 0);
@ -457,7 +457,7 @@ void Face::setFrame(const cv::Mat& color, const cv::Mat& depth, float aspectRati
_textureSize = textureSize;
}
void Face::destroyCodecs() {
void VideoFace::destroyCodecs() {
if (_colorCodec.name != 0) {
vpx_codec_destroy(&_colorCodec);
_colorCodec.name = 0;
@ -468,7 +468,7 @@ void Face::destroyCodecs() {
}
}
void Face::loadProgram(ProgramObject& program, const QString& suffix, const char* secondTextureUniform, Locations& locations) {
void VideoFace::loadProgram(ProgramObject& program, const QString& suffix, const char* secondTextureUniform, Locations& locations) {
program.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/face" + suffix + ".vert");
program.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/face" + suffix + ".frag");
program.link();

View file

@ -1,13 +1,13 @@
//
// Face.h
// VideoFace.h
// interface
//
// Created by Andrzej Kapolka on 7/11/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__Face__
#define __interface__Face__
#ifndef __interface__VideoFace__
#define __interface__VideoFace__
#include <QObject>
@ -22,13 +22,13 @@ class ProgramObject;
const float FULL_FRAME_ASPECT = 0.0f;
class Face : public QObject {
class VideoFace : public QObject {
Q_OBJECT
public:
Face(Head* owningHead);
~Face();
VideoFace(Head* owningHead);
~VideoFace();
bool isActive() const { return _colorTextureID != 0 || _depthTextureID != 0; }
bool isFullFrame() const { return isActive() && _aspectRatio == FULL_FRAME_ASPECT; }
@ -91,4 +91,4 @@ private:
static GLuint _iboID;
};
#endif /* defined(__interface__Face__) */
#endif /* defined(__interface__VideoFace__) */

View file

@ -19,7 +19,7 @@
#include "Application.h"
#include "Webcam.h"
#include "avatar/Face.h"
#include "avatar/VideoFace.h"
using namespace cv;
using namespace std;

View file

@ -9,17 +9,20 @@
#include <QBuffer>
#include <QDataStream>
#include <QIODevice>
#include <QTextStream>
#include <QtDebug>
#include <QtEndian>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
#include <glm/gtx/transform.hpp>
#include "FBXReader.h"
#include "Util.h"
using namespace std;
template<class T> QVariant readArray(QDataStream& in) {
template<class T> QVariant readBinaryArray(QDataStream& in) {
quint32 arrayLength;
quint32 encoding;
quint32 compressedLength;
@ -54,7 +57,7 @@ template<class T> QVariant readArray(QDataStream& in) {
return QVariant::fromValue(values);
}
QVariant parseFBXProperty(QDataStream& in) {
QVariant parseBinaryFBXProperty(QDataStream& in) {
char ch;
in.device()->getChar(&ch);
switch (ch) {
@ -89,19 +92,19 @@ QVariant parseFBXProperty(QDataStream& in) {
return QVariant::fromValue(value);
}
case 'f': {
return readArray<float>(in);
return readBinaryArray<float>(in);
}
case 'd': {
return readArray<double>(in);
return readBinaryArray<double>(in);
}
case 'l': {
return readArray<qint64>(in);
return readBinaryArray<qint64>(in);
}
case 'i': {
return readArray<qint32>(in);
return readBinaryArray<qint32>(in);
}
case 'b': {
return readArray<bool>(in);
return readBinaryArray<bool>(in);
}
case 'S':
case 'R': {
@ -114,7 +117,7 @@ QVariant parseFBXProperty(QDataStream& in) {
}
}
FBXNode parseFBXNode(QDataStream& in) {
FBXNode parseBinaryFBXNode(QDataStream& in) {
quint32 endOffset;
quint32 propertyCount;
quint32 propertyListLength;
@ -134,11 +137,11 @@ FBXNode parseFBXNode(QDataStream& in) {
node.name = in.device()->read(nameLength);
for (int i = 0; i < propertyCount; i++) {
node.properties.append(parseFBXProperty(in));
node.properties.append(parseBinaryFBXProperty(in));
}
while (endOffset > in.device()->pos()) {
FBXNode child = parseFBXNode(in);
FBXNode child = parseBinaryFBXNode(in);
if (child.name.isNull()) {
return node;
@ -150,28 +153,149 @@ FBXNode parseFBXNode(QDataStream& in) {
return node;
}
class Tokenizer {
public:
Tokenizer(QIODevice* device) : _device(device), _pushedBackToken(-1) { }
enum SpecialToken { DATUM_TOKEN = 0x100 };
int nextToken();
const QByteArray& getDatum() const { return _datum; }
void pushBackToken(int token) { _pushedBackToken = token; }
private:
QIODevice* _device;
QByteArray _datum;
int _pushedBackToken;
};
int Tokenizer::nextToken() {
if (_pushedBackToken != -1) {
int token = _pushedBackToken;
_pushedBackToken = -1;
return token;
}
char ch;
while (_device->getChar(&ch)) {
if (QChar(ch).isSpace()) {
continue; // skip whitespace
}
switch (ch) {
case ';':
_device->readLine(); // skip the comment
break;
case ':':
case '{':
case '}':
case ',':
return ch; // special punctuation
case '\"':
_datum = "";
while (_device->getChar(&ch)) {
if (ch == '\"') { // end on closing quote
break;
}
if (ch == '\\') { // handle escaped quotes
if (_device->getChar(&ch) && ch != '\"') {
_datum.append('\\');
}
}
_datum.append(ch);
}
return DATUM_TOKEN;
default:
_datum = "";
_datum.append(ch);
while (_device->getChar(&ch)) {
if (QChar(ch).isSpace() || ch == ';' || ch == ':' || ch == '{' || ch == '}' || ch == ',' || ch == '\"') {
_device->ungetChar(ch); // read until we encounter a special character, then replace it
break;
}
_datum.append(ch);
}
return DATUM_TOKEN;
}
}
return -1;
}
FBXNode parseTextFBXNode(Tokenizer& tokenizer) {
FBXNode node;
if (tokenizer.nextToken() != Tokenizer::DATUM_TOKEN) {
return node;
}
node.name = tokenizer.getDatum();
if (tokenizer.nextToken() != ':') {
return node;
}
int token;
bool expectingDatum = true;
while ((token = tokenizer.nextToken()) != -1) {
if (token == '{') {
for (FBXNode child = parseTextFBXNode(tokenizer); !child.name.isNull(); child = parseTextFBXNode(tokenizer)) {
node.children.append(child);
}
return node;
}
if (token == ',') {
expectingDatum = true;
} else if (token == Tokenizer::DATUM_TOKEN && expectingDatum) {
node.properties.append(tokenizer.getDatum());
expectingDatum = false;
} else {
tokenizer.pushBackToken(token);
return node;
}
}
return node;
}
FBXNode parseFBX(QIODevice* device) {
// verify the prolog
const QByteArray BINARY_PROLOG = "Kaydara FBX Binary ";
if (device->peek(BINARY_PROLOG.size()) != BINARY_PROLOG) {
// parse as a text file
FBXNode top;
Tokenizer tokenizer(device);
while (device->bytesAvailable()) {
FBXNode next = parseTextFBXNode(tokenizer);
if (next.name.isNull()) {
return top;
} else {
top.children.append(next);
}
}
return top;
}
QDataStream in(device);
in.setByteOrder(QDataStream::LittleEndian);
in.setVersion(QDataStream::Qt_4_5); // for single/double precision switch
// see http://code.blender.org/index.php/2013/08/fbx-binary-file-format-specification/ for an explanation
// of the FBX format
// verify the prolog
const QByteArray EXPECTED_PROLOG = "Kaydara FBX Binary ";
if (device->read(EXPECTED_PROLOG.size()) != EXPECTED_PROLOG) {
throw QString("Invalid header.");
}
// of the FBX binary format
// skip the rest of the header
const int HEADER_SIZE = 27;
in.skipRawData(HEADER_SIZE - EXPECTED_PROLOG.size());
in.skipRawData(HEADER_SIZE);
// parse the top-level node
FBXNode top;
while (device->bytesAvailable()) {
FBXNode next = parseFBXNode(in);
FBXNode next = parseBinaryFBXNode(in);
if (next.name.isNull()) {
return top;
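With the text branch added above, parseFBX() now accepts either FBX variant, choosing by peeking for the binary prolog. A usage sketch, assumed to sit in the same translation unit as the parser (QBuffer and QtDebug are already included at the top of this file); the ASCII fragment is made up for illustration:

static void parseTextFragmentExample() {
    QByteArray text =
        "; ASCII FBX fragment\n"
        "Objects: {\n"
        "    Model: \"Model::head\", \"Mesh\" {\n"
        "        Version: 232\n"
        "    }\n"
        "}\n";
    QBuffer buffer(&text);
    buffer.open(QIODevice::ReadOnly);
    FBXNode top = parseFBX(&buffer); // no binary prolog, so the text path is taken
    foreach (const FBXNode& child, top.children) {
        qDebug() << "node:" << child.name << "properties:" << child.properties.size();
    }
}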
@ -246,6 +370,33 @@ glm::mat4 createMat4(const QVector<double>& doubleVector) {
doubleVector.at(12), doubleVector.at(13), doubleVector.at(14), doubleVector.at(15));
}
QVector<int> getIntVector(const QVariantList& properties, int index) {
QVector<int> vector = properties.at(index).value<QVector<int> >();
if (!vector.isEmpty()) {
return vector;
}
for (; index < properties.size(); index++) {
vector.append(properties.at(index).toInt());
}
return vector;
}
QVector<double> getDoubleVector(const QVariantList& properties, int index) {
QVector<double> vector = properties.at(index).value<QVector<double> >();
if (!vector.isEmpty()) {
return vector;
}
for (; index < properties.size(); index++) {
vector.append(properties.at(index).toDouble());
}
return vector;
}
glm::vec3 getVec3(const QVariantList& properties, int index) {
return glm::vec3(properties.at(index).value<double>(), properties.at(index + 1).value<double>(),
properties.at(index + 2).value<double>());
}
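These helpers paper over the difference between the two encodings: a binary file packs an entire array into one property, so value<QVector<int> >() or value<QVector<double> >() succeeds immediately, while a text file yields one scalar property per element, which the fallback loop accumulates from the given index onward; getVec3 simply reads three consecutive scalars. A self-contained check of that equivalence, with made-up values, assuming the same metatype registration that value<QVector<double> >() already relies on in this file.

#include <QVariant>
#include <QVector>

void demoPropertyLayouts() {
    // binary-style: one packed array property
    QVariantList binaryStyle;
    binaryStyle << QVariant::fromValue(QVector<double>() << 1.0 << 2.0 << 3.0);

    // text-style: one scalar property per element
    QVariantList textStyle;
    textStyle << 1.0 << 2.0 << 3.0;

    QVector<double> fromBinary = getDoubleVector(binaryStyle, 0); // returned directly
    QVector<double> fromText = getDoubleVector(textStyle, 0); // accumulated scalar by scalar
    Q_ASSERT(fromBinary == fromText); // both are { 1.0, 2.0, 3.0 }
}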
const char* FACESHIFT_BLENDSHAPES[] = {
"EyeBlink_L",
"EyeBlink_R",
@ -298,26 +449,30 @@ const char* FACESHIFT_BLENDSHAPES[] = {
""
};
class Model {
class FBXModel {
public:
QByteArray name;
glm::mat4 preRotation;
glm::quat rotation;
glm::mat4 postRotation;
int parentIndex;
glm::mat4 preTransform;
glm::quat preRotation;
glm::quat rotation;
glm::quat postRotation;
glm::mat4 postTransform;
};
glm::mat4 getGlobalTransform(const QMultiHash<qint64, qint64>& parentMap, const QHash<qint64, Model>& models, qint64 nodeID) {
glm::mat4 getGlobalTransform(const QMultiHash<QString, QString>& parentMap,
const QHash<QString, FBXModel>& models, QString nodeID) {
glm::mat4 globalTransform;
while (nodeID != 0) {
const Model& model = models.value(nodeID);
globalTransform = model.preRotation * glm::mat4_cast(model.rotation) * model.postRotation * globalTransform;
while (!nodeID.isNull()) {
const FBXModel& model = models.value(nodeID);
globalTransform = model.preTransform * glm::mat4_cast(model.preRotation * model.rotation * model.postRotation) *
model.postTransform * globalTransform;
QList<qint64> parentIDs = parentMap.values(nodeID);
nodeID = 0;
foreach (qint64 parentID, parentIDs) {
QList<QString> parentIDs = parentMap.values(nodeID);
nodeID = QString();
foreach (const QString& parentID, parentIDs) {
if (models.contains(parentID)) {
nodeID = parentID;
break;
@ -330,7 +485,7 @@ glm::mat4 getGlobalTransform(const QMultiHash<qint64, qint64>& parentMap, const
class ExtractedBlendshape {
public:
qint64 id;
QString id;
FBXBlendshape blendshape;
};
@ -360,65 +515,24 @@ public:
glm::mat4 transformLink;
};
void appendModelIDs(qint64 parentID, const QMultiHash<qint64, qint64>& childMap,
QHash<qint64, Model>& models, QVector<qint64>& modelIDs) {
if (parentID != 0) {
void appendModelIDs(const QString& parentID, const QMultiHash<QString, QString>& childMap,
QHash<QString, FBXModel>& models, QVector<QString>& modelIDs) {
if (models.contains(parentID)) {
modelIDs.append(parentID);
}
int parentIndex = modelIDs.size() - 1;
foreach (qint64 childID, childMap.values(parentID)) {
foreach (const QString& childID, childMap.values(parentID)) {
if (models.contains(childID)) {
models[childID].parentIndex = parentIndex;
FBXModel& model = models[childID];
if (model.parentIndex == -1) {
model.parentIndex = parentIndex;
appendModelIDs(childID, childMap, models, modelIDs);
}
}
}
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping) {
QHash<qint64, FBXMesh> meshes;
QVector<ExtractedBlendshape> blendshapes;
QMultiHash<qint64, qint64> parentMap;
QMultiHash<qint64, qint64> childMap;
QHash<qint64, Model> models;
QHash<qint64, Cluster> clusters;
QHash<qint64, QByteArray> textureFilenames;
QHash<qint64, Material> materials;
QHash<qint64, qint64> diffuseTextures;
QHash<qint64, qint64> bumpTextures;
QVariantHash joints = mapping.value("joint").toHash();
QByteArray jointEyeLeftName = joints.value("jointEyeLeft", "jointEyeLeft").toByteArray();
QByteArray jointEyeRightName = joints.value("jointEyeRight", "jointEyeRight").toByteArray();
QByteArray jointNeckName = joints.value("jointNeck", "jointNeck").toByteArray();
qint64 jointEyeLeftID = 0;
qint64 jointEyeRightID = 0;
qint64 jointNeckID = 0;
QVariantHash blendshapeMappings = mapping.value("bs").toHash();
QHash<QByteArray, QPair<int, float> > blendshapeIndices;
for (int i = 0;; i++) {
QByteArray blendshapeName = FACESHIFT_BLENDSHAPES[i];
if (blendshapeName.isEmpty()) {
break;
}
QList<QVariant> mappings = blendshapeMappings.values(blendshapeName);
if (mappings.isEmpty()) {
blendshapeIndices.insert(blendshapeName, QPair<int, float>(i, 1.0f));
} else {
foreach (const QVariant& mapping, mappings) {
QVariantList blendshapeMapping = mapping.toList();
blendshapeIndices.insert(blendshapeMapping.at(0).toByteArray(),
QPair<int, float>(i, blendshapeMapping.at(1).toFloat()));
}
}
}
QHash<qint64, QPair<int, float> > blendshapeChannelIndices;
foreach (const FBXNode& child, node.children) {
if (child.name == "Objects") {
foreach (const FBXNode& object, child.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
FBXMesh extractMesh(const FBXNode& object) {
FBXMesh mesh;
QVector<int> polygonIndices;
@ -429,19 +543,19 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
QVector<int> materials;
foreach (const FBXNode& data, object.children) {
if (data.name == "Vertices") {
mesh.vertices = createVec3Vector(data.properties.at(0).value<QVector<double> >());
mesh.vertices = createVec3Vector(getDoubleVector(data.properties, 0));
} else if (data.name == "PolygonVertexIndex") {
polygonIndices = data.properties.at(0).value<QVector<int> >();
polygonIndices = getIntVector(data.properties, 0);
} else if (data.name == "LayerElementNormal") {
bool byVertex = false;
foreach (const FBXNode& subdata, data.children) {
if (subdata.name == "Normals") {
normals = createVec3Vector(subdata.properties.at(0).value<QVector<double> >());
normals = createVec3Vector(getDoubleVector(subdata.properties, 0));
} else if (subdata.name == "NormalsIndex") {
normalIndices = subdata.properties.at(0).value<QVector<int> >();
normalIndices = getIntVector(subdata.properties, 0);
} else if (subdata.name == "MappingInformationType" &&
subdata.properties.at(0) == "ByVertice") {
@ -454,16 +568,16 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
} else if (data.name == "LayerElementUV" && data.properties.at(0).toInt() == 0) {
foreach (const FBXNode& subdata, data.children) {
if (subdata.name == "UV") {
texCoords = createVec2Vector(subdata.properties.at(0).value<QVector<double> >());
texCoords = createVec2Vector(getDoubleVector(subdata.properties, 0));
} else if (subdata.name == "UVIndex") {
texCoordIndices = subdata.properties.at(0).value<QVector<int> >();
texCoordIndices = getIntVector(subdata.properties, 0);
}
}
} else if (data.name == "LayerElementMaterial") {
foreach (const FBXNode& subdata, data.children) {
if (subdata.name == "Materials") {
materials = subdata.properties.at(0).value<QVector<int> >();
materials = getIntVector(subdata.properties, 0);
}
}
}
@ -531,101 +645,172 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
beginIndex = endIndex;
}
}
meshes.insert(object.properties.at(0).value<qint64>(), mesh);
return mesh;
}
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping) {
QHash<QString, FBXMesh> meshes;
QVector<ExtractedBlendshape> blendshapes;
QMultiHash<QString, QString> parentMap;
QMultiHash<QString, QString> childMap;
QHash<QString, FBXModel> models;
QHash<QString, Cluster> clusters;
QHash<QString, QByteArray> textureFilenames;
QHash<QString, Material> materials;
QHash<QString, QString> diffuseTextures;
QHash<QString, QString> bumpTextures;
QVariantHash joints = mapping.value("joint").toHash();
QByteArray jointEyeLeftName = joints.value("jointEyeLeft", "jointEyeLeft").toByteArray();
QByteArray jointEyeRightName = joints.value("jointEyeRight", "jointEyeRight").toByteArray();
QByteArray jointNeckName = joints.value("jointNeck", "jointNeck").toByteArray();
QByteArray jointRootName = joints.value("jointRoot", "jointRoot").toByteArray();
QByteArray jointLeanName = joints.value("jointLean", "jointLean").toByteArray();
QByteArray jointHeadName = joints.value("jointHead", "jointHead").toByteArray();
QString jointEyeLeftID;
QString jointEyeRightID;
QString jointNeckID;
QString jointRootID;
QString jointLeanID;
QString jointHeadID;
QVariantHash blendshapeMappings = mapping.value("bs").toHash();
QHash<QByteArray, QPair<int, float> > blendshapeIndices;
for (int i = 0;; i++) {
QByteArray blendshapeName = FACESHIFT_BLENDSHAPES[i];
if (blendshapeName.isEmpty()) {
break;
}
QList<QVariant> mappings = blendshapeMappings.values(blendshapeName);
if (mappings.isEmpty()) {
blendshapeIndices.insert(blendshapeName, QPair<int, float>(i, 1.0f));
} else {
foreach (const QVariant& mapping, mappings) {
QVariantList blendshapeMapping = mapping.toList();
blendshapeIndices.insert(blendshapeMapping.at(0).toByteArray(),
QPair<int, float>(i, blendshapeMapping.at(1).toFloat()));
}
}
}
QHash<QString, QPair<int, float> > blendshapeChannelIndices;
foreach (const FBXNode& child, node.children) {
if (child.name == "Objects") {
foreach (const FBXNode& object, child.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
meshes.insert(object.properties.at(0).toString(), extractMesh(object));
} else { // object.properties.at(2) == "Shape"
ExtractedBlendshape extracted = { object.properties.at(0).value<qint64>() };
ExtractedBlendshape extracted = { object.properties.at(0).toString() };
foreach (const FBXNode& data, object.children) {
if (data.name == "Indexes") {
extracted.blendshape.indices = data.properties.at(0).value<QVector<int> >();
extracted.blendshape.indices = getIntVector(data.properties, 0);
} else if (data.name == "Vertices") {
extracted.blendshape.vertices = createVec3Vector(
data.properties.at(0).value<QVector<double> >());
getDoubleVector(data.properties, 0));
} else if (data.name == "Normals") {
extracted.blendshape.normals = createVec3Vector(
data.properties.at(0).value<QVector<double> >());
getDoubleVector(data.properties, 0));
}
}
blendshapes.append(extracted);
}
} else if (object.name == "Model") {
QByteArray name = object.properties.at(1).toByteArray();
QByteArray name;
if (object.properties.size() == 3) {
name = object.properties.at(1).toByteArray();
name = name.left(name.indexOf('\0'));
} else {
name = object.properties.at(0).toByteArray();
}
if (name == jointEyeLeftName || name == "EyeL" || name == "joint_Leye") {
jointEyeLeftID = object.properties.at(0).value<qint64>();
jointEyeLeftID = object.properties.at(0).toString();
} else if (name == jointEyeRightName || name == "EyeR" || name == "joint_Reye") {
jointEyeRightID = object.properties.at(0).value<qint64>();
jointEyeRightID = object.properties.at(0).toString();
} else if (name == jointNeckName || name == "NeckRot" || name == "joint_neck") {
jointNeckID = object.properties.at(0).value<qint64>();
jointNeckID = object.properties.at(0).toString();
} else if (name == jointRootName) {
jointRootID = object.properties.at(0).toString();
} else if (name == jointLeanName) {
jointLeanID = object.properties.at(0).toString();
} else if (name == jointHeadName) {
jointHeadID = object.properties.at(0).toString();
}
glm::vec3 translation;
glm::vec3 rotationOffset;
glm::vec3 preRotation, rotation, postRotation;
glm::vec3 scale = glm::vec3(1.0f, 1.0f, 1.0f);
glm::vec3 scalePivot, rotationPivot;
Model model = { name };
FBXModel model = { name, -1 };
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "Properties70") {
bool properties = false;
QByteArray propertyName;
int index;
if (subobject.name == "Properties60") {
properties = true;
propertyName = "Property";
index = 3;
} else if (subobject.name == "Properties70") {
properties = true;
propertyName = "P";
index = 4;
}
if (properties) {
foreach (const FBXNode& property, subobject.children) {
if (property.name == "P") {
if (property.name == propertyName) {
if (property.properties.at(0) == "Lcl Translation") {
translation = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
translation = getVec3(property.properties, index);
} else if (property.properties.at(0) == "RotationOffset") {
rotationOffset = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
rotationOffset = getVec3(property.properties, index);
} else if (property.properties.at(0) == "RotationPivot") {
rotationPivot = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
rotationPivot = getVec3(property.properties, index);
} else if (property.properties.at(0) == "PreRotation") {
preRotation = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
preRotation = getVec3(property.properties, index);
} else if (property.properties.at(0) == "Lcl Rotation") {
rotation = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
rotation = getVec3(property.properties, index);
} else if (property.properties.at(0) == "PostRotation") {
postRotation = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
postRotation = getVec3(property.properties, index);
} else if (property.properties.at(0) == "ScalingPivot") {
scalePivot = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
scalePivot = getVec3(property.properties, index);
} else if (property.properties.at(0) == "Lcl Scaling") {
scale = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
scale = getVec3(property.properties, index);
}
}
}
} else if (subobject.name == "Vertices") {
// it's a mesh as well as a model
meshes.insert(object.properties.at(0).toString(), extractMesh(object));
}
}
// see FBX documentation, http://download.autodesk.com/us/fbx/20112/FBX_SDK_HELP/index.html
model.preRotation = glm::translate(translation) * glm::translate(rotationOffset) *
glm::translate(rotationPivot) * glm::mat4_cast(glm::quat(glm::radians(preRotation)));
model.preTransform = glm::translate(translation) * glm::translate(rotationOffset) *
glm::translate(rotationPivot);
model.preRotation = glm::quat(glm::radians(preRotation));
model.rotation = glm::quat(glm::radians(rotation));
model.postRotation = glm::mat4_cast(glm::quat(glm::radians(postRotation))) *
glm::translate(-rotationPivot) * glm::translate(scalePivot) *
model.postRotation = glm::quat(glm::radians(postRotation));
model.postTransform = glm::translate(-rotationPivot) * glm::translate(scalePivot) *
glm::scale(scale) * glm::translate(-scalePivot);
models.insert(object.properties.at(0).value<qint64>(), model);
models.insert(object.properties.at(0).toString(), model);
} else if (object.name == "Texture") {
foreach (const FBXNode& subobject, object.children) {
@ -633,78 +818,87 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
// trim off any path information
QByteArray filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
textureFilenames.insert(object.properties.at(0).value<qint64>(), filename);
textureFilenames.insert(object.properties.at(0).toString(), filename);
}
}
} else if (object.name == "Material") {
Material material = { glm::vec3(1.0f, 1.0f, 1.0f), glm::vec3(1.0f, 1.0f, 1.0f), 96.0f };
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "Properties70") {
bool properties = false;
QByteArray propertyName;
int index;
if (subobject.name == "Properties60") {
properties = true;
propertyName = "Property";
index = 3;
} else if (subobject.name == "Properties70") {
properties = true;
propertyName = "P";
index = 4;
}
if (properties) {
foreach (const FBXNode& property, subobject.children) {
if (property.name == "P") {
if (property.name == propertyName) {
if (property.properties.at(0) == "DiffuseColor") {
material.diffuse = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
material.diffuse = getVec3(property.properties, index);
} else if (property.properties.at(0) == "SpecularColor") {
material.specular = glm::vec3(property.properties.at(4).value<double>(),
property.properties.at(5).value<double>(),
property.properties.at(6).value<double>());
material.specular = getVec3(property.properties, index);
} else if (property.properties.at(0) == "Shininess") {
material.shininess = property.properties.at(4).value<double>();
material.shininess = property.properties.at(index).value<double>();
}
}
}
}
}
materials.insert(object.properties.at(0).value<qint64>(), material);
materials.insert(object.properties.at(0).toString(), material);
} else if (object.name == "Deformer") {
if (object.properties.at(2) == "Cluster") {
if (object.properties.last() == "Cluster") {
Cluster cluster;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "Indexes") {
cluster.indices = subobject.properties.at(0).value<QVector<int> >();
cluster.indices = getIntVector(subobject.properties, 0);
} else if (subobject.name == "Weights") {
cluster.weights = subobject.properties.at(0).value<QVector<double> >();
cluster.weights = getDoubleVector(subobject.properties, 0);
} else if (subobject.name == "TransformLink") {
QVector<double> values = subobject.properties.at(0).value<QVector<double> >();
QVector<double> values = getDoubleVector(subobject.properties, 0);
cluster.transformLink = createMat4(values);
}
}
clusters.insert(object.properties.at(0).value<qint64>(), cluster);
clusters.insert(object.properties.at(0).toString(), cluster);
} else if (object.properties.at(2) == "BlendShapeChannel") {
} else if (object.properties.last() == "BlendShapeChannel") {
QByteArray name = object.properties.at(1).toByteArray();
name = name.left(name.indexOf('\0'));
if (!blendshapeIndices.contains(name)) {
// try everything after the dot
name = name.mid(name.lastIndexOf('.') + 1);
}
blendshapeChannelIndices.insert(object.properties.at(0).value<qint64>(),
blendshapeChannelIndices.insert(object.properties.at(0).toString(),
blendshapeIndices.value(name));
}
}
}
} else if (child.name == "Connections") {
foreach (const FBXNode& connection, child.children) {
if (connection.name == "C") {
if (connection.name == "C" || connection.name == "Connect") {
if (connection.properties.at(0) == "OP") {
if (connection.properties.at(3) == "DiffuseColor") {
diffuseTextures.insert(connection.properties.at(2).value<qint64>(),
connection.properties.at(1).value<qint64>());
diffuseTextures.insert(connection.properties.at(2).toString(),
connection.properties.at(1).toString());
} else if (connection.properties.at(3) == "Bump") {
bumpTextures.insert(connection.properties.at(2).value<qint64>(),
connection.properties.at(1).value<qint64>());
bumpTextures.insert(connection.properties.at(2).toString(),
connection.properties.at(1).toString());
}
}
parentMap.insert(connection.properties.at(1).value<qint64>(), connection.properties.at(2).value<qint64>());
childMap.insert(connection.properties.at(2).value<qint64>(), connection.properties.at(1).value<qint64>());
parentMap.insert(connection.properties.at(1).toString(), connection.properties.at(2).toString());
childMap.insert(connection.properties.at(2).toString(), connection.properties.at(1).toString());
}
}
}
@ -712,10 +906,10 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
// assign the blendshapes to their corresponding meshes
foreach (const ExtractedBlendshape& extracted, blendshapes) {
qint64 blendshapeChannelID = parentMap.value(extracted.id);
QString blendshapeChannelID = parentMap.value(extracted.id);
QPair<int, float> index = blendshapeChannelIndices.value(blendshapeChannelID);
qint64 blendshapeID = parentMap.value(blendshapeChannelID);
qint64 meshID = parentMap.value(blendshapeID);
QString blendshapeID = parentMap.value(blendshapeChannelID);
QString meshID = parentMap.value(blendshapeID);
FBXMesh& mesh = meshes[meshID];
mesh.blendshapes.resize(max(mesh.blendshapes.size(), index.first + 1));
mesh.blendshapes[index.first] = extracted.blendshape;
@ -733,37 +927,62 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
// get offset transform from mapping
FBXGeometry geometry;
float offsetScale = mapping.value("scale", 1.0f).toFloat();
glm::quat offsetRotation = glm::quat(glm::radians(glm::vec3(mapping.value("rx").toFloat(),
mapping.value("ry").toFloat(), mapping.value("rz").toFloat())));
geometry.offset = glm::translate(mapping.value("tx").toFloat(), mapping.value("ty").toFloat(),
mapping.value("tz").toFloat()) * glm::mat4_cast(glm::quat(glm::radians(glm::vec3(mapping.value("rx").toFloat(),
mapping.value("ry").toFloat(), mapping.value("rz").toFloat())))) *
glm::scale(offsetScale, offsetScale, offsetScale);
mapping.value("tz").toFloat()) * glm::mat4_cast(offsetRotation) * glm::scale(offsetScale, offsetScale, offsetScale);
// get the list of models in depth-first traversal order
QVector<qint64> modelIDs;
appendModelIDs(0, childMap, models, modelIDs);
QVector<QString> modelIDs;
if (!models.isEmpty()) {
QString top = models.constBegin().key();
forever {
foreach (const QString& name, parentMap.values(top)) {
if (models.contains(name)) {
top = name;
goto outerContinue;
}
}
top = parentMap.value(top);
break;
outerContinue: ;
}
appendModelIDs(top, childMap, models, modelIDs);
}
// convert the models to joints
foreach (qint64 modelID, modelIDs) {
const Model& model = models[modelID];
foreach (const QString& modelID, modelIDs) {
const FBXModel& model = models[modelID];
FBXJoint joint;
joint.parentIndex = model.parentIndex;
joint.preTransform = model.preTransform;
joint.preRotation = model.preRotation;
joint.rotation = model.rotation;
joint.postRotation = model.postRotation;
joint.postTransform = model.postTransform;
glm::quat combinedRotation = model.preRotation * model.rotation * model.postRotation;
if (joint.parentIndex == -1) {
joint.transform = geometry.offset * model.preRotation * glm::mat4_cast(model.rotation) * model.postRotation;
joint.transform = geometry.offset * model.preTransform * glm::mat4_cast(combinedRotation) * model.postTransform;
joint.inverseBindRotation = glm::inverse(combinedRotation);
} else {
joint.transform = geometry.joints.at(joint.parentIndex).transform *
model.preRotation * glm::mat4_cast(model.rotation) * model.postRotation;
const FBXJoint& parentJoint = geometry.joints.at(joint.parentIndex);
joint.transform = parentJoint.transform *
model.preTransform * glm::mat4_cast(combinedRotation) * model.postTransform;
joint.inverseBindRotation = glm::inverse(combinedRotation) * parentJoint.inverseBindRotation;
}
geometry.joints.append(joint);
geometry.jointIndices.insert(model.name, geometry.joints.size() - 1);
}
// find our special joints
geometry.leftEyeJointIndex = modelIDs.indexOf(jointEyeLeftID);
geometry.rightEyeJointIndex = modelIDs.indexOf(jointEyeRightID);
geometry.neckJointIndex = modelIDs.indexOf(jointNeckID);
geometry.rootJointIndex = modelIDs.indexOf(jointRootID);
geometry.leanJointIndex = modelIDs.indexOf(jointLeanID);
geometry.headJointIndex = modelIDs.indexOf(jointHeadID);
// extract the translation component of the neck transform
if (geometry.neckJointIndex != -1) {
@ -773,40 +992,48 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
QVariantHash springs = mapping.value("spring").toHash();
QVariant defaultSpring = springs.value("default");
for (QHash<qint64, FBXMesh>::iterator it = meshes.begin(); it != meshes.end(); it++) {
for (QHash<QString, FBXMesh>::iterator it = meshes.begin(); it != meshes.end(); it++) {
FBXMesh& mesh = it.value();
// accumulate local transforms
qint64 modelID = parentMap.value(it.key());
QString modelID = models.contains(it.key()) ? it.key() : parentMap.value(it.key());
mesh.springiness = springs.value(models.value(modelID).name, defaultSpring).toFloat();
glm::mat4 modelTransform = getGlobalTransform(parentMap, models, modelID);
// look for textures, material properties
int partIndex = 0;
foreach (qint64 childID, childMap.values(modelID)) {
if (!materials.contains(childID) || partIndex >= mesh.parts.size()) {
int partIndex = mesh.parts.size() - 1;
foreach (const QString& childID, childMap.values(modelID)) {
if (partIndex < 0) {
break;
}
FBXMeshPart& part = mesh.parts[partIndex];
if (textureFilenames.contains(childID)) {
part.diffuseFilename = textureFilenames.value(childID);
continue;
}
if (!materials.contains(childID)) {
continue;
}
Material material = materials.value(childID);
FBXMeshPart& part = mesh.parts[mesh.parts.size() - ++partIndex];
part.diffuseColor = material.diffuse;
part.specularColor = material.specular;
part.shininess = material.shininess;
qint64 diffuseTextureID = diffuseTextures.value(childID);
if (diffuseTextureID != 0) {
QString diffuseTextureID = diffuseTextures.value(childID);
if (!diffuseTextureID.isNull()) {
part.diffuseFilename = textureFilenames.value(diffuseTextureID);
}
qint64 bumpTextureID = bumpTextures.value(childID);
if (bumpTextureID != 0) {
QString bumpTextureID = bumpTextures.value(childID);
if (!bumpTextureID.isNull()) {
part.normalFilename = textureFilenames.value(bumpTextureID);
}
partIndex--;
}
// find the clusters with which the mesh is associated
mesh.isEye = false;
QVector<qint64> clusterIDs;
foreach (qint64 childID, childMap.values(it.key())) {
foreach (qint64 clusterID, childMap.values(childID)) {
QVector<QString> clusterIDs;
foreach (const QString& childID, childMap.values(it.key())) {
foreach (const QString& clusterID, childMap.values(childID)) {
if (!clusters.contains(clusterID)) {
continue;
}
@ -814,7 +1041,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
const Cluster& cluster = clusters[clusterID];
clusterIDs.append(clusterID);
qint64 jointID = childMap.value(clusterID);
QString jointID = childMap.value(clusterID);
if (jointID == jointEyeLeftID || jointID == jointEyeRightID) {
mesh.isEye = true;
}
@ -838,7 +1065,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
mesh.clusterIndices.resize(mesh.vertices.size());
mesh.clusterWeights.resize(mesh.vertices.size());
for (int i = 0; i < clusterIDs.size(); i++) {
qint64 clusterID = clusterIDs.at(i);
QString clusterID = clusterIDs.at(i);
const Cluster& cluster = clusters[clusterID];
for (int j = 0; j < cluster.indices.size(); j++) {

View file

@ -14,6 +14,7 @@
#include <QVector>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
class FBXNode;
@ -42,10 +43,13 @@ class FBXJoint {
public:
int parentIndex;
glm::mat4 preRotation;
glm::mat4 preTransform;
glm::quat preRotation;
glm::quat rotation;
glm::mat4 postRotation;
glm::quat postRotation;
glm::mat4 postTransform;
glm::mat4 transform;
glm::quat inverseBindRotation;
};
/// A single binding to a joint in an FBX document.
@ -99,6 +103,7 @@ class FBXGeometry {
public:
QVector<FBXJoint> joints;
QHash<QString, int> jointIndices;
QVector<FBXMesh> meshes;
@ -107,6 +112,9 @@ public:
int leftEyeJointIndex;
int rightEyeJointIndex;
int neckJointIndex;
int rootJointIndex;
int leanJointIndex;
int headJointIndex;
glm::vec3 neckPivot;
};
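FBXGeometry now carries a name-to-index hash alongside the joint vector, plus indices for the root, lean, and head joints in addition to the eyes and neck, so callers can resolve a joint either by rig name or by role and then walk parentIndex links. An illustrative helper, assuming the FBXGeometry and FBXJoint declarations above are in scope; the joint name is whatever the mapping supplies.

// Illustrative only: count how many ancestors a named joint has (-1 if the name is unknown).
int countAncestors(const FBXGeometry& geometry, const QString& jointName) {
    int count = 0;
    for (int index = geometry.jointIndices.value(jointName, -1); index != -1;
            index = geometry.joints.at(index).parentIndex) { // parentIndex is -1 at the root
        count++;
    }
    return count - 1; // exclude the joint itself
}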

View file

@ -57,7 +57,7 @@ public:
NetworkGeometry(const QUrl& url);
~NetworkGeometry();
bool isLoaded() const { return !_meshes.isEmpty(); }
bool isLoaded() const { return !_geometry.joints.isEmpty(); }
const FBXGeometry& getFBXGeometry() const { return _geometry; }
const QVector<NetworkMesh>& getMeshes() const { return _meshes; }

View file

@ -1,52 +1,48 @@
//
// BlendFace.cpp
// Model.cpp
// interface
//
// Created by Andrzej Kapolka on 9/16/13.
// Created by Andrzej Kapolka on 10/18/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include <QNetworkReply>
#include <glm/gtx/transform.hpp>
#include "Application.h"
#include "BlendFace.h"
#include "Head.h"
#include "Model.h"
using namespace fs;
using namespace std;
BlendFace::BlendFace(Head* owningHead) :
_owningHead(owningHead)
Model::Model() :
_pupilDilation(0.0f)
{
// we may have been created in the network thread, but we live in the main thread
moveToThread(Application::getInstance()->thread());
}
BlendFace::~BlendFace() {
Model::~Model() {
deleteGeometry();
}
ProgramObject BlendFace::_program;
ProgramObject BlendFace::_skinProgram;
int BlendFace::_clusterMatricesLocation;
int BlendFace::_clusterIndicesLocation;
int BlendFace::_clusterWeightsLocation;
ProgramObject Model::_program;
ProgramObject Model::_skinProgram;
int Model::_clusterMatricesLocation;
int Model::_clusterIndicesLocation;
int Model::_clusterWeightsLocation;
void BlendFace::init() {
void Model::init() {
if (!_program.isLinked()) {
switchToResourcesParentIfRequired();
_program.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/blendface.vert");
_program.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/blendface.frag");
_program.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/model.vert");
_program.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/model.frag");
_program.link();
_program.bind();
_program.setUniformValue("texture", 0);
_program.release();
_skinProgram.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/skin_blendface.vert");
_skinProgram.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/blendface.frag");
_skinProgram.addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/skin_model.vert");
_skinProgram.addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/model.frag");
_skinProgram.link();
_skinProgram.bind();
@ -58,22 +54,18 @@ void BlendFace::init() {
}
}
void BlendFace::reset() {
void Model::reset() {
_resetStates = true;
}
const glm::vec3 MODEL_TRANSLATION(0.0f, -60.0f, 40.0f); // temporary fudge factor
const float MODEL_SCALE = 0.0006f;
void BlendFace::simulate(float deltaTime) {
void Model::simulate(float deltaTime) {
if (!isActive()) {
return;
}
// set up world vertices on first simulate after load
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (_meshStates.isEmpty()) {
QVector<glm::vec3> vertices;
if (_jointStates.isEmpty()) {
foreach (const FBXJoint& joint, geometry.joints) {
JointState state;
state.rotation = joint.rotation;
@ -92,43 +84,9 @@ void BlendFace::simulate(float deltaTime) {
_resetStates = true;
}
const Skeleton& skeleton = static_cast<Avatar*>(_owningHead->_owningAvatar)->getSkeleton();
glm::quat orientation = skeleton.joint[AVATAR_JOINT_NECK_BASE].absoluteRotation;
glm::vec3 scale = glm::vec3(-1.0f, 1.0f, -1.0f) * _owningHead->getScale() * MODEL_SCALE;
glm::vec3 offset = MODEL_TRANSLATION - geometry.neckPivot;
glm::mat4 baseTransform = glm::translate(skeleton.joint[AVATAR_JOINT_NECK_BASE].position) * glm::mat4_cast(orientation) *
glm::scale(scale) * glm::translate(offset);
// update the world space transforms for all joints
for (int i = 0; i < _jointStates.size(); i++) {
JointState& state = _jointStates[i];
const FBXJoint& joint = geometry.joints.at(i);
if (joint.parentIndex == -1) {
state.transform = baseTransform * geometry.offset * joint.preRotation *
glm::mat4_cast(state.rotation) * joint.postRotation;
} else {
if (i == geometry.neckJointIndex) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(orientation);
glm::mat3 inverse = glm::inverse(glm::mat3(_jointStates[joint.parentIndex].transform *
joint.preRotation * glm::mat4_cast(joint.rotation)));
state.rotation = glm::angleAxis(_owningHead->getRoll(), glm::normalize(inverse * axes[2])) *
glm::angleAxis(_owningHead->getYaw(), glm::normalize(inverse * axes[1])) *
glm::angleAxis(_owningHead->getPitch(), glm::normalize(inverse * axes[0])) * joint.rotation;
} else if (i == geometry.leftEyeJointIndex || i == geometry.rightEyeJointIndex) {
// likewise with the lookat position
glm::mat4 inverse = glm::inverse(_jointStates[joint.parentIndex].transform *
joint.preRotation * glm::mat4_cast(joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getOrientation() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
_owningHead->getSaccade(), 1.0f));
state.rotation = rotationBetween(front, lookAt) * joint.rotation;
}
state.transform = _jointStates[joint.parentIndex].transform * joint.preRotation *
glm::mat4_cast(state.rotation) * joint.postRotation;
}
updateJointState(i);
}
for (int i = 0; i < _meshStates.size(); i++) {
@ -152,9 +110,8 @@ void BlendFace::simulate(float deltaTime) {
memcpy(_blendedVertices.data(), mesh.vertices.constData(), vertexCount * sizeof(glm::vec3));
// blend in each coefficient
const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
for (int j = 0; j < coefficients.size(); j++) {
float coefficient = coefficients[j];
for (int j = 0; j < _blendshapeCoefficients.size(); j++) {
float coefficient = _blendshapeCoefficients[j];
if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
continue;
}
@ -217,7 +174,7 @@ void BlendFace::simulate(float deltaTime) {
_resetStates = false;
}
bool BlendFace::render(float alpha) {
bool Model::render(float alpha) {
if (_meshStates.isEmpty()) {
return false;
}
@ -300,9 +257,8 @@ bool BlendFace::render(float alpha) {
memcpy(_blendedNormals.data(), mesh.normals.constData(), vertexCount * sizeof(glm::vec3));
// blend in each coefficient
const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
for (int j = 0; j < coefficients.size(); j++) {
float coefficient = coefficients[j];
for (int j = 0; j < _blendshapeCoefficients.size(); j++) {
float coefficient = _blendshapeCoefficients[j];
if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
continue;
}
@ -342,7 +298,7 @@ bool BlendFace::render(float alpha) {
if (mesh.isEye) {
if (texture != NULL) {
texture = (_dilatedTextures[i][j] = static_cast<DilatableNetworkTexture*>(texture)->getDilatedTexture(
_owningHead->getPupilDilation())).data();
_pupilDilation)).data();
}
}
glBindTexture(GL_TEXTURE_2D, texture == NULL ? Application::getInstance()->getTextureCache()->getWhiteTextureID() :
@ -386,32 +342,33 @@ bool BlendFace::render(float alpha) {
return true;
}
bool BlendFace::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition, bool upright) const {
if (!isActive() || _jointStates.isEmpty()) {
bool Model::getHeadPosition(glm::vec3& headPosition) const {
return isActive() && getJointPosition(_geometry->getFBXGeometry().headJointIndex, headPosition);
}
bool Model::getNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPosition(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
bool Model::getNeckRotation(glm::quat& neckRotation) const {
return isActive() && getJointRotation(_geometry->getFBXGeometry().neckJointIndex, neckRotation);
}
bool Model::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (geometry.leftEyeJointIndex != -1) {
const glm::mat4& transform = _jointStates[geometry.leftEyeJointIndex].transform;
firstEyePosition = glm::vec3(transform[3][0], transform[3][1], transform[3][2]);
}
if (geometry.rightEyeJointIndex != -1) {
const glm::mat4& transform = _jointStates[geometry.rightEyeJointIndex].transform;
secondEyePosition = glm::vec3(transform[3][0], transform[3][1], transform[3][2]);
}
return geometry.leftEyeJointIndex != -1 && geometry.rightEyeJointIndex != -1;
return getJointPosition(geometry.leftEyeJointIndex, firstEyePosition) &&
getJointPosition(geometry.rightEyeJointIndex, secondEyePosition);
}
glm::vec4 BlendFace::computeAverageColor() const {
return _geometry ? _geometry->computeAverageColor() : glm::vec4(1.0f, 1.0f, 1.0f, 1.0f);
}
void BlendFace::setModelURL(const QUrl& url) {
void Model::setURL(const QUrl& url) {
// don't recreate the geometry if it's the same URL
if (_modelURL == url) {
if (_url == url) {
return;
}
_modelURL = url;
_url = url;
// delete our local geometry and custom textures
deleteGeometry();
@ -420,7 +377,72 @@ void BlendFace::setModelURL(const QUrl& url) {
_geometry = Application::getInstance()->getGeometryCache()->getGeometry(url);
}
void BlendFace::deleteGeometry() {
glm::vec4 Model::computeAverageColor() const {
return _geometry ? _geometry->computeAverageColor() : glm::vec4(1.0f, 1.0f, 1.0f, 1.0f);
}
void Model::updateJointState(int index) {
JointState& state = _jointStates[index];
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const FBXJoint& joint = geometry.joints.at(index);
if (joint.parentIndex == -1) {
glm::mat4 baseTransform = glm::translate(_translation) * glm::mat4_cast(_rotation) *
glm::scale(_scale) * glm::translate(_offset);
glm::quat combinedRotation = joint.preRotation * state.rotation * joint.postRotation;
state.transform = baseTransform * geometry.offset * joint.preTransform *
glm::mat4_cast(combinedRotation) * joint.postTransform;
state.combinedRotation = _rotation * combinedRotation;
} else {
const JointState& parentState = _jointStates.at(joint.parentIndex);
if (index == geometry.leanJointIndex) {
maybeUpdateLeanRotation(parentState, joint, state);
} else if (index == geometry.neckJointIndex) {
maybeUpdateNeckRotation(parentState, joint, state);
} else if (index == geometry.leftEyeJointIndex || index == geometry.rightEyeJointIndex) {
maybeUpdateEyeRotation(parentState, joint, state);
}
glm::quat combinedRotation = joint.preRotation * state.rotation * joint.postRotation;
state.transform = parentState.transform * joint.preTransform *
glm::mat4_cast(combinedRotation) * joint.postTransform;
state.combinedRotation = parentState.combinedRotation * combinedRotation;
}
}
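updateJointState applies the same decomposition the reader produced: every joint contributes preTransform * mat4_cast(preRotation * rotation * postRotation) * postTransform, the root is additionally premultiplied by the model's translation/rotation/scale/offset and the geometry offset, and combinedRotation accumulates just the rotation chain for getJointRotation below. A small standalone glm sketch of that ordering for a hypothetical two-joint chain; the angles and pivots are made up and only the multiplication order matters.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/transform.hpp>

glm::mat4 composeChildTransform() {
    // root joint: no pivots, a 90-degree yaw
    glm::quat rootRotation = glm::quat(glm::radians(glm::vec3(0.0f, 90.0f, 0.0f)));
    glm::mat4 rootTransform = glm::mat4_cast(rootRotation); // pre/post transforms are identity here

    // child joint: rotation pivot one unit up, a 45-degree pitch, no scaling
    glm::mat4 preTransform = glm::translate(glm::vec3(0.0f, 1.0f, 0.0f)); // translation * rotationPivot
    glm::quat combinedRotation = glm::quat(glm::radians(glm::vec3(45.0f, 0.0f, 0.0f)));
    glm::mat4 postTransform = glm::translate(glm::vec3(0.0f, -1.0f, 0.0f)); // -rotationPivot

    // parent * preTransform * rotation * postTransform, in the same order as updateJointState
    return rootTransform * preTransform * glm::mat4_cast(combinedRotation) * postTransform;
}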
void Model::maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// nothing by default
}
void Model::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// nothing by default
}
void Model::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// nothing by default
}
bool Model::getJointPosition(int jointIndex, glm::vec3& position) const {
if (jointIndex == -1 || _jointStates.isEmpty()) {
return false;
}
position = extractTranslation(_jointStates[jointIndex].transform);
return true;
}
bool Model::getJointRotation(int jointIndex, glm::quat& rotation) const {
if (jointIndex == -1 || _jointStates.isEmpty()) {
return false;
}
rotation = _jointStates[jointIndex].combinedRotation *
_geometry->getFBXGeometry().joints[jointIndex].inverseBindRotation;
return true;
}
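getJointRotation composes the accumulated combinedRotation with the inverseBindRotation computed by the FBX reader, so a joint still sitting in its bind pose reports only the model's overall rotation; anything beyond that is deviation from the bind pose. A tiny pure-glm sketch of that cancellation for a single root joint, with made-up angles.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

bool bindPoseCancelsOut() {
    glm::quat modelRotation = glm::quat(glm::radians(glm::vec3(0.0f, 30.0f, 0.0f)));
    glm::quat bindRotation = glm::quat(glm::radians(glm::vec3(10.0f, 20.0f, 30.0f))); // pre * rotation * post
    glm::quat combinedRotation = modelRotation * bindRotation; // as accumulated in updateJointState
    glm::quat inverseBindRotation = glm::inverse(bindRotation); // as computed by the reader for a root joint
    glm::quat reported = combinedRotation * inverseBindRotation; // what getJointRotation returns
    return glm::dot(reported, modelRotation) > 0.999f; // still in bind pose: only the model rotation remains
}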
void Model::deleteGeometry() {
foreach (GLuint id, _blendedVertexBufferIDs) {
glDeleteBuffers(1, &id);
}

View file

@ -0,0 +1,136 @@
//
// Model.h
// interface
//
// Created by Andrzej Kapolka on 10/18/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__Model__
#define __interface__Model__
#include <QObject>
#include <QUrl>
#include "GeometryCache.h"
#include "InterfaceConfig.h"
#include "ProgramObject.h"
#include "TextureCache.h"
/// A generic 3D model displaying geometry loaded from a URL.
class Model : public QObject {
Q_OBJECT
public:
Model();
virtual ~Model();
void setTranslation(const glm::vec3& translation) { _translation = translation; }
const glm::vec3& getTranslation() const { return _translation; }
void setRotation(const glm::quat& rotation) { _rotation = rotation; }
const glm::quat& getRotation() const { return _rotation; }
void setScale(const glm::vec3& scale) { _scale = scale; }
const glm::vec3& getScale() const { return _scale; }
void setOffset(const glm::vec3& offset) { _offset = offset; }
const glm::vec3& getOffset() const { return _offset; }
void setPupilDilation(float dilation) { _pupilDilation = dilation; }
float getPupilDilation() const { return _pupilDilation; }
void setBlendshapeCoefficients(const std::vector<float>& coefficients) { _blendshapeCoefficients = coefficients; }
const std::vector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
bool isActive() const { return _geometry && _geometry->isLoaded(); }
void init();
void reset();
void simulate(float deltaTime);
bool render(float alpha);
Q_INVOKABLE void setURL(const QUrl& url);
const QUrl& getURL() const { return _url; }
/// Returns the position of the head joint.
/// \return whether or not the head was found
bool getHeadPosition(glm::vec3& headPosition) const;
/// Returns the position of the neck joint.
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint.
/// \return whether or not the neck was found
bool getNeckRotation(glm::quat& neckRotation) const;
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
/// Returns the average color of all meshes in the geometry.
glm::vec4 computeAverageColor() const;
protected:
QSharedPointer<NetworkGeometry> _geometry;
glm::vec3 _translation;
glm::quat _rotation;
glm::vec3 _scale;
glm::vec3 _offset;
class JointState {
public:
glm::quat rotation;
glm::mat4 transform;
glm::quat combinedRotation;
};
QVector<JointState> _jointStates;
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
virtual void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
bool getJointPosition(int jointIndex, glm::vec3& position) const;
bool getJointRotation(int jointIndex, glm::quat& rotation) const;
private:
void deleteGeometry();
float _pupilDilation;
std::vector<float> _blendshapeCoefficients;
QUrl _url;
class MeshState {
public:
QVector<glm::mat4> clusterMatrices;
QVector<glm::vec3> worldSpaceVertices;
QVector<glm::vec3> vertexVelocities;
QVector<glm::vec3> worldSpaceNormals;
};
QVector<MeshState> _meshStates;
QVector<GLuint> _blendedVertexBufferIDs;
QVector<QVector<QSharedPointer<Texture> > > _dilatedTextures;
bool _resetStates;
QVector<glm::vec3> _blendedVertices;
QVector<glm::vec3> _blendedNormals;
static ProgramObject _program;
static ProgramObject _skinProgram;
static int _clusterMatricesLocation;
static int _clusterIndicesLocation;
static int _clusterWeightsLocation;
};
#endif /* defined(__interface__Model__) */
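For reference, a Model is driven in roughly this order: set the URL once (the geometry then loads asynchronously through the cache), init() once a GL context exists, then per frame update the world transform and face state, simulate(), and render(). A hedged driver sketch; the URL and transform values are placeholders and the function is hypothetical.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

#include "Model.h"

void drivePerFrame(Model& model, float deltaTime) {
    if (model.getURL().isEmpty()) {
        model.setURL(QUrl("http://example.com/models/head.fbx")); // placeholder URL; loads asynchronously
        model.init(); // links the shared shader programs on first use
    }
    if (!model.isActive()) {
        return; // geometry not downloaded and parsed yet
    }
    model.setTranslation(glm::vec3(0.0f, 1.0f, 0.0f));
    model.setRotation(glm::quat());
    model.setScale(glm::vec3(0.01f));
    model.setPupilDilation(0.0f);
    model.simulate(deltaTime);
    const float MODEL_ALPHA = 1.0f;
    model.render(MODEL_ALPHA);
}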