Merge pull request #964 from ey6es/blendface

Custom Faceshift faces: either the rig received over the TCP interface, or a mesh loaded from an FBX file. Normals, etc. are still to come.
Stephen Birarda 2013-09-20 10:15:08 -07:00
commit 4f3909b682
21 changed files with 844 additions and 119 deletions
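For reference, a minimal, self-contained sketch (not from this commit; the URLs are placeholders) of the QDataStream round trip the new PACKET_TYPE_AVATAR_URLS message relies on: the sender appends both URLs after the packet header and owner ID, and the receiver reads them back in the same order.

#include <QByteArray>
#include <QDataStream>
#include <QDebug>
#include <QIODevice>
#include <QUrl>

int main() {
    QByteArray payload;

    // sender side: append both URLs to the message payload (placeholder URLs)
    QDataStream out(&payload, QIODevice::WriteOnly);
    out << QUrl("http://example.com/avatar-voxels");
    out << QUrl("http://example.com/face.fbx");

    // receiver side: read them back in the same order they were written
    QDataStream in(payload);
    QUrl voxelURL, faceURL;
    in >> voxelURL >> faceURL;

    qDebug() << voxelURL << faceURL;
    return 0;
}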

View file

@ -137,7 +137,7 @@ void AvatarMixer::run() {
case PACKET_TYPE_INJECT_AUDIO:
broadcastAvatarData(nodeList, nodeAddress);
break;
case PACKET_TYPE_AVATAR_VOXEL_URL:
case PACKET_TYPE_AVATAR_URLS:
case PACKET_TYPE_AVATAR_FACE_VIDEO:
// grab the node ID from the packet
unpackNodeId(packetData + numBytesForPacketHeader(packetData), &nodeID);
@ -158,4 +158,4 @@ void AvatarMixer::run() {
}
nodeList->stopSilentNodeRemovalThread();
}
}

View file

@ -1230,15 +1230,19 @@ static Avatar* processAvatarMessageHeader(unsigned char*& packetData, size_t& da
return avatar->isInitialized() ? avatar : NULL;
}
void Application::processAvatarVoxelURLMessage(unsigned char* packetData, size_t dataBytes) {
void Application::processAvatarURLsMessage(unsigned char* packetData, size_t dataBytes) {
Avatar* avatar = processAvatarMessageHeader(packetData, dataBytes);
if (!avatar) {
return;
}
QUrl url = QUrl::fromEncoded(QByteArray((char*)packetData, dataBytes));
}
QDataStream in(QByteArray((char*)packetData, dataBytes));
QUrl voxelURL, faceURL;
in >> voxelURL;
in >> faceURL;
// invoke the set URL function on the simulate/render thread
QMetaObject::invokeMethod(avatar->getVoxels(), "setVoxelURL", Q_ARG(QUrl, url));
// invoke the set URL functions on the simulate/render thread
QMetaObject::invokeMethod(avatar->getVoxels(), "setVoxelURL", Q_ARG(QUrl, voxelURL));
QMetaObject::invokeMethod(&avatar->getHead().getBlendFace(), "setModelURL", Q_ARG(QUrl, faceURL));
}
void Application::processAvatarFaceVideoMessage(unsigned char* packetData, size_t dataBytes) {
@ -1564,8 +1568,8 @@ void Application::init() {
qDebug("Loaded settings.\n");
Avatar::sendAvatarVoxelURLMessage(_myAvatar.getVoxels()->getVoxelURL());
Avatar::sendAvatarURLsMessage(_myAvatar.getVoxels()->getVoxelURL(), _myAvatar.getHead().getBlendFace().getModelURL());
_palette.init(_glWidget->width(), _glWidget->height());
_palette.addAction(Menu::getInstance()->getActionForOption(MenuOption::VoxelAddMode), 0, 0);
_palette.addAction(Menu::getInstance()->getActionForOption(MenuOption::VoxelDeleteMode), 0, 1);
@ -2136,10 +2140,11 @@ void Application::updateAvatar(float deltaTime) {
controlledBroadcastToNodes(broadcastString, endOfBroadcastStringWrite - broadcastString,
nodeTypesOfInterest, sizeof(nodeTypesOfInterest));
// once in a while, send my voxel url
const float AVATAR_VOXEL_URL_SEND_INTERVAL = 1.0f; // seconds
if (shouldDo(AVATAR_VOXEL_URL_SEND_INTERVAL, deltaTime)) {
Avatar::sendAvatarVoxelURLMessage(_myAvatar.getVoxels()->getVoxelURL());
// once in a while, send my urls
const float AVATAR_URLS_SEND_INTERVAL = 1.0f; // seconds
if (shouldDo(AVATAR_URLS_SEND_INTERVAL, deltaTime)) {
Avatar::sendAvatarURLsMessage(_myAvatar.getVoxels()->getVoxelURL(),
_myAvatar.getHead().getBlendFace().getModelURL());
}
}
}
@ -3510,8 +3515,8 @@ void* Application::networkReceive(void* args) {
bytesReceived);
getInstance()->_bandwidthMeter.inputStream(BandwidthMeter::AVATARS).updateValue(bytesReceived);
break;
case PACKET_TYPE_AVATAR_VOXEL_URL:
processAvatarVoxelURLMessage(app->_incomingPacket, bytesReceived);
case PACKET_TYPE_AVATAR_URLS:
processAvatarURLsMessage(app->_incomingPacket, bytesReceived);
break;
case PACKET_TYPE_AVATAR_FACE_VIDEO:
processAvatarFaceVideoMessage(app->_incomingPacket, bytesReceived);

View file

@ -187,7 +187,7 @@ private:
void updateProjectionMatrix();
static bool sendVoxelsOperation(VoxelNode* node, void* extraData);
static void processAvatarVoxelURLMessage(unsigned char* packetData, size_t dataBytes);
static void processAvatarURLsMessage(unsigned char* packetData, size_t dataBytes);
static void processAvatarFaceVideoMessage(unsigned char* packetData, size_t dataBytes);
static void sendPingPackets();

View file

@ -275,7 +275,8 @@ Menu::Menu() :
appInstance->getGlowEffect(),
SLOT(cycleRenderMode()));
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::UseFaceshiftRig, 0, false,
appInstance->getFaceshift(), SLOT(setUsingRig(bool)));
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::UsePerlinFace, 0, false);
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::LookAtVectors, 0, true);
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::LookAtIndicator, 0, true);
@ -746,6 +747,10 @@ void Menu::editPreferences() {
avatarURL->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Avatar URL:", avatarURL);
QLineEdit* faceURL = new QLineEdit(applicationInstance->getAvatar()->getHead().getBlendFace().getModelURL().toString());
faceURL->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Face URL:", faceURL);
QSpinBox* fieldOfView = new QSpinBox();
fieldOfView->setMaximum(180);
fieldOfView->setMinimum(1);
@ -779,9 +784,13 @@ void Menu::editPreferences() {
updateDSHostname(domainServerLineEdit->text());
QUrl url(avatarURL->text());
applicationInstance->getAvatar()->getVoxels()->setVoxelURL(url);
Avatar::sendAvatarVoxelURLMessage(url);
QUrl avatarVoxelURL(avatarURL->text());
applicationInstance->getAvatar()->getVoxels()->setVoxelURL(avatarVoxelURL);
QUrl faceModelURL(faceURL->text());
applicationInstance->getAvatar()->getHead().getBlendFace().setModelURL(faceModelURL);
Avatar::sendAvatarURLsMessage(avatarVoxelURL, faceModelURL);
_gyroCameraSensitivity = gyroCameraSensitivity->value();

View file

@ -31,6 +31,8 @@ struct ViewFrustumOffset {
float up;
};
class QSettings;
class BandwidthDialog;
class VoxelStatsDialog;
@ -197,6 +199,7 @@ namespace MenuOption {
const QString TestRaveGlove = "Test Rave Glove";
const QString TreeStats = "Calculate Tree Stats";
const QString TransmitterDrive = "Transmitter Drive";
const QString UseFaceshiftRig = "Use Faceshift Rig";
const QString UsePerlinFace = "Use Perlin's Face";
const QString Quit = "Quit";
const QString Webcam = "Webcam";

View file

@ -61,7 +61,7 @@ const bool usingBigSphereCollisionTest = true;
const float chatMessageScale = 0.0015;
const float chatMessageHeight = 0.20;
void Avatar::sendAvatarVoxelURLMessage(const QUrl& url) {
void Avatar::sendAvatarURLsMessage(const QUrl& voxelURL, const QUrl& faceURL) {
uint16_t ownerID = NodeList::getInstance()->getOwnerID();
if (ownerID == UNKNOWN_NODE_ID) {
@ -71,11 +71,14 @@ void Avatar::sendAvatarVoxelURLMessage(const QUrl& url) {
QByteArray message;
char packetHeader[MAX_PACKET_HEADER_BYTES];
int numBytesPacketHeader = populateTypeAndVersion((unsigned char*) packetHeader, PACKET_TYPE_AVATAR_VOXEL_URL);
int numBytesPacketHeader = populateTypeAndVersion((unsigned char*) packetHeader, PACKET_TYPE_AVATAR_URLS);
message.append(packetHeader, numBytesPacketHeader);
message.append((const char*)&ownerID, sizeof(ownerID));
message.append(url.toEncoded());
QDataStream out(&message, QIODevice::WriteOnly);
out << voxelURL;
out << faceURL;
Application::controlledBroadcastToNodes((unsigned char*)message.data(), message.size(), &NODE_TYPE_AVATAR_MIXER, 1);
}
@ -786,6 +789,7 @@ void Avatar::loadData(QSettings* settings) {
_position.z = loadSetting(settings, "position_z", 0.0f);
_voxels.setVoxelURL(settings->value("voxelURL").toUrl());
_head.getBlendFace().setModelURL(settings->value("faceModelURL").toUrl());
_leanScale = loadSetting(settings, "leanScale", 0.05f);
@ -837,6 +841,7 @@ void Avatar::saveData(QSettings* set) {
set->setValue("position_z", _position.z);
set->setValue("voxelURL", _voxels.getVoxelURL());
set->setValue("faceModelURL", _head.getBlendFace().getModelURL());
set->setValue("leanScale", _leanScale);
set->setValue("scale", _newScale);

View file

@ -129,7 +129,7 @@ class Avatar : public AvatarData {
Q_OBJECT
public:
static void sendAvatarVoxelURLMessage(const QUrl& url);
static void sendAvatarURLsMessage(const QUrl& voxelURL, const QUrl& faceURL);
Avatar(Node* owningNode = NULL);
~Avatar();

View file

@ -0,0 +1,214 @@
//
// BlendFace.cpp
// interface
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include <QNetworkReply>
#include "Application.h"
#include "BlendFace.h"
#include "Head.h"
using namespace fs;
using namespace std;
BlendFace::BlendFace(Head* owningHead) :
_owningHead(owningHead),
_modelReply(NULL),
_iboID(0)
{
}
BlendFace::~BlendFace() {
if (_iboID != 0) {
glDeleteBuffers(1, &_iboID);
glDeleteBuffers(1, &_vboID);
}
}
bool BlendFace::render(float alpha) {
if (_iboID == 0) {
return false;
}
glPushMatrix();
glTranslatef(_owningHead->getPosition().x, _owningHead->getPosition().y, _owningHead->getPosition().z);
glm::quat orientation = _owningHead->getOrientation();
glm::vec3 axis = glm::axis(orientation);
glRotatef(glm::angle(orientation), axis.x, axis.y, axis.z);
glTranslatef(0.0f, -0.025f, -0.025f); // temporary fudge factor until we have a better method of per-model positioning
const float MODEL_SCALE = 0.0006f;
glScalef(_owningHead->getScale() * MODEL_SCALE, _owningHead->getScale() * MODEL_SCALE,
-_owningHead->getScale() * MODEL_SCALE);
glColor4f(1.0f, 1.0f, 1.0f, alpha);
// start with the base
int vertexCount = _geometry.vertices.size();
_blendedVertices.resize(vertexCount);
memcpy(_blendedVertices.data(), _geometry.vertices.constData(), vertexCount * sizeof(glm::vec3));
// blend in each coefficient
const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
for (int i = 0; i < coefficients.size(); i++) {
float coefficient = coefficients[i];
if (coefficient == 0.0f || i >= _geometry.blendshapes.size() || _geometry.blendshapes[i].vertices.isEmpty()) {
continue;
}
const glm::vec3* source = _geometry.blendshapes[i].vertices.constData();
for (const int* index = _geometry.blendshapes[i].indices.constData(),
*end = index + _geometry.blendshapes[i].indices.size(); index != end; index++, source++) {
_blendedVertices[*index] += *source * coefficient;
}
}
// update the blended vertices
glBindBuffer(GL_ARRAY_BUFFER, _vboID);
glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount * sizeof(glm::vec3), _blendedVertices.constData());
// tell OpenGL where to find vertex information
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, 0);
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _iboID);
glDrawRangeElementsEXT(GL_QUADS, 0, vertexCount - 1, _geometry.quadIndices.size(), GL_UNSIGNED_INT, 0);
glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertexCount - 1, _geometry.triangleIndices.size(), GL_UNSIGNED_INT,
(void*)(_geometry.quadIndices.size() * sizeof(int)));
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
// deactivate vertex arrays after drawing
glDisableClientState(GL_VERTEX_ARRAY);
// bind with 0 to switch back to normal operation
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glPopMatrix();
return true;
}
void BlendFace::setModelURL(const QUrl& url) {
// don't restart the download if it's the same URL
if (_modelURL == url) {
return;
}
// cancel any current download
if (_modelReply != 0) {
delete _modelReply;
_modelReply = 0;
}
// remember the URL
_modelURL = url;
// load the URL data asynchronously
if (!url.isValid()) {
return;
}
_modelReply = Application::getInstance()->getNetworkAccessManager()->get(QNetworkRequest(url));
connect(_modelReply, SIGNAL(downloadProgress(qint64,qint64)), SLOT(handleModelDownloadProgress(qint64,qint64)));
connect(_modelReply, SIGNAL(error(QNetworkReply::NetworkError)), SLOT(handleModelReplyError()));
}
glm::vec3 createVec3(const fsVector3f& vector) {
return glm::vec3(vector.x, vector.y, vector.z);
}
void BlendFace::setRig(const fsMsgRig& rig) {
// convert to FBX geometry
FBXGeometry geometry;
for (vector<fsVector4i>::const_iterator it = rig.mesh().m_quads.begin(), end = rig.mesh().m_quads.end(); it != end; it++) {
geometry.quadIndices.append(it->x);
geometry.quadIndices.append(it->y);
geometry.quadIndices.append(it->z);
geometry.quadIndices.append(it->w);
}
for (vector<fsVector3i>::const_iterator it = rig.mesh().m_tris.begin(), end = rig.mesh().m_tris.end(); it != end; it++) {
geometry.triangleIndices.append(it->x);
geometry.triangleIndices.append(it->y);
geometry.triangleIndices.append(it->z);
}
for (vector<fsVector3f>::const_iterator it = rig.mesh().m_vertex_data.m_vertices.begin(),
end = rig.mesh().m_vertex_data.m_vertices.end(); it != end; it++) {
geometry.vertices.append(glm::vec3(it->x, it->y, it->z));
}
for (vector<fsVertexData>::const_iterator it = rig.blendshapes().begin(), end = rig.blendshapes().end(); it != end; it++) {
FBXBlendshape blendshape;
for (int i = 0, n = it->m_vertices.size(); i < n; i++) {
// subtract the base vertex position; we want the deltas
blendshape.vertices.append(createVec3(it->m_vertices[i]) - geometry.vertices[i]);
blendshape.indices.append(i);
}
geometry.blendshapes.append(blendshape);
}
setGeometry(geometry);
}
void BlendFace::handleModelDownloadProgress(qint64 bytesReceived, qint64 bytesTotal) {
if (bytesReceived < bytesTotal) {
return;
}
QByteArray entirety = _modelReply->readAll();
_modelReply->disconnect(this);
_modelReply->deleteLater();
_modelReply = 0;
try {
setGeometry(extractFBXGeometry(parseFBX(entirety)));
} catch (const QString& error) {
qDebug() << error << "\n";
return;
}
}
void BlendFace::handleModelReplyError() {
qDebug("%s\n", _modelReply->errorString().toLocal8Bit().constData());
_modelReply->disconnect(this);
_modelReply->deleteLater();
_modelReply = 0;
}
void BlendFace::setGeometry(const FBXGeometry& geometry) {
if (geometry.vertices.isEmpty()) {
// clear any existing geometry
if (_iboID != 0) {
glDeleteBuffers(1, &_iboID);
glDeleteBuffers(1, &_vboID);
_iboID = 0;
}
return;
}
if (_iboID == 0) {
glGenBuffers(1, &_iboID);
glGenBuffers(1, &_vboID);
}
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _iboID);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, (geometry.quadIndices.size() + geometry.triangleIndices.size()) * sizeof(int),
NULL, GL_STATIC_DRAW);
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, geometry.quadIndices.size() * sizeof(int), geometry.quadIndices.constData());
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, geometry.quadIndices.size() * sizeof(int),
geometry.triangleIndices.size() * sizeof(int), geometry.triangleIndices.constData());
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, _vboID);
glBufferData(GL_ARRAY_BUFFER, geometry.vertices.size() * sizeof(glm::vec3), NULL, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
_geometry = geometry;
}
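The blending performed in render() above amounts to base-plus-weighted-deltas: blended[v] = base[v] + sum_i coefficient[i] * delta_i[v], with each blendshape storing sparse deltas addressed by vertex index. A standalone sketch of that accumulation, with illustrative names not taken from the commit:

#include <QVector>
#include <glm/glm.hpp>
#include <vector>

// Sketch only: accumulate sparse blendshape deltas onto the base mesh.
// shapeIndices[i][j] is the vertex index that shapeDeltas[i][j] displaces.
void blendVertices(const QVector<glm::vec3>& base,
                   const QVector<QVector<int> >& shapeIndices,
                   const QVector<QVector<glm::vec3> >& shapeDeltas,
                   const std::vector<float>& coefficients,
                   QVector<glm::vec3>& blended) {
    blended = base;
    for (int i = 0; i < (int)coefficients.size() && i < shapeDeltas.size(); i++) {
        if (coefficients[i] == 0.0f) {
            continue; // skip inactive blendshapes, as BlendFace::render does
        }
        for (int j = 0; j < shapeIndices.at(i).size(); j++) {
            blended[shapeIndices.at(i).at(j)] += shapeDeltas.at(i).at(j) * coefficients[i];
        }
    }
}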

View file

@ -0,0 +1,64 @@
//
// BlendFace.h
// interface
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__BlendFace__
#define __interface__BlendFace__
#include <QObject>
#include <QUrl>
#include <fsbinarystream.h>
#include "InterfaceConfig.h"
#include "renderer/FBXReader.h"
class QNetworkReply;
class Head;
/// A face formed from a linear mix of blendshapes according to a set of coefficients.
class BlendFace : public QObject {
Q_OBJECT
public:
BlendFace(Head* owningHead);
~BlendFace();
bool render(float alpha);
Q_INVOKABLE void setModelURL(const QUrl& url);
const QUrl& getModelURL() const { return _modelURL; }
public slots:
void setRig(const fs::fsMsgRig& rig);
private slots:
void handleModelDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
void handleModelReplyError();
private:
void setGeometry(const FBXGeometry& geometry);
Head* _owningHead;
QUrl _modelURL;
QNetworkReply* _modelReply;
GLuint _iboID;
GLuint _vboID;
FBXGeometry _geometry;
QVector<glm::vec3> _blendedVertices;
};
#endif /* defined(__interface__BlendFace__) */

View file

@ -87,7 +87,8 @@ Head::Head(Avatar* owningAvatar) :
_cameraFollowsHead(false),
_cameraFollowHeadRate(0.0f),
_face(this),
_perlinFace(this)
_perlinFace(this),
_blendFace(this)
{
if (USING_PHYSICAL_MOHAWK) {
resetHairPhysics();
@ -159,6 +160,7 @@ void Head::simulate(float deltaTime, bool isMine, float gyroCameraSensitivity) {
_averageLoudness = faceshift->getMouthSize() * faceshift->getMouthSize() * MOUTH_SIZE_SCALE;
const float BROW_HEIGHT_SCALE = 0.005f;
_browAudioLift = faceshift->getBrowUpCenter() * BROW_HEIGHT_SCALE;
_blendshapeCoefficients = faceshift->getBlendshapeCoefficients();
} else if (!_isFaceshiftConnected) {
// Update eye saccades
@ -325,7 +327,7 @@ void Head::calculateGeometry() {
void Head::render(float alpha, bool isMine) {
_renderAlpha = alpha;
if (!_face.render(alpha)) {
if (!(_face.render(alpha) || _blendFace.render(alpha))) {
calculateGeometry();
glEnable(GL_DEPTH_TEST);

View file

@ -18,9 +18,10 @@
#include <VoxelConstants.h>
#include "BendyLine.h"
#include "BlendFace.h"
#include "Face.h"
#include "PerlinFace.h"
#include "InterfaceConfig.h"
#include "PerlinFace.h"
#include "world.h"
#include "devices/SerialInterface.h"
@ -71,6 +72,7 @@ public:
glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
Face& getFace() { return _face; }
BlendFace& getBlendFace() { return _blendFace; }
const bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
float getAverageLoudness() const { return _averageLoudness; }
@ -128,6 +130,7 @@ private:
float _cameraFollowHeadRate;
Face _face;
PerlinFace _perlinFace;
BlendFace _blendFace;
static ProgramObject _irisProgram;
static GLuint _irisTextureID;

View file

@ -56,7 +56,15 @@ MyAvatar::MyAvatar(Node* owningNode) :
_driveKeys[i] = false;
}
_collisionRadius = _height * COLLISION_RADIUS_SCALE;
_collisionRadius = _height * COLLISION_RADIUS_SCALE;
}
void MyAvatar::init() {
Avatar::init();
// when we receive a Faceshift rig, apply it to our own blend face
_head.getBlendFace().connect(Application::getInstance()->getFaceshift(), SIGNAL(rigReceived(fs::fsMsgRig)),
SLOT(setRig(fs::fsMsgRig)));
}
void MyAvatar::reset() {

View file

@ -15,6 +15,7 @@ class MyAvatar : public Avatar {
public:
MyAvatar(Node* owningNode = NULL);
void init();
void reset();
void simulate(float deltaTime, Transmitter* transmitter, float gyroCameraSensitivity);
void updateFromGyrosAndOrWebcam(bool gyroLook, float pitchFromTouch);
@ -87,4 +88,4 @@ public:
void checkForMouseRayTouching();
};
#endif
#endif

View file

@ -11,6 +11,7 @@
#include <SharedUtil.h>
#include "Faceshift.h"
#include "Menu.h"
using namespace fs;
using namespace std;
@ -24,29 +25,17 @@ Faceshift::Faceshift() :
_eyeGazeLeftYaw(0.0f),
_eyeGazeRightPitch(0.0f),
_eyeGazeRightYaw(0.0f),
_leftBlink(0.0f),
_rightBlink(0.0f),
_leftEyeOpen(0.0f),
_rightEyeOpen(0.0f),
_leftBlinkIndex(0), // see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
_rightBlinkIndex(1),
_leftEyeOpenIndex(8),
_rightEyeOpenIndex(9),
_browDownLeft(0.0f),
_browDownRight(0.0f),
_browUpCenter(0.0f),
_browUpLeft(0.0f),
_browUpRight(0.0f),
_browDownLeftIndex(-1),
_browDownRightIndex(-1),
_browDownLeftIndex(14),
_browDownRightIndex(15),
_browUpCenterIndex(16),
_browUpLeftIndex(-1),
_browUpRightIndex(-1),
_mouthSize(0.0f),
_mouthSmileLeft(0),
_mouthSmileRight(0),
_mouthSmileLeftIndex(-1),
_mouthSmileRightIndex(0),
_browUpLeftIndex(17),
_browUpRightIndex(18),
_mouthSmileLeftIndex(28),
_mouthSmileRightIndex(29),
_jawOpenIndex(21),
_longTermAverageEyePitch(0.0f),
_longTermAverageEyeYaw(0.0f),
@ -98,6 +87,17 @@ void Faceshift::setTCPEnabled(bool enabled) {
}
}
void Faceshift::setUsingRig(bool usingRig) {
if (usingRig && _tcpSocket.state() == QAbstractSocket::ConnectedState) {
string message;
fsBinaryStream::encode_message(message, fsMsgSendRig());
send(message);
} else {
emit rigReceived(fsMsgRig());
}
}
void Faceshift::connectSocket() {
if (_tcpEnabled) {
qDebug("Faceshift: Connecting...\n");
@ -114,6 +114,11 @@ void Faceshift::noteConnected() {
string message;
fsBinaryStream::encode_message(message, fsMsgSendBlendshapeNames());
send(message);
// if using faceshift rig, request it
if (Menu::getInstance()->isOptionChecked(MenuOption::UseFaceshiftRig)) {
setUsingRig(true);
}
}
void Faceshift::noteError(QAbstractSocket::SocketError error) {
@ -138,6 +143,10 @@ void Faceshift::readFromSocket() {
receive(_tcpSocket.readAll());
}
float Faceshift::getBlendshapeCoefficient(int index) const {
return (index >= 0 && index < _blendshapeCoefficients.size()) ? _blendshapeCoefficients[index] : 0.0f;
}
void Faceshift::send(const std::string& message) {
_tcpSocket.write(message.data(), message.size());
}
@ -159,43 +168,7 @@ void Faceshift::receive(const QByteArray& buffer) {
_eyeGazeLeftYaw = data.m_eyeGazeLeftYaw;
_eyeGazeRightPitch = -data.m_eyeGazeRightPitch;
_eyeGazeRightYaw = data.m_eyeGazeRightYaw;
if (_leftBlinkIndex != -1) {
_leftBlink = data.m_coeffs[_leftBlinkIndex];
}
if (_rightBlinkIndex != -1) {
_rightBlink = data.m_coeffs[_rightBlinkIndex];
}
if (_leftEyeOpenIndex != -1) {
_leftEyeOpen = data.m_coeffs[_leftEyeOpenIndex];
}
if (_rightEyeOpenIndex != -1) {
_rightEyeOpen = data.m_coeffs[_rightEyeOpenIndex];
}
if (_browDownLeftIndex != -1) {
_browDownLeft = data.m_coeffs[_browDownLeftIndex];
}
if (_browDownRightIndex != -1) {
_browDownRight = data.m_coeffs[_browDownRightIndex];
}
if (_browUpCenterIndex != -1) {
_browUpCenter = data.m_coeffs[_browUpCenterIndex];
}
if (_browUpLeftIndex != -1) {
_browUpLeft = data.m_coeffs[_browUpLeftIndex];
}
if (_browUpRightIndex != -1) {
_browUpRight = data.m_coeffs[_browUpRightIndex];
}
if (_jawOpenIndex != -1) {
_mouthSize = data.m_coeffs[_jawOpenIndex];
}
if (_mouthSmileLeftIndex != -1) {
_mouthSmileLeft = data.m_coeffs[_mouthSmileLeftIndex];
}
if (_mouthSmileRightIndex != -1) {
_mouthSmileRight = data.m_coeffs[_mouthSmileRightIndex];
}
_blendshapeCoefficients = data.m_coeffs;
}
break;
}
@ -208,10 +181,10 @@ void Faceshift::receive(const QByteArray& buffer) {
} else if (names[i] == "EyeBlink_R") {
_rightBlinkIndex = i;
}else if (names[i] == "EyeOpen_L") {
} else if (names[i] == "EyeOpen_L") {
_leftEyeOpenIndex = i;
}else if (names[i] == "EyeOpen_R") {
} else if (names[i] == "EyeOpen_R") {
_rightEyeOpenIndex = i;
} else if (names[i] == "BrowsD_L") {
@ -237,11 +210,15 @@ void Faceshift::receive(const QByteArray& buffer) {
} else if (names[i] == "MouthSmile_R") {
_mouthSmileRightIndex = i;
}
}
break;
}
case fsMsg::MSG_OUT_RIG: {
fsMsgRig* rig = static_cast<fsMsgRig*>(msg.get());
emit rigReceived(*rig);
break;
}
default:
break;
}

View file

@ -9,6 +9,8 @@
#ifndef __interface__Faceshift__
#define __interface__Faceshift__
#include <vector>
#include <QTcpSocket>
#include <QUdpSocket>
@ -39,28 +41,35 @@ public:
float getEstimatedEyePitch() const { return _estimatedEyePitch; }
float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }
float getLeftBlink() const { return _leftBlink; }
float getRightBlink() const { return _rightBlink; }
float getLeftEyeOpen() const { return _leftEyeOpen; }
float getRightEyeOpen() const { return _rightEyeOpen; }
const std::vector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
float getBrowDownLeft() const { return _browDownLeft; }
float getBrowDownRight() const { return _browDownRight; }
float getBrowUpCenter() const { return _browUpCenter; }
float getBrowUpLeft() const { return _browUpLeft; }
float getBrowUpRight() const { return _browUpRight; }
float getLeftBlink() const { return getBlendshapeCoefficient(_leftBlinkIndex); }
float getRightBlink() const { return getBlendshapeCoefficient(_rightBlinkIndex); }
float getLeftEyeOpen() const { return getBlendshapeCoefficient(_leftEyeOpenIndex); }
float getRightEyeOpen() const { return getBlendshapeCoefficient(_rightEyeOpenIndex); }
float getMouthSize() const { return _mouthSize; }
float getMouthSmileLeft() const { return _mouthSmileLeft; }
float getMouthSmileRight() const { return _mouthSmileRight; }
float getBrowDownLeft() const { return getBlendshapeCoefficient(_browDownLeftIndex); }
float getBrowDownRight() const { return getBlendshapeCoefficient(_browDownRightIndex); }
float getBrowUpCenter() const { return getBlendshapeCoefficient(_browUpCenterIndex); }
float getBrowUpLeft() const { return getBlendshapeCoefficient(_browUpLeftIndex); }
float getBrowUpRight() const { return getBlendshapeCoefficient(_browUpRightIndex); }
float getMouthSize() const { return getBlendshapeCoefficient(_jawOpenIndex); }
float getMouthSmileLeft() const { return getBlendshapeCoefficient(_mouthSmileLeftIndex); }
float getMouthSmileRight() const { return getBlendshapeCoefficient(_mouthSmileRightIndex); }
void update();
void reset();
signals:
void rigReceived(const fs::fsMsgRig& rig);
public slots:
void setTCPEnabled(bool enabled);
void setUsingRig(bool usingRig);
private slots:
void connectSocket();
@ -71,6 +80,8 @@ private slots:
private:
float getBlendshapeCoefficient(int index) const;
void send(const std::string& message);
void receive(const QByteArray& buffer);
@ -90,35 +101,20 @@ private:
float _eyeGazeRightPitch;
float _eyeGazeRightYaw;
float _leftBlink;
float _rightBlink;
float _leftEyeOpen;
float _rightEyeOpen;
std::vector<float> _blendshapeCoefficients;
int _leftBlinkIndex;
int _rightBlinkIndex;
int _leftEyeOpenIndex;
int _rightEyeOpenIndex;
// Brows
float _browDownLeft;
float _browDownRight;
float _browUpCenter;
float _browUpLeft;
float _browUpRight;
int _browDownLeftIndex;
int _browDownRightIndex;
int _browUpCenterIndex;
int _browUpLeftIndex;
int _browUpRightIndex;
float _mouthSize;
float _mouthSmileLeft;
float _mouthSmileRight;
int _mouthSmileLeftIndex;
int _mouthSmileRightIndex;

View file

@ -0,0 +1,353 @@
//
// FBXReader.cpp
// interface
//
// Created by Andrzej Kapolka on 9/18/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include <QBuffer>
#include <QDataStream>
#include <QIODevice>
#include <QtDebug>
#include <QtEndian>
#include "FBXReader.h"
using namespace std;
FBXNode parseFBX(const QByteArray& data) {
QBuffer buffer(const_cast<QByteArray*>(&data));
buffer.open(QIODevice::ReadOnly);
return parseFBX(&buffer);
}
template<class T> QVariant readArray(QDataStream& in) {
quint32 arrayLength;
quint32 encoding;
quint32 compressedLength;
in >> arrayLength;
in >> encoding;
in >> compressedLength;
QVector<T> values;
const int DEFLATE_ENCODING = 1;
if (encoding == DEFLATE_ENCODING) {
// preface encoded data with uncompressed length
QByteArray compressed(sizeof(quint32) + compressedLength, 0);
*((quint32*)compressed.data()) = qToBigEndian<quint32>(arrayLength * sizeof(T));
in.readRawData(compressed.data() + sizeof(quint32), compressedLength);
QByteArray uncompressed = qUncompress(compressed);
QDataStream uncompressedIn(uncompressed);
uncompressedIn.setByteOrder(QDataStream::LittleEndian);
for (int i = 0; i < arrayLength; i++) {
T value;
uncompressedIn >> value;
values.append(value);
}
} else {
for (int i = 0; i < arrayLength; i++) {
T value;
in >> value;
values.append(value);
}
}
return QVariant::fromValue(values);
}
QVariant parseFBXProperty(QDataStream& in) {
char ch;
in.device()->getChar(&ch);
switch (ch) {
case 'Y': {
qint16 value;
in >> value;
return QVariant::fromValue(value);
}
case 'C': {
bool value;
in >> value;
return QVariant::fromValue(value);
}
case 'I': {
qint32 value;
in >> value;
return QVariant::fromValue(value);
}
case 'F': {
float value;
in >> value;
return QVariant::fromValue(value);
}
case 'D': {
double value;
in >> value;
return QVariant::fromValue(value);
}
case 'L': {
qint64 value;
in >> value;
return QVariant::fromValue(value);
}
case 'f': {
return readArray<float>(in);
}
case 'd': {
return readArray<double>(in);
}
case 'l': {
return readArray<qint64>(in);
}
case 'i': {
return readArray<qint32>(in);
}
case 'b': {
return readArray<bool>(in);
}
case 'S':
case 'R': {
quint32 length;
in >> length;
return QVariant::fromValue(in.device()->read(length));
}
default:
throw QString("Unknown property type: ") + ch;
}
}
FBXNode parseFBXNode(QDataStream& in) {
quint32 endOffset;
quint32 propertyCount;
quint32 propertyListLength;
quint8 nameLength;
in >> endOffset;
in >> propertyCount;
in >> propertyListLength;
in >> nameLength;
FBXNode node;
const int MIN_VALID_OFFSET = 40;
if (endOffset < MIN_VALID_OFFSET || nameLength == 0) {
// use a null name to indicate a null node
return node;
}
node.name = in.device()->read(nameLength);
for (int i = 0; i < propertyCount; i++) {
node.properties.append(parseFBXProperty(in));
}
while (endOffset > in.device()->pos()) {
FBXNode child = parseFBXNode(in);
if (child.name.isNull()) {
return node;
} else {
node.children.append(child);
}
}
return node;
}
FBXNode parseFBX(QIODevice* device) {
QDataStream in(device);
in.setByteOrder(QDataStream::LittleEndian);
// see http://code.blender.org/index.php/2013/08/fbx-binary-file-format-specification/ for an explanation
// of the FBX format
// verify the prolog
const QByteArray EXPECTED_PROLOG = "Kaydara FBX Binary ";
if (device->read(EXPECTED_PROLOG.size()) != EXPECTED_PROLOG) {
throw QString("Invalid header.");
}
// skip the rest of the header
const int HEADER_SIZE = 27;
in.skipRawData(HEADER_SIZE - EXPECTED_PROLOG.size());
// parse the top-level node
FBXNode top;
while (device->bytesAvailable()) {
FBXNode next = parseFBXNode(in);
if (next.name.isNull()) {
return top;
} else {
top.children.append(next);
}
}
return top;
}
QVector<glm::vec3> createVec3Vector(const QVector<double>& doubleVector) {
QVector<glm::vec3> values;
for (const double* it = doubleVector.constData(), *end = it + doubleVector.size(); it != end; ) {
values.append(glm::vec3(*it++, *it++, *it++));
}
return values;
}
const char* FACESHIFT_BLENDSHAPES[] = {
"EyeBlink_L",
"EyeBlink_R",
"EyeSquint_L",
"EyeSquint_R",
"EyeDown_L",
"EyeDown_R",
"EyeIn_L",
"EyeIn_R",
"EyeOpen_L",
"EyeOpen_R",
"EyeOut_L",
"EyeOut_R",
"EyeUp_L",
"EyeUp_R",
"BrowsD_L",
"BrowsD_R",
"BrowsU_C",
"BrowsU_L",
"BrowsU_R",
"JawFwd",
"JawLeft",
"JawOpen",
"JawChew",
"JawRight",
"MouthLeft",
"MouthRight",
"MouthFrown_L",
"MouthFrown_R",
"MouthSmile_L",
"MouthSmile_R",
"MouthDimple_L",
"MouthDimple_R",
"LipsStretch_L",
"LipsStretch_R",
"LipsUpperClose",
"LipsLowerClose",
"LipsUpperUp",
"LipsLowerDown",
"LipsUpperOpen",
"LipsLowerOpen",
"LipsFunnel",
"LipsPucker",
"ChinLowerRaise",
"ChinUpperRaise",
"Sneer",
"Puff",
"CheekSquint_L",
"CheekSquint_R",
""
};
QHash<QByteArray, int> createBlendshapeMap() {
QHash<QByteArray, int> map;
for (int i = 0;; i++) {
QByteArray name = FACESHIFT_BLENDSHAPES[i];
if (name != "") {
map.insert(name, i);
} else {
return map;
}
}
}
FBXGeometry extractFBXGeometry(const FBXNode& node) {
FBXGeometry geometry;
foreach (const FBXNode& child, node.children) {
if (child.name == "Objects") {
foreach (const FBXNode& object, child.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
QVector<glm::vec3> vertices;
QVector<int> polygonIndices;
foreach (const FBXNode& data, object.children) {
if (data.name == "Vertices") {
geometry.vertices = createVec3Vector(data.properties.at(0).value<QVector<double> >());
} else if (data.name == "PolygonVertexIndex") {
polygonIndices = data.properties.at(0).value<QVector<int> >();
} else if (data.name == "LayerElementNormal") {
foreach (const FBXNode& subdata, data.children) {
if (subdata.name == "Normals") {
geometry.normals = createVec3Vector(
subdata.properties.at(0).value<QVector<double> >());
}
}
}
}
// convert the polygons to quads and triangles
for (const int* beginIndex = polygonIndices.constData(), *end = beginIndex + polygonIndices.size();
beginIndex != end; ) {
const int* endIndex = beginIndex;
while (*endIndex++ >= 0);
if (endIndex - beginIndex == 4) {
geometry.quadIndices.append(*beginIndex++);
geometry.quadIndices.append(*beginIndex++);
geometry.quadIndices.append(*beginIndex++);
geometry.quadIndices.append(-*beginIndex++ - 1);
} else {
for (const int* nextIndex = beginIndex + 1;; ) {
geometry.triangleIndices.append(*beginIndex);
geometry.triangleIndices.append(*nextIndex++);
if (*nextIndex >= 0) {
geometry.triangleIndices.append(*nextIndex);
} else {
geometry.triangleIndices.append(-*nextIndex - 1);
break;
}
}
beginIndex = endIndex;
}
}
} else { // object.properties.at(2) == "Shape"
FBXBlendshape blendshape;
foreach (const FBXNode& data, object.children) {
if (data.name == "Indexes") {
blendshape.indices = data.properties.at(0).value<QVector<int> >();
} else if (data.name == "Vertices") {
blendshape.vertices = createVec3Vector(data.properties.at(0).value<QVector<double> >());
} else if (data.name == "Normals") {
blendshape.normals = createVec3Vector(data.properties.at(0).value<QVector<double> >());
}
}
// the name is followed by a null and some type info
QByteArray name = object.properties.at(1).toByteArray();
static QHash<QByteArray, int> blendshapeMap = createBlendshapeMap();
int index = blendshapeMap.value(name.left(name.indexOf('\0')));
geometry.blendshapes.resize(qMax(geometry.blendshapes.size(), index + 1));
geometry.blendshapes[index] = blendshape;
}
}
}
}
}
return geometry;
}
void printNode(const FBXNode& node, int indent) {
QByteArray spaces(indent, ' ');
qDebug("%s%s: ", spaces.data(), node.name.data());
foreach (const QVariant& property, node.properties) {
qDebug() << property;
}
qDebug() << "\n";
foreach (const FBXNode& child, node.children) {
printNode(child, indent + 1);
}
}
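One detail behind the polygon conversion in extractFBXGeometry above: FBX marks the last vertex of each polygon by storing its index bitwise-negated, so the real index is recovered as -stored - 1 (a quad 0 1 2 3 is stored as 0 1 2 -4). A small sketch of that decoding, separate from the reader itself:

#include <QVector>

// Sketch: recover plain polygon index lists from FBX's negative-terminated encoding.
// A stored value v < 0 means "last vertex of this polygon"; the real index is -v - 1.
QVector<QVector<int> > decodePolygons(const QVector<int>& stored) {
    QVector<QVector<int> > polygons;
    QVector<int> current;
    foreach (int v, stored) {
        if (v < 0) {
            current.append(-v - 1); // terminator: unflip and close the polygon
            polygons.append(current);
            current.clear();
        } else {
            current.append(v);
        }
    }
    return polygons;
}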

View file

@ -0,0 +1,66 @@
//
// FBXReader.h
// interface
//
// Created by Andrzej Kapolka on 9/18/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__FBXReader__
#define __interface__FBXReader__
#include <QVariant>
#include <QVector>
#include <glm/glm.hpp>
class QIODevice;
class FBXNode;
typedef QList<FBXNode> FBXNodeList;
/// A node within an FBX document.
class FBXNode {
public:
QByteArray name;
QVariantList properties;
FBXNodeList children;
};
/// A single blendshape extracted from an FBX document.
class FBXBlendshape {
public:
QVector<int> indices;
QVector<glm::vec3> vertices;
QVector<glm::vec3> normals;
};
/// Base geometry with blendshapes mapped by name.
class FBXGeometry {
public:
QVector<int> quadIndices;
QVector<int> triangleIndices;
QVector<glm::vec3> vertices;
QVector<glm::vec3> normals;
QVector<FBXBlendshape> blendshapes;
};
/// Parses the input from the supplied data as an FBX file.
/// \exception QString if an error occurs in parsing
FBXNode parseFBX(const QByteArray& data);
/// Parses the input from the supplied device as an FBX file.
/// \exception QString if an error occurs in parsing
FBXNode parseFBX(QIODevice* device);
/// Extracts the geometry from a parsed FBX node.
FBXGeometry extractFBXGeometry(const FBXNode& node);
void printNode(const FBXNode& node, int indent = 0);
#endif /* defined(__interface__FBXReader__) */
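A hedged usage sketch of this API, mirroring how BlendFace consumes it once the model download completes; the file path and function name here are illustrative, not part of the commit:

#include <QFile>
#include <QtDebug>
#include "renderer/FBXReader.h"

// Sketch only: load an FBX file from disk and extract its blendshape geometry.
// parseFBX/extractFBXGeometry throw a QString on malformed input, as documented above.
FBXGeometry loadFaceGeometry(const QString& path) {
    QFile file(path); // placeholder path supplied by the caller
    if (!file.open(QIODevice::ReadOnly)) {
        return FBXGeometry();
    }
    try {
        return extractFBXGeometry(parseFBX(file.readAll()));
    } catch (const QString& error) {
        qDebug() << error;
        return FBXGeometry();
    }
}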

View file

@ -204,6 +204,11 @@ int AvatarData::getBroadcastData(unsigned char* destinationBuffer) {
memcpy(destinationBuffer, &_headData->_browAudioLift, sizeof(float));
destinationBuffer += sizeof(float);
*destinationBuffer++ = _headData->_blendshapeCoefficients.size();
memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(),
_headData->_blendshapeCoefficients.size() * sizeof(float));
destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float);
}
// leap hand data
@ -334,6 +339,11 @@ int AvatarData::parseData(unsigned char* sourceBuffer, int numBytes) {
memcpy(&_headData->_browAudioLift, sourceBuffer, sizeof(float));
sourceBuffer += sizeof(float);
_headData->_blendshapeCoefficients.resize(*sourceBuffer++);
memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer,
_headData->_blendshapeCoefficients.size() * sizeof(float));
sourceBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float);
}
// leap hand data

View file

@ -10,6 +10,7 @@
#define __hifi__HeadData__
#include <iostream>
#include <vector>
#include <glm/glm.hpp>
@ -43,6 +44,8 @@ public:
void setAudioLoudness(float audioLoudness) { _audioLoudness = audioLoudness; }
const std::vector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
void addYaw(float yaw);
void addPitch(float pitch);
void addRoll(float roll);
@ -52,6 +55,7 @@ public:
void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; }
friend class AvatarData;
protected:
float _yaw;
float _pitch;
@ -65,7 +69,9 @@ protected:
float _rightEyeBlink;
float _averageLoudness;
float _browAudioLift;
std::vector<float> _blendshapeCoefficients;
AvatarData* _owningAvatar;
private:
// privatize copy ctor and assignment operator so copies of this object cannot be made
HeadData(const HeadData&);

View file

@ -20,8 +20,11 @@ PACKET_VERSION versionForPacketType(PACKET_TYPE type) {
return 1;
case PACKET_TYPE_HEAD_DATA:
return 6;
return 7;
case PACKET_TYPE_AVATAR_URLS:
return 1;
case PACKET_TYPE_AVATAR_FACE_VIDEO:
return 1;

View file

@ -29,7 +29,7 @@ const PACKET_TYPE PACKET_TYPE_ERASE_VOXEL = 'E';
const PACKET_TYPE PACKET_TYPE_VOXEL_DATA = 'V';
const PACKET_TYPE PACKET_TYPE_VOXEL_DATA_MONOCHROME = 'v';
const PACKET_TYPE PACKET_TYPE_BULK_AVATAR_DATA = 'X';
const PACKET_TYPE PACKET_TYPE_AVATAR_VOXEL_URL = 'U';
const PACKET_TYPE PACKET_TYPE_AVATAR_URLS = 'U';
const PACKET_TYPE PACKET_TYPE_AVATAR_FACE_VIDEO = 'F';
const PACKET_TYPE PACKET_TYPE_TRANSMITTER_DATA_V2 = 'T';
const PACKET_TYPE PACKET_TYPE_ENVIRONMENT_DATA = 'e';