Merge pull request #2051 from ey6es/bilbo

Simple avatar billboards distributed through avatar mixer.
Philip Rosedale 2014-02-23 17:20:00 -08:00
commit 1abf0926d3
20 changed files with 382 additions and 82 deletions
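
In outline, the flow this change adds (pieced together from the diff below): MyAvatar renders a 64x64 snapshot of itself through Application::renderAvatarBillboard(), encodes it as a PNG, and sends it to the avatar mixer in a PacketTypeAvatarBillboard packet. The mixer relays the image, prefixed with the sender's UUID, to the other agents: immediately when it changes (at most once between keyframes) and again on a five-second keyframe timer. Receiving clients keep the PNG on the Avatar and, once the avatar's scale-normalized LOD distance passes a threshold, draw it as a camera-facing textured quad instead of the full skeleton and face models.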


@ -123,6 +123,30 @@ void broadcastIdentityPacket() {
}
}
void broadcastBillboardPacket(const SharedNodePointer& sendingNode) {
AvatarMixerClientData* nodeData = static_cast<AvatarMixerClientData*>(sendingNode->getLinkedData());
QByteArray packet = byteArrayWithPopulatedHeader(PacketTypeAvatarBillboard);
packet.append(sendingNode->getUUID().toRfc4122());
packet.append(nodeData->getBillboard());
NodeList* nodeList = NodeList::getInstance();
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
if (node->getType() == NodeType::Agent && node != sendingNode) {
nodeList->writeDatagram(packet, node);
}
}
}
void broadcastBillboardPackets() {
foreach (const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) {
if (node->getLinkedData() && node->getType() == NodeType::Agent) {
AvatarMixerClientData* nodeData = static_cast<AvatarMixerClientData*>(node->getLinkedData());
broadcastBillboardPacket(node);
nodeData->setHasSentBillboardBetweenKeyFrames(false);
}
}
}
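
The two broadcast functions above and the PacketTypeAvatarBillboard handler in readPendingDatagrams() further down implement the mixer's distribution policy: a changed billboard is forwarded at most once per keyframe interval, and the periodic keyframe broadcast re-arms that forwarding. A minimal standalone sketch of the pattern (the names here are illustrative, not from the diff):

struct BillboardRelayState {
    bool sentBetweenKeyFrames = false; // mirrors AvatarMixerClientData::_hasSentBillboardBetweenKeyFrames
};

// called when a PacketTypeAvatarBillboard arrives and the billboard actually changed
void onBillboardChanged(BillboardRelayState& state) {
    if (!state.sentBetweenKeyFrames) {
        // forward the new image right away (broadcastBillboardPacket in the real code),
        // but only once until the next keyframe
        state.sentBetweenKeyFrames = true;
    }
}

// called every AVATAR_BILLBOARD_KEYFRAME_MSECS from AvatarMixer::run()
void onKeyFrame(BillboardRelayState& state) {
    // everyone receives a fresh copy here (broadcastBillboardPackets in the real code),
    // and immediate forwarding is re-armed
    state.sentBetweenKeyFrames = false;
}
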
void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
if (killedNode->getType() == NodeType::Agent
&& killedNode->getLinkedData()) {
@ -170,6 +194,23 @@ void AvatarMixer::readPendingDatagrams() {
nodeList->broadcastToNodes(identityPacket, NodeSet() << NodeType::Agent);
}
}
break;
}
case PacketTypeAvatarBillboard: {
// check if we have a matching node in our list
SharedNodePointer avatarNode = nodeList->sendingNodeForPacket(receivedPacket);
if (avatarNode && avatarNode->getLinkedData()) {
AvatarMixerClientData* nodeData = static_cast<AvatarMixerClientData*>(avatarNode->getLinkedData());
if (nodeData->hasBillboardChangedAfterParsing(receivedPacket)
&& !nodeData->hasSentBillboardBetweenKeyFrames()) {
// this avatar changed their billboard and we haven't sent a packet in this keyframe
broadcastBillboardPacket(avatarNode);
nodeData->setHasSentBillboardBetweenKeyFrames(true);
}
}
break;
}
case PacketTypeKillAvatar: {
nodeList->processKillNode(receivedPacket);
@ -185,6 +226,7 @@ void AvatarMixer::readPendingDatagrams() {
}
const qint64 AVATAR_IDENTITY_KEYFRAME_MSECS = 5000;
const qint64 AVATAR_BILLBOARD_KEYFRAME_MSECS = 5000;
void AvatarMixer::run() {
commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
@ -202,6 +244,9 @@ void AvatarMixer::run() {
QElapsedTimer identityTimer;
identityTimer.start();
QElapsedTimer billboardTimer;
billboardTimer.start();
while (!_isFinished) {
QCoreApplication::processEvents();
@ -219,6 +264,11 @@ void AvatarMixer::run() {
// restart the timer so we do it again in AVATAR_IDENTITY_KEYFRAME_MSECS
identityTimer.restart();
}
if (billboardTimer.elapsed() >= AVATAR_BILLBOARD_KEYFRAME_MSECS) {
broadcastBillboardPackets();
billboardTimer.restart();
}
int usecToSleep = usecTimestamp(&startTime) + (++nextFrame * AVATAR_DATA_SEND_INTERVAL_USECS) - usecTimestampNow();
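
The sleep computation above paces the mixer loop at a fixed send rate: each iteration computes the absolute timestamp at which the next frame should start and sleeps for whatever remains. A small sketch of the same arithmetic (illustrative helper; AVATAR_DATA_SEND_INTERVAL_USECS and the usec timestamp helpers come from the existing codebase):

#include <cstdint>

// Returns how long to sleep, in microseconds, so that frame number `nextFrame` starts on
// schedule; a negative result means the loop is running behind and should not sleep.
int64_t usecUntilFrame(uint64_t startUsec, uint64_t nextFrame, uint64_t sendIntervalUsec, uint64_t nowUsec) {
    return (int64_t)(startUsec + nextFrame * sendIntervalUsec) - (int64_t)nowUsec;
}
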


@ -9,7 +9,8 @@
#include "AvatarMixerClientData.h"
AvatarMixerClientData::AvatarMixerClientData() :
_hasSentIdentityBetweenKeyFrames(false),
_hasSentBillboardBetweenKeyFrames(false)
{
}


@ -21,9 +21,15 @@ public:
bool hasSentIdentityBetweenKeyFrames() const { return _hasSentIdentityBetweenKeyFrames; }
void setHasSentIdentityBetweenKeyFrames(bool hasSentIdentityBetweenKeyFrames)
{ _hasSentIdentityBetweenKeyFrames = hasSentIdentityBetweenKeyFrames; }
bool hasSentBillboardBetweenKeyFrames() const { return _hasSentBillboardBetweenKeyFrames; }
void setHasSentBillboardBetweenKeyFrames(bool hasSentBillboardBetweenKeyFrames)
{ _hasSentBillboardBetweenKeyFrames = hasSentBillboardBetweenKeyFrames; }
private:
bool _hasSentIdentityBetweenKeyFrames;
bool _hasSentBillboardBetweenKeyFrames;
};
#endif /* defined(__hifi__AvatarMixerClientData__) */


@ -98,6 +98,7 @@ const int MIRROR_VIEW_HEIGHT = 215;
const float MIRROR_FULLSCREEN_DISTANCE = 0.35f;
const float MIRROR_REARVIEW_DISTANCE = 0.65f;
const float MIRROR_REARVIEW_BODY_DISTANCE = 2.3f;
const float MIRROR_FIELD_OF_VIEW = 30.0f;
const QString CHECK_VERSION_URL = "http://highfidelity.io/latestVersion.xml";
const QString SKIP_FILENAME = QStandardPaths::writableLocation(QStandardPaths::DataLocation) + "/hifi.skipversion";
@ -255,6 +256,11 @@ Application::Application(int& argc, char** argv, timeval &startup_time) :
connect(identityPacketTimer, &QTimer::timeout, _myAvatar, &MyAvatar::sendIdentityPacket);
identityPacketTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS);
// send the billboard packet for our avatar every few seconds
QTimer* billboardPacketTimer = new QTimer();
connect(billboardPacketTimer, &QTimer::timeout, _myAvatar, &MyAvatar::sendBillboardPacket);
billboardPacketTimer->start(AVATAR_BILLBOARD_PACKET_SEND_INTERVAL_MSECS);
QString cachePath = QStandardPaths::writableLocation(QStandardPaths::DataLocation);
_networkAccessManager = new QNetworkAccessManager(this);
@ -527,73 +533,8 @@ void Application::paintGL() {
_glowEffect.render();
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(_mirrorViewRect);
} else if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
_rearMirrorTools->render(true);
}
@ -2743,6 +2684,24 @@ void Application::setupWorldLight() {
glMateriali(GL_FRONT, GL_SHININESS, 96);
}
QImage Application::renderAvatarBillboard() {
_textureCache.getPrimaryFramebufferObject()->bind();
glDisable(GL_BLEND);
const int BILLBOARD_SIZE = 64;
renderRearViewMirror(QRect(0, _glWidget->height() - BILLBOARD_SIZE, BILLBOARD_SIZE, BILLBOARD_SIZE), true);
QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
glReadPixels(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE, GL_BGRA, GL_UNSIGNED_BYTE, image.bits());
glEnable(GL_BLEND);
_textureCache.getPrimaryFramebufferObject()->release();
return image;
}
void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::displaySide()");
// transform by eye offset
@ -3660,6 +3619,84 @@ void Application::renderCoverageMapsRecursively(CoverageMap* map) {
}
}
void Application::renderRearViewMirror(const QRect& region, bool billboard) {
bool eyeRelativeCamera = false;
if (billboard) {
_mirrorCamera.setFieldOfView(BILLBOARD_FIELD_OF_VIEW);
_mirrorCamera.setDistance(BILLBOARD_DISTANCE * _myAvatar->getScale());
_mirrorCamera.setTargetPosition(_myAvatar->getPosition());
} else if (_rearMirrorTools->getZoomLevel() == BODY) {
_mirrorCamera.setFieldOfView(MIRROR_FIELD_OF_VIEW);
_mirrorCamera.setDistance(MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
_mirrorCamera.setTargetPosition(_myAvatar->getChestPosition());
} else { // HEAD zoom level
_mirrorCamera.setFieldOfView(MIRROR_FIELD_OF_VIEW);
_mirrorCamera.setDistance(MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
if (_myAvatar->getSkeletonModel().isActive() && _myAvatar->getHead()->getFaceModel().isActive()) {
// as a hack until we have a better way of dealing with coordinate precision issues, reposition the
// face/body so that the average eye position lies at the origin
eyeRelativeCamera = true;
_mirrorCamera.setTargetPosition(glm::vec3());
} else {
_mirrorCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
}
}
_mirrorCamera.setAspectRatio((float)region.width() / region.height());
_mirrorCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PIf, 0.0f)));
_mirrorCamera.update(1.0f/_fps);
// set the bounds of rear mirror view
glViewport(region.x(), _glWidget->height() - region.y() - region.height(), region.width(), region.height());
glScissor(region.x(), _glWidget->height() - region.y() - region.height(), region.width(), region.height());
bool updateViewFrustum = false;
updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
glEnable(GL_SCISSOR_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// render rear mirror view
glPushMatrix();
if (eyeRelativeCamera) {
// save absolute translations
glm::vec3 absoluteSkeletonTranslation = _myAvatar->getSkeletonModel().getTranslation();
glm::vec3 absoluteFaceTranslation = _myAvatar->getHead()->getFaceModel().getTranslation();
// get the eye positions relative to the neck and use them to set the face translation
glm::vec3 leftEyePosition, rightEyePosition;
_myAvatar->getHead()->getFaceModel().setTranslation(glm::vec3());
_myAvatar->getHead()->getFaceModel().getEyePositions(leftEyePosition, rightEyePosition);
_myAvatar->getHead()->getFaceModel().setTranslation((leftEyePosition + rightEyePosition) * -0.5f);
// get the neck position relative to the body and use it to set the skeleton translation
glm::vec3 neckPosition;
_myAvatar->getSkeletonModel().setTranslation(glm::vec3());
_myAvatar->getSkeletonModel().getNeckPosition(neckPosition);
_myAvatar->getSkeletonModel().setTranslation(_myAvatar->getHead()->getFaceModel().getTranslation() -
neckPosition);
displaySide(_mirrorCamera, true);
// restore absolute translations
_myAvatar->getSkeletonModel().setTranslation(absoluteSkeletonTranslation);
_myAvatar->getHead()->getFaceModel().setTranslation(absoluteFaceTranslation);
} else {
displaySide(_mirrorCamera, true);
}
glPopMatrix();
if (!billboard) {
_rearMirrorTools->render(false);
}
// reset Viewport and projection matrix
glViewport(0, 0, _glWidget->width(), _glWidget->height());
glDisable(GL_SCISSOR_TEST);
updateProjectionMatrix(_myCamera, updateViewFrustum);
}
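
The glViewport/glScissor calls above convert the Qt-style region, whose origin is the top-left corner of the window, into GL's bottom-left convention. A standalone helper doing the same conversion (illustrative only, not part of the diff):

#include <QRect>

struct GLRect { int x, y, width, height; };

// Qt rects measure y from the top of the widget; GL viewports and scissor boxes measure y
// from the bottom, so the rect's y is flipped against the window height.
GLRect toGLRect(const QRect& region, int windowHeight) {
    return { region.x(), windowHeight - region.y() - region.height(), region.width(), region.height() };
}
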
// renderViewFrustum()
//
// Description: this will render the view frustum bounds for EITHER the head


@ -14,6 +14,7 @@
#include <QApplication>
#include <QAction>
#include <QImage>
#include <QSettings>
#include <QTouchEvent>
#include <QList>
@ -96,6 +97,9 @@ static const float NODE_KILLED_BLUE = 0.0f;
static const QString SNAPSHOT_EXTENSION = ".jpg";
static const float BILLBOARD_FIELD_OF_VIEW = 30.0f;
static const float BILLBOARD_DISTANCE = 5.0f;
class Application : public QApplication {
Q_OBJECT
@ -185,6 +189,8 @@ public:
void setupWorldLight();
QImage renderAvatarBillboard();
void displaySide(Camera& whichCamera, bool selfAvatarOnly = false);
/// Loads a view matrix that incorporates the specified model translation without the precision issues that can
@ -200,6 +206,8 @@ public:
void computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) const;
VoxelShader& getVoxelShader() { return _voxelShader; }
PointShader& getPointShader() { return _pointShader; }
FileLogger* getLogger() { return _logger; }
@ -328,7 +336,7 @@ private:
void displayStats();
void checkStatsClick();
void toggleStatsExpanded();
void renderAvatars(bool forceRenderHead, bool selfAvatarOnly = false);
void renderRearViewMirror(const QRect& region, bool billboard = false);
void renderViewFrustum(ViewFrustum& viewFrustum);
void checkBandwidthMeterClick();


@ -98,7 +98,8 @@ void DatagramProcessor::processDatagrams() {
break;
case PacketTypeBulkAvatarData:
case PacketTypeKillAvatar:
case PacketTypeAvatarIdentity:
case PacketTypeAvatarBillboard: {
// update having heard from the avatar-mixer and record the bytes received
SharedNodePointer avatarMixer = nodeList->sendingNodeForPacket(incomingPacket);


@ -26,6 +26,7 @@
#include "Physics.h"
#include "world.h"
#include "devices/OculusManager.h"
#include "renderer/TextureCache.h"
#include "ui/TextRenderer.h"
using namespace std;
@ -107,6 +108,10 @@ glm::quat Avatar::getWorldAlignedOrientation () const {
return computeRotationFromBodyToWorldUp() * getOrientation();
}
float Avatar::getLODDistance() const {
return glm::distance(Application::getInstance()->getCamera()->getPosition(), _position) / _scale;
}
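
getLODDistance() divides the camera distance by the avatar's scale, so LOD decisions (including the billboard cutoff added to renderBody() below) scale with avatar size. A minimal sketch with a worked number, assuming the BILLBOARD_DISTANCE of 40 used in renderBody():

#include <glm/glm.hpp>

float lodDistance(const glm::vec3& cameraPosition, const glm::vec3& avatarPosition, float scale) {
    return glm::distance(cameraPosition, avatarPosition) / scale;
}

// An avatar at double scale that is 80 meters from the camera has a LOD distance of 40, so it
// switches to its billboard at twice the world-space range of a normal-scale avatar.
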
void Avatar::simulate(float deltaTime) {
if (_scale != _targetScale) {
setScale(_targetScale);
@ -116,6 +121,7 @@ void Avatar::simulate(float deltaTime) {
glm::vec3 oldVelocity = getVelocity();
getHand()->simulate(deltaTime, false);
_skeletonModel.setLODDistance(getLODDistance());
_skeletonModel.simulate(deltaTime);
Head* head = getHead();
head->setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll));
@ -282,9 +288,11 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
}
void Avatar::renderBody(bool forceRenderHead) {
// Render the body's voxels and head
const float BILLBOARD_DISTANCE = 40.0f;
if (!_billboard.isEmpty() && getLODDistance() >= BILLBOARD_DISTANCE) {
renderBillboard();
return;
}
_skeletonModel.render(1.0f);
if (forceRenderHead) {
getHead()->render(1.0f);
@ -292,6 +300,62 @@ void Avatar::renderBody(bool forceRenderHead) {
getHand()->render(false);
}
void Avatar::renderBillboard() {
if (!_billboardTexture) {
QImage image = QImage::fromData(_billboard).convertToFormat(QImage::Format_ARGB32);
_billboardTexture.reset(new Texture());
glBindTexture(GL_TEXTURE_2D, _billboardTexture->getID());
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image.width(), image.height(), 1,
GL_BGRA, GL_UNSIGNED_BYTE, image.constBits());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
} else {
glBindTexture(GL_TEXTURE_2D, _billboardTexture->getID());
}
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.5f);
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
glPushMatrix();
glTranslatef(_position.x, _position.y, _position.z);
// rotate about vertical to face the camera
glm::quat rotation = getOrientation();
glm::vec3 cameraVector = glm::inverse(rotation) * (Application::getInstance()->getCamera()->getPosition() - _position);
rotation = rotation * glm::angleAxis(glm::degrees(atan2f(-cameraVector.x, -cameraVector.z)), 0.0f, 1.0f, 0.0f);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::angle(rotation), axis.x, axis.y, axis.z);
// compute the size from the billboard camera parameters and scale
float size = _scale * BILLBOARD_DISTANCE * tanf(glm::radians(BILLBOARD_FIELD_OF_VIEW / 2.0f));
glScalef(size, size, size);
glColor3f(1.0f, 1.0f, 1.0f);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f);
glVertex2f(-1.0f, -1.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex2f(1.0f, -1.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex2f(1.0f, 1.0f);
glTexCoord2f(0.0f, 1.0f);
glVertex2f(-1.0f, 1.0f);
glEnd();
glPopMatrix();
glDisable(GL_TEXTURE_2D);
glEnable(GL_LIGHTING);
glDisable(GL_ALPHA_TEST);
glBindTexture(GL_TEXTURE_2D, 0);
}
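
The quad size above is chosen so that, viewed from BILLBOARD_DISTANCE away, the billboard covers the same vertical extent the capture camera saw in renderAvatarBillboard(). A quick standalone check of that arithmetic, assuming the constants from Application.h (30 degrees, 5 meters):

#include <cmath>
#include <cstdio>

int main() {
    const float BILLBOARD_FIELD_OF_VIEW = 30.0f;   // degrees, as in Application.h
    const float BILLBOARD_DISTANCE = 5.0f;         // meters, as in Application.h
    const float DEGREES_TO_RADIANS = 3.14159265f / 180.0f;
    float scale = 1.0f;                            // default avatar scale
    float size = scale * BILLBOARD_DISTANCE * tanf(BILLBOARD_FIELD_OF_VIEW * 0.5f * DEGREES_TO_RADIANS);
    // half-height ~1.34 m, so the two-unit quad spans ~2.68 m for a normal-scale avatar
    printf("half-height %.2f m, full quad %.2f m\n", size, 2.0f * size);
    return 0;
}
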
void Avatar::renderDisplayName() {
if (_displayName.isEmpty() || _displayNameAlpha == 0.0f) {
@ -502,6 +566,13 @@ void Avatar::setDisplayName(const QString& displayName) {
_displayNameBoundingRect = textRenderer(DISPLAYNAME)->metrics().tightBoundingRect(displayName);
}
void Avatar::setBillboard(const QByteArray& billboard) {
AvatarData::setBillboard(billboard);
// clear out any existing billboard texture
_billboardTexture.reset();
}
int Avatar::parseData(const QByteArray& packet) {
// change in position implies movement
glm::vec3 oldPosition = _position;


@ -11,6 +11,7 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <QtCore/QScopedPointer>
#include <QtCore/QUuid>
#include <AvatarData.h>
@ -62,6 +63,8 @@ enum ScreenTintLayer {
// Grayson as he's building a street around here for demo dinner 2
const glm::vec3 START_LOCATION(0.485f * TREE_SCALE, 0.f, 0.5f * TREE_SCALE);
class Texture;
class Avatar : public AvatarData {
Q_OBJECT
@ -87,6 +90,9 @@ public:
Head* getHead() { return static_cast<Head*>(_headData); }
Hand* getHand() { return static_cast<Hand*>(_handData); }
glm::quat getWorldAlignedOrientation() const;
/// Returns the distance to use as a LOD parameter.
float getLODDistance() const;
Node* getOwningAvatarMixer() { return _owningAvatarMixer.data(); }
void setOwningAvatarMixer(const QWeakPointer<Node>& owningAvatarMixer) { _owningAvatarMixer = owningAvatarMixer; }
@ -114,6 +120,7 @@ public:
virtual void setFaceModelURL(const QUrl& faceModelURL);
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setDisplayName(const QString& displayName);
virtual void setBillboard(const QByteArray& billboard);
void setShowDisplayName(bool showDisplayName);
@ -167,8 +174,10 @@ protected:
private:
bool _initialized;
QScopedPointer<Texture> _billboardTexture;
void renderBody(bool forceRenderHead);
void renderBillboard();
void renderDisplayName();
};


@ -133,6 +133,9 @@ void AvatarManager::processAvatarMixerDatagram(const QByteArray& datagram, const
case PacketTypeAvatarIdentity:
processAvatarIdentityPacket(datagram);
break;
case PacketTypeAvatarBillboard:
processAvatarBillboardPacket(datagram);
break;
case PacketTypeKillAvatar:
processKillAvatar(datagram);
break;
@ -212,6 +215,20 @@ void AvatarManager::processAvatarIdentityPacket(const QByteArray &packet) {
}
}
void AvatarManager::processAvatarBillboardPacket(const QByteArray& packet) {
int headerSize = numBytesForPacketHeader(packet);
QUuid nodeUUID = QUuid::fromRfc4122(QByteArray::fromRawData(packet.constData() + headerSize, NUM_BYTES_RFC4122_UUID));
AvatarSharedPointer matchingAvatar = _avatarHash.value(nodeUUID);
if (matchingAvatar) {
Avatar* avatar = static_cast<Avatar*>(matchingAvatar.data());
QByteArray billboard = packet.mid(headerSize + NUM_BYTES_RFC4122_UUID);
if (avatar->getBillboard() != billboard) {
avatar->setBillboard(billboard);
}
}
}
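
For reference, the payload handled above is the mixer-relayed form of the billboard packet: the packet header, a 16-byte RFC 4122 UUID identifying the sender, then the PNG bytes (the client-to-mixer packet sent by AvatarData::sendBillboardPacket() carries only the PNG). A standalone sketch of that split (illustrative helper, not part of the diff):

#include <QByteArray>
#include <QUuid>

struct BillboardPayload {
    QUuid sender;    // whose billboard this is
    QByteArray png;  // PNG-encoded 64x64 image
};

BillboardPayload splitBillboardPayload(const QByteArray& packet, int headerSize) {
    const int UUID_BYTES = 16; // NUM_BYTES_RFC4122_UUID in the codebase
    BillboardPayload payload;
    payload.sender = QUuid::fromRfc4122(packet.mid(headerSize, UUID_BYTES));
    payload.png = packet.mid(headerSize + UUID_BYTES);
    return payload;
}
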
void AvatarManager::processKillAvatar(const QByteArray& datagram) {
// read the node id
QUuid nodeUUID = QUuid::fromRfc4122(datagram.mid(numBytesForPacketHeader(datagram), NUM_BYTES_RFC4122_UUID));


@ -42,6 +42,7 @@ private:
void processAvatarDataPacket(const QByteArray& packet, const QWeakPointer<Node>& mixerWeakPointer);
void processAvatarIdentityPacket(const QByteArray& packet);
void processAvatarBillboardPacket(const QByteArray& packet);
void processKillAvatar(const QByteArray& datagram);
void simulateAvatarFades(float deltaTime);


@ -158,6 +158,9 @@ void Head::simulate(float deltaTime, bool isMine) {
glm::clamp(sqrt(_averageLoudness * JAW_OPEN_SCALE) - JAW_OPEN_DEAD_ZONE, 0.0f, 1.0f), _blendshapeCoefficients);
}
if (!isMine) {
_faceModel.setLODDistance(static_cast<Avatar*>(_owningAvatar)->getLODDistance());
}
_faceModel.simulate(deltaTime);
// the blend face may have custom eye meshes


@ -9,6 +9,8 @@
#include <algorithm>
#include <vector>
#include <QBuffer>
#include <glm/gtx/vector_angle.hpp>
#include <NodeList.h>
@ -57,7 +59,8 @@ MyAvatar::MyAvatar() :
_thrustMultiplier(1.0f),
_moveTarget(0,0,0),
_moveTargetStepCounter(0),
_lookAtTargetAvatar(),
_billboardValid(false)
{
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
_driveKeys[i] = 0.0f;
@ -332,7 +335,9 @@ void MyAvatar::simulate(float deltaTime) {
// Zero thrust out now that we've added it to velocity in this frame
_thrust = glm::vec3(0, 0, 0);
// consider updating our billboard
maybeUpdateBillboard();
}
const float MAX_PITCH = 90.0f;
@ -705,6 +710,16 @@ glm::vec3 MyAvatar::getUprightHeadPosition() const {
return _position + getWorldAlignedOrientation() * glm::vec3(0.0f, getPelvisToHeadLength(), 0.0f);
}
void MyAvatar::setFaceModelURL(const QUrl& faceModelURL) {
Avatar::setFaceModelURL(faceModelURL);
_billboardValid = false;
}
void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
Avatar::setSkeletonModelURL(skeletonModelURL);
_billboardValid = false;
}
void MyAvatar::renderBody(bool forceRenderHead) {
// Render the body's voxels and head
_skeletonModel.render(1.0f);
@ -1124,6 +1139,20 @@ void MyAvatar::updateChatCircle(float deltaTime) {
_position = glm::mix(_position, targetPosition, APPROACH_RATE);
}
void MyAvatar::maybeUpdateBillboard() {
if (_billboardValid || !(_skeletonModel.isLoadedWithTextures() && getHead()->getFaceModel().isLoadedWithTextures())) {
return;
}
QImage image = Application::getInstance()->renderAvatarBillboard();
_billboard.clear();
QBuffer buffer(&_billboard);
buffer.open(QIODevice::WriteOnly);
image.save(&buffer, "PNG");
_billboardValid = true;
sendBillboardPacket();
}
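
maybeUpdateBillboard() above is the encoding half of the billboard round trip; Avatar::renderBillboard() decodes the same bytes on the receiving side. A minimal Qt-only sketch of both halves (illustrative, not part of the diff):

#include <QBuffer>
#include <QByteArray>
#include <QImage>

QByteArray encodeBillboard(const QImage& rendered) {
    QByteArray bytes;
    QBuffer buffer(&bytes);
    buffer.open(QIODevice::WriteOnly);
    rendered.save(&buffer, "PNG");  // the same encoding MyAvatar sends over the wire
    return bytes;
}

QImage decodeBillboard(const QByteArray& bytes) {
    // matches the conversion Avatar::renderBillboard() performs before uploading its texture
    return QImage::fromData(bytes).convertToFormat(QImage::Format_ARGB32);
}
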
void MyAvatar::setGravity(glm::vec3 gravity) {
_gravity = gravity;
getHead()->setGravity(_gravity);


@ -84,6 +84,9 @@ public:
void updateLookAtTargetAvatar(glm::vec3& eyePosition);
void clearLookAtTargetAvatar();
virtual void setFaceModelURL(const QUrl& faceModelURL);
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
public slots:
void goHome();
void increaseSize();
@ -118,6 +121,8 @@ private:
glm::vec3 _transmitterPickStart;
glm::vec3 _transmitterPickEnd;
bool _billboardValid;
// private methods
void renderBody(bool forceRenderHead);
void updateThrust(float deltaTime);
@ -128,6 +133,7 @@ private:
void applyHardCollision(const glm::vec3& penetration, float elasticity, float damping);
void updateCollisionSound(const glm::vec3& penetration, float deltaTime, float frequency);
void updateChatCircle(float deltaTime);
void maybeUpdateBillboard();
};
#endif


@ -17,8 +17,8 @@ using namespace std;
Model::Model(QObject* parent) :
QObject(parent),
_lodDistance(0.0f),
_pupilDilation(0.0f) {
// we may have been created in the network thread, but we live in the main thread
moveToThread(Application::getInstance()->thread());
}
@ -46,6 +46,21 @@ void Model::initSkinProgram(ProgramObject& program, Model::SkinLocations& locati
program.release();
}
bool Model::isLoadedWithTextures() const {
if (!isActive()) {
return false;
}
foreach (const NetworkMesh& mesh, _geometry->getMeshes()) {
foreach (const NetworkMeshPart& part, mesh.parts) {
if ((part.diffuseTexture && !part.diffuseTexture->isLoaded()) ||
(part.normalTexture && !part.normalTexture->isLoaded())) {
return false;
}
}
}
return true;
}
void Model::init() {
if (!_program.isLinked()) {
switchToResourcesParentIfRequired();
@ -92,8 +107,7 @@ void Model::reset() {
void Model::simulate(float deltaTime) {
// update our LOD
if (_geometry) {
QSharedPointer<NetworkGeometry> geometry = _geometry->getLODOrFallback(_lodDistance, _lodHysteresis);
if (_geometry != geometry) {
deleteGeometry();
_dilatedTextures.clear();


@ -46,6 +46,8 @@ public:
bool isActive() const { return _geometry && _geometry->isLoaded(); }
bool isLoadedWithTextures() const;
void init();
void reset();
void simulate(float deltaTime);
@ -54,6 +56,9 @@ public:
Q_INVOKABLE void setURL(const QUrl& url, const QUrl& fallback = QUrl());
const QUrl& getURL() const { return _url; }
/// Sets the distance parameter used for LOD computations.
void setLODDistance(float distance) { _lodDistance = distance; }
/// Returns the extents of the model in its bind pose.
Extents getBindExtents() const;
@ -228,13 +233,14 @@ private:
void renderMeshes(float alpha, bool translucent);
QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base
float _lodDistance;
float _lodHysteresis;
float _pupilDilation;
std::vector<float> _blendshapeCoefficients;
QUrl _url;
QVector<GLuint> _blendedVertexBufferIDs;
QVector<QVector<QSharedPointer<Texture> > > _dilatedTextures;
bool _resetStates;


@ -258,9 +258,11 @@ NetworkTexture::NetworkTexture(const QUrl& url, bool normalMap) :
_reply(NULL),
_attempts(0),
_averageColor(1.0f, 1.0f, 1.0f, 1.0f),
_translucent(false),
_loaded(false) {
if (!url.isValid()) {
_loaded = true;
return;
}
_request.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::PreferCache);
@ -298,6 +300,7 @@ void NetworkTexture::handleDownloadProgress(qint64 bytesReceived, qint64 bytesTo
_reply->disconnect(this);
_reply->deleteLater();
_reply = NULL;
_loaded = true;
QImage image = QImage::fromData(entirety).convertToFormat(QImage::Format_ARGB32);
@ -345,6 +348,8 @@ void NetworkTexture::handleReplyError() {
QTimer::singleShot(BASE_DELAY_MS * (int)pow(2.0, _attempts), this, SLOT(makeRequest()));
debug << " -- retrying...";
} else {
_loaded = true;
}
}


@ -118,6 +118,8 @@ public:
NetworkTexture(const QUrl& url, bool normalMap);
~NetworkTexture();
bool isLoaded() const { return _loaded; }
/// Returns the average color over the entire texture.
const glm::vec4& getAverageColor() const { return _averageColor; }
@ -142,6 +144,7 @@ private:
int _attempts;
glm::vec4 _averageColor;
bool _translucent;
bool _loaded;
};
/// Caches derived, dilated textures.


@ -305,6 +305,15 @@ QByteArray AvatarData::identityByteArray() {
return identityData;
}
bool AvatarData::hasBillboardChangedAfterParsing(const QByteArray& packet) {
QByteArray newBillboard = packet.mid(numBytesForPacketHeader(packet));
if (newBillboard == _billboard) {
return false;
}
_billboard = newBillboard;
return true;
}
void AvatarData::setFaceModelURL(const QUrl& faceModelURL) {
_faceModelURL = faceModelURL.isEmpty() ? DEFAULT_HEAD_MODEL_URL : faceModelURL;
@ -323,6 +332,12 @@ void AvatarData::setDisplayName(const QString& displayName) {
qDebug() << "Changing display name for avatar to" << displayName;
}
void AvatarData::setBillboard(const QByteArray& billboard) {
_billboard = billboard;
qDebug() << "Changing billboard for avatar.";
}
void AvatarData::setClampedTargetScale(float targetScale) {
targetScale = glm::clamp(targetScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE);
@ -344,3 +359,10 @@ void AvatarData::sendIdentityPacket() {
NodeList::getInstance()->broadcastToNodes(identityPacket, NodeSet() << NodeType::AvatarMixer);
}
void AvatarData::sendBillboardPacket() {
QByteArray billboardPacket = byteArrayWithPopulatedHeader(PacketTypeAvatarBillboard);
billboardPacket.append(_billboard);
NodeList::getInstance()->broadcastToNodes(billboardPacket, NodeSet() << NodeType::AvatarMixer);
}


@ -29,6 +29,7 @@ typedef unsigned long long quint64;
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <QtCore/QByteArray>
#include <QtCore/QObject>
#include <QtCore/QUrl>
#include <QtCore/QUuid>
@ -54,6 +55,7 @@ static const float MIN_AVATAR_SCALE = .005f;
const float MAX_AUDIO_LOUDNESS = 1000.0; // close enough for mouth animation
const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
const int AVATAR_BILLBOARD_PACKET_SEND_INTERVAL_MSECS = 5000;
const QUrl DEFAULT_HEAD_MODEL_URL = QUrl("http://public.highfidelity.io/meshes/defaultAvatar_head.fst");
const QUrl DEFAULT_BODY_MODEL_URL = QUrl("http://public.highfidelity.io/meshes/defaultAvatar_body.fst");
@ -151,6 +153,8 @@ public:
bool hasIdentityChangedAfterParsing(const QByteArray& packet);
QByteArray identityByteArray();
bool hasBillboardChangedAfterParsing(const QByteArray& packet);
const QUrl& getFaceModelURL() const { return _faceModelURL; }
QString getFaceModelURLString() const { return _faceModelURL.toString(); }
const QUrl& getSkeletonModelURL() const { return _skeletonModelURL; }
@ -159,6 +163,9 @@ public:
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setDisplayName(const QString& displayName);
virtual void setBillboard(const QByteArray& billboard);
const QByteArray& getBillboard() const { return _billboard; }
QString getFaceModelURLFromScript() const { return _faceModelURL.toString(); }
void setFaceModelURLFromScript(const QString& faceModelString) { setFaceModelURL(faceModelString); }
@ -169,6 +176,7 @@ public:
public slots:
void sendIdentityPacket();
void sendBillboardPacket();
protected:
glm::vec3 _position;
@ -204,6 +212,8 @@ protected:
float _displayNameTargetAlpha;
float _displayNameAlpha;
QByteArray _billboard;
private:
// privatize the copy constructor and assignment operator so they cannot be called
AvatarData(const AvatarData&);


@ -53,7 +53,8 @@ enum PacketType {
PacketTypeParticleErase,
PacketTypeParticleAddResponse,
PacketTypeMetavoxelData,
PacketTypeAvatarIdentity,
PacketTypeAvatarBillboard
};
typedef char PacketVersion;