Merge pull request #2338 from AndrewMeadows/cleanup
Don't send redundant avatar hand/head data, and cleanup.
Commit 500f9d1485
14 changed files with 101 additions and 311 deletions
@@ -62,7 +62,8 @@ void broadcastAvatarData() {
  mixedAvatarByteArray.resize(numPacketHeaderBytes);
  AvatarMixerClientData* myData = reinterpret_cast<AvatarMixerClientData*>(node->getLinkedData());
- glm::vec3 myPosition = myData->getPosition();
+ AvatarData& avatar = myData->getAvatar();
+ glm::vec3 myPosition = avatar.getPosition();
  // this is an AGENT we have received head data from
  // send back a packet with other active node data to this node

@@ -70,7 +71,8 @@ void broadcastAvatarData() {
  if (otherNode->getLinkedData() && otherNode->getUUID() != node->getUUID()) {
  AvatarMixerClientData* otherNodeData = reinterpret_cast<AvatarMixerClientData*>(otherNode->getLinkedData());
- glm::vec3 otherPosition = otherNodeData->getPosition();
+ AvatarData& otherAvatar = otherNodeData->getAvatar();
+ glm::vec3 otherPosition = otherAvatar.getPosition();
  float distanceToAvatar = glm::length(myPosition - otherPosition);
  // The full rate distance is the distance at which EVERY update will be sent for this avatar
  // at a distance of twice the full rate distance, there will be a 50% chance of sending this avatar's update

@@ -79,7 +81,7 @@ void broadcastAvatarData() {
  if ((distanceToAvatar == 0.f) || (randFloat() < FULL_RATE_DISTANCE / distanceToAvatar)) {
  QByteArray avatarByteArray;
  avatarByteArray.append(otherNode->getUUID().toRfc4122());
- avatarByteArray.append(otherNodeData->toByteArray());
+ avatarByteArray.append(otherAvatar.toByteArray());
  if (avatarByteArray.size() + mixedAvatarByteArray.size() > MAX_PACKET_SIZE) {
  nodeList->writeDatagram(mixedAvatarByteArray, node);

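The hunk above keeps the mixer's distance-based throttle: every avatar inside FULL_RATE_DISTANCE is sent on every pass, and beyond that the send probability falls off as FULL_RATE_DISTANCE / distance, so at twice that distance roughly half of the updates go out. A minimal, self-contained sketch of that rule (the constant's value and the shouldSendAvatarUpdate name are illustrative assumptions, not code from this repository):

    #include <cstdlib>

    // Decide whether to forward this avatar's update to a receiver `distance`
    // meters away. Inside FULL_RATE_DISTANCE every update is forwarded; beyond
    // it the chance decays as FULL_RATE_DISTANCE / distance.
    static bool shouldSendAvatarUpdate(float distance) {
        const float FULL_RATE_DISTANCE = 2.0f; // illustrative value only
        if (distance == 0.0f) {
            return true; // co-located avatars always get full rate
        }
        float roll = static_cast<float>(std::rand()) / static_cast<float>(RAND_MAX);
        return roll < FULL_RATE_DISTANCE / distance;
    }
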
@@ -110,7 +112,8 @@ void broadcastIdentityPacket() {
  if (node->getLinkedData() && node->getType() == NodeType::Agent) {
  AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(node->getLinkedData());
- QByteArray individualData = nodeData->identityByteArray();
+ AvatarData& avatar = nodeData->getAvatar();
+ QByteArray individualData = avatar.identityByteArray();
  individualData.replace(0, NUM_BYTES_RFC4122_UUID, node->getUUID().toRfc4122());
  if (avatarIdentityPacket.size() + individualData.size() > MAX_PACKET_SIZE) {

@@ -135,9 +138,10 @@ void broadcastIdentityPacket() {
  void broadcastBillboardPacket(const SharedNodePointer& sendingNode) {
  AvatarMixerClientData* nodeData = static_cast<AvatarMixerClientData*>(sendingNode->getLinkedData());
+ AvatarData& avatar = nodeData->getAvatar();
  QByteArray packet = byteArrayWithPopulatedHeader(PacketTypeAvatarBillboard);
  packet.append(sendingNode->getUUID().toRfc4122());
- packet.append(nodeData->getBillboard());
+ packet.append(avatar.getBillboard());
  NodeList* nodeList = NodeList::getInstance();
  foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {

@@ -190,12 +194,13 @@ void AvatarMixer::readPendingDatagrams() {
  if (avatarNode && avatarNode->getLinkedData()) {
  AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(avatarNode->getLinkedData());
- if (nodeData->hasIdentityChangedAfterParsing(receivedPacket)
+ AvatarData& avatar = nodeData->getAvatar();
+ if (avatar.hasIdentityChangedAfterParsing(receivedPacket)
  && !nodeData->hasSentIdentityBetweenKeyFrames()) {
  // this avatar changed their identity in some way and we haven't sent a packet in this keyframe
  QByteArray identityPacket = byteArrayWithPopulatedHeader(PacketTypeAvatarIdentity);
- QByteArray individualByteArray = nodeData->identityByteArray();
+ QByteArray individualByteArray = avatar.identityByteArray();
  individualByteArray.replace(0, NUM_BYTES_RFC4122_UUID, avatarNode->getUUID().toRfc4122());
  identityPacket.append(individualByteArray);

@@ -213,7 +218,8 @@ void AvatarMixer::readPendingDatagrams() {
  if (avatarNode && avatarNode->getLinkedData()) {
  AvatarMixerClientData* nodeData = static_cast<AvatarMixerClientData*>(avatarNode->getLinkedData());
- if (nodeData->hasBillboardChangedAfterParsing(receivedPacket)
+ AvatarData& avatar = nodeData->getAvatar();
+ if (avatar.hasBillboardChangedAfterParsing(receivedPacket)
  && !nodeData->hasSentBillboardBetweenKeyFrames()) {
  // this avatar changed their billboard and we haven't sent a packet in this keyframe
  broadcastBillboardPacket(avatarNode);

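The two readPendingDatagrams() hunks above gate out-of-band rebroadcasts with the hasSentIdentityBetweenKeyFrames / hasSentBillboardBetweenKeyFrames flags: a detected change triggers at most one immediate packet per keyframe interval, and the periodic keyframe broadcast presumably clears the flag again. A compact sketch of that pattern (KeyframeThrottle and its method names are illustrative, not taken from this code):

    // Allows at most one change-triggered send between periodic keyframes.
    class KeyframeThrottle {
    public:
        // Call when a client's identity or billboard actually changed.
        // Returns true if an immediate rebroadcast is still allowed.
        bool onChange() {
            if (_sentBetweenKeyFrames) {
                return false; // already sent since the last keyframe
            }
            _sentBetweenKeyFrames = true;
            return true;
        }
        // Call when the periodic keyframe broadcast goes out.
        void onKeyFrame() { _sentBetweenKeyFrames = false; }
    private:
        bool _sentBetweenKeyFrames = false;
    };
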
@@ -9,8 +9,15 @@
  #include "AvatarMixerClientData.h"

  AvatarMixerClientData::AvatarMixerClientData() :
+ NodeData(),
  _hasSentIdentityBetweenKeyFrames(false),
  _hasSentBillboardBetweenKeyFrames(false)
  {
  }

+ int AvatarMixerClientData::parseData(const QByteArray& packet) {
+ // compute the offset to the data payload
+ int offset = numBytesForPacketHeader(packet);
+ return _avatar.parseDataAtOffset(packet, offset);
+ }

@@ -12,11 +12,14 @@
  #include <QtCore/QUrl>

  #include <AvatarData.h>
+ #include <NodeData.h>

- class AvatarMixerClientData : public AvatarData {
+ class AvatarMixerClientData : public NodeData {
  Q_OBJECT
  public:
  AvatarMixerClientData();

+ int parseData(const QByteArray& packet);

  bool hasSentIdentityBetweenKeyFrames() const { return _hasSentIdentityBetweenKeyFrames; }
  void setHasSentIdentityBetweenKeyFrames(bool hasSentIdentityBetweenKeyFrames)

@@ -25,11 +28,14 @@ public:
  bool hasSentBillboardBetweenKeyFrames() const { return _hasSentBillboardBetweenKeyFrames; }
  void setHasSentBillboardBetweenKeyFrames(bool hasSentBillboardBetweenKeyFrames)
  { _hasSentBillboardBetweenKeyFrames = hasSentBillboardBetweenKeyFrames; }

+ AvatarData& getAvatar() { return _avatar; }

  private:
  bool _hasSentIdentityBetweenKeyFrames;
  bool _hasSentBillboardBetweenKeyFrames;
+ AvatarData _avatar;
  };

  #endif /* defined(__hifi__AvatarMixerClientData__) */

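The two header hunks above replace inheritance with composition: AvatarMixerClientData no longer is an AvatarData, it derives from NodeData and owns one, exposing it through getAvatar(). A stripped-down sketch of the resulting shape (simplified stub types for illustration only; not a copy of the real headers):

    #include <QtCore/QByteArray>

    class NodeDataStub { public: virtual ~NodeDataStub() {} };
    class AvatarDataStub {
    public:
        int parseDataAtOffset(const QByteArray&, int offset) { return offset; } // stub body
    };

    // Per-connection bookkeeping lives in the NodeData-derived wrapper, while
    // the reusable avatar state is a plain member that callers reach through
    // an accessor instead of treating the client data itself as an avatar.
    class ClientDataSketch : public NodeDataStub {
    public:
        AvatarDataStub& getAvatar() { return _avatar; }
        bool hasSentIdentityBetweenKeyFrames() const { return _hasSentIdentityBetweenKeyFrames; }
    private:
        AvatarDataStub _avatar;
        bool _hasSentIdentityBetweenKeyFrames = false;
    };
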
@@ -189,55 +189,60 @@ static TextRenderer* textRenderer(TextRendererType type) {
  return displayNameRenderer;
  }

- void Avatar::render(bool forShadowMap) {
+ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
  // simple frustum check
  float boundingRadius = getBillboardSize();
- if (Application::getInstance()->getViewFrustum()->sphereInFrustum(_position, boundingRadius) == ViewFrustum::OUTSIDE) {
+ if (Application::getInstance()->getViewFrustum()->sphereInFrustum(cameraPosition, boundingRadius) == ViewFrustum::OUTSIDE) {
  return;
  }

- glm::vec3 toTarget = _position - Application::getInstance()->getAvatar()->getPosition();
- float lengthToTarget = glm::length(toTarget);
+ glm::vec3 toTarget = cameraPosition - Application::getInstance()->getAvatar()->getPosition();
+ float distanceToTarget = glm::length(toTarget);

  {
- // glow when moving in the distance
+ // glow when moving far away
  const float GLOW_DISTANCE = 20.0f;
- Glower glower(_moving && lengthToTarget > GLOW_DISTANCE && !forShadowMap ? 1.0f : 0.0f);
+ Glower glower(_moving && distanceToTarget > GLOW_DISTANCE && !forShadowMap ? 1.0f : 0.0f);

  // render body
+ if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
+ renderBody(forShadowMap);
+ }
  if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) {
  _skeletonModel.renderCollisionProxies(0.7f);
  }
  if (Menu::getInstance()->isOptionChecked(MenuOption::RenderHeadCollisionProxies)) {
  getHead()->getFaceModel().renderCollisionProxies(0.7f);
  }
- if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
- renderBody();
- }

- // render voice intensity sphere for avatars that are farther away
- const float MAX_SPHERE_ANGLE = 10.f * RADIANS_PER_DEGREE;
- const float MIN_SPHERE_ANGLE = 1.f * RADIANS_PER_DEGREE;
- const float MIN_SPHERE_SIZE = 0.01f;
- const float SPHERE_LOUDNESS_SCALING = 0.0005f;
- const float SPHERE_COLOR[] = { 0.5f, 0.8f, 0.8f };
- float height = getSkeletonHeight();
- glm::vec3 delta = height * (getHead()->getCameraOrientation() * IDENTITY_UP) / 2.f;
- float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
- float sphereRadius = getHead()->getAverageLoudness() * SPHERE_LOUDNESS_SCALING;
- if (!forShadowMap && (sphereRadius > MIN_SPHERE_SIZE) && (angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
- glColor4f(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.f - angle / MAX_SPHERE_ANGLE);
- glPushMatrix();
- glTranslatef(_position.x, _position.y, _position.z);
- glScalef(height, height, height);
- glutSolidSphere(sphereRadius, 15, 15);
- glPopMatrix();
+ // quick check before falling into the code below:
+ // (a 10 degree breadth of an almost 2 meter avatar kicks in at about 12m)
+ const float MIN_VOICE_SPHERE_DISTANCE = 12.f;
+ if (distanceToTarget > MIN_VOICE_SPHERE_DISTANCE) {
+ // render voice intensity sphere for avatars that are farther away
+ const float MAX_SPHERE_ANGLE = 10.f * RADIANS_PER_DEGREE;
+ const float MIN_SPHERE_ANGLE = 1.f * RADIANS_PER_DEGREE;
+ const float MIN_SPHERE_SIZE = 0.01f;
+ const float SPHERE_LOUDNESS_SCALING = 0.0005f;
+ const float SPHERE_COLOR[] = { 0.5f, 0.8f, 0.8f };
+ float height = getSkeletonHeight();
+ glm::vec3 delta = height * (getHead()->getCameraOrientation() * IDENTITY_UP) / 2.f;
+ float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
+ float sphereRadius = getHead()->getAverageLoudness() * SPHERE_LOUDNESS_SCALING;
+ if (!forShadowMap && (sphereRadius > MIN_SPHERE_SIZE) && (angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
+ glColor4f(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.f - angle / MAX_SPHERE_ANGLE);
+ glPushMatrix();
+ glTranslatef(_position.x, _position.y, _position.z);
+ glScalef(height, height, height);
+ glutSolidSphere(sphereRadius, 15, 15);
+ glPopMatrix();
+ }
+ }
  }

  const float DISPLAYNAME_DISTANCE = 10.0f;
- setShowDisplayName(!forShadowMap && lengthToTarget < DISPLAYNAME_DISTANCE);
+ setShowDisplayName(!forShadowMap && distanceToTarget < DISPLAYNAME_DISTANCE);
  if (forShadowMap) {
  return;
  }

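The MIN_VOICE_SPHERE_DISTANCE early-out added above follows from the 10 degree limit already in the block: the sphere is only drawn when the avatar's angular breadth drops below MAX_SPHERE_ANGLE, and a roughly 2 m tall avatar only gets that narrow at about 11-12 m, so anything nearer can skip the vector math entirely. A quick back-of-the-envelope check of that figure (the 2 m height is an assumption used only for illustration):

    #include <cmath>
    #include <cstdio>

    int main() {
        const float RADIANS_PER_DEGREE = 3.14159265f / 180.0f;
        const float MAX_SPHERE_ANGLE = 10.0f * RADIANS_PER_DEGREE; // same 10 degree limit as the render code
        const float avatarHeight = 2.0f;                           // illustrative height in meters

        // Distance at which a segment of length avatarHeight subtends MAX_SPHERE_ANGLE.
        float distance = (avatarHeight / 2.0f) / std::tan(MAX_SPHERE_ANGLE / 2.0f);
        std::printf("the angle test can only pass beyond ~%.1f m\n", distance); // prints ~11.4
        return 0;
    }
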
@@ -257,7 +262,6 @@ void Avatar::render(bool forShadowMap) {
  glm::vec3 chatAxis = glm::axis(chatRotation);
  glRotatef(glm::degrees(glm::angle(chatRotation)), chatAxis.x, chatAxis.y, chatAxis.z);
-
  glColor3f(0.f, 0.8f, 0.f);
  glRotatef(180.f, 0.f, 1.f, 0.f);
  glRotatef(180.f, 0.f, 0.f, 1.f);

@@ -302,9 +306,12 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
  return glm::angleAxis(angle * proportion, axis);
  }

- void Avatar::renderBody() {
+ void Avatar::renderBody(bool forShadowMap) {
  if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
  // render the billboard until both models are loaded
+ if (forShadowMap) {
+ return;
+ }
  renderBillboard();
  return;
  }

@@ -626,11 +633,11 @@ void Avatar::setBillboard(const QByteArray& billboard) {
  _billboardTexture.reset();
  }

- int Avatar::parseData(const QByteArray& packet) {
+ int Avatar::parseDataAtOffset(const QByteArray& packet, int offset) {
  // change in position implies movement
  glm::vec3 oldPosition = _position;

- int bytesRead = AvatarData::parseData(packet);
+ int bytesRead = AvatarData::parseDataAtOffset(packet, offset);

  const float MOVE_DISTANCE_THRESHOLD = 0.001f;
  _moving = glm::distance(oldPosition, _position) > MOVE_DISTANCE_THRESHOLD;

@@ -74,7 +74,7 @@ public:
  void init();
  void simulate(float deltaTime);
- void render(bool forShadowMap = false);
+ virtual void render(const glm::vec3& cameraPosition, bool forShadowMap);

  //setters
  void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }

@@ -133,7 +133,7 @@ public:
  void setShowDisplayName(bool showDisplayName);

- int parseData(const QByteArray& packet);
+ int parseDataAtOffset(const QByteArray& packet, int offset);

  static void renderJointConnectingCone(glm::vec3 position1, glm::vec3 position2, float radius1, float radius2);

@@ -181,6 +181,7 @@ protected:
  float getPelvisToHeadLength() const;

  void renderDisplayName();
+ virtual void renderBody(bool forShadowMap);

  private:

@@ -189,7 +190,6 @@ private:
  bool _shouldRenderBillboard;
  bool _modelsDirty;

- void renderBody();
  void renderBillboard();

  float getBillboardSize() const;

@@ -77,7 +77,7 @@ void AvatarManager::renderAvatars(bool forShadowMapOrMirror, bool selfAvatarOnly
  "Application::renderAvatars()");
  bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::LookAtVectors);

+ glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();

  if (!selfAvatarOnly) {
  foreach (const AvatarSharedPointer& avatarPointer, _avatarHash) {

@@ -85,17 +85,13 @@ void AvatarManager::renderAvatars(bool forShadowMapOrMirror, bool selfAvatarOnly
  if (!avatar->isInitialized()) {
  continue;
  }
- if (avatar == static_cast<Avatar*>(_myAvatar.data())) {
- _myAvatar->render(forShadowMapOrMirror);
- } else {
- avatar->render(forShadowMapOrMirror);
- }
+ avatar->render(cameraPosition, forShadowMapOrMirror);
  avatar->setDisplayingLookatVectors(renderLookAtVectors);
  }
- renderAvatarFades(forShadowMapOrMirror);
+ renderAvatarFades(cameraPosition, forShadowMapOrMirror);
  } else {
  // just render myAvatar
- _myAvatar->render(forShadowMapOrMirror);
+ _myAvatar->render(cameraPosition, forShadowMapOrMirror);
  _myAvatar->setDisplayingLookatVectors(renderLookAtVectors);
  }
  }

@@ -118,13 +114,15 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
  }
  }

- void AvatarManager::renderAvatarFades(bool forShadowMap) {
+ void AvatarManager::renderAvatarFades(const glm::vec3& cameraPosition, bool forShadowMap) {
  // render avatar fades
  Glower glower(forShadowMap ? 0.0f : 1.0f);

  foreach(const AvatarSharedPointer& fadingAvatar, _avatarFades) {
  Avatar* avatar = static_cast<Avatar*>(fadingAvatar.data());
- avatar->render(forShadowMap);
+ if (avatar != static_cast<Avatar*>(_myAvatar.data())) {
+ avatar->render(cameraPosition, forShadowMap);
+ }
  }
  }

@@ -150,14 +148,11 @@ void AvatarManager::processAvatarMixerDatagram(const QByteArray& datagram, const
  void AvatarManager::processAvatarDataPacket(const QByteArray &datagram, const QWeakPointer<Node> &mixerWeakPointer) {
  int bytesRead = numBytesForPacketHeader(datagram);

- QByteArray dummyAvatarByteArray = byteArrayWithPopulatedHeader(PacketTypeAvatarData);
- int numDummyHeaderBytes = dummyAvatarByteArray.size();
- int numDummyHeaderBytesWithoutUUID = numDummyHeaderBytes - NUM_BYTES_RFC4122_UUID;

  // enumerate over all of the avatars in this packet
  // only add them if mixerWeakPointer points to something (meaning that mixer is still around)
  while (bytesRead < datagram.size() && mixerWeakPointer.data()) {
  QUuid nodeUUID = QUuid::fromRfc4122(datagram.mid(bytesRead, NUM_BYTES_RFC4122_UUID));
  bytesRead += NUM_BYTES_RFC4122_UUID;

  AvatarSharedPointer matchingAvatar = _avatarHash.value(nodeUUID);

@@ -173,16 +168,9 @@ void AvatarManager::processAvatarDataPacket(const QByteArray &datagram, const QW
  qDebug() << "Adding avatar with UUID" << nodeUUID << "to AvatarManager hash.";
  }

- // copy the rest of the packet to the avatarData holder so we can read the next Avatar from there
- dummyAvatarByteArray.resize(numDummyHeaderBytesWithoutUUID);

- // make this Avatar's UUID the UUID in the packet and tack the remaining data onto the end
- dummyAvatarByteArray.append(datagram.mid(bytesRead));

  // have the matching (or new) avatar parse the data from the packet
- bytesRead += matchingAvatar->parseData(dummyAvatarByteArray) - numDummyHeaderBytesWithoutUUID;
+ bytesRead += matchingAvatar->parseDataAtOffset(datagram, bytesRead);
  }
  }

  void AvatarManager::processAvatarIdentityPacket(const QByteArray &packet) {

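The change above removes a copy: the old path rebuilt a dummy PacketTypeAvatarData header, appended the remainder of the datagram to it, handed that to parseData(), and then subtracted the fake header length from the byte count. With parseDataAtOffset() the avatar parses straight out of the original datagram at the current read position. A minimal sketch of the offset-based idiom (readAvatarBlob and its struct are hypothetical placeholders, not names from this codebase):

    #include <QtCore/QByteArray>
    #include <QtCore/QUuid>

    struct AvatarBlob {
        QUuid uuid;
        // ... remaining avatar fields would follow ...
    };

    // Parse one avatar record starting at `offset` and return the number of
    // bytes consumed, so the caller can walk a datagram that packs several
    // avatars back to back without any intermediate copies.
    static int readAvatarBlob(const QByteArray& datagram, int offset, AvatarBlob& out) {
        const int UUID_BYTES = 16; // RFC 4122 UUIDs are 16 bytes on the wire
        out.uuid = QUuid::fromRfc4122(datagram.mid(offset, UUID_BYTES));
        int consumed = UUID_BYTES;
        // ... unpack the rest of the record from datagram at offset + consumed ...
        return consumed;
    }

    // Caller loop, mirroring processAvatarDataPacket():
    //   int bytesRead = headerBytes;
    //   while (bytesRead < datagram.size()) {
    //       AvatarBlob blob;
    //       bytesRead += readAvatarBlob(datagram, bytesRead, blob);
    //   }
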
@@ -45,7 +45,7 @@ private:
  void processKillAvatar(const QByteArray& datagram);

  void simulateAvatarFades(float deltaTime);
- void renderAvatarFades(bool forShadowMap);
+ void renderAvatarFades(const glm::vec3& cameraPosition, bool forShadowMap);

  // virtual override
  AvatarHash::iterator erase(const AvatarHash::iterator& iterator);

@@ -454,68 +454,14 @@ void MyAvatar::renderDebugBodyPoints() {
  }
- void MyAvatar::render(bool forShadowMapOrMirror) {
+ // virtual
+ void MyAvatar::render(const glm::vec3& cameraPosition, bool forShadowMapOrMirror) {
  // don't render if we've been asked to disable local rendering
  if (!_shouldRender) {
  return; // exit early
  }

- if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
- renderBody(forShadowMapOrMirror);
- }
- // render body
- if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) {
- _skeletonModel.renderCollisionProxies(0.8f);
- }
- if (Menu::getInstance()->isOptionChecked(MenuOption::RenderHeadCollisionProxies)) {
- getHead()->getFaceModel().renderCollisionProxies(0.8f);
- }
- setShowDisplayName(!forShadowMapOrMirror);
- if (forShadowMapOrMirror) {
- return;
- }
- renderDisplayName();

- if (!_chatMessage.empty()) {
- int width = 0;
- int lastWidth = 0;
- for (string::iterator it = _chatMessage.begin(); it != _chatMessage.end(); it++) {
- width += (lastWidth = textRenderer()->computeWidth(*it));
- }
- glPushMatrix();

- glm::vec3 chatPosition = getHead()->getEyePosition() + getBodyUpDirection() * CHAT_MESSAGE_HEIGHT * _scale;
- glTranslatef(chatPosition.x, chatPosition.y, chatPosition.z);
- glm::quat chatRotation = Application::getInstance()->getCamera()->getRotation();
- glm::vec3 chatAxis = glm::axis(chatRotation);
- glRotatef(glm::degrees(glm::angle(chatRotation)), chatAxis.x, chatAxis.y, chatAxis.z);

- glColor3f(0.f, 0.8f, 0.f);
- glRotatef(180.f, 0.f, 1.f, 0.f);
- glRotatef(180.f, 0.f, 0.f, 1.f);
- glScalef(_scale * CHAT_MESSAGE_SCALE, _scale * CHAT_MESSAGE_SCALE, 1.0f);

- glDisable(GL_LIGHTING);
- glDepthMask(false);
- if (_keyState == NO_KEY_DOWN) {
- textRenderer()->draw(-width / 2.0f, 0, _chatMessage.c_str());

- } else {
- // rather than using substr and allocating a new string, just replace the last
- // character with a null, then restore it
- int lastIndex = _chatMessage.size() - 1;
- char lastChar = _chatMessage[lastIndex];
- _chatMessage[lastIndex] = '\0';
- textRenderer()->draw(-width / 2.0f, 0, _chatMessage.c_str());
- _chatMessage[lastIndex] = lastChar;
- glColor3f(0.f, 1.f, 0.f);
- textRenderer()->draw(width / 2.0f - lastWidth, 0, _chatMessage.c_str() + lastIndex);
- }
- glEnable(GL_LIGHTING);
- glDepthMask(true);

- glPopMatrix();
- }
+ Avatar::render(cameraPosition, forShadowMapOrMirror);
  }

  void MyAvatar::renderHeadMouse() const {

@@ -35,7 +35,8 @@ public:
  void simulate(float deltaTime);
  void updateFromGyros(float deltaTime);

- void render(bool forShadowMapOrMirror = false);
+ void render(const glm::vec3& cameraPosition, bool forShadowMapOrMirror = false);
+ void renderBody(bool forceRenderHead);
  void renderDebugBodyPoints();
  void renderHeadMouse() const;

@@ -120,7 +121,6 @@ private:
  bool _billboardValid;

  // private methods
- void renderBody(bool forceRenderHead);
  void updateThrust(float deltaTime);
  void updateHandMovementAndTouching(float deltaTime);
  void updateCollisionWithAvatars(float deltaTime);

@@ -29,7 +29,6 @@ using namespace std;
  QNetworkAccessManager* AvatarData::networkAccessManager = NULL;

  AvatarData::AvatarData() :
- NodeData(),
  _handPosition(0,0,0),
  _bodyYaw(-90.f),
  _bodyPitch(0.0f),

@@ -89,23 +88,6 @@ QByteArray AvatarData::toByteArray() {
  // Body scale
  destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);

- // Head rotation (NOTE: This needs to become a quaternion to save two bytes)
- destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getTweakedYaw());
- destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getTweakedPitch());
- destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getTweakedRoll());

- // Head lean X,Z (head lateral and fwd/back motion relative to torso)
- memcpy(destinationBuffer, &_headData->_leanSideways, sizeof(_headData->_leanSideways));
- destinationBuffer += sizeof(_headData->_leanSideways);
- memcpy(destinationBuffer, &_headData->_leanForward, sizeof(_headData->_leanForward));
- destinationBuffer += sizeof(_headData->_leanForward);

- // Hand Position - is relative to body position
- glm::vec3 handPositionRelative = _handPosition - _position;
- memcpy(destinationBuffer, &handPositionRelative, sizeof(float) * 3);
- destinationBuffer += sizeof(float) * 3;

  // Lookat Position
  memcpy(destinationBuffer, &_headData->_lookAtPosition, sizeof(_headData->_lookAtPosition));
  destinationBuffer += sizeof(_headData->_lookAtPosition);

@@ -178,14 +160,11 @@ QByteArray AvatarData::toByteArray() {
  }
  }

- // hand data
- destinationBuffer += HandData::encodeData(_handData, destinationBuffer);

  return avatarDataByteArray.left(destinationBuffer - startPosition);
  }

- // called on the other nodes - assigns it to my views of the others
- int AvatarData::parseData(const QByteArray& packet) {
+ // read data in packet starting at byte offset and return number of bytes parsed
+ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {

  // lazily allocate memory for HeadData in case we're not an Avatar instance
  if (!_headData) {

@@ -197,9 +176,8 @@ int AvatarData::parseData(const QByteArray& packet) {
  _handData = new HandData(this);
  }

- // increment to push past the packet header
  const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(packet.data());
- const unsigned char* sourceBuffer = startPosition + numBytesForPacketHeader(packet);
+ const unsigned char* sourceBuffer = startPosition + offset;

  // Body world position
  memcpy(&_position, sourceBuffer, sizeof(float) * 3);

@@ -213,28 +191,6 @@ int AvatarData::parseData(const QByteArray& packet) {
  // Body scale
  sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer, _targetScale);

- // Head rotation (NOTE: This needs to become a quaternion to save two bytes)
- float headYaw, headPitch, headRoll;
- sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &headYaw);
- sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &headPitch);
- sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &headRoll);

- _headData->setYaw(headYaw);
- _headData->setPitch(headPitch);
- _headData->setRoll(headRoll);

- // Head position relative to pelvis
- memcpy(&_headData->_leanSideways, sourceBuffer, sizeof(_headData->_leanSideways));
- sourceBuffer += sizeof(float);
- memcpy(&_headData->_leanForward, sourceBuffer, sizeof(_headData->_leanForward));
- sourceBuffer += sizeof(_headData->_leanForward);

- // Hand Position - is relative to body position
- glm::vec3 handPositionRelative;
- memcpy(&handPositionRelative, sourceBuffer, sizeof(float) * 3);
- _handPosition = _position + handPositionRelative;
- sourceBuffer += sizeof(float) * 3;

  // Lookat Position
  memcpy(&_headData->_lookAtPosition, sourceBuffer, sizeof(_headData->_lookAtPosition));
  sourceBuffer += sizeof(_headData->_lookAtPosition);

@@ -288,13 +244,13 @@ int AvatarData::parseData(const QByteArray& packet) {
  // joint data
  int jointCount = *sourceBuffer++;
  _jointData.resize(jointCount);
- unsigned char validity = 0; // although always set below, this fixes a warning of potential uninitialized use
+ unsigned char validity = 0;
  int validityBit = 0;
  for (int i = 0; i < jointCount; i++) {
  if (validityBit == 0) {
  validity = *sourceBuffer++;
  }
- _jointData[i].valid = validity & (1 << validityBit);
+ _jointData[i].valid = (bool)(validity & (1 << validityBit));
  validityBit = (validityBit + 1) % BITS_IN_BYTE;
  }
  for (int i = 0; i < jointCount; i++) {

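The joint block above stores one validity flag per joint, packed eight to a byte; the new cast just makes the narrowing to the bool field explicit, so the old "fixes a warning" comment is no longer needed. A self-contained sketch of the same unpacking loop (simplified, with a std::vector<bool> standing in for the real JointData entries):

    #include <cstdint>
    #include <vector>

    static const int BITS_IN_BYTE = 8;

    // Reads ceil(jointCount / 8) bytes of packed flags and advances sourceBuffer.
    std::vector<bool> unpackJointValidity(const uint8_t*& sourceBuffer, int jointCount) {
        std::vector<bool> valid(jointCount);
        uint8_t validity = 0;
        int validityBit = 0;
        for (int i = 0; i < jointCount; i++) {
            if (validityBit == 0) {
                validity = *sourceBuffer++; // pull the next byte of flags
            }
            valid[i] = (validity & (1 << validityBit)) != 0;
            validityBit = (validityBit + 1) % BITS_IN_BYTE;
        }
        return valid;
    }
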
@@ -304,12 +260,6 @@ int AvatarData::parseData(const QByteArray& packet) {
  }
  }

- // hand data
- if (sourceBuffer - startPosition < packet.size()) {
- // check passed, bytes match
- sourceBuffer += _handData->decodeRemoteData(packet.mid(sourceBuffer - startPosition));
- }

  return sourceBuffer - startPosition;
  }

@@ -40,7 +40,6 @@ typedef unsigned long long quint64;
  #include <CollisionInfo.h>
  #include <RegisteredMetaTypes.h>
- #include <NodeData.h>

  #include "HeadData.h"
  #include "HandData.h"

@@ -74,7 +73,7 @@ class QNetworkAccessManager;
  class JointData;

- class AvatarData : public NodeData {
+ class AvatarData : public QObject {
  Q_OBJECT

  Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition)

@@ -97,7 +96,7 @@ class AvatarData : public NodeData {
  Q_PROPERTY(QString billboardURL READ getBillboardURL WRITE setBillboardFromURL)
  public:
  AvatarData();
- ~AvatarData();
+ virtual ~AvatarData();

  const glm::vec3& getPosition() const { return _position; }
  void setPosition(const glm::vec3 position) { _position = position; }

@@ -106,7 +105,11 @@ public:
  void setHandPosition(const glm::vec3& handPosition);

  QByteArray toByteArray();
- int parseData(const QByteArray& packet);
+ /// \param packet byte array of data
+ /// \param offset number of bytes into packet where data starts
+ /// \return number of bytes parsed
+ virtual int parseDataAtOffset(const QByteArray& packet, int offset);

  // Body Rotation (degrees)
  float getBodyYaw() const { return _bodyYaw; }

@@ -109,123 +109,6 @@ _owningHandData(owningHandData)
  setTrailLength(standardTrailLength);
  }

- // static
- int HandData::encodeData(HandData* hand, unsigned char* destinationBuffer) {
- if (hand) {
- return hand->encodeRemoteData(destinationBuffer);
- }
- // else encode empty data:
- // One byte for zero hands
- // One byte for error checking.
- *destinationBuffer = 0;
- *(destinationBuffer + 1) = 1;
- return 2;
- }

- int HandData::encodeRemoteData(unsigned char* destinationBuffer) {
- const unsigned char* startPosition = destinationBuffer;

- unsigned int numPalms = 0;
- for (unsigned int handIndex = 0; handIndex < getNumPalms(); ++handIndex) {
- PalmData& palm = getPalms()[handIndex];
- if (palm.isActive()) {
- numPalms++;
- }
- }
- *destinationBuffer++ = numPalms;

- for (unsigned int handIndex = 0; handIndex < getNumPalms(); ++handIndex) {
- PalmData& palm = getPalms()[handIndex];
- if (palm.isActive()) {
- destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, palm.getRawPosition(), fingerVectorRadix);
- destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, palm.getRawNormal(), fingerVectorRadix);

- unsigned int numFingers = 0;
- for (unsigned int fingerIndex = 0; fingerIndex < palm.getNumFingers(); ++fingerIndex) {
- FingerData& finger = palm.getFingers()[fingerIndex];
- if (finger.isActive()) {
- numFingers++;
- }
- }
- *destinationBuffer++ = numFingers;

- for (unsigned int fingerIndex = 0; fingerIndex < palm.getNumFingers(); ++fingerIndex) {
- FingerData& finger = palm.getFingers()[fingerIndex];
- if (finger.isActive()) {
- destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, finger.getTipRawPosition(), fingerVectorRadix);
- destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, finger.getRootRawPosition(), fingerVectorRadix);
- }
- }
- }
- }
- // One byte for error checking safety.
- size_t checkLength = destinationBuffer - startPosition;
- *destinationBuffer++ = (unsigned char)checkLength;

- // just a double-check, while tracing a crash.
- // decodeRemoteData(destinationBuffer - (destinationBuffer - startPosition));

- return destinationBuffer - startPosition;
- }

- int HandData::decodeRemoteData(const QByteArray& dataByteArray) {
- const unsigned char* startPosition;
- const unsigned char* sourceBuffer = startPosition = reinterpret_cast<const unsigned char*>(dataByteArray.data());
- unsigned int numPalms = *sourceBuffer++;

- for (unsigned int handIndex = 0; handIndex < numPalms; ++handIndex) {
- if (handIndex >= (unsigned int)getNumPalms())
- addNewPalm();
- PalmData& palm = getPalms()[handIndex];

- glm::vec3 handPosition;
- glm::vec3 handNormal;
- sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, handPosition, fingerVectorRadix);
- sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, handNormal, fingerVectorRadix);
- unsigned int numFingers = *sourceBuffer++;

- palm.setRawPosition(handPosition);
- palm.setRawNormal(handNormal);
- palm.setActive(true);

- // For received data, set the sixense controller ID to match the order initialized and sent - 0 Left, 1 Right
- palm.setSixenseID(handIndex);

- for (unsigned int fingerIndex = 0; fingerIndex < numFingers; ++fingerIndex) {
- if (fingerIndex < (unsigned int)palm.getNumFingers()) {
- FingerData& finger = palm.getFingers()[fingerIndex];

- glm::vec3 tipPosition;
- glm::vec3 rootPosition;
- sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, tipPosition, fingerVectorRadix);
- sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, rootPosition, fingerVectorRadix);

- finger.setRawTipPosition(tipPosition);
- finger.setRawRootPosition(rootPosition);
- finger.setActive(true);
- }
- }
- // Turn off any fingers which weren't used.
- for (unsigned int fingerIndex = numFingers; fingerIndex < palm.getNumFingers(); ++fingerIndex) {
- FingerData& finger = palm.getFingers()[fingerIndex];
- finger.setActive(false);
- }
- }
- // Turn off any hands which weren't used.
- for (unsigned int handIndex = numPalms; handIndex < getNumPalms(); ++handIndex) {
- PalmData& palm = getPalms()[handIndex];
- palm.setActive(false);
- }

- // One byte for error checking safety. Last byte contains the expected length (less itself)
- // actualLength less expected byte = (sourceBuffer - startPosition)
- // expectedLength less expected byte = (*sourceBuffer)
- assert((unsigned char)(sourceBuffer - startPosition) == (unsigned char)(*sourceBuffer));
- sourceBuffer++; // skip the trailing byte which is expected length

- return sourceBuffer - startPosition;
- }

  void HandData::setFingerTrailLength(unsigned int length) {
  for (size_t i = 0; i < getNumPalms(); ++i) {
  PalmData& palm = getPalms()[i];

@@ -63,12 +63,6 @@ public:
  void setFingerTrailLength(unsigned int length);
  void updateFingerTrails();

- static int encodeData(HandData* hand, unsigned char* destinationBuffer);

- // Use these for sending and receiving hand data
- int encodeRemoteData(unsigned char* destinationBuffer);
- int decodeRemoteData(const QByteArray& dataByteArray);

  /// Checks for penetration between the described sphere and the hand.
  /// \param penetratorCenter the center of the penetration test sphere
  /// \param penetratorRadius the radius of the penetration test sphere

@@ -45,7 +45,7 @@ int packArithmeticallyCodedValue(int value, char* destination) {
  PacketVersion versionForPacketType(PacketType type) {
  switch (type) {
  case PacketTypeAvatarData:
- return 1;
+ return 2;
  case PacketTypeParticleData:
  return 1;
  case PacketTypeDomainList:

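Because the AvatarData hunks above change what toByteArray() writes (the head rotation, head lean, and hand position blocks are gone), version 1 of PacketTypeAvatarData is no longer compatible with the new parser, hence the bump to 2. A generic sketch of the kind of guard a receiver can apply before parsing (the names here are illustrative, not taken from this codebase):

    #include <cstdint>

    const uint8_t EXPECTED_AVATAR_DATA_VERSION = 2; // matches the bump above

    // Drop packets whose version byte does not match what this build expects,
    // instead of misinterpreting an incompatible layout.
    static bool shouldDropAvatarDataPacket(uint8_t receivedVersion) {
        return receivedVersion != EXPECTED_AVATAR_DATA_VERSION;
    }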