Merge branch 'master' of https://github.com/worklist/hifi into keep_local_voxels

Commit d939d3aaae by ZappoMan on 2013-10-09 14:02:40 -07:00
37 changed files with 809 additions and 395 deletions

View file

@ -15,7 +15,6 @@ set(CMAKE_AUTOMOC ON)
add_subdirectory(animation-server)
add_subdirectory(assignment-client)
add_subdirectory(domain-server)
add_subdirectory(eve)
add_subdirectory(interface)
add_subdirectory(injector)
add_subdirectory(pairing-server)

View file

@ -12,6 +12,7 @@
#include <AvatarData.h>
#include <NodeList.h>
#include <UUID.h>
#include <VoxelConstants.h>
#include "Agent.h"
@ -50,7 +51,7 @@ void Agent::run() {
// figure out the URL for the script for this agent assignment
QString scriptURLString("http://%1:8080/assignment/%2");
scriptURLString = scriptURLString.arg(NodeList::getInstance()->getDomainIP().toString(),
this->getUUIDStringWithoutCurlyBraces());
uuidStringWithoutCurlyBraces(_uuid));
// setup curl for script download
CURLcode curlResult;

View file

@ -98,7 +98,7 @@ void AvatarMixer::run() {
nodeList->startSilentNodeRemovalThread();
sockaddr* nodeAddress = new sockaddr;
sockaddr nodeAddress = {};
ssize_t receivedBytes = 0;
unsigned char* packetData = new unsigned char[MAX_PACKET_SIZE];
@ -107,8 +107,6 @@ void AvatarMixer::run() {
Node* avatarNode = NULL;
timeval lastDomainServerCheckIn = {};
// we only need to hear back about avatar nodes from the DS
nodeList->setNodeTypesOfInterest(&NODE_TYPE_AGENT, 1);
while (true) {
@ -122,7 +120,7 @@ void AvatarMixer::run() {
NodeList::getInstance()->sendDomainServerCheckIn(_uuid.toRfc4122().constData());
}
if (nodeList->getNodeSocket()->receive(nodeAddress, packetData, &receivedBytes) &&
if (nodeList->getNodeSocket()->receive(&nodeAddress, packetData, &receivedBytes) &&
packetVersionMatch(packetData)) {
switch (packetData[0]) {
case PACKET_TYPE_HEAD_DATA:
@ -130,12 +128,12 @@ void AvatarMixer::run() {
unpackNodeId(packetData + numBytesForPacketHeader(packetData), &nodeID);
// add or update the node in our list
avatarNode = nodeList->addOrUpdateNode(nodeAddress, nodeAddress, NODE_TYPE_AGENT, nodeID);
avatarNode = nodeList->addOrUpdateNode(&nodeAddress, &nodeAddress, NODE_TYPE_AGENT, nodeID);
// parse positional data from a node
nodeList->updateNodeWithData(avatarNode, packetData, receivedBytes);
case PACKET_TYPE_INJECT_AUDIO:
broadcastAvatarData(nodeList, nodeAddress);
broadcastAvatarData(nodeList, &nodeAddress);
break;
case PACKET_TYPE_AVATAR_URLS:
case PACKET_TYPE_AVATAR_FACE_VIDEO:
@ -151,7 +149,7 @@ void AvatarMixer::run() {
break;
default:
// hand this off to the NodeList
nodeList->processNodeData(nodeAddress, packetData, receivedBytes);
nodeList->processNodeData(&nodeAddress, packetData, receivedBytes);
break;
}
}

View file

@ -12,6 +12,7 @@
#include <PacketHeaders.h>
#include <SharedUtil.h>
#include <UUID.h>
#include "DomainServer.h"
@ -62,7 +63,7 @@ void DomainServer::civetwebUploadHandler(struct mg_connection *connection, const
QString newPath(ASSIGNMENT_SCRIPT_HOST_LOCATION);
newPath += "/";
// append the UUID for this script as the new filename, remove the curly braces
newPath += scriptAssignment->getUUIDStringWithoutCurlyBraces();
newPath += uuidStringWithoutCurlyBraces(scriptAssignment->getUUID());
// rename the saved script to the GUID of the assignment and move it to the script host location
rename(path, newPath.toLocal8Bit().constData());
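For reference, a minimal sketch of what the uuidStringWithoutCurlyBraces() helper used above (declared in UUID.h) plausibly does: QUuid::toString() wraps the UUID in curly braces, and the helper strips them. The body below is an assumption, not necessarily the repository's exact implementation.
#include <QtCore/QString>
#include <QtCore/QUuid>

QString uuidStringWithoutCurlyBraces(const QUuid& uuid) {
    QString uuidString = uuid.toString();               // e.g. "{1b4e28ba-2fa1-11d2-883f-0016d3cca427}"
    return uuidString.mid(1, uuidString.length() - 2);  // drop the leading '{' and trailing '}'
}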

View file

@ -1,21 +0,0 @@
cmake_minimum_required(VERSION 2.8)
set(ROOT_DIR ..)
set(MACRO_DIR ${ROOT_DIR}/cmake/macros)
# setup for find modules
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake/modules/")
set(TARGET_NAME eve)
include(${MACRO_DIR}/SetupHifiProject.cmake)
setup_hifi_project(${TARGET_NAME} TRUE)
include(${MACRO_DIR}/IncludeGLM.cmake)
include_glm(${TARGET_NAME} ${ROOT_DIR})
# link the required hifi libraries
include(${MACRO_DIR}/LinkHifiLibrary.cmake)
link_hifi_library(shared ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(avatars ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(audio ${TARGET_NAME} ${ROOT_DIR})

Binary file not shown.

View file

@ -1,212 +0,0 @@
//
// main.cpp
// eve
//
// Created by Stephen Birarda on 4/22/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include <cstring>
#include <sys/time.h>
#include <cstring>
#include <SharedUtil.h>
#include <NodeTypes.h>
#include <PacketHeaders.h>
#include <NodeList.h>
#include <AvatarData.h>
#include <AudioInjectionManager.h>
#include <AudioInjector.h>
const int EVE_NODE_LISTEN_PORT = 55441;
const float RANDOM_POSITION_MAX_DIMENSION = 10.0f;
const float DATA_SEND_INTERVAL_MSECS = 15;
const float MIN_AUDIO_SEND_INTERVAL_SECS = 10;
const int MIN_ITERATIONS_BETWEEN_AUDIO_SENDS = (MIN_AUDIO_SEND_INTERVAL_SECS * 1000) / DATA_SEND_INTERVAL_MSECS;
const int MAX_AUDIO_SEND_INTERVAL_SECS = 15;
const float MAX_ITERATIONS_BETWEEN_AUDIO_SENDS = (MAX_AUDIO_SEND_INTERVAL_SECS * 1000) / DATA_SEND_INTERVAL_MSECS;
const int ITERATIONS_BEFORE_HAND_GRAB = 100;
const int HAND_GRAB_DURATION_ITERATIONS = 50;
const int HAND_TIMER_SLEEP_ITERATIONS = 50;
const float EVE_PELVIS_HEIGHT = 0.565925f;
const float AUDIO_INJECT_PROXIMITY = 0.4f;
const int EVE_VOLUME_BYTE = 190;
const char EVE_AUDIO_FILENAME[] = "/etc/highfidelity/eve/resources/eve.raw";
bool stopReceiveNodeDataThread;
void *receiveNodeData(void *args) {
sockaddr senderAddress;
ssize_t bytesReceived;
unsigned char incomingPacket[MAX_PACKET_SIZE];
NodeList* nodeList = NodeList::getInstance();
while (!::stopReceiveNodeDataThread) {
if (nodeList->getNodeSocket()->receive(&senderAddress, incomingPacket, &bytesReceived) &&
packetVersionMatch(incomingPacket)) {
switch (incomingPacket[0]) {
case PACKET_TYPE_BULK_AVATAR_DATA:
// this is the positional data for other nodes
// pass that off to the nodeList processBulkNodeData method
nodeList->processBulkNodeData(&senderAddress, incomingPacket, bytesReceived);
break;
default:
// have the nodeList handle the list of nodes from the DS, replies from other nodes, etc.
nodeList->processNodeData(&senderAddress, incomingPacket, bytesReceived);
break;
}
}
}
pthread_exit(0);
return NULL;
}
void createAvatarDataForNode(Node* node) {
if (!node->getLinkedData()) {
node->setLinkedData(new AvatarData(node));
}
}
int main(int argc, const char* argv[]) {
// new seed for random audio sleep times
srand(time(0));
// create an NodeList instance to handle communication with other nodes
NodeList* nodeList = NodeList::createInstance(NODE_TYPE_AGENT, EVE_NODE_LISTEN_PORT);
// start the node list thread that will kill off nodes when they stop talking
nodeList->startSilentNodeRemovalThread();
pthread_t receiveNodeDataThread;
pthread_create(&receiveNodeDataThread, NULL, receiveNodeData, NULL);
// create an AvatarData object, "eve"
AvatarData eve;
// move eve away from the origin
// pick a random point inside a 10x10 grid
eve.setPosition(glm::vec3(randFloatInRange(0, RANDOM_POSITION_MAX_DIMENSION),
EVE_PELVIS_HEIGHT, // this should be the same as the avatar's pelvis standing height
randFloatInRange(0, RANDOM_POSITION_MAX_DIMENSION)));
// face any instance of eve down the z-axis
eve.setBodyYaw(0);
// put her hand out so somebody can shake it
eve.setHandPosition(glm::vec3(eve.getPosition()[0] - 0.2,
0.5,
eve.getPosition()[2] + 0.1));
// prepare the audio injection manager by giving it a handle to our node socket
AudioInjectionManager::setInjectorSocket(nodeList->getNodeSocket());
// read eve's audio data
AudioInjector eveAudioInjector(EVE_AUDIO_FILENAME);
// lower Eve's volume by setting the attenuation modifier (this is a value out of 255)
eveAudioInjector.setVolume(EVE_VOLUME_BYTE);
// set the position of the audio injector
eveAudioInjector.setPosition(eve.getPosition());
// register the callback for node data creation
nodeList->linkedDataCreateCallback = createAvatarDataForNode;
unsigned char broadcastPacket[MAX_PACKET_SIZE];
int numHeaderBytes = populateTypeAndVersion(broadcastPacket, PACKET_TYPE_HEAD_DATA);
timeval thisSend;
int numMicrosecondsSleep = 0;
int handStateTimer = 0;
timeval lastDomainServerCheckIn = {};
// eve wants to hear about an avatar mixer and an audio mixer from the domain server
const char EVE_NODE_TYPES_OF_INTEREST[] = {NODE_TYPE_AVATAR_MIXER, NODE_TYPE_AUDIO_MIXER};
NodeList::getInstance()->setNodeTypesOfInterest(EVE_NODE_TYPES_OF_INTEREST, sizeof(EVE_NODE_TYPES_OF_INTEREST));
while (true) {
// send a check in packet to the domain server if DOMAIN_SERVER_CHECK_IN_USECS has elapsed
if (usecTimestampNow() - usecTimestamp(&lastDomainServerCheckIn) >= DOMAIN_SERVER_CHECK_IN_USECS) {
gettimeofday(&lastDomainServerCheckIn, NULL);
NodeList::getInstance()->sendDomainServerCheckIn();
}
// update the thisSend timeval to the current time
gettimeofday(&thisSend, NULL);
// find the current avatar mixer
Node* avatarMixer = nodeList->soloNodeOfType(NODE_TYPE_AVATAR_MIXER);
// make sure we actually have an avatar mixer with an active socket
if (nodeList->getOwnerID() != UNKNOWN_NODE_ID && avatarMixer && avatarMixer->getActiveSocket() != NULL) {
unsigned char* packetPosition = broadcastPacket + numHeaderBytes;
packetPosition += packNodeId(packetPosition, nodeList->getOwnerID());
// use the getBroadcastData method in the AvatarData class to populate the broadcastPacket buffer
packetPosition += eve.getBroadcastData(packetPosition);
// use the UDPSocket instance attached to our node list to send avatar data to mixer
nodeList->getNodeSocket()->send(avatarMixer->getActiveSocket(), broadcastPacket, packetPosition - broadcastPacket);
}
if (!eveAudioInjector.isInjectingAudio()) {
// enumerate the other nodes to decide if one is close enough that eve should talk
for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
AvatarData* avatarData = (AvatarData*) node->getLinkedData();
if (avatarData) {
glm::vec3 tempVector = eve.getPosition() - avatarData->getPosition();
float squareDistance = glm::dot(tempVector, tempVector);
if (squareDistance <= AUDIO_INJECT_PROXIMITY) {
// look for an audio mixer in our node list
Node* audioMixer = NodeList::getInstance()->soloNodeOfType(NODE_TYPE_AUDIO_MIXER);
if (audioMixer) {
// update the destination socket for the AIM, in case the mixer has changed
AudioInjectionManager::setDestinationSocket(*audioMixer->getPublicSocket());
// we have an active audio mixer we can send data to
AudioInjectionManager::threadInjector(&eveAudioInjector);
}
}
}
}
}
// simulate the effect of pressing and un-pressing the mouse button/pad
handStateTimer++;
if (handStateTimer == ITERATIONS_BEFORE_HAND_GRAB) {
eve.setHandState(1);
} else if (handStateTimer == ITERATIONS_BEFORE_HAND_GRAB + HAND_GRAB_DURATION_ITERATIONS) {
eve.setHandState(0);
} else if (handStateTimer >= ITERATIONS_BEFORE_HAND_GRAB + HAND_GRAB_DURATION_ITERATIONS + HAND_TIMER_SLEEP_ITERATIONS) {
handStateTimer = 0;
}
// sleep for the correct amount of time so that data sends are consistently timed
if ((numMicrosecondsSleep = (DATA_SEND_INTERVAL_MSECS * 1000) - (usecTimestampNow() - usecTimestamp(&thisSend))) > 0) {
usleep(numMicrosecondsSleep);
}
}
// stop the receive node data thread
stopReceiveNodeDataThread = true;
pthread_join(receiveNodeDataThread, NULL);
// stop the node list's threads
nodeList->stopSilentNodeRemovalThread();
}

View file

@ -58,6 +58,7 @@
#include <VoxelSceneStats.h>
#include "Application.h"
#include "DataServerClient.h"
#include "LogDisplay.h"
#include "Menu.h"
#include "Swatch.h"
@ -332,7 +333,15 @@ void Application::paintGL() {
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_myCamera.setTightness (100.0f);
_myCamera.setTargetPosition(_myAvatar.getUprightHeadPosition());
glm::vec3 targetPosition = _myAvatar.getUprightHeadPosition();
if (_myAvatar.getHead().getBlendFace().isActive()) {
// make sure we're aligned to the blend face eyes
glm::vec3 leftEyePosition, rightEyePosition;
if (_myAvatar.getHead().getBlendFace().getEyePositions(leftEyePosition, rightEyePosition, true)) {
targetPosition = (leftEyePosition + rightEyePosition) * 0.5f;
}
}
_myCamera.setTargetPosition(targetPosition);
_myCamera.setTargetRotation(_myAvatar.getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PIf, 0.0f)));
} else if (OculusManager::isConnected()) {
@ -1252,13 +1261,14 @@ void Application::processAvatarURLsMessage(unsigned char* packetData, size_t dat
return;
}
QDataStream in(QByteArray((char*)packetData, dataBytes));
QUrl voxelURL, faceURL;
QUrl voxelURL;
in >> voxelURL;
in >> faceURL;
// invoke the set URL functions on the simulate/render thread
QMetaObject::invokeMethod(avatar->getVoxels(), "setVoxelURL", Q_ARG(QUrl, voxelURL));
QMetaObject::invokeMethod(&avatar->getHead().getBlendFace(), "setModelURL", Q_ARG(QUrl, faceURL));
// use this timing to ask the data-server for an updated mesh for this avatar (if we have a UUID)
DataServerClient::getValueForKeyAndUUID(DataServerKey::FaceMeshURL, avatar->getUUID());
}
void Application::processAvatarFaceVideoMessage(unsigned char* packetData, size_t dataBytes) {
@ -1585,6 +1595,11 @@ void Application::init() {
_audio.setJitterBufferSamples(Menu::getInstance()->getAudioJitterBufferSamples());
}
qDebug("Loaded settings.\n");
if (!_profile.getUsername().isEmpty()) {
// we have a username for this avatar, ask the data-server for the mesh URL for this avatar
DataServerClient::getClientValueForKey(DataServerKey::FaceMeshURL);
}
// Set up VoxelSystem after loading preferences so we can get the desired max voxel count
_voxels.setMaxVoxels(Menu::getInstance()->getMaxVoxels());
@ -1594,7 +1609,7 @@ void Application::init() {
_voxels.init();
Avatar::sendAvatarURLsMessage(_myAvatar.getVoxels()->getVoxelURL(), _myAvatar.getHead().getBlendFace().getModelURL());
Avatar::sendAvatarURLsMessage(_myAvatar.getVoxels()->getVoxelURL());
_palette.init(_glWidget->width(), _glWidget->height());
_palette.addAction(Menu::getInstance()->getActionForOption(MenuOption::VoxelAddMode), 0, 0);
@ -1636,9 +1651,9 @@ Avatar* Application::findLookatTargetAvatar(const glm::vec3& mouseRayOrigin, con
glm::vec3 headPosition = avatar->getHead().getPosition();
float distance;
if (rayIntersectsSphere(mouseRayOrigin, mouseRayDirection, headPosition,
HEAD_SPHERE_RADIUS * avatar->getScale(), distance)) {
HEAD_SPHERE_RADIUS * avatar->getHead().getScale(), distance)) {
eyePosition = avatar->getHead().getEyePosition();
_lookatIndicatorScale = avatar->getScale();
_lookatIndicatorScale = avatar->getHead().getScale();
_lookatOtherPosition = headPosition;
nodeID = avatar->getOwningNode()->getNodeID();
return avatar;
@ -1779,8 +1794,8 @@ void Application::update(float deltaTime) {
_faceshift.getEstimatedEyePitch(), _faceshift.getEstimatedEyeYaw(), 0.0f))) * glm::vec3(0.0f, 0.0f, -1.0f);
}
updateLookatTargetAvatar(lookAtRayOrigin, lookAtRayDirection, lookAtSpot);
if (_lookatTargetAvatar) {
updateLookatTargetAvatar(mouseRayOrigin, mouseRayDirection, lookAtSpot);
if (_lookatTargetAvatar && !_faceshift.isActive()) {
// If the mouse is over another avatar's head...
_myAvatar.getHead().setLookAtPosition(lookAtSpot);
} else if (_isHoverVoxel && !_faceshift.isActive()) {
@ -2169,8 +2184,7 @@ void Application::updateAvatar(float deltaTime) {
// once in a while, send my urls
const float AVATAR_URLS_SEND_INTERVAL = 1.0f; // seconds
if (shouldDo(AVATAR_URLS_SEND_INTERVAL, deltaTime)) {
Avatar::sendAvatarURLsMessage(_myAvatar.getVoxels()->getVoxelURL(),
_myAvatar.getHead().getBlendFace().getModelURL());
Avatar::sendAvatarURLsMessage(_myAvatar.getVoxels()->getVoxelURL());
}
}
}
@ -3608,6 +3622,12 @@ void* Application::networkReceive(void* args) {
case PACKET_TYPE_AVATAR_FACE_VIDEO:
processAvatarFaceVideoMessage(app->_incomingPacket, bytesReceived);
break;
case PACKET_TYPE_DATA_SERVER_GET:
case PACKET_TYPE_DATA_SERVER_PUT:
case PACKET_TYPE_DATA_SERVER_SEND:
case PACKET_TYPE_DATA_SERVER_CONFIRM:
DataServerClient::processMessageFromDataServer(app->_incomingPacket, bytesReceived);
break;
default:
NodeList::getInstance()->processNodeData(&senderAddress, app->_incomingPacket, bytesReceived);
break;

View file

@ -44,6 +44,7 @@
#include "VoxelImporter.h"
#include "avatar/Avatar.h"
#include "avatar/MyAvatar.h"
#include "avatar/Profile.h"
#include "avatar/HandControl.h"
#include "devices/Faceshift.h"
#include "devices/SerialInterface.h"
@ -110,6 +111,7 @@ public:
QGLWidget* getGLWidget() { return _glWidget; }
MyAvatar* getAvatar() { return &_myAvatar; }
Profile* getProfile() { return &_profile; }
Audio* getAudio() { return &_audio; }
Camera* getCamera() { return &_myCamera; }
ViewFrustum* getViewFrustum() { return &_viewFrustum; }
@ -275,6 +277,7 @@ private:
Oscilloscope _audioScope;
MyAvatar _myAvatar; // The rendered avatar of oneself
Profile _profile; // The data-server linked profile for this user
Transmitter _myTransmitter; // Gets UDP data from transmitter app used to animate the avatar

View file

@ -0,0 +1,197 @@
//
// DataServerClient.cpp
// hifi
//
// Created by Stephen Birarda on 10/7/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
#include <QtCore/QUrl>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <UDPSocket.h>
#include <UUID.h>
#include "Application.h"
#include "avatar/Profile.h"
#include "DataServerClient.h"
std::map<unsigned char*, int> DataServerClient::_unmatchedPackets;
const char DATA_SERVER_HOSTNAME[] = "data.highfidelity.io";
const unsigned short DATA_SERVER_PORT = 3282;
const sockaddr_in DATA_SERVER_SOCKET = socketForHostnameAndHostOrderPort(DATA_SERVER_HOSTNAME, DATA_SERVER_PORT);
void DataServerClient::putValueForKey(const char* key, const char* value) {
Profile* userProfile = Application::getInstance()->getProfile();
QString clientString = userProfile->getUUID().isNull()
? userProfile->getUsername()
: uuidStringWithoutCurlyBraces(userProfile->getUUID().toString());
if (!clientString.isEmpty()) {
unsigned char* putPacket = new unsigned char[MAX_PACKET_SIZE];
// setup the header for this packet
int numPacketBytes = populateTypeAndVersion(putPacket, PACKET_TYPE_DATA_SERVER_PUT);
// pack the client string (username or UUID string), null terminated
memcpy(putPacket + numPacketBytes, clientString.toLocal8Bit().constData(), clientString.toLocal8Bit().size());
numPacketBytes += clientString.toLocal8Bit().size();
putPacket[numPacketBytes++] = '\0';
// pack the key, null terminated
strcpy((char*) putPacket + numPacketBytes, key);
numPacketBytes += strlen(key);
putPacket[numPacketBytes++] = '\0';
// pack the value, null terminated
strcpy((char*) putPacket + numPacketBytes, value);
numPacketBytes += strlen(value);
putPacket[numPacketBytes++] = '\0';
// add the putPacket to our map of unmatched packets; it will be deleted once the put is confirmed
_unmatchedPackets.insert(std::pair<unsigned char*, int>(putPacket, numPacketBytes));
// send this put request to the data server
NodeList::getInstance()->getNodeSocket()->send((sockaddr*) &DATA_SERVER_SOCKET, putPacket, numPacketBytes);
}
}
void DataServerClient::getValueForKeyAndUUID(const char* key, const QUuid &uuid) {
if (!uuid.isNull()) {
getValueForKeyAndUserString(key, uuidStringWithoutCurlyBraces(uuid));
}
}
void DataServerClient::getValueForKeyAndUserString(const char* key, const QString& userString) {
unsigned char* getPacket = new unsigned char[MAX_PACKET_SIZE];
// setup the header for this packet
int numPacketBytes = populateTypeAndVersion(getPacket, PACKET_TYPE_DATA_SERVER_GET);
// pack the user string (could be username or UUID string), null-terminate
memcpy(getPacket + numPacketBytes, userString.toLocal8Bit().constData(), userString.toLocal8Bit().size());
numPacketBytes += userString.toLocal8Bit().size();
getPacket[numPacketBytes++] = '\0';
// pack the key, null terminated
strcpy((char*) getPacket + numPacketBytes, key);
int numKeyBytes = strlen(key);
if (numKeyBytes > 0) {
numPacketBytes += numKeyBytes;
getPacket[numPacketBytes++] = '\0';
}
// add the getPacket to our map of unmatched packets; it will be deleted once we get a response from the data-server
_unmatchedPackets.insert(std::pair<unsigned char*, int>(getPacket, numPacketBytes));
// send the get to the data server
NodeList::getInstance()->getNodeSocket()->send((sockaddr*) &DATA_SERVER_SOCKET, getPacket, numPacketBytes);
}
void DataServerClient::getClientValueForKey(const char* key) {
getValueForKeyAndUserString(key, Application::getInstance()->getProfile()->getUsername());
}
void DataServerClient::processConfirmFromDataServer(unsigned char* packetData, int numPacketBytes) {
removeMatchedPacketFromMap(packetData, numPacketBytes);
}
void DataServerClient::processSendFromDataServer(unsigned char* packetData, int numPacketBytes) {
// pull the user string from the packet so we know who to associate this with
int numHeaderBytes = numBytesForPacketHeader(packetData);
char* userStringPosition = (char*) packetData + numHeaderBytes;
QString userString(QByteArray(userStringPosition, strlen(userStringPosition)));
QUuid userUUID(userString);
char* dataKeyPosition = (char*) packetData + numHeaderBytes + strlen(userStringPosition) + sizeof('\0');
char* dataValuePosition = dataKeyPosition + strlen(dataKeyPosition) + sizeof(char);
QString dataValueString(QByteArray(dataValuePosition,
numPacketBytes - ((unsigned char*) dataValuePosition - packetData)));
if (userUUID.isNull()) {
// the user string was a username
// for now assume this means that it is for our avatar
if (strcmp(dataKeyPosition, DataServerKey::FaceMeshURL) == 0) {
// pull the user's face mesh URL and set it on the Profile instance
qDebug("Changing user's face model URL to %s\n", dataValueString.toLocal8Bit().constData());
Application::getInstance()->getProfile()->setFaceModelURL(QUrl(dataValueString));
} else if (strcmp(dataKeyPosition, DataServerKey::UUID) == 0) {
// this is the user's UUID - set it on the profile
Application::getInstance()->getProfile()->setUUID(dataValueString);
}
} else {
// user string was UUID, find matching avatar and associate data
if (strcmp(dataKeyPosition, DataServerKey::FaceMeshURL) == 0) {
NodeList* nodeList = NodeList::getInstance();
for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
if (node->getLinkedData() != NULL && node->getType() == NODE_TYPE_AGENT) {
Avatar* avatar = (Avatar *) node->getLinkedData();
if (avatar->getUUID() == userUUID) {
QMetaObject::invokeMethod(&avatar->getHead().getBlendFace(),
"setModelURL",
Q_ARG(QUrl, QUrl(dataValueString)));
}
}
}
}
}
// remove the matched packet from our map so it isn't re-sent to the data-server
removeMatchedPacketFromMap(packetData, numPacketBytes);
}
void DataServerClient::processMessageFromDataServer(unsigned char* packetData, int numPacketBytes) {
switch (packetData[0]) {
case PACKET_TYPE_DATA_SERVER_SEND:
processSendFromDataServer(packetData, numPacketBytes);
break;
case PACKET_TYPE_DATA_SERVER_CONFIRM:
processConfirmFromDataServer(packetData, numPacketBytes);
break;
default:
break;
}
}
void DataServerClient::removeMatchedPacketFromMap(unsigned char* packetData, int numPacketBytes) {
for (std::map<unsigned char*, int>::iterator mapIterator = _unmatchedPackets.begin();
mapIterator != _unmatchedPackets.end();
++mapIterator) {
if (memcmp(mapIterator->first + sizeof(PACKET_TYPE),
packetData + sizeof(PACKET_TYPE),
numPacketBytes - sizeof(PACKET_TYPE)) == 0) {
// this is a match - remove the confirmed packet from the map and free the associated memory
// so it isn't sent back out
delete[] mapIterator->first;
_unmatchedPackets.erase(mapIterator);
// we've matched the packet - bail out
break;
}
}
}
void DataServerClient::resendUnmatchedPackets() {
for (std::map<unsigned char*, int>::iterator mapIterator = _unmatchedPackets.begin();
mapIterator != _unmatchedPackets.end();
++mapIterator) {
// send the unmatched packet to the data server
NodeList::getInstance()->getNodeSocket()->send((sockaddr*) &DATA_SERVER_SOCKET,
mapIterator->first,
mapIterator->second);
}
}
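The put and get requests built above are just the type/version header followed by NUL-terminated fields: [header][client string\0][key\0][value\0]. A minimal sketch of that framing, using a hypothetical packNullTerminatedString() helper (not part of this commit) and assuming MAX_PACKET_SIZE and populateTypeAndVersion() as used above; the client string and value are placeholders.
#include <cstring>

// hypothetical helper: append one NUL-terminated field and return the bytes written
static int packNullTerminatedString(unsigned char* buffer, const char* field) {
    int length = strlen(field);
    memcpy(buffer, field, length);
    buffer[length] = '\0';   // terminator lets the data-server split the fields
    return length + 1;
}

// usage sketch of the PUT framing
unsigned char putPacket[MAX_PACKET_SIZE];
int numBytes = populateTypeAndVersion(putPacket, PACKET_TYPE_DATA_SERVER_PUT);
numBytes += packNullTerminatedString(putPacket + numBytes, "someUser");                    // username or UUID string
numBytes += packNullTerminatedString(putPacket + numBytes, DataServerKey::FaceMeshURL);    // key ("mesh")
numBytes += packNullTerminatedString(putPacket + numBytes, "http://example.com/face.fst"); // value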

View file

@ -0,0 +1,38 @@
//
// DataServerClient.h
// hifi
//
// Created by Stephen Birarda on 10/7/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
#ifndef __hifi__DataServerClient__
#define __hifi__DataServerClient__
#include <map>
#include <QtCore/QUuid>
#include "Application.h"
class DataServerClient {
public:
static void putValueForKey(const char* key, const char* value);
static void getValueForKeyAndUUID(const char* key, const QUuid& uuid);
static void getValueForKeyAndUserString(const char* key, const QString& userString);
static void getClientValueForKey(const char* key);
static void processConfirmFromDataServer(unsigned char* packetData, int numPacketBytes);
static void processSendFromDataServer(unsigned char* packetData, int numPacketBytes);
static void processMessageFromDataServer(unsigned char* packetData, int numPacketBytes);
static void removeMatchedPacketFromMap(unsigned char* packetData, int numPacketBytes);
static void resendUnmatchedPackets();
private:
static std::map<unsigned char*, int> _unmatchedPackets;
};
namespace DataServerKey {
const char FaceMeshURL[] = "mesh";
const char UUID[] = "uuid";
}
#endif /* defined(__hifi__DataServerClient__) */
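A short usage sketch of the static interface declared here, mirroring the calls this commit adds in Menu.cpp and Application.cpp (the URL is a placeholder):
// store our own face mesh URL on the data-server under the "mesh" key
DataServerClient::putValueForKey(DataServerKey::FaceMeshURL, "http://example.com/face.fst");

// ask the data-server for our own stored value, keyed by the profile username
DataServerClient::getClientValueForKey(DataServerKey::FaceMeshURL);

// ask for another avatar's value, keyed by that avatar's UUID
DataServerClient::getValueForKeyAndUUID(DataServerKey::FaceMeshURL, avatar->getUUID());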

View file

@ -19,8 +19,12 @@
#include <QMainWindow>
#include <QSlider>
#include <QStandardPaths>
#include <QUuid>
#include <UUID.h>
#include "Application.h"
#include "DataServerClient.h"
#include "PairingHandler.h"
#include "Menu.h"
#include "Util.h"
@ -503,6 +507,7 @@ void Menu::loadSettings(QSettings* settings) {
settings->endGroup();
scanMenuBar(&loadAction, settings);
Application::getInstance()->getProfile()->loadData(settings);
Application::getInstance()->getAvatar()->loadData(settings);
Application::getInstance()->getSwatch()->loadData(settings);
}
@ -525,6 +530,7 @@ void Menu::saveSettings(QSettings* settings) {
scanMenuBar(&saveAction, settings);
Application::getInstance()->getAvatar()->saveData(settings);
Application::getInstance()->getProfile()->saveData(settings);
Application::getInstance()->getSwatch()->saveData(settings);
// ask the NodeList to save its data
@ -745,6 +751,7 @@ QLineEdit* lineEditForDomainHostname() {
void Menu::editPreferences() {
Application* applicationInstance = Application::getInstance();
QDialog dialog(applicationInstance->getGLWidget());
dialog.setWindowTitle("Interface Preferences");
QBoxLayout* layout = new QBoxLayout(QBoxLayout::TopToBottom);
@ -753,13 +760,19 @@ void Menu::editPreferences() {
QFormLayout* form = new QFormLayout();
layout->addLayout(form, 1);
QString avatarUsername = applicationInstance->getProfile()->getUsername();
QLineEdit* avatarUsernameEdit = new QLineEdit(avatarUsername);
avatarUsernameEdit->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Username:", avatarUsernameEdit);
QLineEdit* avatarURL = new QLineEdit(applicationInstance->getAvatar()->getVoxels()->getVoxelURL().toString());
avatarURL->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Avatar URL:", avatarURL);
QLineEdit* faceURL = new QLineEdit(applicationInstance->getAvatar()->getHead().getBlendFace().getModelURL().toString());
faceURL->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Face URL:", faceURL);
QString faceURLString = applicationInstance->getProfile()->getFaceModelURL().toString();
QLineEdit* faceURLEdit = new QLineEdit(faceURLString);
faceURLEdit->setMinimumWidth(QLINE_MINIMUM_WIDTH);
form->addRow("Face URL:", faceURLEdit);
QSlider* pupilDilation = new QSlider(Qt::Horizontal);
pupilDilation->setValue(applicationInstance->getAvatar()->getHead().getPupilDilation() * pupilDilation->maximum());
@ -802,13 +815,32 @@ void Menu::editPreferences() {
return;
}
QUrl faceModelURL(faceURLEdit->text());
if (avatarUsernameEdit->text() != avatarUsername) {
// there has been a username change - set the new UUID on the avatar instance
applicationInstance->getProfile()->setUsername(avatarUsernameEdit->text());
if (faceModelURL.toString() == faceURLString && !avatarUsernameEdit->text().isEmpty()) {
// if there was no change to the face model URL then ask the data-server for what it is
DataServerClient::getClientValueForKey(DataServerKey::FaceMeshURL);
}
}
if (faceModelURL.toString() != faceURLString) {
// change the faceModelURL in the profile, it will also update this user's BlendFace
applicationInstance->getProfile()->setFaceModelURL(faceModelURL);
// send the new face mesh URL to the data-server (if we have a client UUID)
DataServerClient::putValueForKey(DataServerKey::FaceMeshURL,
faceModelURL.toString().toLocal8Bit().constData());
}
QUrl avatarVoxelURL(avatarURL->text());
applicationInstance->getAvatar()->getVoxels()->setVoxelURL(avatarVoxelURL);
QUrl faceModelURL(faceURL->text());
applicationInstance->getAvatar()->getHead().getBlendFace().setModelURL(faceModelURL);
Avatar::sendAvatarURLsMessage(avatarVoxelURL, faceModelURL);
Avatar::sendAvatarURLsMessage(avatarVoxelURL);
applicationInstance->getAvatar()->getHead().setPupilDilation(pupilDilation->value() / (float)pupilDilation->maximum());

View file

@ -18,6 +18,7 @@
#include "Application.h"
#include "Avatar.h"
#include "DataServerClient.h"
#include "Hand.h"
#include "Head.h"
#include "Physics.h"
@ -59,7 +60,7 @@ const int NUM_BODY_CONE_SIDES = 9;
const float chatMessageScale = 0.0015;
const float chatMessageHeight = 0.20;
void Avatar::sendAvatarURLsMessage(const QUrl& voxelURL, const QUrl& faceURL) {
void Avatar::sendAvatarURLsMessage(const QUrl& voxelURL) {
uint16_t ownerID = NodeList::getInstance()->getOwnerID();
if (ownerID == UNKNOWN_NODE_ID) {
@ -76,7 +77,6 @@ void Avatar::sendAvatarURLsMessage(const QUrl& voxelURL, const QUrl& faceURL) {
QDataStream out(&message, QIODevice::WriteOnly | QIODevice::Append);
out << voxelURL;
out << faceURL;
Application::controlledBroadcastToNodes((unsigned char*)message.data(), message.size(), &NODE_TYPE_AVATAR_MIXER, 1);
}
@ -102,6 +102,8 @@ Avatar::Avatar(Node* owningNode) :
_leadingAvatar(NULL),
_voxels(this),
_moving(false),
_hoverOnDuration(0.0f),
_hoverOffDuration(0.0f),
_initialized(false),
_handHoldingPosition(0.0f, 0.0f, 0.0f),
_maxArmLength(0.0f),
@ -388,15 +390,27 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
}
// head scale grows when avatar is looked at
const float BASE_MAX_SCALE = 3.0f;
float maxScale = BASE_MAX_SCALE * glm::distance(_position, Application::getInstance()->getCamera()->getPosition());
if (Application::getInstance()->getLookatTargetAvatar() == this) {
const float BASE_MAX_SCALE = 3.0f;
const float GROW_SPEED = 0.1f;
_head.setScale(min(BASE_MAX_SCALE * glm::distance(_position, Application::getInstance()->getCamera()->getPosition()),
_head.getScale() + deltaTime * GROW_SPEED));
_hoverOnDuration += deltaTime;
_hoverOffDuration = 0.0f;
const float GROW_DELAY = 1.0f;
const float GROW_RATE = 0.25f;
if (_hoverOnDuration > GROW_DELAY) {
_head.setScale(glm::mix(_head.getScale(), maxScale, GROW_RATE));
}
} else {
const float SHRINK_SPEED = 100.0f;
_head.setScale(max(_scale, _head.getScale() - deltaTime * SHRINK_SPEED));
_hoverOnDuration = 0.0f;
_hoverOffDuration += deltaTime;
const float SHRINK_DELAY = 1.0f;
const float SHRINK_RATE = 0.25f;
if (_hoverOffDuration > SHRINK_DELAY) {
_head.setScale(glm::mix(_head.getScale(), 1.0f, SHRINK_RATE));
}
}
_head.setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll));
@ -465,15 +479,16 @@ void Avatar::render(bool lookingInMirror, bool renderAvatarBalls) {
renderDiskShadow(_position, glm::vec3(0.0f, 1.0f, 0.0f), _scale * 0.1f, 0.2f);
{
// glow when moving
Glower glower(_moving ? 1.0f : 0.0f);
// glow when moving in the distance
glm::vec3 toTarget = _position - Application::getInstance()->getAvatar()->getPosition();
const float GLOW_DISTANCE = 5.0f;
Glower glower(_moving && glm::length(toTarget) > GLOW_DISTANCE ? 1.0f : 0.0f);
// render body
renderBody(lookingInMirror, renderAvatarBalls);
// render sphere when far away
const float MAX_ANGLE = 10.f;
glm::vec3 toTarget = _position - Application::getInstance()->getAvatar()->getPosition();
glm::vec3 delta = _height * (_head.getCameraOrientation() * IDENTITY_UP) / 2.f;
float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
@ -751,31 +766,6 @@ void Avatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
_hand.render(lookingInMirror);
}
void Avatar::loadData(QSettings* settings) {
settings->beginGroup("Avatar");
// in case the settings file is corrupt or missing, loadSetting() will check for NaN
_bodyYaw = loadSetting(settings, "bodyYaw", 0.0f);
_bodyPitch = loadSetting(settings, "bodyPitch", 0.0f);
_bodyRoll = loadSetting(settings, "bodyRoll", 0.0f);
_position.x = loadSetting(settings, "position_x", 0.0f);
_position.y = loadSetting(settings, "position_y", 0.0f);
_position.z = loadSetting(settings, "position_z", 0.0f);
_voxels.setVoxelURL(settings->value("voxelURL").toUrl());
_head.getBlendFace().setModelURL(settings->value("faceModelURL").toUrl());
_head.setPupilDilation(settings->value("pupilDilation", 0.0f).toFloat());
_leanScale = loadSetting(settings, "leanScale", 0.05f);
_newScale = loadSetting(settings, "scale", 1.0f);
setScale(_scale);
Application::getInstance()->getCamera()->setScale(_scale);
settings->endGroup();
}
void Avatar::getBodyBallTransform(AvatarJointID jointID, glm::vec3& position, glm::quat& rotation) const {
position = _bodyBall[jointID].position;
rotation = _bodyBall[jointID].rotation;
@ -805,27 +795,6 @@ int Avatar::parseData(unsigned char* sourceBuffer, int numBytes) {
return bytesRead;
}
void Avatar::saveData(QSettings* set) {
set->beginGroup("Avatar");
set->setValue("bodyYaw", _bodyYaw);
set->setValue("bodyPitch", _bodyPitch);
set->setValue("bodyRoll", _bodyRoll);
set->setValue("position_x", _position.x);
set->setValue("position_y", _position.y);
set->setValue("position_z", _position.z);
set->setValue("voxelURL", _voxels.getVoxelURL());
set->setValue("faceModelURL", _head.getBlendFace().getModelURL());
set->setValue("pupilDilation", _head.getPupilDilation());
set->setValue("leanScale", _leanScale);
set->setValue("scale", _newScale);
set->endGroup();
}
// render a makeshift cone section that serves as a body part connecting joint spheres
void Avatar::renderJointConnectingCone(glm::vec3 position1, glm::vec3 position2, float radius1, float radius2) {
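The hover-on/hover-off logic above grows and shrinks the head by calling glm::mix() on the scale once per frame, which closes a fixed fraction of the remaining gap each step and so eases toward the target instead of snapping. A minimal standalone illustration of that pattern (the rate here is illustrative, not the tuned GROW_RATE/SHRINK_RATE above):
#include <glm/glm.hpp>

// one frame of exponential smoothing: move 'rate' of the remaining distance toward the target
float easeToward(float current, float target, float rate) {
    return glm::mix(current, target, rate);
}

// e.g. repeatedly calling scale = easeToward(scale, maxScale, 0.25f) closes 25% of the gap per frame,
// which is what the hover grow/shrink branches above do once their delay has elapsed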

View file

@ -11,7 +11,7 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <QSettings>
#include <QtCore/QUuid>
#include <AvatarData.h>
@ -129,7 +129,7 @@ class Avatar : public AvatarData {
Q_OBJECT
public:
static void sendAvatarURLsMessage(const QUrl& voxelURL, const QUrl& faceURL);
static void sendAvatarURLsMessage(const QUrl& voxelURL);
Avatar(Node* owningNode = NULL);
~Avatar();
@ -155,10 +155,6 @@ public:
glm::quat getOrientation() const;
glm::quat getWorldAlignedOrientation() const;
AvatarVoxelSystem* getVoxels() { return &_voxels; }
// get/set avatar data
void saveData(QSettings* set);
void loadData(QSettings* set);
// Get the position/rotation of a single body ball
void getBodyBallTransform(AvatarJointID jointID, glm::vec3& position, glm::quat& rotation) const;
@ -226,6 +222,8 @@ protected:
AvatarVoxelSystem _voxels;
bool _moving; ///< set when position is changing
float _hoverOnDuration;
float _hoverOffDuration;
// protected methods...
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
@ -247,6 +245,7 @@ private:
float _maxArmLength;
float _pelvisStandingHeight;
// private methods...
glm::vec3 calculateAverageEyePosition() { return _head.calculateAverageEyePosition(); } // get the position smack-dab between the eyes (for lookat)
float getBallRenderAlpha(int ball, bool lookingInMirror) const;

View file

@ -8,6 +8,8 @@
#include <QNetworkReply>
#include <glm/gtx/transform.hpp>
#include "Application.h"
#include "BlendFace.h"
#include "Head.h"
@ -41,21 +43,117 @@ void BlendFace::init() {
}
}
void BlendFace::reset() {
_resetStates = true;
}
const glm::vec3 MODEL_TRANSLATION(0.0f, -120.0f, 40.0f); // temporary fudge factor
const float MODEL_SCALE = 0.0006f;
bool BlendFace::render(float alpha) {
void BlendFace::simulate(float deltaTime) {
if (!isActive()) {
return;
}
// set up world vertices on first simulate after load
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (_meshStates.isEmpty()) {
QVector<glm::vec3> vertices;
foreach (const FBXMesh& mesh, geometry.meshes) {
MeshState state;
if (mesh.springiness > 0.0f) {
state.worldSpaceVertices.resize(mesh.vertices.size());
state.vertexVelocities.resize(mesh.vertices.size());
state.worldSpaceNormals.resize(mesh.vertices.size());
}
_meshStates.append(state);
}
_resetStates = true;
}
glm::quat orientation = _owningHead->getOrientation();
glm::vec3 scale = glm::vec3(-1.0f, 1.0f, -1.0f) * _owningHead->getScale() * MODEL_SCALE;
glm::vec3 offset = MODEL_TRANSLATION - _geometry->getFBXGeometry().neckPivot;
glm::mat4 baseTransform = glm::translate(_owningHead->getPosition()) * glm::mat4_cast(orientation) *
glm::scale(scale) * glm::translate(offset);
for (int i = 0; i < _meshStates.size(); i++) {
MeshState& state = _meshStates[i];
int vertexCount = state.worldSpaceVertices.size();
if (vertexCount == 0) {
continue;
}
glm::vec3* destVertices = state.worldSpaceVertices.data();
glm::vec3* destVelocities = state.vertexVelocities.data();
glm::vec3* destNormals = state.worldSpaceNormals.data();
const FBXMesh& mesh = geometry.meshes.at(i);
const glm::vec3* sourceVertices = mesh.vertices.constData();
if (!mesh.blendshapes.isEmpty()) {
_blendedVertices.resize(max(_blendedVertices.size(), vertexCount));
memcpy(_blendedVertices.data(), mesh.vertices.constData(), vertexCount * sizeof(glm::vec3));
// blend in each coefficient
const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
for (int j = 0; j < coefficients.size(); j++) {
float coefficient = coefficients[j];
if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
continue;
}
const glm::vec3* vertex = mesh.blendshapes[j].vertices.constData();
for (const int* index = mesh.blendshapes[j].indices.constData(),
*end = index + mesh.blendshapes[j].indices.size(); index != end; index++, vertex++) {
_blendedVertices[*index] += *vertex * coefficient;
}
}
sourceVertices = _blendedVertices.constData();
}
glm::mat4 transform = baseTransform;
if (mesh.isEye) {
transform = transform * glm::translate(mesh.pivot) * glm::mat4_cast(glm::inverse(orientation) *
_owningHead->getEyeRotation(orientation * ((mesh.pivot + offset) * scale) + _owningHead->getPosition())) *
glm::translate(-mesh.pivot);
}
if (_resetStates) {
for (int j = 0; j < vertexCount; j++) {
destVertices[j] = glm::vec3(transform * glm::vec4(sourceVertices[j], 1.0f));
destVelocities[j] = glm::vec3();
}
} else {
const float SPRINGINESS_MULTIPLIER = 200.0f;
const float DAMPING = 5.0f;
for (int j = 0; j < vertexCount; j++) {
destVelocities[j] += ((glm::vec3(transform * glm::vec4(sourceVertices[j], 1.0f)) - destVertices[j]) *
mesh.springiness * SPRINGINESS_MULTIPLIER - destVelocities[j] * DAMPING) * deltaTime;
destVertices[j] += destVelocities[j] * deltaTime;
}
}
for (int j = 0; j < vertexCount; j++) {
destNormals[j] = glm::vec3();
const glm::vec3& middle = destVertices[j];
for (QVarLengthArray<QPair<int, int>, 4>::const_iterator connection = mesh.vertexConnections.at(j).constBegin();
connection != mesh.vertexConnections.at(j).constEnd(); connection++) {
destNormals[j] += glm::normalize(glm::cross(destVertices[connection->second] - middle,
destVertices[connection->first] - middle));
}
}
}
_resetStates = false;
}
bool BlendFace::render(float alpha) {
if (_meshStates.isEmpty()) {
return false;
}
// set up blended buffer ids on first render after load
// set up blended buffer ids on first render after load/simulate
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();
if (_blendedVertexBufferIDs.isEmpty()) {
foreach (const FBXMesh& mesh, geometry.meshes) {
GLuint id = 0;
if (!mesh.blendshapes.isEmpty()) {
if (!mesh.blendshapes.isEmpty() || mesh.springiness > 0.0f) {
glGenBuffers(1, &id);
glBindBuffer(GL_ARRAY_BUFFER, id);
glBufferData(GL_ARRAY_BUFFER, (mesh.vertices.size() + mesh.normals.size()) * sizeof(glm::vec3),
@ -69,6 +167,9 @@ bool BlendFace::render(float alpha) {
_dilatedTextures.resize(geometry.meshes.size());
}
glm::mat4 viewMatrix;
glGetFloatv(GL_MODELVIEW_MATRIX, (GLfloat*)&viewMatrix);
glPushMatrix();
glTranslatef(_owningHead->getPosition().x, _owningHead->getPosition().y, _owningHead->getPosition().z);
glm::quat orientation = _owningHead->getOrientation();
@ -90,6 +191,9 @@ bool BlendFace::render(float alpha) {
glEnable(GL_TEXTURE_2D);
glDisable(GL_COLOR_MATERIAL);
// the eye shader uses the color state even though color material is disabled
glColor4f(1.0f, 1.0f, 1.0f, alpha);
for (int i = 0; i < networkMeshes.size(); i++) {
const NetworkMesh& networkMesh = networkMeshes.at(i);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, networkMesh.indexBufferID);
@ -130,39 +234,49 @@ bool BlendFace::render(float alpha) {
glBindTexture(GL_TEXTURE_2D, texture == NULL ? 0 : texture->getID());
glBindBuffer(GL_ARRAY_BUFFER, networkMesh.vertexBufferID);
if (mesh.blendshapes.isEmpty()) {
if (mesh.blendshapes.isEmpty() && mesh.springiness == 0.0f) {
glTexCoordPointer(2, GL_FLOAT, 0, (void*)(vertexCount * 2 * sizeof(glm::vec3)));
} else {
glTexCoordPointer(2, GL_FLOAT, 0, 0);
_blendedVertices.resize(max(_blendedVertices.size(), vertexCount));
_blendedNormals.resize(_blendedVertices.size());
memcpy(_blendedVertices.data(), mesh.vertices.constData(), vertexCount * sizeof(glm::vec3));
memcpy(_blendedNormals.data(), mesh.normals.constData(), vertexCount * sizeof(glm::vec3));
// blend in each coefficient
const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
for (int j = 0; j < coefficients.size(); j++) {
float coefficient = coefficients[j];
if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
continue;
}
const float NORMAL_COEFFICIENT_SCALE = 0.01f;
float normalCoefficient = coefficient * NORMAL_COEFFICIENT_SCALE;
const glm::vec3* vertex = mesh.blendshapes[j].vertices.constData();
const glm::vec3* normal = mesh.blendshapes[j].normals.constData();
for (const int* index = mesh.blendshapes[j].indices.constData(),
*end = index + mesh.blendshapes[j].indices.size(); index != end; index++, vertex++, normal++) {
_blendedVertices[*index] += *vertex * coefficient;
_blendedNormals[*index] += *normal * normalCoefficient;
}
}
glBindBuffer(GL_ARRAY_BUFFER, _blendedVertexBufferIDs.at(i));
glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount * sizeof(glm::vec3), _blendedVertices.constData());
glBufferSubData(GL_ARRAY_BUFFER, vertexCount * sizeof(glm::vec3),
vertexCount * sizeof(glm::vec3), _blendedNormals.constData());
const MeshState& state = _meshStates.at(i);
if (!state.worldSpaceVertices.isEmpty()) {
glLoadMatrixf((const GLfloat*)&viewMatrix);
glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount * sizeof(glm::vec3), state.worldSpaceVertices.constData());
glBufferSubData(GL_ARRAY_BUFFER, vertexCount * sizeof(glm::vec3),
vertexCount * sizeof(glm::vec3), state.worldSpaceNormals.constData());
} else {
_blendedVertices.resize(max(_blendedVertices.size(), vertexCount));
_blendedNormals.resize(_blendedVertices.size());
memcpy(_blendedVertices.data(), mesh.vertices.constData(), vertexCount * sizeof(glm::vec3));
memcpy(_blendedNormals.data(), mesh.normals.constData(), vertexCount * sizeof(glm::vec3));
// blend in each coefficient
const vector<float>& coefficients = _owningHead->getBlendshapeCoefficients();
for (int j = 0; j < coefficients.size(); j++) {
float coefficient = coefficients[j];
if (coefficient == 0.0f || j >= mesh.blendshapes.size() || mesh.blendshapes[j].vertices.isEmpty()) {
continue;
}
const float NORMAL_COEFFICIENT_SCALE = 0.01f;
float normalCoefficient = coefficient * NORMAL_COEFFICIENT_SCALE;
const glm::vec3* vertex = mesh.blendshapes[j].vertices.constData();
const glm::vec3* normal = mesh.blendshapes[j].normals.constData();
for (const int* index = mesh.blendshapes[j].indices.constData(),
*end = index + mesh.blendshapes[j].indices.size(); index != end; index++, vertex++, normal++) {
_blendedVertices[*index] += *vertex * coefficient;
_blendedNormals[*index] += *normal * normalCoefficient;
}
}
glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount * sizeof(glm::vec3), _blendedVertices.constData());
glBufferSubData(GL_ARRAY_BUFFER, vertexCount * sizeof(glm::vec3),
vertexCount * sizeof(glm::vec3), _blendedNormals.constData());
}
}
glVertexPointer(3, GL_FLOAT, 0, 0);
glNormalPointer(GL_FLOAT, 0, (void*)(vertexCount * sizeof(glm::vec3)));
@ -199,12 +313,16 @@ bool BlendFace::render(float alpha) {
return true;
}
void BlendFace::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
bool BlendFace::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition, bool upright) const {
if (!isActive()) {
return;
return false;
}
glm::vec3 translation = _owningHead->getPosition();
glm::quat orientation = _owningHead->getOrientation();
if (upright) {
translation = static_cast<MyAvatar*>(_owningHead->_owningAvatar)->getUprightHeadPosition();
orientation = static_cast<Avatar*>(_owningHead->_owningAvatar)->getWorldAlignedOrientation();
}
glm::vec3 scale(-_owningHead->getScale() * MODEL_SCALE, _owningHead->getScale() * MODEL_SCALE,
-_owningHead->getScale() * MODEL_SCALE);
bool foundFirst = false;
@ -212,16 +330,16 @@ void BlendFace::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEy
const FBXGeometry& geometry = _geometry->getFBXGeometry();
foreach (const FBXMesh& mesh, geometry.meshes) {
if (mesh.isEye) {
glm::vec3 position = orientation * ((mesh.pivot + MODEL_TRANSLATION - geometry.neckPivot) * scale) +
_owningHead->getPosition();
glm::vec3 position = orientation * ((mesh.pivot + MODEL_TRANSLATION - geometry.neckPivot) * scale) + translation;
if (foundFirst) {
secondEyePosition = position;
return;
return true;
}
firstEyePosition = position;
foundFirst = true;
}
}
return false;
}
void BlendFace::setModelURL(const QUrl& url) {
@ -243,4 +361,5 @@ void BlendFace::deleteGeometry() {
glDeleteBuffers(1, &id);
}
_blendedVertexBufferIDs.clear();
_meshStates.clear();
}
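The springiness added to simulate() above is a plain explicit spring-damper integration per vertex: accelerate toward the freshly transformed rest position, damp the current velocity, then take an Euler step. A minimal standalone sketch of that update for a single vertex, using the same constants as above:
#include <glm/glm.hpp>

void integrateSpringVertex(const glm::vec3& restPosition, glm::vec3& position, glm::vec3& velocity,
                           float springiness, float deltaTime) {
    const float SPRINGINESS_MULTIPLIER = 200.0f;
    const float DAMPING = 5.0f;
    // spring force pulls toward the rest position; damping opposes the current velocity
    velocity += ((restPosition - position) * springiness * SPRINGINESS_MULTIPLIER
                 - velocity * DAMPING) * deltaTime;
    position += velocity * deltaTime; // explicit Euler step
}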

View file

@ -33,12 +33,17 @@ public:
bool isActive() const { return _geometry && _geometry->isLoaded(); }
void init();
void reset();
void simulate(float deltaTime);
bool render(float alpha);
Q_INVOKABLE void setModelURL(const QUrl& url);
const QUrl& getModelURL() const { return _modelURL; }
void getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
/// Retrieve the positions of up to two eye meshes.
/// \param upright if true, retrieve the locations of the eyes in the upright position
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition, bool upright = false) const;
private:
@ -50,8 +55,17 @@ private:
QSharedPointer<NetworkGeometry> _geometry;
class MeshState {
public:
QVector<glm::vec3> worldSpaceVertices;
QVector<glm::vec3> vertexVelocities;
QVector<glm::vec3> worldSpaceNormals;
};
QVector<MeshState> _meshStates;
QVector<GLuint> _blendedVertexBufferIDs;
QVector<QSharedPointer<Texture> > _dilatedTextures;
bool _resetStates;
QVector<glm::vec3> _blendedVertices;
QVector<glm::vec3> _blendedNormals;

View file

@ -117,6 +117,8 @@ void Head::reset() {
if (USING_PHYSICAL_MOHAWK) {
resetHairPhysics();
}
_blendFace.reset();
}
void Head::resetHairPhysics() {
@ -235,6 +237,8 @@ void Head::simulate(float deltaTime, bool isMine) {
if (USING_PHYSICAL_MOHAWK) {
updateHairPhysics(deltaTime);
}
_blendFace.simulate(deltaTime);
}
void Head::calculateGeometry() {
@ -300,15 +304,14 @@ void Head::render(float alpha, bool isMine) {
renderEyeBrows();
}
}
if (_blendFace.isActive()) {
// the blend face may have custom eye meshes
_blendFace.getEyePositions(_leftEyePosition, _rightEyePosition);
}
if (_renderLookatVectors) {
glm::vec3 firstEyePosition = _leftEyePosition;
glm::vec3 secondEyePosition = _rightEyePosition;
if (_blendFace.isActive()) {
// the blend face may have custom eye meshes
_blendFace.getEyePositions(firstEyePosition, secondEyePosition);
}
renderLookatVectors(firstEyePosition, secondEyePosition, _lookAtPosition);
renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition);
}
}

View file

@ -158,6 +158,7 @@ private:
void resetHairPhysics();
void updateHairPhysics(float deltaTime);
friend class BlendFace;
friend class PerlinFace;
};

View file

@ -16,6 +16,7 @@
#include <SharedUtil.h>
#include "Application.h"
#include "DataServerClient.h"
#include "MyAvatar.h"
#include "Physics.h"
#include "devices/OculusManager.h"
@ -524,6 +525,50 @@ void MyAvatar::renderScreenTint(ScreenTintLayer layer, Camera& whichCamera) {
}
}
void MyAvatar::saveData(QSettings* settings) {
settings->beginGroup("Avatar");
settings->setValue("bodyYaw", _bodyYaw);
settings->setValue("bodyPitch", _bodyPitch);
settings->setValue("bodyRoll", _bodyRoll);
settings->setValue("position_x", _position.x);
settings->setValue("position_y", _position.y);
settings->setValue("position_z", _position.z);
settings->setValue("voxelURL", _voxels.getVoxelURL());
settings->setValue("pupilDilation", _head.getPupilDilation());
settings->setValue("leanScale", _leanScale);
settings->setValue("scale", _newScale);
settings->endGroup();
}
void MyAvatar::loadData(QSettings* settings) {
settings->beginGroup("Avatar");
// in case the settings file is corrupt or missing, loadSetting() will check for NaN
_bodyYaw = loadSetting(settings, "bodyYaw", 0.0f);
_bodyPitch = loadSetting(settings, "bodyPitch", 0.0f);
_bodyRoll = loadSetting(settings, "bodyRoll", 0.0f);
_position.x = loadSetting(settings, "position_x", 0.0f);
_position.y = loadSetting(settings, "position_y", 0.0f);
_position.z = loadSetting(settings, "position_z", 0.0f);
_voxels.setVoxelURL(settings->value("voxelURL").toUrl());
_head.setPupilDilation(settings->value("pupilDilation", 0.0f).toFloat());
_leanScale = loadSetting(settings, "leanScale", 0.05f);
_newScale = loadSetting(settings, "scale", 1.0f);
setScale(_scale);
Application::getInstance()->getCamera()->setScale(_scale);
settings->endGroup();
}
float MyAvatar::getAbsoluteHeadYaw() const {
return glm::yaw(_head.getOrientation());
}
@ -555,9 +600,6 @@ void MyAvatar::renderBody(bool lookingInMirror, bool renderAvatarBalls) {
return;
}
// glow when moving
Glower glower(_moving ? 1.0f : 0.0f);
if (_head.getFace().isFullFrame()) {
// Render the full-frame video
float alpha = getBallRenderAlpha(BODY_BALL_HEAD_BASE, lookingInMirror);

View file

@ -9,6 +9,8 @@
#ifndef __interface__myavatar__
#define __interface__myavatar__
#include <QSettings>
#include "Avatar.h"
class MyAvatar : public Avatar {
@ -47,6 +49,10 @@ public:
glm::vec3 getGravity() const { return _gravity; }
glm::vec3 getUprightHeadPosition() const;
glm::vec3 getUprightEyeLevelPosition() const;
// get/set avatar data
void saveData(QSettings* settings);
void loadData(QSettings* settings);
// Set what driving keys are being pressed to control thrust levels
void setDriveKeys(int key, bool val) { _driveKeys[key] = val; };
@ -57,7 +63,7 @@ public:
void addThrust(glm::vec3 newThrust) { _thrust += newThrust; };
glm::vec3 getThrust() { return _thrust; };
private:
private:
bool _mousePressed;
float _bodyPitchDelta;
float _bodyRollDelta;

View file

@ -0,0 +1,71 @@
//
// Profile.cpp
// hifi
//
// Created by Stephen Birarda on 10/8/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
#include <QtCore/QSettings>
#include "Profile.h"
#include "DataServerClient.h"
Profile::Profile() :
_username(),
_uuid(),
_faceModelURL()
{
}
void Profile::clear() {
_username.clear();
_uuid = QUuid();
_faceModelURL.clear();
}
void Profile::setUsername(const QString &username) {
this->clear();
_username = username;
if (!_username.isEmpty()) {
// we've been given a new username, ask the data-server for our UUID
DataServerClient::getClientValueForKey(DataServerKey::UUID);
}
}
void Profile::setUUID(const QUuid& uuid) {
_uuid = uuid;
// when the UUID is changed we need to set it appropriately on our avatar instance
Application::getInstance()->getAvatar()->setUUID(_uuid);
}
void Profile::setFaceModelURL(const QUrl& faceModelURL) {
_faceModelURL = faceModelURL;
QMetaObject::invokeMethod(&Application::getInstance()->getAvatar()->getHead().getBlendFace(),
"setModelURL",
Q_ARG(QUrl, _faceModelURL));
}
void Profile::saveData(QSettings* settings) {
settings->beginGroup("Profile");
settings->setValue("username", _username);
settings->setValue("UUID", _uuid);
settings->setValue("faceModelURL", _faceModelURL);
settings->endGroup();
}
void Profile::loadData(QSettings* settings) {
settings->beginGroup("Profile");
_username = settings->value("username").toString();
this->setUUID(settings->value("UUID").toUuid());
_faceModelURL = settings->value("faceModelURL").toUrl();
settings->endGroup();
}
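A short usage sketch of the new Profile flow as wired up elsewhere in this commit: Menu::editPreferences() sets the username and face model URL, and Menu::saveSettings()/loadSettings() persist them (the values below are placeholders):
Profile* profile = Application::getInstance()->getProfile();

// a username change clears stale state and asks the data-server for the matching UUID
profile->setUsername("someUser");

// setting the face model URL also forwards it to this avatar's BlendFace via invokeMethod
profile->setFaceModelURL(QUrl("http://example.com/face.fst"));

// persisted alongside the avatar settings in Menu::saveSettings()/loadSettings()
QSettings settings;
profile->saveData(&settings);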

View file

@ -0,0 +1,39 @@
//
// Profile.h
// hifi
//
// Created by Stephen Birarda on 10/8/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
#ifndef __hifi__Profile__
#define __hifi__Profile__
#include <QtCore/QString>
#include <QtCore/QUrl>
#include <QtCore/QUuid>
class Profile {
public:
Profile();
void setUsername(const QString& username);
QString& getUsername() { return _username; }
void setUUID(const QUuid& uuid);
QUuid& getUUID() { return _uuid; }
void setFaceModelURL(const QUrl& faceModelURL);
QUrl& getFaceModelURL() { return _faceModelURL; }
void clear();
void saveData(QSettings* settings);
void loadData(QSettings* settings);
private:
QString _username;
QUuid _uuid;
QUrl _faceModelURL;
};
#endif /* defined(__hifi__Profile__) */

View file

@ -300,6 +300,7 @@ const char* FACESHIFT_BLENDSHAPES[] = {
class Transform {
public:
QByteArray name;
bool inheritScale;
glm::mat4 withScale;
glm::mat4 withoutScale;
@ -535,7 +536,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
glm::vec3 preRotation, rotation, postRotation;
glm::vec3 scale = glm::vec3(1.0f, 1.0f, 1.0f);
glm::vec3 scalePivot, rotationPivot;
Transform transform = { true };
Transform transform = { name, true };
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "Properties70") {
foreach (const FBXNode& property, subobject.children) {
@ -683,14 +684,15 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
mapping.value("ry").toFloat(), mapping.value("rz").toFloat())))) *
glm::scale(offsetScale, offsetScale, offsetScale);
// as a temporary hack, put the mesh with the most blendshapes on top; assume it to be the face
FBXGeometry geometry;
int mostBlendshapes = 0;
QVariantHash springs = mapping.value("spring").toHash();
QVariant defaultSpring = springs.value("default");
for (QHash<qint64, FBXMesh>::iterator it = meshes.begin(); it != meshes.end(); it++) {
FBXMesh& mesh = it.value();
// accumulate local transforms
qint64 modelID = parentMap.value(it.key());
mesh.springiness = springs.value(localTransforms.value(modelID).name, defaultSpring).toFloat();
glm::mat4 modelTransform = getGlobalTransform(parentMap, localTransforms, modelID);
// look for textures, material properties
@ -735,13 +737,47 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
}
}
if (mesh.blendshapes.size() > mostBlendshapes) {
geometry.meshes.prepend(mesh);
mostBlendshapes = mesh.blendshapes.size();
// extract spring edges, connections if springy
if (mesh.springiness > 0.0f) {
QSet<QPair<int, int> > edges;
} else {
geometry.meshes.append(mesh);
mesh.vertexConnections.resize(mesh.vertices.size());
for (int i = 0; i < mesh.quadIndices.size(); i += 4) {
int index0 = mesh.quadIndices.at(i);
int index1 = mesh.quadIndices.at(i + 1);
int index2 = mesh.quadIndices.at(i + 2);
int index3 = mesh.quadIndices.at(i + 3);
edges.insert(QPair<int, int>(qMin(index0, index1), qMax(index0, index1)));
edges.insert(QPair<int, int>(qMin(index1, index2), qMax(index1, index2)));
edges.insert(QPair<int, int>(qMin(index2, index3), qMax(index2, index3)));
edges.insert(QPair<int, int>(qMin(index3, index0), qMax(index3, index0)));
mesh.vertexConnections[index0].append(QPair<int, int>(index3, index1));
mesh.vertexConnections[index1].append(QPair<int, int>(index0, index2));
mesh.vertexConnections[index2].append(QPair<int, int>(index1, index3));
mesh.vertexConnections[index3].append(QPair<int, int>(index2, index0));
}
for (int i = 0; i < mesh.triangleIndices.size(); i += 3) {
int index0 = mesh.triangleIndices.at(i);
int index1 = mesh.triangleIndices.at(i + 1);
int index2 = mesh.triangleIndices.at(i + 2);
edges.insert(QPair<int, int>(qMin(index0, index1), qMax(index0, index1)));
edges.insert(QPair<int, int>(qMin(index1, index2), qMax(index1, index2)));
edges.insert(QPair<int, int>(qMin(index2, index0), qMax(index2, index0)));
mesh.vertexConnections[index0].append(QPair<int, int>(index2, index1));
mesh.vertexConnections[index1].append(QPair<int, int>(index0, index2));
mesh.vertexConnections[index2].append(QPair<int, int>(index1, index0));
}
for (QSet<QPair<int, int> >::const_iterator edge = edges.constBegin(); edge != edges.constEnd(); edge++) {
mesh.springEdges.append(*edge);
}
}
geometry.meshes.append(mesh);
}
// extract translation component for neck pivot
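A minimal standalone sketch of the edge bookkeeping added above, assuming a plain triangle index buffer; the function name and signature are invented here for illustration and are not part of this commit. Each undirected edge is stored with its smaller index first so the QSet collapses edges shared by neighboring faces, exactly as the hunk does before copying them into mesh.springEdges:

#include <QPair>
#include <QSet>
#include <QVector>
#include <QtGlobal>

// Sketch only: collect the unique undirected edges of a triangle list.
static QVector<QPair<int, int> > uniqueTriangleEdges(const QVector<int>& triangleIndices) {
    QSet<QPair<int, int> > edges;
    for (int i = 0; i + 2 < triangleIndices.size(); i += 3) {
        int index0 = triangleIndices.at(i);
        int index1 = triangleIndices.at(i + 1);
        int index2 = triangleIndices.at(i + 2);
        // store each edge as (min, max) so (a, b) and (b, a) are the same key
        edges.insert(QPair<int, int>(qMin(index0, index1), qMax(index0, index1)));
        edges.insert(QPair<int, int>(qMin(index1, index2), qMax(index1, index2)));
        edges.insert(QPair<int, int>(qMin(index2, index0), qMax(index2, index0)));
    }
    QVector<QPair<int, int> > springEdges;
    for (QSet<QPair<int, int> >::const_iterator edge = edges.constBegin(); edge != edges.constEnd(); ++edge) {
        springEdges.append(*edge);
    }
    return springEdges;
}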

View file

@ -9,6 +9,7 @@
#ifndef __interface__FBXReader__
#define __interface__FBXReader__
#include <QVarLengthArray>
#include <QVariant>
#include <QVector>
@ -59,6 +60,10 @@ public:
QByteArray normalFilename;
QVector<FBXBlendshape> blendshapes;
float springiness;
QVector<QPair<int, int> > springEdges;
QVector<QVarLengthArray<QPair<int, int>, 4> > vertexConnections;
};
/// A set of meshes extracted from an FBX document.
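The inline capacity of 4 on vertexConnections is worth a note: QVarLengthArray keeps that many entries in preallocated storage and only spills to the heap when a vertex has more connections. A tiny usage sketch, not part of this commit:

#include <QPair>
#include <QVarLengthArray>

int main() {
    // up to 4 neighbor pairs fit in the array's preallocated storage;
    // a fifth append still works, it just falls back to heap allocation
    QVarLengthArray<QPair<int, int>, 4> connections;
    connections.append(QPair<int, int>(3, 1));
    connections.append(QPair<int, int>(0, 2));
    return connections.size() == 2 ? 0 : 1;
}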

View file

@ -346,7 +346,7 @@ void NetworkGeometry::maybeReadModelWithMapping() {
glGenBuffers(1, &networkMesh.vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, networkMesh.vertexBufferID);
if (mesh.blendshapes.isEmpty()) {
if (mesh.blendshapes.isEmpty() && mesh.springiness == 0.0f) {
glBufferData(GL_ARRAY_BUFFER, (mesh.vertices.size() + mesh.normals.size()) * sizeof(glm::vec3) +
mesh.texCoords.size() * sizeof(glm::vec2), NULL, GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, 0, mesh.vertices.size() * sizeof(glm::vec3), mesh.vertices.constData());
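The added springiness check means a springy mesh now takes the same path as a blendshaped one: its vertex data is treated as changing rather than static, so it skips the upload-once branch shown here. A restatement of the condition as a helper, purely for illustration and not part of this commit:

// Sketch only: a mesh whose vertices change after load (blendshapes or
// spring simulation) cannot rely on a fill-once static vertex buffer.
static bool needsMutableVertexData(bool hasBlendshapes, float springiness) {
    return hasBlendshapes || springiness > 0.0f;
}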

View file

@ -23,6 +23,7 @@ static const float fingerVectorRadix = 4; // bits of precision when converting f
AvatarData::AvatarData(Node* owningNode) :
NodeData(owningNode),
_uuid(),
_handPosition(0,0,0),
_bodyYaw(-90.0),
_bodyPitch(0.0),
@ -116,6 +117,11 @@ int AvatarData::getBroadcastData(unsigned char* destinationBuffer) {
_handData = new HandData(this);
}
// UUID
QByteArray uuidByteArray = _uuid.toRfc4122();
memcpy(destinationBuffer, uuidByteArray.constData(), uuidByteArray.size());
destinationBuffer += uuidByteArray.size();
// Body world position
memcpy(destinationBuffer, &_position, sizeof(float) * 3);
destinationBuffer += sizeof(float) * 3;
@ -249,6 +255,11 @@ int AvatarData::parseData(unsigned char* sourceBuffer, int numBytes) {
// push past the node ID
sourceBuffer += sizeof(uint16_t);
// UUID
const int NUM_BYTES_RFC4122_UUID = 16;
_uuid = QUuid::fromRfc4122(QByteArray((char*) sourceBuffer, NUM_BYTES_RFC4122_UUID));
sourceBuffer += NUM_BYTES_RFC4122_UUID;
// Body world position
memcpy(&_position, sourceBuffer, sizeof(float) * 3);
sourceBuffer += sizeof(float) * 3;
@ -259,7 +270,7 @@ int AvatarData::parseData(unsigned char* sourceBuffer, int numBytes) {
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &_bodyRoll);
// Body scale
sourceBuffer += unpackFloatRatioFromTwoByte( sourceBuffer, _newScale);
sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer, _newScale);
// Follow mode info
memcpy(&_leaderID, sourceBuffer, sizeof(uint16_t));
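A self-contained sketch of the round trip the two hunks above perform; the helper names are invented here for illustration. An RFC 4122 UUID is always 16 bytes, which is why the broadcast side can copy uuidByteArray.size() bytes and the parse side can read a fixed NUM_BYTES_RFC4122_UUID:

#include <QtCore/QByteArray>
#include <QtCore/QUuid>
#include <cstring>

const int NUM_BYTES_RFC4122_UUID = 16;

// pack: write the UUID's 16-byte RFC 4122 form into a raw packet buffer
static int packUuid(unsigned char* destinationBuffer, const QUuid& uuid) {
    QByteArray uuidByteArray = uuid.toRfc4122();
    memcpy(destinationBuffer, uuidByteArray.constData(), uuidByteArray.size());
    return uuidByteArray.size();
}

// unpack: rebuild the QUuid from those same 16 bytes
static int unpackUuid(const unsigned char* sourceBuffer, QUuid& uuid) {
    uuid = QUuid::fromRfc4122(QByteArray((const char*) sourceBuffer, NUM_BYTES_RFC4122_UUID));
    return NUM_BYTES_RFC4122_UUID;
}

int main() {
    unsigned char buffer[NUM_BYTES_RFC4122_UUID];
    QUuid original = QUuid::createUuid();
    packUuid(buffer, original);
    QUuid parsed;
    unpackUuid(buffer, parsed);
    return original == parsed ? 0 : 1; // round trip should be lossless
}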

View file

@ -17,6 +17,7 @@
#include <glm/gtc/quaternion.hpp>
#include <QtCore/QObject>
#include <QtCore/QUuid>
#include <QtCore/QVariantMap>
#include <NodeData.h>
@ -72,6 +73,9 @@ public:
int getBroadcastData(unsigned char* destinationBuffer);
int parseData(unsigned char* sourceBuffer, int numBytes);
QUuid& getUUID() { return _uuid; }
void setUUID(const QUuid& uuid) { _uuid = uuid; }
// Body Rotation
float getBodyYaw() const { return _bodyYaw; }
void setBodyYaw(float bodyYaw) { _bodyYaw = bodyYaw; }
@ -79,7 +83,6 @@ public:
void setBodyPitch(float bodyPitch) { _bodyPitch = bodyPitch; }
float getBodyRoll() const { return _bodyRoll; }
void setBodyRoll(float bodyRoll) { _bodyRoll = bodyRoll; }
// Hand State
void setHandState(char s) { _handState = s; }
@ -133,6 +136,8 @@ public slots:
void setWantOcclusionCulling(bool wantOcclusionCulling) { _wantOcclusionCulling = wantOcclusionCulling; }
protected:
QUuid _uuid;
glm::vec3 _position;
glm::vec3 _handPosition;

View file

@ -139,10 +139,6 @@ void Assignment::setPayload(const uchar* payload, int numBytes) {
memcpy(_payload, payload, _numPayloadBytes);
}
QString Assignment::getUUIDStringWithoutCurlyBraces() const {
return _uuid.toString().mid(1, _uuid.toString().length() - 2);
}
int Assignment::packToBuffer(unsigned char* buffer) {
int numPackedBytes = 0;

View file

@ -60,7 +60,6 @@ public:
void setUUID(const QUuid& uuid) { _uuid = uuid; }
const QUuid& getUUID() const { return _uuid; }
QString getUUIDStringWithoutCurlyBraces() const;
void resetUUID() { _uuid = QUuid::createUuid(); }
Assignment::Command getCommand() const { return _command; }

View file

@ -428,6 +428,7 @@ void NodeList::sendAssignment(Assignment& assignment) {
}
Node* NodeList::addOrUpdateNode(sockaddr* publicSocket, sockaddr* localSocket, char nodeType, uint16_t nodeId) {
NodeList::iterator node = end();
if (publicSocket) {
@ -439,7 +440,7 @@ Node* NodeList::addOrUpdateNode(sockaddr* publicSocket, sockaddr* localSocket, c
}
}
if (node == end()) {
if (node == end()) {
// we didn't have this node, so add them
Node* newNode = new Node(publicSocket, localSocket, nodeType, nodeId);
@ -540,6 +541,8 @@ void* removeSilentNodes(void *args) {
for(NodeList::iterator node = nodeList->begin(); node != nodeList->end(); ++node) {
node->lock();
if ((checkTimeUSecs - node->getLastHeardMicrostamp()) > NODE_SILENCE_THRESHOLD_USECS) {
qDebug() << "Killed " << *node << "\n";
@ -548,6 +551,8 @@ void* removeSilentNodes(void *args) {
node->setAlive(false);
}
node->unlock();
}
sleepTime = NODE_SILENCE_THRESHOLD_USECS - (usecTimestampNow() - checkTimeUSecs);

View file

@ -30,7 +30,7 @@ const int NODES_PER_BUCKET = 100;
const int MAX_PACKET_SIZE = 1500;
const int NODE_SILENCE_THRESHOLD_USECS = 2 * 1000000;
const uint64_t NODE_SILENCE_THRESHOLD_USECS = 2 * 1000000;
const int DOMAIN_SERVER_CHECK_IN_USECS = 1 * 1000000;
extern const char SOLO_NODE_TYPES[2];
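Typing the silence threshold as uint64_t matches the microsecond timestamps it is compared against in removeSilentNodes, avoiding signed/unsigned mixing there. A small sketch, assuming 64-bit microsecond timestamps as in usecTimestampNow():

#include <cstdint>
#include <cstdio>

const uint64_t NODE_SILENCE_THRESHOLD_USECS = 2 * 1000000;

// Sketch only: the silence check compares a 64-bit microsecond delta against
// the threshold, so the constant carries the same unsigned 64-bit type.
static bool nodeIsSilent(uint64_t nowUsecs, uint64_t lastHeardUsecs) {
    return (nowUsecs - lastHeardUsecs) > NODE_SILENCE_THRESHOLD_USECS;
}

int main() {
    printf("%d\n", nodeIsSilent(5000000, 1000000)); // 1: last heard 4s ago, past the 2s threshold
    printf("%d\n", nodeIsSilent(2500000, 1500000)); // 0: heard 1s ago, still alive
    return 0;
}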

View file

@ -20,7 +20,7 @@ PACKET_VERSION versionForPacketType(PACKET_TYPE type) {
return 1;
case PACKET_TYPE_HEAD_DATA:
return 8;
return 9;
case PACKET_TYPE_AVATAR_URLS:
return 1;

View file

@ -41,6 +41,10 @@ const PACKET_TYPE PACKET_TYPE_DEPLOY_ASSIGNMENT = 'd';
const PACKET_TYPE PACKET_TYPE_VOXEL_STATS = '#';
const PACKET_TYPE PACKET_TYPE_VOXEL_JURISDICTION = 'J';
const PACKET_TYPE PACKET_TYPE_VOXEL_JURISDICTION_REQUEST = 'j';
const PACKET_TYPE PACKET_TYPE_DATA_SERVER_PUT = 'p';
const PACKET_TYPE PACKET_TYPE_DATA_SERVER_GET = 'g';
const PACKET_TYPE PACKET_TYPE_DATA_SERVER_SEND = 'u';
const PACKET_TYPE PACKET_TYPE_DATA_SERVER_CONFIRM = 'c';
typedef char PACKET_VERSION;

View file

@ -43,7 +43,7 @@ bool socketMatch(const sockaddr* first, const sockaddr* second) {
const sockaddr_in *secondIn = (const sockaddr_in *) second;
return firstIn->sin_addr.s_addr == secondIn->sin_addr.s_addr
&& firstIn->sin_port == secondIn->sin_port;
&& firstIn->sin_port == secondIn->sin_port;
} else {
return false;
}
@ -254,7 +254,7 @@ bool UDPSocket::receive(sockaddr* recvAddress, void* receivedData, ssize_t* rece
#ifdef _WIN32
int addressSize = sizeof(*recvAddress);
#else
socklen_t addressSize = sizeof(&recvAddress);
socklen_t addressSize = sizeof(*recvAddress);
#endif
*receivedBytes = recvfrom(handle, static_cast<char*>(receivedData), MAX_BUFFER_LENGTH_BYTES,
0, recvAddress, &addressSize);
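The one-character change above fixes a real bug: sizeof(&recvAddress) measures a pointer (typically 8 bytes on a 64-bit build), not the sockaddr being written into, so recvfrom was told the address buffer was only pointer-sized. A POSIX-only sketch showing the difference, not part of this commit:

#include <cstdio>
#include <sys/socket.h>

int main() {
    sockaddr address = {};
    sockaddr* recvAddress = &address;
    // the old expression measured a pointer-to-pointer, e.g. 8 bytes
    printf("sizeof(&recvAddress) = %zu\n", sizeof(&recvAddress));
    // the fixed expression measures the sockaddr itself (16 bytes on typical platforms)
    printf("sizeof(*recvAddress) = %zu\n", sizeof(*recvAddress));
    return 0;
}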

View file

@ -0,0 +1,14 @@
//
// UUID.cpp
// hifi
//
// Created by Stephen Birarda on 10/7/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
#include "UUID.h"
QString uuidStringWithoutCurlyBraces(const QUuid& uuid) {
QString uuidStringNoBraces = uuid.toString().mid(1, uuid.toString().length() - 2);
return uuidStringNoBraces;
}
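The helper that used to live on Assignment is now a free function, so any caller holding a QUuid (Assignment, Agent, AvatarData) can use it. A usage sketch, not part of this commit:

#include <QtCore/QDebug>
#include <QtCore/QUuid>
#include "UUID.h"

int main() {
    QUuid uuid = QUuid::createUuid();
    qDebug() << uuid.toString();                    // "{1b4e28ba-...}" with curly braces
    qDebug() << uuidStringWithoutCurlyBraces(uuid); // "1b4e28ba-..." without them
    return 0;
}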

View file

@ -0,0 +1,16 @@
//
// UUID.h
// hifi
//
// Created by Stephen Birarda on 10/7/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
#ifndef __hifi__UUID__
#define __hifi__UUID__
#include <QtCore/QUuid>
QString uuidStringWithoutCurlyBraces(const QUuid& uuid);
#endif /* defined(__hifi__UUID__) */

View file

@ -371,7 +371,7 @@ bool ViewFrustum::matches(const ViewFrustum& compareTo, bool debug) const {
void ViewFrustum::computePickRay(float x, float y, glm::vec3& origin, glm::vec3& direction) const {
origin = _nearTopLeft + x*(_nearTopRight - _nearTopLeft) + y*(_nearBottomLeft - _nearTopLeft);
direction = glm::normalize(origin - _position);
direction = glm::normalize(origin - (_position + _orientation * _eyeOffsetPosition));
}
void ViewFrustum::computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& near, float& far,
@ -401,6 +401,10 @@ void ViewFrustum::computeOffAxisFrustum(float& left, float& right, float& bottom
far = max(far, -corners[i].z);
}
// make sure the near clip isn't too small to be valid
const float MIN_NEAR = 0.01f;
near = max(MIN_NEAR, near);
// get the near/far normal and use it to find the clip planes
glm::vec4 normal = eyeMatrix * glm::vec4(0.0f, 0.0f, 1.0f, 0.0f);
nearClipPlane = glm::vec4(-normal.x, -normal.y, -normal.z, glm::dot(normal, corners[0]));
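Two behavioral notes on this last file: the pick ray direction now originates at the offset eye position (camera position plus the rotated eye offset) rather than the raw camera position, and the off-axis near clip is clamped so it can never collapse below MIN_NEAR. A glm sketch of the corrected direction computation, with argument names invented for illustration:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch only: the ray origin is a point interpolated on the near plane;
// the direction points from the (offset) eye through that point.
static glm::vec3 pickRayDirection(const glm::vec3& nearPlanePoint, const glm::vec3& cameraPosition,
                                  const glm::quat& orientation, const glm::vec3& eyeOffsetPosition) {
    glm::vec3 eyePosition = cameraPosition + orientation * eyeOffsetPosition;
    return glm::normalize(nearPlanePoint - eyePosition);
}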