Merge branch 'master' of https://github.com/highfidelity/hifi into temp0

Sam Gateau 2014-11-06 10:56:54 -08:00
commit b0189efcdf
41 changed files with 792 additions and 390 deletions

View file

@ -61,7 +61,7 @@
const float LOUDNESS_TO_DISTANCE_RATIO = 0.00001f;
const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.18;
const float DEFAULT_NOISE_MUTING_THRESHOLD = 0.003f;
const QString AUDIO_MIXER_LOGGING_TARGET_NAME = "audio-mixer";
const QString AUDIO_ENV_GROUP_KEY = "audio_env";
const QString AUDIO_BUFFER_GROUP_KEY = "audio_buffer";
@ -78,12 +78,17 @@ bool AudioMixer::_printStreamStats = false;
bool AudioMixer::_enableFilter = true;
bool AudioMixer::shouldMute(float quietestFrame, float loudestFrame) {
return (quietestFrame > _noiseMutingThreshold);
}
AudioMixer::AudioMixer(const QByteArray& packet) :
ThreadedAssignment(packet),
_trailingSleepRatio(1.0f),
_minAudibilityThreshold(LOUDNESS_TO_DISTANCE_RATIO / 2.0f),
_performanceThrottlingRatio(0.0f),
_attenuationPerDoublingInDistance(DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE),
_noiseMutingThreshold(DEFAULT_NOISE_MUTING_THRESHOLD),
_numStatFrames(0),
_sumListeners(0),
_sumMixes(0),
@ -136,6 +141,11 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
return 0;
}
// if the stream should be muted, bail
if (shouldMute(streamToAdd->getQuietestTrailingFrameLoudness(), streamToAdd->getLoudestTrailingFrameLoudness())) {
return 0;
}
float bearingRelativeAngleToSource = 0.0f;
float attenuationCoefficient = 1.0f;
int numSamplesDelay = 0;
@ -353,7 +363,7 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
}
}
if (!sourceIsSelf && _enableFilter) {
if (!sourceIsSelf && _enableFilter && !streamToAdd->ignorePenumbraFilter()) {
const float TWO_OVER_PI = 2.0f / PI;
@ -400,15 +410,12 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
penumbraFilterGainR += (1.f - penumbraFilterGainR) * (1.f - distanceBetween / RADIUS_OF_HEAD);
}
#if 0
qDebug() << "gainL="
<< penumbraFilterGainL
<< "gainR="
<< penumbraFilterGainR
<< "angle="
<< -bearingRelativeAngleToSource;
#endif
bool wantDebug = false;
if (wantDebug) {
qDebug() << "gainL=" << penumbraFilterGainL
<< "gainR=" << penumbraFilterGainR
<< "angle=" << -bearingRelativeAngleToSource;
}
// Get our per listener/source data so we can get our filter
AudioFilterHSF1s& penumbraFilter = listenerNodeData->getListenerSourcePairData(streamUUID)->getPenumbraFilter();
@ -1003,7 +1010,17 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
qDebug() << "Attenuation per doubling in distance changed to" << _attenuationPerDoublingInDistance;
}
}
const QString NOISE_MUTING_THRESHOLD = "noise_muting_threshold";
if (audioEnvGroupObject[NOISE_MUTING_THRESHOLD].isString()) {
bool ok = false;
float noiseMutingThreshold = audioEnvGroupObject[NOISE_MUTING_THRESHOLD].toString().toFloat(&ok);
if (ok) {
_noiseMutingThreshold = noiseMutingThreshold;
qDebug() << "Noise muting threshold changed to" << _noiseMutingThreshold;
}
}
const QString FILTER_KEY = "enable_filter";
if (audioEnvGroupObject[FILTER_KEY].isBool()) {
_enableFilter = audioEnvGroupObject[FILTER_KEY].toBool();
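The new noise_muting_threshold travels from the domain-settings JSON (where numeric fields arrive as strings) into AudioMixer::shouldMute(), which drops a stream whose quietest trailing frame never falls below the threshold. A minimal sketch of that flow, using Qt JSON directly with illustrative names rather than the actual AudioMixer members:

#include <QJsonObject>

// Sketch only: numeric domain settings arrive as strings, so convert with an
// ok-flag and keep the compiled-in default when parsing fails.
struct NoiseMuteSketch {
    float noiseMutingThreshold = 0.003f;   // DEFAULT_NOISE_MUTING_THRESHOLD

    void parse(const QJsonObject& audioEnvGroup) {
        if (audioEnvGroup["noise_muting_threshold"].isString()) {
            bool ok = false;
            float value = audioEnvGroup["noise_muting_threshold"].toString().toFloat(&ok);
            if (ok) {
                noiseMutingThreshold = value;
            }
        }
    }

    // A stream is muted when even its quietest trailing frame stays above the
    // threshold, i.e. it is continuous background noise that never goes quiet.
    bool shouldMute(float quietestTrailingFrame) const {
        return quietestTrailingFrame > noiseMutingThreshold;
    }
};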

View file

@ -59,6 +59,8 @@ private:
int16_t _mixSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
void perSecondActions();
bool shouldMute(float quietestFrame, float loudestFrame);
QString getReadPendingDatagramsCallsPerSecondsStatsString() const;
QString getReadPendingDatagramsPacketsPerCallStatsString() const;
@ -71,6 +73,7 @@ private:
float _minAudibilityThreshold;
float _performanceThrottlingRatio;
float _attenuationPerDoublingInDistance;
float _noiseMutingThreshold;
int _numStatFrames;
int _sumListeners;
int _sumMixes;

View file

@ -34,6 +34,7 @@ public:
virtual const char* getMyLoggingServerTargetName() const { return MODEL_SERVER_LOGGING_TARGET_NAME; }
virtual const char* getMyDefaultPersistFilename() const { return LOCAL_MODELS_PERSIST_FILE; }
virtual PacketType getMyEditNackType() const { return PacketTypeEntityEditNack; }
virtual QString getMyDomainSettingsKey() const { return QString("entity_server_settings"); }
// subclass may implement these method
virtual void beforeRun();

View file

@ -98,15 +98,28 @@ void OctreeInboundPacketProcessor::processPacket(const SharedNodePointer& sendin
unsigned short int sequence = (*((unsigned short int*)(packetData + numBytesPacketHeader)));
quint64 sentAt = (*((quint64*)(packetData + numBytesPacketHeader + sizeof(sequence))));
quint64 arrivedAt = usecTimestampNow();
if (sentAt > arrivedAt) {
if (debugProcessPacket || _myServer->wantsDebugReceiving()) {
qDebug() << "unreasonable sentAt=" << sentAt << " usecs";
qDebug() << "setting sentAt to arrivedAt=" << arrivedAt << " usecs";
}
sentAt = arrivedAt;
}
quint64 transitTime = arrivedAt - sentAt;
int editsInPacket = 0;
quint64 processTime = 0;
quint64 lockWaitTime = 0;
if (debugProcessPacket || _myServer->wantsDebugReceiving()) {
qDebug() << "PROCESSING THREAD: got '" << packetType << "' packet - " << _receivedPacketCount
<< " command from client receivedBytes=" << packet.size()
<< " sequence=" << sequence << " transitTime=" << transitTime << " usecs";
qDebug() << "PROCESSING THREAD: got '" << packetType << "' packet - " << _receivedPacketCount << " command from client";
qDebug() << " receivedBytes=" << packet.size();
qDebug() << " sequence=" << sequence;
qDebug() << " sentAt=" << sentAt << " usecs";
qDebug() << " arrivedAt=" << arrivedAt << " usecs";
qDebug() << " transitTime=" << transitTime << " usecs";
qDebug() << " sendingNode->getClockSkewUsec()=" << sendingNode->getClockSkewUsec() << " usecs";
}
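The sentAt clamp above guards the unsigned subtraction that follows it: with clock skew between client and server, the packet's timestamp can land in the future, and arrivedAt - sentAt would wrap around to an enormous transit time. A self-contained sketch of the same guard:

#include <cstdint>

// If the sender's clock is ahead of ours, treat the packet as having arrived
// instantly rather than letting the unsigned subtraction wrap around.
uint64_t safeTransitTime(uint64_t sentAt, uint64_t arrivedAt) {
    if (sentAt > arrivedAt) {
        sentAt = arrivedAt;
    }
    return arrivedAt - sentAt;
}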
if (debugProcessPacket) {

View file

@ -900,85 +900,206 @@ void OctreeServer::setupDatagramProcessingThread() {
// start the datagram processing thread
_datagramProcessingThread->start();
}
bool OctreeServer::readOptionBool(const QString& optionName, const QJsonObject& settingsSectionObject, bool& result) {
result = false; // assume it doesn't exist
bool optionAvailable = false;
QString argName = "--" + optionName;
bool argExists = cmdOptionExists(_argc, _argv, qPrintable(argName));
if (argExists) {
optionAvailable = true;
result = argExists;
qDebug() << "From payload arguments: " << qPrintable(argName) << ":" << result;
} else if (settingsSectionObject.contains(optionName)) {
optionAvailable = true;
result = settingsSectionObject[optionName].toBool();
qDebug() << "From domain settings: " << qPrintable(optionName) << ":" << result;
}
return optionAvailable;
}
bool OctreeServer::readOptionInt(const QString& optionName, const QJsonObject& settingsSectionObject, int& result) {
bool optionAvailable = false;
QString argName = "--" + optionName;
const char* argValue = getCmdOption(_argc, _argv, qPrintable(argName));
if (argValue) {
optionAvailable = true;
result = atoi(argValue);
qDebug() << "From payload arguments: " << qPrintable(argName) << ":" << result;
} else if (settingsSectionObject.contains(optionName)) {
optionAvailable = true;
result = settingsSectionObject[optionName].toString().toInt(&optionAvailable);
if (optionAvailable) {
qDebug() << "From domain settings: " << qPrintable(optionName) << ":" << result;
}
}
return optionAvailable;
}
bool OctreeServer::readOptionString(const QString& optionName, const QJsonObject& settingsSectionObject, QString& result) {
bool optionAvailable = false;
QString argName = "--" + optionName;
const char* argValue = getCmdOption(_argc, _argv, qPrintable(argName));
if (argValue) {
optionAvailable = true;
result = QString(argValue);
qDebug() << "From payload arguments: " << qPrintable(argName) << ":" << qPrintable(result);
} else if (settingsSectionObject.contains(optionName)) {
optionAvailable = true;
result = settingsSectionObject[optionName].toString();
qDebug() << "From domain settings: " << qPrintable(optionName) << ":" << qPrintable(result);
}
return optionAvailable;
}
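All three helpers implement the same precedence: a --optionName payload argument wins, otherwise the domain-settings section is consulted, and the caller keeps its default when neither is present. A compact stand-alone sketch of that precedence for an integer option (QStringList stands in for the raw argc/argv handling; this is not the hifi code):

#include <QJsonObject>
#include <QStringList>

bool readIntOption(const QString& name, const QStringList& args,
                   const QJsonObject& section, int& result) {
    int argIndex = args.indexOf("--" + name);
    if (argIndex != -1 && argIndex + 1 < args.size()) {
        result = args.at(argIndex + 1).toInt();       // payload argument wins
        return true;
    }
    if (section.contains(name)) {
        bool ok = false;
        result = section[name].toString().toInt(&ok); // settings values arrive as strings
        return ok;
    }
    return false;                                      // caller keeps its default
}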
void OctreeServer::readConfiguration() {
// if the assignment had a payload, read and parse that
if (getPayload().size() > 0) {
parsePayload();
}
// wait until we have the domain-server settings, otherwise we bail
NodeList* nodeList = NodeList::getInstance();
DomainHandler& domainHandler = nodeList->getDomainHandler();
qDebug() << "Waiting for domain settings from domain-server.";
// block until we get the settingsRequestComplete signal
QEventLoop loop;
connect(&domainHandler, &DomainHandler::settingsReceived, &loop, &QEventLoop::quit);
connect(&domainHandler, &DomainHandler::settingsReceiveFail, &loop, &QEventLoop::quit);
domainHandler.requestDomainSettings();
loop.exec();
if (domainHandler.getSettingsObject().isEmpty()) {
qDebug() << "No settings object from domain-server.";
}
const QJsonObject& settingsObject = domainHandler.getSettingsObject();
QString settingsKey = getMyDomainSettingsKey();
QJsonObject settingsSectionObject = settingsObject[settingsKey].toObject();
if (!readOptionString(QString("statusHost"), settingsSectionObject, _statusHost) || _statusHost.isEmpty()) {
_statusHost = getLocalAddress().toString();
}
qDebug("statusHost=%s", qPrintable(_statusHost));
if (readOptionInt(QString("statusPort"), settingsSectionObject, _statusPort)) {
initHTTPManager(_statusPort);
qDebug() << "statusPort=" << _statusPort;
} else {
qDebug() << "statusPort= DISABLED";
}
QString jurisdictionFile;
if (readOptionString(QString("jurisdictionFile"), settingsSectionObject, jurisdictionFile)) {
qDebug("jurisdictionFile=%s", qPrintable(jurisdictionFile));
qDebug("about to readFromFile().... jurisdictionFile=%s", qPrintable(jurisdictionFile));
_jurisdiction = new JurisdictionMap(qPrintable(jurisdictionFile));
qDebug("after readFromFile().... jurisdictionFile=%s", qPrintable(jurisdictionFile));
} else {
QString jurisdictionRoot;
bool hasRoot = readOptionString(QString("jurisdictionRoot"), settingsSectionObject, jurisdictionRoot);
QString jurisdictionEndNodes;
bool hasEndNodes = readOptionString(QString("jurisdictionEndNodes"), settingsSectionObject, jurisdictionEndNodes);
if (hasRoot || hasEndNodes) {
_jurisdiction = new JurisdictionMap(qPrintable(jurisdictionRoot), qPrintable(jurisdictionEndNodes));
}
}
readOptionBool(QString("verboseDebug"), settingsSectionObject, _verboseDebug);
qDebug("verboseDebug=%s", debug::valueOf(_verboseDebug));
readOptionBool(QString("debugSending"), settingsSectionObject, _debugSending);
qDebug("debugSending=%s", debug::valueOf(_debugSending));
readOptionBool(QString("debugReceiving"), settingsSectionObject, _debugReceiving);
qDebug("debugReceiving=%s", debug::valueOf(_debugReceiving));
bool noPersist;
readOptionBool(QString("NoPersist"), settingsSectionObject, noPersist);
_wantPersist = !noPersist;
qDebug("wantPersist=%s", debug::valueOf(_wantPersist));
if (_wantPersist) {
QString persistFilename;
if (!readOptionString(QString("persistFilename"), settingsSectionObject, persistFilename)) {
persistFilename = getMyDefaultPersistFilename();
}
strcpy(_persistFilename, qPrintable(persistFilename));
qDebug("persistFilename=%s", _persistFilename);
} else {
qDebug("persistFilename= DISABLED");
}
// Debug option to demonstrate that the server's local time does not
// need to be in sync with any other network node. This forces clock
// skew for the individual server node
int clockSkew;
if (readOptionInt(QString("clockSkew"), settingsSectionObject, clockSkew)) {
usecTimestampNowForceClockSkew(clockSkew);
qDebug("clockSkew=%d", clockSkew);
}
// Check to see if the user passed in a command line option for setting packet send rate
int packetsPerSecondPerClientMax = -1;
if (readOptionInt(QString("packetsPerSecondPerClientMax"), settingsSectionObject, packetsPerSecondPerClientMax)) {
_packetsPerClientPerInterval = packetsPerSecondPerClientMax / INTERVALS_PER_SECOND;
if (_packetsPerClientPerInterval < 1) {
_packetsPerClientPerInterval = 1;
}
}
qDebug("packetsPerSecondPerClientMax=%d _packetsPerClientPerInterval=%d",
packetsPerSecondPerClientMax, _packetsPerClientPerInterval);
// Check to see if the user passed in a command line option for setting packet send rate
int packetsPerSecondTotalMax = -1;
if (readOptionInt(QString("packetsPerSecondTotalMax"), settingsSectionObject, packetsPerSecondTotalMax)) {
_packetsTotalPerInterval = packetsPerSecondTotalMax / INTERVALS_PER_SECOND;
if (_packetsTotalPerInterval < 1) {
_packetsTotalPerInterval = 1;
}
}
qDebug("packetsPerSecondTotalMax=%d _packetsTotalPerInterval=%d",
packetsPerSecondTotalMax, _packetsTotalPerInterval);
readAdditionalConfiguration(settingsSectionObject);
}
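The per-second maxima are turned into per-interval budgets with integer division and then clamped to at least one packet, so a small configured rate cannot silently become zero. Illustrative numbers, assuming the send loop runs INTERVALS_PER_SECOND = 60 times per second: packetsPerSecondPerClientMax = 90 yields 90 / 60 = 1 packet per client per interval, and any configured value below 60 also clamps to 1.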
void OctreeServer::run() {
qInstallMessageHandler(LogHandler::verboseMessageHandler);
_safeServerName = getMyServerName();
// Before we do anything else, create our tree...
OctreeElement::resetPopulationStatistics();
_tree = createTree();
_tree->setIsServer(true);
// make sure our NodeList knows what type we are
NodeList* nodeList = NodeList::getInstance();
nodeList->setOwnerType(getMyNodeType());
// use common init to setup common timers and logging
commonInit(getMyLoggingServerTargetName(), getMyNodeType());
setupDatagramProcessingThread();
// Now would be a good time to parse our arguments, if we got them as assignment
if (getPayload().size() > 0) {
parsePayload();
}
// read the configuration from either the payload or the domain server configuration
readConfiguration();
beforeRun(); // after payload has been processed
qInstallMessageHandler(LogHandler::verboseMessageHandler);
const char* STATUS_PORT = "--statusPort";
const char* statusPort = getCmdOption(_argc, _argv, STATUS_PORT);
if (statusPort) {
_statusPort = atoi(statusPort);
initHTTPManager(_statusPort);
}
const char* STATUS_HOST = "--statusHost";
const char* statusHost = getCmdOption(_argc, _argv, STATUS_HOST);
if (statusHost) {
qDebug("--statusHost=%s", statusHost);
_statusHost = statusHost;
} else {
_statusHost = getLocalAddress().toString();
}
qDebug("statusHost=%s", qPrintable(_statusHost));
const char* JURISDICTION_FILE = "--jurisdictionFile";
const char* jurisdictionFile = getCmdOption(_argc, _argv, JURISDICTION_FILE);
if (jurisdictionFile) {
qDebug("jurisdictionFile=%s", jurisdictionFile);
qDebug("about to readFromFile().... jurisdictionFile=%s", jurisdictionFile);
_jurisdiction = new JurisdictionMap(jurisdictionFile);
qDebug("after readFromFile().... jurisdictionFile=%s", jurisdictionFile);
} else {
const char* JURISDICTION_ROOT = "--jurisdictionRoot";
const char* jurisdictionRoot = getCmdOption(_argc, _argv, JURISDICTION_ROOT);
if (jurisdictionRoot) {
qDebug("jurisdictionRoot=%s", jurisdictionRoot);
}
const char* JURISDICTION_ENDNODES = "--jurisdictionEndNodes";
const char* jurisdictionEndNodes = getCmdOption(_argc, _argv, JURISDICTION_ENDNODES);
if (jurisdictionEndNodes) {
qDebug("jurisdictionEndNodes=%s", jurisdictionEndNodes);
}
if (jurisdictionRoot || jurisdictionEndNodes) {
_jurisdiction = new JurisdictionMap(jurisdictionRoot, jurisdictionEndNodes);
}
}
NodeList* nodeList = NodeList::getInstance();
nodeList->setOwnerType(getMyNodeType());
connect(nodeList, SIGNAL(nodeAdded(SharedNodePointer)), SLOT(nodeAdded(SharedNodePointer)));
connect(nodeList, SIGNAL(nodeKilled(SharedNodePointer)),SLOT(nodeKilled(SharedNodePointer)));
// we need to ask the DS about agents so we can ping/reply with them
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
#ifndef WIN32
setvbuf(stdout, NULL, _IOLBF, 0);
#endif
@ -987,39 +1108,9 @@ void OctreeServer::run() {
srand((unsigned)time(0));
const char* VERBOSE_DEBUG = "--verboseDebug";
_verboseDebug = cmdOptionExists(_argc, _argv, VERBOSE_DEBUG);
qDebug("verboseDebug=%s", debug::valueOf(_verboseDebug));
const char* DEBUG_SENDING = "--debugSending";
_debugSending = cmdOptionExists(_argc, _argv, DEBUG_SENDING);
qDebug("debugSending=%s", debug::valueOf(_debugSending));
const char* DEBUG_RECEIVING = "--debugReceiving";
_debugReceiving = cmdOptionExists(_argc, _argv, DEBUG_RECEIVING);
qDebug("debugReceiving=%s", debug::valueOf(_debugReceiving));
// By default we will persist, if you want to disable this, then pass in this parameter
const char* NO_PERSIST = "--NoPersist";
if (cmdOptionExists(_argc, _argv, NO_PERSIST)) {
_wantPersist = false;
}
qDebug("wantPersist=%s", debug::valueOf(_wantPersist));
// if we want Persistence, set up the local file and persist thread
if (_wantPersist) {
// Check to see if the user passed in a command line option for setting packet send rate
const char* PERSIST_FILENAME = "--persistFilename";
const char* persistFilenameParameter = getCmdOption(_argc, _argv, PERSIST_FILENAME);
if (persistFilenameParameter) {
strcpy(_persistFilename, persistFilenameParameter);
} else {
strcpy(_persistFilename, getMyDefaultPersistFilename());
}
qDebug("persistFilename=%s", _persistFilename);
// now set up PersistThread
_persistThread = new OctreePersistThread(_tree, _persistFilename);
if (_persistThread) {
@ -1027,41 +1118,6 @@ void OctreeServer::run() {
}
}
// Debug option to demonstrate that the server's local time does not
// need to be in sync with any other network node. This forces clock
// skew for the individual server node
const char* CLOCK_SKEW = "--clockSkew";
const char* clockSkewOption = getCmdOption(_argc, _argv, CLOCK_SKEW);
if (clockSkewOption) {
int clockSkew = atoi(clockSkewOption);
usecTimestampNowForceClockSkew(clockSkew);
qDebug("clockSkewOption=%s clockSkew=%d", clockSkewOption, clockSkew);
}
// Check to see if the user passed in a command line option for setting packet send rate
const char* PACKETS_PER_SECOND_PER_CLIENT_MAX = "--packetsPerSecondPerClientMax";
const char* packetsPerSecondPerClientMax = getCmdOption(_argc, _argv, PACKETS_PER_SECOND_PER_CLIENT_MAX);
if (packetsPerSecondPerClientMax) {
_packetsPerClientPerInterval = atoi(packetsPerSecondPerClientMax) / INTERVALS_PER_SECOND;
if (_packetsPerClientPerInterval < 1) {
_packetsPerClientPerInterval = 1;
}
}
qDebug("packetsPerSecondPerClientMax=%s _packetsPerClientPerInterval=%d",
packetsPerSecondPerClientMax, _packetsPerClientPerInterval);
// Check to see if the user passed in a command line option for setting packet send rate
const char* PACKETS_PER_SECOND_TOTAL_MAX = "--packetsPerSecondTotalMax";
const char* packetsPerSecondTotalMax = getCmdOption(_argc, _argv, PACKETS_PER_SECOND_TOTAL_MAX);
if (packetsPerSecondTotalMax) {
_packetsTotalPerInterval = atoi(packetsPerSecondTotalMax) / INTERVALS_PER_SECOND;
if (_packetsTotalPerInterval < 1) {
_packetsTotalPerInterval = 1;
}
}
qDebug("packetsPerSecondTotalMax=%s _packetsTotalPerInterval=%d",
packetsPerSecondTotalMax, _packetsTotalPerInterval);
HifiSockAddr senderSockAddr;
// set up our jurisdiction broadcaster...

View file

@ -69,6 +69,7 @@ public:
virtual const char* getMyLoggingServerTargetName() const = 0;
virtual const char* getMyDefaultPersistFilename() const = 0;
virtual PacketType getMyEditNackType() const = 0;
virtual QString getMyDomainSettingsKey() const { return QString("octree_server_settings"); }
// subclass may implement these method
virtual void beforeRun() { }
@ -131,6 +132,11 @@ public slots:
void readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr);
protected:
bool readOptionBool(const QString& optionName, const QJsonObject& settingsSectionObject, bool& result);
bool readOptionInt(const QString& optionName, const QJsonObject& settingsSectionObject, int& result);
bool readOptionString(const QString& optionName, const QJsonObject& settingsSectionObject, QString& result);
void readConfiguration();
virtual void readAdditionalConfiguration(const QJsonObject& settingsSectionObject) { };
void parsePayload();
void initHTTPManager(int port);
void resetSendingStats();

View file

@ -82,18 +82,18 @@ int VoxelServer::sendSpecialPacket(const SharedNodePointer& node, OctreeQueryNod
}
void VoxelServer::beforeRun() {
void VoxelServer::readAdditionalConfiguration(const QJsonObject& settingsSectionObject) {
// should we send environments? Default is yes, but this command line suppresses sending
const char* SEND_ENVIRONMENTS = "--sendEnvironments";
bool dontSendEnvironments = !cmdOptionExists(_argc, _argv, SEND_ENVIRONMENTS);
readOptionBool(QString("sendEnvironments"), settingsSectionObject, _sendEnvironments);
bool dontSendEnvironments = !_sendEnvironments;
if (dontSendEnvironments) {
qDebug("Sending environments suppressed...");
_sendEnvironments = false;
} else {
_sendEnvironments = true;
// should we send environments? Default is yes, but this command line suppresses sending
const char* MINIMAL_ENVIRONMENT = "--minimalEnvironment";
_sendMinimalEnvironment = cmdOptionExists(_argc, _argv, MINIMAL_ENVIRONMENT);
//const char* MINIMAL_ENVIRONMENT = "--minimalEnvironment";
//_sendMinimalEnvironment = cmdOptionExists(_argc, _argv, MINIMAL_ENVIRONMENT);
readOptionBool(QString("minimalEnvironment"), settingsSectionObject, _sendMinimalEnvironment);
qDebug("Using Minimal Environment=%s", debug::valueOf(_sendMinimalEnvironment));
}
qDebug("Sending environments=%s", debug::valueOf(_sendEnvironments));

View file

@ -43,12 +43,15 @@ public:
virtual const char* getMyLoggingServerTargetName() const { return VOXEL_SERVER_LOGGING_TARGET_NAME; }
virtual const char* getMyDefaultPersistFilename() const { return LOCAL_VOXELS_PERSIST_FILE; }
virtual PacketType getMyEditNackType() const { return PacketTypeVoxelEditNack; }
virtual QString getMyDomainSettingsKey() const { return QString("voxel_server_settings"); }
// subclass may implement these method
virtual void beforeRun();
virtual bool hasSpecialPacketToSend(const SharedNodePointer& node);
virtual int sendSpecialPacket(const SharedNodePointer& node, OctreeQueryNode* queryNode, int& packetsSent);
protected:
virtual void readAdditionalConfiguration(const QJsonObject& settingsSectionObject);
private:
bool _sendEnvironments;
bool _sendMinimalEnvironment;

View file

@ -89,6 +89,14 @@
"default": "0.18",
"advanced": false
},
{
"name": "noise_muting_threshold",
"label": "Noise Muting Threshold",
"help": "Loudness value for noise background between 0 and 1.0 (0: mute everyone, 1.0: never mute)",
"placeholder": "0.003",
"default": "0.003",
"advanced": false
},
{
"name": "enable_filter",
"type": "checkbox",
@ -262,5 +270,106 @@
"advanced": true
}
]
}
},
{
"name": "entity_server_settings",
"label": "Entity Server Settings",
"assignment-types": [6],
"settings": [
{
"name": "statusHost",
"label": "Status Hostname",
"help": "host name or IP address of the server for accessing the status page",
"placeholder": "",
"default": "",
"advanced": true
},
{
"name": "statusPort",
"label": "Status Port",
"help": "port of the server for accessing the status page",
"placeholder": "",
"default": "",
"advanced": true
},
{
"name": "verboseDebug",
"type": "checkbox",
"help": "lots of debugging",
"default": false,
"advanced": true
},
{
"name": "debugReceiving",
"type": "checkbox",
"help": "extra debugging on receiving",
"default": false,
"advanced": true
},
{
"name": "debugSending",
"type": "checkbox",
"help": "extra debugging on sending",
"default": false,
"advanced": true
},
{
"name": "clockSkew",
"label": "Clock Skew",
"help": "Number of msecs to skew the server clock by to test clock skew",
"placeholder": "0",
"default": "0",
"advanced": true
}
]
},
{
"name": "voxel_server_settings",
"label": "Voxel Server Settings",
"assignment-types": [3],
"settings": [
{
"name": "statusHost",
"label": "Status Hostname",
"help": "host name or IP address of the server for accessing the status page",
"placeholder": "",
"default": "",
"advanced": true
},
{
"name": "statusPort",
"label": "Status Port",
"help": "port of the server for accessing the status page",
"placeholder": "",
"default": "",
"advanced": true
},
{
"name": "clockSkew",
"label": "Clock Skew",
"help": "Number of msecs to skew the server clock by to test clock skew",
"placeholder": "0",
"default": "0",
"advanced": true
},
{
"name": "sendEnvironments",
"type": "checkbox",
"help": "send environmental data",
"default": false,
"advanced": true
},
{
"name": "minimalEnvironment",
"type": "checkbox",
"help": "send minimal environmental data if sending environmental data",
"default": false,
"advanced": true
}
]
}
]

examples/birdSongs.js (new file, 164 lines)
View file

@ -0,0 +1,164 @@
//
// birdSongs.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
// Plays a sample audio file at the avatar's current location
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// First, load a sample sound from a URL
var birds = [];
var playing = [];
var lowerCorner = { x: 0, y: 8, z: 0 };
var upperCorner = { x: 10, y: 10, z: 10 };
var RATE = 0.035;
var numPlaying = 0;
var BIRD_SIZE = 0.1;
var BIRD_VELOCITY = 2.0;
var LIGHT_RADIUS = 10.0;
var BIRD_MASTER_VOLUME = 0.5;
var useLights = true;
function randomVector(scale) {
return { x: Math.random() * scale - scale / 2.0, y: Math.random() * scale - scale / 2.0, z: Math.random() * scale - scale / 2.0 };
}
function maybePlaySound(deltaTime) {
if (Math.random() < RATE) {
// Set the location and other info for the sound to play
var whichBird = Math.floor(Math.random() * birds.length);
//print("playing sound # " + whichBird);
var options = new AudioInjectionOptions();
var position = { x: lowerCorner.x + Math.random() * (upperCorner.x - lowerCorner.x),
y: lowerCorner.y + Math.random() * (upperCorner.y - lowerCorner.y),
z: lowerCorner.z + Math.random() * (upperCorner.z - lowerCorner.z) };
options.position = position;
options.volume = BIRD_MASTER_VOLUME;
//
var entityId = Entities.addEntity({
type: "Sphere",
position: position,
dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE },
color: birds[whichBird].color,
lifetime: 10
});
if (useLights) {
var lightId = Entities.addEntity({
type: "Light",
position: position,
dimensions: { x: LIGHT_RADIUS, y: LIGHT_RADIUS, z: LIGHT_RADIUS },
isSpotlight: false,
diffuseColor: birds[whichBird].color,
ambientColor: { red: 0, green: 0, blue: 0 },
specularColor: { red: 255, green: 255, blue: 255 },
constantAttenuation: 0,
linearAttenuation: 4.0,
quadraticAttenuation: 2.0,
lifetime: 10
});
}
playing.push({ audioId: Audio.playSound(birds[whichBird].sound, options), entityId: entityId, lightId: lightId, color: birds[whichBird].color });
}
if (playing.length != numPlaying) {
numPlaying = playing.length;
//print("number playing = " + numPlaying);
}
for (var i = 0; i < playing.length; i++) {
if (!Audio.isInjectorPlaying(playing[i].audioId)) {
Entities.deleteEntity(playing[i].entityId);
if (useLights) {
Entities.deleteEntity(playing[i].lightId);
}
playing.splice(i, 1);
} else {
var loudness = Audio.getLoudness(playing[i].audioId);
var newColor = { red: playing[i].color.red, green: playing[i].color.green, blue: playing[i].color.blue };
if (loudness > 0.05) {
newColor.red *= (1.0 - loudness);
newColor.green *= (1.0 - loudness);
newColor.blue *= (1.0 - loudness);
}
var properties = Entities.getEntityProperties(playing[i].entityId);
var newPosition = Vec3.sum(properties.position, randomVector(BIRD_VELOCITY * deltaTime));
if (properties) {
properties.position = newPosition;
Entities.editEntity(playing[i].entityId, { position: properties.position, color: newColor });
}
if (useLights) {
var lightProperties = Entities.getEntityProperties(playing[i].lightId);
if (lightProperties) {
Entities.editEntity(playing[i].lightId, { position: newPosition, diffuseColor: newColor });
}
}
}
}
}
loadBirds();
// Connect a call back that happens every frame
Script.update.connect(maybePlaySound);
// Delete our little friends if script is stopped
Script.scriptEnding.connect(function() {
for (var i = 0; i < playing.length; i++) {
Entities.deleteEntity(playing[i].entityId);
if (useLights) {
Entities.deleteEntity(playing[i].lightId);
}
}
});
function loadBirds() {
var sound_filenames = ["bushtit_1.raw", "bushtit_2.raw", "bushtit_3.raw", "mexicanWhipoorwill.raw",
"rosyfacedlovebird.raw", "saysphoebe.raw", "westernscreechowl.raw", "bandtailedpigeon.wav", "bridledtitmouse.wav",
"browncrestedflycatcher.wav", "commonnighthawk.wav", "commonpoorwill.wav", "doublecrestedcormorant.wav",
"gambelsquail.wav", "goldcrownedkinglet.wav", "greaterroadrunner.wav","groovebilledani.wav","hairywoodpecker.wav",
"housewren.wav","hummingbird.wav", "mountainchickadee.wav", "nightjar.wav", "piebilledgrieb.wav", "pygmynuthatch.wav",
"whistlingduck.wav", "woodpecker.wav"];
var colors = [
{ red: 242, green: 207, blue: 13 },
{ red: 238, green: 94, blue: 11 },
{ red: 81, green: 30, blue: 7 },
{ red: 195, green: 176, blue: 81 },
{ red: 235, green: 190, blue: 152 },
{ red: 167, green: 99, blue: 52 },
{ red: 199, green: 122, blue: 108 },
{ red: 246, green: 220, blue: 189 },
{ red: 208, green: 145, blue: 65 },
{ red: 173, green: 120 , blue: 71 },
{ red: 132, green: 147, blue: 174 },
{ red: 164, green: 74, blue: 40 },
{ red: 131, green: 127, blue: 134 },
{ red: 209, green: 157, blue: 117 },
{ red: 205, green: 191, blue: 193 },
{ red: 193, green: 154, blue: 118 },
{ red: 205, green: 190, blue: 169 },
{ red: 199, green: 111, blue: 69 },
{ red: 221, green: 223, blue: 228 },
{ red: 115, green: 92, blue: 87 },
{ red: 214, green: 165, blue: 137 },
{ red: 160, green: 124, blue: 33 },
{ red: 117, green: 91, blue: 86 },
{ red: 113, green: 104, blue: 107 },
{ red: 216, green: 153, blue: 99 },
{ red: 242, green: 226, blue: 64 }
];
var SOUND_BASE_URL = "http://public.highfidelity.io/sounds/Animals/";
for (var i = 0; i < sound_filenames.length; i++) {
birds.push({ sound: new Sound(SOUND_BASE_URL + sound_filenames[i]),
color: colors[i]
} );
}
}

View file

@ -12,6 +12,8 @@ var MIN_CHANGE = 2.0;
var LANDING_DISTANCE = 2.0;
var LANDING_RANDOM = 0.2;
var relativePosition;
function update(deltaTime) {
if (Math.random() < deltaTime) {
@ -26,20 +28,15 @@ function update(deltaTime) {
}
if (guide) {
relativePosition = Vec3.subtract(MyAvatar.position, lastGuidePosition);
// Check whether guide has moved, update if so
if (Vec3.length(lastGuidePosition) == 0.0) {
lastGuidePosition = guide.position;
} else {
if (Vec3.length(Vec3.subtract(lastGuidePosition, guide.position)) > MIN_CHANGE) {
var meToGuide = Vec3.multiply(Vec3.normalize(Vec3.subtract(guide.position, MyAvatar.position)), LANDING_DISTANCE);
var newPosition = Vec3.subtract(guide.position, meToGuide);
newPosition = Vec3.sum(newPosition, { x: Math.random() * LANDING_RANDOM - LANDING_RANDOM / 2.0,
y: 0,
z: Math.random() * LANDING_RANDOM - LANDING_RANDOM / 2.0 });
var newPosition = Vec3.sum(guide.position, relativePosition);
MyAvatar.position = newPosition;
lastGuidePosition = guide.position;
MyAvatar.orientation = guide.orientation;
}
}
}

examples/lightExample.js (new file, 46 lines)
View file

@ -0,0 +1,46 @@
//
// lightExample.js
// examples
//
// Created by Philip Rosedale on November 5, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Makes a light right in front of your avatar, as well as a sphere at that location.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var position = Vec3.sum(MyAvatar.position, Quat.getFront(Camera.getOrientation()));
var sphereID = Entities.addEntity({
type: "Sphere",
position: position,
dimensions: { x: 0.1, y: 0.1, z: 0.1 },
color: { red: 255, green: 255, blue: 0 }
});
var lightID = Entities.addEntity({
type: "Light",
position: position,
dimensions: { x: 1, y: 1, z: 1 },
angularVelocity: { x: 0, y: 0, z: 0 },
angularDamping: 0,
isSpotlight: false,
diffuseColor: { red: 255, green: 255, blue: 0 },
ambientColor: { red: 0, green: 0, blue: 0 },
specularColor: { red: 255, green: 255, blue: 255 },
constantAttenuation: 0,
linearAttenuation: 1,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
Script.scriptEnding.connect(function() {
print("Deleted sphere and light");
Entities.deleteEntity(sphereID);
Entities.deleteEntity(lightID);
});

View file

@ -1,167 +0,0 @@
//
// voxelDrumming.js
// examples
//
// Created by Brad Hefta-Gaub on 2/14/14.
// Copyright 2014 High Fidelity, Inc.
//
// This is an example script that demonstrates use of the Overlays, Controller, and Audio classes
//
// It adds Hydra controller "fingertip on voxels" drumming
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Menu.addMenuItem({
menuName: "Developer > Hand Options",
menuItemName: "Voxel Drumming",
isCheckable: true,
isChecked: false
});
var collisionCenter = new Array();
collisionCenter[0] = { x: 0, y: 0, z: 0};
collisionCenter[1] = { x: 0, y: 0, z: 0};
var collisionAge = new Array();
collisionAge[0] = 0;
collisionAge[1] = 0;
var collisionDuration = new Array();
collisionDuration[0] = 0;
collisionDuration[1] = 0;
var isColliding = new Array();
isColliding[0] = false;
isColliding[1] = false;
var highlightVoxel = Overlays.addOverlay("cube",
{
position: { x: 0, y: 0, z: 0},
size: 0,
color: { red: 0, green: 0, blue: 0 },
visible: false,
lineWidth: 3,
solid: false
});
var collisionBubble = new Array();
collisionBubble[0] = Overlays.addOverlay("sphere",
{
position: { x: 0, y: 0, z: 0},
size: 0,
color: { red: 0, green: 0, blue: 0 },
alpha: 0.5,
visible: false
});
collisionBubble[1] = Overlays.addOverlay("sphere",
{
position: { x: 0, y: 0, z: 0},
size: 0,
color: { red: 0, green: 0, blue: 0 },
alpha: 0.5,
visible: false
});
var audioOptions = new AudioInjectionOptions();
audioOptions.position = { x: MyAvatar.position.x, y: MyAvatar.position.y + 1, z: MyAvatar.position.z };
audioOptions.volume = 1;
function clamp(valueToClamp, minValue, maxValue) {
return Math.max(minValue, Math.min(maxValue, valueToClamp));
}
function produceCollisionSound(deltaTime, palm, voxelDetail) {
// Collision between finger and a voxel plays sound
var palmVelocity = Controller.getSpatialControlVelocity(palm * 2);
var speed = Vec3.length(palmVelocity);
var fingerTipPosition = Controller.getSpatialControlPosition(palm * 2 + 1);
var LOWEST_FREQUENCY = 100.0;
var HERTZ_PER_RGB = 3.0;
var DECAY_PER_SAMPLE = 0.0005;
var DURATION_MAX = 2.0;
var MIN_VOLUME = 0.1;
var volume = MIN_VOLUME + clamp(speed, 0.0, (1.0 - MIN_VOLUME));
var duration = volume;
collisionCenter[palm] = fingerTipPosition;
collisionAge[palm] = deltaTime;
collisionDuration[palm] = duration;
var voxelBrightness = voxelDetail.red + voxelDetail.green + voxelDetail.blue;
var frequency = LOWEST_FREQUENCY + (voxelBrightness * HERTZ_PER_RGB);
audioOptions.position = fingerTipPosition;
Audio.startDrumSound(volume, frequency, DURATION_MAX, DECAY_PER_SAMPLE, audioOptions);
}
function update(deltaTime) {
// Voxel Drumming with fingertips if enabled
if (Menu.isOptionChecked("Voxel Drumming")) {
for (var palm = 0; palm < 2; palm++) {
var fingerTipPosition = Controller.getSpatialControlPosition(palm * 2 + 1);
var voxel = Voxels.getVoxelEnclosingPoint(fingerTipPosition);
if (voxel.s > 0) {
if (!isColliding[palm]) {
// Collision has just started
isColliding[palm] = true;
produceCollisionSound(deltaTime, palm, voxel);
// Set highlight voxel
Overlays.editOverlay(highlightVoxel,
{
position: { x: voxel.x, y: voxel.y, z: voxel.z},
size: voxel.s + 0.002,
color: { red: voxel.red + 128, green: voxel.green + 128, blue: voxel.blue + 128 },
visible: true
});
}
} else {
if (isColliding[palm]) {
// Collision has just ended
isColliding[palm] = false;
Overlays.editOverlay(highlightVoxel, { visible: false });
}
}
if (collisionAge[palm] > 0) {
collisionAge[palm] += deltaTime;
}
// If hand/voxel collision has happened, render a little expanding sphere
if (collisionAge[palm] > 0) {
var opacity = clamp(1 - (collisionAge[palm] / collisionDuration[palm]), 0, 1);
var size = collisionAge[palm] * 0.25;
Overlays.editOverlay(collisionBubble[palm],
{
position: { x: collisionCenter[palm].x, y: collisionCenter[palm].y, z: collisionCenter[palm].z},
size: size,
color: { red: 255, green: 0, blue: 0 },
alpha: 0.5 * opacity,
visible: true
});
if (collisionAge[palm] > collisionDuration[palm]) {
collisionAge[palm] = 0;
Overlays.editOverlay(collisionBubble[palm], { visible: false });
}
}
} // palm loop
} // menu item check
}
Script.update.connect(update);
function scriptEnding() {
Overlays.deleteOverlay(highlightVoxel);
Overlays.deleteOverlay(collisionBubble[0]);
Overlays.deleteOverlay(collisionBubble[1]);
Menu.removeMenuItem("Developer > Hand Options","Voxel Drumming");
}
Script.scriptEnding.connect(scriptEnding);

View file

@ -14,5 +14,6 @@
void main(void) {
gl_Position = ftransform();
vec4 projected = gl_Position / gl_Position.w;
gl_TexCoord[0] = vec4(dot(projected, gl_ObjectPlaneS[3]), dot(projected, gl_ObjectPlaneT[3]), 0.0, 1.0);
gl_TexCoord[0] = vec4(dot(projected, gl_ObjectPlaneS[3]) * gl_Position.w,
dot(projected, gl_ObjectPlaneT[3]) * gl_Position.w, 0.0, gl_Position.w);
}
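Why scaling by gl_Position.w here and dividing by gl_TexCoord[0].q in the matching fragment shaders below fixes the lookup (a sketch, assuming standard perspective-correct attribute interpolation): the hardware interpolates any attribute a across a triangle as lerp(a / w) / lerp(1 / w), where lerp is linear interpolation in screen space. The coordinate computed from the projected position, call it s, is already a screen-space quantity and should interpolate linearly in screen space. Storing s * w in the coordinate and w in the q component makes the interpolated values lerp(s) / lerp(1 / w) and 1 / lerp(1 / w) respectively, and their quotient in the fragment shader is exactly lerp(s), the screen-space-linear value the deferred-lighting lookup needs.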

View file

@ -40,16 +40,17 @@ uniform float radius;
void main(void) {
// get the depth and exit early if it doesn't pass the test
float depth = texture2D(depthMap, gl_TexCoord[0].st).r;
vec2 texCoord = gl_TexCoord[0].st / gl_TexCoord[0].q;
float depth = texture2D(depthMap, texCoord).r;
if (depth < gl_FragCoord.z) {
discard;
}
// compute the view space position using the depth
float z = near / (depth * depthScale - 1.0);
vec4 position = vec4((depthTexCoordOffset + gl_TexCoord[0].st * depthTexCoordScale) * z, z, 1.0);
vec4 position = vec4((depthTexCoordOffset + texCoord * depthTexCoordScale) * z, z, 1.0);
// get the normal from the map
vec4 normal = texture2D(normalMap, gl_TexCoord[0].st);
vec4 normal = texture2D(normalMap, texCoord);
vec4 normalizedNormal = normalize(normal * 2.0 - vec4(1.0, 1.0, 1.0, 2.0));
// compute the base color based on OpenGL lighting model
@ -58,7 +59,7 @@ void main(void) {
lightVector = lightVector / lightDistance;
float diffuse = dot(normalizedNormal, lightVector);
float facingLight = step(0.0, diffuse);
vec4 baseColor = texture2D(diffuseMap, gl_TexCoord[0].st) * (gl_FrontLightProduct[1].ambient +
vec4 baseColor = texture2D(diffuseMap, texCoord) * (gl_FrontLightProduct[1].ambient +
gl_FrontLightProduct[1].diffuse * (diffuse * facingLight));
// compute attenuation based on distance, etc.
@ -69,6 +70,6 @@ void main(void) {
// add base to specular, modulate by attenuation
float specular = facingLight * max(0.0, dot(normalize(lightVector - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
vec4 specularColor = texture2D(specularMap, gl_TexCoord[0].st);
vec4 specularColor = texture2D(specularMap, texCoord);
gl_FragColor = vec4((baseColor.rgb + pow(specular, specularColor.a * 128.0) * specularColor.rgb) * attenuation, 0.0);
}

View file

@ -40,16 +40,17 @@ uniform float radius;
void main(void) {
// get the depth and exit early if it doesn't pass the test
float depth = texture2D(depthMap, gl_TexCoord[0].st).r;
vec2 texCoord = gl_TexCoord[0].st / gl_TexCoord[0].q;
float depth = texture2D(depthMap, texCoord).r;
if (depth < gl_FragCoord.z) {
discard;
}
// compute the view space position using the depth
float z = near / (depth * depthScale - 1.0);
vec4 position = vec4((depthTexCoordOffset + gl_TexCoord[0].st * depthTexCoordScale) * z, z, 1.0);
vec4 position = vec4((depthTexCoordOffset + texCoord * depthTexCoordScale) * z, z, 1.0);
// get the normal from the map
vec4 normal = texture2D(normalMap, gl_TexCoord[0].st);
vec4 normal = texture2D(normalMap, texCoord);
vec4 normalizedNormal = normalize(normal * 2.0 - vec4(1.0, 1.0, 1.0, 2.0));
// compute the base color based on OpenGL lighting model
@ -58,7 +59,7 @@ void main(void) {
lightVector = lightVector / lightDistance;
float diffuse = dot(normalizedNormal, lightVector);
float facingLight = step(0.0, diffuse);
vec4 baseColor = texture2D(diffuseMap, gl_TexCoord[0].st) * (gl_FrontLightProduct[1].ambient +
vec4 baseColor = texture2D(diffuseMap, texCoord) * (gl_FrontLightProduct[1].ambient +
gl_FrontLightProduct[1].diffuse * (diffuse * facingLight));
// compute attenuation based on spot angle, distance, etc.
@ -71,6 +72,6 @@ void main(void) {
// add base to specular, modulate by attenuation
float specular = facingLight * max(0.0, dot(normalize(lightVector - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
vec4 specularColor = texture2D(specularMap, gl_TexCoord[0].st);
vec4 specularColor = texture2D(specularMap, texCoord);
gl_FragColor = vec4((baseColor.rgb + pow(specular, specularColor.a * 128.0) * specularColor.rgb) * attenuation, 0.0);
}

View file

@ -451,6 +451,9 @@ Application::~Application() {
_audio.thread()->quit();
_audio.thread()->wait();
// kill any audio injectors that are still around
AudioScriptingInterface::getInstance().stopAllInjectors();
_octreeProcessor.terminate();
_voxelHideShowThread.terminate();
_voxelEditSender.terminate();

View file

@ -77,6 +77,9 @@ Audio::Audio(QObject* parent) :
_isStereoInput(false),
_averagedLatency(0.0),
_lastInputLoudness(0),
_inputFrameCounter(0),
_quietestFrame(std::numeric_limits<float>::max()),
_loudestFrame(0.0f),
_timeSinceLastClip(-1.0),
_dcOffset(0),
_noiseGateMeasuredFloor(0),
@ -717,6 +720,20 @@ void Audio::handleAudioInput() {
}
_lastInputLoudness = fabs(loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
if (_quietestFrame > _lastInputLoudness) {
_quietestFrame = _lastInputLoudness;
}
if (_loudestFrame < _lastInputLoudness) {
_loudestFrame = _lastInputLoudness;
}
const int FRAMES_FOR_NOISE_DETECTION = 400;
if (_inputFrameCounter++ > FRAMES_FOR_NOISE_DETECTION) {
_quietestFrame = std::numeric_limits<float>::max();
_loudestFrame = 0.0f;
_inputFrameCounter = 0;
}
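The mixer-side counterpart of this window lives in PositionalAudioStream below; on both ends the idea is a rolling min/max over the last N frames that is reset periodically so stale extremes age out. A stand-alone sketch of the pattern (names are illustrative, not the Audio members):

#include <algorithm>
#include <limits>

struct LoudnessWindow {
    static const int WINDOW_FRAMES = 400;   // matches FRAMES_FOR_NOISE_DETECTION above
    int frames = 0;
    float quietest = std::numeric_limits<float>::max();
    float loudest = 0.0f;

    void addFrame(float loudness) {
        quietest = std::min(quietest, loudness);
        loudest = std::max(loudest, loudness);
        if (++frames > WINDOW_FRAMES) {
            frames = 0;                      // start a fresh window
            quietest = std::numeric_limits<float>::max();
            loudest = 0.0f;
        }
    }
};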
// If Noise Gate is enabled, check and turn the gate on and off
if (!_audioSourceInjectEnabled && _noiseGateEnabled) {

View file

@ -213,6 +213,9 @@ private:
QElapsedTimer _timeSinceLastReceived;
float _averagedLatency;
float _lastInputLoudness;
int _inputFrameCounter;
float _quietestFrame;
float _loudestFrame;
float _timeSinceLastClip;
float _dcOffset;
float _noiseGateMeasuredFloor;

View file

@ -52,12 +52,10 @@ Hair::Hair(int strands,
glm::vec3 thisVertex;
for (int strand = 0; strand < _strands; strand++) {
float strandAngle = randFloat() * PI;
float azimuth;
float elevation = - (randFloat() * PI);
azimuth = PI_OVER_TWO;
if (randFloat() < 0.5f) {
azimuth *= -1.0f;
}
float azimuth = (float)strand / (float)_strands * PI * 2.0f;
float elevation = 0.0f;
glm::vec3 thisStrand(sinf(azimuth) * cosf(elevation), sinf(elevation), -cosf(azimuth) * cosf(elevation));
thisStrand *= _radius;
@ -115,11 +113,22 @@ void Hair::simulate(float deltaTime) {
glm::vec3 diff = thisPosition - _hairLastPosition[vertexIndex];
_hairPosition[vertexIndex] += diff * HAIR_DAMPING;
/*
// Resolve collisions with sphere
if (glm::length(_hairPosition[vertexIndex]) < _radius) {
_hairPosition[vertexIndex] += glm::normalize(_hairPosition[vertexIndex]) *
(_radius - glm::length(_hairPosition[vertexIndex]));
} */
// Collide with a conical body descending from the root of the hair
glm::vec3 thisVertex = _hairPosition[vertexIndex];
float depth = -thisVertex.y;
thisVertex.y = 0.0f;
const float BODY_CONE_ANGLE = 0.30;
if (glm::length(thisVertex) < depth * BODY_CONE_ANGLE) {
_hairPosition[vertexIndex] += glm::normalize(thisVertex) * (depth * BODY_CONE_ANGLE - glm::length(thisVertex));
}
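The collision above treats the body as a cone hanging below the hair root whose radius grows linearly with depth (slope BODY_CONE_ANGLE); a strand vertex inside that cone is pushed out radially to its surface. A self-contained sketch of the same test:

#include <glm/glm.hpp>

void pushOutOfBodyCone(glm::vec3& vertex, float coneSlope) {   // coneSlope ~ 0.30
    float depth = -vertex.y;                      // distance below the root
    glm::vec3 radial(vertex.x, 0.0f, vertex.z);   // horizontal offset from the axis
    float radialLength = glm::length(radial);
    if (depth > 0.0f && radialLength > 0.0f && radialLength < depth * coneSlope) {
        vertex += glm::normalize(radial) * (depth * coneSlope - radialLength);
    }
}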
// Add random thing driven by loudness
float loudnessFactor = (_loudness > SOUND_THRESHOLD) ? logf(_loudness - SOUND_THRESHOLD) / 2000.0f : 0.0f;

View file

@ -269,7 +269,7 @@ void DeferredLightingEffect::render() {
} else {
glTranslatef(light.position.x, light.position.y, light.position.z);
Application::getInstance()->getGeometryCache()->renderSphere(expandedRadius, 64, 64);
Application::getInstance()->getGeometryCache()->renderSphere(expandedRadius, 32, 32);
}
glPopMatrix();
@ -323,7 +323,7 @@ void DeferredLightingEffect::render() {
glRotatef(glm::degrees(glm::angle(spotRotation)), axis.x, axis.y, axis.z);
glTranslatef(0.0f, 0.0f, -light.radius * (1.0f + SCALE_EXPANSION * 0.5f));
Application::getInstance()->getGeometryCache()->renderCone(expandedRadius * glm::tan(light.cutoff),
expandedRadius, 64, 32);
expandedRadius, 32, 1);
}
glPopMatrix();

View file

@ -26,6 +26,8 @@ AudioInjector::AudioInjector(QObject* parent) :
_sound(NULL),
_options(),
_shouldStop(false),
_loudness(0.0f),
_isFinished(false),
_currentSendPosition(0)
{
}
@ -34,6 +36,8 @@ AudioInjector::AudioInjector(Sound* sound, const AudioInjectorOptions& injectorO
_sound(sound),
_options(injectorOptions),
_shouldStop(false),
_loudness(0.0f),
_isFinished(false),
_currentSendPosition(0)
{
}
@ -42,6 +46,10 @@ void AudioInjector::setOptions(AudioInjectorOptions& options) {
_options = options;
}
float AudioInjector::getLoudness() {
return _loudness;
}
const uchar MAX_INJECTOR_VOLUME = 0xFF;
void AudioInjector::injectAudio() {
@ -100,6 +108,8 @@ void AudioInjector::injectAudio() {
quint8 volume = MAX_INJECTOR_VOLUME * _options.getVolume();
packetStream << volume;
packetStream << _options.ignorePenumbra();
QElapsedTimer timer;
timer.start();
int nextFrame = 0;
@ -113,6 +123,15 @@ void AudioInjector::injectAudio() {
int bytesToCopy = std::min(((_options.isStereo()) ? 2 : 1) * NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL,
soundByteArray.size() - _currentSendPosition);
// Measure the loudness of this frame
_loudness = 0.0f;
for (int i = 0; i < bytesToCopy; i += sizeof(int16_t)) {
_loudness += abs(*reinterpret_cast<int16_t*>(soundByteArray.data() + _currentSendPosition + i)) /
(MAX_SAMPLE_VALUE / 2.0f);
}
_loudness /= (float)(bytesToCopy / sizeof(int16_t));
memcpy(injectAudioPacket.data() + positionOptionOffset,
&_options.getPosition(),
sizeof(_options.getPosition()));
@ -161,5 +180,6 @@ void AudioInjector::injectAudio() {
}
}
_isFinished = true;
emit finished();
}
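The loudness the injector now reports is the mean absolute sample value of the current frame, normalized by half of the 16-bit full-scale range (MAX_SAMPLE_VALUE / 2.0f); scripts read it back through getLoudness(). A stand-alone sketch of the measurement, assuming MAX_SAMPLE_VALUE is the int16 full-scale value:

#include <cmath>
#include <cstdint>

float frameLoudness(const int16_t* samples, int numSamples) {
    const float HALF_FULL_SCALE = 32767.0f / 2.0f;   // MAX_SAMPLE_VALUE / 2.0f
    float sum = 0.0f;
    for (int i = 0; i < numSamples; i++) {
        sum += std::fabs(static_cast<float>(samples[i])) / HALF_FULL_SCALE;
    }
    return numSamples > 0 ? sum / numSamples : 0.0f;
}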

View file

@ -27,19 +27,25 @@ public:
AudioInjector(QObject* parent);
AudioInjector(Sound* sound, const AudioInjectorOptions& injectorOptions);
bool isFinished() const { return _isFinished; }
int getCurrentSendPosition() const { return _currentSendPosition; }
public slots:
void injectAudio();
void stop() { _shouldStop = true; }
void setOptions(AudioInjectorOptions& options);
void setCurrentSendPosition(int currentSendPosition) { _currentSendPosition = currentSendPosition; }
float getLoudness();
signals:
void finished();
private:
Sound* _sound;
AudioInjectorOptions _options;
bool _shouldStop;
float _loudness;
bool _isFinished;
int _currentSendPosition;
};
Q_DECLARE_METATYPE(AudioInjector*)

View file

@ -18,6 +18,7 @@ AudioInjectorOptions::AudioInjectorOptions(QObject* parent) :
_loop(false),
_orientation(glm::vec3(0.0f, 0.0f, 0.0f)),
_isStereo(false),
_ignorePenumbra(false),
_loopbackAudioInterface(NULL)
{
}
@ -28,6 +29,7 @@ AudioInjectorOptions::AudioInjectorOptions(const AudioInjectorOptions& other) {
_loop = other._loop;
_orientation = other._orientation;
_isStereo = other._isStereo;
_ignorePenumbra = other._ignorePenumbra;
_loopbackAudioInterface = other._loopbackAudioInterface;
}
@ -37,5 +39,6 @@ void AudioInjectorOptions::operator=(const AudioInjectorOptions& other) {
_loop = other._loop;
_orientation = other._orientation;
_isStereo = other._isStereo;
_ignorePenumbra = other._ignorePenumbra;
_loopbackAudioInterface = other._loopbackAudioInterface;
}

View file

@ -29,6 +29,7 @@ class AudioInjectorOptions : public QObject {
Q_PROPERTY(float volume READ getVolume WRITE setVolume)
Q_PROPERTY(bool loop READ getLoop WRITE setLoop)
Q_PROPERTY(bool isStereo READ isStereo WRITE setIsStereo)
Q_PROPERTY(bool ignorePenumbra READ ignorePenumbra WRITE setIgnorePenumbra)
public:
AudioInjectorOptions(QObject* parent = 0);
AudioInjectorOptions(const AudioInjectorOptions& other);
@ -49,6 +50,9 @@ public:
const bool isStereo() const { return _isStereo; }
void setIsStereo(const bool isStereo) { _isStereo = isStereo; }
const bool ignorePenumbra() const {return _ignorePenumbra; }
void setIgnorePenumbra(bool ignorePenumbra) { _ignorePenumbra = ignorePenumbra; }
AbstractAudioInterface* getLoopbackAudioInterface() const { return _loopbackAudioInterface; }
void setLoopbackAudioInterface(AbstractAudioInterface* loopbackAudioInterface)
{ _loopbackAudioInterface = loopbackAudioInterface; }
@ -58,6 +62,7 @@ private:
bool _loop;
glm::quat _orientation;
bool _isStereo;
bool _ignorePenumbra;
AbstractAudioInterface* _loopbackAudioInterface;
};

View file

@ -11,6 +11,26 @@
#include "AudioScriptingInterface.h"
AudioScriptingInterface& AudioScriptingInterface::getInstance() {
static AudioScriptingInterface staticInstance;
return staticInstance;
}
void AudioScriptingInterface::stopAllInjectors() {
QList<QPointer<AudioInjector> >::iterator injector = _activeInjectors.begin();
while (injector != _activeInjectors.end()) {
if (!injector->isNull()) {
injector->data()->stop();
while (injector->data() && !injector->data()->isFinished()) {
// wait for this injector to go down
}
}
injector = _activeInjectors.erase(injector);
}
}
AudioInjector* AudioScriptingInterface::playSound(Sound* sound, const AudioInjectorOptions* injectorOptions) {
if (sound->isStereo()) {
@ -23,15 +43,18 @@ AudioInjector* AudioScriptingInterface::playSound(Sound* sound, const AudioInjec
injector->moveToThread(injectorThread);
// start injecting when the injector thread starts
connect(injectorThread, SIGNAL(started()), injector, SLOT(injectAudio()));
connect(injectorThread, &QThread::started, injector, &AudioInjector::injectAudio);
// connect the right slots and signals so that the AudioInjector is killed once the injection is complete
connect(injector, SIGNAL(finished()), injector, SLOT(deleteLater()));
connect(injector, SIGNAL(finished()), injectorThread, SLOT(quit()));
connect(injectorThread, SIGNAL(finished()), injectorThread, SLOT(deleteLater()));
connect(injector, &AudioInjector::finished, injector, &AudioInjector::deleteLater);
connect(injector, &AudioInjector::finished, injectorThread, &QThread::quit);
connect(injector, &AudioInjector::finished, this, &AudioScriptingInterface::injectorStopped);
connect(injectorThread, &QThread::finished, injectorThread, &QThread::deleteLater);
injectorThread->start();
_activeInjectors.append(QPointer<AudioInjector>(injector));
return injector;
}
@ -45,24 +68,14 @@ bool AudioScriptingInterface::isInjectorPlaying(AudioInjector* injector) {
return (injector != NULL);
}
void AudioScriptingInterface::startDrumSound(float volume, float frequency, float duration, float decay,
const AudioInjectorOptions* injectorOptions) {
Sound* sound = new Sound(volume, frequency, duration, decay);
AudioInjector* injector = new AudioInjector(sound, *injectorOptions);
sound->setParent(injector);
QThread* injectorThread = new QThread();
injector->moveToThread(injectorThread);
// start injecting when the injector thread starts
connect(injectorThread, SIGNAL(started()), injector, SLOT(injectAudio()));
// connect the right slots and signals so that the AudioInjector is killed once the injection is complete
connect(injector, SIGNAL(finished()), injector, SLOT(deleteLater()));
connect(injector, SIGNAL(finished()), injectorThread, SLOT(quit()));
connect(injectorThread, SIGNAL(finished()), injectorThread, SLOT(deleteLater()));
injectorThread->start();
float AudioScriptingInterface::getLoudness(AudioInjector* injector) {
if (injector) {
return injector->getLoudness();
} else {
return 0.0f;
}
}
void AudioScriptingInterface::injectorStopped() {
_activeInjectors.removeAll(QPointer<AudioInjector>(reinterpret_cast<AudioInjector*>(sender())));
}

View file

@ -12,6 +12,8 @@
#ifndef hifi_AudioScriptingInterface_h
#define hifi_AudioScriptingInterface_h
#include <qpointer.h>
#include "AudioInjector.h"
#include "Sound.h"
@ -19,12 +21,24 @@ const AudioInjectorOptions DEFAULT_INJECTOR_OPTIONS;
class AudioScriptingInterface : public QObject {
Q_OBJECT
public:
static AudioScriptingInterface& getInstance();
void stopAllInjectors();
public slots:
static AudioInjector* playSound(Sound* sound, const AudioInjectorOptions* injectorOptions = NULL);
static void stopInjector(AudioInjector* injector);
static bool isInjectorPlaying(AudioInjector* injector);
static void startDrumSound(float volume, float frequency, float duration, float decay,
const AudioInjectorOptions* injectorOptions = NULL);
static float getLoudness(AudioInjector* injector);
AudioInjector* playSound(Sound* sound, const AudioInjectorOptions* injectorOptions = NULL);
void stopInjector(AudioInjector* injector);
bool isInjectorPlaying(AudioInjector* injector);
void injectorStopped();
private:
AudioScriptingInterface() {};
QList< QPointer<AudioInjector> > _activeInjectors;
};
#endif // hifi_AudioScriptingInterface_h

View file

@ -113,10 +113,8 @@ public:
bool lastPopSucceeded() const { return _lastPopSucceeded; };
const AudioRingBuffer::ConstIterator& getLastPopOutput() const { return _lastPopOutput; }
void setToStarved();
void setSettings(const Settings& settings);
void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; }
@ -163,7 +161,7 @@ public:
float getWetLevel() const { return _wetLevel; }
void setReverb(float reverbTime, float wetLevel);
void clearReverb() { _hasReverb = false; }
public slots:
/// This function should be called every second for all the stats to function properly. If dynamic jitter buffers
/// is enabled, those stats are used to calculate _desiredJitterBufferFrames.

View file

@ -63,7 +63,9 @@ int InjectedAudioStream::parseStreamProperties(PacketType type,
quint8 attenuationByte = 0;
packetStream >> attenuationByte;
_attenuationRatio = attenuationByte / (float)MAX_INJECTOR_VOLUME;
packetStream >> _ignorePenumbra;
int numAudioBytes = packetAfterSeqNum.size() - packetStream.device()->pos();
numAudioSamples = numAudioBytes / sizeof(int16_t);

View file

@ -29,8 +29,12 @@ PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, b
_orientation(0.0f, 0.0f, 0.0f, 0.0f),
_shouldLoopbackForNode(false),
_isStereo(isStereo),
_ignorePenumbra(false),
_lastPopOutputTrailingLoudness(0.0f),
_lastPopOutputLoudness(0.0f)
_lastPopOutputLoudness(0.0f),
_quietestTrailingFrameLoudness(std::numeric_limits<float>::max()),
_loudestTrailingFrameLoudness(0.0f),
_frameCounter(0)
{
}
@ -42,8 +46,9 @@ void PositionalAudioStream::resetStats() {
void PositionalAudioStream::updateLastPopOutputLoudnessAndTrailingLoudness() {
_lastPopOutputLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput);
const int TRAILING_AVERAGE_FRAMES = 100;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const int TRAILING_MUTE_THRESHOLD_FRAMES = 400;
const int TRAILING_LOUDNESS_FRAMES = 200;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_LOUDNESS_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
const float LOUDNESS_EPSILON = 0.000001f;
@ -56,6 +61,17 @@ void PositionalAudioStream::updateLastPopOutputLoudnessAndTrailingLoudness() {
_lastPopOutputTrailingLoudness = 0;
}
}
if (_frameCounter++ == TRAILING_MUTE_THRESHOLD_FRAMES) {
_frameCounter = 0;
_quietestTrailingFrameLoudness = std::numeric_limits<float>::max();
_loudestTrailingFrameLoudness = 0.0f;
}
if (_lastPopOutputLoudness < _quietestTrailingFrameLoudness) {
_quietestTrailingFrameLoudness = _lastPopOutputLoudness;
}
if (_lastPopOutputLoudness > _loudestTrailingFrameLoudness) {
_loudestTrailingFrameLoudness = _lastPopOutputLoudness;
}
}
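The trailing loudness itself is an exponential moving average parameterized by the constants above: each new frame contributes 1 / TRAILING_LOUDNESS_FRAMES of the result, so doubling the frame count halves how quickly the average reacts. A sketch of that update (illustrative, not the exact member code):

float updateTrailingLoudness(float trailing, float current) {
    const int TRAILING_LOUDNESS_FRAMES = 200;
    const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_LOUDNESS_FRAMES;
    const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
    return trailing * PREVIOUS_FRAMES_RATIO + current * CURRENT_FRAME_RATIO;
}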
int PositionalAudioStream::parsePositionalData(const QByteArray& positionalByteArray) {

View file

@ -36,12 +36,16 @@ public:
void updateLastPopOutputLoudnessAndTrailingLoudness();
float getLastPopOutputTrailingLoudness() const { return _lastPopOutputTrailingLoudness; }
float getLastPopOutputLoudness() const { return _lastPopOutputLoudness; }
float getQuietestTrailingFrameLoudness() const { return _quietestTrailingFrameLoudness; }
float getLoudestTrailingFrameLoudness() const { return _loudestTrailingFrameLoudness; }
bool shouldLoopbackForNode() const { return _shouldLoopbackForNode; }
bool isStereo() const { return _isStereo; }
bool ignorePenumbraFilter() { return _ignorePenumbra; }
PositionalAudioStream::Type getType() const { return _type; }
const glm::vec3& getPosition() const { return _position; }
const glm::quat& getOrientation() const { return _orientation; }
protected:
// disallow copying of PositionalAudioStream objects
@ -57,9 +61,14 @@ protected:
bool _shouldLoopbackForNode;
bool _isStereo;
// Ignore penumbra filter
bool _ignorePenumbra;
float _lastPopOutputTrailingLoudness;
float _lastPopOutputLoudness;
float _quietestTrailingFrameLoudness;
float _loudestTrailingFrameLoudness;
int _frameCounter;
};
#endif // hifi_PositionalAudioStream_h

View file

@ -1069,6 +1069,24 @@ void EntityTree::dumpTree() {
recurseTreeWithOperator(&theOperator);
}
class PruneOperator : public RecurseOctreeOperator {
public:
virtual bool preRecursion(OctreeElement* element) { return true; }
virtual bool postRecursion(OctreeElement* element);
};
bool PruneOperator::postRecursion(OctreeElement* element) {
EntityTreeElement* entityTreeElement = static_cast<EntityTreeElement*>(element);
entityTreeElement->pruneChildren();
return true;
}
void EntityTree::pruneTree() {
// First, look for the existing entity in the tree..
PruneOperator theOperator;
recurseTreeWithOperator(&theOperator);
}
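PruneOperator rides the generic visitor that recurseTreeWithOperator() provides: preRecursion() is called on the way down and postRecursion() on the way back up, so pruning in postRecursion() always sees subtrees that have already been visited. A reduced sketch of that traversal with stand-in types:

#include <vector>

struct Element {
    std::vector<Element*> children;
    void pruneChildren() { /* drop now-empty children here */ }
};

struct RecurseOperator {
    virtual ~RecurseOperator() {}
    virtual bool preRecursion(Element*) { return true; }    // keep descending
    virtual bool postRecursion(Element*) { return true; }
};

void recurseWithOperator(Element* element, RecurseOperator& op) {
    if (!op.preRecursion(element)) {
        return;
    }
    for (Element* child : element->children) {
        recurseWithOperator(child, op);
    }
    op.postRecursion(element);
}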
void EntityTree::sendEntities(EntityEditPacketSender* packetSender, EntityTree* localTree, float x, float y, float z) {
SendEntitiesOperationArgs args;
args.packetSender = packetSender;

View file

@ -131,6 +131,7 @@ public:
void resetContainingElement(const EntityItemID& entityItemID, EntityTreeElement* element);
void debugDumpMap();
virtual void dumpTree();
virtual void pruneTree();
void sendEntities(EntityEditPacketSender* packetSender, EntityTree* localTree, float x, float y, float z);

View file

@ -812,11 +812,20 @@ bool EntityTreeElement::pruneChildren() {
void EntityTreeElement::debugDump() {
qDebug() << "EntityTreeElement...";
qDebug() << "entity count:" << _entityItems->size();
qDebug() << "cube:" << getAACube();
for (uint16_t i = 0; i < _entityItems->size(); i++) {
EntityItem* entity = (*_entityItems)[i];
entity->debugDump();
AACube temp = getAACube();
temp.scale((float)TREE_SCALE);
qDebug() << " cube:" << temp;
qDebug() << " has child elements:" << getChildCount();
if (_entityItems->size()) {
qDebug() << " has entities:" << _entityItems->size();
qDebug() << "--------------------------------------------------";
for (uint16_t i = 0; i < _entityItems->size(); i++) {
EntityItem* entity = (*_entityItems)[i];
entity->debugDump();
}
qDebug() << "--------------------------------------------------";
} else {
qDebug() << " NO entities!";
}
}

View file

@ -54,6 +54,8 @@ PacketVersion versionForPacketType(PacketType type) {
return 4;
case PacketTypeMixedAudio:
return 1;
case PacketTypeInjectAudio:
return 1;
case PacketTypeAvatarData:
return 3;
case PacketTypeAvatarIdentity:

View file

@ -352,6 +352,7 @@ public:
void setIsClient(bool isClient) { _isServer = !isClient; }
virtual void dumpTree() { };
virtual void pruneTree() { };
signals:
void importSize(float x, float y, float z);

View file

@ -246,9 +246,7 @@ void OctreeEditPacketSender::queueOctreeEditMessage(PacketType type, unsigned ch
// But we can't really do that with a packed message, since each edit message could be destined
// for a different server... So we need to actually manage multiple queued packets... one
// for each server
_packetsQueueLock.lock();
foreach (const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) {
// only send to the NodeTypes that are getMyNodeType()
if (node->getActiveSocket() && node->getType() == getMyNodeType()) {
@ -277,12 +275,12 @@ void OctreeEditPacketSender::queueOctreeEditMessage(PacketType type, unsigned ch
if ((type != packetBuffer._currentType && packetBuffer._currentSize > 0) ||
(packetBuffer._currentSize + length >= (size_t)_maxPacketSize)) {
releaseQueuedPacket(packetBuffer);
initializePacket(packetBuffer, type);
initializePacket(packetBuffer, type, node->getClockSkewUsec());
}
// If the buffer is empty and not correctly initialized for our type...
if (type != packetBuffer._currentType && packetBuffer._currentSize == 0) {
initializePacket(packetBuffer, type);
initializePacket(packetBuffer, type, node->getClockSkewUsec());
}
// This is really the first time we know which server/node this particular edit message
@ -330,14 +328,14 @@ void OctreeEditPacketSender::releaseQueuedPacket(EditPacketBuffer& packetBuffer)
_releaseQueuedPacketMutex.unlock();
}
void OctreeEditPacketSender::initializePacket(EditPacketBuffer& packetBuffer, PacketType type) {
void OctreeEditPacketSender::initializePacket(EditPacketBuffer& packetBuffer, PacketType type, int nodeClockSkew) {
packetBuffer._currentSize = populatePacketHeader(reinterpret_cast<char*>(&packetBuffer._currentBuffer[0]), type);
// skip over sequence number for now; will be packed when packet is ready to be sent out
packetBuffer._currentSize += sizeof(quint16);
// pack in timestamp
quint64 now = usecTimestampNow();
quint64 now = usecTimestampNow() + nodeClockSkew;
quint64* timeAt = (quint64*)&packetBuffer._currentBuffer[packetBuffer._currentSize];
*timeAt = now;
packetBuffer._currentSize += sizeof(quint64); // nudge past timestamp
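Stamping the packet with usecTimestampNow() + nodeClockSkew puts the timestamp in the receiving server's clock domain, which is what keeps the transitTime computed in OctreeInboundPacketProcessor meaningful. Illustrative numbers, assuming getClockSkewUsec() reports how far that node's clock runs ahead of the local one: if the server is 2,000,000 usecs ahead and the true network transit is 5,000 usecs, an unadjusted timestamp makes arrivedAt - sentAt read roughly 2,005,000 usecs, while the adjusted one reads about 5,000.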

View file

@ -101,7 +101,7 @@ protected:
void queuePacketToNode(const QUuid& nodeID, unsigned char* buffer, size_t length, qint64 satoshiCost = 0);
void queuePendingPacketToNodes(PacketType type, unsigned char* buffer, size_t length, qint64 satoshiCost = 0);
void queuePacketToNodes(unsigned char* buffer, size_t length, qint64 satoshiCost = 0);
void initializePacket(EditPacketBuffer& packetBuffer, PacketType type);
void initializePacket(EditPacketBuffer& packetBuffer, PacketType type, int nodeClockSkew);
void releaseQueuedPacket(EditPacketBuffer& packetBuffer); // releases specific queued packet
void processPreServerExistsPackets();

View file

@ -36,6 +36,7 @@ bool OctreePersistThread::process() {
{
PerformanceWarning warn(true, "Loading Octree File", true);
persistantFileRead = _tree->readFromSVOFile(_filename.toLocal8Bit().constData());
_tree->pruneTree();
}
_tree->unlock();
@ -80,10 +81,14 @@ bool OctreePersistThread::process() {
// check the dirty bit and persist here...
_lastCheck = usecTimestampNow();
if (_tree->isDirty()) {
qDebug() << "saving Octrees to file " << _filename << "...";
qDebug() << "pruning Octree before saving...";
_tree->pruneTree();
qDebug() << "DONE pruning Octree before saving...";
qDebug() << "saving Octree to file " << _filename << "...";
_tree->writeToSVOFile(_filename.toLocal8Bit().constData());
_tree->clearDirtyBit(); // tree is clean after saving
qDebug("DONE saving Octrees to file...");
qDebug("DONE saving Octree to file...");
}
}
}

View file

@ -290,7 +290,7 @@ void ScriptEngine::init() {
qScriptRegisterMetaType(this, animationDetailsToScriptValue, animationDetailsFromScriptValue);
registerGlobalObject("Script", this);
registerGlobalObject("Audio", &_audioScriptingInterface);
registerGlobalObject("Audio", &AudioScriptingInterface::getInstance());
registerGlobalObject("Controller", _controllerScriptingInterface);
registerGlobalObject("Entities", &_entityScriptingInterface);
registerGlobalObject("Quat", &_quatLibrary);

View file

@ -142,7 +142,6 @@ private:
static EntityScriptingInterface _entityScriptingInterface;
AbstractControllerScriptingInterface* _controllerScriptingInterface;
AudioScriptingInterface _audioScriptingInterface;
AvatarData* _avatarData;
QString _scriptName;
QString _fileNameString;