Merge branch 'master' of https://github.com/highfidelity/hifi into metavoxels

Andrzej Kapolka 2014-03-26 12:43:33 -07:00
commit 97b895be16
18 changed files with 237 additions and 73 deletions

View file

@ -26,7 +26,8 @@
Agent::Agent(const QByteArray& packet) :
ThreadedAssignment(packet),
_voxelEditSender(),
_particleEditSender()
_particleEditSender(),
_receivedAudioBuffer(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO)
{
// be the parent of the script engine so it gets moved when we do
_scriptEngine.setParent(this);
@ -113,6 +114,16 @@ void Agent::readPendingDatagrams() {
_voxelViewer.processDatagram(mutablePacket, sourceNode);
}
} else if (datagramPacketType == PacketTypeMixedAudio) {
// parse the data and grab the average loudness
_receivedAudioBuffer.parseData(receivedPacket);
// pretend like we have read the samples from this buffer so it does not fill
static int16_t garbageAudioBuffer[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO];
_receivedAudioBuffer.readSamples(garbageAudioBuffer, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO);
// let this continue through to the NodeList so it updates last heard timestamp
// for the sending audio mixer
} else {
NodeList::getInstance()->processNodeData(senderSockAddr, receivedPacket);
}

View file

@ -15,6 +15,7 @@
#include <QtCore/QObject>
#include <QtCore/QUrl>
#include <MixedAudioRingBuffer.h>
#include <ParticleEditPacketSender.h>
#include <ParticleTree.h>
#include <ParticleTreeHeadlessViewer.h>
@ -30,6 +31,7 @@ class Agent : public ThreadedAssignment {
Q_PROPERTY(bool isAvatar READ isAvatar WRITE setIsAvatar)
Q_PROPERTY(bool isPlayingAvatarSound READ isPlayingAvatarSound)
Q_PROPERTY(bool isListeningToAudioStream READ isListeningToAudioStream WRITE setIsListeningToAudioStream)
Q_PROPERTY(float lastReceivedAudioLoudness READ getLastReceivedAudioLoudness)
public:
Agent(const QByteArray& packet);
@ -41,6 +43,8 @@ public:
bool isListeningToAudioStream() const { return _scriptEngine.isListeningToAudioStream(); }
void setIsListeningToAudioStream(bool isListeningToAudioStream)
{ _scriptEngine.setIsListeningToAudioStream(isListeningToAudioStream); }
float getLastReceivedAudioLoudness() const { return _receivedAudioBuffer.getLastReadFrameAverageLoudness(); }
virtual void aboutToFinish();
@ -56,6 +60,8 @@ private:
ParticleTreeHeadlessViewer _particleViewer;
VoxelTreeHeadlessViewer _voxelViewer;
MixedAudioRingBuffer _receivedAudioBuffer;
};
#endif /* defined(__hifi__Agent__) */

View file

@ -93,7 +93,7 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
distanceBetween = EPSILON;
}
if (bufferToAdd->getAverageLoudness() / distanceBetween <= _minAudibilityThreshold) {
if (bufferToAdd->getNextOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) {
// according to mixer performance we have decided this does not get to be mixed in
// bail out
return;
@ -324,7 +324,7 @@ void AudioMixer::prepareMixForListeningNode(Node* node) {
if ((*otherNode != *node
|| otherNodeBuffer->shouldLoopbackForNode())
&& otherNodeBuffer->willBeAddedToMix()
&& otherNodeBuffer->getAverageLoudness() > 0) {
&& otherNodeBuffer->getNextOutputTrailingLoudness() > 0) {
addBufferToMixForListeningNodeWithBuffer(otherNodeBuffer, nodeRingBuffer);
}
}
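This check culls a source before any mixing work is done: if its trailing loudness divided by the distance to the listener is at or below the mixer's audibility threshold, the buffer is skipped outright. A tiny sketch with made-up numbers (the real _minAudibilityThreshold is tuned from mixer load elsewhere; these values are only illustrative):

#include <cstdio>

int main() {
    // made-up values; in AudioMixer they come from the ring buffer and node positions
    float trailingLoudness = 0.02f;          // stand-in for getNextOutputTrailingLoudness()
    float distanceBetween = 10.0f;           // distance from listener to source
    float minAudibilityThreshold = 0.005f;   // hypothetical performance-derived cutoff

    if (trailingLoudness / distanceBetween <= minAudibilityThreshold) {
        // 0.02 / 10 = 0.002 <= 0.005: too quiet to matter, skip this buffer entirely
        printf("culled\n");
    } else {
        printf("mixed\n");
    }
    return 0;
}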

View file

@ -92,7 +92,7 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend(int jitterBufferLengthSam
// calculate the average loudness for the next NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL
// that would be mixed in
_ringBuffers[i]->updateAverageLoudnessForBoundarySamples(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
_ringBuffers[i]->updateNextOutputTrailingLoudness();
}
}
}

View file

@ -32,6 +32,7 @@ const unsigned int AVATAR_DATA_SEND_INTERVAL_MSECS = (1.0f / 60.0f) * 1000;
AvatarMixer::AvatarMixer(const QByteArray& packet) :
ThreadedAssignment(packet),
_broadcastThread(),
_lastFrameTimestamp(QDateTime::currentMSecsSinceEpoch()),
_trailingSleepRatio(1.0f),
_performanceThrottlingRatio(0.0f),
@ -44,6 +45,11 @@ AvatarMixer::AvatarMixer(const QByteArray& packet) :
connect(NodeList::getInstance(), &NodeList::nodeKilled, this, &AvatarMixer::nodeKilled);
}
AvatarMixer::~AvatarMixer() {
_broadcastThread.quit();
_broadcastThread.wait();
}
void attachAvatarDataToNode(Node* newNode) {
if (!newNode->getLinkedData()) {
newNode->setLinkedData(new AvatarMixerClientData());
@ -309,18 +315,15 @@ void AvatarMixer::run() {
nodeList->linkedDataCreateCallback = attachAvatarDataToNode;
// create a thread for broadcast of avatar data
QThread* broadcastThread = new QThread(this);
// setup the timer that will be fired on the broadcast thread
QTimer* broadcastTimer = new QTimer();
broadcastTimer->setInterval(AVATAR_DATA_SEND_INTERVAL_MSECS);
broadcastTimer->moveToThread(broadcastThread);
broadcastTimer->moveToThread(&_broadcastThread);
// connect appropriate signals and slots
connect(broadcastTimer, &QTimer::timeout, this, &AvatarMixer::broadcastAvatarData, Qt::DirectConnection);
connect(broadcastThread, SIGNAL(started()), broadcastTimer, SLOT(start()));
connect(&_broadcastThread, SIGNAL(started()), broadcastTimer, SLOT(start()));
// start the broadcastThread
broadcastThread->start();
_broadcastThread.start();
}
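Holding the broadcast thread as a member lets the new destructor quit() and wait() on it, and the Qt::DirectConnection above makes broadcastAvatarData() execute on that thread, where the timer fires, rather than on the main thread. A stripped-down sketch of the same pattern, with hypothetical names and a hypothetical interval:

#include <QObject>
#include <QThread>
#include <QTimer>

// Hypothetical, simplified illustration of the member-thread broadcast pattern.
class Broadcaster : public QObject {
    Q_OBJECT
public:
    ~Broadcaster() { _thread.quit(); _thread.wait(); }

    void run() {
        QTimer* timer = new QTimer();   // unparented so it can live on _thread
        timer->setInterval(16);         // hypothetical ~60 Hz interval
        timer->moveToThread(&_thread);
        // DirectConnection: the slot executes on _thread, where the timer emits timeout()
        connect(timer, &QTimer::timeout, this, &Broadcaster::broadcast, Qt::DirectConnection);
        connect(&_thread, SIGNAL(started()), timer, SLOT(start()));
        _thread.start();
    }

public slots:
    void broadcast() { /* package and send per-node data here */ }

private:
    QThread _thread;
};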

View file

@ -15,7 +15,7 @@
class AvatarMixer : public ThreadedAssignment {
public:
AvatarMixer(const QByteArray& packet);
~AvatarMixer();
public slots:
/// runs the avatar mixer
void run();
@ -30,6 +30,8 @@ public slots:
private:
void broadcastAvatarData();
QThread _broadcastThread;
quint64 _lastFrameTimestamp;
float _trailingSleepRatio;

View file

@ -1,5 +1,9 @@
<!--#include file="header.html"-->
<div id="nodes-lead" class="table-lead"><h3>Nodes</h3><div class="lead-line"></div></div>
<div style="clear:both;"></div>
<button type="button" class="btn btn-danger" id="kill-all-btn">
<span class="glyphicon glyphicon-fire"></span> Kill all Nodes
</button>
<table id="nodes-table" class="table table-striped">
<thead>
<tr>

View file

@ -49,4 +49,18 @@ $(document).ready(function(){
}
});
});
$(document.body).on('click', '#kill-all-btn', function() {
var confirmed_kill = confirm("Are you sure?");
if (confirmed_kill == true) {
$.ajax({
url: "/nodes/",
type: 'DELETE',
success: function(result) {
console.log("Successful request to delete all nodes.");
}
});
}
});
});

View file

@ -657,6 +657,8 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
const QString URI_ASSIGNMENT = "/assignment";
const QString URI_NODES = "/nodes";
const QString UUID_REGEX_STRING = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}";
if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
if (url.path() == "/assignments.json") {
// user is asking for json list of assignments
@ -726,9 +728,8 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
return true;
} else {
const QString NODE_REGEX_STRING =
QString("\\%1\\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}).json\\/?$").arg(URI_NODES);
QRegExp nodeShowRegex(NODE_REGEX_STRING);
const QString NODE_JSON_REGEX_STRING = QString("\\%1\\/(%2).json\\/?$").arg(URI_NODES).arg(UUID_REGEX_STRING);
QRegExp nodeShowRegex(NODE_JSON_REGEX_STRING);
if (nodeShowRegex.indexIn(url.path()) != -1) {
QUuid matchingUUID = QUuid(nodeShowRegex.cap(1));
@ -801,29 +802,35 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
return true;
}
} else if (connection->requestOperation() == QNetworkAccessManager::DeleteOperation) {
if (url.path().startsWith(URI_NODES)) {
// this is a request to DELETE a node by UUID
const QString ALL_NODE_DELETE_REGEX_STRING = QString("\\%1\\/?$").arg(URI_NODES);
const QString NODE_DELETE_REGEX_STRING = QString("\\%1\\/(%2)\\/$").arg(URI_NODES).arg(UUID_REGEX_STRING);
QRegExp allNodesDeleteRegex(ALL_NODE_DELETE_REGEX_STRING);
QRegExp nodeDeleteRegex(NODE_DELETE_REGEX_STRING);
if (nodeDeleteRegex.indexIn(url.path()) != -1) {
// this is a request to DELETE one node by UUID
// pull the UUID from the url
QUuid deleteUUID = QUuid(url.path().mid(URI_NODES.size() + sizeof('/')));
// pull the captured string, if it exists
QUuid deleteUUID = QUuid(nodeDeleteRegex.cap(1));
if (!deleteUUID.isNull()) {
SharedNodePointer nodeToKill = NodeList::getInstance()->nodeWithUUID(deleteUUID);
SharedNodePointer nodeToKill = NodeList::getInstance()->nodeWithUUID(deleteUUID);
if (nodeToKill) {
// start with a 200 response
connection->respond(HTTPConnection::StatusCode200);
if (nodeToKill) {
// start with a 200 response
connection->respond(HTTPConnection::StatusCode200);
// we have a valid UUID and node - kill the node that has this assignment
QMetaObject::invokeMethod(NodeList::getInstance(), "killNodeWithUUID", Q_ARG(const QUuid&, deleteUUID));
// successfully processed request
return true;
}
// we have a valid UUID and node - kill the node that has this assignment
QMetaObject::invokeMethod(NodeList::getInstance(), "killNodeWithUUID", Q_ARG(const QUuid&, deleteUUID));
// successfully processed request
return true;
}
// bad request, couldn't pull a node ID
connection->respond(HTTPConnection::StatusCode400);
return true;
} else if (allNodesDeleteRegex.indexIn(url.path()) != -1) {
qDebug() << "Received request to kill all nodes.";
NodeList::getInstance()->eraseAllNodes();
return true;
}
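With UUID_REGEX_STRING shared, the DELETE handler now routes purely on two patterns: one matching the bare /nodes collection and one capturing a single node's UUID. A small sketch of how those patterns resolve (the UUID below is just an example value; note that, as written, the single-node pattern only matches when the path ends in a trailing slash):

#include <QRegExp>
#include <QString>
#include <QDebug>

int main() {
    const QString URI_NODES = "/nodes";
    const QString UUID_REGEX_STRING =
        "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}";

    QRegExp allNodesDeleteRegex(QString("\\%1\\/?$").arg(URI_NODES));
    QRegExp nodeDeleteRegex(QString("\\%1\\/(%2)\\/$").arg(URI_NODES).arg(UUID_REGEX_STRING));

    // DELETE /nodes/ -> matches the "all nodes" pattern, so every node is killed
    qDebug() << (allNodesDeleteRegex.indexIn("/nodes/") != -1);       // true

    // DELETE /nodes/<uuid>/ -> matches the single-node pattern, cap(1) is the UUID
    QString onePath = "/nodes/6ba7b810-9dad-11d1-80b4-00c04fd430c8/"; // example UUID
    qDebug() << (nodeDeleteRegex.indexIn(onePath) != -1);             // true
    qDebug() << nodeDeleteRegex.cap(1);                               // the captured UUID
    return 0;
}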

View file

@ -215,6 +215,20 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
}
} else {
if (sourceAudioFormat.sampleRate() == destinationAudioFormat.sampleRate()) {
// mono to stereo, same sample rate
if (!(sourceAudioFormat.channelCount() == 1 && destinationAudioFormat.channelCount() == 2)) {
qWarning() << "Unsupported format conversion" << sourceAudioFormat << destinationAudioFormat;
return;
}
for (const int16_t* sourceEnd = sourceSamples + numSourceSamples; sourceSamples != sourceEnd;
sourceSamples++) {
*destinationSamples++ = *sourceSamples;
*destinationSamples++ = *sourceSamples;
}
return;
}
// upsample from 24 to 48
// for now this only supports a stereo to stereo conversion - this is our case for network audio to output
int sourceIndex = 0;
@ -551,12 +565,14 @@ void Audio::handleAudioInput() {
// send whatever procedural sounds we want to locally loop back to the _proceduralOutputDevice
QByteArray proceduralOutput;
proceduralOutput.resize(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 4 * sizeof(int16_t));
proceduralOutput.resize(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * _outputFormat.sampleRate() *
_outputFormat.channelCount() * sizeof(int16_t) / (_desiredInputFormat.sampleRate() *
_desiredInputFormat.channelCount()));
linearResampling(_localProceduralSamples,
reinterpret_cast<int16_t*>(proceduralOutput.data()),
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 4,
proceduralOutput.size() / sizeof(int16_t),
_desiredInputFormat, _outputFormat);
if (_proceduralOutputDevice) {
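The resize above replaces the old hard-coded factor of 4 with the ratio of the output format to the desired input format, so the procedural loopback buffer always matches what linearResampling will produce. A worked example with assumed values (24 kHz mono input, 48 kHz stereo output, and a stand-in network frame size; none of these numbers are taken from the diff itself):

#include <cstdio>

int main() {
    // Hypothetical numbers; the real values come from _desiredInputFormat and _outputFormat.
    int networkSamplesPerChannel = 512;               // stand-in for NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL
    int inputSampleRate = 24000,  inputChannels = 1;  // assumed 24 kHz mono input format
    int outputSampleRate = 48000, outputChannels = 2; // assumed 48 kHz stereo output format

    int outputSamples = networkSamplesPerChannel * outputSampleRate * outputChannels
                        / (inputSampleRate * inputChannels);
    // 512 * 48000 * 2 / (24000 * 1) = 2048, i.e. the old hard-coded "* 4"
    printf("%d samples\n", outputSamples);
    return 0;
}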

View file

@ -18,9 +18,9 @@
AudioRingBuffer::AudioRingBuffer(int numFrameSamples) :
NodeData(),
_sampleCapacity(numFrameSamples * RING_BUFFER_LENGTH_FRAMES),
_numFrameSamples(numFrameSamples),
_isStarved(true),
_hasStarted(false),
_averageLoudness(0)
_hasStarted(false)
{
if (numFrameSamples) {
_buffer = new int16_t[_sampleCapacity];
@ -56,33 +56,6 @@ int AudioRingBuffer::parseData(const QByteArray& packet) {
return writeData(packet.data() + numBytesPacketHeader, packet.size() - numBytesPacketHeader);
}
void AudioRingBuffer::updateAverageLoudnessForBoundarySamples(int numSamples) {
// ForBoundarySamples means that we expect the number of samples not to roll off the end of the ring buffer
float nextLoudness = 0;
for (int i = 0; i < numSamples; ++i) {
nextLoudness += fabsf(_nextOutput[i]);
}
nextLoudness /= numSamples;
nextLoudness /= MAX_SAMPLE_VALUE;
const int TRAILING_AVERAGE_FRAMES = 100;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
const float LOUDNESS_EPSILON = 0.01f;
if (nextLoudness >= _averageLoudness) {
_averageLoudness = nextLoudness;
} else {
_averageLoudness = (_averageLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * nextLoudness);
if (_averageLoudness < LOUDNESS_EPSILON) {
_averageLoudness = 0;
}
}
}
qint64 AudioRingBuffer::readSamples(int16_t* destination, qint64 maxSamples) {
return readData((char*) destination, maxSamples * sizeof(int16_t));
}
@ -112,7 +85,7 @@ qint64 AudioRingBuffer::readData(char *data, qint64 maxSize) {
return numReadSamples * sizeof(int16_t);
}
qint64 AudioRingBuffer::writeSamples(const int16_t* source, qint64 maxSamples) {
qint64 AudioRingBuffer::writeSamples(const int16_t* source, qint64 maxSamples) {
return writeData((const char*) source, maxSamples * sizeof(int16_t));
}

View file

@ -49,9 +49,6 @@ public:
// assume callers using this will never wrap around the end
const int16_t* getNextOutput() { return _nextOutput; }
const int16_t* getBuffer() { return _buffer; }
void updateAverageLoudnessForBoundarySamples(int numSamples);
float getAverageLoudness() const { return _averageLoudness; }
qint64 readSamples(int16_t* destination, qint64 maxSamples);
qint64 writeSamples(const int16_t* source, qint64 maxSamples);
@ -81,13 +78,12 @@ protected:
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
int _sampleCapacity;
int _numFrameSamples;
int16_t* _nextOutput;
int16_t* _endOfLastWrite;
int16_t* _buffer;
bool _isStarved;
bool _hasStarted;
float _averageLoudness;
};
#endif /* defined(__interface__AudioRingBuffer__) */

View file

@ -0,0 +1,49 @@
//
// MixedAudioRingBuffer.cpp
// hifi
//
// Created by Stephen Birarda on 2014-03-26.
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#include "MixedAudioRingBuffer.h"
MixedAudioRingBuffer::MixedAudioRingBuffer(int numFrameSamples) :
AudioRingBuffer(numFrameSamples),
_lastReadFrameAverageLoudness(0.0f)
{
}
qint64 MixedAudioRingBuffer::readSamples(int16_t* destination, qint64 maxSamples) {
// calculate the average loudness for the frame about to go out
// read either _numFrameSamples from _nextOutput, or only up to the end of the buffer if the frame wraps
int samplesFromNextOutput = _buffer + _sampleCapacity - _nextOutput;
if (samplesFromNextOutput > _numFrameSamples) {
samplesFromNextOutput = _numFrameSamples;
}
float averageLoudness = 0.0f;
for (int s = 0; s < samplesFromNextOutput; s++) {
averageLoudness += fabsf(_nextOutput[s]);
}
// read samples from the beginning of the buffer, if any
int samplesFromBeginning = _numFrameSamples - samplesFromNextOutput;
if (samplesFromBeginning > 0) {
for (int b = 0; b < samplesFromBeginning; b++) {
averageLoudness += fabsf(_buffer[b]);
}
}
// divide by the number of samples and the MAX_SAMPLE_VALUE to get a float from 0 - 1
averageLoudness /= (float) _numFrameSamples;
averageLoudness /= (float) MAX_SAMPLE_VALUE;
_lastReadFrameAverageLoudness = averageLoudness;
return AudioRingBuffer::readSamples(destination, maxSamples);
}
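readSamples() above measures the loudness of the frame it is about to return, walking the samples in two runs because the frame may wrap past the end of the ring buffer's backing array. A minimal sketch of that split with made-up sizes (the names below are stand-ins for the members used in the real method):

#include <cstdio>

int main() {
    // made-up sizes showing how a frame that wraps the ring buffer splits into two runs
    int sampleCapacity   = 10240;   // stand-in for _sampleCapacity
    int numFrameSamples  = 1024;    // stand-in for _numFrameSamples
    int nextOutputOffset = 9728;    // stand-in for _nextOutput - _buffer

    // first run: from _nextOutput up to the end of the underlying array
    int samplesFromNextOutput = sampleCapacity - nextOutputOffset;       // 512
    if (samplesFromNextOutput > numFrameSamples) {
        samplesFromNextOutput = numFrameSamples;
    }
    // second run: whatever is left wraps around to the start of the array
    int samplesFromBeginning = numFrameSamples - samplesFromNextOutput;  // 512

    // the frame loudness is the sum of |sample| over both runs,
    // divided by numFrameSamples and MAX_SAMPLE_VALUE
    printf("%d + %d samples\n", samplesFromNextOutput, samplesFromBeginning);
    return 0;
}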

View file

@ -0,0 +1,26 @@
//
// MixedAudioRingBuffer.h
// hifi
//
// Created by Stephen Birarda on 2014-03-26.
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#ifndef __hifi__MixedAudioRingBuffer__
#define __hifi__MixedAudioRingBuffer__
#include "AudioRingBuffer.h"
class MixedAudioRingBuffer : public AudioRingBuffer {
Q_OBJECT
public:
MixedAudioRingBuffer(int numFrameSamples);
float getLastReadFrameAverageLoudness() const { return _lastReadFrameAverageLoudness; }
qint64 readSamples(int16_t* destination, qint64 maxSamples);
private:
float _lastReadFrameAverageLoudness;
};
#endif /* defined(__hifi__MixedAudioRingBuffer__) */

View file

@ -72,6 +72,33 @@ int PositionalAudioRingBuffer::parsePositionalData(const QByteArray& positionalB
return packetStream.device()->pos();
}
void PositionalAudioRingBuffer::updateNextOutputTrailingLoudness() {
// we expect the frame's samples not to roll off the end of the ring buffer
float nextLoudness = 0;
for (int i = 0; i < _numFrameSamples; ++i) {
nextLoudness += fabsf(_nextOutput[i]);
}
nextLoudness /= _numFrameSamples;
nextLoudness /= MAX_SAMPLE_VALUE;
const int TRAILING_AVERAGE_FRAMES = 100;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
const float LOUDNESS_EPSILON = 0.01f;
if (nextLoudness >= _nextOutputTrailingLoudness) {
_nextOutputTrailingLoudness = nextLoudness;
} else {
_nextOutputTrailingLoudness = (_nextOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * nextLoudness);
if (_nextOutputTrailingLoudness < LOUDNESS_EPSILON) {
_nextOutputTrailingLoudness = 0;
}
}
}
bool PositionalAudioRingBuffer::shouldBeAddedToMix(int numJitterBufferSamples) {
if (!isNotStarvedOrHasMinimumSamples(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL + numJitterBufferSamples)) {
if (_shouldOutputStarveDebug) {
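The trailing loudness computed above is an asymmetric moving average: it jumps immediately up to a louder frame, decays toward quieter frames with a 1/100 per-frame weight, and snaps to zero once it falls below 0.01. A short worked example applying the same update rule to made-up frame loudness values:

#include <cstdio>

// Same update rule as updateNextOutputTrailingLoudness(), applied to made-up inputs.
float updateTrailing(float trailing, float nextLoudness) {
    const int TRAILING_AVERAGE_FRAMES = 100;
    const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
    const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
    const float LOUDNESS_EPSILON = 0.01f;

    if (nextLoudness >= trailing) {
        return nextLoudness;                                          // rise instantly
    }
    trailing = trailing * PREVIOUS_FRAMES_RATIO + nextLoudness * CURRENT_FRAME_RATIO;
    return (trailing < LOUDNESS_EPSILON) ? 0.0f : trailing;           // decay slowly, clamp to silence
}

int main() {
    float frames[] = { 0.5f, 0.0f, 0.0f, 0.8f, 0.0f };   // made-up per-frame loudness values
    float trailing = 0.0f;
    for (float f : frames) {
        trailing = updateTrailing(trailing, f);
        printf("%f\n", trailing);   // 0.5, 0.495, 0.49005, 0.8, 0.792
    }
    return 0;
}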

View file

@ -28,6 +28,9 @@ public:
int parsePositionalData(const QByteArray& positionalByteArray);
int parseListenModeData(const QByteArray& listenModeByteArray);
void updateNextOutputTrailingLoudness();
float getNextOutputTrailingLoudness() const { return _nextOutputTrailingLoudness; }
bool shouldBeAddedToMix(int numJitterBufferSamples);
bool willBeAddedToMix() const { return _willBeAddedToMix; }
@ -50,6 +53,8 @@ protected:
bool _willBeAddedToMix;
bool _shouldLoopbackForNode;
bool _shouldOutputStarveDebug;
float _nextOutputTrailingLoudness;
};
#endif /* defined(__hifi__PositionalAudioRingBuffer__) */

View file

@ -83,9 +83,33 @@ NodeList::NodeList(char newOwnerType, unsigned short int newSocketListenPort) :
// clear our NodeList when logout is requested
connect(&AccountManager::getInstance(), &AccountManager::logoutComplete , this, &NodeList::reset);
const int LARGER_SNDBUF_SIZE = 1048576;
changeSendSocketBufferSize(LARGER_SNDBUF_SIZE);
_packetStatTimer.start();
}
void NodeList::changeSendSocketBufferSize(int numSendBytes) {
// change the socket send buffer size to the requested number of bytes
int oldBufferSize = 0;
#ifdef Q_OS_WIN
int sizeOfInt = sizeof(oldBufferSize);
#else
unsigned int sizeOfInt = sizeof(oldBufferSize);
#endif
getsockopt(_nodeSocket.socketDescriptor(), SOL_SOCKET, SO_SNDBUF, reinterpret_cast<char*>(&oldBufferSize), &sizeOfInt);
setsockopt(_nodeSocket.socketDescriptor(), SOL_SOCKET, SO_SNDBUF, reinterpret_cast<const char*>(&numSendBytes),
sizeof(numSendBytes));
int newBufferSize = 0;
getsockopt(_nodeSocket.socketDescriptor(), SOL_SOCKET, SO_SNDBUF, reinterpret_cast<char*>(&newBufferSize), &sizeOfInt);
qDebug() << "Changed socket send buffer size from" << oldBufferSize << "to" << newBufferSize << "bytes";
}
bool NodeList::packetVersionAndHashMatch(const QByteArray& packet) {
PacketType checkType = packetTypeForPacket(packet);
if (packet[1] != versionForPacketType(checkType)
@ -359,7 +383,7 @@ NodeHash NodeList::getNodeHash() {
return NodeHash(_nodeHash);
}
void NodeList::clear() {
void NodeList::eraseAllNodes() {
qDebug() << "Clearing the NodeList. Deleting all nodes in list.";
QMutexLocker locker(&_nodeHashMutex);
@ -373,7 +397,7 @@ void NodeList::clear() {
}
void NodeList::reset() {
clear();
eraseAllNodes();
_numNoReplyDomainCheckIns = 0;
// refresh the owner UUID to the NULL UUID

View file

@ -128,6 +128,7 @@ public:
void saveData(QSettings* settings);
public slots:
void reset();
void eraseAllNodes();
void sendDomainServerCheckIn();
void pingInactiveNodes();
@ -154,13 +155,13 @@ private:
const QUuid& connectionSecret);
NodeHash::iterator killNodeAtHashIterator(NodeHash::iterator& nodeItemToKill);
void clear();
void processDomainServerAuthRequest(const QByteArray& packet);
void requestAuthForDomainServer();
void activateSocketFromNodeCommunication(const QByteArray& packet, const SharedNodePointer& sendingNode);
void timePingReply(const QByteArray& packet, const SharedNodePointer& sendingNode);
void changeSendSocketBufferSize(int numSendBytes);
NodeHash _nodeHash;
QMutex _nodeHashMutex;