Merge branch 'master' of https://github.com/worklist/hifi into metavoxels
Conflicts: interface/src/Application.cpp
commit a5c8531aa1
62 changed files with 1791 additions and 752 deletions
@@ -24,6 +24,8 @@
Agent::Agent(const unsigned char* dataBuffer, int numBytes) :
ThreadedAssignment(dataBuffer, numBytes)
{
_particleScriptingInterface.init();
_voxelScriptingInterface.init();
}

void Agent::processDatagram(const QByteArray& dataByteArray, const HifiSockAddr& senderSockAddr) {
@@ -10,7 +10,6 @@
#include <fcntl.h>
#include <fstream>
#include <iostream>
#include <limits>
#include <math.h>
#include <signal.h>
#include <stdio.h>

@@ -54,10 +53,7 @@
const short JITTER_BUFFER_MSECS = 12;
const short JITTER_BUFFER_SAMPLES = JITTER_BUFFER_MSECS * (SAMPLE_RATE / 1000.0);

const unsigned int BUFFER_SEND_INTERVAL_USECS = floorf((BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float) SAMPLE_RATE) * 1000 * 1000);

const int MAX_SAMPLE_VALUE = std::numeric_limits<int16_t>::max();
const int MIN_SAMPLE_VALUE = std::numeric_limits<int16_t>::min();
const unsigned int BUFFER_SEND_INTERVAL_USECS = floorf((NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float) SAMPLE_RATE) * 1000 * 1000);

const char AUDIO_MIXER_LOGGING_TARGET_NAME[] = "audio-mixer";
@@ -160,35 +156,29 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
}
}

int16_t* sourceBuffer = bufferToAdd->getNextOutput();
// if the bearing relative angle to source is > 0 then the delayed channel is the right one
int delayedChannelOffset = (bearingRelativeAngleToSource > 0.0f) ? 1 : 0;
int goodChannelOffset = delayedChannelOffset == 0 ? 1 : 0;

int16_t* goodChannel = (bearingRelativeAngleToSource > 0.0f)
? _clientSamples
: _clientSamples + BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
int16_t* delayedChannel = (bearingRelativeAngleToSource > 0.0f)
? _clientSamples + BUFFER_LENGTH_SAMPLES_PER_CHANNEL
: _clientSamples;

int16_t* delaySamplePointer = bufferToAdd->getNextOutput() == bufferToAdd->getBuffer()
? bufferToAdd->getBuffer() + RING_BUFFER_LENGTH_SAMPLES - numSamplesDelay
: bufferToAdd->getNextOutput() - numSamplesDelay;

for (int s = 0; s < BUFFER_LENGTH_SAMPLES_PER_CHANNEL; s++) {
if (s < numSamplesDelay) {
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 2) {
if ((s / 2) < numSamplesDelay) {
// pull the earlier sample for the delayed channel
int earlierSample = delaySamplePointer[s] * attenuationCoefficient * weakChannelAmplitudeRatio;

delayedChannel[s] = glm::clamp(delayedChannel[s] + earlierSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
int earlierSample = (*bufferToAdd)[(s / 2) - numSamplesDelay] * attenuationCoefficient * weakChannelAmplitudeRatio;
_clientSamples[s + delayedChannelOffset] = glm::clamp(_clientSamples[s + delayedChannelOffset] + earlierSample,
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}

// pull the current sample for the good channel
int16_t currentSample = sourceBuffer[s] * attenuationCoefficient;
goodChannel[s] = glm::clamp(goodChannel[s] + currentSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
int16_t currentSample = (*bufferToAdd)[s / 2] * attenuationCoefficient;
_clientSamples[s + goodChannelOffset] = glm::clamp(_clientSamples[s + goodChannelOffset] + currentSample,
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);

if (s + numSamplesDelay < BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
// place the curernt sample at the right spot in the delayed channel
int sumSample = delayedChannel[s + numSamplesDelay] + (currentSample * weakChannelAmplitudeRatio);
delayedChannel[s + numSamplesDelay] = glm::clamp(sumSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
if ((s / 2) + numSamplesDelay < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
// place the current sample at the right spot in the delayed channel
int16_t clampedSample = glm::clamp((int) (_clientSamples[s + (numSamplesDelay * 2) + delayedChannelOffset]
+ (currentSample * weakChannelAmplitudeRatio)),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
_clientSamples[s + (numSamplesDelay * 2) + delayedChannelOffset] = clampedSample;
}
}
}
@@ -282,7 +272,7 @@ void AudioMixer::run() {
gettimeofday(&startTime, NULL);

int numBytesPacketHeader = numBytesForPacketHeader((unsigned char*) &PACKET_TYPE_MIXED_AUDIO);
unsigned char clientPacket[BUFFER_LENGTH_BYTES_STEREO + numBytesPacketHeader];
unsigned char clientPacket[NETWORK_BUFFER_LENGTH_BYTES_STEREO + numBytesPacketHeader];
populateTypeAndVersion(clientPacket, PACKET_TYPE_MIXED_AUDIO);

while (!_isFinished) {

@@ -35,7 +35,7 @@ private:
void prepareMixForListeningNode(Node* node);

int16_t _clientSamples[BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 2];
int16_t _clientSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO];
};

#endif /* defined(__hifi__AudioMixer__) */
@@ -90,17 +90,15 @@ void AudioMixerClientData::pushBuffersAfterFrameSend() {
// this was a used buffer, push the output pointer forwards
PositionalAudioRingBuffer* audioBuffer = _ringBuffers[i];

if (audioBuffer->willBeAddedToMix()) {
audioBuffer->setNextOutput(audioBuffer->getNextOutput() + BUFFER_LENGTH_SAMPLES_PER_CHANNEL);

if (audioBuffer->getNextOutput() >= audioBuffer->getBuffer() + RING_BUFFER_LENGTH_SAMPLES) {
audioBuffer->setNextOutput(audioBuffer->getBuffer());
}
if (audioBuffer->willBeAddedToMix()) {
audioBuffer->shiftReadPosition(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);

audioBuffer->setWillBeAddedToMix(false);
} else if (audioBuffer->hasStarted() && audioBuffer->isStarved()) {
delete audioBuffer;
_ringBuffers.erase(_ringBuffers.begin() + i);
} else if (audioBuffer->isStarved()) {
// this was previously the kill for injected audio from a client
// fix when that is added back
// delete audioBuffer;
// _ringBuffers.erase(_ringBuffers.begin() + i);
}
}
}
@@ -97,6 +97,7 @@ link_hifi_library(metavoxels ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(particles ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(avatars ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(audio ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(scriptengine ${TARGET_NAME} ${ROOT_DIR})

# find required libraries
find_package(Faceshift)
@@ -72,7 +72,7 @@ const int IDLE_SIMULATE_MSECS = 16; // How often should call simul
// in the idle loop? (60 FPS is default)
static QTimer* idleTimer = NULL;

const int STARTUP_JITTER_SAMPLES = PACKET_LENGTH_SAMPLES_PER_CHANNEL / 2;
const int STARTUP_JITTER_SAMPLES = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / 2;
// Startup optimistically with small jitter buffer that
// will start playback on the second received audio packet.

@@ -266,7 +266,7 @@ Application::~Application() {
_sharedVoxelSystem.changeTree(new VoxelTree);

VoxelTreeElement::removeDeleteHook(&_voxels); // we don't need to do this processing on shutdown
delete Menu::getInstance();
Menu::getInstance()->deleteLater();

delete _settings;
delete _followMode;

@@ -1401,6 +1401,7 @@ void Application::terminate() {
pthread_join(_networkReceiveThread, NULL);
}

printf("");
_voxelProcessor.terminate();
_voxelHideShowThread.terminate();
_voxelEditSender.terminate();
@@ -1522,7 +1523,7 @@ void Application::shootParticle() {
QString updateScript("");

ParticleEditHandle* particleEditHandle = makeParticle(position / (float)TREE_SCALE, radius, color,
velocity / (float)TREE_SCALE, gravity, damping, updateScript);
velocity / (float)TREE_SCALE, gravity, damping, NOT_IN_HAND, updateScript);

// If we wanted to be able to edit this particle after shooting, then we could store this value
// and use it for editing later. But we don't care about that for "shooting" and therefore we just

@@ -1533,16 +1534,16 @@ void Application::shootParticle() {

// Caller is responsible for managing this EditableParticle
ParticleEditHandle* Application::newParticleEditHandle(uint32_t id) {
ParticleEditHandle* particleEditHandle = new ParticleEditHandle(&_particleEditSender, _particles.getTree());
ParticleEditHandle* particleEditHandle = new ParticleEditHandle(&_particleEditSender, _particles.getTree(), id);
return particleEditHandle;
}

// Caller is responsible for managing this EditableParticle
ParticleEditHandle* Application::makeParticle(glm::vec3 position, float radius, xColor color, glm::vec3 velocity,
glm::vec3 gravity, float damping, QString updateScript) {
glm::vec3 gravity, float damping, bool inHand, QString updateScript) {

ParticleEditHandle* particleEditHandle = newParticleEditHandle();
particleEditHandle->createParticle(position, radius, color, velocity, gravity, damping, updateScript);
particleEditHandle->createParticle(position, radius, color, velocity, gravity, damping, inHand, updateScript);
return particleEditHandle;
}
@@ -1859,7 +1860,7 @@ void Application::init() {

_metavoxels.init();

_particleCollisionSystem.init(&_particleEditSender, _particles.getTree(), _voxels.getTree(), &_audio);
_particleCollisionSystem.init(&_particleEditSender, _particles.getTree(), _voxels.getTree(), &_audio, &_myAvatar);

_palette.init(_glWidget->width(), _glWidget->height());
_palette.addAction(Menu::getInstance()->getActionForOption(MenuOption::VoxelAddMode), 0, 0);

@@ -3497,13 +3498,13 @@ void Application::displayStats() {

// iterate all the current voxel stats, and list their sending modes, and total voxel counts
std::stringstream sendingMode("");
sendingMode << "Voxel Sending Mode: [";
sendingMode << "Octree Sending Mode: [";
int serverCount = 0;
int movingServerCount = 0;
unsigned long totalNodes = 0;
unsigned long totalInternal = 0;
unsigned long totalLeaves = 0;
for(NodeToVoxelSceneStatsIterator i = _voxelServerSceneStats.begin(); i != _voxelServerSceneStats.end(); i++) {
for(NodeToVoxelSceneStatsIterator i = _octreeServerSceneStats.begin(); i != _octreeServerSceneStats.end(); i++) {
//const QUuid& uuid = i->first;
VoxelSceneStats& stats = i->second;
serverCount++;
@@ -4151,7 +4152,7 @@ void Application::domainChanged(QString domain) {

// reset our node to stats and node to jurisdiction maps... since these must be changing...
_voxelServerJurisdictions.clear();
_voxelServerSceneStats.clear();
_octreeServerSceneStats.clear();
_particleServerJurisdictions.clear();
}

@@ -4186,8 +4187,8 @@ void Application::nodeKilled(Node* node) {

// also clean up scene stats for that server
_voxelSceneStatsLock.lockForWrite();
if (_voxelServerSceneStats.find(nodeUUID) != _voxelServerSceneStats.end()) {
_voxelServerSceneStats.erase(nodeUUID);
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
_octreeServerSceneStats.erase(nodeUUID);
}
_voxelSceneStatsLock.unlock();

@@ -4217,8 +4218,8 @@ void Application::nodeKilled(Node* node) {

// also clean up scene stats for that server
_voxelSceneStatsLock.lockForWrite();
if (_voxelServerSceneStats.find(nodeUUID) != _voxelServerSceneStats.end()) {
_voxelServerSceneStats.erase(nodeUUID);
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
_octreeServerSceneStats.erase(nodeUUID);
}
_voxelSceneStatsLock.unlock();
@@ -4245,8 +4246,8 @@ void Application::trackIncomingVoxelPacket(unsigned char* messageData, ssize_t m

// now that we know the node ID, let's add these stats to the stats for that node...
_voxelSceneStatsLock.lockForWrite();
if (_voxelServerSceneStats.find(nodeUUID) != _voxelServerSceneStats.end()) {
VoxelSceneStats& stats = _voxelServerSceneStats[nodeUUID];
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
VoxelSceneStats& stats = _octreeServerSceneStats[nodeUUID];
stats.trackIncomingOctreePacket(messageData, messageLength, wasStatsPacket);
}
_voxelSceneStatsLock.unlock();

@@ -4269,10 +4270,10 @@ int Application::parseOctreeStats(unsigned char* messageData, ssize_t messageLen

// now that we know the node ID, let's add these stats to the stats for that node...
_voxelSceneStatsLock.lockForWrite();
if (_voxelServerSceneStats.find(nodeUUID) != _voxelServerSceneStats.end()) {
_voxelServerSceneStats[nodeUUID].unpackFromMessage(messageData, messageLength);
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
_octreeServerSceneStats[nodeUUID].unpackFromMessage(messageData, messageLength);
} else {
_voxelServerSceneStats[nodeUUID] = temp;
_octreeServerSceneStats[nodeUUID] = temp;
}
_voxelSceneStatsLock.unlock();
@@ -4414,3 +4415,67 @@ void Application::packetSentNotification(ssize_t length) {
_bandwidthMeter.outputStream(BandwidthMeter::VOXELS).updateValue(length);
}

void Application::loadScript() {
// shut down and stop any existing script
QString desktopLocation = QStandardPaths::writableLocation(QStandardPaths::DesktopLocation);
QString suggestedName = desktopLocation.append("/script.js");

QString fileNameString = QFileDialog::getOpenFileName(_glWidget, tr("Open Script"), suggestedName,
tr("JavaScript Files (*.js)"));
QByteArray fileNameAscii = fileNameString.toLocal8Bit();
const char* fileName = fileNameAscii.data();

printf("fileName:%s\n",fileName);

std::ifstream file(fileName, std::ios::in|std::ios::binary|std::ios::ate);
if(!file.is_open()) {
printf("error loading file\n");
return;
}
qDebug("loading file %s...\n", fileName);

// get file length....
unsigned long fileLength = file.tellg();
file.seekg( 0, std::ios::beg );

// read the entire file into a buffer, WHAT!? Why not.
char* entireFile = new char[fileLength+1];
file.read((char*)entireFile, fileLength);
file.close();

entireFile[fileLength] = 0;// null terminate
QString script(entireFile);
delete[] entireFile;

// start the script on a new thread...
bool wantMenuItems = true; // tells the ScriptEngine object to add menu items for itself

ScriptEngine* scriptEngine = new ScriptEngine(script, wantMenuItems, fileName, Menu::getInstance());
scriptEngine->setupMenuItems();

// setup the packet senders and jurisdiction listeners of the script engine's scripting interfaces so
// we can use the same ones from the application.
scriptEngine->getVoxelScriptingInterface()->setPacketSender(&_voxelEditSender);
scriptEngine->getParticleScriptingInterface()->setPacketSender(&_particleEditSender);

QThread* workerThread = new QThread(this);

// when the worker thread is started, call our engine's run..
connect(workerThread, SIGNAL(started()), scriptEngine, SLOT(run()));

// when the thread is terminated, add both scriptEngine and thread to the deleteLater queue
connect(scriptEngine, SIGNAL(finished()), scriptEngine, SLOT(deleteLater()));
connect(workerThread, SIGNAL(finished()), workerThread, SLOT(deleteLater()));

// when the application is about to quit, stop our script engine so it unwinds properly
connect(this, SIGNAL(aboutToQuit()), scriptEngine, SLOT(stop()));

scriptEngine->moveToThread(workerThread);

// Starts an event loop, and emits workerThread->started()
workerThread->start();

// restore the main window's active state
_window->activateWindow();
}
@@ -24,6 +24,7 @@
#include <PacketHeaders.h>
#include <ParticleCollisionSystem.h>
#include <ParticleEditPacketSender.h>
#include <ScriptEngine.h>
#include <VoxelQuery.h>

#ifndef _WIN32

@@ -125,7 +126,7 @@ public:
void shootParticle(); // shoots a particle in the direction you're looking
ParticleEditHandle* newParticleEditHandle(uint32_t id = NEW_PARTICLE);
ParticleEditHandle* makeParticle(glm::vec3 position, float radius, xColor color, glm::vec3 velocity,
glm::vec3 gravity, float damping, QString updateScript);
glm::vec3 gravity, float damping, bool inHand, QString updateScript);

void makeVoxel(glm::vec3 position,
float scale,

@@ -157,7 +158,7 @@ public:
QSettings* getSettings() { return _settings; }
Swatch* getSwatch() { return &_swatch; }
QMainWindow* getWindow() { return _window; }
NodeToVoxelSceneStats* getVoxelSceneStats() { return &_voxelServerSceneStats; }
NodeToVoxelSceneStats* getOcteeSceneStats() { return &_octreeServerSceneStats; }
void lockVoxelSceneStats() { _voxelSceneStatsLock.lockForRead(); }
void unlockVoxelSceneStats() { _voxelSceneStatsLock.unlock(); }

@@ -199,6 +200,7 @@ public:

glm::vec2 getViewportDimensions() const{ return glm::vec2(_glWidget->width(),_glWidget->height()); }
NodeToJurisdictionMap& getVoxelServerJurisdictions() { return _voxelServerJurisdictions; }
NodeToJurisdictionMap& getParticleServerJurisdictions() { return _particleServerJurisdictions; }
void pasteVoxelsToOctalCode(const unsigned char* octalCodeDestination);

/// set a voxel which is to be rendered with a highlight

@@ -219,6 +221,7 @@ public slots:
void doKillLocalVoxels();
void decreaseVoxelSize();
void increaseVoxelSize();
void loadScript();

private slots:

@@ -495,7 +498,7 @@ private:

NodeToJurisdictionMap _voxelServerJurisdictions;
NodeToJurisdictionMap _particleServerJurisdictions;
NodeToVoxelSceneStats _voxelServerSceneStats;
NodeToVoxelSceneStats _octreeServerSceneStats;
QReadWriteLock _voxelSceneStatsLock;

std::vector<VoxelFade> _voxelFades;
@@ -13,6 +13,7 @@
#include <CoreAudio/AudioHardware.h>
#endif

#include <QtCore/QBuffer>
#include <QtMultimedia/QAudioInput>
#include <QtMultimedia/QAudioOutput>
#include <QSvgRenderer>

@@ -33,7 +34,7 @@
static const float JITTER_BUFFER_LENGTH_MSECS = 12;
static const short JITTER_BUFFER_SAMPLES = JITTER_BUFFER_LENGTH_MSECS * NUM_AUDIO_CHANNELS * (SAMPLE_RATE / 1000.0);

static const float AUDIO_CALLBACK_MSECS = (float)BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float)SAMPLE_RATE * 1000.0;
static const float AUDIO_CALLBACK_MSECS = (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float)SAMPLE_RATE * 1000.0;

// Mute icon configration
static const int ICON_SIZE = 24;

@@ -43,12 +44,18 @@ static const int BOTTOM_PADDING = 110;
Audio::Audio(Oscilloscope* scope, int16_t initialJitterBufferSamples, QObject* parent) :
QObject(parent),
_audioInput(NULL),
_inputDevice(NULL),
_desiredInputFormat(),
_inputFormat(),
_numInputCallbackBytes(0),
_audioOutput(NULL),
_desiredOutputFormat(),
_outputFormat(),
_outputDevice(NULL),
_isBufferSendCallback(false),
_nextOutputSamples(NULL),
_ringBuffer(true),
_numOutputCallbackBytes(0),
_loopbackAudioOutput(NULL),
_loopbackOutputDevice(NULL),
_inputRingBuffer(0),
_ringBuffer(NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL),
_scope(scope),
_averagedLatency(0.0),
_measuredJitter(0),
@@ -65,7 +72,8 @@ Audio::Audio(Oscilloscope* scope, int16_t initialJitterBufferSamples, QObject* p
_numFramesDisplayStarve(0),
_muted(false)
{

// clear the array of locally injected samples
memset(_localInjectedSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
}

void Audio::init(QGLWidget *parent) {
@@ -124,242 +132,278 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
return (mode == QAudio::AudioInput) ? QAudioDeviceInfo::defaultInputDevice() : QAudioDeviceInfo::defaultOutputDevice();
}

const int QT_SAMPLE_RATE = 44100;
const int SAMPLE_RATE_RATIO = QT_SAMPLE_RATE / SAMPLE_RATE;
bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
const QAudioFormat& desiredAudioFormat,
QAudioFormat& adjustedAudioFormat) {
if (!audioDevice.isFormatSupported(desiredAudioFormat)) {
qDebug() << "The desired format for audio I/O is" << desiredAudioFormat << "\n";
qDebug() << "The desired audio format is not supported by this device.\n";

if (desiredAudioFormat.channelCount() == 1) {
adjustedAudioFormat = desiredAudioFormat;
adjustedAudioFormat.setChannelCount(2);

if (audioDevice.isFormatSupported(adjustedAudioFormat)) {
return true;
} else {
adjustedAudioFormat.setChannelCount(1);
}
}

if (audioDevice.supportedSampleRates().contains(SAMPLE_RATE * 2)) {
// use 48, which is a sample downsample, upsample
adjustedAudioFormat = desiredAudioFormat;
adjustedAudioFormat.setSampleRate(SAMPLE_RATE * 2);

// return the nearest in case it needs 2 channels
adjustedAudioFormat = audioDevice.nearestFormat(adjustedAudioFormat);
return true;
}

return false;
} else {
// set the adjustedAudioFormat to the desiredAudioFormat, since it will work
adjustedAudioFormat = desiredAudioFormat;
return true;
}
}
void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
unsigned int numSourceSamples, unsigned int numDestinationSamples,
const QAudioFormat& sourceAudioFormat, const QAudioFormat& destinationAudioFormat) {
if (sourceAudioFormat == destinationAudioFormat) {
memcpy(destinationSamples, sourceSamples, numSourceSamples * sizeof(int16_t));
} else {
int destinationChannels = (destinationAudioFormat.channelCount() >= 2) ? 2 : destinationAudioFormat.channelCount();
float sourceToDestinationFactor = (sourceAudioFormat.sampleRate() / (float) destinationAudioFormat.sampleRate())
* (sourceAudioFormat.channelCount() / (float) destinationChannels);

// take into account the number of channels in source and destination
// accomodate for the case where have an output with > 2 channels
// this is the case with our HDMI capture

if (sourceToDestinationFactor >= 2) {
// we need to downsample from 48 to 24
// for now this only supports a mono output - this would be the case for audio input

for (int i = sourceAudioFormat.channelCount(); i < numSourceSamples; i += 2 * sourceAudioFormat.channelCount()) {
if (i + (sourceAudioFormat.channelCount()) >= numSourceSamples) {
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
(sourceSamples[i - sourceAudioFormat.channelCount()] / 2)
+ (sourceSamples[i] / 2);
} else {
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
(sourceSamples[i - sourceAudioFormat.channelCount()] / 4)
+ (sourceSamples[i] / 2)
+ (sourceSamples[i + sourceAudioFormat.channelCount()] / 4);
}
}

} else {
// upsample from 24 to 48
// for now this only supports a stereo to stereo conversion - this is our case for network audio to output
int sourceIndex = 0;
int destinationToSourceFactor = (1 / sourceToDestinationFactor);
int dtsSampleRateFactor = (destinationAudioFormat.sampleRate() / sourceAudioFormat.sampleRate());

for (int i = 0; i < numDestinationSamples; i += destinationAudioFormat.channelCount() * dtsSampleRateFactor) {
sourceIndex = (i / destinationToSourceFactor);

// fill the L/R channels and make the rest silent
for (int j = i; j < i + (dtsSampleRateFactor * destinationAudioFormat.channelCount()); j++) {
if (j % destinationAudioFormat.channelCount() == 0) {
// left channel
destinationSamples[j] = sourceSamples[sourceIndex];
} else if (j % destinationAudioFormat.channelCount() == 1) {
// right channel
destinationSamples[j] = sourceSamples[sourceIndex + (sourceAudioFormat.channelCount() > 1 ? 1 : 0)];
} else {
// channels above 2, fill with silence
destinationSamples[j] = 0;
}
}
}
}
}
}
const int CALLBACK_ACCELERATOR_RATIO = 2;
const int CALLBACK_IO_BUFFER_SIZE = BUFFER_LENGTH_BYTES_STEREO * SAMPLE_RATE_RATIO / CALLBACK_ACCELERATOR_RATIO;

void Audio::start() {

QAudioFormat audioFormat;
// set up the desired audio format
audioFormat.setSampleRate(QT_SAMPLE_RATE);
audioFormat.setSampleSize(16);
audioFormat.setCodec("audio/pcm");
audioFormat.setSampleType(QAudioFormat::SignedInt);
audioFormat.setByteOrder(QAudioFormat::LittleEndian);
audioFormat.setChannelCount(2);
_desiredInputFormat.setSampleRate(SAMPLE_RATE);
_desiredInputFormat.setSampleSize(16);
_desiredInputFormat.setCodec("audio/pcm");
_desiredInputFormat.setSampleType(QAudioFormat::SignedInt);
_desiredInputFormat.setByteOrder(QAudioFormat::LittleEndian);
_desiredInputFormat.setChannelCount(1);

qDebug() << "The format for audio I/O is" << audioFormat << "\n";
_desiredOutputFormat = _desiredInputFormat;
_desiredOutputFormat.setChannelCount(2);

QAudioDeviceInfo inputAudioDevice = defaultAudioDeviceForMode(QAudio::AudioInput);
QAudioDeviceInfo inputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioInput);

qDebug() << "Audio input device is" << inputAudioDevice.deviceName() << "\n";
if (!inputAudioDevice.isFormatSupported(audioFormat)) {
qDebug() << "The desired audio input format is not supported by this device. Not starting audio input.\n";
qDebug() << "The audio input device is" << inputDeviceInfo.deviceName() << "\n";

if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
qDebug() << "The format to be used for audio input is" << _inputFormat << "\n";

_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
_numInputCallbackBytes = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL * _inputFormat.channelCount()
* (_inputFormat.sampleRate() / SAMPLE_RATE)
/ CALLBACK_ACCELERATOR_RATIO;
_audioInput->setBufferSize(_numInputCallbackBytes);

QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);

qDebug() << "The audio output device is" << outputDeviceInfo.deviceName() << "\n";

if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
qDebug() << "The format to be used for audio output is" << _outputFormat << "\n";

_inputRingBuffer.resizeForFrameSize(_numInputCallbackBytes * CALLBACK_ACCELERATOR_RATIO / sizeof(int16_t));
_inputDevice = _audioInput->start();
connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));

// setup our general output device for audio-mixer audio
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
_outputDevice = _audioOutput->start();

// setup a loopback audio output device
_loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);

gettimeofday(&_lastReceiveTime, NULL);
}

return;
}

_audioInput = new QAudioInput(inputAudioDevice, audioFormat, this);
_audioInput->setBufferSize(CALLBACK_IO_BUFFER_SIZE);
_inputDevice = _audioInput->start();

connect(_inputDevice, SIGNAL(readyRead()), SLOT(handleAudioInput()));

QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);

qDebug() << outputDeviceInfo.supportedSampleRates() << "\n";

qDebug() << "Audio output device is" << outputDeviceInfo.deviceName() << "\n";

if (!outputDeviceInfo.isFormatSupported(audioFormat)) {
qDebug() << "The desired audio output format is not supported by this device.\n";
return;
}

_audioOutput = new QAudioOutput(outputDeviceInfo, audioFormat, this);
_audioOutput->setBufferSize(CALLBACK_IO_BUFFER_SIZE);
_outputDevice = _audioOutput->start();

gettimeofday(&_lastReceiveTime, NULL);
qDebug() << "Unable to set up audio I/O because of a problem with input or output formats.\n";
}
void Audio::handleAudioInput() {
static int16_t stereoInputBuffer[CALLBACK_IO_BUFFER_SIZE * 2];
static char monoAudioDataPacket[MAX_PACKET_SIZE];
static int bufferSizeSamples = _audioInput->bufferSize() / sizeof(int16_t);

static int numBytesPacketHeader = numBytesForPacketHeader((unsigned char*) &PACKET_TYPE_MICROPHONE_AUDIO_NO_ECHO);
static int leadingBytes = numBytesPacketHeader + sizeof(glm::vec3) + sizeof(glm::quat) + NUM_BYTES_RFC4122_UUID;

static int16_t* monoAudioSamples = (int16_t*) (monoAudioDataPacket + leadingBytes);

QByteArray inputByteArray = _inputDevice->read(CALLBACK_IO_BUFFER_SIZE);
static float inputToNetworkInputRatio = _numInputCallbackBytes * CALLBACK_ACCELERATOR_RATIO
/ NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL;

if (_isBufferSendCallback) {
// copy samples from the inputByteArray to the stereoInputBuffer
memcpy((char*) (stereoInputBuffer + bufferSizeSamples), inputByteArray.data(), inputByteArray.size());
static int inputSamplesRequired = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * inputToNetworkInputRatio;

QByteArray inputByteArray = _inputDevice->readAll();

if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio)) {
// if this person wants local loopback add that to the locally injected audio

// Measure the loudness of the signal from the microphone and store in audio object
float loudness = 0;
for (int i = 0; i < BUFFER_LENGTH_SAMPLES_PER_CHANNEL * SAMPLE_RATE_RATIO; i += 2) {
loudness += abs(stereoInputBuffer[i]);
if (!_loopbackOutputDevice) {
// we didn't have the loopback output device going so set that up now
_loopbackOutputDevice = _loopbackAudioOutput->start();
}

loudness /= BUFFER_LENGTH_SAMPLES_PER_CHANNEL * SAMPLE_RATE_RATIO;
_lastInputLoudness = loudness;

} else {
// this is the first half of a full buffer of data
// zero out the monoAudioSamples array
memset(monoAudioSamples, 0, BUFFER_LENGTH_BYTES_PER_CHANNEL);

// take samples we have in this callback and store them in the first half of the static buffer
// to send off in the next callback
memcpy((char*) stereoInputBuffer, inputByteArray.data(), inputByteArray.size());
}

// add input data just written to the scope
QMetaObject::invokeMethod(_scope, "addStereoSamples", Qt::QueuedConnection,
Q_ARG(QByteArray, inputByteArray), Q_ARG(bool, true));

QByteArray stereoOutputBuffer;

if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted) {
// if local loopback enabled, copy input to output
if (_isBufferSendCallback) {
stereoOutputBuffer.append((char*) (stereoInputBuffer + bufferSizeSamples), CALLBACK_IO_BUFFER_SIZE);
if (_inputFormat == _outputFormat) {
_loopbackOutputDevice->write(inputByteArray);
} else {
stereoOutputBuffer.append((char*) stereoInputBuffer, CALLBACK_IO_BUFFER_SIZE);
static float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
* (_outputFormat.channelCount() / _inputFormat.channelCount());

QByteArray loopBackByteArray(inputByteArray.size() * loopbackOutputToInputRatio, 0);

linearResampling((int16_t*) inputByteArray.data(), (int16_t*) loopBackByteArray.data(),
inputByteArray.size() / sizeof(int16_t),
loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);

_loopbackOutputDevice->write(loopBackByteArray);
}
} else {
// zero out the stereoOutputBuffer
stereoOutputBuffer = QByteArray(CALLBACK_IO_BUFFER_SIZE, 0);
}

// add procedural effects to the appropriate input samples
addProceduralSounds(monoAudioSamples + (_isBufferSendCallback
? BUFFER_LENGTH_SAMPLES_PER_CHANNEL / CALLBACK_ACCELERATOR_RATIO : 0),
(int16_t*) stereoOutputBuffer.data(),
BUFFER_LENGTH_SAMPLES_PER_CHANNEL / CALLBACK_ACCELERATOR_RATIO);
_inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());
if (_isBufferSendCallback) {
while (_inputRingBuffer.samplesAvailable() > inputSamplesRequired) {

int16_t inputAudioSamples[inputSamplesRequired];
_inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);

// zero out the monoAudioSamples array and the locally injected audio
memset(monoAudioSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);

// zero out the locally injected audio in preparation for audio procedural sounds
memset(_localInjectedSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);

if (!_muted) {
// we aren't muted, downsample the input audio
linearResampling((int16_t*) inputAudioSamples,
monoAudioSamples,
inputSamplesRequired,
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
_inputFormat, _desiredInputFormat);

float loudness = 0;

for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
loudness += fabsf(monoAudioSamples[i]);
}

_lastInputLoudness = loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;

// add input data just written to the scope
QMetaObject::invokeMethod(_scope, "addSamples", Qt::QueuedConnection,
Q_ARG(QByteArray, QByteArray((char*) monoAudioSamples,
NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL)),
Q_ARG(bool, false), Q_ARG(bool, true));
} else {
// our input loudness is 0, since we're muted
_lastInputLoudness = 0;
}

// add procedural effects to the appropriate input samples
addProceduralSounds(monoAudioSamples,
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);

NodeList* nodeList = NodeList::getInstance();
Node* audioMixer = nodeList->soloNodeOfType(NODE_TYPE_AUDIO_MIXER);

if (audioMixer) {
if (audioMixer->getActiveSocket()) {
MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar();

glm::vec3 headPosition = interfaceAvatar->getHeadJointPosition();
glm::quat headOrientation = interfaceAvatar->getHead().getOrientation();

// we need the amount of bytes in the buffer + 1 for type
// + 12 for 3 floats for position + float for bearing + 1 attenuation byte

PACKET_TYPE packetType = Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)
? PACKET_TYPE_MICROPHONE_AUDIO_WITH_ECHO : PACKET_TYPE_MICROPHONE_AUDIO_NO_ECHO;

char* currentPacketPtr = monoAudioDataPacket + populateTypeAndVersion((unsigned char*) monoAudioDataPacket,
packetType);

// pack Source Data
QByteArray rfcUUID = NodeList::getInstance()->getOwnerUUID().toRfc4122();
memcpy(currentPacketPtr, rfcUUID.constData(), rfcUUID.size());
currentPacketPtr += rfcUUID.size();

// memcpy the three float positions
memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
currentPacketPtr += (sizeof(headPosition));

// memcpy our orientation
memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
currentPacketPtr += sizeof(headOrientation);

if (!_muted) {
// we aren't muted, average each set of four samples together to set up the mono input buffers
for (int i = 2; i < BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 2 * SAMPLE_RATE_RATIO; i += 4) {

int16_t averagedSample = 0;
if (i + 2 == BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 2 * SAMPLE_RATE_RATIO) {
averagedSample = (stereoInputBuffer[i - 2] / 2) + (stereoInputBuffer[i] / 2);
} else {
averagedSample = (stereoInputBuffer[i - 2] / 4) + (stereoInputBuffer[i] / 2)
+ (stereoInputBuffer[i + 2] / 4);
}

// add the averaged sample to our array of audio samples
monoAudioSamples[(i - 2) / 4] += averagedSample;
}
}

nodeList->getNodeSocket().writeDatagram(monoAudioDataPacket, BUFFER_LENGTH_BYTES_PER_CHANNEL + leadingBytes,
audioMixer->getActiveSocket()->getAddress(),
audioMixer->getActiveSocket()->getPort());

Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO)
.updateValue(BUFFER_LENGTH_BYTES_PER_CHANNEL + leadingBytes);
} else {
nodeList->pingPublicAndLocalSocketsForInactiveNode(audioMixer);
}
if (audioMixer && nodeList->getNodeActiveSocketOrPing(audioMixer)) {
MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar();

glm::vec3 headPosition = interfaceAvatar->getHeadJointPosition();
glm::quat headOrientation = interfaceAvatar->getHead().getOrientation();

// we need the amount of bytes in the buffer + 1 for type
// + 12 for 3 floats for position + float for bearing + 1 attenuation byte

PACKET_TYPE packetType = Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)
? PACKET_TYPE_MICROPHONE_AUDIO_WITH_ECHO : PACKET_TYPE_MICROPHONE_AUDIO_NO_ECHO;

char* currentPacketPtr = monoAudioDataPacket + populateTypeAndVersion((unsigned char*) monoAudioDataPacket,
packetType);

// pack Source Data
QByteArray rfcUUID = NodeList::getInstance()->getOwnerUUID().toRfc4122();
memcpy(currentPacketPtr, rfcUUID.constData(), rfcUUID.size());
currentPacketPtr += rfcUUID.size();

// memcpy the three float positions
memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
currentPacketPtr += (sizeof(headPosition));

// memcpy our orientation
memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
currentPacketPtr += sizeof(headOrientation);

nodeList->getNodeSocket().writeDatagram(monoAudioDataPacket,
NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL + leadingBytes,
audioMixer->getActiveSocket()->getAddress(),
audioMixer->getActiveSocket()->getPort());

Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO)
.updateValue(NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL + leadingBytes);
}
}

// if there is anything in the ring buffer, decide what to do
if (!_nextOutputSamples) {
if (_ringBuffer.getEndOfLastWrite()) {
if (_ringBuffer.isStarved() && _ringBuffer.diffLastWriteNextOutput() <
(PACKET_LENGTH_SAMPLES + _jitterBufferSamples * (_ringBuffer.isStereo() ? 2 : 1))) {
// If not enough audio has arrived to start playback, keep waiting
} else if (!_ringBuffer.isStarved() && _ringBuffer.diffLastWriteNextOutput() == 0) {
// If we have started and now have run out of audio to send to the audio device,
// this means we've starved and should restart.
_ringBuffer.setIsStarved(true);

// show a starve in the GUI for 10 frames
_numFramesDisplayStarve = 10;

} else {
// We are either already playing back, or we have enough audio to start playing back.
if (_ringBuffer.isStarved()) {
_ringBuffer.setIsStarved(false);
_ringBuffer.setHasStarted(true);
}

_nextOutputSamples = _ringBuffer.getNextOutput();
}
}
}

if (_nextOutputSamples) {

int16_t* stereoOutputBufferSamples = (int16_t*) stereoOutputBuffer.data();

// play whatever we have in the audio buffer
for (int s = 0; s < PACKET_LENGTH_SAMPLES_PER_CHANNEL / CALLBACK_ACCELERATOR_RATIO; s++) {
int16_t leftSample = _nextOutputSamples[s];
int16_t rightSample = _nextOutputSamples[s + PACKET_LENGTH_SAMPLES_PER_CHANNEL];

stereoOutputBufferSamples[(s * 4)] += leftSample;
stereoOutputBufferSamples[(s * 4) + 2] += leftSample;

stereoOutputBufferSamples[(s * 4) + 1] += rightSample;
stereoOutputBufferSamples[(s * 4) + 3] += rightSample;
}

if (_isBufferSendCallback) {
_ringBuffer.setNextOutput(_ringBuffer.getNextOutput() + PACKET_LENGTH_SAMPLES);

if (_ringBuffer.getNextOutput() == _ringBuffer.getBuffer() + RING_BUFFER_LENGTH_SAMPLES) {
_ringBuffer.setNextOutput(_ringBuffer.getBuffer());
}

_nextOutputSamples = NULL;
} else {
_nextOutputSamples += PACKET_LENGTH_SAMPLES_PER_CHANNEL / CALLBACK_ACCELERATOR_RATIO;
}
}

_outputDevice->write(stereoOutputBuffer);

// add output (@speakers) data just written to the scope
QMetaObject::invokeMethod(_scope, "addStereoSamples", Qt::QueuedConnection,
Q_ARG(QByteArray, stereoOutputBuffer), Q_ARG(bool, false));

_isBufferSendCallback = !_isBufferSendCallback;

gettimeofday(&_lastCallbackTime, NULL);
}

void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
@@ -381,7 +425,7 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
_measuredJitter = _stdev.getStDev();
_stdev.reset();
// Set jitter buffer to be a multiple of the measured standard deviation
const int MAX_JITTER_BUFFER_SAMPLES = RING_BUFFER_LENGTH_SAMPLES / 2;
const int MAX_JITTER_BUFFER_SAMPLES = _ringBuffer.getSampleCapacity() / 2;
const float NUM_STANDARD_DEVIATIONS = 3.f;
if (Menu::getInstance()->getAudioJitterBufferSamples() == 0) {
float newJitterBufferSamples = (NUM_STANDARD_DEVIATIONS * _measuredJitter) / 1000.f * SAMPLE_RATE;
@@ -389,22 +433,69 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
}
}

if (_ringBuffer.diffLastWriteNextOutput() + PACKET_LENGTH_SAMPLES >
PACKET_LENGTH_SAMPLES + (ceilf((float) (_jitterBufferSamples * 2) / PACKET_LENGTH_SAMPLES) * PACKET_LENGTH_SAMPLES)) {
// this packet would give us more than the required amount for play out
// discard the first packet in the buffer

_ringBuffer.setNextOutput(_ringBuffer.getNextOutput() + PACKET_LENGTH_SAMPLES);

if (_ringBuffer.getNextOutput() == _ringBuffer.getBuffer() + RING_BUFFER_LENGTH_SAMPLES) {
_ringBuffer.setNextOutput(_ringBuffer.getBuffer());
}
}

_ringBuffer.parseData((unsigned char*) audioByteArray.data(), audioByteArray.size());

Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO).updateValue(PACKET_LENGTH_BYTES
+ sizeof(PACKET_TYPE));
static float networkOutputToOutputRatio = (_desiredOutputFormat.sampleRate() / (float) _outputFormat.sampleRate())
* (_desiredOutputFormat.channelCount() / (float) _outputFormat.channelCount());

static int numRequiredOutputSamples = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / networkOutputToOutputRatio;

QByteArray outputBuffer;
outputBuffer.resize(numRequiredOutputSamples * sizeof(int16_t));

// if there is anything in the ring buffer, decide what to do
if (_ringBuffer.samplesAvailable() > 0) {
if (!_ringBuffer.isNotStarvedOrHasMinimumSamples(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO
+ (_jitterBufferSamples * 2))) {
// starved and we don't have enough to start, keep waiting
qDebug() << "Buffer is starved and doesn't have enough samples to start. Held back.\n";
} else {
// We are either already playing back, or we have enough audio to start playing back.
_ringBuffer.setIsStarved(false);

// copy the samples we'll resample from the ring buffer - this also
// pushes the read pointer of the ring buffer forwards
int16_t ringBufferSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO];
_ringBuffer.readSamples(ringBufferSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO);

// add to the output samples whatever is in the _localAudioOutput byte array
// that lets this user hear sound effects and loopback (if enabled)

for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
ringBufferSamples[i * 2] = glm::clamp(ringBufferSamples[i * 2] + _localInjectedSamples[i],
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
ringBufferSamples[(i * 2) + 1] = glm::clamp(ringBufferSamples[(i * 2) + 1] + _localInjectedSamples[i],
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}

// copy the packet from the RB to the output
linearResampling(ringBufferSamples,
(int16_t*) outputBuffer.data(),
NETWORK_BUFFER_LENGTH_SAMPLES_STEREO,
numRequiredOutputSamples,
_desiredOutputFormat, _outputFormat);

if (_outputDevice) {

_outputDevice->write(outputBuffer);

// add output (@speakers) data just written to the scope
QMetaObject::invokeMethod(_scope, "addSamples", Qt::QueuedConnection,
Q_ARG(QByteArray, QByteArray((char*) ringBufferSamples,
NETWORK_BUFFER_LENGTH_BYTES_STEREO)),
Q_ARG(bool, true), Q_ARG(bool, false));
}
}

} else if (_audioOutput->bytesFree() == _audioOutput->bufferSize()) {
// we don't have any audio data left in the output buffer, and the ring buffer from
// the network has nothing in it either - we just starved
qDebug() << "Audio output just starved.\n";
_ringBuffer.setIsStarved(true);
_numFramesDisplayStarve = 10;
}

Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO).updateValue(audioByteArray.size());

_lastReceiveTime = currentReceiveTime;
}
@@ -435,7 +526,7 @@ void Audio::render(int screenWidth, int screenHeight) {
glVertex2f(currentX, topY);
glVertex2f(currentX, bottomY);

for (int i = 0; i < RING_BUFFER_LENGTH_FRAMES / 2; i++) {
for (int i = 0; i < RING_BUFFER_LENGTH_FRAMES; i++) {
glVertex2f(currentX, halfY);
glVertex2f(currentX + frameWidth, halfY);
currentX += frameWidth;
@@ -445,17 +536,15 @@ void Audio::render(int screenWidth, int screenHeight) {
}
glEnd();

// Show a bar with the amount of audio remaining in ring buffer beyond current playback
float remainingBuffer = 0;
timeval currentTime;
gettimeofday(&currentTime, NULL);
float timeLeftInCurrentBuffer = 0;
if (_lastCallbackTime.tv_usec > 0) {
timeLeftInCurrentBuffer = AUDIO_CALLBACK_MSECS - diffclock(&_lastCallbackTime, &currentTime);
}
// show a bar with the amount of audio remaining in ring buffer and output device
// beyond the current playback

if (_ringBuffer.getEndOfLastWrite() != NULL)
remainingBuffer = _ringBuffer.diffLastWriteNextOutput() / PACKET_LENGTH_SAMPLES * AUDIO_CALLBACK_MSECS;
int bytesLeftInAudioOutput = _audioOutput->bufferSize() - _audioOutput->bytesFree();
float secondsLeftForAudioOutput = (bytesLeftInAudioOutput / sizeof(int16_t))
/ ((float) _outputFormat.sampleRate() * _outputFormat.channelCount());
float secondsLeftForRingBuffer = _ringBuffer.samplesAvailable()
/ ((float) _desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
float msLeftForAudioOutput = (secondsLeftForAudioOutput + secondsLeftForRingBuffer) * 1000;

if (_numFramesDisplayStarve == 0) {
glColor3f(0, 1, 0);
@@ -464,19 +553,19 @@ void Audio::render(int screenWidth, int screenHeight) {
_numFramesDisplayStarve--;
}

if (_averagedLatency == 0.0) {
_averagedLatency = msLeftForAudioOutput;
} else {
_averagedLatency = 0.99f * _averagedLatency + 0.01f * (msLeftForAudioOutput);
}

glBegin(GL_QUADS);
glVertex2f(startX, topY + 2);
glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer) / AUDIO_CALLBACK_MSECS * frameWidth, topY + 2);
glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer) / AUDIO_CALLBACK_MSECS * frameWidth, bottomY - 2);
glVertex2f(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth, topY + 2);
glVertex2f(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth, bottomY - 2);
glVertex2f(startX, bottomY - 2);
glEnd();

if (_averagedLatency == 0.0) {
_averagedLatency = remainingBuffer + timeLeftInCurrentBuffer;
} else {
_averagedLatency = 0.99f * _averagedLatency + 0.01f * (remainingBuffer + timeLeftInCurrentBuffer);
}

// Show a yellow bar with the averaged msecs latency you are hearing (from time of packet receipt)
glColor3f(1,1,0);
glBegin(GL_QUADS);
@@ -493,7 +582,8 @@ void Audio::render(int screenWidth, int screenHeight) {
// Show a red bar with the 'start' point of one frame plus the jitter buffer

glColor3f(1, 0, 0);
int jitterBufferPels = (1.f + (float)getJitterBufferSamples() / (float) PACKET_LENGTH_SAMPLES_PER_CHANNEL) * frameWidth;
int jitterBufferPels = (1.f + (float)getJitterBufferSamples()
/ (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) * frameWidth;
sprintf(out, "%.0f\n", getJitterBufferSamples() / SAMPLE_RATE * 1000.f);
drawtext(startX + jitterBufferPels - 5, topY - 9, 0.10, 0, 1, 0, out, 1, 0, 0);
sprintf(out, "j %.1f\n", _measuredJitter);
@@ -515,7 +605,7 @@ void Audio::render(int screenWidth, int screenHeight) {
}

// Take a pointer to the acquired microphone input samples and add procedural sounds
void Audio::addProceduralSounds(int16_t* monoInput, int16_t* stereoUpsampledOutput, int numSamples) {
void Audio::addProceduralSounds(int16_t* monoInput, int numSamples) {
const float MAX_AUDIBLE_VELOCITY = 6.0;
const float MIN_AUDIBLE_VELOCITY = 0.1;
const int VOLUME_BASELINE = 400;

@@ -551,11 +641,9 @@ void Audio::addProceduralSounds(int16_t* monoInput, int16_t* stereoUpsampledOutp

int16_t collisionSample = (int16_t) sample;

monoInput[i] += collisionSample;

for (int j = (i * 4); j < (i * 4) + 4; j++) {
stereoUpsampledOutput[j] += collisionSample;
}
monoInput[i] = glm::clamp(monoInput[i] + collisionSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
_localInjectedSamples[i] = glm::clamp(_localInjectedSamples[i] + collisionSample,
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);

_collisionSoundMagnitude *= _collisionSoundDuration;
}

@@ -577,11 +665,9 @@ void Audio::addProceduralSounds(int16_t* monoInput, int16_t* stereoUpsampledOutp

int16_t collisionSample = (int16_t) sample;

monoInput[i] += collisionSample;

for (int j = (i * 4); j < (i * 4) + 4; j++) {
stereoUpsampledOutput[j] += collisionSample;
}
monoInput[i] = glm::clamp(monoInput[i] + collisionSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
_localInjectedSamples[i] = glm::clamp(_localInjectedSamples[i] + collisionSample,
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);

_drumSoundVolume *= (1.f - _drumSoundDecay);
}
@@ -15,6 +15,7 @@
#include "InterfaceConfig.h"

#include <QtCore/QObject>
#include <QtMultimedia/QAudioFormat>

#include <AbstractAudioInterface.h>
#include <AudioRingBuffer.h>

@@ -26,11 +27,6 @@

static const int NUM_AUDIO_CHANNELS = 2;

static const int PACKET_LENGTH_BYTES = 1024;
static const int PACKET_LENGTH_BYTES_PER_CHANNEL = PACKET_LENGTH_BYTES / 2;
static const int PACKET_LENGTH_SAMPLES = PACKET_LENGTH_BYTES / sizeof(int16_t);
static const int PACKET_LENGTH_SAMPLES_PER_CHANNEL = PACKET_LENGTH_SAMPLES / 2;

class QAudioInput;
class QAudioOutput;
class QIODevice;

@@ -70,16 +66,25 @@ public slots:
void reset();

private:
QByteArray firstInputFrame;
QAudioInput* _audioInput;
QAudioFormat _desiredInputFormat;
QAudioFormat _inputFormat;
QIODevice* _inputDevice;
int16_t _localInjectedSamples[NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL];
int _numInputCallbackBytes;
QAudioOutput* _audioOutput;
QAudioFormat _desiredOutputFormat;
QAudioFormat _outputFormat;
QIODevice* _outputDevice;
bool _isBufferSendCallback;
int16_t* _nextOutputSamples;
int _numOutputCallbackBytes;
QAudioOutput* _loopbackAudioOutput;
QIODevice* _loopbackOutputDevice;
AudioRingBuffer _inputRingBuffer;
AudioRingBuffer _ringBuffer;

Oscilloscope* _scope;
StDev _stdev;
timeval _lastCallbackTime;
timeval _lastReceiveTime;
float _averagedLatency;
float _measuredJitter;

@@ -114,7 +119,7 @@ private:
inline void performIO(int16_t* inputLeft, int16_t* outputLeft, int16_t* outputRight);

// Add sounds that we want the user to not hear themselves, by adding on top of mic input signal
void addProceduralSounds(int16_t* monoInput, int16_t* stereoUpsampledOutput, int numSamples);
void addProceduralSounds(int16_t* monoInput, int numSamples);

void renderToolIcon(int screenHeight);
};
@@ -90,6 +90,10 @@ Menu::Menu() :
this,
SLOT(login())));

addDisabledActionAndSeparator(fileMenu, "Scripts");
addActionToQMenuAndActionHash(fileMenu, MenuOption::LoadScript, Qt::CTRL | Qt::Key_O, appInstance, SLOT(loadScript()));
_activeScriptsMenu = fileMenu->addMenu("Running Scripts");

addDisabledActionAndSeparator(fileMenu, "Voxels");
addActionToQMenuAndActionHash(fileMenu, MenuOption::ExportVoxels, Qt::CTRL | Qt::Key_E, appInstance, SLOT(exportVoxels()));
addActionToQMenuAndActionHash(fileMenu, MenuOption::ImportVoxels, Qt::CTRL | Qt::Key_I, appInstance, SLOT(importVoxels()));

@@ -369,6 +373,11 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::SimulateLeapHand);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayLeapHands, 0, true);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LeapDrive, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::BallFromHand, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::VoxelDrumming, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::PlaySlaps, 0, false);

QMenu* trackingOptionsMenu = developerMenu->addMenu("Tracking Options");

@@ -713,6 +722,11 @@ QAction* Menu::addCheckableActionToQMenuAndActionHash(QMenu* destinationMenu,
return action;
}

void Menu::removeAction(QMenu* menu, const QString& actionName) {
menu->removeAction(_actionHash.value(actionName));
}

bool Menu::isOptionChecked(const QString& menuOption) {
return _actionHash.value(menuOption)->isChecked();
}

@@ -1035,7 +1049,7 @@ void Menu::bandwidthDetailsClosed() {
void Menu::voxelStatsDetails() {
if (!_voxelStatsDialog) {
_voxelStatsDialog = new VoxelStatsDialog(Application::getInstance()->getGLWidget(),
Application::getInstance()->getVoxelSceneStats());
Application::getInstance()->getOcteeSceneStats());
connect(_voxelStatsDialog, SIGNAL(closed()), SLOT(voxelStatsDetailsClosed()));
_voxelStatsDialog->show();
}
@ -13,6 +13,8 @@
|
|||
#include <QHash>
|
||||
#include <QKeySequence>
|
||||
|
||||
#include <AbstractMenuInterface.h>
|
||||
|
||||
enum FrustumDrawMode {
|
||||
FRUSTUM_DRAW_MODE_ALL,
|
||||
FRUSTUM_DRAW_MODE_VECTORS,
|
||||
|
@ -37,7 +39,7 @@ class BandwidthDialog;
|
|||
class VoxelStatsDialog;
|
||||
class LodToolsDialog;
|
||||
|
||||
class Menu : public QMenuBar {
|
||||
class Menu : public QMenuBar, public AbstractMenuInterface {
|
||||
Q_OBJECT
|
||||
public:
|
||||
static Menu* getInstance();
|
||||
|
@ -71,6 +73,15 @@ public:
|
|||
// User Tweakable PPS from Voxel Server
|
||||
int getMaxVoxelPacketsPerSecond() const { return _maxVoxelPacketsPerSecond; }
|
||||
|
||||
virtual QMenu* getActiveScriptsMenu() { return _activeScriptsMenu;}
|
||||
virtual QAction* addActionToQMenuAndActionHash(QMenu* destinationMenu,
|
||||
const QString actionName,
|
||||
const QKeySequence& shortcut = 0,
|
||||
const QObject* receiver = NULL,
|
||||
const char* member = NULL,
|
||||
QAction::MenuRole role = QAction::NoRole);
|
||||
virtual void removeAction(QMenu* menu, const QString& actionName);
|
||||
|
||||
public slots:
|
||||
void bandwidthDetails();
|
||||
void voxelStatsDetails();
|
||||
|
@ -110,12 +121,6 @@ private:
|
|||
|
||||
/// helper method to have separators with labels that are also compatible with OS X
|
||||
void addDisabledActionAndSeparator(QMenu* destinationMenu, const QString& actionName);
|
||||
QAction* addActionToQMenuAndActionHash(QMenu* destinationMenu,
|
||||
const QString actionName,
|
||||
const QKeySequence& shortcut = 0,
|
||||
const QObject* receiver = NULL,
|
||||
const char* member = NULL,
|
||||
QAction::MenuRole role = QAction::NoRole);
|
||||
|
||||
QAction* addCheckableActionToQMenuAndActionHash(QMenu* destinationMenu,
|
||||
const QString actionName,
|
||||
|
@ -141,6 +146,8 @@ private:
|
|||
int _boundaryLevelAdjust;
|
||||
QAction* _useVoxelShader;
|
||||
int _maxVoxelPacketsPerSecond;
|
||||
|
||||
QMenu* _activeScriptsMenu;
|
||||
};
|
||||
|
||||
namespace MenuOption {
|
||||
|
@ -149,6 +156,7 @@ namespace MenuOption {
|
|||
const QString Avatars = "Avatars";
|
||||
const QString Atmosphere = "Atmosphere";
|
||||
const QString AutomaticallyAuditTree = "Automatically Audit Tree Stats";
|
||||
const QString BallFromHand = "Ball from Hand";
|
||||
const QString Bandwidth = "Bandwidth Display";
|
||||
const QString BandwidthDetails = "Bandwidth Details";
|
||||
const QString ChatCircling = "Chat Circling";
|
||||
|
@ -167,6 +175,7 @@ namespace MenuOption {
|
|||
const QString DisableLowRes = "Disable Lower Resolution While Moving";
|
||||
const QString DisplayFrustum = "Display Frustum";
|
||||
const QString DisplayLeapHands = "Display Leap Hands";
|
||||
const QString DisplayHandTargets = "Display Hand Targets";
|
||||
const QString FilterSixense = "Smooth Sixense Movement";
|
||||
const QString DontRenderVoxels = "Don't call _voxels.render()";
|
||||
const QString DontCallOpenGLForVoxels = "Don't call glDrawRangeElementsEXT() for Voxels";
|
||||
|
@ -219,6 +228,7 @@ namespace MenuOption {
|
|||
const QString OldVoxelCullingMode = "Old Voxel Culling Mode";
|
||||
const QString TurnWithHead = "Turn using Head";
|
||||
const QString ClickToFly = "Fly to voxel on click";
|
||||
const QString LoadScript = "Open and Run Script...";
|
||||
const QString Oscilloscope = "Audio Oscilloscope";
|
||||
const QString Pair = "Pair";
|
||||
const QString PasteVoxels = "Paste";
|
||||
|
@ -235,6 +245,8 @@ namespace MenuOption {
|
|||
const QString ShowAllLocalVoxels = "Show All Local Voxels";
|
||||
const QString ShowTrueColors = "Show TRUE Colors";
|
||||
const QString SimulateLeapHand = "Simulate Leap Hand";
|
||||
const QString VoxelDrumming = "Voxel Drumming";
|
||||
const QString PlaySlaps = "Play Slaps";
|
||||
const QString SkeletonTracking = "Skeleton Tracking";
|
||||
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
|
||||
const QString LEDTracking = "LED Tracking";
|
||||
|
|
|
@ -68,24 +68,18 @@ Oscilloscope::~Oscilloscope() {
|
|||
delete[] _samples;
|
||||
}
|
||||
|
||||
void Oscilloscope::addStereoSamples(const QByteArray& audioByteArray, bool isInput) {
|
||||
void Oscilloscope::addSamples(const QByteArray& audioByteArray, bool isStereo, bool isInput) {
|
||||
|
||||
if (! enabled || inputPaused) {
|
||||
return;
|
||||
}
|
||||
|
||||
unsigned int numSamplesPerChannel = audioByteArray.size() / (sizeof(int16_t) * 2);
|
||||
int16_t samples[numSamplesPerChannel];
|
||||
const int16_t* stereoSamples = (int16_t*) audioByteArray.constData();
|
||||
int numSamplesPerChannel = audioByteArray.size() / (sizeof(int16_t) * (isStereo ? 2 : 1));
|
||||
int16_t* samples = (int16_t*) audioByteArray.data();
|
||||
|
||||
for (int channel = 0; channel < (isInput ? 1 : 2); channel++) {
|
||||
for (int channel = 0; channel < (isStereo ? 2 : 1); channel++) {
|
||||
// add samples for each of the channels
|
||||
|
||||
// enumerate the interleaved stereoSamples array and pull out the samples for this channel
|
||||
for (int i = 0; i < audioByteArray.size() / sizeof(int16_t); i += 2) {
|
||||
samples[i / 2] = stereoSamples[i + channel];
|
||||
}
|
||||
|
||||
|
||||
// determine start/end offset of this channel's region
|
||||
unsigned baseOffs = MAX_SAMPLES_PER_CHANNEL * (channel + !isInput);
|
||||
unsigned endOffs = baseOffs + MAX_SAMPLES_PER_CHANNEL;
|
||||
|
@ -103,10 +97,21 @@ void Oscilloscope::addStereoSamples(const QByteArray& audioByteArray, bool isInp
numSamplesPerChannel -= n2;
}
// copy data
memcpy(_samples + writePos, samples, numSamplesPerChannel * sizeof(int16_t));
if (n2 > 0) {
memcpy(_samples + baseOffs, samples + numSamplesPerChannel, n2 * sizeof(int16_t));
if (!isStereo) {
// copy data
memcpy(_samples + writePos, samples, numSamplesPerChannel * sizeof(int16_t));
if (n2 > 0) {
memcpy(_samples + baseOffs, samples + numSamplesPerChannel, n2 * sizeof(int16_t));
}
} else {
// we have interleaved samples we need to separate into two channels
for (int i = 0; i < numSamplesPerChannel + n2; i++) {
if (i < numSamplesPerChannel - n2) {
_samples[writePos] = samples[(i * 2) + channel];
} else {
_samples[baseOffs] = samples[(i * 2) + channel];
}
}
}
// set new write position for this channel
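The stereo branch above walks the interleaved buffer and copies every other sample into this channel's region of the scope buffer. A minimal sketch of that de-interleave step on its own, assuming contiguous destination arrays and leaving out the ring-wrap bookkeeping handled above (illustrative names, not part of this commit):

#include <cstdint>
#include <vector>

// Split interleaved stereo samples (L0 R0 L1 R1 ...) into two per-channel arrays.
void deinterleaveStereo(const int16_t* interleaved, int framesPerChannel,
                        std::vector<int16_t>& left, std::vector<int16_t>& right) {
    left.resize(framesPerChannel);
    right.resize(framesPerChannel);
    for (int i = 0; i < framesPerChannel; i++) {
        left[i] = interleaved[i * 2];        // channel 0
        right[i] = interleaved[i * 2 + 1];   // channel 1
    }
}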
|
||||
|
|
|
@ -59,7 +59,7 @@ public:
|
|||
// just uses every nTh sample.
|
||||
void setDownsampleRatio(unsigned n) { assert(n > 0); _downsampleRatio = n; }
|
||||
public slots:
|
||||
void addStereoSamples(const QByteArray& audioByteArray, bool isInput);
|
||||
void addSamples(const QByteArray& audioByteArray, bool isStereo, bool isInput);
|
||||
private:
|
||||
// don't copy/assign
|
||||
Oscilloscope(Oscilloscope const&); // = delete;
|
||||
|
|
|
@ -17,18 +17,21 @@
|
|||
#include "Util.h"
|
||||
#include "renderer/ProgramObject.h"
|
||||
|
||||
//#define DEBUG_HAND
|
||||
|
||||
using namespace std;
|
||||
|
||||
const float FINGERTIP_VOXEL_SIZE = 0.05;
|
||||
const int TOY_BALL_HAND = 1;
|
||||
const float TOY_BALL_RADIUS = 0.05f;
|
||||
const float TOY_BALL_DAMPING = 0.99f;
|
||||
const glm::vec3 NO_GRAVITY = glm::vec3(0,0,0);
|
||||
const glm::vec3 NO_VELOCITY = glm::vec3(0,0,0);
|
||||
const glm::vec3 TOY_BALL_GRAVITY = glm::vec3(0,-1,0);
|
||||
const glm::vec3 NO_GRAVITY = glm::vec3(0,0,0);
|
||||
const float NO_DAMPING = 0.f;
|
||||
const glm::vec3 TOY_BALL_GRAVITY = glm::vec3(0,-0.5,0);
|
||||
const QString TOY_BALL_UPDATE_SCRIPT("");
|
||||
const QString TOY_BALL_DONT_DIE_SCRIPT("Particle.setShouldDie(false);");
|
||||
const float PALM_COLLISION_RADIUS = 0.03f;
|
||||
const float CATCH_RADIUS = 0.2f;
|
||||
const xColor TOY_BALL_ON_SERVER_COLOR[] =
|
||||
{
|
||||
{ 255, 0, 0 },
|
||||
|
@ -74,21 +77,46 @@ void Hand::reset() {
|
|||
}
|
||||
|
||||
void Hand::simulateToyBall(PalmData& palm, const glm::vec3& fingerTipPosition, float deltaTime) {
|
||||
glm::vec3 targetPosition = fingerTipPosition / (float)TREE_SCALE;
|
||||
float targetRadius = (TOY_BALL_RADIUS * 2.0f) / (float)TREE_SCALE;
|
||||
bool ballFromHand = Menu::getInstance()->isOptionChecked(MenuOption::BallFromHand);
|
||||
int handID = palm.getSixenseID();
|
||||
|
||||
bool grabButtonPressed = (palm.getControllerButtons() & BUTTON_FWD);
|
||||
bool ballAlreadyInHand = _toyBallInHand[handID];
|
||||
|
||||
glm::vec3 targetPosition = (ballFromHand ? palm.getPosition() : fingerTipPosition) / (float)TREE_SCALE;
|
||||
float targetRadius = CATCH_RADIUS / (float)TREE_SCALE;
|
||||
const Particle* closestParticle = Application::getInstance()->getParticles()
|
||||
->getTree()->findClosestParticle(targetPosition, targetRadius);
|
||||
|
||||
|
||||
if (closestParticle) {
|
||||
//printf("potentially caught... particle ID:%d\n", closestParticle->getID());
|
||||
|
||||
// you can create a ParticleEditHandle by doing this...
|
||||
ParticleEditHandle* caughtParticle = Application::getInstance()->newParticleEditHandle(closestParticle->getID());
|
||||
|
||||
// but make sure you clean it up, when you're done
|
||||
delete caughtParticle;
|
||||
// If I don't currently have a ball in my hand, then I can catch this closest particle
|
||||
if (!ballAlreadyInHand && grabButtonPressed) {
|
||||
ParticleEditHandle* caughtParticle = Application::getInstance()->newParticleEditHandle(closestParticle->getID());
|
||||
glm::vec3 newPosition = targetPosition;
|
||||
glm::vec3 newVelocity = NO_VELOCITY;
|
||||
|
||||
// update the particle with it's new state...
|
||||
#ifdef DEBUG_HAND
|
||||
qDebug("Update caught particle!\n");
|
||||
#endif
|
||||
caughtParticle->updateParticle(newPosition,
|
||||
closestParticle->getRadius(),
|
||||
closestParticle->getXColor(),
|
||||
newVelocity,
|
||||
NO_GRAVITY,
|
||||
NO_DAMPING,
|
||||
IN_HAND, // we just grabbed it!
|
||||
closestParticle->getUpdateScript());
|
||||
|
||||
// now tell our hand about us having caught it...
|
||||
_toyBallInHand[handID] = true;
|
||||
|
||||
//printf(">>>>>>> caught... handID:%d particle ID:%d _toyBallInHand[handID] = true\n", handID, closestParticle->getID());
|
||||
_ballParticleEditHandles[handID] = caughtParticle;
|
||||
caughtParticle = NULL;
|
||||
}
|
||||
}
|
||||
int handID = palm.getSixenseID();
|
||||
|
||||
// If there's a ball in hand, and the user presses the skinny button, then change the color of the ball
|
||||
int currentControllerButtons = palm.getControllerButtons();
|
||||
|
@ -117,42 +145,56 @@ void Hand::simulateToyBall(PalmData& palm, const glm::vec3& fingerTipPosition, f
|
|||
// create the ball, call MakeParticle, and use the resulting ParticleEditHandle to
|
||||
// manage the newly created particle.
|
||||
// Create a particle on the particle server
|
||||
#ifdef DEBUG_HAND
|
||||
qDebug("Created New Ball\n");
|
||||
#endif
|
||||
glm::vec3 ballPosition = ballFromHand ? palm.getPosition() : fingerTipPosition;
|
||||
_ballParticleEditHandles[handID] = Application::getInstance()->makeParticle(
|
||||
fingerTipPosition / (float)TREE_SCALE,
|
||||
ballPosition / (float)TREE_SCALE,
|
||||
TOY_BALL_RADIUS / (float) TREE_SCALE,
|
||||
TOY_BALL_ON_SERVER_COLOR[_whichBallColor[handID]],
|
||||
NO_VELOCITY / (float)TREE_SCALE,
|
||||
NO_GRAVITY / (float) TREE_SCALE,
|
||||
TOY_BALL_DAMPING,
|
||||
TOY_BALL_DONT_DIE_SCRIPT);
|
||||
TOY_BALL_GRAVITY / (float) TREE_SCALE,
|
||||
TOY_BALL_DAMPING,
|
||||
IN_HAND,
|
||||
TOY_BALL_UPDATE_SCRIPT);
|
||||
}
|
||||
} else {
|
||||
// Ball is in hand
|
||||
_ballParticleEditHandles[handID]->updateParticle(fingerTipPosition / (float)TREE_SCALE,
|
||||
#ifdef DEBUG_HAND
|
||||
qDebug("Ball in hand\n");
|
||||
#endif
|
||||
glm::vec3 ballPosition = ballFromHand ? palm.getPosition() : fingerTipPosition;
|
||||
_ballParticleEditHandles[handID]->updateParticle(ballPosition / (float)TREE_SCALE,
|
||||
TOY_BALL_RADIUS / (float) TREE_SCALE,
|
||||
TOY_BALL_ON_SERVER_COLOR[_whichBallColor[handID]],
|
||||
NO_VELOCITY / (float)TREE_SCALE,
|
||||
NO_GRAVITY / (float) TREE_SCALE,
|
||||
TOY_BALL_DAMPING,
|
||||
TOY_BALL_DONT_DIE_SCRIPT);
|
||||
TOY_BALL_GRAVITY / (float) TREE_SCALE,
|
||||
TOY_BALL_DAMPING,
|
||||
IN_HAND,
|
||||
TOY_BALL_UPDATE_SCRIPT);
|
||||
}
|
||||
} else {
|
||||
// If toy ball just released, add velocity to it!
|
||||
if (_toyBallInHand[handID]) {
|
||||
|
||||
_toyBallInHand[handID] = false;
|
||||
glm::vec3 handVelocity = palm.getRawVelocity();
|
||||
glm::vec3 fingerTipVelocity = palm.getTipVelocity();
|
||||
glm::vec3 ballPosition = ballFromHand ? palm.getPosition() : fingerTipPosition;
|
||||
glm::vec3 ballVelocity = ballFromHand ? palm.getRawVelocity() : palm.getTipVelocity();
|
||||
glm::quat avatarRotation = _owningAvatar->getOrientation();
|
||||
glm::vec3 toyBallVelocity = avatarRotation * fingerTipVelocity;
|
||||
ballVelocity = avatarRotation * ballVelocity;
|
||||
|
||||
// ball is no longer in hand...
|
||||
_ballParticleEditHandles[handID]->updateParticle(fingerTipPosition / (float)TREE_SCALE,
|
||||
#ifdef DEBUG_HAND
|
||||
qDebug("Threw ball, v = %.3f\n", glm::length(ballVelocity));
|
||||
#endif
|
||||
_ballParticleEditHandles[handID]->updateParticle(ballPosition / (float)TREE_SCALE,
|
||||
TOY_BALL_RADIUS / (float) TREE_SCALE,
|
||||
TOY_BALL_ON_SERVER_COLOR[_whichBallColor[handID]],
|
||||
toyBallVelocity / (float)TREE_SCALE,
|
||||
ballVelocity / (float)TREE_SCALE,
|
||||
TOY_BALL_GRAVITY / (float) TREE_SCALE,
|
||||
TOY_BALL_DAMPING,
|
||||
TOY_BALL_DAMPING,
|
||||
NOT_IN_HAND,
|
||||
TOY_BALL_UPDATE_SCRIPT);
|
||||
|
||||
// after releasing the ball, we free our ParticleEditHandle so we can't edit it further
|
||||
|
@ -240,32 +282,35 @@ void Hand::simulate(float deltaTime, bool isMine) {
|
|||
_lastFingerDeleteVoxel = fingerTipPosition;
|
||||
}
|
||||
}
|
||||
// Check if the finger is intersecting with a voxel in the client voxel tree
|
||||
VoxelTreeElement* fingerNode = Application::getInstance()->getVoxels()->getVoxelEnclosing(
|
||||
glm::vec3(fingerTipPosition / (float)TREE_SCALE));
|
||||
if (fingerNode) {
|
||||
if (!palm.getIsCollidingWithVoxel()) {
|
||||
// Collision has just started
|
||||
palm.setIsCollidingWithVoxel(true);
|
||||
handleVoxelCollision(&palm, fingerTipPosition, fingerNode, deltaTime);
|
||||
// Set highlight voxel
|
||||
VoxelDetail voxel;
|
||||
glm::vec3 pos = fingerNode->getCorner();
|
||||
voxel.x = pos.x;
|
||||
voxel.y = pos.y;
|
||||
voxel.z = pos.z;
|
||||
voxel.s = fingerNode->getScale();
|
||||
voxel.red = fingerNode->getColor()[0];
|
||||
voxel.green = fingerNode->getColor()[1];
|
||||
voxel.blue = fingerNode->getColor()[2];
|
||||
Application::getInstance()->setHighlightVoxel(voxel);
|
||||
Application::getInstance()->setIsHighlightVoxel(true);
|
||||
}
|
||||
} else {
|
||||
if (palm.getIsCollidingWithVoxel()) {
|
||||
// Collision has just ended
|
||||
palm.setIsCollidingWithVoxel(false);
|
||||
Application::getInstance()->setIsHighlightVoxel(false);
|
||||
|
||||
// Voxel Drumming with fingertips if enabled
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::VoxelDrumming)) {
|
||||
VoxelTreeElement* fingerNode = Application::getInstance()->getVoxels()->getVoxelEnclosing(
|
||||
glm::vec3(fingerTipPosition / (float)TREE_SCALE));
|
||||
if (fingerNode) {
|
||||
if (!palm.getIsCollidingWithVoxel()) {
|
||||
// Collision has just started
|
||||
palm.setIsCollidingWithVoxel(true);
|
||||
handleVoxelCollision(&palm, fingerTipPosition, fingerNode, deltaTime);
|
||||
// Set highlight voxel
|
||||
VoxelDetail voxel;
|
||||
glm::vec3 pos = fingerNode->getCorner();
|
||||
voxel.x = pos.x;
|
||||
voxel.y = pos.y;
|
||||
voxel.z = pos.z;
|
||||
voxel.s = fingerNode->getScale();
|
||||
voxel.red = fingerNode->getColor()[0];
|
||||
voxel.green = fingerNode->getColor()[1];
|
||||
voxel.blue = fingerNode->getColor()[2];
|
||||
Application::getInstance()->setHighlightVoxel(voxel);
|
||||
Application::getInstance()->setIsHighlightVoxel(true);
|
||||
}
|
||||
} else {
|
||||
if (palm.getIsCollidingWithVoxel()) {
|
||||
// Collision has just ended
|
||||
palm.setIsCollidingWithVoxel(false);
|
||||
Application::getInstance()->setIsHighlightVoxel(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -293,9 +338,37 @@ void Hand::updateCollisions() {
|
|||
for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
|
||||
if (node->getLinkedData() && node->getType() == NODE_TYPE_AGENT) {
|
||||
Avatar* otherAvatar = (Avatar*)node->getLinkedData();
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::PlaySlaps)) {
|
||||
// Check for palm collisions
|
||||
glm::vec3 myPalmPosition = palm.getPosition();
|
||||
float palmCollisionDistance = 0.1f;
|
||||
palm.setIsCollidingWithPalm(false);
|
||||
// If 'Play Slaps' is enabled, look for palm-to-palm collisions and make sound
|
||||
for (int j = 0; j < otherAvatar->getHand().getNumPalms(); j++) {
|
||||
PalmData& otherPalm = otherAvatar->getHand().getPalms()[j];
|
||||
if (!otherPalm.isActive()) {
|
||||
continue;
|
||||
}
|
||||
glm::vec3 otherPalmPosition = otherPalm.getPosition();
|
||||
if (glm::length(otherPalmPosition - myPalmPosition) < palmCollisionDistance) {
|
||||
palm.setIsCollidingWithPalm(true);
|
||||
const float PALM_COLLIDE_VOLUME = 1.f;
|
||||
const float PALM_COLLIDE_FREQUENCY = 150.f;
|
||||
const float PALM_COLLIDE_DURATION_MAX = 2.f;
|
||||
const float PALM_COLLIDE_DECAY_PER_SAMPLE = 0.005f;
|
||||
Application::getInstance()->getAudio()->startDrumSound(PALM_COLLIDE_VOLUME,
|
||||
PALM_COLLIDE_FREQUENCY,
|
||||
PALM_COLLIDE_DURATION_MAX,
|
||||
PALM_COLLIDE_DECAY_PER_SAMPLE);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
glm::vec3 avatarPenetration;
|
||||
if (otherAvatar->findSpherePenetration(palm.getPosition(), scaledPalmRadius, avatarPenetration)) {
|
||||
totalPenetration = addPenetrations(totalPenetration, avatarPenetration);
|
||||
// Check for collisions with the other avatar's leap palms
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -385,20 +458,23 @@ void Hand::render( bool isMine) {
|
|||
|
||||
_renderAlpha = 1.0;
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) {
|
||||
for (int i = 0; i < getNumPalms(); i++) {
|
||||
PalmData& palm = getPalms()[i];
|
||||
if (!palm.isActive()) {
|
||||
continue;
|
||||
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) {
|
||||
for (int i = 0; i < getNumPalms(); i++) {
|
||||
PalmData& palm = getPalms()[i];
|
||||
if (!palm.isActive()) {
|
||||
continue;
|
||||
}
|
||||
glm::vec3 position = palm.getPosition();
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glColor3f(0.0f, 1.0f, 0.0f);
|
||||
glutSolidSphere(PALM_COLLISION_RADIUS * _owningAvatar->getScale(), 10, 10);
|
||||
glPopMatrix();
|
||||
}
|
||||
glm::vec3 position = palm.getPosition();
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glColor3f(0.0f, 1.0f, 0.0f);
|
||||
glutSolidSphere(PALM_COLLISION_RADIUS * _owningAvatar->getScale(), 10, 10);
|
||||
glPopMatrix();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::DisplayLeapHands)) {
|
||||
renderLeapHands();
|
||||
|
@ -427,11 +503,44 @@ void Hand::render( bool isMine) {
|
|||
void Hand::renderLeapHands() {
|
||||
|
||||
const float alpha = 1.0f;
|
||||
const float TARGET_ALPHA = 0.5f;
|
||||
|
||||
//const glm::vec3 handColor = _ballColor;
|
||||
const glm::vec3 handColor(1.0, 0.84, 0.66); // use the skin color
|
||||
bool ballFromHand = Menu::getInstance()->isOptionChecked(MenuOption::BallFromHand);
|
||||
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glDepthMask(GL_TRUE);
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::DisplayHandTargets)) {
|
||||
for (size_t i = 0; i < getNumPalms(); ++i) {
|
||||
PalmData& palm = getPalms()[i];
|
||||
if (!palm.isActive()) {
|
||||
continue;
|
||||
}
|
||||
glm::vec3 targetPosition = ballFromHand ? palm.getPosition() : palm.getTipPosition();
|
||||
glPushMatrix();
|
||||
|
||||
const Particle* closestParticle = Application::getInstance()->getParticles()
|
||||
->getTree()->findClosestParticle(targetPosition / (float)TREE_SCALE,
|
||||
CATCH_RADIUS / (float)TREE_SCALE);
|
||||
|
||||
// If we are hitting a particle then draw the target green, otherwise yellow
|
||||
if (closestParticle) {
|
||||
glColor4f(0,1,0, TARGET_ALPHA);
|
||||
} else {
|
||||
glColor4f(1,1,0, TARGET_ALPHA);
|
||||
}
|
||||
glTranslatef(targetPosition.x, targetPosition.y, targetPosition.z);
|
||||
glutWireSphere(CATCH_RADIUS, 10.f, 10.f);
|
||||
|
||||
const float collisionRadius = 0.05f;
|
||||
glColor4f(0.5f,0.5f,0.5f, alpha);
|
||||
glutWireSphere(collisionRadius, 10.f, 10.f);
|
||||
glPopMatrix();
|
||||
}
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
// Draw the leap balls
|
||||
for (size_t i = 0; i < _leapFingerTipBalls.size(); i++) {
|
||||
|
@ -469,10 +578,16 @@ void Hand::renderLeapHands() {
|
|||
PalmData& palm = getPalms()[i];
|
||||
if (palm.isActive()) {
|
||||
const float palmThickness = 0.02f;
|
||||
glColor4f(handColor.r, handColor.g, handColor.b, 0.25);
|
||||
if (palm.getIsCollidingWithPalm()) {
|
||||
glColor4f(1, 0, 0, 0.50);
|
||||
} else {
|
||||
glColor4f(handColor.r, handColor.g, handColor.b, 0.25);
|
||||
}
|
||||
glm::vec3 tip = palm.getPosition();
|
||||
glm::vec3 root = palm.getPosition() + palm.getNormal() * palmThickness;
|
||||
Avatar::renderJointConnectingCone(root, tip, 0.05, 0.03);
|
||||
const float radiusA = 0.05f;
|
||||
const float radiusB = 0.03f;
|
||||
Avatar::renderJointConnectingCone(root, tip, radiusA, radiusB);
|
||||
}
|
||||
}
|
||||
glDepthMask(GL_TRUE);
|
||||
|
|
|
@ -64,7 +64,7 @@ public:
|
|||
// Get the drag distance to move
|
||||
glm::vec3 getAndResetGrabDelta();
|
||||
glm::vec3 getAndResetGrabDeltaVelocity();
|
||||
|
||||
|
||||
private:
|
||||
// disallow copies of the Hand, copy of owning Avatar is disallowed too
|
||||
Hand(const Hand&);
|
||||
|
@ -79,7 +79,6 @@ private:
|
|||
std::vector<HandBall> _leapFingerRootBalls;
|
||||
|
||||
glm::vec3 _lastFingerAddVoxel, _lastFingerDeleteVoxel;
|
||||
bool _isCollidingWithVoxel;
|
||||
VoxelDetail _collidingVoxel;
|
||||
|
||||
glm::vec3 _collisionCenter;
|
||||
|
|
|
@ -765,3 +765,4 @@ void Head::updateHairPhysics(float deltaTime) {
|
|||
_hairTuft[t].update(deltaTime);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -426,6 +426,31 @@ static TextRenderer* textRenderer() {
|
|||
return renderer;
|
||||
}
|
||||
|
||||
void MyAvatar::renderDebugBodyPoints() {
|
||||
glm::vec3 torsoPosition(getPosition());
|
||||
glm::vec3 headPosition(getHead().getEyePosition());
|
||||
float torsoToHead = glm::length(headPosition - torsoPosition);
|
||||
glm::vec3 position;
|
||||
printf("head-above-torso %.2f, scale = %0.2f\n", torsoToHead, getScale());
|
||||
|
||||
// Torso Sphere
|
||||
position = torsoPosition;
|
||||
glPushMatrix();
|
||||
glColor4f(0, 1, 0, .5f);
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glutSolidSphere(0.2, 10, 10);
|
||||
glPopMatrix();
|
||||
|
||||
// Head Sphere
|
||||
position = headPosition;
|
||||
glPushMatrix();
|
||||
glColor4f(0, 1, 0, .5f);
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glutSolidSphere(0.15, 10, 10);
|
||||
glPopMatrix();
|
||||
|
||||
|
||||
}
|
||||
void MyAvatar::render(bool forceRenderHead) {
|
||||
|
||||
// render body
|
||||
|
@ -438,6 +463,9 @@ void MyAvatar::render(bool forceRenderHead) {
|
|||
glPopMatrix();
|
||||
}
|
||||
|
||||
//renderDebugBodyPoints();
|
||||
|
||||
|
||||
if (!_chatMessage.empty()) {
|
||||
int width = 0;
|
||||
int lastWidth = 0;
|
||||
|
|
|
@ -30,6 +30,7 @@ public:
|
|||
void simulate(float deltaTime, Transmitter* transmitter);
|
||||
void updateFromGyrosAndOrWebcam(bool turnWithHead);
|
||||
void render(bool forceRenderHead);
|
||||
void renderDebugBodyPoints();
|
||||
|
||||
// setters
|
||||
void setMousePressed(bool mousePressed) { _mousePressed = mousePressed; }
|
||||
|
|
|
@ -79,12 +79,6 @@ void SixenseManager::update(float deltaTime) {
|
|||
palm->setTrigger(data.trigger);
|
||||
palm->setJoystick(data.joystick_x, data.joystick_y);
|
||||
|
||||
// Vibrate if needed
|
||||
if (palm->getIsCollidingWithVoxel()) {
|
||||
//printf("vibrate!\n");
|
||||
//vibrate(data.controller_index, 100, 1);
|
||||
}
|
||||
|
||||
glm::vec3 position(data.pos[0], data.pos[1], data.pos[2]);
|
||||
// Adjust for distance between acquisition 'orb' and the user's torso
|
||||
// (distance to the right of body center, distance below torso, distance behind torso)
|
||||
|
@ -113,8 +107,8 @@ void SixenseManager::update(float deltaTime) {
|
|||
const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
finger.setRawTipPosition(position + rotation * FINGER_VECTOR);

// temporary for toy ball - store first finger tip velocity
glm::vec3 oldTipPosition = palm->getTipPosition();
// Store the one fingertip in the palm structure so we can track velocity
glm::vec3 oldTipPosition = palm->getTipRawPosition();
palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime / 1000.f);
palm->setTipPosition(newTipPosition);
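The tip velocity above is a finite difference of successive tip positions over the frame time. A minimal sketch of that estimate, assuming glm vectors and leaving out the unit scaling applied in the line above (illustrative, not part of this commit):

#include <glm/glm.hpp>

// Estimate velocity from two successive positions and the elapsed frame time.
glm::vec3 estimateVelocity(const glm::vec3& previousPosition, const glm::vec3& currentPosition, float deltaTime) {
    return (currentPosition - previousPosition) / deltaTime;
}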
@ -170,7 +170,7 @@ void VoxelStatsDialog::paintEvent(QPaintEvent* event) {
|
|||
unsigned long totalLeaves = 0;
|
||||
|
||||
Application::getInstance()->lockVoxelSceneStats();
|
||||
NodeToVoxelSceneStats* sceneStats = Application::getInstance()->getVoxelSceneStats();
|
||||
NodeToVoxelSceneStats* sceneStats = Application::getInstance()->getOcteeSceneStats();
|
||||
for(NodeToVoxelSceneStatsIterator i = sceneStats->begin(); i != sceneStats->end(); i++) {
|
||||
//const QUuid& uuid = i->first;
|
||||
VoxelSceneStats& stats = i->second;
|
||||
|
@ -215,26 +215,42 @@ void VoxelStatsDialog::paintEvent(QPaintEvent* event) {
|
|||
"Leaves: " << serversLeavesString.toLocal8Bit().constData() << "";
|
||||
label->setText(statsValue.str().c_str());
|
||||
|
||||
showAllVoxelServers();
|
||||
showAllOctreeServers();
|
||||
|
||||
this->QDialog::paintEvent(event);
|
||||
}
|
||||
void VoxelStatsDialog::showAllOctreeServers() {
|
||||
int serverCount = 0;
|
||||
|
||||
void VoxelStatsDialog::showAllVoxelServers() {
|
||||
showOctreeServersOfType(serverCount, NODE_TYPE_VOXEL_SERVER, "Voxel",
|
||||
Application::getInstance()->getVoxelServerJurisdictions());
|
||||
showOctreeServersOfType(serverCount, NODE_TYPE_PARTICLE_SERVER, "Particle",
|
||||
Application::getInstance()->getParticleServerJurisdictions());
|
||||
|
||||
if (_voxelServerLabelsCount > serverCount) {
|
||||
for (int i = serverCount; i < _voxelServerLabelsCount; i++) {
|
||||
int serverLabel = _voxelServerLables[i];
|
||||
RemoveStatItem(serverLabel);
|
||||
_voxelServerLables[i] = 0;
|
||||
}
|
||||
_voxelServerLabelsCount = serverCount;
|
||||
}
|
||||
}
|
||||
|
||||
void VoxelStatsDialog::showOctreeServersOfType(int& serverCount, NODE_TYPE serverType, const char* serverTypeName,
|
||||
NodeToJurisdictionMap& serverJurisdictions) {
|
||||
|
||||
QLocale locale(QLocale::English);
|
||||
|
||||
int serverNumber = 0;
|
||||
int serverCount = 0;
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
|
||||
// only send to the NodeTypes that are NODE_TYPE_VOXEL_SERVER
|
||||
if (node->getType() == NODE_TYPE_VOXEL_SERVER) {
|
||||
serverNumber++;
|
||||
if (node->getType() == serverType) {
|
||||
serverCount++;
|
||||
|
||||
if (serverCount > _voxelServerLabelsCount) {
|
||||
char label[128] = { 0 };
|
||||
sprintf(label, "Voxel Server %d",serverCount);
|
||||
sprintf(label, "%s Server %d", serverTypeName, serverCount);
|
||||
int thisServerRow = _voxelServerLables[serverCount-1] = AddStatItem(label);
|
||||
_labels[thisServerRow]->setTextFormat(Qt::RichText);
|
||||
_labels[thisServerRow]->setTextInteractionFlags(Qt::TextBrowserInteraction);
|
||||
|
@ -254,14 +270,12 @@ void VoxelStatsDialog::showAllVoxelServers() {
|
|||
|
||||
QUuid nodeUUID = node->getUUID();
|
||||
|
||||
NodeToJurisdictionMap& voxelServerJurisdictions = Application::getInstance()->getVoxelServerJurisdictions();
|
||||
|
||||
// lookup our nodeUUID in the jurisdiction map, if it's missing then we're
|
||||
// missing at least one jurisdiction
|
||||
if (voxelServerJurisdictions.find(nodeUUID) == voxelServerJurisdictions.end()) {
|
||||
if (serverJurisdictions.find(nodeUUID) == serverJurisdictions.end()) {
|
||||
serverDetails << " unknown jurisdiction ";
|
||||
} else {
|
||||
const JurisdictionMap& map = voxelServerJurisdictions[nodeUUID];
|
||||
const JurisdictionMap& map = serverJurisdictions[nodeUUID];
|
||||
|
||||
unsigned char* rootCode = map.getRootOctalCode();
|
||||
|
||||
|
@ -285,13 +299,13 @@ void VoxelStatsDialog::showAllVoxelServers() {
|
|||
} // jurisdiction
|
||||
|
||||
// now lookup stats details for this server...
|
||||
if (_extraServerDetails[serverNumber-1] != LESS) {
|
||||
if (_extraServerDetails[serverCount-1] != LESS) {
|
||||
Application::getInstance()->lockVoxelSceneStats();
|
||||
NodeToVoxelSceneStats* sceneStats = Application::getInstance()->getVoxelSceneStats();
|
||||
NodeToVoxelSceneStats* sceneStats = Application::getInstance()->getOcteeSceneStats();
|
||||
if (sceneStats->find(nodeUUID) != sceneStats->end()) {
|
||||
VoxelSceneStats& stats = sceneStats->at(nodeUUID);
|
||||
|
||||
switch (_extraServerDetails[serverNumber-1]) {
|
||||
switch (_extraServerDetails[serverCount-1]) {
|
||||
case MOST: {
|
||||
extraDetails << "<br/>" ;
|
||||
|
||||
|
@ -345,12 +359,12 @@ void VoxelStatsDialog::showAllVoxelServers() {
|
|||
" Wasted Bytes: " << incomingWastedBytesString.toLocal8Bit().constData();
|
||||
|
||||
serverDetails << extraDetails.str();
|
||||
if (_extraServerDetails[serverNumber-1] == MORE) {
|
||||
linkDetails << " " << " [<a href='most-" << serverNumber << "'>most...</a>]";
|
||||
linkDetails << " " << " [<a href='less-" << serverNumber << "'>less...</a>]";
|
||||
if (_extraServerDetails[serverCount-1] == MORE) {
|
||||
linkDetails << " " << " [<a href='most-" << serverCount << "'>most...</a>]";
|
||||
linkDetails << " " << " [<a href='less-" << serverCount << "'>less...</a>]";
|
||||
} else {
|
||||
linkDetails << " " << " [<a href='more-" << serverNumber << "'>less...</a>]";
|
||||
linkDetails << " " << " [<a href='less-" << serverNumber << "'>least...</a>]";
|
||||
linkDetails << " " << " [<a href='more-" << serverCount << "'>less...</a>]";
|
||||
linkDetails << " " << " [<a href='less-" << serverCount << "'>least...</a>]";
|
||||
}
|
||||
|
||||
} break;
|
||||
|
@ -361,22 +375,13 @@ void VoxelStatsDialog::showAllVoxelServers() {
|
|||
}
|
||||
Application::getInstance()->unlockVoxelSceneStats();
|
||||
} else {
|
||||
linkDetails << " " << " [<a href='more-" << serverNumber << "'>more...</a>]";
|
||||
linkDetails << " " << " [<a href='most-" << serverNumber << "'>most...</a>]";
|
||||
linkDetails << " " << " [<a href='more-" << serverCount << "'>more...</a>]";
|
||||
linkDetails << " " << " [<a href='most-" << serverCount << "'>most...</a>]";
|
||||
}
|
||||
serverDetails << linkDetails.str();
|
||||
_labels[_voxelServerLables[serverCount - 1]]->setText(serverDetails.str().c_str());
|
||||
} // is VOXEL_SERVER
|
||||
} // Node Loop
|
||||
|
||||
if (_voxelServerLabelsCount > serverCount) {
|
||||
for (int i = serverCount; i < _voxelServerLabelsCount; i++) {
|
||||
int serverLabel = _voxelServerLables[i];
|
||||
RemoveStatItem(serverLabel);
|
||||
_voxelServerLables[i] = 0;
|
||||
}
|
||||
_voxelServerLabelsCount = serverCount;
|
||||
}
|
||||
}
|
||||
|
||||
void VoxelStatsDialog::reject() {
|
||||
|
|
|
@ -42,7 +42,10 @@ protected:
|
|||
|
||||
int AddStatItem(const char* caption, unsigned colorRGBA = DEFAULT_COLOR);
|
||||
void RemoveStatItem(int item);
|
||||
void showAllVoxelServers();
|
||||
void showAllOctreeServers();
|
||||
|
||||
void showOctreeServersOfType(int& serverNumber, NODE_TYPE serverType,
|
||||
const char* serverTypeName, NodeToJurisdictionMap& serverJurisdictions);
|
||||
|
||||
private:
|
||||
|
||||
|
|
|
@ -9,19 +9,27 @@
|
|||
#include <cstring>
|
||||
#include <math.h>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
|
||||
#include "PacketHeaders.h"
|
||||
|
||||
#include "AudioRingBuffer.h"
|
||||
|
||||
AudioRingBuffer::AudioRingBuffer(bool isStereo) :
|
||||
AudioRingBuffer::AudioRingBuffer(int numFrameSamples) :
|
||||
NodeData(NULL),
|
||||
_endOfLastWrite(NULL),
|
||||
_sampleCapacity(numFrameSamples * RING_BUFFER_LENGTH_FRAMES),
|
||||
_isStarved(true),
|
||||
_hasStarted(false),
|
||||
_isStereo(isStereo)
|
||||
_hasStarted(false)
|
||||
{
|
||||
_buffer = new int16_t[RING_BUFFER_LENGTH_SAMPLES];
|
||||
_nextOutput = _buffer;
|
||||
if (numFrameSamples) {
|
||||
_buffer = new int16_t[_sampleCapacity];
|
||||
_nextOutput = _buffer;
|
||||
_endOfLastWrite = _buffer;
|
||||
} else {
|
||||
_buffer = NULL;
|
||||
_nextOutput = NULL;
|
||||
_endOfLastWrite = NULL;
|
||||
}
|
||||
};
|
||||
|
||||
AudioRingBuffer::~AudioRingBuffer() {
|
||||
|
@ -32,53 +40,130 @@ void AudioRingBuffer::reset() {
|
|||
_endOfLastWrite = _buffer;
|
||||
_nextOutput = _buffer;
|
||||
_isStarved = true;
|
||||
_hasStarted = false;
|
||||
}
|
||||
|
||||
void AudioRingBuffer::resizeForFrameSize(qint64 numFrameSamples) {
|
||||
delete[] _buffer;
|
||||
_sampleCapacity = numFrameSamples * RING_BUFFER_LENGTH_FRAMES;
|
||||
_buffer = new int16_t[_sampleCapacity];
|
||||
_nextOutput = _buffer;
|
||||
_endOfLastWrite = _buffer;
|
||||
}
|
||||
|
||||
int AudioRingBuffer::parseData(unsigned char* sourceBuffer, int numBytes) {
int numBytesPacketHeader = numBytesForPacketHeader(sourceBuffer);
return parseAudioSamples(sourceBuffer + numBytesPacketHeader, numBytes - numBytesPacketHeader);
return writeData((char*) sourceBuffer + numBytesPacketHeader, numBytes - numBytesPacketHeader);
}

int AudioRingBuffer::parseAudioSamples(unsigned char* sourceBuffer, int numBytes) {
qint64 AudioRingBuffer::readSamples(int16_t* destination, qint64 maxSamples) {
return readData((char*) destination, maxSamples * sizeof(int16_t));
}

qint64 AudioRingBuffer::readData(char *data, qint64 maxSize) {

// only copy up to the number of samples we have available
int numReadSamples = std::min((unsigned) (maxSize / sizeof(int16_t)), samplesAvailable());

if (_nextOutput + numReadSamples > _buffer + _sampleCapacity) {
// we're going to need to do two reads to get this data, it wraps around the edge

// read to the end of the buffer
int numSamplesToEnd = (_buffer + _sampleCapacity) - _nextOutput;
memcpy(data, _nextOutput, numSamplesToEnd * sizeof(int16_t));

// read the rest from the beginning of the buffer
memcpy(data + numSamplesToEnd, _buffer, (numReadSamples - numSamplesToEnd) * sizeof(int16_t));
} else {
// read the data
memcpy(data, _nextOutput, numReadSamples * sizeof(int16_t));
}

// push the position of _nextOutput by the number of samples read
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numReadSamples);

return numReadSamples * sizeof(int16_t);
}
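The read above happens in at most two pieces because the readable region can wrap past the end of the backing array. A minimal index-based sketch of the same two-copy pattern, assuming a fixed-capacity int16_t ring (illustrative names, not the class in this commit):

#include <algorithm>
#include <cstdint>
#include <cstring>

// Copy up to maxSamples out of a ring buffer, wrapping at 'capacity'.
// Returns the number of samples actually copied.
int ringRead(const int16_t* ring, int capacity, int& readPos, int available,
             int16_t* dest, int maxSamples) {
    int toRead = std::min(maxSamples, available);
    int firstChunk = std::min(toRead, capacity - readPos);    // up to the end of the array
    memcpy(dest, ring + readPos, firstChunk * sizeof(int16_t));
    memcpy(dest + firstChunk, ring, (toRead - firstChunk) * sizeof(int16_t)); // wrapped remainder
    readPos = (readPos + toRead) % capacity;                  // advance and wrap
    return toRead;
}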
qint64 AudioRingBuffer::writeSamples(const int16_t* source, qint64 maxSamples) {
|
||||
return writeData((const char*) source, maxSamples * sizeof(int16_t));
|
||||
}
|
||||
|
||||
qint64 AudioRingBuffer::writeData(const char* data, qint64 maxSize) {
|
||||
// make sure we have enough bytes left for this to be the right amount of audio
|
||||
// otherwise we should not copy that data, and leave the buffer pointers where they are
|
||||
int samplesToCopy = BUFFER_LENGTH_SAMPLES_PER_CHANNEL * (_isStereo ? 2 : 1);
|
||||
|
||||
if (numBytes == samplesToCopy * sizeof(int16_t)) {
|
||||
|
||||
if (!_endOfLastWrite) {
|
||||
_endOfLastWrite = _buffer;
|
||||
} else if (diffLastWriteNextOutput() > RING_BUFFER_LENGTH_SAMPLES - samplesToCopy) {
|
||||
_endOfLastWrite = _buffer;
|
||||
_nextOutput = _buffer;
|
||||
_isStarved = true;
|
||||
}
|
||||
|
||||
memcpy(_endOfLastWrite, sourceBuffer, numBytes);
|
||||
|
||||
_endOfLastWrite += samplesToCopy;
|
||||
|
||||
if (_endOfLastWrite >= _buffer + RING_BUFFER_LENGTH_SAMPLES) {
|
||||
_endOfLastWrite = _buffer;
|
||||
}
|
||||
|
||||
return numBytes;
|
||||
int samplesToCopy = std::min(maxSize / sizeof(int16_t), (quint64) _sampleCapacity);
|
||||
|
||||
std::less<int16_t*> less;
|
||||
std::less_equal<int16_t*> lessEqual;
|
||||
|
||||
if (_hasStarted
|
||||
&& (less(_endOfLastWrite, _nextOutput)
|
||||
&& lessEqual(_nextOutput, shiftedPositionAccomodatingWrap(_endOfLastWrite, samplesToCopy)))) {
|
||||
// this read will cross the next output, so call us starved and reset the buffer
|
||||
qDebug() << "Filled the ring buffer. Resetting.\n";
|
||||
_endOfLastWrite = _buffer;
|
||||
_nextOutput = _buffer;
|
||||
_isStarved = true;
|
||||
}
|
||||
|
||||
_hasStarted = true;
|
||||
|
||||
if (_endOfLastWrite + samplesToCopy <= _buffer + _sampleCapacity) {
|
||||
memcpy(_endOfLastWrite, data, samplesToCopy * sizeof(int16_t));
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite;
|
||||
memcpy(_endOfLastWrite, data, numSamplesToEnd * sizeof(int16_t));
|
||||
memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (samplesToCopy - numSamplesToEnd) * sizeof(int16_t));
|
||||
}
|
||||
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, samplesToCopy);
|
||||
|
||||
return samplesToCopy * sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::diffLastWriteNextOutput() const {
int16_t& AudioRingBuffer::operator[](const int index) {
// make sure this is a valid index
assert(index > -_sampleCapacity && index < _sampleCapacity);

return *shiftedPositionAccomodatingWrap(_nextOutput, index);
}

void AudioRingBuffer::shiftReadPosition(unsigned int numSamples) {
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples);
}

unsigned int AudioRingBuffer::samplesAvailable() const {
if (!_endOfLastWrite) {
return 0;
} else {
int sampleDifference = _endOfLastWrite - _nextOutput;

if (sampleDifference < 0) {
sampleDifference += RING_BUFFER_LENGTH_SAMPLES;
sampleDifference += _sampleCapacity;
}

return sampleDifference;
}
}

bool AudioRingBuffer::isNotStarvedOrHasMinimumSamples(unsigned int numRequiredSamples) const {
if (!_isStarved) {
return true;
} else {
return samplesAvailable() >= numRequiredSamples;
}
}

int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const {

if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _sampleCapacity) {
// this shift will wrap the position around to the beginning of the ring
return position + numSamplesShift - _sampleCapacity;
} else if (numSamplesShift < 0 && position + numSamplesShift < _buffer) {
// this shift will go around to the end of the ring
return position + numSamplesShift + _sampleCapacity;
} else {
return position + numSamplesShift;
}
}
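samplesAvailable() above is the write/read pointer distance, corrected by one full capacity when the write position has wrapped behind the read position. The same arithmetic with plain indices, plus a small worked check (assumed values, not from this commit):

// Index-based equivalent of samplesAvailable(): distance from read to write, modulo capacity.
int samplesAvailable(int writePos, int readPos, int capacity) {
    int difference = writePos - readPos;
    return difference >= 0 ? difference : difference + capacity;
}
// e.g. capacity = 10, readPos = 7, writePos = 3  ->  3 - 7 + 10 = 6 samples available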
|
||||
|
|
|
@ -9,61 +9,69 @@
|
|||
#ifndef __interface__AudioRingBuffer__
|
||||
#define __interface__AudioRingBuffer__
|
||||
|
||||
#include <limits>
|
||||
#include <stdint.h>
|
||||
#include <map>
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
|
||||
#include <QtCore/QIODevice>
|
||||
|
||||
#include "NodeData.h"
|
||||
|
||||
const int SAMPLE_RATE = 22050;
|
||||
const int SAMPLE_RATE = 24000;
|
||||
|
||||
const int BUFFER_LENGTH_BYTES_STEREO = 1024;
|
||||
const int BUFFER_LENGTH_BYTES_PER_CHANNEL = 512;
|
||||
const int BUFFER_LENGTH_SAMPLES_PER_CHANNEL = BUFFER_LENGTH_BYTES_PER_CHANNEL / sizeof(int16_t);
|
||||
const int NETWORK_BUFFER_LENGTH_BYTES_STEREO = 1024;
|
||||
const int NETWORK_BUFFER_LENGTH_SAMPLES_STEREO = NETWORK_BUFFER_LENGTH_BYTES_STEREO / sizeof(int16_t);
|
||||
const int NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL = 512;
|
||||
const int NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL / sizeof(int16_t);
|
||||
|
||||
const short RING_BUFFER_LENGTH_FRAMES = 20;
|
||||
const short RING_BUFFER_LENGTH_SAMPLES = RING_BUFFER_LENGTH_FRAMES * BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
|
||||
const short RING_BUFFER_LENGTH_FRAMES = 10;
|
||||
|
||||
const int MAX_SAMPLE_VALUE = std::numeric_limits<int16_t>::max();
|
||||
const int MIN_SAMPLE_VALUE = std::numeric_limits<int16_t>::min();
|
||||
|
||||
class AudioRingBuffer : public NodeData {
|
||||
Q_OBJECT
|
||||
public:
|
||||
AudioRingBuffer(bool isStereo);
|
||||
AudioRingBuffer(int numFrameSamples);
|
||||
~AudioRingBuffer();
|
||||
|
||||
void reset();
|
||||
|
||||
void resizeForFrameSize(qint64 numFrameSamples);
|
||||
|
||||
int getSampleCapacity() const { return _sampleCapacity; }
|
||||
|
||||
int parseData(unsigned char* sourceBuffer, int numBytes);
|
||||
int parseAudioSamples(unsigned char* sourceBuffer, int numBytes);
|
||||
|
||||
int16_t* getNextOutput() const { return _nextOutput; }
|
||||
void setNextOutput(int16_t* nextOutput) { _nextOutput = nextOutput; }
|
||||
qint64 readSamples(int16_t* destination, qint64 maxSamples);
|
||||
qint64 writeSamples(const int16_t* source, qint64 maxSamples);
|
||||
|
||||
int16_t* getEndOfLastWrite() const { return _endOfLastWrite; }
|
||||
void setEndOfLastWrite(int16_t* endOfLastWrite) { _endOfLastWrite = endOfLastWrite; }
|
||||
qint64 readData(char* data, qint64 maxSize);
|
||||
qint64 writeData(const char* data, qint64 maxSize);
|
||||
|
||||
int16_t* getBuffer() const { return _buffer; }
|
||||
int16_t& operator[](const int index);
|
||||
|
||||
void shiftReadPosition(unsigned int numSamples);
|
||||
|
||||
unsigned int samplesAvailable() const;
|
||||
|
||||
bool isNotStarvedOrHasMinimumSamples(unsigned int numRequiredSamples) const;
|
||||
|
||||
bool isStarved() const { return _isStarved; }
|
||||
void setIsStarved(bool isStarved) { _isStarved = isStarved; }
|
||||
|
||||
bool hasStarted() const { return _hasStarted; }
|
||||
void setHasStarted(bool hasStarted) { _hasStarted = hasStarted; }
|
||||
|
||||
int diffLastWriteNextOutput() const;
|
||||
|
||||
bool isStereo() const { return _isStereo; }
|
||||
|
||||
protected:
|
||||
// disallow copying of AudioRingBuffer objects
|
||||
AudioRingBuffer(const AudioRingBuffer&);
|
||||
AudioRingBuffer& operator= (const AudioRingBuffer&);
|
||||
|
||||
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
|
||||
|
||||
int _sampleCapacity;
|
||||
int16_t* _nextOutput;
|
||||
int16_t* _endOfLastWrite;
|
||||
int16_t* _buffer;
|
||||
bool _isStarved;
|
||||
bool _hasStarted;
|
||||
bool _isStereo;
|
||||
};
|
||||
|
||||
#endif /* defined(__interface__AudioRingBuffer__) */
|
||||
|
|
|
@ -42,7 +42,7 @@ int InjectedAudioRingBuffer::parseData(unsigned char* sourceBuffer, int numBytes
|
|||
unsigned int attenuationByte = *(currentBuffer++);
|
||||
_attenuationRatio = attenuationByte / (float) MAX_INJECTOR_VOLUME;
|
||||
|
||||
currentBuffer += parseAudioSamples(currentBuffer, numBytes - (currentBuffer - sourceBuffer));
|
||||
currentBuffer += writeData((char*) currentBuffer, numBytes - (currentBuffer - sourceBuffer));
|
||||
|
||||
return currentBuffer - sourceBuffer;
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
#include "PositionalAudioRingBuffer.h"
|
||||
|
||||
PositionalAudioRingBuffer::PositionalAudioRingBuffer(PositionalAudioRingBuffer::Type type) :
|
||||
AudioRingBuffer(false),
|
||||
AudioRingBuffer(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL),
|
||||
_type(type),
|
||||
_position(0.0f, 0.0f, 0.0f),
|
||||
_orientation(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
|
@ -31,7 +31,7 @@ int PositionalAudioRingBuffer::parseData(unsigned char* sourceBuffer, int numByt
|
|||
unsigned char* currentBuffer = sourceBuffer + numBytesForPacketHeader(sourceBuffer);
|
||||
currentBuffer += NUM_BYTES_RFC4122_UUID; // the source UUID
|
||||
currentBuffer += parsePositionalData(currentBuffer, numBytes - (currentBuffer - sourceBuffer));
|
||||
currentBuffer += parseAudioSamples(currentBuffer, numBytes - (currentBuffer - sourceBuffer));
|
||||
currentBuffer += writeData((char*) currentBuffer, numBytes - (currentBuffer - sourceBuffer));
|
||||
|
||||
return currentBuffer - sourceBuffer;
|
||||
}
|
||||
|
@ -47,8 +47,7 @@ int PositionalAudioRingBuffer::parsePositionalData(unsigned char* sourceBuffer,
|
|||
|
||||
// if this node sent us a NaN for first float in orientation then don't consider this good audio and bail
|
||||
if (std::isnan(_orientation.x)) {
|
||||
_endOfLastWrite = _nextOutput = _buffer;
|
||||
_isStarved = true;
|
||||
reset();
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
@ -56,20 +55,17 @@ int PositionalAudioRingBuffer::parsePositionalData(unsigned char* sourceBuffer,
|
|||
}
|
||||
|
||||
bool PositionalAudioRingBuffer::shouldBeAddedToMix(int numJitterBufferSamples) {
if (_endOfLastWrite) {
if (_isStarved && diffLastWriteNextOutput() <= BUFFER_LENGTH_SAMPLES_PER_CHANNEL + numJitterBufferSamples) {
printf("Buffer held back\n");
return false;
} else if (diffLastWriteNextOutput() < BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
printf("Buffer starved.\n");
_isStarved = true;
return false;
} else {
// good buffer, add this to the mix
_isStarved = false;
_hasStarted = true;
return true;
}
if (!isNotStarvedOrHasMinimumSamples(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL + numJitterBufferSamples)) {
qDebug() << "Starved and do not have minimum samples to start. Buffer held back.\n";
return false;
} else if (samplesAvailable() < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
qDebug() << "Do not have number of samples needed for interval. Buffer starved.\n";
_isStarved = true;
return false;
} else {
// good buffer, add this to the mix
_isStarved = false;
return true;
}

return false;
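The rewritten check above gates a stream on two thresholds: a starved stream must accumulate one network frame plus the jitter buffer before it re-enters the mix, and any stream must hold at least one full frame to be mixed this interval. A minimal sketch of that policy with plain sample counts (illustrative names, not the class in this commit):

// Decide whether a per-source stream has enough buffered audio to be mixed this interval.
bool shouldMix(bool& isStarved, int samplesAvailable, int samplesPerFrame, int jitterBufferSamples) {
    if (isStarved && samplesAvailable < samplesPerFrame + jitterBufferSamples) {
        return false;                      // starved: wait for the jitter buffer to refill
    }
    if (samplesAvailable < samplesPerFrame) {
        isStarved = true;                  // ran dry mid-stream: mark starved and skip
        return false;
    }
    isStarved = false;
    return true;                           // enough audio for this mix interval
}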
|
||||
|
|
|
@ -98,6 +98,9 @@ public:
|
|||
|
||||
const QUuid& getLeaderUUID() const { return _leaderUUID; }
|
||||
|
||||
const HeadData* getHeadData() const { return _headData; }
|
||||
const HandData* getHandData() const { return _handData; }
|
||||
|
||||
void setHeadData(HeadData* headData) { _headData = headData; }
|
||||
void setHandData(HandData* handData) { _handData = handData; }
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
#include "HandData.h"
|
||||
#include "AvatarData.h"
|
||||
#include <SharedUtil.h>
|
||||
#include <GeometryUtil.h>
|
||||
|
||||
|
||||
// When converting between fixed and float, use this as the radix.
|
||||
|
@ -37,22 +38,20 @@ PalmData& HandData::addNewPalm() {
|
|||
return _palms.back();
|
||||
}
|
||||
|
||||
const int SIXENSE_CONTROLLER_ID_LEFT_HAND = 0;
|
||||
const int SIXENSE_CONTROLLER_ID_RIGHT_HAND = 1;
|
||||
|
||||
void HandData::getLeftRightPalmIndices(int& leftPalmIndex, int& rightPalmIndex) const {
|
||||
leftPalmIndex = -1;
|
||||
float leftPalmX = FLT_MAX;
|
||||
rightPalmIndex = -1;
|
||||
float rightPalmX = -FLT_MAX;
|
||||
rightPalmIndex = -1;
|
||||
for (int i = 0; i < _palms.size(); i++) {
|
||||
const PalmData& palm = _palms[i];
|
||||
if (palm.isActive()) {
|
||||
float x = palm.getRawPosition().x;
|
||||
if (x < leftPalmX) {
|
||||
if (palm.getSixenseID() == SIXENSE_CONTROLLER_ID_LEFT_HAND) {
|
||||
leftPalmIndex = i;
|
||||
leftPalmX = x;
|
||||
}
|
||||
if (x > rightPalmX) {
|
||||
if (palm.getSixenseID() == SIXENSE_CONTROLLER_ID_RIGHT_HAND) {
|
||||
rightPalmIndex = i;
|
||||
rightPalmX = x;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -70,7 +69,8 @@ _leapID(LEAPID_INVALID),
|
|||
_sixenseID(SIXENSEID_INVALID),
|
||||
_numFramesWithoutData(0),
|
||||
_owningHandData(owningHandData),
|
||||
_isCollidingWithVoxel(false)
|
||||
_isCollidingWithVoxel(false),
|
||||
_isCollidingWithPalm(false)
|
||||
{
|
||||
for (int i = 0; i < NUM_FINGERS_PER_HAND; ++i) {
|
||||
_fingers.push_back(FingerData(this, owningHandData));
|
||||
|
@ -223,6 +223,25 @@ void HandData::updateFingerTrails() {
|
|||
}
|
||||
}
|
||||
|
||||
bool HandData::findSpherePenetration(const glm::vec3& penetratorCenter, float penetratorRadius, glm::vec3& penetration,
|
||||
const PalmData*& collidingPalm) const {
|
||||
|
||||
for (size_t i = 0; i < _palms.size(); ++i) {
|
||||
const PalmData& palm = _palms[i];
|
||||
if (!palm.isActive()) {
|
||||
continue;
|
||||
}
|
||||
glm::vec3 palmPosition = palm.getPosition();
|
||||
const float PALM_RADIUS = 0.05f; // in world (not voxel) coordinates
|
||||
if (findSphereSpherePenetration(penetratorCenter, penetratorRadius, palmPosition, PALM_RADIUS, penetration)) {
|
||||
collidingPalm = &palm;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
void FingerData::setTrailLength(unsigned int length) {
|
||||
_tipTrailPositions.resize(length);
|
||||
_tipTrailCurrentStartIndex = 0;
|
||||
|
@ -265,3 +284,4 @@ const glm::vec3& FingerData::getTrailPosition(int index) {
|
|||
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -68,6 +68,15 @@ public:
|
|||
int encodeRemoteData(unsigned char* destinationBuffer);
|
||||
int decodeRemoteData(unsigned char* sourceBuffer);
|
||||
|
||||
/// Checks for penetration between the described sphere and the hand.
|
||||
/// \param penetratorCenter the center of the penetration test sphere
|
||||
/// \param penetratorRadius the radius of the penetration test sphere
|
||||
/// \param penetration[out] the vector in which to store the penetration
|
||||
/// \param collidingPalm[out] a const PalmData* to the palm that was collided with
|
||||
/// \return whether or not the sphere penetrated
|
||||
bool findSpherePenetration(const glm::vec3& penetratorCenter, float penetratorRadius, glm::vec3& penetration,
|
||||
const PalmData*& collidingPalm) const;
|
||||
|
||||
friend class AvatarData;
|
||||
protected:
|
||||
glm::vec3 _basePosition; // Hands are placed relative to this
|
||||
|
@ -122,8 +131,9 @@ private:
|
|||
class PalmData {
|
||||
public:
|
||||
PalmData(HandData* owningHandData);
|
||||
glm::vec3 getPosition() const { return _owningHandData->leapPositionToWorldPosition(_rawPosition); }
|
||||
glm::vec3 getNormal() const { return _owningHandData->leapDirectionToWorldDirection(_rawNormal); }
|
||||
glm::vec3 getPosition() const { return _owningHandData->leapPositionToWorldPosition(_rawPosition); }
|
||||
glm::vec3 getNormal() const { return _owningHandData->leapDirectionToWorldDirection(_rawNormal); }
|
||||
glm::vec3 getVelocity() const { return _owningHandData->leapDirectionToWorldDirection(_rawVelocity); }
|
||||
|
||||
const glm::vec3& getRawPosition() const { return _rawPosition; }
|
||||
const glm::vec3& getRawNormal() const { return _rawNormal; }
|
||||
|
@ -140,7 +150,7 @@ public:
|
|||
void setSixenseID(int id) { _sixenseID = id; }
|
||||
|
||||
void setRawRotation(const glm::quat rawRotation) { _rawRotation = rawRotation; };
|
||||
const glm::quat getRawRotation() const { return _rawRotation; }
|
||||
glm::quat getRawRotation() const { return _rawRotation; }
|
||||
void setRawPosition(const glm::vec3& pos) { _rawPosition = pos; }
|
||||
void setRawNormal(const glm::vec3& normal) { _rawNormal = normal; }
|
||||
void setRawVelocity(const glm::vec3& velocity) { _rawVelocity = velocity; }
|
||||
|
@ -148,7 +158,9 @@ public:
|
|||
void addToPosition(const glm::vec3& delta);
|
||||
|
||||
void setTipPosition(const glm::vec3& position) { _tipPosition = position; }
|
||||
const glm::vec3 getTipPosition() const { return _tipPosition; }
|
||||
const glm::vec3 getTipPosition() const { return _owningHandData->leapPositionToWorldPosition(_tipPosition); }
|
||||
const glm::vec3 getTipRawPosition() const { return _tipPosition; }
|
||||
|
||||
const glm::vec3& getTipVelocity() const { return _tipVelocity; }
|
||||
void setTipVelocity(const glm::vec3& velocity) { _tipVelocity = velocity; }
|
||||
|
||||
|
@ -172,6 +184,9 @@ public:
|
|||
bool getIsCollidingWithVoxel() { return _isCollidingWithVoxel; }
|
||||
void setIsCollidingWithVoxel(bool isCollidingWithVoxel) { _isCollidingWithVoxel = isCollidingWithVoxel; }
|
||||
|
||||
bool getIsCollidingWithPalm() { return _isCollidingWithPalm; }
|
||||
void setIsCollidingWithPalm(bool isCollidingWithPalm) { _isCollidingWithPalm = isCollidingWithPalm; }
|
||||
|
||||
private:
|
||||
std::vector<FingerData> _fingers;
|
||||
glm::quat _rawRotation;
|
||||
|
@ -195,6 +210,7 @@ private:
|
|||
HandData* _owningHandData;
|
||||
|
||||
bool _isCollidingWithVoxel; /// Whether the finger of this palm is inside a leaf voxel
|
||||
bool _isCollidingWithPalm;
|
||||
|
||||
};
|
||||
|
||||
|
|
|
@ -44,3 +44,11 @@ void HeadData::addLean(float sideways, float forwards) {
|
|||
_leanSideways += sideways;
|
||||
_leanForward += forwards;
|
||||
}
|
||||
|
||||
bool HeadData::findSpherePenetration(const glm::vec3& penetratorCenter, float penetratorRadius, glm::vec3& penetration) const {
|
||||
// we would like to update this to determine collisions/penetrations with the Avatar's head sphere...
|
||||
// but right now it does not appear as if the HeadData has a position and radius.
|
||||
// this is a placeholder for now.
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
|
@ -58,6 +58,13 @@ public:
|
|||
void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; }
|
||||
|
||||
friend class AvatarData;
|
||||
|
||||
/// Checks for penetration between the described sphere and the hand.
|
||||
/// \param penetratorCenter the center of the penetration test sphere
|
||||
/// \param penetratorRadius the radius of the penetration test sphere
|
||||
/// \param penetration[out] the vector in which to store the penetration
|
||||
/// \return whether or not the sphere penetrated
|
||||
bool findSpherePenetration(const glm::vec3& penetratorCenter, float penetratorRadius, glm::vec3& penetration) const;
|
||||
|
||||
protected:
|
||||
float _yaw;
|
||||
|
|
|
@ -1299,6 +1299,7 @@ int Octree::encodeTreeBitstreamRecursion(OctreeElement* node,
|
|||
}
|
||||
|
||||
bool Octree::readFromSVOFile(const char* fileName) {
|
||||
bool fileOk = false;
|
||||
std::ifstream file(fileName, std::ios::in|std::ios::binary|std::ios::ate);
|
||||
if(file.is_open()) {
|
||||
emit importSize(1.0f, 1.0f, 1.0f);
|
||||
|
@ -1314,16 +1315,44 @@ bool Octree::readFromSVOFile(const char* fileName) {
|
|||
unsigned char* entireFile = new unsigned char[fileLength];
|
||||
file.read((char*)entireFile, fileLength);
|
||||
bool wantImportProgress = true;
|
||||
ReadBitstreamToTreeParams args(WANT_COLOR, NO_EXISTS_BITS, NULL, 0, wantImportProgress);
|
||||
readBitstreamToTree(entireFile, fileLength, args);
|
||||
|
||||
unsigned char* dataAt = entireFile;
|
||||
unsigned long dataLength = fileLength;
|
||||
|
||||
// before reading the file, check to see if this version of the Octree supports file versions
|
||||
if (getWantSVOfileVersions()) {
|
||||
// if so, read the first byte of the file and see if it matches the expected version code
|
||||
PACKET_TYPE expectedType = expectedDataPacketType();
|
||||
PACKET_TYPE gotType = *dataAt;
|
||||
if (gotType == expectedType) {
|
||||
dataAt += sizeof(expectedType);
|
||||
dataLength -= sizeof(expectedType);
|
||||
PACKET_VERSION expectedVersion = versionForPacketType(expectedType);
|
||||
PACKET_VERSION gotVersion = *dataAt;
|
||||
if (gotVersion == expectedVersion) {
|
||||
dataAt += sizeof(expectedVersion);
|
||||
dataLength -= sizeof(expectedVersion);
|
||||
fileOk = true;
|
||||
} else {
|
||||
qDebug("SVO file version mismatch. Expected: %d Got: %d\n", expectedVersion, gotVersion);
|
||||
}
|
||||
} else {
|
||||
qDebug("SVO file type mismatch. Expected: %c Got: %c\n", expectedType, gotType);
|
||||
}
|
||||
} else {
|
||||
fileOk = true; // assume the file is ok
|
||||
}
|
||||
if (fileOk) {
|
||||
ReadBitstreamToTreeParams args(WANT_COLOR, NO_EXISTS_BITS, NULL, 0, wantImportProgress);
|
||||
readBitstreamToTree(dataAt, dataLength, args);
|
||||
}
|
||||
delete[] entireFile;
|
||||
|
||||
emit importProgress(100);
|
||||
|
||||
file.close();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
return fileOk;
|
||||
}
|
||||
|
||||
void Octree::writeToSVOFile(const char* fileName, OctreeElement* node) {
|
||||
|
@ -1332,7 +1361,16 @@ void Octree::writeToSVOFile(const char* fileName, OctreeElement* node) {
|
|||
|
||||
if(file.is_open()) {
|
||||
qDebug("saving to file %s...\n", fileName);
|
||||
|
||||
|
||||
// before reading the file, check to see if this version of the Octree supports file versions
|
||||
if (getWantSVOfileVersions()) {
|
||||
// if so, read the first byte of the file and see if it matches the expected version code
|
||||
PACKET_TYPE expectedType = expectedDataPacketType();
|
||||
PACKET_VERSION expectedVersion = versionForPacketType(expectedType);
|
||||
file.write(&expectedType, sizeof(expectedType));
|
||||
file.write(&expectedVersion, sizeof(expectedType));
|
||||
}
|
||||
|
||||
OctreeElementBag nodeBag;
|
||||
// If we were given a specific node, start from there, otherwise start from root
|
||||
if (node) {
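
(Aside: the version framing added to readFromSVOFile and writeToSVOFile above boils down to a two-byte prefix, a packet-type byte followed by a packet-version byte, in front of the raw bitstream. A minimal sketch of that framing, using plain unsigned char stand-ins for the project's PACKET_TYPE and PACKET_VERSION typedefs:)

    #include <fstream>

    typedef unsigned char PacketType;     // stand-in for PACKET_TYPE
    typedef unsigned char PacketVersion;  // stand-in for PACKET_VERSION

    // Writing: emit one type byte and one version byte, then the bitstream payload.
    void writeVersionedSVO(std::ofstream& file, PacketType type, PacketVersion version,
                           const char* bitstream, long length) {
        file.write((const char*)&type, sizeof(type));
        file.write((const char*)&version, sizeof(version));
        file.write(bitstream, length);
    }

    // Reading: accept the payload only when both header bytes match the tree's expectations.
    bool readVersionedSVOHeader(const unsigned char*& dataAt, unsigned long& dataLength,
                                PacketType expectedType, PacketVersion expectedVersion) {
        if (dataLength < 2 || dataAt[0] != expectedType || dataAt[1] != expectedVersion) {
            return false;  // type or version mismatch; caller should reject the file
        }
        dataAt += 2;
        dataLength -= 2;
        return true;
    }
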
|
||||
|
|
|
@ -183,6 +183,8 @@ public:
|
|||
|
||||
// These methods will allow the OctreeServer to send your tree inbound edit packets of your
|
||||
// own definition. Implement these to allow your octree based server to support editing
|
||||
virtual bool getWantSVOfileVersions() const { return false; }
|
||||
virtual PACKET_TYPE expectedDataPacketType() const { return PACKET_TYPE_UNKNOWN; }
|
||||
virtual bool handlesEditPacketType(PACKET_TYPE packetType) const { return false; }
|
||||
virtual int processEditPacketData(PACKET_TYPE packetType, unsigned char* packetData, int packetLength,
|
||||
unsigned char* editData, int maxLength, Node* senderNode) { return 0; }
|
||||
|
|
|
@ -35,6 +35,7 @@ OctreeEditPacketSender::OctreeEditPacketSender(PacketSenderNotify* notify) :
|
|||
_serverJurisdictions(NULL),
|
||||
_sequenceNumber(0),
|
||||
_maxPacketSize(MAX_PACKET_SIZE) {
|
||||
//printf("OctreeEditPacketSender::OctreeEditPacketSender() [%p] created... \n", this);
|
||||
}
|
||||
|
||||
OctreeEditPacketSender::~OctreeEditPacketSender() {
|
||||
|
@ -48,6 +49,7 @@ OctreeEditPacketSender::~OctreeEditPacketSender() {
|
|||
delete packet;
|
||||
_preServerPackets.erase(_preServerPackets.begin());
|
||||
}
|
||||
//printf("OctreeEditPacketSender::~OctreeEditPacketSender() [%p] destroyed... \n", this);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ public:
|
|||
};
|
||||
|
||||
/// Utility for processing, packing, queueing and sending of outbound edit messages.
|
||||
class OctreeEditPacketSender : public virtual PacketSender {
|
||||
class OctreeEditPacketSender : public PacketSender {
|
||||
public:
|
||||
OctreeEditPacketSender(PacketSenderNotify* notify = NULL);
|
||||
~OctreeEditPacketSender();
|
||||
|
|
|
@ -305,6 +305,15 @@ bool OctreePacketData::appendValue(const glm::vec3& value) {
|
|||
return success;
|
||||
}
|
||||
|
||||
bool OctreePacketData::appendValue(bool value) {
|
||||
bool success = append((uint8_t)value); // used unsigned char version
|
||||
if (success) {
|
||||
_bytesOfValues++;
|
||||
_totalBytesOfValues++;
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
bool OctreePacketData::appendPosition(const glm::vec3& value) {
|
||||
const unsigned char* data = (const unsigned char*)&value;
|
||||
int length = sizeof(value);
|
||||
|
|
|
@ -130,6 +130,9 @@ public:
|
|||
/// appends a non-position vector to the end of the stream, may fail if new data stream is too long to fit in packet
|
||||
bool appendValue(const glm::vec3& value);
|
||||
|
||||
/// appends a bool value to the end of the stream, may fail if new data stream is too long to fit in packet
|
||||
bool appendValue(bool value);
|
||||
|
||||
/// appends a position to the end of the stream, may fail if new data stream is too long to fit in packet
|
||||
bool appendPosition(const glm::vec3& value);
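
(The new bool overload slots into the same success-chained append idiom that Particle::appendParticleData uses elsewhere in this change; a short sketch, assuming an OctreePacketData* named packetData and a Particle named particle:)

    bool success = packetData->appendValue(particle.getLastEdited());  // uint64_t overload
    if (success) {
        success = packetData->appendValue(particle.getDamping());      // float overload
    }
    if (success) {
        success = packetData->appendValue(particle.getInHand());       // new bool overload
    }
    // a false result means the value did not fit in the current packet
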
|
||||
|
||||
|
|
61
libraries/octree/src/OctreeScriptingInterface.cpp
Normal file
|
@ -0,0 +1,61 @@
|
|||
//
|
||||
// OctreeScriptingInterface.cpp
|
||||
// hifi
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 12/6/13
|
||||
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
|
||||
//
|
||||
|
||||
#include "OctreeScriptingInterface.h"
|
||||
|
||||
OctreeScriptingInterface::OctreeScriptingInterface(OctreeEditPacketSender* packetSender,
|
||||
JurisdictionListener* jurisdictionListener)
|
||||
{
|
||||
setPacketSender(packetSender);
|
||||
setJurisdictionListener(jurisdictionListener);
|
||||
}
|
||||
|
||||
OctreeScriptingInterface::~OctreeScriptingInterface() {
|
||||
//printf("OctreeScriptingInterface::~OctreeScriptingInterface()\n");
|
||||
if (_managedJurisdictionListener) {
|
||||
//printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJurisdictionListener... _jurisdictionListener->terminate()\n");
|
||||
_jurisdictionListener->terminate();
|
||||
//printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJurisdictionListener... deleting _jurisdictionListener\n");
|
||||
delete _jurisdictionListener;
|
||||
}
|
||||
if (_managedPacketSender) {
|
||||
//printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJurisdictionListener... _packetSender->terminate()\n");
|
||||
_packetSender->terminate();
|
||||
//printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedPacketSender... deleting _packetSender\n");
|
||||
delete _packetSender;
|
||||
}
|
||||
}
|
||||
|
||||
void OctreeScriptingInterface::setPacketSender(OctreeEditPacketSender* packetSender) {
|
||||
_packetSender = packetSender;
|
||||
}
|
||||
|
||||
void OctreeScriptingInterface::setJurisdictionListener(JurisdictionListener* jurisdictionListener) {
|
||||
_jurisdictionListener = jurisdictionListener;
|
||||
}
|
||||
|
||||
void OctreeScriptingInterface::init() {
|
||||
//printf("OctreeScriptingInterface::init()\n");
|
||||
if (_jurisdictionListener) {
|
||||
_managedJurisdictionListener = false;
|
||||
} else {
|
||||
_managedJurisdictionListener = true;
|
||||
_jurisdictionListener = new JurisdictionListener(getServerNodeType());
|
||||
//printf("OctreeScriptingInterface::init() _managedJurisdictionListener=true, creating _jurisdictionListener=%p\n", _jurisdictionListener);
|
||||
_jurisdictionListener->initialize(true);
|
||||
}
|
||||
|
||||
if (_packetSender) {
|
||||
_managedPacketSender = false;
|
||||
} else {
|
||||
_managedPacketSender = true;
|
||||
_packetSender = createPacketSender();
|
||||
//printf("OctreeScriptingInterface::init() _managedPacketSender=true, creating _packetSender=%p\n", _packetSender);
|
||||
_packetSender->setServerJurisdictions(_jurisdictionListener->getJurisdictions());
|
||||
}
|
||||
}
|
95
libraries/octree/src/OctreeScriptingInterface.h
Normal file
|
@ -0,0 +1,95 @@
|
|||
//
|
||||
// OctreeScriptingInterface.h
|
||||
// hifi
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 12/6/13
|
||||
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
|
||||
//
|
||||
|
||||
#ifndef __hifi__OctreeScriptingInterface__
|
||||
#define __hifi__OctreeScriptingInterface__
|
||||
|
||||
#include <QtCore/QObject>
|
||||
|
||||
#include "JurisdictionListener.h"
|
||||
#include "OctreeEditPacketSender.h"
|
||||
|
||||
/// handles scripting of Particle commands from JS passed to assigned clients
|
||||
class OctreeScriptingInterface : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
OctreeScriptingInterface(OctreeEditPacketSender* packetSender = NULL,
|
||||
JurisdictionListener* jurisdictionListener = NULL);
|
||||
|
||||
~OctreeScriptingInterface();
|
||||
|
||||
OctreeEditPacketSender* getPacketSender() const { return _packetSender; }
|
||||
JurisdictionListener* getJurisdictionListener() const { return _jurisdictionListener; }
|
||||
void setPacketSender(OctreeEditPacketSender* packetSender);
|
||||
void setJurisdictionListener(JurisdictionListener* jurisdictionListener);
|
||||
void init();
|
||||
|
||||
virtual NODE_TYPE getServerNodeType() const = 0;
|
||||
virtual OctreeEditPacketSender* createPacketSender() = 0;
|
||||
|
||||
public slots:
|
||||
/// Set the desired max packet size in bytes that should be created
|
||||
void setMaxPacketSize(int maxPacketSize) { return _packetSender->setMaxPacketSize(maxPacketSize); }
|
||||
|
||||
/// returns the current desired max packet size in bytes that will be created
|
||||
int getMaxPacketSize() const { return _packetSender->getMaxPacketSize(); }
|
||||
|
||||
/// set the max packets per second send rate
|
||||
void setPacketsPerSecond(int packetsPerSecond) { return _packetSender->setPacketsPerSecond(packetsPerSecond); }
|
||||
|
||||
/// get the max packets per second send rate
|
||||
int getPacketsPerSecond() const { return _packetSender->getPacketsPerSecond(); }
|
||||
|
||||
/// does a particle server exist to send to
|
||||
bool serversExist() const { return _packetSender->serversExist(); }
|
||||
|
||||
/// are there packets waiting in the send queue to be sent
|
||||
bool hasPacketsToSend() const { return _packetSender->hasPacketsToSend(); }
|
||||
|
||||
/// how many packets are there in the send queue waiting to be sent
|
||||
int packetsToSendCount() const { return _packetSender->packetsToSendCount(); }
|
||||
|
||||
/// returns the packets per second send rate of this object over its lifetime
|
||||
float getLifetimePPS() const { return _packetSender->getLifetimePPS(); }
|
||||
|
||||
/// returns the bytes per second send rate of this object over its lifetime
|
||||
float getLifetimeBPS() const { return _packetSender->getLifetimeBPS(); }
|
||||
|
||||
/// returns the packets per second queued rate of this object over its lifetime
|
||||
float getLifetimePPSQueued() const { return _packetSender->getLifetimePPSQueued(); }
|
||||
|
||||
/// returns the bytes per second queued rate of this object over its lifetime
|
||||
float getLifetimeBPSQueued() const { return _packetSender->getLifetimeBPSQueued(); }
|
||||
|
||||
/// returns lifetime of this object from first packet sent to now in usecs
|
||||
long long unsigned int getLifetimeInUsecs() const { return _packetSender->getLifetimeInUsecs(); }
|
||||
|
||||
/// returns lifetime of this object from first packet sent to now in usecs
|
||||
float getLifetimeInSeconds() const { return _packetSender->getLifetimeInSeconds(); }
|
||||
|
||||
/// returns the total packets sent by this object over its lifetime
|
||||
long long unsigned int getLifetimePacketsSent() const { return _packetSender->getLifetimePacketsSent(); }
|
||||
|
||||
/// returns the total bytes sent by this object over its lifetime
|
||||
long long unsigned int getLifetimeBytesSent() const { return _packetSender->getLifetimeBytesSent(); }
|
||||
|
||||
/// returns the total packets queued by this object over its lifetime
|
||||
long long unsigned int getLifetimePacketsQueued() const { return _packetSender->getLifetimePacketsQueued(); }
|
||||
|
||||
/// returns the total bytes queued by this object over its lifetime
|
||||
long long unsigned int getLifetimeBytesQueued() const { return _packetSender->getLifetimeBytesQueued(); }
|
||||
|
||||
protected:
|
||||
/// attached OctreeEditPacketSender that handles queuing and sending of packets to VS
|
||||
OctreeEditPacketSender* _packetSender;
|
||||
JurisdictionListener* _jurisdictionListener;
|
||||
bool _managedPacketSender;
|
||||
bool _managedJurisdictionListener;
|
||||
};
|
||||
|
||||
#endif /* defined(__hifi__OctreeScriptingInterface__) */
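
(The two pure virtuals are the only extension points a subclass has to fill in: which server node type it talks to, and which concrete packet sender to construct. The ParticleScriptingInterface changes later in this diff do exactly that; condensed:)

    class ParticleScriptingInterface : public OctreeScriptingInterface {
        Q_OBJECT
    public:
        virtual NODE_TYPE getServerNodeType() const { return NODE_TYPE_PARTICLE_SERVER; }
        virtual OctreeEditPacketSender* createPacketSender() { return new ParticleEditPacketSender(); }
    };
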
|
|
@ -18,22 +18,23 @@
|
|||
uint32_t Particle::_nextID = 0;
|
||||
|
||||
|
||||
Particle::Particle(glm::vec3 position, float radius, rgbColor color, glm::vec3 velocity,
|
||||
float damping, glm::vec3 gravity, QString updateScript, uint32_t id) {
|
||||
Particle::Particle(glm::vec3 position, float radius, rgbColor color, glm::vec3 velocity, glm::vec3 gravity,
|
||||
float damping, bool inHand, QString updateScript, uint32_t id) {
|
||||
|
||||
init(position, radius, color, velocity, damping, gravity, updateScript, id);
|
||||
init(position, radius, color, velocity, gravity, damping, inHand, updateScript, id);
|
||||
}
|
||||
|
||||
Particle::Particle() {
|
||||
rgbColor noColor = { 0, 0, 0 };
|
||||
init(glm::vec3(0,0,0), 0, noColor, glm::vec3(0,0,0), DEFAULT_DAMPING, DEFAULT_GRAVITY, DEFAULT_SCRIPT, NEW_PARTICLE);
|
||||
init(glm::vec3(0,0,0), 0, noColor, glm::vec3(0,0,0),
|
||||
DEFAULT_GRAVITY, DEFAULT_DAMPING, NOT_IN_HAND, DEFAULT_SCRIPT, NEW_PARTICLE);
|
||||
}
|
||||
|
||||
Particle::~Particle() {
|
||||
}
|
||||
|
||||
void Particle::init(glm::vec3 position, float radius, rgbColor color, glm::vec3 velocity,
|
||||
float damping, glm::vec3 gravity, QString updateScript, uint32_t id) {
|
||||
void Particle::init(glm::vec3 position, float radius, rgbColor color, glm::vec3 velocity, glm::vec3 gravity,
|
||||
float damping, bool inHand, QString updateScript, uint32_t id) {
|
||||
if (id == NEW_PARTICLE) {
|
||||
_created = usecTimestampNow();
|
||||
_id = _nextID;
|
||||
|
@ -42,6 +43,7 @@ void Particle::init(glm::vec3 position, float radius, rgbColor color, glm::vec3
|
|||
_id = id;
|
||||
}
|
||||
_lastUpdated = usecTimestampNow();
|
||||
_lastEdited = _lastUpdated;
|
||||
|
||||
_position = position;
|
||||
_radius = radius;
|
||||
|
@ -50,6 +52,7 @@ void Particle::init(glm::vec3 position, float radius, rgbColor color, glm::vec3
|
|||
_damping = damping;
|
||||
_gravity = gravity;
|
||||
_updateScript = updateScript;
|
||||
_inHand = inHand;
|
||||
}
|
||||
|
||||
|
||||
|
@ -65,6 +68,9 @@ bool Particle::appendParticleData(OctreePacketData* packetData) const {
|
|||
if (success) {
|
||||
success = packetData->appendValue(getLastUpdated());
|
||||
}
|
||||
if (success) {
|
||||
success = packetData->appendValue(getLastEdited());
|
||||
}
|
||||
if (success) {
|
||||
success = packetData->appendValue(getRadius());
|
||||
}
|
||||
|
@ -83,6 +89,9 @@ bool Particle::appendParticleData(OctreePacketData* packetData) const {
|
|||
if (success) {
|
||||
success = packetData->appendValue(getDamping());
|
||||
}
|
||||
if (success) {
|
||||
success = packetData->appendValue(getInHand());
|
||||
}
|
||||
if (success) {
|
||||
uint16_t scriptLength = _updateScript.size() + 1; // include NULL
|
||||
success = packetData->appendValue(scriptLength);
|
||||
|
@ -94,9 +103,9 @@ bool Particle::appendParticleData(OctreePacketData* packetData) const {
|
|||
}
|
||||
|
||||
int Particle::expectedBytes() {
|
||||
int expectedBytes = sizeof(uint32_t) + sizeof(uint64_t) + sizeof(uint64_t) + sizeof(float) +
|
||||
int expectedBytes = sizeof(uint32_t) + sizeof(uint64_t) + sizeof(uint64_t) + sizeof(uint64_t) + sizeof(float) +
|
||||
sizeof(glm::vec3) + sizeof(rgbColor) + sizeof(glm::vec3) +
|
||||
sizeof(glm::vec3) + sizeof(float);
|
||||
sizeof(glm::vec3) + sizeof(float) + sizeof(bool);
|
||||
return expectedBytes;
|
||||
}
|
||||
|
||||
|
@ -120,6 +129,11 @@ int Particle::readParticleDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
dataAt += sizeof(_lastUpdated);
|
||||
bytesRead += sizeof(_lastUpdated);
|
||||
|
||||
// _lastEdited
|
||||
memcpy(&_lastEdited, dataAt, sizeof(_lastEdited));
|
||||
dataAt += sizeof(_lastEdited);
|
||||
bytesRead += sizeof(_lastEdited);
|
||||
|
||||
// radius
|
||||
memcpy(&_radius, dataAt, sizeof(_radius));
|
||||
dataAt += sizeof(_radius);
|
||||
|
@ -150,6 +164,11 @@ int Particle::readParticleDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
dataAt += sizeof(_damping);
|
||||
bytesRead += sizeof(_damping);
|
||||
|
||||
// inHand
|
||||
memcpy(&_inHand, dataAt, sizeof(_inHand));
|
||||
dataAt += sizeof(_inHand);
|
||||
bytesRead += sizeof(_inHand);
|
||||
|
||||
// script
|
||||
uint16_t scriptLength;
|
||||
memcpy(&scriptLength, dataAt, sizeof(scriptLength));
|
||||
|
@ -209,6 +228,11 @@ Particle Particle::fromEditPacket(unsigned char* data, int length, int& processe
|
|||
memcpy(&newParticle._lastUpdated, dataAt, sizeof(newParticle._lastUpdated));
|
||||
dataAt += sizeof(newParticle._lastUpdated);
|
||||
processedBytes += sizeof(newParticle._lastUpdated);
|
||||
|
||||
// lastEdited
|
||||
memcpy(&newParticle._lastEdited, dataAt, sizeof(newParticle._lastEdited));
|
||||
dataAt += sizeof(newParticle._lastEdited);
|
||||
processedBytes += sizeof(newParticle._lastEdited);
|
||||
|
||||
// radius
|
||||
memcpy(&newParticle._radius, dataAt, sizeof(newParticle._radius));
|
||||
|
@ -240,6 +264,11 @@ Particle Particle::fromEditPacket(unsigned char* data, int length, int& processe
|
|||
dataAt += sizeof(newParticle._damping);
|
||||
processedBytes += sizeof(newParticle._damping);
|
||||
|
||||
// inHand
|
||||
memcpy(&newParticle._inHand, dataAt, sizeof(newParticle._inHand));
|
||||
dataAt += sizeof(newParticle._inHand);
|
||||
processedBytes += sizeof(newParticle._inHand);
|
||||
|
||||
// script
|
||||
uint16_t scriptLength;
|
||||
memcpy(&scriptLength, dataAt, sizeof(scriptLength));
|
||||
|
@ -264,6 +293,7 @@ void Particle::debugDump() const {
|
|||
printf("Particle id :%u\n", _id);
|
||||
printf(" created:%llu\n", _created);
|
||||
printf(" last updated:%llu\n", _lastUpdated);
|
||||
printf(" last edited:%llu\n", _lastEdited);
|
||||
printf(" position:%f,%f,%f\n", _position.x, _position.y, _position.z);
|
||||
printf(" velocity:%f,%f,%f\n", _velocity.x, _velocity.y, _velocity.z);
|
||||
printf(" gravity:%f,%f,%f\n", _gravity.x, _gravity.y, _gravity.z);
|
||||
|
@ -322,6 +352,11 @@ bool Particle::encodeParticleEditMessageDetails(PACKET_TYPE command, int count,
|
|||
copyAt += sizeof(details[i].lastUpdated);
|
||||
sizeOut += sizeof(details[i].lastUpdated);
|
||||
|
||||
// lastEdited
|
||||
memcpy(copyAt, &details[i].lastEdited, sizeof(details[i].lastEdited));
|
||||
copyAt += sizeof(details[i].lastEdited);
|
||||
sizeOut += sizeof(details[i].lastEdited);
|
||||
|
||||
// radius
|
||||
memcpy(copyAt, &details[i].radius, sizeof(details[i].radius));
|
||||
copyAt += sizeof(details[i].radius);
|
||||
|
@ -352,6 +387,11 @@ bool Particle::encodeParticleEditMessageDetails(PACKET_TYPE command, int count,
|
|||
copyAt += sizeof(details[i].damping);
|
||||
sizeOut += sizeof(details[i].damping);
|
||||
|
||||
// inHand
|
||||
memcpy(copyAt, &details[i].inHand, sizeof(details[i].inHand));
|
||||
copyAt += sizeof(details[i].inHand);
|
||||
sizeOut += sizeof(details[i].inHand);
|
||||
|
||||
// script
|
||||
uint16_t scriptLength = details[i].updateScript.size() + 1;
|
||||
memcpy(copyAt, &scriptLength, sizeof(scriptLength));
|
||||
|
@ -389,32 +429,39 @@ void Particle::update() {
|
|||
bool isStillMoving = (velocityScalar > STILL_MOVING);
|
||||
const uint64_t REALLY_OLD = 30 * 1000 * 1000;
|
||||
bool isReallyOld = (getLifetime() > REALLY_OLD);
|
||||
bool shouldDie = !isStillMoving && isReallyOld;
|
||||
bool isInHand = getInHand();
|
||||
bool shouldDie = !isInHand && !isStillMoving && isReallyOld;
|
||||
setShouldDie(shouldDie);
|
||||
|
||||
runScript(); // allow the javascript to alter our state
|
||||
|
||||
_position += _velocity * timeElapsed;
|
||||
|
||||
// handle bounces off the ground...
|
||||
if (_position.y <= 0) {
|
||||
_velocity = _velocity * glm::vec3(1,-1,1);
|
||||
_position.y = 0;
|
||||
// If the ball is in hand, it doesn't move or have gravity effect it
|
||||
if (!isInHand) {
|
||||
_position += _velocity * timeElapsed;
|
||||
|
||||
// handle bounces off the ground...
|
||||
if (_position.y <= 0) {
|
||||
_velocity = _velocity * glm::vec3(1,-1,1);
|
||||
_position.y = 0;
|
||||
}
|
||||
|
||||
// handle gravity....
|
||||
_velocity += _gravity * timeElapsed;
|
||||
|
||||
// handle damping
|
||||
glm::vec3 dampingResistance = _velocity * _damping;
|
||||
_velocity -= dampingResistance * timeElapsed;
|
||||
//printf("applying damping to Particle timeElapsed=%f\n",timeElapsed);
|
||||
}
|
||||
|
||||
// handle gravity....
|
||||
_velocity += _gravity * timeElapsed;
|
||||
|
||||
// handle damping
|
||||
glm::vec3 dampingResistance = _velocity * _damping;
|
||||
_velocity -= dampingResistance * timeElapsed;
|
||||
//printf("applying damping to Particle timeElapsed=%f\n",timeElapsed);
|
||||
|
||||
|
||||
_lastUpdated = now;
|
||||
}
|
||||
|
||||
void Particle::runScript() {
|
||||
if (!_updateScript.isEmpty()) {
|
||||
|
||||
//qDebug() << "Script: " << _updateScript << "\n";
|
||||
|
||||
QScriptEngine engine;
|
||||
|
||||
// register meta-type for glm::vec3 and rgbColor conversions
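
(Condensed, the reworked update() above leaves a held particle entirely alone and only integrates free particles; a standalone sketch of that rule, with position and velocity passed by reference and the same glm types:)

    #include <glm/glm.hpp>

    void integrateFreeParticle(glm::vec3& position, glm::vec3& velocity,
                               const glm::vec3& gravity, float damping,
                               bool inHand, float timeElapsed) {
        if (inHand) {
            return;  // a particle held in a hand is driven by the palm, not by physics
        }
        position += velocity * timeElapsed;
        if (position.y <= 0.0f) {                       // bounce off the ground plane
            velocity *= glm::vec3(1.0f, -1.0f, 1.0f);
            position.y = 0.0f;
        }
        velocity += gravity * timeElapsed;              // gravity
        velocity -= velocity * damping * timeElapsed;   // damping resistance
    }
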
|
||||
|
|
|
@ -26,12 +26,14 @@ class ParticleDetail {
|
|||
public:
|
||||
uint32_t id;
|
||||
uint64_t lastUpdated;
|
||||
uint64_t lastEdited;
|
||||
glm::vec3 position;
|
||||
float radius;
|
||||
rgbColor color;
|
||||
glm::vec3 velocity;
|
||||
glm::vec3 gravity;
|
||||
float damping;
|
||||
bool inHand;
|
||||
QString updateScript;
|
||||
uint32_t creatorTokenID;
|
||||
};
|
||||
|
@ -39,32 +41,37 @@ public:
|
|||
const float DEFAULT_DAMPING = 0.99f;
|
||||
const glm::vec3 DEFAULT_GRAVITY(0, (-9.8f / TREE_SCALE), 0);
|
||||
const QString DEFAULT_SCRIPT("");
|
||||
const bool IN_HAND = true; // it's in a hand
|
||||
const bool NOT_IN_HAND = !IN_HAND; // it's not in a hand
|
||||
|
||||
class Particle {
|
||||
|
||||
public:
|
||||
Particle();
|
||||
Particle(glm::vec3 position, float radius, rgbColor color, glm::vec3 velocity,
|
||||
float damping = DEFAULT_DAMPING, glm::vec3 gravity = DEFAULT_GRAVITY, QString updateScript = DEFAULT_SCRIPT,
|
||||
uint32_t id = NEW_PARTICLE);
|
||||
glm::vec3 gravity = DEFAULT_GRAVITY, float damping = DEFAULT_DAMPING, bool inHand = NOT_IN_HAND,
|
||||
QString updateScript = DEFAULT_SCRIPT, uint32_t id = NEW_PARTICLE);
|
||||
|
||||
/// creates an NEW particle from an PACKET_TYPE_PARTICLE_ADD_OR_EDIT edit data buffer
|
||||
static Particle fromEditPacket(unsigned char* data, int length, int& processedBytes);
|
||||
|
||||
virtual ~Particle();
|
||||
virtual void init(glm::vec3 position, float radius, rgbColor color, glm::vec3 velocity,
|
||||
float damping, glm::vec3 gravity, QString updateScript, uint32_t id);
|
||||
glm::vec3 gravity = DEFAULT_GRAVITY, float damping = DEFAULT_DAMPING, bool inHand = NOT_IN_HAND,
|
||||
QString updateScript = DEFAULT_SCRIPT, uint32_t id = NEW_PARTICLE);
|
||||
|
||||
const glm::vec3& getPosition() const { return _position; }
|
||||
const rgbColor& getColor() const { return _color; }
|
||||
xColor getColor() { xColor color = { _color[RED_INDEX], _color[GREEN_INDEX], _color[BLUE_INDEX] }; return color; }
|
||||
xColor getXColor() const { xColor color = { _color[RED_INDEX], _color[GREEN_INDEX], _color[BLUE_INDEX] }; return color; }
|
||||
float getRadius() const { return _radius; }
|
||||
const glm::vec3& getVelocity() const { return _velocity; }
|
||||
const glm::vec3& getGravity() const { return _gravity; }
|
||||
bool getInHand() const { return _inHand; }
|
||||
float getDamping() const { return _damping; }
|
||||
uint64_t getCreated() const { return _created; }
|
||||
uint64_t getLifetime() const { return usecTimestampNow() - _created; }
|
||||
uint64_t getLastUpdated() const { return _lastUpdated; }
|
||||
uint64_t getLastEdited() const { return _lastEdited; }
|
||||
uint32_t getID() const { return _id; }
|
||||
bool getShouldDie() const { return _shouldDie; }
|
||||
QString getUpdateScript() const { return _updateScript; }
|
||||
|
@ -81,12 +88,13 @@ public:
|
|||
}
|
||||
void setRadius(float value) { _radius = value; }
|
||||
void setGravity(const glm::vec3& value) { _gravity = value; }
|
||||
void setInHand(bool inHand) { _inHand = inHand; }
|
||||
void setDamping(float value) { _damping = value; }
|
||||
void setShouldDie(bool shouldDie) { _shouldDie = shouldDie; }
|
||||
void setUpdateScript(QString updateScript) { _updateScript = updateScript; }
|
||||
void setCreatorTokenID(uint32_t creatorTokenID) { _creatorTokenID = creatorTokenID; }
|
||||
void setCreated(uint64_t created) { _created = created; }
|
||||
|
||||
|
||||
bool appendParticleData(OctreePacketData* packetData) const;
|
||||
int readParticleDataFromBuffer(const unsigned char* data, int bytesLeftToRead, ReadBitstreamToTreeParams& args);
|
||||
static int expectedBytes();
|
||||
|
@ -110,12 +118,14 @@ protected:
|
|||
glm::vec3 _velocity;
|
||||
uint64_t _lastUpdated;
|
||||
uint64_t _created;
|
||||
uint64_t _lastEdited;
|
||||
uint32_t _id;
|
||||
static uint32_t _nextID;
|
||||
bool _shouldDie;
|
||||
glm::vec3 _gravity;
|
||||
float _damping;
|
||||
QString _updateScript;
|
||||
bool _inHand;
|
||||
|
||||
uint32_t _creatorTokenID;
|
||||
bool _newlyCreated;
|
||||
|
@ -129,7 +139,7 @@ public:
|
|||
public slots:
|
||||
glm::vec3 getPosition() const { return _particle->getPosition(); }
|
||||
glm::vec3 getVelocity() const { return _particle->getVelocity(); }
|
||||
xColor getColor() const { return _particle->getColor(); }
|
||||
xColor getColor() const { return _particle->getXColor(); }
|
||||
glm::vec3 getGravity() const { return _particle->getGravity(); }
|
||||
float getDamping() const { return _particle->getDamping(); }
|
||||
float getRadius() const { return _particle->getRadius(); }
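
(Call sites have to follow the reordered Particle signature above: gravity now comes before damping, and the new inHand flag sits between damping and updateScript. An illustrative construction using the defaults declared in this header; the values themselves are made up:)

    rgbColor red = { 255, 0, 0 };
    Particle ball(glm::vec3(0.5f, 0.5f, 0.5f),  // position
                  0.01f,                        // radius
                  red,                          // color
                  glm::vec3(0.0f),              // velocity
                  DEFAULT_GRAVITY,              // gravity (now ahead of damping)
                  DEFAULT_DAMPING,              // damping
                  NOT_IN_HAND,                  // inHand (new)
                  DEFAULT_SCRIPT);              // updateScript; id defaults to NEW_PARTICLE
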
|
||||
|
|
|
@ -9,6 +9,9 @@
|
|||
|
||||
#include <AbstractAudioInterface.h>
|
||||
#include <VoxelTree.h>
|
||||
#include <AvatarData.h>
|
||||
#include <HeadData.h>
|
||||
#include <HandData.h>
|
||||
|
||||
#include "Particle.h"
|
||||
#include "ParticleCollisionSystem.h"
|
||||
|
@ -17,16 +20,17 @@
|
|||
#include "ParticleTree.h"
|
||||
|
||||
ParticleCollisionSystem::ParticleCollisionSystem(ParticleEditPacketSender* packetSender,
|
||||
ParticleTree* particles, VoxelTree* voxels, AbstractAudioInterface* audio) {
|
||||
init(packetSender, particles, voxels, audio);
|
||||
ParticleTree* particles, VoxelTree* voxels, AbstractAudioInterface* audio, AvatarData* selfAvatar) {
|
||||
init(packetSender, particles, voxels, audio, selfAvatar);
|
||||
}
|
||||
|
||||
void ParticleCollisionSystem::init(ParticleEditPacketSender* packetSender,
|
||||
ParticleTree* particles, VoxelTree* voxels, AbstractAudioInterface* audio) {
|
||||
ParticleTree* particles, VoxelTree* voxels, AbstractAudioInterface* audio, AvatarData* selfAvatar) {
|
||||
_packetSender = packetSender;
|
||||
_particles = particles;
|
||||
_voxels = voxels;
|
||||
_audio = audio;
|
||||
_selfAvatar = selfAvatar;
|
||||
}
|
||||
|
||||
ParticleCollisionSystem::~ParticleCollisionSystem() {
|
||||
|
@ -59,6 +63,7 @@ void ParticleCollisionSystem::update() {
|
|||
void ParticleCollisionSystem::checkParticle(Particle* particle) {
|
||||
updateCollisionWithVoxels(particle);
|
||||
updateCollisionWithParticles(particle);
|
||||
updateCollisionWithAvatars(particle);
|
||||
}
|
||||
|
||||
void ParticleCollisionSystem::updateCollisionWithVoxels(Particle* particle) {
|
||||
|
@ -91,8 +96,81 @@ void ParticleCollisionSystem::updateCollisionWithParticles(Particle* particle) {
|
|||
}
|
||||
}
|
||||
|
||||
void ParticleCollisionSystem::updateCollisionWithAvatars(Particle* particle) {
|
||||
|
||||
// particles that are in hand, don't collide with other avatar parts
|
||||
if (particle->getInHand()) {
|
||||
return;
|
||||
}
|
||||
|
||||
//printf("updateCollisionWithAvatars()...\n");
|
||||
glm::vec3 center = particle->getPosition() * (float)TREE_SCALE;
|
||||
float radius = particle->getRadius() * (float)TREE_SCALE;
|
||||
const float VOXEL_ELASTICITY = 1.4f;
|
||||
const float VOXEL_DAMPING = 0.0;
|
||||
const float VOXEL_COLLISION_FREQUENCY = 0.5f;
|
||||
glm::vec3 penetration;
|
||||
const PalmData* collidingPalm = NULL;
|
||||
|
||||
// first check the selfAvatar if set...
|
||||
if (_selfAvatar) {
|
||||
AvatarData* avatar = (AvatarData*)_selfAvatar;
|
||||
//printf("updateCollisionWithAvatars()..._selfAvatar=%p\n", avatar);
|
||||
|
||||
// check hands...
|
||||
const HandData* handData = avatar->getHandData();
|
||||
|
||||
// if the particle penetrates the hand, then apply a hard collision
|
||||
if (handData->findSpherePenetration(center, radius, penetration, collidingPalm)) {
|
||||
penetration /= (float)TREE_SCALE;
|
||||
updateCollisionSound(particle, penetration, VOXEL_COLLISION_FREQUENCY);
|
||||
|
||||
// determine if the palm that collided was moving, if so, then we add that palm velocity as well...
|
||||
glm::vec3 addedVelocity = NO_ADDED_VELOCITY;
|
||||
if (collidingPalm) {
|
||||
glm::vec3 palmVelocity = collidingPalm->getVelocity() / (float)TREE_SCALE;
|
||||
//printf("collidingPalm Velocity=%f,%f,%f\n", palmVelocity.x, palmVelocity.y, palmVelocity.z);
|
||||
addedVelocity = palmVelocity;
|
||||
}
|
||||
|
||||
applyHardCollision(particle, penetration, VOXEL_ELASTICITY, VOXEL_DAMPING, addedVelocity);
|
||||
}
|
||||
}
|
||||
|
||||
// loop through all the other avatars for potential interactions...
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
|
||||
//qDebug() << "updateCollisionWithAvatars()... node:" << *node << "\n";
|
||||
if (node->getLinkedData() && node->getType() == NODE_TYPE_AGENT) {
|
||||
AvatarData* avatar = (AvatarData*)node->getLinkedData();
|
||||
//printf("updateCollisionWithAvatars()...avatar=%p\n", avatar);
|
||||
|
||||
// check hands...
|
||||
const HandData* handData = avatar->getHandData();
|
||||
|
||||
// if the particle penetrates the hand, then apply a hard collision
|
||||
if (handData->findSpherePenetration(center, radius, penetration, collidingPalm)) {
|
||||
penetration /= (float)TREE_SCALE;
|
||||
updateCollisionSound(particle, penetration, VOXEL_COLLISION_FREQUENCY);
|
||||
|
||||
// determine if the palm that collided was moving, if so, then we add that palm velocity as well...
|
||||
glm::vec3 addedVelocity = NO_ADDED_VELOCITY;
|
||||
if (collidingPalm) {
|
||||
glm::vec3 palmVelocity = collidingPalm->getVelocity() / (float)TREE_SCALE;
|
||||
//printf("collidingPalm Velocity=%f,%f,%f\n", palmVelocity.x, palmVelocity.y, palmVelocity.z);
|
||||
addedVelocity = palmVelocity;
|
||||
}
|
||||
|
||||
applyHardCollision(particle, penetration, VOXEL_ELASTICITY, VOXEL_DAMPING, addedVelocity);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void ParticleCollisionSystem::applyHardCollision(Particle* particle, const glm::vec3& penetration,
|
||||
float elasticity, float damping) {
|
||||
float elasticity, float damping, const glm::vec3& addedVelocity) {
|
||||
//
|
||||
// Update the avatar in response to a hard collision. Position will be reset exactly
|
||||
// to outside the colliding surface. Velocity will be modified according to elasticity.
|
||||
|
@ -112,6 +190,7 @@ void ParticleCollisionSystem::applyHardCollision(Particle* particle, const glm::
|
|||
if (penetrationLength > EPSILON) {
|
||||
glm::vec3 direction = penetration / penetrationLength;
|
||||
velocity -= glm::dot(velocity, direction) * direction * elasticity;
|
||||
velocity += addedVelocity;
|
||||
velocity *= glm::clamp(1.f - damping, 0.0f, 1.0f);
|
||||
if (glm::length(velocity) < HALTING_VELOCITY) {
|
||||
// If moving really slowly after a collision, and not applying forces, stop altogether
|
||||
|
@ -119,8 +198,8 @@ void ParticleCollisionSystem::applyHardCollision(Particle* particle, const glm::
|
|||
}
|
||||
}
|
||||
ParticleEditHandle particleEditHandle(_packetSender, _particles, particle->getID());
|
||||
particleEditHandle.updateParticle(position, particle->getRadius(), particle->getColor(), velocity,
|
||||
particle->getGravity(), particle->getDamping(), particle->getUpdateScript());
|
||||
particleEditHandle.updateParticle(position, particle->getRadius(), particle->getXColor(), velocity,
|
||||
particle->getGravity(), particle->getDamping(), particle->getInHand(), particle->getUpdateScript());
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -22,18 +22,22 @@
|
|||
#include "Particle.h"
|
||||
|
||||
class AbstractAudioInterface;
|
||||
class AvatarData;
|
||||
class ParticleEditPacketSender;
|
||||
class ParticleTree;
|
||||
class VoxelTree;
|
||||
|
||||
const glm::vec3 NO_ADDED_VELOCITY = glm::vec3(0);
|
||||
|
||||
class ParticleCollisionSystem {
|
||||
public:
|
||||
ParticleCollisionSystem(ParticleEditPacketSender* packetSender = NULL, ParticleTree* particles = NULL,
|
||||
VoxelTree* voxels = NULL,
|
||||
AbstractAudioInterface* audio = NULL);
|
||||
AbstractAudioInterface* audio = NULL,
|
||||
AvatarData* selfAvatar = NULL);
|
||||
|
||||
void init(ParticleEditPacketSender* packetSender, ParticleTree* particles, VoxelTree* voxels,
|
||||
AbstractAudioInterface* audio = NULL);
|
||||
AbstractAudioInterface* audio = NULL, AvatarData* selfAvatar = NULL);
|
||||
|
||||
~ParticleCollisionSystem();
|
||||
|
||||
|
@ -41,7 +45,9 @@ public:
|
|||
void checkParticle(Particle* particle);
|
||||
void updateCollisionWithVoxels(Particle* particle);
|
||||
void updateCollisionWithParticles(Particle* particle);
|
||||
void applyHardCollision(Particle* particle, const glm::vec3& penetration, float elasticity, float damping);
|
||||
void updateCollisionWithAvatars(Particle* particle);
|
||||
void applyHardCollision(Particle* particle, const glm::vec3& penetration, float elasticity, float damping,
|
||||
const glm::vec3& addedVelocity = NO_ADDED_VELOCITY);
|
||||
void updateCollisionSound(Particle* particle, const glm::vec3 &penetration, float frequency);
|
||||
|
||||
private:
|
||||
|
@ -52,6 +58,7 @@ private:
|
|||
ParticleTree* _particles;
|
||||
VoxelTree* _voxels;
|
||||
AbstractAudioInterface* _audio;
|
||||
AvatarData* _selfAvatar;
|
||||
};
|
||||
|
||||
#endif /* defined(__hifi__ParticleCollisionSystem__) */
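
(Wiring sketch for the widened constructor: the new selfAvatar argument is what lets updateCollisionWithAvatars test the local avatar's palms in addition to other agents'. The pointers are assumed to be created and owned by the host application:)

    ParticleCollisionSystem collisions(particleEditPacketSender,  // ParticleEditPacketSender*
                                       particleTree,              // ParticleTree*
                                       voxelTree,                 // VoxelTree*
                                       audioInterface,            // AbstractAudioInterface*
                                       myAvatarData);             // AvatarData* (new argument)

    // per simulation frame: checks voxels, other particles, and avatar hands
    collisions.update();
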
|
|
@ -41,12 +41,13 @@ ParticleEditHandle::~ParticleEditHandle() {
|
|||
}
|
||||
|
||||
void ParticleEditHandle::createParticle(glm::vec3 position, float radius, xColor color, glm::vec3 velocity,
|
||||
glm::vec3 gravity, float damping, QString updateScript) {
|
||||
glm::vec3 gravity, float damping, bool inHand, QString updateScript) {
|
||||
|
||||
// setup a ParticleDetail struct with the data
|
||||
ParticleDetail addParticleDetail = { NEW_PARTICLE, usecTimestampNow(),
|
||||
uint64_t now = usecTimestampNow();
|
||||
ParticleDetail addParticleDetail = { NEW_PARTICLE, now, now,
|
||||
position, radius, {color.red, color.green, color.blue },
|
||||
velocity, gravity, damping, updateScript, _creatorTokenID };
|
||||
velocity, gravity, damping, inHand, updateScript, _creatorTokenID };
|
||||
|
||||
// queue the packet
|
||||
_packetSender->queueParticleEditMessages(PACKET_TYPE_PARTICLE_ADD_OR_EDIT, 1, &addParticleDetail);
|
||||
|
@ -62,16 +63,17 @@ void ParticleEditHandle::createParticle(glm::vec3 position, float radius, xColor
|
|||
}
|
||||
|
||||
bool ParticleEditHandle::updateParticle(glm::vec3 position, float radius, xColor color, glm::vec3 velocity,
|
||||
glm::vec3 gravity, float damping, QString updateScript) {
|
||||
glm::vec3 gravity, float damping, bool inHand, QString updateScript) {
|
||||
|
||||
if (!isKnownID()) {
|
||||
return false; // not allowed until we know the id
|
||||
}
|
||||
|
||||
// setup a ParticleDetail struct with the data
|
||||
ParticleDetail newParticleDetail = { _id, usecTimestampNow(),
|
||||
uint64_t now = usecTimestampNow();
|
||||
ParticleDetail newParticleDetail = { _id, now, now,
|
||||
position, radius, {color.red, color.green, color.blue },
|
||||
velocity, gravity, damping, updateScript, _creatorTokenID };
|
||||
velocity, gravity, damping, inHand, updateScript, _creatorTokenID };
|
||||
|
||||
// queue the packet
|
||||
_packetSender->queueParticleEditMessages(PACKET_TYPE_PARTICLE_ADD_OR_EDIT, 1, &newParticleDetail);
|
||||
|
@ -82,7 +84,7 @@ bool ParticleEditHandle::updateParticle(glm::vec3 position, float radius, xColor
|
|||
// if we have a local tree, also update it...
|
||||
if (_localTree) {
|
||||
rgbColor rcolor = {color.red, color.green, color.blue };
|
||||
Particle tempParticle(position, radius, rcolor, velocity, damping, gravity, updateScript, _id);
|
||||
Particle tempParticle(position, radius, rcolor, velocity, gravity, damping, inHand, updateScript, _id);
|
||||
_localTree->storeParticle(tempParticle);
|
||||
}
|
||||
|
||||
|
|
|
@ -35,10 +35,10 @@ public:
|
|||
bool isKnownID() const { return _isKnownID; }
|
||||
|
||||
void createParticle(glm::vec3 position, float radius, xColor color, glm::vec3 velocity,
|
||||
glm::vec3 gravity, float damping, QString updateScript);
|
||||
glm::vec3 gravity, float damping, bool inHand, QString updateScript);
|
||||
|
||||
bool updateParticle(glm::vec3 position, float radius, xColor color, glm::vec3 velocity,
|
||||
glm::vec3 gravity, float damping, QString updateScript);
|
||||
glm::vec3 gravity, float damping, bool inHand, QString updateScript);
|
||||
|
||||
static void handleAddResponse(unsigned char* packetData , int packetLength);
|
||||
private:
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
#include "Particle.h"
|
||||
|
||||
/// Utility for processing, packing, queueing and sending of outbound edit voxel messages.
|
||||
class ParticleEditPacketSender : public virtual OctreeEditPacketSender {
|
||||
class ParticleEditPacketSender : public OctreeEditPacketSender {
|
||||
public:
|
||||
ParticleEditPacketSender(PacketSenderNotify* notify = NULL) : OctreeEditPacketSender(notify) { }
|
||||
~ParticleEditPacketSender() { }
|
||||
|
|
|
@ -8,29 +8,24 @@
|
|||
|
||||
#include "ParticleScriptingInterface.h"
|
||||
|
||||
ParticleScriptingInterface::ParticleScriptingInterface() :
|
||||
_jurisdictionListener(NODE_TYPE_PARTICLE_SERVER),
|
||||
_nextCreatorTokenID(0)
|
||||
{
|
||||
_jurisdictionListener.initialize(true);
|
||||
_particlePacketSender.setServerJurisdictions(_jurisdictionListener.getJurisdictions());
|
||||
}
|
||||
|
||||
|
||||
void ParticleScriptingInterface::queueParticleAdd(PACKET_TYPE addPacketType, ParticleDetail& addParticleDetails) {
|
||||
_particlePacketSender.queueParticleEditMessages(addPacketType, 1, &addParticleDetails);
|
||||
getParticlePacketSender()->queueParticleEditMessages(addPacketType, 1, &addParticleDetails);
|
||||
}
|
||||
|
||||
uint32_t ParticleScriptingInterface::queueParticleAdd(glm::vec3 position, float radius,
|
||||
xColor color, glm::vec3 velocity, glm::vec3 gravity, float damping, QString updateScript) {
|
||||
unsigned int ParticleScriptingInterface::queueParticleAdd(glm::vec3 position, float radius,
|
||||
xColor color, glm::vec3 velocity, glm::vec3 gravity, float damping, bool inHand, QString updateScript) {
|
||||
|
||||
// The application will keep track of creatorTokenID
|
||||
uint32_t creatorTokenID = _nextCreatorTokenID;
|
||||
_nextCreatorTokenID++;
|
||||
|
||||
// setup a ParticleDetail struct with the data
|
||||
ParticleDetail addParticleDetail = { NEW_PARTICLE, usecTimestampNow(),
|
||||
uint64_t now = usecTimestampNow();
|
||||
ParticleDetail addParticleDetail = { NEW_PARTICLE, now, now,
|
||||
position, radius, {color.red, color.green, color.blue }, velocity,
|
||||
gravity, damping, updateScript, creatorTokenID };
|
||||
gravity, damping, inHand, updateScript, creatorTokenID };
|
||||
|
||||
// queue the packet
|
||||
queueParticleAdd(PACKET_TYPE_PARTICLE_ADD_OR_EDIT, addParticleDetail);
|
||||
|
|
|
@ -12,78 +12,24 @@
|
|||
#include <QtCore/QObject>
|
||||
|
||||
#include <JurisdictionListener.h>
|
||||
#include <OctreeScriptingInterface.h>
|
||||
#include "ParticleEditPacketSender.h"
|
||||
|
||||
/// handles scripting of Particle commands from JS passed to assigned clients
|
||||
class ParticleScriptingInterface : public QObject {
|
||||
class ParticleScriptingInterface : public OctreeScriptingInterface {
|
||||
Q_OBJECT
|
||||
public:
|
||||
ParticleScriptingInterface();
|
||||
|
||||
ParticleEditPacketSender* getParticlePacketSender() { return &_particlePacketSender; }
|
||||
JurisdictionListener* getJurisdictionListener() { return &_jurisdictionListener; }
|
||||
ParticleEditPacketSender* getParticlePacketSender() const { return (ParticleEditPacketSender*)getPacketSender(); }
|
||||
virtual NODE_TYPE getServerNodeType() const { return NODE_TYPE_PARTICLE_SERVER; }
|
||||
virtual OctreeEditPacketSender* createPacketSender() { return new ParticleEditPacketSender(); }
|
||||
|
||||
public slots:
|
||||
/// queues the creation of a Particle which will be sent by calling process on the PacketSender
|
||||
/// returns the creatorTokenID for the newly created particle
|
||||
uint32_t queueParticleAdd(glm::vec3 position, float radius,
|
||||
xColor color, glm::vec3 velocity, glm::vec3 gravity, float damping, QString updateScript);
|
||||
|
||||
/// Set the desired max packet size in bytes that should be created
|
||||
void setMaxPacketSize(int maxPacketSize) { return _particlePacketSender.setMaxPacketSize(maxPacketSize); }
|
||||
|
||||
/// returns the current desired max packet size in bytes that will be created
|
||||
int getMaxPacketSize() const { return _particlePacketSender.getMaxPacketSize(); }
|
||||
|
||||
/// set the max packets per second send rate
|
||||
void setPacketsPerSecond(int packetsPerSecond) { return _particlePacketSender.setPacketsPerSecond(packetsPerSecond); }
|
||||
|
||||
/// get the max packets per second send rate
|
||||
int getPacketsPerSecond() const { return _particlePacketSender.getPacketsPerSecond(); }
|
||||
|
||||
/// does a particle server exist to send to
|
||||
bool serversExist() const { return _particlePacketSender.serversExist(); }
|
||||
|
||||
/// are there packets waiting in the send queue to be sent
|
||||
bool hasPacketsToSend() const { return _particlePacketSender.hasPacketsToSend(); }
|
||||
|
||||
/// how many packets are there in the send queue waiting to be sent
|
||||
int packetsToSendCount() const { return _particlePacketSender.packetsToSendCount(); }
|
||||
|
||||
/// returns the packets per second send rate of this object over its lifetime
|
||||
float getLifetimePPS() const { return _particlePacketSender.getLifetimePPS(); }
|
||||
|
||||
/// returns the bytes per second send rate of this object over its lifetime
|
||||
float getLifetimeBPS() const { return _particlePacketSender.getLifetimeBPS(); }
|
||||
|
||||
/// returns the packets per second queued rate of this object over its lifetime
|
||||
float getLifetimePPSQueued() const { return _particlePacketSender.getLifetimePPSQueued(); }
|
||||
|
||||
/// returns the bytes per second queued rate of this object over its lifetime
|
||||
float getLifetimeBPSQueued() const { return _particlePacketSender.getLifetimeBPSQueued(); }
|
||||
|
||||
/// returns lifetime of this object from first packet sent to now in usecs
|
||||
long long unsigned int getLifetimeInUsecs() const { return _particlePacketSender.getLifetimeInUsecs(); }
|
||||
|
||||
/// returns lifetime of this object from first packet sent to now in usecs
|
||||
float getLifetimeInSeconds() const { return _particlePacketSender.getLifetimeInSeconds(); }
|
||||
|
||||
/// returns the total packets sent by this object over its lifetime
|
||||
long long unsigned int getLifetimePacketsSent() const { return _particlePacketSender.getLifetimePacketsSent(); }
|
||||
|
||||
/// returns the total bytes sent by this object over its lifetime
|
||||
long long unsigned int getLifetimeBytesSent() const { return _particlePacketSender.getLifetimeBytesSent(); }
|
||||
|
||||
/// returns the total packets queued by this object over its lifetime
|
||||
long long unsigned int getLifetimePacketsQueued() const { return _particlePacketSender.getLifetimePacketsQueued(); }
|
||||
|
||||
/// returns the total bytes queued by this object over its lifetime
|
||||
long long unsigned int getLifetimeBytesQueued() const { return _particlePacketSender.getLifetimeBytesQueued(); }
|
||||
unsigned int queueParticleAdd(glm::vec3 position, float radius,
|
||||
xColor color, glm::vec3 velocity, glm::vec3 gravity, float damping, bool inHand, QString updateScript);
|
||||
|
||||
private:
|
||||
/// attached ParticleEditPacketSender that handles queuing and sending of packets to VS
|
||||
ParticleEditPacketSender _particlePacketSender;
|
||||
JurisdictionListener _jurisdictionListener;
|
||||
|
||||
void queueParticleAdd(PACKET_TYPE addPacketType, ParticleDetail& addParticleDetails);
|
||||
|
||||
uint32_t _nextCreatorTokenID;
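
(Because queueParticleAdd is a slot, the same call works from C++ or from a script object bound to a QScriptEngine; the extra bool is the new in-hand flag. A sketch with illustrative values only:)

    ParticleScriptingInterface particles;
    particles.init();  // creates a managed packet sender + jurisdiction listener when none were injected

    xColor green = { 0, 255, 0 };
    unsigned int creatorTokenID = particles.queueParticleAdd(
        glm::vec3(0.5f, 0.5f, 0.5f),   // position
        0.02f,                         // radius
        green,                         // color
        glm::vec3(0.0f, 0.5f, 0.0f),   // velocity
        DEFAULT_GRAVITY,               // gravity
        DEFAULT_DAMPING,               // damping
        NOT_IN_HAND,                   // inHand (new)
        QString(""));                  // updateScript
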
|
||||
|
|
|
@ -78,9 +78,12 @@ bool ParticleTree::findNearPointOperation(OctreeElement* element, void* extraDat
|
|||
FindNearPointArgs* args = static_cast<FindNearPointArgs*>(extraData);
|
||||
ParticleTreeElement* particleTreeElement = static_cast<ParticleTreeElement*>(element);
|
||||
|
||||
glm::vec3 penetration;
|
||||
bool sphereIntersection = particleTreeElement->getAABox().findSpherePenetration(args->position,
|
||||
args->targetRadius, penetration);
|
||||
|
||||
// If this particleTreeElement contains the point, then search it...
|
||||
if (particleTreeElement->getAABox().contains(args->position)) {
|
||||
if (sphereIntersection) {
|
||||
const Particle* thisClosestParticle = particleTreeElement->getClosestParticle(args->position);
|
||||
|
||||
// we may have gotten NULL back, meaning no particle was available
|
||||
|
|
|
@ -36,6 +36,8 @@ public:
|
|||
|
||||
// These methods will allow the OctreeServer to send your tree inbound edit packets of your
|
||||
// own definition. Implement these to allow your octree based server to support editing
|
||||
virtual bool getWantSVOfileVersions() const { return true; }
|
||||
virtual PACKET_TYPE expectedDataPacketType() const { return PACKET_TYPE_PARTICLE_DATA; }
|
||||
virtual bool handlesEditPacketType(PACKET_TYPE packetType) const;
|
||||
virtual int processEditPacketData(PACKET_TYPE packetType, unsigned char* packetData, int packetLength,
|
||||
unsigned char* editData, int maxLength, Node* senderNode);
|
||||
|
|
|
@ -115,15 +115,39 @@ bool ParticleTreeElement::containsParticle(const Particle& particle) const {
|
|||
}
|
||||
|
||||
bool ParticleTreeElement::updateParticle(const Particle& particle) {
|
||||
bool wantDebug = false;
|
||||
uint16_t numberOfParticles = _particles.size();
|
||||
for (uint16_t i = 0; i < numberOfParticles; i++) {
|
||||
if (_particles[i].getID() == particle.getID()) {
|
||||
uint64_t actuallyCreated = particle.getCreated();
|
||||
if (!particle.isNewlyCreated()) {
|
||||
actuallyCreated = _particles[i].getCreated();
|
||||
int difference = _particles[i].getLastUpdated() - particle.getLastUpdated();
|
||||
|
||||
bool changedOnServer = _particles[i].getLastEdited() < particle.getLastEdited();
|
||||
bool localOlder = _particles[i].getLastUpdated() < particle.getLastUpdated();
|
||||
|
||||
if (changedOnServer || localOlder) {
|
||||
|
||||
if (wantDebug) {
|
||||
printf("local particle [id:%d] %s and %s than server particle by %d, particle.isNewlyCreated()=%s\n",
|
||||
particle.getID(), (changedOnServer ? "CHANGED" : "same"),
|
||||
(localOlder ? "OLDER" : "NEWER"),
|
||||
difference, debug::valueOf(particle.isNewlyCreated()) );
|
||||
}
|
||||
|
||||
uint64_t actuallyCreated = particle.getCreated();
|
||||
if (!particle.isNewlyCreated()) {
|
||||
actuallyCreated = _particles[i].getCreated();
|
||||
}
|
||||
_particles[i] = particle;
|
||||
_particles[i].setCreated(actuallyCreated);
|
||||
} else {
|
||||
if (wantDebug) {
|
||||
printf(">>> NO CHANGE <<< -- local particle [id:%d] %s and %s than server particle by %d, "
|
||||
"particle.isNewlyCreated()=%s\n",
|
||||
particle.getID(), (changedOnServer ? "CHANGED" : "same"),
|
||||
(localOlder ? "OLDER" : "NEWER"),
|
||||
difference, debug::valueOf(particle.isNewlyCreated()) );
|
||||
}
|
||||
}
|
||||
_particles[i] = particle;
|
||||
_particles[i].setCreated(actuallyCreated);
|
||||
return true;
|
||||
}
|
||||
}
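
(Condensed, the reworked merge rule above reads as: take the incoming copy only when the server edited it more recently or the local copy is staler, and never let a re-sent particle overwrite the original creation time. With local and incoming as illustrative stand-ins for _particles[i] and the argument:)

    bool changedOnServer = local.getLastEdited()  < incoming.getLastEdited();
    bool localOlder      = local.getLastUpdated() < incoming.getLastUpdated();
    if (changedOnServer || localOlder) {
        uint64_t keepCreated = incoming.isNewlyCreated() ? incoming.getCreated()
                                                         : local.getCreated();
        local = incoming;
        local.setCreated(keepCreated);
    }
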
|
||||
|
|
30
libraries/scriptengine/CMakeLists.txt
Normal file
|
@ -0,0 +1,30 @@
|
|||
cmake_minimum_required(VERSION 2.8)
|
||||
|
||||
set(ROOT_DIR ../..)
|
||||
set(MACRO_DIR ${ROOT_DIR}/cmake/macros)
|
||||
|
||||
# setup for find modules
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../../cmake/modules/")
|
||||
|
||||
set(TARGET_NAME scriptengine)
|
||||
|
||||
find_package(Qt5Widgets REQUIRED)
|
||||
|
||||
include(${MACRO_DIR}/SetupHifiLibrary.cmake)
|
||||
setup_hifi_library(${TARGET_NAME})
|
||||
|
||||
qt5_use_modules(${TARGET_NAME} Widgets)
|
||||
|
||||
include(${MACRO_DIR}/IncludeGLM.cmake)
|
||||
include_glm(${TARGET_NAME} ${ROOT_DIR})
|
||||
|
||||
include(${MACRO_DIR}/LinkHifiLibrary.cmake)
|
||||
link_hifi_library(shared ${TARGET_NAME} ${ROOT_DIR})
|
||||
link_hifi_library(octree ${TARGET_NAME} ${ROOT_DIR})
|
||||
link_hifi_library(voxels ${TARGET_NAME} ${ROOT_DIR})
|
||||
link_hifi_library(particles ${TARGET_NAME} ${ROOT_DIR})
|
||||
|
||||
# link ZLIB
|
||||
find_package(ZLIB)
|
||||
include_directories(${ZLIB_INCLUDE_DIRS})
|
||||
target_link_libraries(${TARGET_NAME} ${ZLIB_LIBRARIES})
|
169
libraries/scriptengine/src/ScriptEngine.cpp
Normal file
|
@ -0,0 +1,169 @@
|
|||
//
|
||||
// Agent.cpp
|
||||
// hifi
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 12/14/13.
|
||||
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
|
||||
//
|
||||
|
||||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QEventLoop>
|
||||
#include <QtCore/QTimer>
|
||||
#include <QtCore/QThread>
|
||||
#include <QtNetwork/QNetworkAccessManager>
|
||||
#include <QtNetwork/QNetworkRequest>
|
||||
#include <QtNetwork/QNetworkReply>
|
||||
|
||||
#include <AvatarData.h>
|
||||
#include <NodeList.h>
|
||||
#include <PacketHeaders.h>
|
||||
#include <UUID.h>
|
||||
#include <VoxelConstants.h>
|
||||
|
||||
#include "ScriptEngine.h"
|
||||
|
||||
int ScriptEngine::_scriptNumber = 1;
|
||||
|
||||
ScriptEngine::ScriptEngine(QString scriptContents, bool wantMenuItems,
|
||||
const char* scriptMenuName, AbstractMenuInterface* menu) {
|
||||
_scriptContents = scriptContents;
|
||||
_isFinished = false;
|
||||
_wantMenuItems = wantMenuItems;
|
||||
if (scriptMenuName) {
|
||||
_scriptMenuName = "Stop ";
|
||||
_scriptMenuName.append(scriptMenuName);
|
||||
} else {
|
||||
_scriptMenuName = "Stop Script ";
|
||||
_scriptNumber++;
|
||||
_scriptMenuName.append(_scriptNumber);
|
||||
}
|
||||
_menu = menu;
|
||||
}
|
||||
|
||||
ScriptEngine::~ScriptEngine() {
|
||||
//printf("ScriptEngine::~ScriptEngine()...\n");
|
||||
}
|
||||
|
||||
|
||||
void ScriptEngine::setupMenuItems() {
|
||||
if (_menu && _wantMenuItems) {
|
||||
_menu->addActionToQMenuAndActionHash(_menu->getActiveScriptsMenu(), _scriptMenuName, 0, this, SLOT(stop()));
|
||||
}
|
||||
}
|
||||
|
||||
void ScriptEngine::cleanMenuItems() {
|
||||
if (_menu && _wantMenuItems) {
|
||||
_menu->removeAction(_menu->getActiveScriptsMenu(), _scriptMenuName);
|
||||
}
|
||||
}
|
||||
|
||||
void ScriptEngine::run() {
|
||||
|
||||
//setupMenuItems();
|
||||
|
||||
QScriptEngine engine;
|
||||
|
||||
_voxelScriptingInterface.init();
|
||||
_particleScriptingInterface.init();
|
||||
|
||||
// register meta-type for glm::vec3 conversions
|
||||
registerMetaTypes(&engine);
|
||||
|
||||
QScriptValue agentValue = engine.newQObject(this);
|
||||
engine.globalObject().setProperty("Agent", agentValue);
|
||||
|
||||
QScriptValue voxelScripterValue = engine.newQObject(&_voxelScriptingInterface);
|
||||
engine.globalObject().setProperty("Voxels", voxelScripterValue);
|
||||
|
||||
QScriptValue particleScripterValue = engine.newQObject(&_particleScriptingInterface);
|
||||
engine.globalObject().setProperty("Particles", particleScripterValue);
|
||||
|
||||
QScriptValue treeScaleValue = engine.newVariant(QVariant(TREE_SCALE));
|
||||
engine.globalObject().setProperty("TREE_SCALE", treeScaleValue);
|
||||
|
||||
const unsigned int VISUAL_DATA_CALLBACK_USECS = (1.0 / 60.0) * 1000 * 1000;
|
||||
|
||||
// let the VoxelPacketSender know how frequently we plan to call it
|
||||
_voxelScriptingInterface.getVoxelPacketSender()->setProcessCallIntervalHint(VISUAL_DATA_CALLBACK_USECS);
|
||||
_particleScriptingInterface.getParticlePacketSender()->setProcessCallIntervalHint(VISUAL_DATA_CALLBACK_USECS);
|
||||
|
||||
//qDebug() << "Script:\n" << _scriptContents << "\n";
|
||||
|
||||
QScriptValue result = engine.evaluate(_scriptContents);
|
||||
qDebug() << "Evaluated script.\n";
|
||||
|
||||
if (engine.hasUncaughtException()) {
|
||||
int line = engine.uncaughtExceptionLineNumber();
|
||||
qDebug() << "Uncaught exception at line" << line << ":" << result.toString() << "\n";
|
||||
}
|
||||
|
||||
timeval startTime;
|
||||
gettimeofday(&startTime, NULL);
|
||||
|
||||
int thisFrame = 0;
|
||||
|
||||
while (!_isFinished) {
|
||||
int usecToSleep = usecTimestamp(&startTime) + (thisFrame++ * VISUAL_DATA_CALLBACK_USECS) - usecTimestampNow();
|
||||
if (usecToSleep > 0) {
|
||||
usleep(usecToSleep);
|
||||
}
|
||||
|
||||
if (_isFinished) {
|
||||
//qDebug() << "line: " << __LINE__ << " _isFinished... breaking loop\n";
|
||||
break;
|
||||
}
|
||||
|
||||
QCoreApplication::processEvents();
|
||||
|
||||
if (_isFinished) {
|
||||
//qDebug() << "line: " << __LINE__ << " _isFinished... breaking loop\n";
|
||||
break;
|
||||
}
|
||||
|
||||
bool willSendVisualDataCallBack = false;
|
||||
if (_voxelScriptingInterface.getVoxelPacketSender()->serversExist()) {
|
||||
// allow the scripter's call back to setup visual data
|
||||
willSendVisualDataCallBack = true;
|
||||
|
||||
// release the queue of edit voxel messages.
|
||||
_voxelScriptingInterface.getVoxelPacketSender()->releaseQueuedMessages();
|
||||
|
||||
// since we're in non-threaded mode, call process so that the packets are sent
|
||||
//_voxelScriptingInterface.getVoxelPacketSender()->process();
|
||||
}
|
||||
|
||||
if (_particleScriptingInterface.getParticlePacketSender()->serversExist()) {
|
||||
// allow the scripter's call back to setup visual data
|
||||
willSendVisualDataCallBack = true;
|
||||
|
||||
// release the queue of edit voxel messages.
|
||||
_particleScriptingInterface.getParticlePacketSender()->releaseQueuedMessages();
|
||||
|
||||
// since we're in non-threaded mode, call process so that the packets are sent
|
||||
//_particleScriptingInterface.getParticlePacketSender()->process();
|
||||
}
|
||||
|
||||
if (willSendVisualDataCallBack) {
|
||||
emit willSendVisualDataCallback();
|
||||
}
|
||||
|
||||
|
||||
if (engine.hasUncaughtException()) {
|
||||
int line = engine.uncaughtExceptionLineNumber();
|
||||
qDebug() << "Uncaught exception at line" << line << ":" << engine.uncaughtException().toString() << "\n";
|
||||
}
|
||||
}
|
||||
cleanMenuItems();
|
||||
|
||||
// If we were on a thread, then wait till it's done
|
||||
if (thread()) {
|
||||
thread()->quit();
|
||||
}
|
||||
|
||||
emit finished();
|
||||
}
|
||||
|
||||
void ScriptEngine::stop() {
|
||||
_isFinished = true;
|
||||
}
|
||||
|
61
libraries/scriptengine/src/ScriptEngine.h
Normal file
|
@ -0,0 +1,61 @@
|
|||
//
// ScriptEngine.h
// hifi
//
// Created by Brad Hefta-Gaub on 12/14/13.
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//

#ifndef __hifi__ScriptEngine__
#define __hifi__ScriptEngine__

#include <vector>

#include <QtScript/QScriptEngine>
#include <QtCore/QObject>
#include <QtCore/QUrl>

#include <AbstractMenuInterface.h>
#include <ParticleScriptingInterface.h>
#include <VoxelScriptingInterface.h>

class ScriptEngine : public QObject {
    Q_OBJECT
public:
    ScriptEngine(QString scriptContents, bool wantMenuItems = false,
                 const char* scriptMenuName = NULL, AbstractMenuInterface* menu = NULL);

    ~ScriptEngine();

    /// Access the VoxelScriptingInterface in order to initialize it with a custom packet sender and jurisdiction listener
    VoxelScriptingInterface* getVoxelScriptingInterface() { return &_voxelScriptingInterface; }

    /// Access the ParticleScriptingInterface in order to initialize it with a custom packet sender and jurisdiction listener
    ParticleScriptingInterface* getParticleScriptingInterface() { return &_particleScriptingInterface; }

    void setupMenuItems();
    void cleanMenuItems();

public slots:
    void run();
    void stop();

signals:
    void willSendAudioDataCallback();
    void willSendVisualDataCallback();
    void finished();

protected:
    QString _scriptContents;
    bool _isFinished;

private:
    VoxelScriptingInterface _voxelScriptingInterface;
    ParticleScriptingInterface _particleScriptingInterface;
    bool _wantMenuItems;
    QString _scriptMenuName;
    AbstractMenuInterface* _menu;
    static int _scriptNumber;
};

#endif /* defined(__hifi__ScriptEngine__) */
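As the slots and signals above suggest, ScriptEngine is meant to be driven through Qt's signal/slot machinery, usually on its own thread. A minimal wiring sketch (the surrounding function and the script string are illustrative; only the ScriptEngine members declared in this header are assumed):

    #include <QtCore/QThread>
    #include "ScriptEngine.h"

    void startScriptSketch() {
        QString script = "// hypothetical script contents";
        ScriptEngine* engine = new ScriptEngine(script); // no menu items requested

        QThread* thread = new QThread();
        engine->moveToThread(thread);

        // run() starts when the thread starts; run() asks the thread to quit once the script finishes
        QObject::connect(thread, SIGNAL(started()), engine, SLOT(run()));
        QObject::connect(engine, SIGNAL(finished()), engine, SLOT(deleteLater()));
        QObject::connect(thread, SIGNAL(finished()), thread, SLOT(deleteLater()));

        thread->start();
    }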
libraries/shared/src/AbstractMenuInterface.h (new file, 29 lines)

@ -0,0 +1,29 @@
//
// AbstractMenuInterface.h
// hifi
//
// Created by Brad Hefta-Gaub on 12/16/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//

#ifndef __hifi__AbstractMenuInterface__
#define __hifi__AbstractMenuInterface__

#include <QMenuBar>
//#include <QHash>
//#include <QKeySequence>

class AbstractMenuInterface {
public:
    virtual QMenu* getActiveScriptsMenu() = 0;
    virtual QAction* addActionToQMenuAndActionHash(QMenu* destinationMenu,
                                                   const QString actionName,
                                                   const QKeySequence& shortcut = 0,
                                                   const QObject* receiver = NULL,
                                                   const char* member = NULL,
                                                   QAction::MenuRole role = QAction::NoRole) = 0;
    virtual void removeAction(QMenu* menu, const QString& actionName) = 0;
};

#endif /* defined(__hifi__AbstractMenuInterface__) */
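Because the interface is pure virtual, any menu provider only needs to supply these three members. A bare-bones sketch of a conforming implementation (StubMenu and its internals are hypothetical; the real implementation in the interface application is not shown here):

    #include <QMenuBar>
    #include <AbstractMenuInterface.h>

    class StubMenu : public AbstractMenuInterface {
    public:
        StubMenu(QMenuBar* menuBar) : _scriptsMenu(menuBar->addMenu("Scripts")) { }

        virtual QMenu* getActiveScriptsMenu() { return _scriptsMenu; }

        virtual QAction* addActionToQMenuAndActionHash(QMenu* destinationMenu,
                                                       const QString actionName,
                                                       const QKeySequence& shortcut = 0,
                                                       const QObject* receiver = NULL,
                                                       const char* member = NULL,
                                                       QAction::MenuRole role = QAction::NoRole) {
            // QMenu::addAction wires the action's triggered() signal to the receiver's slot when one is supplied
            QAction* action = destinationMenu->addAction(actionName, receiver, member, shortcut);
            action->setMenuRole(role);
            return action;
        }

        virtual void removeAction(QMenu* menu, const QString& actionName) {
            foreach (QAction* action, menu->actions()) {
                if (action->text() == actionName) {
                    menu->removeAction(action);
                }
            }
        }

    private:
        QMenu* _scriptsMenu;
    };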
@ -42,8 +42,11 @@ HifiSockAddr::HifiSockAddr(const QString& hostname, quint16 hostOrderPort) {
}

HifiSockAddr& HifiSockAddr::operator=(const HifiSockAddr& rhsSockAddr) {
    HifiSockAddr temp(rhsSockAddr);
    swap(temp);
    //HifiSockAddr temp(rhsSockAddr);
    //swap(temp);
    _address = rhsSockAddr._address;
    _port = rhsSockAddr._port;

    return *this;
}
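This hunk replaces the copy-and-swap form of operator= (kept above as comments) with direct assignment of the two members. For comparison, the generic copy-and-swap idiom looks like this (Widget is a placeholder type, not from this codebase):

    #include <algorithm>

    class Widget {
    public:
        Widget() : _address(0), _port(0) { }

        Widget& operator=(const Widget& rhs) {
            Widget temp(rhs); // copy (compiler-generated copy constructor)...
            swap(temp);       // ...then swap, so *this takes over the copy's state
            return *this;     // temp's destructor releases the old state
        }

        void swap(Widget& other) {
            std::swap(_address, other._address);
            std::swap(_port, other._port);
        }

    private:
        unsigned int _address;
        unsigned short _port;
    };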
@ -51,7 +51,10 @@ PACKET_VERSION versionForPacketType(PACKET_TYPE type) {
            return 1;

        case PACKET_TYPE_PARTICLE_ADD_OR_EDIT:
            return 1;
            return 2;

        case PACKET_TYPE_PARTICLE_DATA:
            return 3;

        default:
            return 0;
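Bumping these return values marks the particle packet formats as incompatible with older builds: a receiver can compare the version carried in an incoming packet against versionForPacketType for that type and discard mismatches. An illustrative check (the packet layout assumed here, a type byte followed by a version byte, is an assumption and is not shown by this hunk):

    // Returns true when the packet's version matches what this build expects.
    // Assumes packetData[0] holds the packet type and packetData[1] the version.
    bool isExpectedPacketVersion(const unsigned char* packetData) {
        PACKET_TYPE type = (PACKET_TYPE) packetData[0];
        PACKET_VERSION sentVersion = (PACKET_VERSION) packetData[1];
        return sentVersion == versionForPacketType(type);
    }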
@ -41,6 +41,11 @@ PacketSender::PacketSender(PacketSenderNotify* notify, int packetsPerSecond) :
    _totalPacketsQueued(0),
    _totalBytesQueued(0)
{
    //printf("PacketSender[%p] created... \n", this);
}

PacketSender::~PacketSender() {
    //printf("PacketSender::~PacketSender[%p] destroyed... \n", this);
}
@ -36,6 +36,7 @@ public:
    static const int MINIMAL_SLEEP_INTERVAL;

    PacketSender(PacketSenderNotify* notify = NULL, int packetsPerSecond = DEFAULT_PACKETS_PER_SECOND);
    ~PacketSender();

    /// Add packet to outbound queue.
    /// \param HifiSockAddr& address the destination address
@ -14,7 +14,7 @@
#include <OctreeEditPacketSender.h>

/// Utility for processing, packing, queueing and sending of outbound edit voxel messages.
class VoxelEditPacketSender : public virtual OctreeEditPacketSender {
class VoxelEditPacketSender : public OctreeEditPacketSender {
public:
    VoxelEditPacketSender(PacketSenderNotify* notify = NULL) : OctreeEditPacketSender(notify) { }
    ~VoxelEditPacketSender() { }
@ -8,13 +8,8 @@

#include "VoxelScriptingInterface.h"

VoxelScriptingInterface::VoxelScriptingInterface() {
    _jurisdictionListener.initialize(true);
    _voxelPacketSender.setVoxelServerJurisdictions(_jurisdictionListener.getJurisdictions());
}

void VoxelScriptingInterface::queueVoxelAdd(PACKET_TYPE addPacketType, VoxelDetail& addVoxelDetails) {
    _voxelPacketSender.queueVoxelEditMessages(addPacketType, 1, &addVoxelDetails);
    getVoxelPacketSender()->queueVoxelEditMessages(addPacketType, 1, &addVoxelDetails);
}

void VoxelScriptingInterface::queueVoxelAdd(float x, float y, float z, float scale, uchar red, uchar green, uchar blue) {

@ -39,6 +34,6 @@ void VoxelScriptingInterface::queueVoxelDelete(float x, float y, float z, float
    // setup a VoxelDetail struct with data
    VoxelDetail deleteVoxelDetail = {x, y, z, scale, 0, 0, 0};

    _voxelPacketSender.queueVoxelEditMessages(PACKET_TYPE_VOXEL_ERASE, 1, &deleteVoxelDetail);
    getVoxelPacketSender()->queueVoxelEditMessages(PACKET_TYPE_VOXEL_ERASE, 1, &deleteVoxelDetail);
}
@ -12,16 +12,18 @@
#include <QtCore/QObject>

#include <JurisdictionListener.h>
#include <OctreeScriptingInterface.h>
#include "VoxelEditPacketSender.h"

/// handles scripting of voxel commands from JS passed to assigned clients
class VoxelScriptingInterface : public QObject {
class VoxelScriptingInterface : public OctreeScriptingInterface {
    Q_OBJECT
public:
    VoxelScriptingInterface();

    VoxelEditPacketSender* getVoxelPacketSender() { return &_voxelPacketSender; }
    JurisdictionListener* getJurisdictionListener() { return &_jurisdictionListener; }
    VoxelEditPacketSender* getVoxelPacketSender() { return (VoxelEditPacketSender*)getPacketSender(); }

    virtual NODE_TYPE getServerNodeType() const { return NODE_TYPE_VOXEL_SERVER; }
    virtual OctreeEditPacketSender* createPacketSender() { return new VoxelEditPacketSender(); }

public slots:
    /// queues the creation of a voxel which will be sent by calling process on the PacketSender
    /// \param x the x-coordinate of the voxel (in VS space)
|
|||
/// \param scale the scale of the voxel (in VS space)
|
||||
void queueVoxelDelete(float x, float y, float z, float scale);
|
||||
|
||||
/// Set the desired max packet size in bytes that should be created
|
||||
void setMaxPacketSize(int maxPacketSize) { return _voxelPacketSender.setMaxPacketSize(maxPacketSize); }
|
||||
|
||||
/// returns the current desired max packet size in bytes that will be created
|
||||
int getMaxPacketSize() const { return _voxelPacketSender.getMaxPacketSize(); }
|
||||
|
||||
/// set the max packets per second send rate
|
||||
void setPacketsPerSecond(int packetsPerSecond) { return _voxelPacketSender.setPacketsPerSecond(packetsPerSecond); }
|
||||
|
||||
/// get the max packets per second send rate
|
||||
int getPacketsPerSecond() const { return _voxelPacketSender.getPacketsPerSecond(); }
|
||||
|
||||
/// does a voxel server exist to send to
|
||||
bool voxelServersExist() const { return _voxelPacketSender.voxelServersExist(); }
|
||||
|
||||
/// are there packets waiting in the send queue to be sent
|
||||
bool hasPacketsToSend() const { return _voxelPacketSender.hasPacketsToSend(); }
|
||||
|
||||
/// how many packets are there in the send queue waiting to be sent
|
||||
int packetsToSendCount() const { return _voxelPacketSender.packetsToSendCount(); }
|
||||
|
||||
/// returns the packets per second send rate of this object over its lifetime
|
||||
float getLifetimePPS() const { return _voxelPacketSender.getLifetimePPS(); }
|
||||
|
||||
/// returns the bytes per second send rate of this object over its lifetime
|
||||
float getLifetimeBPS() const { return _voxelPacketSender.getLifetimeBPS(); }
|
||||
|
||||
/// returns the packets per second queued rate of this object over its lifetime
|
||||
float getLifetimePPSQueued() const { return _voxelPacketSender.getLifetimePPSQueued(); }
|
||||
|
||||
/// returns the bytes per second queued rate of this object over its lifetime
|
||||
float getLifetimeBPSQueued() const { return _voxelPacketSender.getLifetimeBPSQueued(); }
|
||||
|
||||
/// returns lifetime of this object from first packet sent to now in usecs
|
||||
long long unsigned int getLifetimeInUsecs() const { return _voxelPacketSender.getLifetimeInUsecs(); }
|
||||
|
||||
/// returns lifetime of this object from first packet sent to now in usecs
|
||||
float getLifetimeInSeconds() const { return _voxelPacketSender.getLifetimeInSeconds(); }
|
||||
|
||||
/// returns the total packets sent by this object over its lifetime
|
||||
long long unsigned int getLifetimePacketsSent() const { return _voxelPacketSender.getLifetimePacketsSent(); }
|
||||
|
||||
/// returns the total bytes sent by this object over its lifetime
|
||||
long long unsigned int getLifetimeBytesSent() const { return _voxelPacketSender.getLifetimeBytesSent(); }
|
||||
|
||||
/// returns the total packets queued by this object over its lifetime
|
||||
long long unsigned int getLifetimePacketsQueued() const { return _voxelPacketSender.getLifetimePacketsQueued(); }
|
||||
|
||||
/// returns the total bytes queued by this object over its lifetime
|
||||
long long unsigned int getLifetimeBytesQueued() const { return _voxelPacketSender.getLifetimeBytesQueued(); }
|
||||
|
||||
private:
|
||||
/// attached VoxelEditPacketSender that handles queuing and sending of packets to VS
|
||||
VoxelEditPacketSender _voxelPacketSender;
|
||||
JurisdictionListener _jurisdictionListener;
|
||||
|
||||
void queueVoxelAdd(PACKET_TYPE addPacketType, VoxelDetail& addVoxelDetails);
|
||||
};
|
||||
|
||||
|
|
|
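With this refactor, all of the rate and lifetime-statistics pass-throughs, along with the sender and jurisdiction members, leave this header; the subclass is reduced to naming its server type and constructing its concrete sender. A sketch of the shape the shared base class presumably takes (OctreeScriptingInterfaceSketch and its init() method are hypothetical; only the two virtuals come from the header above, and the jurisdiction setup mirrors the constructor removed from VoxelScriptingInterface.cpp):

    #include <QtCore/QObject>
    #include <JurisdictionListener.h>
    #include <OctreeEditPacketSender.h>

    class OctreeScriptingInterfaceSketch : public QObject {
    public:
        OctreeScriptingInterfaceSketch() : _packetSender(NULL) { }

        void init() {
            // the subclass decides which concrete sender to build and which server type it talks to
            _packetSender = createPacketSender();
            _jurisdictionListener.initialize(true);
        }

        OctreeEditPacketSender* getPacketSender() { return _packetSender; }
        JurisdictionListener* getJurisdictionListener() { return &_jurisdictionListener; }

    protected:
        virtual NODE_TYPE getServerNodeType() const = 0;
        virtual OctreeEditPacketSender* createPacketSender() = 0;

    private:
        OctreeEditPacketSender* _packetSender;
        JurisdictionListener _jurisdictionListener;
    };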
@ -53,6 +53,7 @@ public:

    void readCodeColorBufferToTree(const unsigned char* codeColorBuffer, bool destructive = false);

    virtual PACKET_TYPE expectedDataPacketType() const { return PACKET_TYPE_VOXEL_DATA; }
    virtual bool handlesEditPacketType(PACKET_TYPE packetType) const;
    virtual int processEditPacketData(PACKET_TYPE packetType, unsigned char* packetData, int packetLength,
                                      unsigned char* editData, int maxLength, Node* senderNode);
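These virtuals let generic server code ask a tree whether it understands an incoming edit packet before handing the payload over. An illustrative dispatcher (the Octree base-class name and the zero-on-unhandled convention are assumptions based only on the signatures above):

    // Forward an edit packet to the tree only if it claims to handle that type.
    // Returns the number of bytes consumed, or 0 if the type was not handled.
    int dispatchEditPacket(Octree* tree, PACKET_TYPE packetType,
                           unsigned char* packetData, int packetLength,
                           unsigned char* editData, int maxLength, Node* senderNode) {
        if (!tree->handlesEditPacketType(packetType)) {
            return 0;
        }
        return tree->processEditPacketData(packetType, packetData, packetLength, editData, maxLength, senderNode);
    }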