Merge branch 'master' of github.com:highfidelity/hifi into model-scripting

This commit is contained in:
Seth Alves 2017-01-31 09:22:16 -08:00
commit 5131d1edf2
20 changed files with 734 additions and 433 deletions

View file

@ -372,6 +372,13 @@
"help": "Password used for basic HTTP authentication. Leave this blank if you do not want to change it.",
"value-hidden": true
},
{
"name": "verify_http_password",
"label": "Verify HTTP Password",
"type": "password",
"help": "Must match the password entered above for change to be saved.",
"value-hidden": true
},
{
"name": "maximum_user_capacity",
"label": "Maximum User Capacity",

View file

@ -904,10 +904,18 @@ function saveSettings() {
var formJSON = form2js('settings-form', ".", false, cleanupFormValues, true);
// check if we've set the basic http password - if so, hash it with SHA-256 before posting
var canPost = true;
if (formJSON["security"]) {
var password = formJSON["security"]["http_password"];
var verify_password = formJSON["security"]["verify_http_password"];
if (password && password.length > 0) {
formJSON["security"]["http_password"] = sha256_digest(password);
if (password != verify_password) {
bootbox.alert({"message": "Passwords must match!", "title":"Password Error"});
canPost = false;
} else {
formJSON["security"]["http_password"] = sha256_digest(password);
delete formJSON["security"]["verify_http_password"];
}
}
}
@ -923,7 +931,9 @@ function saveSettings() {
$(this).blur();
// POST the form JSON to the domain-server settings.json endpoint so the settings are saved
postSettings(formJSON);
if (canPost) {
postSettings(formJSON);
}
}
$('body').on('click', '.save-button', function(e){

View file

@ -4380,6 +4380,10 @@ void Application::update(float deltaTime) {
PROFILE_RANGE_EX(simulation_physics, "HarvestChanges", 0xffffff00, (uint64_t)getActiveDisplayPlugin()->presentCount());
PerformanceTimer perfTimer("harvestChanges");
if (_physicsEngine->hasOutgoingChanges()) {
// grab the collision events BEFORE handleOutgoingChanges() because at this point
// we have a better idea of which objects we own or should own.
auto& collisionEvents = _physicsEngine->getCollisionEvents();
getEntities()->getTree()->withWriteLock([&] {
PerformanceTimer perfTimer("handleOutgoingChanges");
const VectorOfMotionStates& outgoingChanges = _physicsEngine->getOutgoingChanges();
@ -4387,11 +4391,10 @@ void Application::update(float deltaTime) {
avatarManager->handleOutgoingChanges(outgoingChanges);
});
auto collisionEvents = _physicsEngine->getCollisionEvents();
avatarManager->handleCollisionEvents(collisionEvents);
if (!_aboutToQuit) {
// handleCollisionEvents() AFTER handleOutgoingChanges()
PerformanceTimer perfTimer("entities");
avatarManager->handleCollisionEvents(collisionEvents);
// Collision events (and their scripts) must not be handled when we're locked, above. (That would risk
// deadlock.)
_entitySimulation->handleCollisionEvents(collisionEvents);
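
A minimal sketch (illustrative names only, not project code) of the ordering this hunk establishes: the collision events are captured by reference before handleOutgoingChanges() rewrites ownership, outgoing changes are applied under the entity-tree write lock, and the collision handlers, which may call into entity scripts, run only after the lock is released so they cannot deadlock against it.

#include <functional>
#include <mutex>
#include <vector>

struct FakePhysicsEngine {                         // stand-in for PhysicsEngine
    std::vector<int> collisionEvents;              // stand-in for CollisionEvents
    std::vector<int> outgoingChanges;              // stand-in for VectorOfMotionStates
    std::vector<int>& getCollisionEvents() { return collisionEvents; }
};

void harvestChanges(FakePhysicsEngine& physics, std::mutex& treeWriteLock,
                    const std::function<void(int)>& dispatchToScripts) {
    // 1. capture the events BEFORE ownership is rewritten by the outgoing-changes pass
    auto& collisionEvents = physics.getCollisionEvents();

    {   // 2. outgoing changes are applied while holding the entity-tree write lock
        std::lock_guard<std::mutex> guard(treeWriteLock);
        physics.outgoingChanges.clear();           // placeholder for handleOutgoingChanges()
    }

    // 3. collision handlers run only after the lock is released, so any entity
    //    scripts they trigger cannot deadlock against the write lock above
    for (int event : collisionEvents) {
        dispatchToScripts(event);
    }
}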

View file

@ -112,6 +112,42 @@ private:
bool _quit { false };
};
void AudioInjectorsThread::prepare() {
_audio->prepareLocalAudioInjectors();
}
static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
for (int i = 0; i < numSamples/2; i++) {
// read 2 samples
int16_t left = *source++;
int16_t right = *source++;
// write 2 + N samples
*dest++ = left;
*dest++ = right;
for (int n = 0; n < numExtraChannels; n++) {
*dest++ = 0;
}
}
}
static void channelDownmix(int16_t* source, int16_t* dest, int numSamples) {
for (int i = 0; i < numSamples/2; i++) {
// read 2 samples
int16_t left = *source++;
int16_t right = *source++;
// write 1 sample
*dest++ = (int16_t)((left + right) / 2);
}
}
static inline float convertToFloat(int16_t sample) {
return (float)sample * (1 / 32768.0f);
}
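
A hedged usage sketch of the three helpers just added (buffer contents and sizes are invented; it assumes the static definitions above are in scope). numSamples counts interleaved stereo input samples, so upmixing four frames to six channels and downmixing them to mono look like this:

#include <cstdint>
#include <cstdio>
#include <vector>

void upmixDownmixExample() {
    // 4 stereo frames = 8 interleaved samples
    std::vector<int16_t> stereo = { 100, -100, 200, -200, 300, -300, 400, -400 };
    int numSamples = (int)stereo.size();

    // stereo -> 5.1: each frame becomes L, R plus 4 zeroed extra channels
    std::vector<int16_t> surround((numSamples / 2) * 6);
    channelUpmix(stereo.data(), surround.data(), numSamples, 4);

    // stereo -> mono: each output sample is the average of L and R
    std::vector<int16_t> mono(numSamples / 2);
    channelDownmix(stereo.data(), mono.data(), numSamples);

    // convertToFloat() maps int16 samples onto [-1.0, 1.0)
    printf("first sample as float: %f, mono[0]: %d, surround[2]: %d\n",
           convertToFloat(stereo[0]), mono[0], surround[2]);
}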
AudioClient::AudioClient() :
AbstractAudioInterface(),
_gate(this),
@ -127,6 +163,7 @@ AudioClient::AudioClient() :
_loopbackAudioOutput(NULL),
_loopbackOutputDevice(NULL),
_inputRingBuffer(0),
_localInjectorsStream(0),
_receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
_isStereoInput(false),
_outputStarveDetectionStartTimeMsec(0),
@ -144,13 +181,18 @@ AudioClient::AudioClient() :
_reverbOptions(&_scriptReverbOptions),
_inputToNetworkResampler(NULL),
_networkToOutputResampler(NULL),
_localToOutputResampler(NULL),
_localAudioThread(this),
_audioLimiter(AudioConstants::SAMPLE_RATE, OUTPUT_CHANNEL_COUNT),
_outgoingAvatarAudioSequenceNumber(0),
_audioOutputIODevice(_receivedAudioStream, this),
_audioOutputIODevice(_localInjectorsStream, _receivedAudioStream, this),
_stats(&_receivedAudioStream),
_inputGate(),
_positionGetter(DEFAULT_POSITION_GETTER),
_orientationGetter(DEFAULT_ORIENTATION_GETTER) {
// avoid putting a lock in the device callback
assert(_localSamplesAvailable.is_lock_free());
// deprecate legacy settings
{
Setting::Handle<int>::Deprecated("maxFramesOverDesired", InboundAudioStream::MAX_FRAMES_OVER_DESIRED);
@ -176,6 +218,10 @@ AudioClient::AudioClient() :
_checkDevicesThread->setPriority(QThread::LowPriority);
_checkDevicesThread->start();
// start a thread to process local injectors
_localAudioThread.setObjectName("LocalAudio Thread");
_localAudioThread.start();
configureReverb();
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
@ -213,6 +259,7 @@ void AudioClient::reset() {
_stats.reset();
_sourceReverb.reset();
_listenerReverb.reset();
_localReverb.reset();
}
void AudioClient::audioMixerKilled() {
@ -365,7 +412,7 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
CoUninitialize();
}
qCDebug(audioclient) << "DEBUG [" << deviceName << "] [" << getNamedAudioDeviceForMode(mode, deviceName).deviceName() << "]";
qCDebug(audioclient) << "[" << deviceName << "] [" << getNamedAudioDeviceForMode(mode, deviceName).deviceName() << "]";
return getNamedAudioDeviceForMode(mode, deviceName);
#endif
@ -387,12 +434,12 @@ bool nativeFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
audioFormat.setByteOrder(QAudioFormat::LittleEndian);
if (!audioDevice.isFormatSupported(audioFormat)) {
qCDebug(audioclient) << "WARNING: The native format is" << audioFormat << "but isFormatSupported() failed.";
qCWarning(audioclient) << "The native format is" << audioFormat << "but isFormatSupported() failed.";
return false;
}
// converting to/from this rate must produce an integral number of samples
if (audioFormat.sampleRate() * AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL % AudioConstants::SAMPLE_RATE != 0) {
qCDebug(audioclient) << "WARNING: The native sample rate [" << audioFormat.sampleRate() << "] is not supported.";
qCWarning(audioclient) << "The native sample rate [" << audioFormat.sampleRate() << "] is not supported.";
return false;
}
return true;
@ -726,12 +773,12 @@ QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) {
}
bool AudioClient::switchInputToAudioDevice(const QString& inputDeviceName) {
qCDebug(audioclient) << "DEBUG [" << inputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName).deviceName() << "]";
qCDebug(audioclient) << "[" << inputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName).deviceName() << "]";
return switchInputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName));
}
bool AudioClient::switchOutputToAudioDevice(const QString& outputDeviceName) {
qCDebug(audioclient) << "DEBUG [" << outputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName).deviceName() << "]";
qCDebug(audioclient) << "[" << outputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName).deviceName() << "]";
return switchOutputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName));
}
@ -762,6 +809,7 @@ void AudioClient::configureReverb() {
p.wetDryMix = _reverbOptions->getWetDryMix();
_listenerReverb.setParameters(&p);
_localReverb.setParameters(&p);
// used only for adding self-reverb to loopback audio
p.sampleRate = _outputFormat.sampleRate();
@ -808,6 +856,7 @@ void AudioClient::setReverb(bool reverb) {
if (!_reverb) {
_sourceReverb.reset();
_listenerReverb.reset();
_localReverb.reset();
}
}
@ -841,36 +890,6 @@ void AudioClient::setReverbOptions(const AudioEffectOptions* options) {
}
}
static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
for (int i = 0; i < numSamples/2; i++) {
// read 2 samples
int16_t left = *source++;
int16_t right = *source++;
// write 2 + N samples
*dest++ = left;
*dest++ = right;
for (int n = 0; n < numExtraChannels; n++) {
*dest++ = 0;
}
}
}
static void channelDownmix(int16_t* source, int16_t* dest, int numSamples) {
for (int i = 0; i < numSamples/2; i++) {
// read 2 samples
int16_t left = *source++;
int16_t right = *source++;
// write 1 sample
*dest++ = (int16_t)((left + right) / 2);
}
}
void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
// If there is server echo, reverb will be applied to the received audio stream, so no need to apply it here.
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
@ -1082,14 +1101,78 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
PacketType::MicrophoneAudioWithEcho, _selectedCodecName);
}
void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
void AudioClient::prepareLocalAudioInjectors() {
if (_outputPeriod == 0) {
return;
}
int bufferCapacity = _localInjectorsStream.getSampleCapacity();
if (_localToOutputResampler) {
// reserve headroom so a write never overwrites unread data (writes overwrite rather
// than fail), because the buffer is used as a lock-free pipe
bufferCapacity -=
_localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) *
AudioConstants::STEREO;
bufferCapacity += 1;
}
int samplesNeeded = std::numeric_limits<int>::max();
while (samplesNeeded > 0) {
// lock for every write to avoid locking out the device callback
// this lock is intentional - the buffer is only lock-free in its use in the device callback
Lock lock(_localAudioMutex);
samplesNeeded = bufferCapacity - _localSamplesAvailable.load(std::memory_order_relaxed);
if (samplesNeeded <= 0) {
break;
}
// get a network frame of local injectors' audio
if (!mixLocalAudioInjectors(_localMixBuffer)) {
break;
}
// reverb
if (_reverb) {
_localReverb.render(_localMixBuffer, _localMixBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
int samples;
if (_localToOutputResampler) {
// resample to output sample rate
int frames = _localToOutputResampler->render(_localMixBuffer, _localOutputMixBuffer,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// write to local injectors' ring buffer
samples = frames * AudioConstants::STEREO;
_localInjectorsStream.writeSamples(_localOutputMixBuffer, samples);
} else {
// write to local injectors' ring buffer
samples = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
_localInjectorsStream.writeSamples(_localMixBuffer,
AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
}
_localSamplesAvailable.fetch_add(samples, std::memory_order_release);
samplesNeeded -= samples;
}
}
bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
QVector<AudioInjector*> injectorsToRemove;
// lock the injector vector
Lock lock(_injectorsMutex);
for (AudioInjector* injector : getActiveLocalAudioInjectors()) {
if (_activeLocalAudioInjectors.size() == 0) {
return false;
}
memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));
for (AudioInjector* injector : _activeLocalAudioInjectors) {
if (injector->getLocalBuffer()) {
static const int HRTF_DATASET_INDEX = 1;
@ -1098,8 +1181,8 @@ void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
qint64 bytesToRead = numChannels * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
// get one frame from the injector
memset(_scratchBuffer, 0, bytesToRead);
if (0 < injector->getLocalBuffer()->readData((char*)_scratchBuffer, bytesToRead)) {
memset(_localScratchBuffer, 0, bytesToRead);
if (0 < injector->getLocalBuffer()->readData((char*)_localScratchBuffer, bytesToRead)) {
if (injector->isAmbisonic()) {
@ -1119,7 +1202,7 @@ void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
float qz = relativeOrientation.y;
// Ambisonic gets spatialized into mixBuffer
injector->getLocalFOA().render(_scratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
injector->getLocalFOA().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
qw, qx, qy, qz, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else if (injector->isStereo()) {
@ -1127,7 +1210,7 @@ void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// stereo gets directly mixed into mixBuffer
float gain = injector->getVolume();
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
mixBuffer[i] += (float)_scratchBuffer[i] * (1/32768.0f) * gain;
mixBuffer[i] += convertToFloat(_localScratchBuffer[i]) * gain;
}
} else {
@ -1136,10 +1219,10 @@ void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = gainForSource(distance, injector->getVolume());
float azimuth = azimuthForSource(relativePosition);
float azimuth = azimuthForSource(relativePosition);
// mono gets spatialized into mixBuffer
injector->getLocalHRTF().render(_scratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
injector->getLocalHRTF().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
azimuth, distance, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
@ -1160,8 +1243,10 @@ void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
for (AudioInjector* injector : injectorsToRemove) {
qCDebug(audioclient) << "removing injector";
getActiveLocalAudioInjectors().removeOne(injector);
_activeLocalAudioInjectors.removeOne(injector);
}
return true;
}
void AudioClient::processReceivedSamples(const QByteArray& decodedBuffer, QByteArray& outputBuffer) {
@ -1172,33 +1257,24 @@ void AudioClient::processReceivedSamples(const QByteArray& decodedBuffer, QByteA
outputBuffer.resize(_outputFrameSize * AudioConstants::SAMPLE_SIZE);
int16_t* outputSamples = reinterpret_cast<int16_t*>(outputBuffer.data());
// convert network audio to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
_mixBuffer[i] = (float)decodedSamples[i] * (1/32768.0f);
}
// mix in active injectors
if (getActiveLocalAudioInjectors().size() > 0) {
mixLocalAudioInjectors(_mixBuffer);
}
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
// apply stereo reverb
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
if (hasReverb) {
updateReverbOptions();
_listenerReverb.render(_mixBuffer, _mixBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
int16_t* reverbSamples = _networkToOutputResampler ? _networkScratchBuffer : outputSamples;
_listenerReverb.render(decodedSamples, reverbSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
// resample to output sample rate
if (_networkToOutputResampler) {
const int16_t* inputSamples = hasReverb ? _networkScratchBuffer : decodedSamples;
_networkToOutputResampler->render(inputSamples, outputSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
// resample to output sample rate
_audioLimiter.render(_mixBuffer, _scratchBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
_networkToOutputResampler->render(_scratchBuffer, outputSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else {
// no resampling needed
_audioLimiter.render(_mixBuffer, outputSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// if no transformations were applied, we still need to copy the buffer
if (!hasReverb && !_networkToOutputResampler) {
memcpy(outputSamples, decodedSamples, decodedBuffer.size());
}
}
@ -1381,6 +1457,9 @@ void AudioClient::outputNotify() {
bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
bool supportedFormat = false;
Lock lock(_localAudioMutex);
_localSamplesAvailable.exchange(0, std::memory_order_release);
// cleanup any previously initialized device
if (_audioOutput) {
_audioOutput->stop();
@ -1391,12 +1470,24 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
_loopbackOutputDevice = NULL;
delete _loopbackAudioOutput;
_loopbackAudioOutput = NULL;
delete[] _outputMixBuffer;
_outputMixBuffer = NULL;
delete[] _outputScratchBuffer;
_outputScratchBuffer = NULL;
delete[] _localOutputMixBuffer;
_localOutputMixBuffer = NULL;
}
if (_networkToOutputResampler) {
// if we were using an input to network resampler, delete it here
delete _networkToOutputResampler;
_networkToOutputResampler = NULL;
delete _localToOutputResampler;
_localToOutputResampler = NULL;
}
if (!outputDeviceInfo.isNull()) {
@ -1416,6 +1507,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
assert(_outputFormat.sampleSize() == 16);
_networkToOutputResampler = new AudioSRC(_desiredOutputFormat.sampleRate(), _outputFormat.sampleRate(), OUTPUT_CHANNEL_COUNT);
_localToOutputResampler = new AudioSRC(_desiredOutputFormat.sampleRate(), _outputFormat.sampleRate(), OUTPUT_CHANNEL_COUNT);
} else {
qCDebug(audioclient) << "No resampling required for network output to match actual output format.";
@ -1441,6 +1533,14 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
_audioOutput->start(&_audioOutputIODevice);
lock.unlock();
int periodSampleSize = _audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE;
// device callback is not restricted to periodSampleSize, so double the mix/scratch buffer sizes
_outputPeriod = periodSampleSize * 2;
_outputMixBuffer = new float[_outputPeriod];
_outputScratchBuffer = new int16_t[_outputPeriod];
_localOutputMixBuffer = new float[_outputPeriod];
_localInjectorsStream.resizeForFrameSize(_outputPeriod * 2);
qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / AudioConstants::SAMPLE_SIZE / (float)deviceFrameSize <<
"requested bytes:" << requestedSize << "actual bytes:" << _audioOutput->bufferSize() <<
"os default:" << osDefaultBufferSize << "period size:" << _audioOutput->periodSize();
@ -1550,26 +1650,61 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
// samples requested from OUTPUT_CHANNEL_COUNT
int deviceChannelCount = _audio->_outputFormat.channelCount();
int samplesRequested = (int)(maxSize / AudioConstants::SAMPLE_SIZE) * OUTPUT_CHANNEL_COUNT / deviceChannelCount;
// restrict samplesRequested to the size of our mix/scratch buffers
samplesRequested = std::min(samplesRequested, _audio->_outputPeriod);
int samplesPopped;
int bytesWritten;
int16_t* scratchBuffer = _audio->_outputScratchBuffer;
float* mixBuffer = _audio->_outputMixBuffer;
if ((samplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) {
qCDebug(audiostream, "Read %d samples from buffer (%d available)", samplesPopped, _receivedAudioStream.getSamplesAvailable());
int networkSamplesPopped;
if ((networkSamplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) {
qCDebug(audiostream, "Read %d samples from buffer (%d available, %d requested)", networkSamplesPopped, _receivedAudioStream.getSamplesAvailable(), samplesRequested);
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
lastPopOutput.readSamples(scratchBuffer, networkSamplesPopped);
// if required, upmix or downmix to deviceChannelCount
if (deviceChannelCount == OUTPUT_CHANNEL_COUNT) {
lastPopOutput.readSamples((int16_t*)data, samplesPopped);
} else if (deviceChannelCount > OUTPUT_CHANNEL_COUNT) {
lastPopOutput.readSamplesWithUpmix((int16_t*)data, samplesPopped, deviceChannelCount - OUTPUT_CHANNEL_COUNT);
} else {
lastPopOutput.readSamplesWithDownmix((int16_t*)data, samplesPopped);
for (int i = 0; i < networkSamplesPopped; i++) {
mixBuffer[i] = convertToFloat(scratchBuffer[i]);
}
bytesWritten = (samplesPopped * AudioConstants::SAMPLE_SIZE) * deviceChannelCount / OUTPUT_CHANNEL_COUNT;
samplesRequested = networkSamplesPopped;
}
int injectorSamplesPopped = 0;
{
Lock lock(_audio->_localAudioMutex);
bool append = networkSamplesPopped > 0;
samplesRequested = std::min(samplesRequested, _audio->_localSamplesAvailable.load(std::memory_order_acquire));
if ((injectorSamplesPopped = _localInjectorsStream.appendSamples(mixBuffer, samplesRequested, append)) > 0) {
_audio->_localSamplesAvailable.fetch_sub(injectorSamplesPopped, std::memory_order_release);
qCDebug(audiostream, "Read %d samples from injectors (%d available, %d requested)", injectorSamplesPopped, _localInjectorsStream.samplesAvailable(), samplesRequested);
}
}
// prepare injectors for the next callback
QMetaObject::invokeMethod(&_audio->_localAudioThread, "prepare", Qt::QueuedConnection);
int samplesPopped = std::max(networkSamplesPopped, injectorSamplesPopped);
int framesPopped = samplesPopped / AudioConstants::STEREO;
int bytesWritten;
if (samplesPopped > 0) {
if (deviceChannelCount == OUTPUT_CHANNEL_COUNT) {
// limit the audio
_audio->_audioLimiter.render(mixBuffer, (int16_t*)data, framesPopped);
} else {
_audio->_audioLimiter.render(mixBuffer, scratchBuffer, framesPopped);
// upmix or downmix to deviceChannelCount
if (deviceChannelCount > OUTPUT_CHANNEL_COUNT) {
int extraChannels = deviceChannelCount - OUTPUT_CHANNEL_COUNT;
channelUpmix(scratchBuffer, (int16_t*)data, samplesPopped, extraChannels);
} else {
channelDownmix(scratchBuffer, (int16_t*)data, samplesPopped);
}
}
bytesWritten = framesPopped * AudioConstants::SAMPLE_SIZE * deviceChannelCount;
} else {
// nothing on network, don't grab anything from injectors, and just return 0s
// this will flood the log: qCDebug(audioclient, "empty/partial network buffer");
memset(data, 0, maxSize);
bytesWritten = maxSize;
}
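
The injector path above treats _localInjectorsStream as a single-producer/single-consumer pipe: the injector thread publishes samples with a release increment of _localSamplesAvailable, and the device callback observes them with an acquire load before consuming. A reduced sketch of that handshake (the ring itself, and the mutex the real code also takes around device switches, are omitted; names and the capacity are illustrative):

#include <algorithm>
#include <atomic>

std::atomic<int> samplesAvailable { 0 };
const int capacity = 8192;                    // example ring capacity, in samples

// producer (injector thread): top the ring up to capacity
void producerPass(int frameSamples) {
    while (capacity - samplesAvailable.load(std::memory_order_relaxed) >= frameSamples) {
        // ... write frameSamples of mixed injector audio into the ring here ...
        samplesAvailable.fetch_add(frameSamples, std::memory_order_release); // publish
    }
}

// consumer (device callback): take at most what has been published
int consumerPass(int samplesRequested) {
    int available = samplesAvailable.load(std::memory_order_acquire);        // observe
    int samplesPopped = std::min(samplesRequested, available);
    // ... read samplesPopped samples out of the ring here ...
    samplesAvailable.fetch_sub(samplesPopped, std::memory_order_release);
    return samplesPopped;
}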

View file

@ -69,9 +69,24 @@ class QIODevice;
class Transform;
class NLPacket;
class AudioInjectorsThread : public QThread {
Q_OBJECT
public:
AudioInjectorsThread(AudioClient* audio) : _audio(audio) {}
public slots :
void prepare();
private:
AudioClient* _audio;
};
class AudioClient : public AbstractAudioInterface, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
using LocalInjectorsStream = AudioMixRingBuffer;
public:
static const int MIN_BUFFER_FRAMES;
static const int MAX_BUFFER_FRAMES;
@ -84,8 +99,10 @@ public:
class AudioOutputIODevice : public QIODevice {
public:
AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream, AudioClient* audio) :
_receivedAudioStream(receivedAudioStream), _audio(audio), _unfulfilledReads(0) {};
AudioOutputIODevice(LocalInjectorsStream& localInjectorsStream, MixedProcessedAudioStream& receivedAudioStream,
AudioClient* audio) :
_localInjectorsStream(localInjectorsStream), _receivedAudioStream(receivedAudioStream),
_audio(audio), _unfulfilledReads(0) {}
void start() { open(QIODevice::ReadOnly | QIODevice::Unbuffered); }
void stop() { close(); }
@ -93,6 +110,7 @@ public:
qint64 writeData(const char * data, qint64 maxSize) override { return 0; }
int getRecentUnfulfilledReads() { int unfulfilledReads = _unfulfilledReads; _unfulfilledReads = 0; return unfulfilledReads; }
private:
LocalInjectorsStream& _localInjectorsStream;
MixedProcessedAudioStream& _receivedAudioStream;
AudioClient* _audio;
int _unfulfilledReads;
@ -129,8 +147,6 @@ public:
Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);
QVector<AudioInjector*>& getActiveLocalAudioInjectors() { return _activeLocalAudioInjectors; }
void checkDevices();
static const float CALLBACK_ACCELERATOR_RATIO;
@ -171,6 +187,7 @@ public slots:
int setOutputBufferSize(int numFrames, bool persist = true);
void prepareLocalAudioInjectors();
bool outputLocalInjector(AudioInjector* injector) override;
bool shouldLoopbackInjectors() override { return _shouldEchoToServer; }
@ -218,7 +235,7 @@ protected:
private:
void outputFormatChanged();
void mixLocalAudioInjectors(float* mixBuffer);
bool mixLocalAudioInjectors(float* mixBuffer);
float azimuthForSource(const glm::vec3& relativePosition);
float gainForSource(float distance, float volume);
@ -262,6 +279,10 @@ private:
QAudioOutput* _loopbackAudioOutput;
QIODevice* _loopbackOutputDevice;
AudioRingBuffer _inputRingBuffer;
LocalInjectorsStream _localInjectorsStream;
// In order to use _localInjectorsStream as a lock-free pipe,
// use it with a single producer/consumer, and track available samples
std::atomic<int> _localSamplesAvailable { 0 };
MixedProcessedAudioStream _receivedAudioStream;
bool _isStereoInput;
@ -292,14 +313,28 @@ private:
AudioEffectOptions* _reverbOptions;
AudioReverb _sourceReverb { AudioConstants::SAMPLE_RATE };
AudioReverb _listenerReverb { AudioConstants::SAMPLE_RATE };
AudioReverb _localReverb { AudioConstants::SAMPLE_RATE };
// possible streams needed for resample
AudioSRC* _inputToNetworkResampler;
AudioSRC* _networkToOutputResampler;
AudioSRC* _localToOutputResampler;
// for network audio (used by network audio thread)
int16_t _networkScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
// for local audio (used by audio injectors thread)
float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };
AudioInjectorsThread _localAudioThread;
Mutex _localAudioMutex;
// for output audio (used by this thread)
int _outputPeriod { 0 };
float* _outputMixBuffer { NULL };
int16_t* _outputScratchBuffer { NULL };
// for local hrtf-ing
float _mixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _scratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
AudioLimiter _audioLimiter;
// Adds Reverb

View file

@ -26,46 +26,51 @@
static const QString RING_BUFFER_OVERFLOW_DEBUG { "AudioRingBuffer::writeData has overflown the buffer. Overwriting old data." };
static const QString DROPPED_SILENT_DEBUG { "AudioRingBuffer::addSilentSamples dropping silent samples to prevent overflow." };
AudioRingBuffer::AudioRingBuffer(int numFrameSamples, int numFramesCapacity) :
template <class T>
AudioRingBufferTemplate<T>::AudioRingBufferTemplate(int numFrameSamples, int numFramesCapacity) :
_numFrameSamples(numFrameSamples),
_frameCapacity(numFramesCapacity),
_sampleCapacity(numFrameSamples * numFramesCapacity),
_bufferLength(numFrameSamples * (numFramesCapacity + 1))
{
if (numFrameSamples) {
_buffer = new int16_t[_bufferLength];
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
_buffer = new Sample[_bufferLength];
memset(_buffer, 0, _bufferLength * SampleSize);
_nextOutput = _buffer;
_endOfLastWrite = _buffer;
}
static QString repeatedOverflowMessage = LogHandler::getInstance().addRepeatedMessageRegex(RING_BUFFER_OVERFLOW_DEBUG);
static QString repeatedDroppedMessage = LogHandler::getInstance().addRepeatedMessageRegex(DROPPED_SILENT_DEBUG);
};
}
AudioRingBuffer::~AudioRingBuffer() {
template <class T>
AudioRingBufferTemplate<T>::~AudioRingBufferTemplate() {
delete[] _buffer;
}
void AudioRingBuffer::clear() {
template <class T>
void AudioRingBufferTemplate<T>::clear() {
_endOfLastWrite = _buffer;
_nextOutput = _buffer;
}
void AudioRingBuffer::reset() {
template <class T>
void AudioRingBufferTemplate<T>::reset() {
clear();
_overflowCount = 0;
}
void AudioRingBuffer::resizeForFrameSize(int numFrameSamples) {
template <class T>
void AudioRingBufferTemplate<T>::resizeForFrameSize(int numFrameSamples) {
delete[] _buffer;
_numFrameSamples = numFrameSamples;
_sampleCapacity = numFrameSamples * _frameCapacity;
_bufferLength = numFrameSamples * (_frameCapacity + 1);
if (numFrameSamples) {
_buffer = new int16_t[_bufferLength];
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
_buffer = new Sample[_bufferLength];
memset(_buffer, 0, _bufferLength * SampleSize);
} else {
_buffer = nullptr;
}
@ -73,17 +78,29 @@ void AudioRingBuffer::resizeForFrameSize(int numFrameSamples) {
reset();
}
int AudioRingBuffer::readSamples(int16_t* destination, int maxSamples) {
return readData((char*)destination, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
template <class T>
int AudioRingBufferTemplate<T>::readSamples(Sample* destination, int maxSamples) {
return readData((char*)destination, maxSamples * SampleSize) / SampleSize;
}
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
return writeData((char*)source, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
template <class T>
int AudioRingBufferTemplate<T>::appendSamples(Sample* destination, int maxSamples, bool append) {
if (append) {
return appendData((char*)destination, maxSamples * SampleSize) / SampleSize;
} else {
return readData((char*)destination, maxSamples * SampleSize) / SampleSize;
}
}
int AudioRingBuffer::readData(char *data, int maxSize) {
template <class T>
int AudioRingBufferTemplate<T>::writeSamples(const Sample* source, int maxSamples) {
return writeData((char*)source, maxSamples * SampleSize) / SampleSize;
}
template <class T>
int AudioRingBufferTemplate<T>::readData(char *data, int maxSize) {
// only copy up to the number of samples we have available
int maxSamples = maxSize / sizeof(int16_t);
int maxSamples = maxSize / SampleSize;
int numReadSamples = std::min(maxSamples, samplesAvailable());
if (_nextOutput + numReadSamples > _buffer + _bufferLength) {
@ -91,22 +108,56 @@ int AudioRingBuffer::readData(char *data, int maxSize) {
int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput;
// read to the end of the buffer
memcpy(data, _nextOutput, numSamplesToEnd * sizeof(int16_t));
memcpy(data, _nextOutput, numSamplesToEnd * SampleSize);
// read the rest from the beginning of the buffer
memcpy(data + (numSamplesToEnd * sizeof(int16_t)), _buffer, (numReadSamples - numSamplesToEnd) * sizeof(int16_t));
memcpy(data + (numSamplesToEnd * SampleSize), _buffer, (numReadSamples - numSamplesToEnd) * SampleSize);
} else {
memcpy(data, _nextOutput, numReadSamples * sizeof(int16_t));
memcpy(data, _nextOutput, numReadSamples * SampleSize);
}
shiftReadPosition(numReadSamples);
return numReadSamples * sizeof(int16_t);
return numReadSamples * SampleSize;
}
int AudioRingBuffer::writeData(const char* data, int maxSize) {
template <class T>
int AudioRingBufferTemplate<T>::appendData(char *data, int maxSize) {
// only copy up to the number of samples we have available
int maxSamples = maxSize / SampleSize;
int numReadSamples = std::min(maxSamples, samplesAvailable());
Sample* dest = reinterpret_cast<Sample*>(data);
Sample* output = _nextOutput;
if (_nextOutput + numReadSamples > _buffer + _bufferLength) {
// we're going to need to do two reads to get this data, it wraps around the edge
int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput;
// read to the end of the buffer
for (int i = 0; i < numSamplesToEnd; i++) {
*dest++ += *output++;
}
// read the rest from the beginning of the buffer
output = _buffer;
for (int i = 0; i < (numReadSamples - numSamplesToEnd); i++) {
*dest++ += *output++;
}
} else {
for (int i = 0; i < numReadSamples; i++) {
*dest++ += *output++;
}
}
shiftReadPosition(numReadSamples);
return numReadSamples * SampleSize;
}
template <class T>
int AudioRingBufferTemplate<T>::writeData(const char* data, int maxSize) {
// only copy up to the number of samples we have capacity for
int maxSamples = maxSize / sizeof(int16_t);
int maxSamples = maxSize / SampleSize;
int numWriteSamples = std::min(maxSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
@ -124,20 +175,21 @@ int AudioRingBuffer::writeData(const char* data, int maxSize) {
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
// write to the end of the buffer
memcpy(_endOfLastWrite, data, numSamplesToEnd * sizeof(int16_t));
memcpy(_endOfLastWrite, data, numSamplesToEnd * SampleSize);
// write the rest to the beginning of the buffer
memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (numWriteSamples - numSamplesToEnd) * sizeof(int16_t));
memcpy(_buffer, data + (numSamplesToEnd * SampleSize), (numWriteSamples - numSamplesToEnd) * SampleSize);
} else {
memcpy(_endOfLastWrite, data, numWriteSamples * sizeof(int16_t));
memcpy(_endOfLastWrite, data, numWriteSamples * SampleSize);
}
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numWriteSamples);
return numWriteSamples * sizeof(int16_t);
return numWriteSamples * SampleSize;
}
int AudioRingBuffer::samplesAvailable() const {
template <class T>
int AudioRingBufferTemplate<T>::samplesAvailable() const {
if (!_endOfLastWrite) {
return 0;
}
@ -149,31 +201,8 @@ int AudioRingBuffer::samplesAvailable() const {
return sampleDifference;
}
int AudioRingBuffer::addSilentSamples(int silentSamples) {
// NOTE: This implementation is nearly identical to writeData save for s/memcpy/memset, refer to comments there
int numWriteSamples = std::min(silentSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (numWriteSamples > samplesRoomFor) {
numWriteSamples = samplesRoomFor;
qCDebug(audio) << qPrintable(DROPPED_SILENT_DEBUG);
}
if (_endOfLastWrite + numWriteSamples > _buffer + _bufferLength) {
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
memset(_endOfLastWrite, 0, numSamplesToEnd * sizeof(int16_t));
memset(_buffer, 0, (numWriteSamples - numSamplesToEnd) * sizeof(int16_t));
} else {
memset(_endOfLastWrite, 0, numWriteSamples * sizeof(int16_t));
}
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numWriteSamples);
return numWriteSamples;
}
int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const {
template <class T>
typename AudioRingBufferTemplate<T>::Sample* AudioRingBufferTemplate<T>::shiftedPositionAccomodatingWrap(Sample* position, int numSamplesShift) const {
// NOTE: It is possible to shift out-of-bounds if (|numSamplesShift| > 2 * _bufferLength), but this should not occur
if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _bufferLength) {
// this shift will wrap the position around to the beginning of the ring
@ -186,11 +215,37 @@ int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int
}
}
float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
template <class T>
int AudioRingBufferTemplate<T>::addSilentSamples(int silentSamples) {
// NOTE: This implementation is nearly identical to writeData save for s/memcpy/memset, refer to comments there
int numWriteSamples = std::min(silentSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (numWriteSamples > samplesRoomFor) {
numWriteSamples = samplesRoomFor;
qCDebug(audio) << qPrintable(DROPPED_SILENT_DEBUG);
}
if (_endOfLastWrite + numWriteSamples > _buffer + _bufferLength) {
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
memset(_endOfLastWrite, 0, numSamplesToEnd * SampleSize);
memset(_buffer, 0, (numWriteSamples - numSamplesToEnd) * SampleSize);
} else {
memset(_endOfLastWrite, 0, numWriteSamples * SampleSize);
}
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numWriteSamples);
return numWriteSamples;
}
template <class T>
float AudioRingBufferTemplate<T>::getFrameLoudness(const Sample* frameStart) const {
// FIXME: This is a bad measure of loudness - normal estimation uses sqrt(sum(x*x))
float loudness = 0.0f;
const int16_t* sampleAt = frameStart;
const int16_t* bufferLastAt = _buffer + _bufferLength - 1;
const Sample* sampleAt = frameStart;
const Sample* bufferLastAt = _buffer + _bufferLength - 1;
for (int i = 0; i < _numFrameSamples; ++i) {
loudness += (float) std::abs(*sampleAt);
@ -203,14 +258,16 @@ float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
return loudness;
}
float AudioRingBuffer::getFrameLoudness(ConstIterator frameStart) const {
template <class T>
float AudioRingBufferTemplate<T>::getFrameLoudness(ConstIterator frameStart) const {
if (frameStart.isNull()) {
return 0.0f;
}
return getFrameLoudness(&(*frameStart));
}
int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) {
template <class T>
int AudioRingBufferTemplate<T>::writeSamples(ConstIterator source, int maxSamples) {
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (samplesToCopy > samplesRoomFor) {
@ -221,7 +278,7 @@ int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) {
qCDebug(audio) << qPrintable(RING_BUFFER_OVERFLOW_DEBUG);
}
int16_t* bufferLast = _buffer + _bufferLength - 1;
Sample* bufferLast = _buffer + _bufferLength - 1;
for (int i = 0; i < samplesToCopy; i++) {
*_endOfLastWrite = *source;
_endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1;
@ -231,7 +288,8 @@ int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) {
return samplesToCopy;
}
int AudioRingBuffer::writeSamplesWithFade(ConstIterator source, int maxSamples, float fade) {
template <class T>
int AudioRingBufferTemplate<T>::writeSamplesWithFade(ConstIterator source, int maxSamples, float fade) {
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (samplesToCopy > samplesRoomFor) {
@ -242,12 +300,16 @@ int AudioRingBuffer::writeSamplesWithFade(ConstIterator source, int maxSamples,
qCDebug(audio) << qPrintable(RING_BUFFER_OVERFLOW_DEBUG);
}
int16_t* bufferLast = _buffer + _bufferLength - 1;
Sample* bufferLast = _buffer + _bufferLength - 1;
for (int i = 0; i < samplesToCopy; i++) {
*_endOfLastWrite = (int16_t)((float)(*source) * fade);
*_endOfLastWrite = (Sample)((float)(*source) * fade);
_endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1;
++source;
}
return samplesToCopy;
}
// explicit instantiations for scratch/mix buffers
template class AudioRingBufferTemplate<int16_t>;
template class AudioRingBufferTemplate<float>;
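
The two explicit instantiations above are what let the template's member definitions stay in the .cpp file. A self-contained sketch of that pattern with hypothetical names (RingT, Int16Ring, FloatRing); only the listed specializations are compiled, and any other element type fails at link time:

#include <cstdint>

// --- what would live in the header ------------------------------------------
template <class T>
class RingT {
public:
    explicit RingT(int capacity);
    int write(const T* src, int n);     // declaration only; definition is below
private:
    int _capacity;
};
using Int16Ring = RingT<int16_t>;       // mirrors "using AudioRingBuffer = ..."
using FloatRing = RingT<float>;         // mirrors "using AudioMixRingBuffer = ..."

// --- what would live in the .cpp ---------------------------------------------
template <class T>
RingT<T>::RingT(int capacity) : _capacity(capacity) {}

template <class T>
int RingT<T>::write(const T* /*src*/, int n) { return n < _capacity ? n : _capacity; }

// explicit instantiations: only these two specializations are emitted, so the
// member definitions never need to appear in the header
template class RingT<int16_t>;
template class RingT<float>;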

View file

@ -21,15 +21,19 @@
const int DEFAULT_RING_BUFFER_FRAME_CAPACITY = 10;
class AudioRingBuffer {
template <class T>
class AudioRingBufferTemplate {
using Sample = T;
static const int SampleSize = sizeof(Sample);
public:
AudioRingBuffer(int numFrameSamples, int numFramesCapacity = DEFAULT_RING_BUFFER_FRAME_CAPACITY);
~AudioRingBuffer();
AudioRingBufferTemplate(int numFrameSamples, int numFramesCapacity = DEFAULT_RING_BUFFER_FRAME_CAPACITY);
~AudioRingBufferTemplate();
// disallow copying
AudioRingBuffer(const AudioRingBuffer&) = delete;
AudioRingBuffer(AudioRingBuffer&&) = delete;
AudioRingBuffer& operator=(const AudioRingBuffer&) = delete;
AudioRingBufferTemplate(const AudioRingBufferTemplate&) = delete;
AudioRingBufferTemplate(AudioRingBufferTemplate&&) = delete;
AudioRingBufferTemplate& operator=(const AudioRingBufferTemplate&) = delete;
/// Invalidate any data in the buffer
void clear();
@ -41,13 +45,27 @@ public:
// FIXME: discards any data in the buffer
void resizeForFrameSize(int numFrameSamples);
// Reading and writing to the buffer touch minimal shared data, so that, as long as
// writes never overwrite unread data, a single producer/consumer pair may use this
// as a lock-free pipe (see audio-client/src/AudioClient.cpp).
// IMPORTANT: avoid implementation changes that touch shared data unless you can
// maintain this behavior.
/// Read up to maxSamples into destination (will only read up to samplesAvailable())
/// Returns number of read samples
int readSamples(int16_t* destination, int maxSamples);
int readSamples(Sample* destination, int maxSamples);
/// Append up to maxSamples into destination (will only read up to samplesAvailable())
/// If append == false, behaves as readSamples
/// Returns number of appended samples
int appendSamples(Sample* destination, int maxSamples, bool append = true);
/// Skip up to maxSamples (will only skip up to samplesAvailable())
void skipSamples(int maxSamples) { shiftReadPosition(std::min(maxSamples, samplesAvailable())); }
/// Write up to maxSamples from source (will only write up to sample capacity)
/// Returns number of written samples
int writeSamples(const int16_t* source, int maxSamples);
int writeSamples(const Sample* source, int maxSamples);
/// Write up to maxSamples silent samples (will only write until other data exists in the buffer)
/// This method will not overwrite existing data in the buffer, instead dropping silent samples that would overflow
@ -58,13 +76,17 @@ public:
/// Returns number of read bytes
int readData(char* destination, int maxSize);
/// Append up to maxSize into destination
/// Returns number of read bytes
int appendData(char* destination, int maxSize);
/// Write up to maxSize from source
/// Returns number of written bytes
int writeData(const char* source, int maxSize);
/// Returns a reference to the index-th sample offset from the current read sample
int16_t& operator[](const int index) { return *shiftedPositionAccomodatingWrap(_nextOutput, index); }
const int16_t& operator[] (const int index) const { return *shiftedPositionAccomodatingWrap(_nextOutput, index); }
Sample& operator[](const int index) { return *shiftedPositionAccomodatingWrap(_nextOutput, index); }
const Sample& operator[] (const int index) const { return *shiftedPositionAccomodatingWrap(_nextOutput, index); }
/// Essentially discards the next numSamples from the ring buffer
/// NOTE: This is not checked - it is possible to shift past written data
@ -84,41 +106,104 @@ public:
class ConstIterator {
public:
ConstIterator();
ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at);
ConstIterator() :
_bufferLength(0),
_bufferFirst(NULL),
_bufferLast(NULL),
_at(NULL) {}
ConstIterator(Sample* bufferFirst, int capacity, Sample* at) :
_bufferLength(capacity),
_bufferFirst(bufferFirst),
_bufferLast(bufferFirst + capacity - 1),
_at(at) {}
ConstIterator(const ConstIterator& rhs) = default;
bool isNull() const { return _at == NULL; }
bool operator==(const ConstIterator& rhs) { return _at == rhs._at; }
bool operator!=(const ConstIterator& rhs) { return _at != rhs._at; }
const int16_t& operator*() { return *_at; }
const Sample& operator*() { return *_at; }
ConstIterator& operator=(const ConstIterator& rhs);
ConstIterator& operator++();
ConstIterator operator++(int);
ConstIterator& operator--();
ConstIterator operator--(int);
const int16_t& operator[] (int i);
ConstIterator operator+(int i);
ConstIterator operator-(int i);
ConstIterator& operator=(const ConstIterator& rhs) {
_bufferLength = rhs._bufferLength;
_bufferFirst = rhs._bufferFirst;
_bufferLast = rhs._bufferLast;
_at = rhs._at;
return *this;
}
ConstIterator& operator++() {
_at = (_at == _bufferLast) ? _bufferFirst : _at + 1;
return *this;
}
ConstIterator operator++(int) {
ConstIterator tmp(*this);
++(*this);
return tmp;
}
ConstIterator& operator--() {
_at = (_at == _bufferFirst) ? _bufferLast : _at - 1;
return *this;
}
ConstIterator operator--(int) {
ConstIterator tmp(*this);
--(*this);
return tmp;
}
const Sample& operator[] (int i) {
return *atShiftedBy(i);
}
ConstIterator operator+(int i) {
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i));
}
ConstIterator operator-(int i) {
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i));
}
void readSamples(Sample* dest, int numSamples) {
auto samplesToEnd = _bufferLast - _at + 1;
if (samplesToEnd >= numSamples) {
memcpy(dest, _at, numSamples * SampleSize);
_at += numSamples;
} else {
auto samplesFromStart = numSamples - samplesToEnd;
memcpy(dest, _at, samplesToEnd * SampleSize);
memcpy(dest + samplesToEnd, _bufferFirst, samplesFromStart * SampleSize);
_at = _bufferFirst + samplesFromStart;
}
}
void readSamplesWithFade(Sample* dest, int numSamples, float fade) {
Sample* at = _at;
for (int i = 0; i < numSamples; i++) {
*dest = (float)*at * fade;
++dest;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
}
}
void readSamples(int16_t* dest, int numSamples);
void readSamplesWithFade(int16_t* dest, int numSamples, float fade);
void readSamplesWithUpmix(int16_t* dest, int numSamples, int numExtraChannels);
void readSamplesWithDownmix(int16_t* dest, int numSamples);
private:
int16_t* atShiftedBy(int i);
Sample* atShiftedBy(int i) {
i = (_at - _bufferFirst + i) % _bufferLength;
if (i < 0) {
i += _bufferLength;
}
return _bufferFirst + i;
}
int _bufferLength;
int16_t* _bufferFirst;
int16_t* _bufferLast;
int16_t* _at;
Sample* _bufferFirst;
Sample* _bufferLast;
Sample* _at;
};
ConstIterator nextOutput() const;
ConstIterator lastFrameWritten() const;
ConstIterator nextOutput() const {
return ConstIterator(_buffer, _bufferLength, _nextOutput);
}
ConstIterator lastFrameWritten() const {
return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples;
}
int writeSamples(ConstIterator source, int maxSamples);
int writeSamplesWithFade(ConstIterator source, int maxSamples, float fade);
@ -126,8 +211,8 @@ public:
float getFrameLoudness(ConstIterator frameStart) const;
protected:
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
float getFrameLoudness(const int16_t* frameStart) const;
Sample* shiftedPositionAccomodatingWrap(Sample* position, int numSamplesShift) const;
float getFrameLoudness(const Sample* frameStart) const;
int _numFrameSamples;
int _frameCapacity;
@ -135,138 +220,13 @@ protected:
int _bufferLength; // actual _buffer length (_sampleCapacity + 1)
int _overflowCount{ 0 }; // times the ring buffer has overwritten data
int16_t* _nextOutput{ nullptr };
int16_t* _endOfLastWrite{ nullptr };
int16_t* _buffer{ nullptr };
Sample* _nextOutput{ nullptr };
Sample* _endOfLastWrite{ nullptr };
Sample* _buffer{ nullptr };
};
// inline the iterator:
inline AudioRingBuffer::ConstIterator::ConstIterator() :
_bufferLength(0),
_bufferFirst(NULL),
_bufferLast(NULL),
_at(NULL) {}
inline AudioRingBuffer::ConstIterator::ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at) :
_bufferLength(capacity),
_bufferFirst(bufferFirst),
_bufferLast(bufferFirst + capacity - 1),
_at(at) {}
inline AudioRingBuffer::ConstIterator& AudioRingBuffer::ConstIterator::operator=(const ConstIterator& rhs) {
_bufferLength = rhs._bufferLength;
_bufferFirst = rhs._bufferFirst;
_bufferLast = rhs._bufferLast;
_at = rhs._at;
return *this;
}
inline AudioRingBuffer::ConstIterator& AudioRingBuffer::ConstIterator::operator++() {
_at = (_at == _bufferLast) ? _bufferFirst : _at + 1;
return *this;
}
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator++(int) {
ConstIterator tmp(*this);
++(*this);
return tmp;
}
inline AudioRingBuffer::ConstIterator& AudioRingBuffer::ConstIterator::operator--() {
_at = (_at == _bufferFirst) ? _bufferLast : _at - 1;
return *this;
}
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator--(int) {
ConstIterator tmp(*this);
--(*this);
return tmp;
}
inline const int16_t& AudioRingBuffer::ConstIterator::operator[] (int i) {
return *atShiftedBy(i);
}
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator+(int i) {
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i));
}
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator-(int i) {
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i));
}
inline int16_t* AudioRingBuffer::ConstIterator::atShiftedBy(int i) {
i = (_at - _bufferFirst + i) % _bufferLength;
if (i < 0) {
i += _bufferLength;
}
return _bufferFirst + i;
}
inline void AudioRingBuffer::ConstIterator::readSamples(int16_t* dest, int numSamples) {
auto samplesToEnd = _bufferLast - _at + 1;
if (samplesToEnd >= numSamples) {
memcpy(dest, _at, numSamples * sizeof(int16_t));
_at += numSamples;
} else {
auto samplesFromStart = numSamples - samplesToEnd;
memcpy(dest, _at, samplesToEnd * sizeof(int16_t));
memcpy(dest + samplesToEnd, _bufferFirst, samplesFromStart * sizeof(int16_t));
_at = _bufferFirst + samplesFromStart;
}
}
inline void AudioRingBuffer::ConstIterator::readSamplesWithFade(int16_t* dest, int numSamples, float fade) {
int16_t* at = _at;
for (int i = 0; i < numSamples; i++) {
*dest = (float)*at * fade;
++dest;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
}
}
inline void AudioRingBuffer::ConstIterator::readSamplesWithUpmix(int16_t* dest, int numSamples, int numExtraChannels) {
int16_t* at = _at;
for (int i = 0; i < numSamples/2; i++) {
// read 2 samples
int16_t left = *at;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
int16_t right = *at;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
// write 2 + N samples
*dest++ = left;
*dest++ = right;
for (int n = 0; n < numExtraChannels; n++) {
*dest++ = 0;
}
}
}
inline void AudioRingBuffer::ConstIterator::readSamplesWithDownmix(int16_t* dest, int numSamples) {
int16_t* at = _at;
for (int i = 0; i < numSamples/2; i++) {
// read 2 samples
int16_t left = *at;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
int16_t right = *at;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
// write 1 sample
*dest++ = (int16_t)((left + right) / 2);
}
}
inline AudioRingBuffer::ConstIterator AudioRingBuffer::nextOutput() const {
return ConstIterator(_buffer, _bufferLength, _nextOutput);
}
inline AudioRingBuffer::ConstIterator AudioRingBuffer::lastFrameWritten() const {
return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples;
}
// aliases for the explicit instantiations (scratch/mix buffers) in AudioRingBuffer.cpp
using AudioRingBuffer = AudioRingBufferTemplate<int16_t>;
using AudioMixRingBuffer = AudioRingBufferTemplate<float>;
#endif // hifi_AudioRingBuffer_h
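
A hedged sketch of how the new appendSamples() flag composes two sources, simplifying the pattern in AudioClient::AudioOutputIODevice::readData(): with append == false the call behaves like readSamples() and overwrites the destination, with append == true the popped samples are summed on top of it. The function name and the use of two ring buffers are illustrative; the real callback converts network samples by hand rather than through a second ring. It assumes the AudioMixRingBuffer alias declared above.

#include <algorithm>

int mixTwoStreams(AudioMixRingBuffer& network, AudioMixRingBuffer& injectors,
                  float* mixBuffer, int samplesRequested) {
    // the first stream overwrites whatever was in mixBuffer (append == false)
    int networkPopped = network.appendSamples(mixBuffer, samplesRequested, false);

    // the second stream is summed on top (append == true), but never past what
    // the first stream produced; if it produced nothing, overwrite instead
    int limit = networkPopped > 0 ? networkPopped : samplesRequested;
    int injectorPopped = injectors.appendSamples(mixBuffer, limit, networkPopped > 0);

    return std::max(networkPopped, injectorPopped);
}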

View file

@ -956,68 +956,29 @@ void EntityTreeRenderer::checkAndCallPreload(const EntityItemID& entityID, const
}
}
bool EntityTreeRenderer::isCollisionOwner(const QUuid& myNodeID, EntityTreePointer entityTree,
const EntityItemID& id, const Collision& collision) {
EntityItemPointer entity = entityTree->findEntityByEntityItemID(id);
if (!entity) {
return false;
}
QUuid simulatorID = entity->getSimulatorID();
if (simulatorID.isNull()) {
// Can be null if it has never moved since being created or coming out of persistence.
// However, for there to be a collision, one of the two objects must be moving.
const EntityItemID& otherID = (id == collision.idA) ? collision.idB : collision.idA;
EntityItemPointer otherEntity = entityTree->findEntityByEntityItemID(otherID);
if (!otherEntity) {
return false;
}
simulatorID = otherEntity->getSimulatorID();
}
if (simulatorID.isNull() || (simulatorID != myNodeID)) {
return false;
}
return true;
}
void EntityTreeRenderer::playEntityCollisionSound(const QUuid& myNodeID, EntityTreePointer entityTree,
const EntityItemID& id, const Collision& collision) {
if (!isCollisionOwner(myNodeID, entityTree, id, collision)) {
return;
}
SharedSoundPointer collisionSound;
float mass = 1.0; // value doesn't get used, but set it so compiler is quiet
AACube minAACube;
bool success = false;
_tree->withReadLock([&] {
EntityItemPointer entity = entityTree->findEntityByEntityItemID(id);
if (entity) {
collisionSound = entity->getCollisionSound();
mass = entity->computeMass();
minAACube = entity->getMinimumAACube(success);
}
});
if (!success) {
return;
}
void EntityTreeRenderer::playEntityCollisionSound(EntityItemPointer entity, const Collision& collision) {
assert((bool)entity);
SharedSoundPointer collisionSound = entity->getCollisionSound();
if (!collisionSound) {
return;
}
bool success = false;
AACube minAACube = entity->getMinimumAACube(success);
if (!success) {
return;
}
float mass = entity->computeMass();
const float COLLISION_PENETRATION_TO_VELOCITY = 50; // as a substitute for RELATIVE entity->getVelocity()
const float COLLISION_PENETRATION_TO_VELOCITY = 50.0f; // as a substitute for RELATIVE entity->getVelocity()
// The collision.penetration is a pretty good indicator of changed velocity AFTER the initial contact,
// but that first contact depends on exactly where we hit in the physics step.
// We can get a more consistent initial-contact energy reading by using the changed velocity.
// Note that velocityChange is not a good indicator for continuing collisions, because it does not distinguish
// between bounce and sliding along a surface.
const float linearVelocity = (collision.type == CONTACT_EVENT_TYPE_START) ?
glm::length(collision.velocityChange) :
glm::length(collision.penetration) * COLLISION_PENETRATION_TO_VELOCITY;
const float energy = mass * linearVelocity * linearVelocity / 2.0f;
const glm::vec3 position = collision.contactPoint;
const float speedSquared = (collision.type == CONTACT_EVENT_TYPE_START) ?
glm::length2(collision.velocityChange) :
glm::length2(collision.penetration) * COLLISION_PENETRATION_TO_VELOCITY;
const float energy = mass * speedSquared / 2.0f;
const float COLLISION_ENERGY_AT_FULL_VOLUME = (collision.type == CONTACT_EVENT_TYPE_START) ? 150.0f : 5.0f;
const float COLLISION_MINIMUM_VOLUME = 0.005f;
const float energyFactorOfFull = fmin(1.0f, energy / COLLISION_ENERGY_AT_FULL_VOLUME);
@ -1031,7 +992,7 @@ void EntityTreeRenderer::playEntityCollisionSound(const QUuid& myNodeID, EntityT
// Shift the pitch down by ln(1 + (size / COLLISION_SIZE_FOR_STANDARD_PITCH)) / ln(2)
const float COLLISION_SIZE_FOR_STANDARD_PITCH = 0.2f;
const float stretchFactor = log(1.0f + (minAACube.getLargestDimension() / COLLISION_SIZE_FOR_STANDARD_PITCH)) / log(2);
AudioInjector::playSound(collisionSound, volume, stretchFactor, position);
AudioInjector::playSound(collisionSound, volume, stretchFactor, collision.contactPoint);
}
void EntityTreeRenderer::entityCollisionWithEntity(const EntityItemID& idA, const EntityItemID& idB,
@ -1041,30 +1002,28 @@ void EntityTreeRenderer::entityCollisionWithEntity(const EntityItemID& idA, cons
if (!_tree || _shuttingDown) {
return;
}
// Don't respond to small continuous contacts.
const float COLLISION_MINUMUM_PENETRATION = 0.002f;
if ((collision.type == CONTACT_EVENT_TYPE_CONTINUE) && (glm::length(collision.penetration) < COLLISION_MINUMUM_PENETRATION)) {
return;
}
// See if we should play sounds
EntityTreePointer entityTree = std::static_pointer_cast<EntityTree>(_tree);
const QUuid& myNodeID = DependencyManager::get<NodeList>()->getSessionUUID();
playEntityCollisionSound(myNodeID, entityTree, idA, collision);
playEntityCollisionSound(myNodeID, entityTree, idB, collision);
// And now the entity scripts
if (isCollisionOwner(myNodeID, entityTree, idA, collision)) {
// trigger scripted collision sounds and events for locally owned objects
EntityItemPointer entityA = entityTree->findEntityByEntityItemID(idA);
if ((bool)entityA && myNodeID == entityA->getSimulatorID()) {
playEntityCollisionSound(entityA, collision);
emit collisionWithEntity(idA, idB, collision);
if (_entitiesScriptEngine) {
_entitiesScriptEngine->callEntityScriptMethod(idA, "collisionWithEntity", idB, collision);
}
}
if (isCollisionOwner(myNodeID, entityTree, idB, collision)) {
emit collisionWithEntity(idB, idA, collision);
EntityItemPointer entityB = entityTree->findEntityByEntityItemID(idB);
if ((bool)entityB && myNodeID == entityB->getSimulatorID()) {
playEntityCollisionSound(entityB, collision);
// since we're swapping A and B we need to send the inverted collision
Collision invertedCollision(collision);
invertedCollision.invert();
emit collisionWithEntity(idB, idA, invertedCollision);
if (_entitiesScriptEngine) {
_entitiesScriptEngine->callEntityScriptMethod(idB, "collisionWithEntity", idA, collision);
_entitiesScriptEngine->callEntityScriptMethod(idB, "collisionWithEntity", idA, invertedCollision);
}
}
}
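
A worked numeric sketch of the loudness and pitch mapping in the hunk above. The constants are copied from the diff; the mass, velocity change, and size are invented, and the step from energyFactorOfFull to the final volume is elided in the hunk, so the sketch uses the capped energy factor directly.

#include <cmath>
#include <cstdio>

int main() {
    // constants copied from the hunk (CONTACT_EVENT_TYPE_START case)
    const float COLLISION_ENERGY_AT_FULL_VOLUME = 150.0f;
    const float COLLISION_SIZE_FOR_STANDARD_PITCH = 0.2f;

    // example inputs (invented): a 10 kg entity whose velocity changed by 2 m/s
    float mass = 10.0f;
    float speedSquared = 2.0f * 2.0f;                     // |velocityChange|^2
    float energy = mass * speedSquared / 2.0f;            // 20 J

    // loudness: fraction of the "full volume" energy, capped at 1
    float volume = fminf(1.0f, energy / COLLISION_ENERGY_AT_FULL_VOLUME);   // ~0.13

    // pitch: shift down by log2(1 + size / standard size) octaves
    float largestDimension = 0.4f;                        // metres (invented)
    float stretchFactor =
        logf(1.0f + largestDimension / COLLISION_SIZE_FOR_STANDARD_PITCH) / logf(2.0f); // ~1.58

    printf("volume %.2f, pitch stretch %.2f\n", volume, stretchFactor);
    return 0;
}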

View file

@ -170,11 +170,7 @@ private:
bool _wantScripts;
QSharedPointer<ScriptEngine> _entitiesScriptEngine;
bool isCollisionOwner(const QUuid& myNodeID, EntityTreePointer entityTree,
const EntityItemID& id, const Collision& collision);
void playEntityCollisionSound(const QUuid& myNodeID, EntityTreePointer entityTree,
const EntityItemID& id, const Collision& collision);
static void playEntityCollisionSound(EntityItemPointer entity, const Collision& collision);
bool _lastPointerEventValid;
PointerEvent _lastPointerEvent;

View file

@ -1376,12 +1376,14 @@ bool EntityScriptingInterface::isChildOfParent(QUuid childID, QUuid parentID) {
_entityTree->withReadLock([&] {
EntityItemPointer parent = _entityTree->findEntityByEntityItemID(parentID);
parent->forEachDescendant([&](SpatiallyNestablePointer descendant) {
if(descendant->getID() == childID) {
isChild = true;
return;
}
});
if (parent) {
parent->forEachDescendant([&](SpatiallyNestablePointer descendant) {
if (descendant->getID() == childID) {
isChild = true;
return;
}
});
}
});
return isChild;

View file

@ -1560,6 +1560,8 @@ bool EntityTree::sendEntitiesOperation(OctreeElementPointer element, void* extra
return args->map->value(oldID);
}
EntityItemID newID = QUuid::createUuid();
args->map->insert(oldID, newID);
EntityItemProperties properties = item->getProperties();
EntityItemID oldParentID = properties.getParentID();
if (oldParentID.isInvalidID()) { // no parent
@ -1575,6 +1577,43 @@ bool EntityTree::sendEntitiesOperation(OctreeElementPointer element, void* extra
}
}
if (!properties.getXNNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getXNNeighborID());
if (neighborEntity) {
properties.setXNNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getXPNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getXPNeighborID());
if (neighborEntity) {
properties.setXPNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getYNNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getYNNeighborID());
if (neighborEntity) {
properties.setYNNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getYPNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getYPNeighborID());
if (neighborEntity) {
properties.setYPNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getZNNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getZNNeighborID());
if (neighborEntity) {
properties.setZNNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getZPNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getZPNeighborID());
if (neighborEntity) {
properties.setZPNeighborID(getMapped(neighborEntity));
}
}
// set creation time to "now" for imported entities
properties.setCreated(usecTimestampNow());
@ -1592,7 +1631,6 @@ bool EntityTree::sendEntitiesOperation(OctreeElementPointer element, void* extra
args->otherTree->addEntity(newID, properties);
});
}
args->map->insert(oldID, newID);
return newID;
};
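The six neighbor blocks above all repeat the same look-up-and-remap pattern. A hypothetical helper (not part of this change; the name and generic-lambda form are assumed) could express that pattern once, roughly:
// Sketch only: condenses the repeated XN/XP/YN/YP/ZN/ZP remapping above.
auto remapNeighbor = [&](const EntityItemID& neighborID, auto setNeighborID) {
    if (!neighborID.isInvalidID()) {
        auto neighborEntity = args->ourTree->findEntityByEntityItemID(neighborID);
        if (neighborEntity) {
            setNeighborID(getMapped(neighborEntity));
        }
    }
};
remapNeighbor(properties.getXNNeighborID(), [&](const EntityItemID& id) { properties.setXNNeighborID(id); });
remapNeighbor(properties.getXPNeighborID(), [&](const EntityItemID& id) { properties.setXPNeighborID(id); });
// ...and likewise for the YN, YP, ZN and ZP neighbors.
Note that args->map->insert(oldID, newID) now runs before the neighbor look-ups, presumably so that getMapped() can resolve this entity's new ID if a neighbor refers back to it instead of recursing into it again.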

View file

@ -13,15 +13,25 @@
void ContactInfo::update(uint32_t currentStep, const btManifoldPoint& p) {
_lastStep = currentStep;
++_numSteps;
positionWorldOnB = p.m_positionWorldOnB;
normalWorldOnB = p.m_normalWorldOnB;
distance = p.m_distance1;
}
}
const uint32_t STEPS_BETWEEN_CONTINUE_EVENTS = 9;
ContactEventType ContactInfo::computeType(uint32_t thisStep) {
if (_lastStep != thisStep) {
return CONTACT_EVENT_TYPE_END;
if (_continueExpiry == 0) {
_continueExpiry = thisStep + STEPS_BETWEEN_CONTINUE_EVENTS;
return CONTACT_EVENT_TYPE_START;
}
return (_numSteps == 1) ? CONTACT_EVENT_TYPE_START : CONTACT_EVENT_TYPE_CONTINUE;
return (_lastStep == thisStep) ? CONTACT_EVENT_TYPE_CONTINUE : CONTACT_EVENT_TYPE_END;
}
bool ContactInfo::readyForContinue(uint32_t thisStep) {
if (thisStep > _continueExpiry) {
_continueExpiry = thisStep + STEPS_BETWEEN_CONTINUE_EVENTS;
return true;
}
return false;
}
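Together, computeType() and readyForContinue() let the caller always emit START and END events while limiting CONTINUE events to deep contacts at most once every STEPS_BETWEEN_CONTINUE_EVENTS steps. A simplified sketch of the calling pattern (the real loop is in the PhysicsEngine::getCollisionEvents() hunk below):
// Sketch of the intended caller, condensed from PhysicsEngine::getCollisionEvents().
ContactEventType type = contact.computeType(_numContactFrames);
const btScalar SIGNIFICANT_DEPTH = -0.002f; // penetrations have negative distance
if (type != CONTACT_EVENT_TYPE_CONTINUE ||
        (contact.distance < SIGNIFICANT_DEPTH && contact.readyForContinue(_numContactFrames))) {
    // build and queue a Collision event for this contact
}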

View file

@ -19,20 +19,22 @@
class ContactInfo {
public:
void update(uint32_t currentStep, const btManifoldPoint& p);
ContactEventType computeType(uint32_t thisStep);
const btVector3& getPositionWorldOnB() const { return positionWorldOnB; }
btVector3 getPositionWorldOnA() const { return positionWorldOnB + normalWorldOnB * distance; }
bool readyForContinue(uint32_t thisStep);
btVector3 positionWorldOnB;
btVector3 normalWorldOnB;
btScalar distance;
private:
uint32_t _lastStep = 0;
uint32_t _numSteps = 0;
};
uint32_t _lastStep { 0 };
uint32_t _continueExpiry { 0 };
};
#endif // hifi_ContactEvent_h

View file

@ -762,6 +762,11 @@ void EntityMotionState::computeCollisionGroupAndMask(int16_t& group, int16_t& ma
_entity->computeCollisionGroupAndFinalMask(group, mask);
}
bool EntityMotionState::shouldBeLocallyOwned() const {
return (_outgoingPriority > VOLUNTEER_SIMULATION_PRIORITY && _outgoingPriority > _entity->getSimulationPriority()) ||
_entity->getSimulatorID() == Physics::getSessionUUID();
}
void EntityMotionState::upgradeOutgoingPriority(uint8_t priority) {
_outgoingPriority = glm::max<uint8_t>(_outgoingPriority, priority);
}

View file

@ -78,6 +78,8 @@ public:
virtual void computeCollisionGroupAndMask(int16_t& group, int16_t& mask) const override;
bool shouldBeLocallyOwned() const override;
friend class PhysicalEntitySimulation;
protected:

View file

@ -146,6 +146,8 @@ public:
void dirtyInternalKinematicChanges() { _hasInternalKinematicChanges = true; }
void clearInternalKinematicChanges() { _hasInternalKinematicChanges = false; }
virtual bool shouldBeLocallyOwned() const { return false; }
friend class PhysicsEngine;
protected:

View file

@ -270,7 +270,7 @@ void PhysicsEngine::stepSimulation() {
}
auto onSubStep = [this]() {
updateContactMap();
this->updateContactMap();
};
int numSubsteps = _dynamicsWorld->stepSimulationWithSubstepCallback(timeStep, PHYSICS_ENGINE_MAX_NUM_SUBSTEPS,
@ -393,7 +393,6 @@ void PhysicsEngine::updateContactMap() {
}
const CollisionEvents& PhysicsEngine::getCollisionEvents() {
const uint32_t CONTINUE_EVENT_FILTER_FREQUENCY = 10;
_collisionEvents.clear();
// scan known contacts and trigger events
@ -402,28 +401,42 @@ const CollisionEvents& PhysicsEngine::getCollisionEvents() {
while (contactItr != _contactMap.end()) {
ContactInfo& contact = contactItr->second;
ContactEventType type = contact.computeType(_numContactFrames);
if(type != CONTACT_EVENT_TYPE_CONTINUE || _numSubsteps % CONTINUE_EVENT_FILTER_FREQUENCY == 0) {
const btScalar SIGNIFICANT_DEPTH = -0.002f; // penetrations have negative distance
if (type != CONTACT_EVENT_TYPE_CONTINUE ||
(contact.distance < SIGNIFICANT_DEPTH &&
contact.readyForContinue(_numContactFrames))) {
ObjectMotionState* motionStateA = static_cast<ObjectMotionState*>(contactItr->first._a);
ObjectMotionState* motionStateB = static_cast<ObjectMotionState*>(contactItr->first._b);
glm::vec3 velocityChange = (motionStateA ? motionStateA->getObjectLinearVelocityChange() : glm::vec3(0.0f)) +
(motionStateB ? motionStateB->getObjectLinearVelocityChange() : glm::vec3(0.0f));
if (motionStateA) {
// NOTE: the MyAvatar RigidBody is the only object in the simulation that does NOT have a MotionState,
// which means that if we ever want to report ALL collision events against the avatar we can
// modify the logic below.
//
// We only create events when at least one of the objects is (or should be) owned in the local simulation.
if (motionStateA && (motionStateA->shouldBeLocallyOwned())) {
QUuid idA = motionStateA->getObjectID();
QUuid idB;
if (motionStateB) {
idB = motionStateB->getObjectID();
}
glm::vec3 position = bulletToGLM(contact.getPositionWorldOnB()) + _originOffset;
glm::vec3 velocityChange = motionStateA->getObjectLinearVelocityChange() +
(motionStateB ? motionStateB->getObjectLinearVelocityChange() : glm::vec3(0.0f));
glm::vec3 penetration = bulletToGLM(contact.distance * contact.normalWorldOnB);
_collisionEvents.push_back(Collision(type, idA, idB, position, penetration, velocityChange));
} else if (motionStateB) {
} else if (motionStateB && (motionStateB->shouldBeLocallyOwned())) {
QUuid idB = motionStateB->getObjectID();
QUuid idA;
if (motionStateA) {
idA = motionStateA->getObjectID();
}
glm::vec3 position = bulletToGLM(contact.getPositionWorldOnA()) + _originOffset;
glm::vec3 velocityChange = motionStateB->getObjectLinearVelocityChange() +
(motionStateA ? motionStateA->getObjectLinearVelocityChange() : glm::vec3(0.0f));
// NOTE: we're flipping the order of A and B (so that the first objectID is never NULL)
// hence we must negate the penetration.
// hence we negate the penetration (because penetration always points from B to A).
glm::vec3 penetration = - bulletToGLM(contact.distance * contact.normalWorldOnB);
_collisionEvents.push_back(Collision(type, idB, QUuid(), position, penetration, velocityChange));
_collisionEvents.push_back(Collision(type, idB, idA, position, penetration, velocityChange));
}
}

View file

@ -742,6 +742,12 @@ void collisionFromScriptValue(const QScriptValue &object, Collision& collision)
// TODO: implement this when we know what it means to accept collision events from JS
}
void Collision::invert() {
std::swap(idA, idB);
contactPoint += penetration;
penetration *= -1.0f;
}
QScriptValue quuidToScriptValue(QScriptEngine* engine, const QUuid& uuid) {
if (uuid.isNull()) {
return QScriptValue::NullValue;

View file

@ -142,11 +142,13 @@ public:
const glm::vec3& cPenetration, const glm::vec3& velocityChange)
: type(cType), idA(cIdA), idB(cIdB), contactPoint(cPoint), penetration(cPenetration), velocityChange(velocityChange) { }
void invert(); // swap A and B
ContactEventType type;
QUuid idA;
QUuid idB;
glm::vec3 contactPoint;
glm::vec3 penetration;
glm::vec3 contactPoint; // on B in world-frame
glm::vec3 penetration; // from B towards A in world-frame
glm::vec3 velocityChange;
};
Q_DECLARE_METATYPE(Collision)
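A minimal sketch of how invert() is meant to be used, mirroring the EntityTreeRenderer change earlier in this diff: when the same contact is reported from B's point of view, copy the collision and flip it so the IDs, contact point, and penetration stay consistent.
// Report the contact to entity B with the roles swapped.
Collision invertedCollision(collision);
invertedCollision.invert(); // swaps idA/idB, negates penetration, moves contactPoint onto A
_entitiesScriptEngine->callEntityScriptMethod(idB, "collisionWithEntity", idA, invertedCollision);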

View file

@ -1,54 +1,106 @@
function filter(p) {
/* block comments are ok, but not double-slash end-of-line-comments */
/******************************************************/
/* General Filter Comments
/*
- Custom filters must be named "filter" and must be global
- Block comments are ok, but not double-slash end-of-line-comments
- Certain JavaScript functions, like Math.sign(), are not available because they are undefined in Qt's non-conforming JS
- HiFi's scripting interface is unavailable here. That means you can't call, for example, Users.*()
*/
/******************************************************/
/******************************************************/
/* Simple Filter Examples
/******************************************************/
/* Simple example: if someone specifies name, add an 'x' to it. Note that print is ok to use. */
if (p.name) {p.name += 'x'; print('fixme name', p.name);}
/* This example clamps y. A better filter would probably zero the y components of velocity and acceleration. */
if (p.position) {p.position.y = Math.min(1, p.position.y); print('fixme p.y', p.position.y);}
/* Can also reject altogether */
/* Can also reject new properties altogether by returning false */
if (p.userData) { return false; }
/* Reject if modifications made to Model properties */
if (p.modelURL || p.compoundShapeURL || p.shape || p.shapeType || p.url || p.fps || p.currentFrame || p.running || p.loop || p.firstFrame || p.lastFrame || p.hold || p.textures || p.xTextureURL || p.yTextureURL || p.zTextureURL) { return false; }
/******************************************************/
/* Physical Property Filter Examples
/*
NOTES about filtering physical properties:
- For now, ensure you always supply a new value for the filtered physical property
(instead of simply removing the property)
- Ensure you always specify a slightly different value for physical properties every
time your filter returns (look to "var nearZero" below for an example).
This is necessary because Interface checks if a physical property has changed
when deciding whether to apply or reject the server's physical properties.
If a physical property's value doesn't change, Interface will reject the server's property value,
and Bullet will continue simulating the entity with stale physical properties.
Take care not to change the value by so small an amount that the new values
fall within floating-point precision of the old ones. If you accidentally do this, prepare for many
hours of frustrating debugging :).
*/
/******************************************************/
/* Clamp velocity to maxVelocity units/second. Zeroing each component of acceleration keeps us from slamming.*/
var maxVelocity = 5;
if (p.velocity) {
var maxVelocity = 5;
/* Random near-zero value used as "zero" to prevent two sequential updates from being
exactly the same (which would cause them to be ignored) */
var nearZero = 0.0001 * Math.random() + 0.001;
function sign(val) {
if (val > 0) {
return 1;
} else if (val < 0) {
return -1;
} else {
return 0;
}
}
if (Math.abs(p.velocity.x) > maxVelocity) {
p.velocity.x = Math.sign(p.velocity.x) * maxVelocity;
p.acceleration.x = 0;
p.velocity.x = sign(p.velocity.x) * (maxVelocity + nearZero);
p.acceleration.x = nearZero;
}
if (Math.abs(p.velocity.y) > maxVelocity) {
p.velocity.y = Math.sign(p.velocity.y) * maxVelocity;
p.acceleration.y = 0;
p.velocity.y = sign(p.velocity.y) * (maxVelocity + nearZero);
p.acceleration.y = nearZero;
}
if (Math.abs(p.velocity.z) > maxVelocity) {
p.velocity.z = Math.sign(p.velocity.z) * maxVelocity;
p.acceleration.z = 0;
p.velocity.z = sign(p.velocity.z) * (maxVelocity + nearZero);
p.acceleration.z = nearZero;
}
}
/* Define an axis-aligned zone that entities are not allowed to enter. */
/* This example zone corresponds to an area to the right of the spawnpoint
in your Sandbox, near the big rock. If an entity
enters the zone, it'll be moved behind the rock.*/
var boxMin = {x: 25.5, y: -0.48, z: -9.9};
var boxMax = {x: 31.1, y: 4, z: -3.79};
var zero = {x: 0.0, y: 0.0, z: 0.0};
if (p.position) {
/* Random near-zero value used as "zero" to prevent two sequential updates from being
exactly the same (which would cause them to be ignored) */
var nearZero = 0.0001 * Math.random() + 0.001;
/* Define the points that create the "NO ENTITIES ALLOWED" box */
var boxMin = {x: 25.5, y: -0.48, z: -9.9};
var boxMax = {x: 31.1, y: 4, z: -3.79};
/* Define the point where you want entities that enter the box to appear */
var resetPoint = {x: 29.5, y: 0.37 + nearZero, z: -2};
var x = p.position.x;
var y = p.position.y;
var z = p.position.z;
if ((x > boxMin.x && x < boxMax.x) &&
(y > boxMin.y && y < boxMax.y) &&
(z > boxMin.z && z < boxMax.z)) {
/* Move it to the origin of the zone */
p.position = boxMin;
p.velocity = zero;
p.acceleration = zero;
p.position = resetPoint;
if (p.velocity) {
p.velocity = {x: 0, y: nearZero, z: 0};
}
if (p.acceleration) {
p.acceleration = {x: 0, y: nearZero, z: 0};
}
}
}