Merge branch 'master' of github.com:highfidelity/hifi into motor-action

commit 9184936496
Seth Alves committed 2017-05-10 09:30:39 -07:00
14 changed files with 428 additions and 156 deletions

View file

@@ -35,6 +35,11 @@
     { "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },
     { "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
-    { "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] }
+    { "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },
+    { "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot", "when": [ "Application.InHMD"] },
+    { "from": "Vive.RightFoot", "to" : "Standard.RightFoot", "when": [ "Application.InHMD"] },
+    { "from": "Vive.Hips", "to" : "Standard.Hips", "when": [ "Application.InHMD"] },
+    { "from": "Vive.Spine2", "to" : "Standard.Spine2", "when": [ "Application.InHMD"] },
+    { "from": "Vive.Head", "to" : "Standard.Head", "when" : [ "Application.InHMD"] }
     ]
 }

View file

@@ -844,7 +844,7 @@ Rectangle {
     boxSize: 24;
     onClicked: {
         var newValue = model.connection !== "friend";
-        connectionsUserModel.setProperty(model.userIndex, styleData.role, newValue);
+        connectionsUserModel.setProperty(model.userIndex, styleData.role, (newValue ? "friend" : "connection"));
         connectionsUserModelData[model.userIndex][styleData.role] = newValue; // Defensive programming
         pal.sendToScript({method: newValue ? 'addFriend' : 'removeFriend', params: model.userName});

View file

@@ -1688,7 +1688,6 @@ void Application::updateHeartbeat() const {
 void Application::aboutToQuit() {
     emit beforeAboutToQuit();
-    DependencyManager::get<AudioClient>()->beforeAboutToQuit();

     foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
         if (inputPlugin->isActive()) {

@@ -1789,14 +1788,13 @@ void Application::cleanupBeforeQuit() {
         _snapshotSoundInjector->stop();
     }

-    // stop audio after QML, as there are unexplained audio crashes originating in qtwebengine
-    // stop the AudioClient, synchronously
+    // FIXME: something else is holding a reference to AudioClient,
+    // so it must be explicitly synchronously stopped here
     QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
-        "stop", Qt::BlockingQueuedConnection);
+        "cleanupBeforeQuit", Qt::BlockingQueuedConnection);

     // destroy Audio so it and its threads have a chance to go down safely
+    // this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
     DependencyManager::destroy<AudioClient>();
     DependencyManager::destroy<AudioInjectorManager>();

View file

@@ -76,42 +76,58 @@ using Mutex = std::mutex;
 using Lock = std::unique_lock<Mutex>;
 static Mutex _deviceMutex;

-// background thread that continuously polls for device changes
-class CheckDevicesThread : public QThread {
+class BackgroundThread : public QThread {
 public:
-    const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000;
+    BackgroundThread(AudioClient* client) : QThread((QObject*)client), _client(client) {}
+    virtual void join() = 0;
+protected:
+    AudioClient* _client;
+};

-    CheckDevicesThread(AudioClient* audioClient)
-        : _audioClient(audioClient) {
-    }
+// background thread continuously polling device changes
+class CheckDevicesThread : public BackgroundThread {
+public:
+    CheckDevicesThread(AudioClient* client) : BackgroundThread(client) {}

-    void beforeAboutToQuit() {
-        Lock lock(_checkDevicesMutex);
-        _quit = true;
+    void join() override {
+        _shouldQuit = true;
+        std::unique_lock<std::mutex> lock(_joinMutex);
+        _joinCondition.wait(lock, [&]{ return !_isRunning; });
     }

+protected:
     void run() override {
-        while (true) {
-            {
-                Lock lock(_checkDevicesMutex);
-                if (_quit) {
-                    break;
-                }
-                _audioClient->checkDevices();
-            }
+        while (!_shouldQuit) {
+            _client->checkDevices();
+
+            const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000;
             QThread::msleep(DEVICE_CHECK_INTERVAL_MSECS);
         }
+
+        std::lock_guard<std::mutex> lock(_joinMutex);
+        _isRunning = false;
+        _joinCondition.notify_one();
     }

 private:
-    AudioClient* _audioClient { nullptr };
-    Mutex _checkDevicesMutex;
-    bool _quit { false };
+    std::atomic<bool> _shouldQuit { false };
+    bool _isRunning { true };
+    std::mutex _joinMutex;
+    std::condition_variable _joinCondition;
 };

-void AudioInjectorsThread::prepare() {
-    _audio->prepareLocalAudioInjectors();
-}
+// background thread buffering local injectors
+class LocalInjectorsThread : public BackgroundThread {
+    Q_OBJECT
+public:
+    LocalInjectorsThread(AudioClient* client) : BackgroundThread(client) {}
+
+    void join() override { return; }
+
+private slots:
+    void prepare() { _client->prepareLocalAudioInjectors(); }
+};
+
+#include "AudioClient.moc"

 static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
     for (int i = 0; i < numSamples/2; i++) {
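Note on the thread classes above: the old CheckDevicesThread guarded a plain _quit flag with a mutex but had no way to wait for the loop to actually finish, while the new BackgroundThread::join() uses an atomic flag plus a condition variable so shutdown blocks until run() has completed its last iteration. A minimal sketch of the same shutdown pattern using only the standard library (class and member names here are illustrative, not part of the commit):

    #include <atomic>
    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    class PollingWorker {
    public:
        void start() { _thread = std::thread([this] { run(); }); }

        // ask the loop to stop, then wait until run() has signalled completion
        void join() {
            _shouldQuit = true;
            {
                std::unique_lock<std::mutex> lock(_joinMutex);
                _joinCondition.wait(lock, [this] { return !_isRunning; });
            }
            _thread.join();
        }

    private:
        void run() {
            while (!_shouldQuit) {
                poll();    // the periodic work, e.g. checking audio devices
                std::this_thread::sleep_for(std::chrono::seconds(2));
            }
            std::lock_guard<std::mutex> lock(_joinMutex);
            _isRunning = false;
            _joinCondition.notify_one();
        }

        void poll() { /* placeholder for the periodic work */ }

        std::thread _thread;
        std::atomic<bool> _shouldQuit { false };
        bool _isRunning { true };
        std::mutex _joinMutex;
        std::condition_variable _joinCondition;
    };

As in the committed code, the quit flag is only polled once per iteration, so join() can still block for up to one sleep interval.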
@@ -179,7 +195,6 @@ AudioClient::AudioClient() :
     _inputToNetworkResampler(NULL),
     _networkToOutputResampler(NULL),
     _localToOutputResampler(NULL),
-    _localAudioThread(this),
     _audioLimiter(AudioConstants::SAMPLE_RATE, OUTPUT_CHANNEL_COUNT),
     _outgoingAvatarAudioSequenceNumber(0),
     _audioOutputIODevice(_localInjectorsStream, _receivedAudioStream, this),

@@ -210,13 +225,14 @@ AudioClient::AudioClient() :
     // start a thread to detect any device changes
     _checkDevicesThread = new CheckDevicesThread(this);
-    _checkDevicesThread->setObjectName("CheckDevices Thread");
+    _checkDevicesThread->setObjectName("AudioClient CheckDevices Thread");
     _checkDevicesThread->setPriority(QThread::LowPriority);
     _checkDevicesThread->start();

     // start a thread to process local injectors
-    _localAudioThread.setObjectName("LocalAudio Thread");
-    _localAudioThread.start();
+    _localInjectorsThread = new LocalInjectorsThread(this);
+    _localInjectorsThread->setObjectName("AudioClient LocalInjectors Thread");
+    _localInjectorsThread->start();

     configureReverb();

@@ -231,18 +247,32 @@ AudioClient::AudioClient() :
 }

 AudioClient::~AudioClient() {
-    delete _checkDevicesThread;
-    stop();

     if (_codec && _encoder) {
         _codec->releaseEncoder(_encoder);
         _encoder = nullptr;
     }
 }

-void AudioClient::beforeAboutToQuit() {
-    static_cast<CheckDevicesThread*>(_checkDevicesThread)->beforeAboutToQuit();
+void AudioClient::customDeleter() {
+    deleteLater();
 }

+void AudioClient::cleanupBeforeQuit() {
+    // FIXME: this should be put in customDeleter, but there is still a reference to this when it is called,
+    // so this must be explicitly, synchronously stopped
+    stop();
+
+    if (_checkDevicesThread) {
+        static_cast<BackgroundThread*>(_checkDevicesThread)->join();
+        delete _checkDevicesThread;
+    }
+
+    if (_localInjectorsThread) {
+        static_cast<BackgroundThread*>(_localInjectorsThread)->join();
+        delete _localInjectorsThread;
+    }
+}
+
 void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
     qCDebug(audioclient) << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;

@@ -1096,11 +1126,19 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
     handleAudioInput(audioBuffer);
 }

-void AudioClient::prepareLocalAudioInjectors() {
+void AudioClient::prepareLocalAudioInjectors(std::unique_ptr<Lock> localAudioLock) {
+    bool doSynchronously = localAudioLock.operator bool();
+    if (!localAudioLock) {
+        localAudioLock.reset(new Lock(_localAudioMutex));
+    }
+
     int samplesNeeded = std::numeric_limits<int>::max();
     while (samplesNeeded > 0) {
-        // unlock between every write to allow device switching
-        Lock lock(_localAudioMutex);
+        if (!doSynchronously) {
+            // unlock between every write to allow device switching
+            localAudioLock->unlock();
+            localAudioLock->lock();
+        }

         // in case of a device switch, consider bufferCapacity volatile across iterations
         if (_outputPeriod == 0) {
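Note on the new optional-lock parameter: when the device callback needs samples immediately it passes in a lock it already holds and the function keeps it for the whole call, while the background LocalInjectorsThread passes nothing and the function acquires the mutex itself, dropping it briefly between writes so a device switch can get in. A small sketch of that pattern in isolation (names are illustrative, not from the commit):

    #include <memory>
    #include <mutex>

    using Mutex = std::mutex;
    using Lock = std::unique_lock<Mutex>;

    static Mutex bufferMutex;

    // if `lock` is empty, acquire the mutex locally and allow it to be released
    // between iterations; if a held lock is passed in, keep it for the whole call
    void fillBuffer(std::unique_ptr<Lock> lock = nullptr) {
        bool synchronous = static_cast<bool>(lock);
        if (!lock) {
            lock.reset(new Lock(bufferMutex));
        }

        for (int pass = 0; pass < 4; ++pass) {
            if (!synchronous) {
                // give a device switch a chance to take the mutex between writes
                lock->unlock();
                lock->lock();
            }
            // ... write one chunk into the shared buffer ...
        }
    }   // the lock (local or passed-in) is released here

The synchronous caller in readData() further down constructs its lock with std::try_to_lock and only calls in when owns_lock() succeeds, so a device that is shutting down is never blocked on.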
@@ -1154,16 +1192,16 @@ void AudioClient::prepareLocalAudioInjectors() {
 }

 bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
-    QVector<AudioInjector*> injectorsToRemove;
-
-    // lock the injector vector
-    Lock lock(_injectorsMutex);
-
-    if (_activeLocalAudioInjectors.size() == 0) {
+    // check the flag for injectors before attempting to lock
+    if (!_localInjectorsAvailable.load(std::memory_order_acquire)) {
         return false;
     }

+    // lock the injectors
+    Lock lock(_injectorsMutex);
+
+    QVector<AudioInjector*> injectorsToRemove;
+
     memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));

     for (AudioInjector* injector : _activeLocalAudioInjectors) {

@@ -1242,6 +1280,9 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
         _activeLocalAudioInjectors.removeOne(injector);
     }

+    // update the flag
+    _localInjectorsAvailable.exchange(!_activeLocalAudioInjectors.empty(), std::memory_order_release);
+
     return true;
 }

@@ -1328,7 +1369,10 @@ bool AudioClient::outputLocalInjector(AudioInjector* injector) {
         // move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop())
         injectorBuffer->setParent(nullptr);
-        injectorBuffer->moveToThread(&_localAudioThread);
+        injectorBuffer->moveToThread(_localInjectorsThread);
+
+        // update the flag
+        _localInjectorsAvailable.exchange(true, std::memory_order_release);
     } else {
         qCDebug(audioclient) << "injector exists in active list already";
     }

@@ -1455,7 +1499,7 @@ void AudioClient::outputNotify() {
 bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
     bool supportedFormat = false;

-    Lock lock(_localAudioMutex);
+    Lock localAudioLock(_localAudioMutex);
     _localSamplesAvailable.exchange(0, std::memory_order_release);

     // cleanup any previously initialized device

@@ -1525,14 +1569,23 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
         connect(_audioOutput, &QAudioOutput::stateChanged, [&, frameSize, requestedSize](QAudio::State state) {
             if (state == QAudio::ActiveState) {
                 // restrict device callback to _outputPeriod samples
-                _outputPeriod = (_audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE) * 2;
+                _outputPeriod = _audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE;
+                // device callback may exceed reported period, so double it to avoid stutter
+                _outputPeriod *= 2;
+
                 _outputMixBuffer = new float[_outputPeriod];
                 _outputScratchBuffer = new int16_t[_outputPeriod];
+
                 // size local output mix buffer based on resampled network frame size
-                _networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
-                _localOutputMixBuffer = new float[_networkPeriod];
+                int networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
+                _localOutputMixBuffer = new float[networkPeriod];
+
+                // local period should be at least twice the output period,
+                // in case two device reads happen before more data can be read (worst case)
                 int localPeriod = _outputPeriod * 2;
+                // round up to an exact multiple of networkPeriod
+                localPeriod = ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod;
+                // this ensures lowest latency without stutter from underrun
                 _localInjectorsStream.resizeForFrameSize(localPeriod);

                 int bufferSize = _audioOutput->bufferSize();
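Note on the buffer sizing above: rounding localPeriod up to a whole number of networkPeriods keeps the local injectors stream an exact multiple of the resampled network frame, so a full frame always fits with no partial-frame gap. A quick worked example with made-up values (the real numbers depend on the device period and the resampler):

    #include <cassert>

    int main() {
        int outputPeriod = 780;     // hypothetical device callback size, in samples
        int networkPeriod = 512;    // hypothetical resampled network frame size

        int localPeriod = outputPeriod * 2;                                    // 1560
        localPeriod = ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod;

        assert(localPeriod == 2048);                 // 4 * 512, smallest multiple >= 1560
        assert(localPeriod % networkPeriod == 0);
        return 0;
    }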
@@ -1547,6 +1600,9 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
                 qCDebug(audioclient) << "local buffer (samples):" << localPeriod;

                 disconnect(_audioOutput, &QAudioOutput::stateChanged, 0, 0);
+
+                // unlock to avoid a deadlock with the device callback (which always succeeds this initialization)
+                localAudioLock.unlock();
             }
         });
         connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);

@@ -1685,12 +1741,24 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
     int injectorSamplesPopped = 0;
     {
         bool append = networkSamplesPopped > 0;
-        // this does not require a lock as of the only two functions adding to _localSamplesAvailable (samples count):
+        // check the samples we have available locklessly; this is possible because only two functions add to the count:
         // - prepareLocalAudioInjectors will only increase samples count
-        // - switchOutputToAudioDevice will zero samples count
-        //   stop the device, so that readData will exhaust the existing buffer or see a zeroed samples count
-        //   and start the device, which can only see a zeroed samples count
-        samplesRequested = std::min(samplesRequested, _audio->_localSamplesAvailable.load(std::memory_order_acquire));
+        // - switchOutputToAudioDevice will zero samples count,
+        //   stop the device - so that readData will exhaust the existing buffer or see a zeroed samples count,
+        //   and start the device - which can then only see a zeroed samples count
+        int samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire);
+
+        // if we do not have enough samples buffered despite having injectors, buffer them synchronously
+        if (samplesAvailable < samplesRequested && _audio->_localInjectorsAvailable.load(std::memory_order_acquire)) {
+            // try_to_lock, in case the device is being shut down already
+            std::unique_ptr<Lock> localAudioLock(new Lock(_audio->_localAudioMutex, std::try_to_lock));
+            if (localAudioLock->owns_lock()) {
+                _audio->prepareLocalAudioInjectors(std::move(localAudioLock));
+                samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire);
+            }
+        }
+
+        samplesRequested = std::min(samplesRequested, samplesAvailable);
         if ((injectorSamplesPopped = _localInjectorsStream.appendSamples(mixBuffer, samplesRequested, append)) > 0) {
             _audio->_localSamplesAvailable.fetch_sub(injectorSamplesPopped, std::memory_order_release);
             qCDebug(audiostream, "Read %d samples from injectors (%d available, %d requested)", injectorSamplesPopped, _localInjectorsStream.samplesAvailable(), samplesRequested);

@@ -1698,7 +1766,7 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
     }

     // prepare injectors for the next callback
-    QMetaObject::invokeMethod(&_audio->_localAudioThread, "prepare", Qt::QueuedConnection);
+    QMetaObject::invokeMethod(_audio->_localInjectorsThread, "prepare", Qt::QueuedConnection);

     int samplesPopped = std::max(networkSamplesPopped, injectorSamplesPopped);
     int framesPopped = samplesPopped / AudioConstants::STEREO;
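Note on the lockless read above: _localSamplesAvailable can be read without taking _localAudioMutex because there is a single consumer (the device callback) and the producer only publishes the count with a release store after the samples are actually in the stream; the consumer's acquire load then guarantees those writes are visible. A stripped-down sketch of that handshake (buffer details omitted, names illustrative):

    #include <atomic>

    std::atomic<int> samplesAvailable { 0 };

    // producer thread: write samples into the stream first, then publish the count
    void producerAppend(int written) {
        // ... append `written` samples to the lock-free stream ...
        samplesAvailable.fetch_add(written, std::memory_order_release);
    }

    // consumer (device callback): acquire-load the count, then pop at most that many
    int consumerPop(int requested) {
        int available = samplesAvailable.load(std::memory_order_acquire);
        int toRead = requested < available ? requested : available;
        // ... pop `toRead` samples from the stream ...
        samplesAvailable.fetch_sub(toRead, std::memory_order_release);
        return toRead;
    }

switchOutputToAudioDevice() is the one exception: it zeroes the count, but only while holding _localAudioMutex and with the device stopped, which is what the comment's start/stop ordering spells out.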

View file

@@ -71,19 +71,6 @@ class QIODevice;
 class Transform;
 class NLPacket;

-class AudioInjectorsThread : public QThread {
-    Q_OBJECT
-
-public:
-    AudioInjectorsThread(AudioClient* audio) : _audio(audio) {}
-
-public slots :
-    void prepare();
-
-private:
-    AudioClient* _audio;
-};
-
 class AudioClient : public AbstractAudioInterface, public Dependency {
     Q_OBJECT
     SINGLETON_DEPENDENCY

@@ -158,7 +145,7 @@ public:
     Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);

-    void checkDevices();
+    bool outputLocalInjector(AudioInjector* injector) override;

     static const float CALLBACK_ACCELERATOR_RATIO;

@@ -169,6 +156,7 @@ public:
 public slots:
     void start();
     void stop();
+    void cleanupBeforeQuit();

     void handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message);
     void handleAudioDataPacket(QSharedPointer<ReceivedMessage> message);

@@ -184,8 +172,6 @@ public slots:
     void audioMixerKilled();
     void toggleMute();

-    void beforeAboutToQuit();
-
     virtual void setIsStereoInput(bool stereo) override;

     void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }

@@ -198,8 +184,6 @@ public slots:
     int setOutputBufferSize(int numFrames, bool persist = true);

-    void prepareLocalAudioInjectors();
-    bool outputLocalInjector(AudioInjector* injector) override;
     bool shouldLoopbackInjectors() override { return _shouldEchoToServer; }

     bool switchInputToAudioDevice(const QString& inputDeviceName);

@@ -242,13 +226,16 @@ protected:
     AudioClient();
     ~AudioClient();

-    virtual void customDeleter() override {
-        deleteLater();
-    }
+    virtual void customDeleter() override;

 private:
+    friend class CheckDevicesThread;
+    friend class LocalInjectorsThread;
+
     void outputFormatChanged();
     void handleAudioInput(QByteArray& audioBuffer);
+    void checkDevices();
+    void prepareLocalAudioInjectors(std::unique_ptr<Lock> localAudioLock = nullptr);
     bool mixLocalAudioInjectors(float* mixBuffer);
     float azimuthForSource(const glm::vec3& relativePosition);
     float gainForSource(float distance, float volume);

@@ -295,8 +282,9 @@ private:
     AudioRingBuffer _inputRingBuffer;
     LocalInjectorsStream _localInjectorsStream;
     // In order to use _localInjectorsStream as a lock-free pipe,
-    // use it with a single producer/consumer, and track available samples
+    // use it with a single producer/consumer, and track available samples and injectors
     std::atomic<int> _localSamplesAvailable { 0 };
+    std::atomic<bool> _localInjectorsAvailable { false };
     MixedProcessedAudioStream _receivedAudioStream;
     bool _isStereoInput;

@@ -337,19 +325,17 @@ private:
     // for network audio (used by network audio thread)
     int16_t _networkScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];

-    // for local audio (used by audio injectors thread)
-    int _networkPeriod { 0 };
-    float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
-    int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
-    float* _localOutputMixBuffer { NULL };
-    AudioInjectorsThread _localAudioThread;
-    Mutex _localAudioMutex;
-
     // for output audio (used by this thread)
     int _outputPeriod { 0 };
     float* _outputMixBuffer { NULL };
     int16_t* _outputScratchBuffer { NULL };

+    // for local audio (used by audio injectors thread)
+    float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
+    int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
+    float* _localOutputMixBuffer { NULL };
+    Mutex _localAudioMutex;
+
     AudioLimiter _audioLimiter;

     // Adds Reverb

@@ -392,12 +378,13 @@ private:
     QString _selectedCodecName;
     Encoder* _encoder { nullptr }; // for outbound mic stream

-    QThread* _checkDevicesThread { nullptr };
-
     RateCounter<> _silentOutbound;
     RateCounter<> _audioOutbound;
     RateCounter<> _silentInbound;
     RateCounter<> _audioInbound;

+    QThread* _checkDevicesThread { nullptr };
+    QThread* _localInjectorsThread { nullptr };
 };

View file

@@ -32,12 +32,12 @@
         const Transform& transform, glm::vec3 avatarBoundingBoxCorner, glm::vec3 avatarBoundingBoxScale,
         PacketType packetType, QString codecName = QString(""));

-public slots:
     // threadsafe
     // moves injector->getLocalBuffer() to another thread (so removes its parent)
     // take care to delete it when ~AudioInjector, as parenting Qt semantics will not work
     virtual bool outputLocalInjector(AudioInjector* injector) = 0;

+public slots:
     virtual bool shouldLoopbackInjectors() { return false; }

     virtual void setIsStereoInput(bool stereo) = 0;

View file

@@ -369,7 +369,9 @@ void Avatar::simulate(float deltaTime, bool inView) {
         PerformanceTimer perfTimer("simulate");
         {
             PROFILE_RANGE(simulation, "updateJoints");
-            if (inView && _hasNewJointData) {
+            if (inView) {
+                Head* head = getHead();
+                if (_hasNewJointData) {
                     _skeletonModel->getRig()->copyJointsFromJointData(_jointData);
                     glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
                     _skeletonModel->getRig()->computeExternalPoses(rootTransform);

@@ -384,8 +386,8 @@
                     if (!_skeletonModel->getHeadPosition(headPosition)) {
                         headPosition = getPosition();
                     }
-                Head* head = getHead();
                     head->setPosition(headPosition);
+                }
                 head->setScale(getUniformScale());
                 head->simulate(deltaTime);
             } else {

View file

@@ -89,8 +89,7 @@ void Head::simulate(float deltaTime) {
     _timeWithoutTalking += deltaTime;
     if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) {
         _timeWithoutTalking = 0.0f;
-    } else if (_timeWithoutTalking < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
+    } else if (_timeWithoutTalking - deltaTime < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
         forceBlink = true;
     }
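Note on the Head.cpp change above: the old condition required _timeWithoutTalking to be both strictly less than and at least BLINK_AFTER_TALKING at the same time, so it could never be true and the post-talking blink never fired. Subtracting deltaTime from the left side makes the branch trigger exactly on the frame where the accumulated silence crosses the threshold. A tiny check with made-up values:

    #include <cassert>

    int main() {
        const float BLINK_AFTER_TALKING = 0.25f;    // hypothetical threshold, in seconds
        float deltaTime = 0.1f;

        // silence was 0.2s before this frame, 0.3s after it
        float timeWithoutTalking = 0.2f + deltaTime;

        bool oldCondition = timeWithoutTalking < BLINK_AFTER_TALKING &&
                            timeWithoutTalking >= BLINK_AFTER_TALKING;      // always false
        bool newCondition = timeWithoutTalking - deltaTime < BLINK_AFTER_TALKING &&
                            timeWithoutTalking >= BLINK_AFTER_TALKING;      // true only on the crossing frame

        assert(!oldCondition);
        assert(newCondition);
        return 0;
    }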

View file

@@ -273,10 +273,9 @@ std::tuple<bool, QByteArray> requestData(QUrl& url) {
         return std::make_tuple(false, QByteArray());
     }

-    request->send();
-
     QEventLoop loop;
     QObject::connect(request, &ResourceRequest::finished, &loop, &QEventLoop::quit);
+    request->send();
     loop.exec();

     if (request->getResult() == ResourceRequest::Success) {

View file

@@ -250,6 +250,10 @@ static void addButtonProxyToQmlTablet(QQuickItem* qmlTablet, TabletButtonProxy*
     if (QThread::currentThread() != qmlTablet->thread()) {
         connectionType = Qt::BlockingQueuedConnection;
     }
+    if (buttonProxy == NULL){
+        qCCritical(scriptengine) << "TabletScriptingInterface addButtonProxyToQmlTablet buttonProxy is NULL";
+        return;
+    }
     bool hasResult = QMetaObject::invokeMethod(qmlTablet, "addButtonProxy", connectionType,
                                                Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, buttonProxy->getProperties()));
     if (!hasResult) {

View file

@@ -10,6 +10,7 @@
 //
 #include "ViveControllerManager.h"

+#include <algorithm>
 #include <PerfStat.h>
 #include <PathUtils.h>

@@ -20,7 +21,11 @@
 #include <NumericalConstants.h>
 #include <ui-plugins/PluginContainer.h>
 #include <UserActivityLogger.h>
+#include <NumericalConstants.h>
 #include <OffscreenUi.h>
+#include <GLMHelpers.h>
+#include <glm/ext.hpp>
+#include <glm/gtc/quaternion.hpp>

 #include <controllers/UserInputMapper.h>
@@ -36,14 +41,32 @@ void releaseOpenVrSystem();

 static const char* CONTROLLER_MODEL_STRING = "vr_controller_05_wireless_b";
+const quint64 CALIBRATION_TIMELAPSE = 2 * USECS_PER_SECOND;

 static const char* MENU_PARENT = "Avatar";
 static const char* MENU_NAME = "Vive Controllers";
 static const char* MENU_PATH = "Avatar" ">" "Vive Controllers";
 static const char* RENDER_CONTROLLERS = "Render Hand Controllers";

+static const int MIN_PUCK_COUNT = 2;
+static const int MIN_FEET_AND_HIPS = 3;
+static const int MIN_FEET_HIPS_CHEST = 4;
+static const int FIRST_FOOT = 0;
+static const int SECOND_FOOT = 1;
+static const int HIP = 2;
+static const int CHEST = 3;
+
 const char* ViveControllerManager::NAME { "OpenVR" };

+static glm::mat4 computeOffset(glm::mat4 defaultToReferenceMat, glm::mat4 defaultJointMat, controller::Pose puckPose) {
+    glm::mat4 poseMat = createMatFromQuatAndPos(puckPose.rotation, puckPose.translation);
+    glm::mat4 referenceJointMat = defaultToReferenceMat * defaultJointMat;
+    return glm::inverse(poseMat) * referenceJointMat;
+}
+
+static bool sortPucksYPosition(std::pair<uint32_t, controller::Pose> firstPuck, std::pair<uint32_t, controller::Pose> secondPuck) {
+    return (firstPuck.second.translation.y < secondPuck.second.translation.y);
+}
+
 bool ViveControllerManager::isSupported() const {
     return openVrSupported();
 }
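Note on computeOffset() above: for each puck it stores the constant transform that carries the puck pose at calibration time onto the matching default joint (expressed in the reference frame derived from the HMD). Applying that stored offset to a later puck pose, which is what addOffsetToPuckPose() does via postTransform(), reproduces the joint pose as long as the puck stays rigidly attached. A sketch of the round trip using GLM directly (the matrices are arbitrary test values, not from the commit):

    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    int main() {
        // hypothetical puck pose and reference joint pose at calibration time
        glm::mat4 puckAtCalibration = glm::translate(glm::mat4(1.0f), glm::vec3(0.1f, 0.9f, 0.0f));
        glm::mat4 referenceJoint    = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 1.0f, 0.05f));

        // same math as computeOffset(): offset = inverse(puckPose) * referenceJoint
        glm::mat4 offset = glm::inverse(puckAtCalibration) * referenceJoint;

        // later, the tracked puck pose post-multiplied by the offset recovers the joint:
        // puckAtCalibration * offset == referenceJoint (up to floating-point error)
        glm::mat4 recoveredJoint = puckAtCalibration * offset;
        (void) recoveredJoint;
        return 0;
    }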
@@ -125,6 +148,7 @@ void ViveControllerManager::pluginUpdate(float deltaTime, const controller::Inpu
 void ViveControllerManager::InputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
     _poseStateMap.clear();
     _buttonPressedMap.clear();
+    _validTrackedObjects.clear();

     // While the keyboard is open, we defer strictly to the keyboard values
     if (isOpenVrKeyboardShown()) {

@@ -143,6 +167,7 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
     // collect poses for all generic trackers
     for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
         handleTrackedObject(i, inputCalibrationData);
+        handleHmd(i, inputCalibrationData);
     }

     // handle haptics

@@ -164,10 +189,27 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
         numTrackedControllers++;
     }
     _trackedControllers = numTrackedControllers;
+
+    if (checkForCalibrationEvent()) {
+        quint64 currentTime = usecTimestampNow();
+        if (!_timeTilCalibrationSet) {
+            _timeTilCalibrationSet = true;
+            _timeTilCalibration = currentTime + CALIBRATION_TIMELAPSE;
+        }
+
+        if (currentTime > _timeTilCalibration && !_triggersPressedHandled) {
+            _triggersPressedHandled = true;
+            calibrateOrUncalibrate(inputCalibrationData);
+        }
+    } else {
+        _triggersPressedHandled = false;
+        _timeTilCalibrationSet = false;
+    }
+
+    updateCalibratedLimbs();
 }

 void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) {
     uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex;

     if (_system->IsTrackedDeviceConnected(deviceIndex) &&
@@ -185,12 +227,129 @@ void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceInde
         // transform into avatar frame
         glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
         _poseStateMap[poseIndex] = pose.transform(controllerToAvatar);
+        _validTrackedObjects.push_back(std::make_pair(poseIndex, _poseStateMap[poseIndex]));
     } else {
         controller::Pose invalidPose;
         _poseStateMap[poseIndex] = invalidPose;
     }
 }

+void ViveControllerManager::InputDevice::calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration) {
+    if (!_calibrated) {
+        calibrate(inputCalibration);
+    } else {
+        uncalibrate();
+    }
+}
+
+void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibrationData& inputCalibration) {
+    // convert the hmd head from sensor space to avatar space
+    glm::mat4 hmdSensorFlippedMat = inputCalibration.hmdSensorMat * Matrices::Y_180;
+    glm::mat4 sensorToAvatarMat = glm::inverse(inputCalibration.avatarMat) * inputCalibration.sensorToWorldMat;
+    glm::mat4 hmdAvatarMat = sensorToAvatarMat * hmdSensorFlippedMat;
+
+    // cancel the roll and pitch for the hmd head
+    glm::quat hmdRotation = cancelOutRollAndPitch(glmExtractRotation(hmdAvatarMat));
+    glm::vec3 hmdTranslation = extractTranslation(hmdAvatarMat);
+    glm::mat4 currentHmd = createMatFromQuatAndPos(hmdRotation, hmdTranslation);
+
+    // calculate the offset from the centerOfEye to defaultHeadMat
+    glm::mat4 defaultHeadOffset = glm::inverse(inputCalibration.defaultCenterEyeMat) * inputCalibration.defaultHeadMat;
+
+    glm::mat4 currentHead = currentHmd * defaultHeadOffset;
+
+    // calculate the defaultToRefrenceXform
+    glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat);
+
+    int puckCount = (int)_validTrackedObjects.size();
+    if (puckCount == MIN_PUCK_COUNT) {
+        _config = Config::Feet;
+    } else if (puckCount == MIN_FEET_AND_HIPS) {
+        _config = Config::FeetAndHips;
+    } else if (puckCount >= MIN_FEET_HIPS_CHEST) {
+        _config = Config::FeetHipsAndChest;
+    } else {
+        return;
+    }
+
+    std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition);
+
+    auto& firstFoot = _validTrackedObjects[FIRST_FOOT];
+    auto& secondFoot = _validTrackedObjects[SECOND_FOOT];
+    controller::Pose& firstFootPose = firstFoot.second;
+    controller::Pose& secondFootPose = secondFoot.second;
+
+    if (firstFootPose.translation.x < secondFootPose.translation.x) {
+        _jointToPuckMap[controller::LEFT_FOOT] = firstFoot.first;
+        _pucksOffset[firstFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultLeftFoot, firstFootPose);
+        _jointToPuckMap[controller::RIGHT_FOOT] = secondFoot.first;
+        _pucksOffset[secondFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultRightFoot, secondFootPose);
+    } else {
+        _jointToPuckMap[controller::LEFT_FOOT] = secondFoot.first;
+        _pucksOffset[secondFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultLeftFoot, secondFootPose);
+        _jointToPuckMap[controller::RIGHT_FOOT] = firstFoot.first;
+        _pucksOffset[firstFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultRightFoot, firstFootPose);
+    }
+
+    if (_config == Config::Feet) {
+        // done
+    } else if (_config == Config::FeetAndHips) {
+        _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first;
+        _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second);
+    } else if (_config == Config::FeetHipsAndChest) {
+        _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first;
+        _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second);
+        _jointToPuckMap[controller::SPINE2] = _validTrackedObjects[CHEST].first;
+        _pucksOffset[_validTrackedObjects[CHEST].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultSpine2, _validTrackedObjects[CHEST].second);
+    }
+
+    _calibrated = true;
+}
+
+void ViveControllerManager::InputDevice::uncalibrate() {
+    _pucksOffset.clear();
+    _jointToPuckMap.clear();
+    _calibrated = false;
+}
+
+void ViveControllerManager::InputDevice::updateCalibratedLimbs() {
+    _poseStateMap[controller::LEFT_FOOT] = addOffsetToPuckPose(controller::LEFT_FOOT);
+    _poseStateMap[controller::RIGHT_FOOT] = addOffsetToPuckPose(controller::RIGHT_FOOT);
+    _poseStateMap[controller::HIPS] = addOffsetToPuckPose(controller::HIPS);
+    _poseStateMap[controller::SPINE2] = addOffsetToPuckPose(controller::SPINE2);
+}
+
+controller::Pose ViveControllerManager::InputDevice::addOffsetToPuckPose(int joint) const {
+    auto puck = _jointToPuckMap.find(joint);
+    if (puck != _jointToPuckMap.end()) {
+        uint32_t puckIndex = puck->second;
+        auto puckPose = _poseStateMap.find(puckIndex);
+        auto puckOffset = _pucksOffset.find(puckIndex);
+
+        if ((puckPose != _poseStateMap.end()) && (puckOffset != _pucksOffset.end())) {
+            return puckPose->second.postTransform(puckOffset->second);
+        }
+    }
+    return controller::Pose();
+}
+
+void ViveControllerManager::InputDevice::handleHmd(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) {
+    uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex;
+
+    if (_system->IsTrackedDeviceConnected(deviceIndex) &&
+        _system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_HMD &&
+        _nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) {
+
+        const mat4& mat = _nextSimPoseData.poses[deviceIndex];
+        const vec3 linearVelocity = _nextSimPoseData.linearVelocities[deviceIndex];
+        const vec3 angularVelocity = _nextSimPoseData.angularVelocities[deviceIndex];
+
+        handleHeadPoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity);
+    }
+}
+
 void ViveControllerManager::InputDevice::handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand) {

     if (_system->IsTrackedDeviceConnected(deviceIndex) &&
@@ -276,6 +435,14 @@ enum ViveButtonChannel {
     RIGHT_APP_MENU
 };

+bool ViveControllerManager::InputDevice::checkForCalibrationEvent() {
+    auto endOfMap = _buttonPressedMap.end();
+    auto leftTrigger = _buttonPressedMap.find(controller::LT);
+    auto rightTrigger = _buttonPressedMap.find(controller::RT);
+    auto leftAppButton = _buttonPressedMap.find(LEFT_APP_MENU);
+    auto rightAppButton = _buttonPressedMap.find(RIGHT_APP_MENU);
+    return ((leftTrigger != endOfMap && leftAppButton != endOfMap) && (rightTrigger != endOfMap && rightAppButton != endOfMap));
+}

 // These functions do translation from the Steam IDs to the standard controller IDs
 void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand) {
@@ -305,6 +472,19 @@ void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint
     }
 }

+void ViveControllerManager::InputDevice::handleHeadPoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
+                                                             const vec3& linearVelocity, const vec3& angularVelocity) {
+
+    // perform a 180 flip to make the HMD face the +z instead of -z, because the head faces +z
+    glm::mat4 matYFlip = mat * Matrices::Y_180;
+    controller::Pose pose(extractTranslation(matYFlip), glmExtractRotation(matYFlip), linearVelocity, angularVelocity);
+
+    glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
+    glm::mat4 defaultHeadOffset = glm::inverse(inputCalibrationData.defaultCenterEyeMat) * inputCalibrationData.defaultHeadMat;
+    controller::Pose hmdHeadPose = pose.transform(sensorToAvatar);
+    _poseStateMap[controller::HEAD] = hmdHeadPose.postTransform(defaultHeadOffset);
+}
+
 void ViveControllerManager::InputDevice::handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
                                                          const mat4& mat, const vec3& linearVelocity,
                                                          const vec3& angularVelocity, bool isLeftHand) {
@@ -404,6 +584,11 @@ controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableI
     // 3d location of controller
     makePair(LEFT_HAND, "LeftHand"),
     makePair(RIGHT_HAND, "RightHand"),
+    makePair(LEFT_FOOT, "LeftFoot"),
+    makePair(RIGHT_FOOT, "RightFoot"),
+    makePair(HIPS, "Hips"),
+    makePair(SPINE2, "Spine2"),
+    makePair(HEAD, "Head"),

     // 16 tracked poses
     makePair(TRACKED_OBJECT_00, "TrackedObject00"),

View file

@@ -14,9 +14,11 @@
 #include <QObject>
 #include <unordered_set>
+#include <vector>
+#include <map>
+#include <utility>
 #include <GLMHelpers.h>
 #include <model/Geometry.h>
 #include <gpu/Texture.h>
 #include <controllers/InputDevice.h>

@@ -58,13 +60,21 @@ private:
     bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override;
     void hapticsHelper(float deltaTime, bool leftHand);
+    void calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration);
+    void calibrate(const controller::InputCalibrationData& inputCalibration);
+    void uncalibrate();
+    controller::Pose addOffsetToPuckPose(int joint) const;
+    void updateCalibratedLimbs();
+    bool checkForCalibrationEvent();

     void handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand);
+    void handleHmd(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData);
     void handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData);
     void handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand);
     void handleAxisEvent(float deltaTime, uint32_t axis, float x, float y, bool isLeftHand);
     void handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
                          const vec3& linearVelocity, const vec3& angularVelocity, bool isLeftHand);
+    void handleHeadPoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, const vec3& linearVelocity,
+                             const vec3& angularVelocity);
     void partitionTouchpad(int sButton, int xAxis, int yAxis, int centerPsuedoButton, int xPseudoButton, int yPseudoButton);

     class FilteredStick {
@@ -90,10 +100,14 @@ private:
         float _timer { 0.0f };
         glm::vec2 _stick { 0.0f, 0.0f };
     };
+    enum class Config { Feet, FeetAndHips, FeetHipsAndChest, NoConfig };
+    Config _config { Config::NoConfig };
     FilteredStick _filteredLeftStick;
     FilteredStick _filteredRightStick;
+    std::vector<std::pair<uint32_t, controller::Pose>> _validTrackedObjects;
+    std::map<uint32_t, glm::mat4> _pucksOffset;
+    std::map<int, uint32_t> _jointToPuckMap;

     // perform an action when the InputDevice mutex is acquired.
     using Locker = std::unique_lock<std::recursive_mutex>;
     template <typename F>

@@ -101,10 +115,14 @@ private:
     int _trackedControllers { 0 };
     vr::IVRSystem*& _system;
+    quint64 _timeTilCalibration { 0.0f };
     float _leftHapticStrength { 0.0f };
     float _leftHapticDuration { 0.0f };
     float _rightHapticStrength { 0.0f };
     float _rightHapticDuration { 0.0f };
+    bool _triggersPressedHandled { false };
+    bool _calibrated { false };
+    bool _timeTilCalibrationSet { false };

     mutable std::recursive_mutex _lock;

     friend class ViveControllerManager;

View file

@@ -760,7 +760,7 @@
             break;
         case "done":
             delete waitingList[senderID];
-            if (connectionId !== senderID) {
+            if (connectingId !== senderID) {
                 break;
             }
             if (state === STATES.CONNECTING) {

View file

@@ -268,7 +268,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
         break;
     case 'refreshConnections':
         print('Refreshing Connections...');
-        getConnectionData();
+        getConnectionData(false);
         UserActivityLogger.palAction("refresh_connections", "");
         break;
     case 'removeConnection':

@@ -281,25 +281,27 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
                print("Error: unable to remove connection", connectionUserName, error || response.status);
                return;
            }
-           getConnectionData();
+           getConnectionData(false);
        });
        break
    case 'removeFriend':
        friendUserName = message.params;
+       print("Removing " + friendUserName + " from friends.");
        request({
            uri: METAVERSE_BASE + '/api/v1/user/friends/' + friendUserName,
            method: 'DELETE'
        }, function (error, response) {
            if (error || (response.status !== 'success')) {
-               print("Error: unable to unfriend", friendUserName, error || response.status);
+               print("Error: unable to unfriend " + friendUserName, error || response.status);
                return;
            }
-           getConnectionData();
+           getConnectionData(friendUserName);
        });
        break
    case 'addFriend':
        friendUserName = message.params;
+       print("Adding " + friendUserName + " to friends.");
        request({
            uri: METAVERSE_BASE + '/api/v1/user/friends',
            method: 'POST',

@@ -312,7 +314,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
                print("Error: unable to friend " + friendUserName, error || response.status);
                return;
            }
-           getConnectionData(); // For now, just refresh all connection data. Later, just refresh the one friended row.
+           getConnectionData(friendUserName);
        }
        );
        break;
@@ -360,8 +362,6 @@ function getProfilePicture(username, callback) { // callback(url) if successfull
     });
 }
 function getAvailableConnections(domain, callback) { // callback([{usename, location}...]) if successfull. (Logs otherwise)
-    // The back end doesn't do user connections yet. Fake it by getting all users that have made themselves accessible to us,
-    // and pretending that they are all connections.
     url = METAVERSE_BASE + '/api/v1/users?'
     if (domain) {
         url += 'status=' + domain.slice(1, -1); // without curly braces

@@ -369,25 +369,22 @@ function getAvailableConnections(domain, callback) { // callback([{usename, loca
         url += 'filter=connections'; // regardless of whether online
     }
     requestJSON(url, function (connectionsData) {
-        // The back end doesn't include the profile picture data, but we can add that here.
-        // For our current purposes, there's no need to be fancy and try to reduce latency by doing some number of requests in parallel,
-        // so these requests are all sequential.
-        var users = connectionsData.users;
-        function addPicture(index) {
-            if (index >= users.length) {
-                return callback(users);
-            }
-            var user = users[index];
-            getProfilePicture(user.username, function (url) {
-                user.profileUrl = url;
-                addPicture(index + 1);
-            });
-        }
-        addPicture(0);
+        callback(connectionsData.users);
     });
 }

-function getConnectionData(domain) { // Update all the usernames that I am entitled to see, using my login but not dependent on canKick.
+function getInfoAboutUser(specificUsername, callback) {
+    url = METAVERSE_BASE + '/api/v1/users?filter=connections'
+    requestJSON(url, function (connectionsData) {
+        for (user in connectionsData.users) {
+            if (connectionsData.users[user].username === specificUsername) {
+                callback(connectionsData.users[user]);
+                return;
+            }
+        }
+        callback(false);
+    });
+}
+
+function getConnectionData(specificUsername, domain) { // Update all the usernames that I am entitled to see, using my login but not dependent on canKick.
     function frob(user) { // get into the right format
         var formattedSessionId = user.location.node_id || '';
         if (formattedSessionId !== '' && formattedSessionId.indexOf("{") != 0) {
@@ -397,10 +394,19 @@ function getConnectionData(domain) { // Update all the usernames that I am entit
             sessionId: formattedSessionId,
             userName: user.username,
             connection: user.connection,
-            profileUrl: user.profileUrl,
+            profileUrl: user.images.thumbnail,
             placeName: (user.location.root || user.location.domain || {}).name || ''
         };
     }
+    if (specificUsername) {
+        getInfoAboutUser(specificUsername, function (user) {
+            if (user) {
+                updateUser(frob(user));
+            } else {
+                print('Error: Unable to find information about ' + specificUsername + ' in connectionsData!');
+            }
+        });
+    } else {
        getAvailableConnections(domain, function (users) {
            if (domain) {
                users.forEach(function (user) {

@@ -410,6 +416,7 @@ function getConnectionData(domain) { // Update all the usernames that I am entit
                sendToQml({ method: 'connections', params: users.map(frob) });
            }
        });
+    }
 }

 //

@@ -486,7 +493,7 @@ function populateNearbyUserList(selectData, oldAudioData) {
         data.push(avatarPalDatum);
         print('PAL data:', JSON.stringify(avatarPalDatum));
     });
-    getConnectionData(location.domainId); // Even admins don't get relationship data in requestUsernameFromID (which is still needed for admin status, which comes from domain).
+    getConnectionData(false, location.domainId); // Even admins don't get relationship data in requestUsernameFromID (which is still needed for admin status, which comes from domain).
     conserveResources = Object.keys(avatarsOfInterest).length > 20;
     sendToQml({ method: 'nearbyUsers', params: data });
     if (selectData) {