diff --git a/cmake/externals/wasapi/CMakeLists.txt b/cmake/externals/wasapi/CMakeLists.txt
index d4d4b42e10..1bf195fc84 100644
--- a/cmake/externals/wasapi/CMakeLists.txt
+++ b/cmake/externals/wasapi/CMakeLists.txt
@@ -6,8 +6,8 @@ if (WIN32)
   include(ExternalProject)
   ExternalProject_Add(
     ${EXTERNAL_NAME}
-    URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi7.zip
-    URL_MD5 bc2861e50852dd590cdc773a14a041a7
+    URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi8.zip
+    URL_MD5 b01510437ea15527156bc25cdf733bd9
     CONFIGURE_COMMAND ""
     BUILD_COMMAND ""
     INSTALL_COMMAND ""
diff --git a/interface/resources/controllers/vive.json b/interface/resources/controllers/vive.json
index 4fbdb37abf..4491507a9c 100644
--- a/interface/resources/controllers/vive.json
+++ b/interface/resources/controllers/vive.json
@@ -35,6 +35,11 @@
         { "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },
 
         { "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
-        { "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] }
+        { "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },
+        { "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot", "when": [ "Application.InHMD"] },
+        { "from": "Vive.RightFoot", "to" : "Standard.RightFoot", "when": [ "Application.InHMD"] },
+        { "from": "Vive.Hips", "to" : "Standard.Hips", "when": [ "Application.InHMD"] },
+        { "from": "Vive.Spine2", "to" : "Standard.Spine2", "when": [ "Application.InHMD"] },
+        { "from": "Vive.Head", "to" : "Standard.Head", "when" : [ "Application.InHMD"] }
     ]
 }
diff --git a/interface/resources/qml/hifi/Pal.qml b/interface/resources/qml/hifi/Pal.qml
index 1755d2fbec..8f6b00f459 100644
--- a/interface/resources/qml/hifi/Pal.qml
+++ b/interface/resources/qml/hifi/Pal.qml
@@ -844,7 +844,7 @@ Rectangle {
                     boxSize: 24;
                     onClicked: {
                         var newValue = model.connection !== "friend";
-                        connectionsUserModel.setProperty(model.userIndex, styleData.role, newValue);
+                        connectionsUserModel.setProperty(model.userIndex, styleData.role, (newValue ? "friend" : "connection"));
                         connectionsUserModelData[model.userIndex][styleData.role] = newValue; // Defensive programming
                         pal.sendToScript({method: newValue ? 'addFriend' : 'removeFriend', params: model.userName});
 
diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp
index 56b9017e9f..32f6d9554e 100644
--- a/interface/src/Application.cpp
+++ b/interface/src/Application.cpp
@@ -941,10 +941,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
     // sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
     // The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
+ static const QString TESTER = "HIFI_TESTER"; auto gpuIdent = GPUIdent::getInstance(); auto glContextData = getGLContextData(); QJsonObject properties = { { "version", applicationVersion() }, + { "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) }, { "previousSessionCrashed", _previousSessionCrashed }, { "previousSessionRuntime", sessionRunTime.get() }, { "cpu_architecture", QSysInfo::currentCpuArchitecture() }, @@ -1688,7 +1690,6 @@ void Application::updateHeartbeat() const { void Application::aboutToQuit() { emit beforeAboutToQuit(); - DependencyManager::get()->beforeAboutToQuit(); foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) { if (inputPlugin->isActive()) { @@ -1789,14 +1790,13 @@ void Application::cleanupBeforeQuit() { _snapshotSoundInjector->stop(); } - // stop audio after QML, as there are unexplained audio crashes originating in qtwebengine - - // stop the AudioClient, synchronously + // FIXME: something else is holding a reference to AudioClient, + // so it must be explicitly synchronously stopped here QMetaObject::invokeMethod(DependencyManager::get().data(), - "stop", Qt::BlockingQueuedConnection); - + "cleanupBeforeQuit", Qt::BlockingQueuedConnection); // destroy Audio so it and its threads have a chance to go down safely + // this must happen after QML, as there are unexplained audio crashes originating in qtwebengine DependencyManager::destroy(); DependencyManager::destroy(); diff --git a/interface/src/avatar/MySkeletonModel.cpp b/interface/src/avatar/MySkeletonModel.cpp index 1b9aa4dc18..e60481fc62 100644 --- a/interface/src/avatar/MySkeletonModel.cpp +++ b/interface/src/avatar/MySkeletonModel.cpp @@ -37,7 +37,14 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) { Head* head = _owningAvatar->getHead(); // make sure lookAt is not too close to face (avoid crosseyes) - glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition(); + glm::vec3 lookAt = head->getLookAtPosition(); + glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition(); + float focusDistance = glm::length(focusOffset); + const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f; + if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) { + lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset; + } + MyAvatar* myAvatar = static_cast(_owningAvatar); Rig::HeadParameters headParams; @@ -140,6 +147,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) { auto orientation = myAvatar->getLocalOrientation(); _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState); + // evaluate AnimGraph animation and update jointStates. + Model::updateRig(deltaTime, parentTransform); + Rig::EyeParameters eyeParams; eyeParams.eyeLookAt = lookAt; eyeParams.eyeSaccade = head->getSaccade(); @@ -149,8 +159,5 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) { eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex; _rig->updateFromEyeParameters(eyeParams); - - // evaluate AnimGraph animation and update jointStates. 
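For reference, the minimum-focus-distance clamp introduced above in MySkeletonModel::updateRig (and mirrored in SkeletonModel::updateRig further down) is just a rescale of the eye-to-target offset. A minimal stand-alone sketch, assuming only glm and using a local stand-in for the engine's EPSILON constant:

#include <glm/glm.hpp>

// Push a look-at target out to at least MIN_LOOK_AT_FOCUS_DISTANCE from the eyes
// so that very close targets do not produce a cross-eyed pose.
glm::vec3 clampLookAtDistance(const glm::vec3& lookAt, const glm::vec3& eyePosition) {
    const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f; // meters, as in the patch
    const float EPSILON = 1.0e-6f;                 // stand-in for the engine constant
    glm::vec3 focusOffset = lookAt - eyePosition;
    float focusDistance = glm::length(focusOffset);
    if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
        // keep the gaze direction, scale the offset out to the minimum distance
        return eyePosition + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
    }
    return lookAt;
}

Targets closer than one meter are pushed out along the same direction, which preserves the gaze direction while avoiding the crossed-eye pose.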
- Parent::updateRig(deltaTime, parentTransform); } diff --git a/libraries/audio-client/src/AudioClient.cpp b/libraries/audio-client/src/AudioClient.cpp index e9134b3bd7..225ef69ae3 100644 --- a/libraries/audio-client/src/AudioClient.cpp +++ b/libraries/audio-client/src/AudioClient.cpp @@ -76,42 +76,58 @@ using Mutex = std::mutex; using Lock = std::unique_lock; static Mutex _deviceMutex; -// background thread that continuously polls for device changes -class CheckDevicesThread : public QThread { +class BackgroundThread : public QThread { public: - const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000; + BackgroundThread(AudioClient* client) : QThread((QObject*)client), _client(client) {} + virtual void join() = 0; +protected: + AudioClient* _client; +}; - CheckDevicesThread(AudioClient* audioClient) - : _audioClient(audioClient) { - } - - void beforeAboutToQuit() { - Lock lock(_checkDevicesMutex); - _quit = true; +// background thread continuously polling device changes +class CheckDevicesThread : public BackgroundThread { +public: + CheckDevicesThread(AudioClient* client) : BackgroundThread(client) {} + + void join() override { + _shouldQuit = true; + std::unique_lock lock(_joinMutex); + _joinCondition.wait(lock, [&]{ return !_isRunning; }); } +protected: void run() override { - while (true) { - { - Lock lock(_checkDevicesMutex); - if (_quit) { - break; - } - _audioClient->checkDevices(); - } + while (!_shouldQuit) { + _client->checkDevices(); + + const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000; QThread::msleep(DEVICE_CHECK_INTERVAL_MSECS); } + std::lock_guard lock(_joinMutex); + _isRunning = false; + _joinCondition.notify_one(); } private: - AudioClient* _audioClient { nullptr }; - Mutex _checkDevicesMutex; - bool _quit { false }; + std::atomic _shouldQuit { false }; + bool _isRunning { true }; + std::mutex _joinMutex; + std::condition_variable _joinCondition; }; -void AudioInjectorsThread::prepare() { - _audio->prepareLocalAudioInjectors(); -} +// background thread buffering local injectors +class LocalInjectorsThread : public BackgroundThread { + Q_OBJECT +public: + LocalInjectorsThread(AudioClient* client) : BackgroundThread(client) {} + + void join() override { return; } + +private slots: + void prepare() { _client->prepareLocalAudioInjectors(); } +}; + +#include "AudioClient.moc" static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) { for (int i = 0; i < numSamples/2; i++) { @@ -179,7 +195,6 @@ AudioClient::AudioClient() : _inputToNetworkResampler(NULL), _networkToOutputResampler(NULL), _localToOutputResampler(NULL), - _localAudioThread(this), _audioLimiter(AudioConstants::SAMPLE_RATE, OUTPUT_CHANNEL_COUNT), _outgoingAvatarAudioSequenceNumber(0), _audioOutputIODevice(_localInjectorsStream, _receivedAudioStream, this), @@ -210,13 +225,14 @@ AudioClient::AudioClient() : // start a thread to detect any device changes _checkDevicesThread = new CheckDevicesThread(this); - _checkDevicesThread->setObjectName("CheckDevices Thread"); + _checkDevicesThread->setObjectName("AudioClient CheckDevices Thread"); _checkDevicesThread->setPriority(QThread::LowPriority); _checkDevicesThread->start(); // start a thread to process local injectors - _localAudioThread.setObjectName("LocalAudio Thread"); - _localAudioThread.start(); + _localInjectorsThread = new LocalInjectorsThread(this); + _localInjectorsThread->setObjectName("AudioClient LocalInjectors Thread"); + _localInjectorsThread->start(); configureReverb(); @@ -231,18 +247,32 @@ 
AudioClient::AudioClient() : } AudioClient::~AudioClient() { - delete _checkDevicesThread; - stop(); if (_codec && _encoder) { _codec->releaseEncoder(_encoder); _encoder = nullptr; } } -void AudioClient::beforeAboutToQuit() { - static_cast(_checkDevicesThread)->beforeAboutToQuit(); +void AudioClient::customDeleter() { + deleteLater(); } +void AudioClient::cleanupBeforeQuit() { + // FIXME: this should be put in customDeleter, but there is still a reference to this when it is called, + // so this must be explicitly, synchronously stopped + + stop(); + + if (_checkDevicesThread) { + static_cast(_checkDevicesThread)->join(); + delete _checkDevicesThread; + } + + if (_localInjectorsThread) { + static_cast(_localInjectorsThread)->join(); + delete _localInjectorsThread; + } +} void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) { qCDebug(audioclient) << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec; @@ -1097,11 +1127,19 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) { handleAudioInput(audioBuffer); } -void AudioClient::prepareLocalAudioInjectors() { +void AudioClient::prepareLocalAudioInjectors(std::unique_ptr localAudioLock) { + bool doSynchronously = localAudioLock.operator bool(); + if (!localAudioLock) { + localAudioLock.reset(new Lock(_localAudioMutex)); + } + int samplesNeeded = std::numeric_limits::max(); while (samplesNeeded > 0) { - // unlock between every write to allow device switching - Lock lock(_localAudioMutex); + if (!doSynchronously) { + // unlock between every write to allow device switching + localAudioLock->unlock(); + localAudioLock->lock(); + } // in case of a device switch, consider bufferCapacity volatile across iterations if (_outputPeriod == 0) { @@ -1155,16 +1193,16 @@ void AudioClient::prepareLocalAudioInjectors() { } bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) { - - QVector injectorsToRemove; - - // lock the injector vector - Lock lock(_injectorsMutex); - - if (_activeLocalAudioInjectors.size() == 0) { + // check the flag for injectors before attempting to lock + if (!_localInjectorsAvailable.load(std::memory_order_acquire)) { return false; } + // lock the injectors + Lock lock(_injectorsMutex); + + QVector injectorsToRemove; + memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float)); for (AudioInjector* injector : _activeLocalAudioInjectors) { @@ -1243,6 +1281,9 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) { _activeLocalAudioInjectors.removeOne(injector); } + // update the flag + _localInjectorsAvailable.exchange(!_activeLocalAudioInjectors.empty(), std::memory_order_release); + return true; } @@ -1329,11 +1370,14 @@ bool AudioClient::outputLocalInjector(AudioInjector* injector) { // move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop()) injectorBuffer->setParent(nullptr); - injectorBuffer->moveToThread(&_localAudioThread); + injectorBuffer->moveToThread(_localInjectorsThread); + + // update the flag + _localInjectorsAvailable.exchange(true, std::memory_order_release); } else { qCDebug(audioclient) << "injector exists in active list already"; } - + return true; } else { @@ -1457,7 +1501,7 @@ void AudioClient::outputNotify() { bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) { bool supportedFormat = false; - Lock lock(_localAudioMutex); + Lock 
localAudioLock(_localAudioMutex); _localSamplesAvailable.exchange(0, std::memory_order_release); // cleanup any previously initialized device @@ -1528,14 +1572,23 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice connect(_audioOutput, &QAudioOutput::stateChanged, [&, frameSize, requestedSize](QAudio::State state) { if (state == QAudio::ActiveState) { // restrict device callback to _outputPeriod samples - _outputPeriod = (_audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE) * 2; + _outputPeriod = _audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE; + // device callback may exceed reported period, so double it to avoid stutter + _outputPeriod *= 2; + _outputMixBuffer = new float[_outputPeriod]; _outputScratchBuffer = new int16_t[_outputPeriod]; // size local output mix buffer based on resampled network frame size - _networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO); - _localOutputMixBuffer = new float[_networkPeriod]; + int networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO); + _localOutputMixBuffer = new float[networkPeriod]; + + // local period should be at least twice the output period, + // in case two device reads happen before more data can be read (worst case) int localPeriod = _outputPeriod * 2; + // round up to an exact multiple of networkPeriod + localPeriod = ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod; + // this ensures lowest latency without stutter from underrun _localInjectorsStream.resizeForFrameSize(localPeriod); int bufferSize = _audioOutput->bufferSize(); @@ -1550,6 +1603,9 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice qCDebug(audioclient) << "local buffer (samples):" << localPeriod; disconnect(_audioOutput, &QAudioOutput::stateChanged, 0, 0); + + // unlock to avoid a deadlock with the device callback (which always succeeds this initialization) + localAudioLock.unlock(); } }); connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify); @@ -1688,12 +1744,24 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) { int injectorSamplesPopped = 0; { bool append = networkSamplesPopped > 0; - // this does not require a lock as of the only two functions adding to _localSamplesAvailable (samples count): + // check the samples we have available locklessly; this is possible because only two functions add to the count: // - prepareLocalAudioInjectors will only increase samples count - // - switchOutputToAudioDevice will zero samples count - // stop the device, so that readData will exhaust the existing buffer or see a zeroed samples count - // and start the device, which can only see a zeroed samples count - samplesRequested = std::min(samplesRequested, _audio->_localSamplesAvailable.load(std::memory_order_acquire)); + // - switchOutputToAudioDevice will zero samples count, + // stop the device - so that readData will exhaust the existing buffer or see a zeroed samples count, + // and start the device - which can then only see a zeroed samples count + int samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire); + + // if we do not have enough samples buffered despite having injectors, buffer them synchronously + if (samplesAvailable < samplesRequested && _audio->_localInjectorsAvailable.load(std::memory_order_acquire)) { + // try_to_lock, in case the device is being shut down already + std::unique_ptr 
localAudioLock(new Lock(_audio->_localAudioMutex, std::try_to_lock)); + if (localAudioLock->owns_lock()) { + _audio->prepareLocalAudioInjectors(std::move(localAudioLock)); + samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire); + } + } + + samplesRequested = std::min(samplesRequested, samplesAvailable); if ((injectorSamplesPopped = _localInjectorsStream.appendSamples(mixBuffer, samplesRequested, append)) > 0) { _audio->_localSamplesAvailable.fetch_sub(injectorSamplesPopped, std::memory_order_release); qCDebug(audiostream, "Read %d samples from injectors (%d available, %d requested)", injectorSamplesPopped, _localInjectorsStream.samplesAvailable(), samplesRequested); @@ -1701,7 +1769,7 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) { } // prepare injectors for the next callback - QMetaObject::invokeMethod(&_audio->_localAudioThread, "prepare", Qt::QueuedConnection); + QMetaObject::invokeMethod(_audio->_localInjectorsThread, "prepare", Qt::QueuedConnection); int samplesPopped = std::max(networkSamplesPopped, injectorSamplesPopped); int framesPopped = samplesPopped / AudioConstants::STEREO; diff --git a/libraries/audio-client/src/AudioClient.h b/libraries/audio-client/src/AudioClient.h index 89ba3db14f..c65cacb129 100644 --- a/libraries/audio-client/src/AudioClient.h +++ b/libraries/audio-client/src/AudioClient.h @@ -71,19 +71,6 @@ class QIODevice; class Transform; class NLPacket; -class AudioInjectorsThread : public QThread { - Q_OBJECT - -public: - AudioInjectorsThread(AudioClient* audio) : _audio(audio) {} - -public slots : - void prepare(); - -private: - AudioClient* _audio; -}; - class AudioClient : public AbstractAudioInterface, public Dependency { Q_OBJECT SINGLETON_DEPENDENCY @@ -158,7 +145,7 @@ public: Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale); - void checkDevices(); + bool outputLocalInjector(AudioInjector* injector) override; static const float CALLBACK_ACCELERATOR_RATIO; @@ -169,6 +156,7 @@ public: public slots: void start(); void stop(); + void cleanupBeforeQuit(); void handleAudioEnvironmentDataPacket(QSharedPointer message); void handleAudioDataPacket(QSharedPointer message); @@ -184,8 +172,6 @@ public slots: void audioMixerKilled(); void toggleMute(); - void beforeAboutToQuit(); - virtual void setIsStereoInput(bool stereo) override; void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; } @@ -198,8 +184,6 @@ public slots: int setOutputBufferSize(int numFrames, bool persist = true); - void prepareLocalAudioInjectors(); - bool outputLocalInjector(AudioInjector* injector) override; bool shouldLoopbackInjectors() override { return _shouldEchoToServer; } bool switchInputToAudioDevice(const QString& inputDeviceName); @@ -245,13 +229,16 @@ protected: AudioClient(); ~AudioClient(); - virtual void customDeleter() override { - deleteLater(); - } + virtual void customDeleter() override; private: + friend class CheckDevicesThread; + friend class LocalInjectorsThread; + void outputFormatChanged(); void handleAudioInput(QByteArray& audioBuffer); + void checkDevices(); + void prepareLocalAudioInjectors(std::unique_ptr localAudioLock = nullptr); bool mixLocalAudioInjectors(float* mixBuffer); float azimuthForSource(const glm::vec3& relativePosition); float gainForSource(float distance, float volume); @@ -298,8 +285,9 @@ private: AudioRingBuffer _inputRingBuffer; LocalInjectorsStream _localInjectorsStream; // In order to use _localInjectorsStream as a lock-free pipe, 
- // use it with a single producer/consumer, and track available samples + // use it with a single producer/consumer, and track available samples and injectors std::atomic _localSamplesAvailable { 0 }; + std::atomic _localInjectorsAvailable { false }; MixedProcessedAudioStream _receivedAudioStream; bool _isStereoInput; @@ -340,19 +328,17 @@ private: // for network audio (used by network audio thread) int16_t _networkScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC]; - // for local audio (used by audio injectors thread) - int _networkPeriod { 0 }; - float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO]; - int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC]; - float* _localOutputMixBuffer { NULL }; - AudioInjectorsThread _localAudioThread; - Mutex _localAudioMutex; - // for output audio (used by this thread) int _outputPeriod { 0 }; float* _outputMixBuffer { NULL }; int16_t* _outputScratchBuffer { NULL }; + // for local audio (used by audio injectors thread) + float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO]; + int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC]; + float* _localOutputMixBuffer { NULL }; + Mutex _localAudioMutex; + AudioLimiter _audioLimiter; // Adds Reverb @@ -395,12 +381,13 @@ private: QString _selectedCodecName; Encoder* _encoder { nullptr }; // for outbound mic stream - QThread* _checkDevicesThread { nullptr }; - RateCounter<> _silentOutbound; RateCounter<> _audioOutbound; RateCounter<> _silentInbound; RateCounter<> _audioInbound; + + QThread* _checkDevicesThread { nullptr }; + QThread* _localInjectorsThread { nullptr }; }; diff --git a/libraries/audio/src/AbstractAudioInterface.h b/libraries/audio/src/AbstractAudioInterface.h index 2e4611cd4e..2e14b9956b 100644 --- a/libraries/audio/src/AbstractAudioInterface.h +++ b/libraries/audio/src/AbstractAudioInterface.h @@ -32,12 +32,12 @@ public: const Transform& transform, glm::vec3 avatarBoundingBoxCorner, glm::vec3 avatarBoundingBoxScale, PacketType packetType, QString codecName = QString("")); -public slots: // threadsafe // moves injector->getLocalBuffer() to another thread (so removes its parent) // take care to delete it when ~AudioInjector, as parenting Qt semantics will not work virtual bool outputLocalInjector(AudioInjector* injector) = 0; +public slots: virtual bool shouldLoopbackInjectors() { return false; } virtual void setIsStereoInput(bool stereo) = 0; diff --git a/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp b/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp index be55653f64..664b0094f4 100644 --- a/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp +++ b/libraries/avatars-renderer/src/avatars-renderer/Avatar.cpp @@ -369,23 +369,25 @@ void Avatar::simulate(float deltaTime, bool inView) { PerformanceTimer perfTimer("simulate"); { PROFILE_RANGE(simulation, "updateJoints"); - if (inView && _hasNewJointData) { - _skeletonModel->getRig()->copyJointsFromJointData(_jointData); - glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset()); - _skeletonModel->getRig()->computeExternalPoses(rootTransform); - _jointDataSimulationRate.increment(); - - _skeletonModel->simulate(deltaTime, true); - - locationChanged(); // joints changed, so if there are any children, update them. 
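A note on the lock-free bookkeeping added to AudioClient above: _localSamplesAvailable acts as a single-producer/single-consumer counter, where the injector side publishes new samples with a release store after writing them and the device callback reads the count with an acquire load before consuming. A minimal sketch of that ordering pattern only (SampleCounter is illustrative, not the AudioClient API; the sample stream itself is elided):

#include <algorithm>
#include <atomic>

// Single-producer / single-consumer counter in the style of _localSamplesAvailable.
class SampleCounter {
public:
    // producer thread: call after the samples have been written into the stream
    void published(int count) {
        _samplesAvailable.fetch_add(count, std::memory_order_release);
    }
    // consumer thread: returns how many samples may safely be read right now
    int take(int requested) {
        int available = _samplesAvailable.load(std::memory_order_acquire);
        int taken = std::min(requested, available);
        _samplesAvailable.fetch_sub(taken, std::memory_order_release);
        return taken;
    }
private:
    std::atomic<int> _samplesAvailable { 0 };
};

The acquire load pairs with the release increment, so any samples counted as available are guaranteed to be visible to the consumer before it reads them.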
- _hasNewJointData = false; - - glm::vec3 headPosition = getPosition(); - if (!_skeletonModel->getHeadPosition(headPosition)) { - headPosition = getPosition(); - } + if (inView) { Head* head = getHead(); - head->setPosition(headPosition); + if (_hasNewJointData) { + _skeletonModel->getRig()->copyJointsFromJointData(_jointData); + glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset()); + _skeletonModel->getRig()->computeExternalPoses(rootTransform); + _jointDataSimulationRate.increment(); + + _skeletonModel->simulate(deltaTime, true); + + locationChanged(); // joints changed, so if there are any children, update them. + _hasNewJointData = false; + + glm::vec3 headPosition = getPosition(); + if (!_skeletonModel->getHeadPosition(headPosition)) { + headPosition = getPosition(); + } + head->setPosition(headPosition); + } head->setScale(getUniformScale()); head->simulate(deltaTime); } else { diff --git a/libraries/avatars-renderer/src/avatars-renderer/Head.cpp b/libraries/avatars-renderer/src/avatars-renderer/Head.cpp index a90c9cd5f7..1c54ea269a 100644 --- a/libraries/avatars-renderer/src/avatars-renderer/Head.cpp +++ b/libraries/avatars-renderer/src/avatars-renderer/Head.cpp @@ -89,8 +89,7 @@ void Head::simulate(float deltaTime) { _timeWithoutTalking += deltaTime; if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) { _timeWithoutTalking = 0.0f; - - } else if (_timeWithoutTalking < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) { + } else if (_timeWithoutTalking - deltaTime < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) { forceBlink = true; } diff --git a/libraries/avatars-renderer/src/avatars-renderer/SkeletonModel.cpp b/libraries/avatars-renderer/src/avatars-renderer/SkeletonModel.cpp index e1e5dc4282..d3453280ac 100644 --- a/libraries/avatars-renderer/src/avatars-renderer/SkeletonModel.cpp +++ b/libraries/avatars-renderer/src/avatars-renderer/SkeletonModel.cpp @@ -73,12 +73,13 @@ void SkeletonModel::initJointStates() { // Called within Model::simulate call, below. void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) { + assert(!_owningAvatar->isMyAvatar()); const FBXGeometry& geometry = getFBXGeometry(); Head* head = _owningAvatar->getHead(); // make sure lookAt is not too close to face (avoid crosseyes) - glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition(); + glm::vec3 lookAt = head->getCorrectedLookAtPosition(); glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition(); float focusDistance = glm::length(focusOffset); const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f; @@ -86,41 +87,36 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) { lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset; } - if (!_owningAvatar->isMyAvatar()) { - // no need to call Model::updateRig() because otherAvatars get their joint state - // copied directly from AvtarData::_jointData (there are no Rig animations to blend) - _needsUpdateClusterMatrices = true; + // no need to call Model::updateRig() because otherAvatars get their joint state + // copied directly from AvtarData::_jointData (there are no Rig animations to blend) + _needsUpdateClusterMatrices = true; - // This is a little more work than we really want. 
- // - // Other avatars joint, including their eyes, should already be set just like any other joints - // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt. - // - // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {... - // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now. - // We will revisit that as priorities allow, and particularly after the new rig/animation/joints. + // This is a little more work than we really want. + // + // Other avatars joint, including their eyes, should already be set just like any other joints + // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt. + // + // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {... + // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now. + // We will revisit that as priorities allow, and particularly after the new rig/animation/joints. - // If the head is not positioned, updateEyeJoints won't get the math right - glm::quat headOrientation; - _rig->getJointRotation(geometry.headJointIndex, headOrientation); - glm::vec3 eulers = safeEulerAngles(headOrientation); - head->setBasePitch(glm::degrees(-eulers.x)); - head->setBaseYaw(glm::degrees(eulers.y)); - head->setBaseRoll(glm::degrees(-eulers.z)); + // If the head is not positioned, updateEyeJoints won't get the math right + glm::quat headOrientation; + _rig->getJointRotation(geometry.headJointIndex, headOrientation); + glm::vec3 eulers = safeEulerAngles(headOrientation); + head->setBasePitch(glm::degrees(-eulers.x)); + head->setBaseYaw(glm::degrees(eulers.y)); + head->setBaseRoll(glm::degrees(-eulers.z)); - Rig::EyeParameters eyeParams; - eyeParams.eyeLookAt = lookAt; - eyeParams.eyeSaccade = glm::vec3(0.0f); - eyeParams.modelRotation = getRotation(); - eyeParams.modelTranslation = getTranslation(); - eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex; - eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex; + Rig::EyeParameters eyeParams; + eyeParams.eyeLookAt = lookAt; + eyeParams.eyeSaccade = glm::vec3(0.0f); + eyeParams.modelRotation = getRotation(); + eyeParams.modelTranslation = getTranslation(); + eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex; + eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex; - _rig->updateFromEyeParameters(eyeParams); - } - - // evaluate AnimGraph animation and update jointStates. 
- Parent::updateRig(deltaTime, parentTransform); + _rig->updateFromEyeParameters(eyeParams); } void SkeletonModel::updateAttitude() { diff --git a/libraries/fbx/src/OBJReader.cpp b/libraries/fbx/src/OBJReader.cpp index 167cb8caac..1445d14d84 100644 --- a/libraries/fbx/src/OBJReader.cpp +++ b/libraries/fbx/src/OBJReader.cpp @@ -273,10 +273,9 @@ std::tuple requestData(QUrl& url) { return std::make_tuple(false, QByteArray()); } - request->send(); - QEventLoop loop; QObject::connect(request, &ResourceRequest::finished, &loop, &QEventLoop::quit); + request->send(); loop.exec(); if (request->getResult() == ResourceRequest::Success) { diff --git a/libraries/gpu-gl/CMakeLists.txt b/libraries/gpu-gl/CMakeLists.txt index 3e3853532a..65130d6d07 100644 --- a/libraries/gpu-gl/CMakeLists.txt +++ b/libraries/gpu-gl/CMakeLists.txt @@ -1,5 +1,5 @@ set(TARGET_NAME gpu-gl) -setup_hifi_library() +setup_hifi_library(Concurrent) link_hifi_libraries(shared gl gpu) if (UNIX) target_link_libraries(${TARGET_NAME} pthread) diff --git a/libraries/gpu-gl/src/gpu/gl/GLTexture.cpp b/libraries/gpu-gl/src/gpu/gl/GLTexture.cpp index 5534419eaa..84dc49deba 100644 --- a/libraries/gpu-gl/src/gpu/gl/GLTexture.cpp +++ b/libraries/gpu-gl/src/gpu/gl/GLTexture.cpp @@ -160,8 +160,6 @@ const uvec3 GLVariableAllocationSupport::INITIAL_MIP_TRANSFER_DIMENSIONS { 64, 6 WorkQueue GLVariableAllocationSupport::_transferQueue; WorkQueue GLVariableAllocationSupport::_promoteQueue; WorkQueue GLVariableAllocationSupport::_demoteQueue; -TexturePointer GLVariableAllocationSupport::_currentTransferTexture; -TransferJobPointer GLVariableAllocationSupport::_currentTransferJob; size_t GLVariableAllocationSupport::_frameTexturesCreated { 0 }; #define OVERSUBSCRIBED_PRESSURE_VALUE 0.95f @@ -176,30 +174,19 @@ const uvec3 GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS { 1024, 1024, 1 const size_t GLVariableAllocationSupport::MAX_TRANSFER_SIZE = GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.x * GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.y * 4; #if THREADED_TEXTURE_BUFFERING -std::shared_ptr TransferJob::_bufferThread { nullptr }; -std::atomic TransferJob::_shutdownBufferingThread { false }; -Mutex TransferJob::_mutex; -TransferJob::VoidLambdaQueue TransferJob::_bufferLambdaQueue; -void TransferJob::startTransferLoop() { - if (_bufferThread) { - return; - } - _shutdownBufferingThread = false; - _bufferThread = std::make_shared([] { - TransferJob::bufferLoop(); +TexturePointer GLVariableAllocationSupport::_currentTransferTexture; +TransferJobPointer GLVariableAllocationSupport::_currentTransferJob; +QThreadPool* TransferJob::_bufferThreadPool { nullptr }; + +void TransferJob::startBufferingThread() { + static std::once_flag once; + std::call_once(once, [&] { + _bufferThreadPool = new QThreadPool(qApp); + _bufferThreadPool->setMaxThreadCount(1); }); } -void TransferJob::stopTransferLoop() { - if (!_bufferThread) { - return; - } - _shutdownBufferingThread = true; - _bufferThread->join(); - _bufferThread.reset(); - _shutdownBufferingThread = false; -} #endif TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t targetMip, uint8_t face, uint32_t lines, uint32_t lineOffset) @@ -233,7 +220,6 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t // Buffering can invoke disk IO, so it should be off of the main and render threads _bufferingLambda = [=] { _mipData = _parent._gpuObject.accessStoredMipFace(sourceMip, face)->createView(_transferSize, _transferOffset); - _bufferingCompleted = 
true; }; _transferLambda = [=] { @@ -243,65 +229,66 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t } TransferJob::TransferJob(const GLTexture& parent, std::function transferLambda) - : _parent(parent), _bufferingCompleted(true), _transferLambda(transferLambda) { + : _parent(parent), _bufferingRequired(false), _transferLambda(transferLambda) { } TransferJob::~TransferJob() { Backend::updateTextureTransferPendingSize(_transferSize, 0); } - bool TransferJob::tryTransfer() { - // Disable threaded texture transfer for now #if THREADED_TEXTURE_BUFFERING // Are we ready to transfer - if (_bufferingCompleted) { - _transferLambda(); + if (!bufferingCompleted()) { + startBuffering(); + return false; + } +#else + if (_bufferingRequired) { + _bufferingLambda(); + } +#endif + _transferLambda(); + return true; +} + +#if THREADED_TEXTURE_BUFFERING +bool TransferJob::bufferingRequired() const { + if (!_bufferingRequired) { + return false; + } + + // The default state of a QFuture is with status Canceled | Started | Finished, + // so we have to check isCancelled before we check the actual state + if (_bufferingStatus.isCanceled()) { return true; } - startBuffering(); - return false; -#else - if (!_bufferingCompleted) { - _bufferingLambda(); - _bufferingCompleted = true; - } - _transferLambda(); - return true; -#endif + return !_bufferingStatus.isStarted(); } -#if THREADED_TEXTURE_BUFFERING +bool TransferJob::bufferingCompleted() const { + if (!_bufferingRequired) { + return true; + } + + // The default state of a QFuture is with status Canceled | Started | Finished, + // so we have to check isCancelled before we check the actual state + if (_bufferingStatus.isCanceled()) { + return false; + } + + return _bufferingStatus.isFinished(); +} void TransferJob::startBuffering() { - if (_bufferingStarted) { - return; - } - _bufferingStarted = true; - { - Lock lock(_mutex); - _bufferLambdaQueue.push(_bufferingLambda); - } -} - -void TransferJob::bufferLoop() { - while (!_shutdownBufferingThread) { - VoidLambdaQueue workingQueue; - { - Lock lock(_mutex); - _bufferLambdaQueue.swap(workingQueue); - } - - if (workingQueue.empty()) { - QThread::msleep(5); - continue; - } - - while (!workingQueue.empty()) { - workingQueue.front()(); - workingQueue.pop(); - } + if (bufferingRequired()) { + assert(_bufferingStatus.isCanceled()); + _bufferingStatus = QtConcurrent::run(_bufferThreadPool, [=] { + _bufferingLambda(); + }); + assert(!_bufferingStatus.isCanceled()); + assert(_bufferingStatus.isStarted()); } } #endif @@ -316,7 +303,9 @@ GLVariableAllocationSupport::~GLVariableAllocationSupport() { void GLVariableAllocationSupport::addMemoryManagedTexture(const TexturePointer& texturePointer) { _memoryManagedTextures.push_back(texturePointer); - addToWorkQueue(texturePointer); + if (MemoryPressureState::Idle != _memoryPressureState) { + addToWorkQueue(texturePointer); + } } void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePointer) { @@ -345,10 +334,8 @@ void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePo break; case MemoryPressureState::Idle: - break; - - default: Q_UNREACHABLE(); + break; } } @@ -364,10 +351,10 @@ WorkQueue& GLVariableAllocationSupport::getActiveWorkQueue() { case MemoryPressureState::Transfer: return _transferQueue; - default: + case MemoryPressureState::Idle: + Q_UNREACHABLE(); break; } - Q_UNREACHABLE(); return empty; } @@ -460,16 +447,11 @@ void GLVariableAllocationSupport::updateMemoryPressure() { } if (newState != 
_memoryPressureState) { + _memoryPressureState = newState; #if THREADED_TEXTURE_BUFFERING if (MemoryPressureState::Transfer == _memoryPressureState) { - TransferJob::stopTransferLoop(); + TransferJob::startBufferingThread(); } - _memoryPressureState = newState; - if (MemoryPressureState::Transfer == _memoryPressureState) { - TransferJob::startTransferLoop(); - } -#else - _memoryPressureState = newState; #endif // Clear the existing queue _transferQueue = WorkQueue(); @@ -487,49 +469,111 @@ void GLVariableAllocationSupport::updateMemoryPressure() { } } +TexturePointer GLVariableAllocationSupport::getNextWorkQueueItem(WorkQueue& workQueue) { + while (!workQueue.empty()) { + auto workTarget = workQueue.top(); + + auto texture = workTarget.first.lock(); + if (!texture) { + workQueue.pop(); + continue; + } + + // Check whether the resulting texture can actually have work performed + GLTexture* gltexture = Backend::getGPUObject(*texture); + GLVariableAllocationSupport* vartexture = dynamic_cast(gltexture); + switch (_memoryPressureState) { + case MemoryPressureState::Oversubscribed: + if (vartexture->canDemote()) { + return texture; + } + break; + + case MemoryPressureState::Undersubscribed: + if (vartexture->canPromote()) { + return texture; + } + break; + + case MemoryPressureState::Transfer: + if (vartexture->hasPendingTransfers()) { + return texture; + } + break; + + case MemoryPressureState::Idle: + Q_UNREACHABLE(); + break; + } + + // If we got here, then the texture has no work to do in the current state, + // so pop it off the queue and continue + workQueue.pop(); + } + + return TexturePointer(); +} + +void GLVariableAllocationSupport::processWorkQueue(WorkQueue& workQueue) { + if (workQueue.empty()) { + return; + } + + // Get the front of the work queue to perform work + auto texture = getNextWorkQueueItem(workQueue); + if (!texture) { + return; + } + + // Grab the first item off the demote queue + PROFILE_RANGE(render_gpu_gl, __FUNCTION__); + + GLTexture* gltexture = Backend::getGPUObject(*texture); + GLVariableAllocationSupport* vartexture = dynamic_cast(gltexture); + switch (_memoryPressureState) { + case MemoryPressureState::Oversubscribed: + vartexture->demote(); + workQueue.pop(); + addToWorkQueue(texture); + break; + + case MemoryPressureState::Undersubscribed: + vartexture->promote(); + workQueue.pop(); + addToWorkQueue(texture); + break; + + case MemoryPressureState::Transfer: + if (vartexture->executeNextTransfer(texture)) { + workQueue.pop(); + addToWorkQueue(texture); + +#if THREADED_TEXTURE_BUFFERING + // Eagerly start the next buffering job if possible + texture = getNextWorkQueueItem(workQueue); + if (texture) { + gltexture = Backend::getGPUObject(*texture); + vartexture = dynamic_cast(gltexture); + vartexture->executeNextBuffer(texture); + } +#endif + } + break; + + case MemoryPressureState::Idle: + Q_UNREACHABLE(); + break; + } +} + void GLVariableAllocationSupport::processWorkQueues() { if (MemoryPressureState::Idle == _memoryPressureState) { return; } auto& workQueue = getActiveWorkQueue(); - PROFILE_RANGE(render_gpu_gl, __FUNCTION__); - while (!workQueue.empty()) { - auto workTarget = workQueue.top(); - workQueue.pop(); - auto texture = workTarget.first.lock(); - if (!texture) { - continue; - } - - // Grab the first item off the demote queue - GLTexture* gltexture = Backend::getGPUObject(*texture); - GLVariableAllocationSupport* vartexture = dynamic_cast(gltexture); - if (MemoryPressureState::Oversubscribed == _memoryPressureState) { - if (!vartexture->canDemote()) { 
- continue; - } - vartexture->demote(); - _memoryPressureStateStale = true; - } else if (MemoryPressureState::Undersubscribed == _memoryPressureState) { - if (!vartexture->canPromote()) { - continue; - } - vartexture->promote(); - _memoryPressureStateStale = true; - } else if (MemoryPressureState::Transfer == _memoryPressureState) { - if (!vartexture->hasPendingTransfers()) { - continue; - } - vartexture->executeNextTransfer(texture); - } else { - Q_UNREACHABLE(); - } - - // Reinject into the queue if more work to be done - addToWorkQueue(texture); - break; - } + // Do work on the front of the queue + processWorkQueue(workQueue); if (workQueue.empty()) { _memoryPressureState = MemoryPressureState::Idle; @@ -543,28 +587,83 @@ void GLVariableAllocationSupport::manageMemory() { processWorkQueues(); } +bool GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) { +#if THREADED_TEXTURE_BUFFERING + // If a transfer job is active on the buffering thread, but has not completed it's buffering lambda, + // then we need to exit early, since we don't want to have the transfer job leave scope while it's + // being used in another thread -- See https://highfidelity.fogbugz.com/f/cases/4626 + if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) { + return false; + } +#endif -void GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) { if (_populatedMip <= _allocatedMip) { +#if THREADED_TEXTURE_BUFFERING + _currentTransferJob.reset(); + _currentTransferTexture.reset(); +#endif + return true; + } + + // If the transfer queue is empty, rebuild it + if (_pendingTransfers.empty()) { + populateTransferQueue(); + } + + bool result = false; + if (!_pendingTransfers.empty()) { +#if THREADED_TEXTURE_BUFFERING + // If there is a current transfer, but it's not the top of the pending transfer queue, then it's an orphan, so we want to abandon it. 
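For reference, the QFuture bookkeeping that TransferJob::bufferingRequired() and bufferingCompleted() rely on above: a default-constructed QFuture reports Canceled | Started | Finished, so isCanceled() has to be tested before the real state, and buffering work is serialized through a one-thread QThreadPool. A hypothetical stand-alone sketch (BufferedJob is illustrative, not the TransferJob class itself):

#include <QtConcurrent/QtConcurrent>
#include <QFuture>
#include <QThreadPool>

class BufferedJob {
public:
    // one pooled thread so buffering jobs run strictly one at a time, as in the patch
    static QThreadPool* bufferPool() {
        static QThreadPool* pool = [] {
            auto p = new QThreadPool();   // the patch parents its pool to qApp
            p->setMaxThreadCount(1);
            return p;
        }();
        return pool;
    }

    void startBuffering() {
        _status = QtConcurrent::run(bufferPool(), [this] { /* disk IO / decode here */ });
    }

    bool bufferingCompleted() const {
        // a default-constructed QFuture is Canceled | Started | Finished,
        // so a canceled status here means "never started", not "done"
        if (_status.isCanceled()) {
            return false;
        }
        return _status.isFinished();
    }

private:
    QFuture<void> _status;
};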
+ if (_currentTransferJob && _currentTransferJob != _pendingTransfers.front()) { + _currentTransferJob.reset(); + } + + if (!_currentTransferJob) { + // Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job + // doesn't leave scope, causing a crash in the buffering thread + _currentTransferJob = _pendingTransfers.front(); + + // Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture + _currentTransferTexture = currentTexture; + } + + // transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we execute a try here to determine if the buffering + // is complete + if (_currentTransferJob->tryTransfer()) { + _pendingTransfers.pop(); + // Once a given job is finished, release the shared pointers keeping them alive + _currentTransferTexture.reset(); + _currentTransferJob.reset(); + result = true; + } +#else + if (_pendingTransfers.front()->tryTransfer()) { + _pendingTransfers.pop(); + result = true; + } +#endif + } + return result; +} + +#if THREADED_TEXTURE_BUFFERING +void GLVariableAllocationSupport::executeNextBuffer(const TexturePointer& currentTexture) { + if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) { return; } + // If the transfer queue is empty, rebuild it if (_pendingTransfers.empty()) { populateTransferQueue(); } if (!_pendingTransfers.empty()) { - // Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture - _currentTransferTexture = currentTexture; - // Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job - // doesn't leave scope, causing a crash in the buffering thread - _currentTransferJob = _pendingTransfers.front(); - // transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we execute a try here to determine if the buffering - // is complete - if (_currentTransferJob->tryTransfer()) { - _pendingTransfers.pop(); - _currentTransferTexture.reset(); - _currentTransferJob.reset(); + if (!_currentTransferJob) { + _currentTransferJob = _pendingTransfers.front(); + _currentTransferTexture = currentTexture; } + + _currentTransferJob->startBuffering(); } } +#endif diff --git a/libraries/gpu-gl/src/gpu/gl/GLTexture.h b/libraries/gpu-gl/src/gpu/gl/GLTexture.h index 877966f2d9..c6ce2a2495 100644 --- a/libraries/gpu-gl/src/gpu/gl/GLTexture.h +++ b/libraries/gpu-gl/src/gpu/gl/GLTexture.h @@ -8,6 +8,9 @@ #ifndef hifi_gpu_gl_GLTexture_h #define hifi_gpu_gl_GLTexture_h +#include +#include + #include "GLShared.h" #include "GLBackend.h" #include "GLTexelFormat.h" @@ -47,24 +50,19 @@ public: class TransferJob { using VoidLambda = std::function; using VoidLambdaQueue = std::queue; - using ThreadPointer = std::shared_ptr; const GLTexture& _parent; Texture::PixelsPointer _mipData; size_t _transferOffset { 0 }; size_t _transferSize { 0 }; - // Indicates if a transfer from backing storage to interal storage has started - bool _bufferingStarted { false }; - bool _bufferingCompleted { false }; + bool _bufferingRequired { true }; VoidLambda _transferLambda; VoidLambda _bufferingLambda; #if THREADED_TEXTURE_BUFFERING - static Mutex _mutex; - static VoidLambdaQueue _bufferLambdaQueue; - static ThreadPointer _bufferThread; - static std::atomic _shutdownBufferingThread; - static void bufferLoop(); + // Indicates if a transfer 
from backing storage to interal storage has started + QFuture _bufferingStatus; + static QThreadPool* _bufferThreadPool; #endif public: @@ -75,14 +73,13 @@ public: bool tryTransfer(); #if THREADED_TEXTURE_BUFFERING - static void startTransferLoop(); - static void stopTransferLoop(); + void startBuffering(); + bool bufferingRequired() const; + bool bufferingCompleted() const; + static void startBufferingThread(); #endif private: -#if THREADED_TEXTURE_BUFFERING - void startBuffering(); -#endif void transfer(); }; @@ -100,8 +97,10 @@ protected: static WorkQueue _transferQueue; static WorkQueue _promoteQueue; static WorkQueue _demoteQueue; +#if THREADED_TEXTURE_BUFFERING static TexturePointer _currentTransferTexture; static TransferJobPointer _currentTransferJob; +#endif static const uvec3 INITIAL_MIP_TRANSFER_DIMENSIONS; static const uvec3 MAX_TRANSFER_DIMENSIONS; static const size_t MAX_TRANSFER_SIZE; @@ -109,6 +108,8 @@ protected: static void updateMemoryPressure(); static void processWorkQueues(); + static void processWorkQueue(WorkQueue& workQueue); + static TexturePointer getNextWorkQueueItem(WorkQueue& workQueue); static void addToWorkQueue(const TexturePointer& texture); static WorkQueue& getActiveWorkQueue(); @@ -118,7 +119,10 @@ protected: bool canPromote() const { return _allocatedMip > _minAllocatedMip; } bool canDemote() const { return _allocatedMip < _maxAllocatedMip; } bool hasPendingTransfers() const { return _populatedMip > _allocatedMip; } - void executeNextTransfer(const TexturePointer& currentTexture); +#if THREADED_TEXTURE_BUFFERING + void executeNextBuffer(const TexturePointer& currentTexture); +#endif + bool executeNextTransfer(const TexturePointer& currentTexture); virtual void populateTransferQueue() = 0; virtual void promote() = 0; virtual void demote() = 0; diff --git a/libraries/gpu-gl/src/gpu/gl45/GL45Backend.h b/libraries/gpu-gl/src/gpu/gl45/GL45Backend.h index 8319e61382..fe2761b37d 100644 --- a/libraries/gpu-gl/src/gpu/gl45/GL45Backend.h +++ b/libraries/gpu-gl/src/gpu/gl45/GL45Backend.h @@ -17,7 +17,6 @@ #include #define INCREMENTAL_TRANSFER 0 -#define THREADED_TEXTURE_BUFFERING 1 #define GPU_SSBO_TRANSFORM_OBJECT 1 namespace gpu { namespace gl45 { diff --git a/libraries/networking/src/FileCache.cpp b/libraries/networking/src/FileCache.cpp index 0a859d511b..43055e7ed6 100644 --- a/libraries/networking/src/FileCache.cpp +++ b/libraries/networking/src/FileCache.cpp @@ -17,6 +17,7 @@ #include #include +#include #include @@ -110,13 +111,14 @@ FilePointer FileCache::writeFile(const char* data, File::Metadata&& metadata) { return file; } - // write the new file - FILE* saveFile = fopen(filepath.c_str(), "wb"); - if (saveFile != nullptr && fwrite(data, metadata.length, 1, saveFile) && fclose(saveFile) == 0) { + QSaveFile saveFile(QString::fromStdString(filepath)); + if (saveFile.open(QIODevice::WriteOnly) + && saveFile.write(data, metadata.length) == static_cast(metadata.length) + && saveFile.commit()) { + file = addFile(std::move(metadata), filepath); } else { - qCWarning(file_cache, "[%s] Failed to write %s (%s)", _dirname.c_str(), metadata.key.c_str(), strerror(errno)); - errno = 0; + qCWarning(file_cache, "[%s] Failed to write %s", _dirname.c_str(), metadata.key.c_str()); } return file; diff --git a/libraries/script-engine/src/TabletScriptingInterface.cpp b/libraries/script-engine/src/TabletScriptingInterface.cpp index 139fe0552d..644f1e6f0c 100644 --- a/libraries/script-engine/src/TabletScriptingInterface.cpp +++ 
b/libraries/script-engine/src/TabletScriptingInterface.cpp @@ -250,6 +250,10 @@ static void addButtonProxyToQmlTablet(QQuickItem* qmlTablet, TabletButtonProxy* if (QThread::currentThread() != qmlTablet->thread()) { connectionType = Qt::BlockingQueuedConnection; } + if (buttonProxy == NULL){ + qCCritical(scriptengine) << "TabletScriptingInterface addButtonProxyToQmlTablet buttonProxy is NULL"; + return; + } bool hasResult = QMetaObject::invokeMethod(qmlTablet, "addButtonProxy", connectionType, Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, buttonProxy->getProperties())); if (!hasResult) { diff --git a/plugins/openvr/src/ViveControllerManager.cpp b/plugins/openvr/src/ViveControllerManager.cpp index 86b37135d2..6e5697730b 100644 --- a/plugins/openvr/src/ViveControllerManager.cpp +++ b/plugins/openvr/src/ViveControllerManager.cpp @@ -10,6 +10,7 @@ // #include "ViveControllerManager.h" +#include #include #include @@ -20,7 +21,11 @@ #include #include #include +#include #include +#include +#include +#include #include @@ -36,14 +41,32 @@ void releaseOpenVrSystem(); static const char* CONTROLLER_MODEL_STRING = "vr_controller_05_wireless_b"; +const quint64 CALIBRATION_TIMELAPSE = 2 * USECS_PER_SECOND; static const char* MENU_PARENT = "Avatar"; static const char* MENU_NAME = "Vive Controllers"; static const char* MENU_PATH = "Avatar" ">" "Vive Controllers"; static const char* RENDER_CONTROLLERS = "Render Hand Controllers"; +static const int MIN_PUCK_COUNT = 2; +static const int MIN_FEET_AND_HIPS = 3; +static const int MIN_FEET_HIPS_CHEST = 4; +static const int FIRST_FOOT = 0; +static const int SECOND_FOOT = 1; +static const int HIP = 2; +static const int CHEST = 3; const char* ViveControllerManager::NAME { "OpenVR" }; +static glm::mat4 computeOffset(glm::mat4 defaultToReferenceMat, glm::mat4 defaultJointMat, controller::Pose puckPose) { + glm::mat4 poseMat = createMatFromQuatAndPos(puckPose.rotation, puckPose.translation); + glm::mat4 referenceJointMat = defaultToReferenceMat * defaultJointMat; + return glm::inverse(poseMat) * referenceJointMat; +} + +static bool sortPucksYPosition(std::pair firstPuck, std::pair secondPuck) { + return (firstPuck.second.translation.y < firstPuck.second.translation.y); +} + bool ViveControllerManager::isSupported() const { return openVrSupported(); } @@ -125,6 +148,7 @@ void ViveControllerManager::pluginUpdate(float deltaTime, const controller::Inpu void ViveControllerManager::InputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) { _poseStateMap.clear(); _buttonPressedMap.clear(); + _validTrackedObjects.clear(); // While the keyboard is open, we defer strictly to the keyboard values if (isOpenVrKeyboardShown()) { @@ -143,6 +167,7 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle // collect poses for all generic trackers for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) { handleTrackedObject(i, inputCalibrationData); + handleHmd(i, inputCalibrationData); } // handle haptics @@ -164,10 +189,27 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle numTrackedControllers++; } _trackedControllers = numTrackedControllers; + + if (checkForCalibrationEvent()) { + quint64 currentTime = usecTimestampNow(); + if (!_timeTilCalibrationSet) { + _timeTilCalibrationSet = true; + _timeTilCalibration = currentTime + CALIBRATION_TIMELAPSE; + } + + if (currentTime > _timeTilCalibration && !_triggersPressedHandled) { + _triggersPressedHandled = true; + 
calibrateOrUncalibrate(inputCalibrationData); + } + } else { + _triggersPressedHandled = false; + _timeTilCalibrationSet = false; + } + + updateCalibratedLimbs(); } void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) { - uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex; if (_system->IsTrackedDeviceConnected(deviceIndex) && @@ -185,12 +227,129 @@ void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceInde // transform into avatar frame glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat; _poseStateMap[poseIndex] = pose.transform(controllerToAvatar); + _validTrackedObjects.push_back(std::make_pair(poseIndex, _poseStateMap[poseIndex])); } else { controller::Pose invalidPose; _poseStateMap[poseIndex] = invalidPose; } } +void ViveControllerManager::InputDevice::calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration) { + if (!_calibrated) { + calibrate(inputCalibration); + } else { + uncalibrate(); + } +} + +void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibrationData& inputCalibration) { + // convert the hmd head from sensor space to avatar space + glm::mat4 hmdSensorFlippedMat = inputCalibration.hmdSensorMat * Matrices::Y_180; + glm::mat4 sensorToAvatarMat = glm::inverse(inputCalibration.avatarMat) * inputCalibration.sensorToWorldMat; + glm::mat4 hmdAvatarMat = sensorToAvatarMat * hmdSensorFlippedMat; + + // cancel the roll and pitch for the hmd head + glm::quat hmdRotation = cancelOutRollAndPitch(glmExtractRotation(hmdAvatarMat)); + glm::vec3 hmdTranslation = extractTranslation(hmdAvatarMat); + glm::mat4 currentHmd = createMatFromQuatAndPos(hmdRotation, hmdTranslation); + + // calculate the offset from the centerOfEye to defaultHeadMat + glm::mat4 defaultHeadOffset = glm::inverse(inputCalibration.defaultCenterEyeMat) * inputCalibration.defaultHeadMat; + + glm::mat4 currentHead = currentHmd * defaultHeadOffset; + + // calculate the defaultToRefrenceXform + glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat); + + int puckCount = (int)_validTrackedObjects.size(); + if (puckCount == MIN_PUCK_COUNT) { + _config = Config::Feet; + } else if (puckCount == MIN_FEET_AND_HIPS) { + _config = Config::FeetAndHips; + } else if (puckCount >= MIN_FEET_HIPS_CHEST) { + _config = Config::FeetHipsAndChest; + } else { + return; + } + + std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition); + + + + auto& firstFoot = _validTrackedObjects[FIRST_FOOT]; + auto& secondFoot = _validTrackedObjects[SECOND_FOOT]; + controller::Pose& firstFootPose = firstFoot.second; + controller::Pose& secondFootPose = secondFoot.second; + + if (firstFootPose.translation.x < secondFootPose.translation.x) { + _jointToPuckMap[controller::LEFT_FOOT] = firstFoot.first; + _pucksOffset[firstFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultLeftFoot, firstFootPose); + _jointToPuckMap[controller::RIGHT_FOOT] = secondFoot.first; + _pucksOffset[secondFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultRightFoot, secondFootPose); + + } else { + _jointToPuckMap[controller::LEFT_FOOT] = secondFoot.first; + _pucksOffset[secondFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultLeftFoot, secondFootPose); + _jointToPuckMap[controller::RIGHT_FOOT] = firstFoot.first; + 
_pucksOffset[firstFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultRightFoot, firstFootPose); + } + + if (_config == Config::Feet) { + // done + } else if (_config == Config::FeetAndHips) { + _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first; + _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second); + } else if (_config == Config::FeetHipsAndChest) { + _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first; + _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second); + _jointToPuckMap[controller::SPINE2] = _validTrackedObjects[CHEST].first; + _pucksOffset[_validTrackedObjects[CHEST].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultSpine2, _validTrackedObjects[CHEST].second); + } + _calibrated = true; +} + +void ViveControllerManager::InputDevice::uncalibrate() { + _pucksOffset.clear(); + _jointToPuckMap.clear(); + _calibrated = false; +} + +void ViveControllerManager::InputDevice::updateCalibratedLimbs() { + _poseStateMap[controller::LEFT_FOOT] = addOffsetToPuckPose(controller::LEFT_FOOT); + _poseStateMap[controller::RIGHT_FOOT] = addOffsetToPuckPose(controller::RIGHT_FOOT); + _poseStateMap[controller::HIPS] = addOffsetToPuckPose(controller::HIPS); + _poseStateMap[controller::SPINE2] = addOffsetToPuckPose(controller::SPINE2); +} + +controller::Pose ViveControllerManager::InputDevice::addOffsetToPuckPose(int joint) const { + auto puck = _jointToPuckMap.find(joint); + if (puck != _jointToPuckMap.end()) { + uint32_t puckIndex = puck->second; + auto puckPose = _poseStateMap.find(puckIndex); + auto puckOffset = _pucksOffset.find(puckIndex); + + if ((puckPose != _poseStateMap.end()) && (puckOffset != _pucksOffset.end())) { + return puckPose->second.postTransform(puckOffset->second); + } + } + return controller::Pose(); +} + +void ViveControllerManager::InputDevice::handleHmd(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) { + uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex; + + if (_system->IsTrackedDeviceConnected(deviceIndex) && + _system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_HMD && + _nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) { + + const mat4& mat = _nextSimPoseData.poses[deviceIndex]; + const vec3 linearVelocity = _nextSimPoseData.linearVelocities[deviceIndex]; + const vec3 angularVelocity = _nextSimPoseData.angularVelocities[deviceIndex]; + + handleHeadPoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity); + } +} + void ViveControllerManager::InputDevice::handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand) { if (_system->IsTrackedDeviceConnected(deviceIndex) && @@ -262,7 +421,7 @@ void ViveControllerManager::InputDevice::handleAxisEvent(float deltaTime, uint32 _axisStateMap[isLeftHand ? LY : RY] = stick.y; } else if (axis == vr::k_EButton_SteamVR_Trigger) { _axisStateMap[isLeftHand ? LT : RT] = x; - // The click feeling on the Vive controller trigger represents a value of *precisely* 1.0, + // The click feeling on the Vive controller trigger represents a value of *precisely* 1.0, // so we can expose that as an additional button if (x >= 1.0f) { _buttonPressedMap.insert(isLeftHand ? 
LT_CLICK : RT_CLICK); @@ -276,6 +435,14 @@ enum ViveButtonChannel { RIGHT_APP_MENU }; +bool ViveControllerManager::InputDevice::checkForCalibrationEvent() { + auto endOfMap = _buttonPressedMap.end(); + auto leftTrigger = _buttonPressedMap.find(controller::LT); + auto rightTrigger = _buttonPressedMap.find(controller::RT); + auto leftAppButton = _buttonPressedMap.find(LEFT_APP_MENU); + auto rightAppButton = _buttonPressedMap.find(RIGHT_APP_MENU); + return ((leftTrigger != endOfMap && leftAppButton != endOfMap) && (rightTrigger != endOfMap && rightAppButton != endOfMap)); +} // These functions do translation from the Steam IDs to the standard controller IDs void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand) { @@ -305,6 +472,19 @@ void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint } } +void ViveControllerManager::InputDevice::handleHeadPoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, + const vec3& linearVelocity, const vec3& angularVelocity) { + + // perform a 180 flip to make the HMD face the +z instead of -z, because the head faces +z + glm::mat4 matYFlip = mat * Matrices::Y_180; + controller::Pose pose(extractTranslation(matYFlip), glmExtractRotation(matYFlip), linearVelocity, angularVelocity); + + glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat; + glm::mat4 defaultHeadOffset = glm::inverse(inputCalibrationData.defaultCenterEyeMat) * inputCalibrationData.defaultHeadMat; + controller::Pose hmdHeadPose = pose.transform(sensorToAvatar); + _poseStateMap[controller::HEAD] = hmdHeadPose.postTransform(defaultHeadOffset); +} + void ViveControllerManager::InputDevice::handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity, bool isLeftHand) { @@ -353,7 +533,7 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef float hapticTime = strength * MAX_HAPTIC_TIME; if (hapticTime < duration * 1000.0f) { _system->TriggerHapticPulse(deviceIndex, 0, hapticTime); - } + } float remainingHapticTime = duration - (hapticTime / 1000.0f + deltaTime * 1000.0f); // in milliseconds if (leftHand) { @@ -404,6 +584,11 @@ controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableI // 3d location of controller makePair(LEFT_HAND, "LeftHand"), makePair(RIGHT_HAND, "RightHand"), + makePair(LEFT_FOOT, "LeftFoot"), + makePair(RIGHT_FOOT, "RightFoot"), + makePair(HIPS, "Hips"), + makePair(SPINE2, "Spine2"), + makePair(HEAD, "Head"), // 16 tracked poses makePair(TRACKED_OBJECT_00, "TrackedObject00"), diff --git a/plugins/openvr/src/ViveControllerManager.h b/plugins/openvr/src/ViveControllerManager.h index dc1883d5e4..4e8b2b3a04 100644 --- a/plugins/openvr/src/ViveControllerManager.h +++ b/plugins/openvr/src/ViveControllerManager.h @@ -14,9 +14,11 @@ #include #include +#include +#include +#include #include - #include #include #include @@ -58,13 +60,21 @@ private: bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override; void hapticsHelper(float deltaTime, bool leftHand); - + void calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration); + void calibrate(const controller::InputCalibrationData& inputCalibration); + void uncalibrate(); + controller::Pose addOffsetToPuckPose(int joint) const; + void
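Editor's note: the new handleHeadPoseEvent() above applies three transforms in a row: a Y-180 flip so the HMD faces +z like the avatar's head joint, a sensor-to-avatar change of basis, and the default center-eye-to-head offset. Written out in plain glm, and assuming (as the code reads) that Pose::transform() pre-multiplies and Pose::postTransform() post-multiplies, the chain is:

```cpp
// Sketch of the matrix chain inside handleHeadPoseEvent(); the input matrices come from
// InputCalibrationData, and Y_180 is built locally here rather than taken from Matrices::Y_180.
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

glm::mat4 hmdToAvatarHead(const glm::mat4& hmdSensorPose,
                          const glm::mat4& avatarMat,
                          const glm::mat4& sensorToWorldMat,
                          const glm::mat4& defaultCenterEyeMat,
                          const glm::mat4& defaultHeadMat) {
    const glm::mat4 Y_180 = glm::mat4_cast(glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f)));
    glm::mat4 flipped = hmdSensorPose * Y_180;                                 // face +z like the head joint
    glm::mat4 sensorToAvatar = glm::inverse(avatarMat) * sensorToWorldMat;     // sensor space -> avatar space
    glm::mat4 headOffset = glm::inverse(defaultCenterEyeMat) * defaultHeadMat; // center-eye -> head joint
    return sensorToAvatar * flipped * headOffset;                              // HEAD pose published to the rig
}
```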
updateCalibratedLimbs(); + bool checkForCalibrationEvent(); void handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand); + void handleHmd(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData); void handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData); void handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand); void handleAxisEvent(float deltaTime, uint32_t axis, float x, float y, bool isLeftHand); void handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity, bool isLeftHand); + void handleHeadPoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, const vec3& linearVelocity, + const vec3& angularVelocity); void partitionTouchpad(int sButton, int xAxis, int yAxis, int centerPsuedoButton, int xPseudoButton, int yPseudoButton); class FilteredStick { @@ -90,10 +100,14 @@ private: float _timer { 0.0f }; glm::vec2 _stick { 0.0f, 0.0f }; }; - + enum class Config { Feet, FeetAndHips, FeetHipsAndChest, NoConfig }; + Config _config { Config::NoConfig }; FilteredStick _filteredLeftStick; FilteredStick _filteredRightStick; + std::vector> _validTrackedObjects; + std::map _pucksOffset; + std::map _jointToPuckMap; // perform an action when the InputDevice mutex is acquired. using Locker = std::unique_lock; template @@ -101,10 +115,14 @@ private: int _trackedControllers { 0 }; vr::IVRSystem*& _system; + quint64 _timeTilCalibration { 0 }; float _leftHapticStrength { 0.0f }; float _leftHapticDuration { 0.0f }; float _rightHapticStrength { 0.0f }; float _rightHapticDuration { 0.0f }; + bool _triggersPressedHandled { false }; + bool _calibrated { false }; + bool _timeTilCalibrationSet { false }; mutable std::recursive_mutex _lock; friend class ViveControllerManager; diff --git a/scripts/system/controllers/handControllerGrab.js b/scripts/system/controllers/handControllerGrab.js index b97e1ff049..f5c3e6eafa 100644 --- a/scripts/system/controllers/handControllerGrab.js +++ b/scripts/system/controllers/handControllerGrab.js @@ -1376,7 +1376,9 @@ function MyController(hand) { visible: true, alpha: 1, parentID: AVATAR_SELF_ID, - parentJointIndex: this.controllerJointIndex, + parentJointIndex: MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
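Editor's note: the header half of this hunk adds the calibration bookkeeping: _validTrackedObjects (pucks seen this frame), _jointToPuckMap and _pucksOffset (which puck drives which joint, and with what correction), the Config enum, and the _timeTilCalibration / _triggersPressedHandled / _timeTilCalibrationSet flags that gate the hold-both-triggers-and-menu-buttons gesture. The gating code itself sits just above this section, so the following is only a hedged sketch of how such flags are typically used; the one-second hold time is an assumption.

```cpp
// Hedged sketch of a hold-to-calibrate gate built from the flags declared above.
// The plugin's actual hold duration is not shown in this hunk, so 1 s is assumed.
#include <cstdint>

struct CalibrationGate {
    bool triggersPressedHandled { false };
    bool deadlineSet { false };
    uint64_t deadlineUsecs { 0 };

    // Returns true exactly once per hold, after the gesture has been held long enough.
    bool update(bool gesturePressed, uint64_t nowUsecs) {
        const uint64_t HOLD_TIME_USECS = 1000000;  // assumed 1 second
        if (!gesturePressed) {                     // released: rearm for the next hold
            triggersPressedHandled = false;
            deadlineSet = false;
            return false;
        }
        if (!deadlineSet) {                        // first frame of the hold: start the timer
            deadlineUsecs = nowUsecs + HOLD_TIME_USECS;
            deadlineSet = true;
        }
        if (nowUsecs >= deadlineUsecs && !triggersPressedHandled) {
            triggersPressedHandled = true;         // fire calibrateOrUncalibrate() once
            return true;
        }
        return false;
    }
};
```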
+ "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND" : + "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND"), endParentID: farParentID }; this.overlayLine = Overlays.addOverlay("line3d", lineProperties); diff --git a/scripts/system/makeUserConnection.js b/scripts/system/makeUserConnection.js index 78be54f774..52afc8883d 100644 --- a/scripts/system/makeUserConnection.js +++ b/scripts/system/makeUserConnection.js @@ -122,7 +122,8 @@ function debug() { var stateString = "<" + STATE_STRINGS[state] + ">"; var connecting = "[" + connectingId + "/" + connectingHandJointIndex + "]"; - print.apply(null, [].concat.apply([LABEL, stateString, JSON.stringify(waitingList), connecting], + var current = "[" + currentHand + "/" + currentHandJointIndex + "]" + print.apply(null, [].concat.apply([LABEL, stateString, current, JSON.stringify(waitingList), connecting], [].map.call(arguments, JSON.stringify))); } @@ -759,7 +760,10 @@ break; case "done": delete waitingList[senderID]; - if (state === STATES.CONNECTING && connectingId === senderID) { + if (connectingId !== senderID) { + break; + } + if (state === STATES.CONNECTING) { // if they are done, and didn't connect us, terminate our // connecting if (message.connectionId !== MyAvatar.sessionUUID) { @@ -768,11 +772,20 @@ // value for isKeyboard, as we should not change the animation // state anyways (if any) startHandshake(); + } else { + // they just created a connection request to us, and we are connecting to + // them, so lets just stop connecting and make connection.. + makeConnection(connectingId); + stopConnecting(); } } else { - // if waiting or inactive, lets clear the connecting id. If in makingConnection, - // do nothing - if (state !== STATES.MAKING_CONNECTION && connectingId === senderID) { + if (state == STATES.MAKING_CONNECTION) { + // we are making connection, they just started, so lets reset the + // poll count just in case + pollCount = 0; + } else { + // if waiting or inactive, lets clear the connecting id. If in makingConnection, + // do nothing clearConnecting(); if (state !== STATES.INACTIVE) { startHandshake(); diff --git a/scripts/system/pal.js b/scripts/system/pal.js index 9229ec772a..0500c13f9b 100644 --- a/scripts/system/pal.js +++ b/scripts/system/pal.js @@ -268,7 +268,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See break; case 'refreshConnections': print('Refreshing Connections...'); - getConnectionData(); + getConnectionData(false); UserActivityLogger.palAction("refresh_connections", ""); break; case 'removeConnection': @@ -281,25 +281,27 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. 
See print("Error: unable to remove connection", connectionUserName, error || response.status); return; } - getConnectionData(); + getConnectionData(false); }); break case 'removeFriend': friendUserName = message.params; + print("Removing " + friendUserName + " from friends."); request({ uri: METAVERSE_BASE + '/api/v1/user/friends/' + friendUserName, method: 'DELETE' }, function (error, response) { if (error || (response.status !== 'success')) { - print("Error: unable to unfriend", friendUserName, error || response.status); + print("Error: unable to unfriend " + friendUserName, error || response.status); return; } - getConnectionData(); + getConnectionData(friendUserName); }); break case 'addFriend': friendUserName = message.params; + print("Adding " + friendUserName + " to friends."); request({ uri: METAVERSE_BASE + '/api/v1/user/friends', method: 'POST', @@ -312,7 +314,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See print("Error: unable to friend " + friendUserName, error || response.status); return; } - getConnectionData(); // For now, just refresh all connection data. Later, just refresh the one friended row. + getConnectionData(friendUserName); } ); break; @@ -360,8 +362,6 @@ function getProfilePicture(username, callback) { // callback(url) if successfull }); } function getAvailableConnections(domain, callback) { // callback([{usename, location}...]) if successfull. (Logs otherwise) - // The back end doesn't do user connections yet. Fake it by getting all users that have made themselves accessible to us, - // and pretending that they are all connections. url = METAVERSE_BASE + '/api/v1/users?' if (domain) { url += 'status=' + domain.slice(1, -1); // without curly braces @@ -369,25 +369,22 @@ function getAvailableConnections(domain, callback) { // callback([{usename, loca url += 'filter=connections'; // regardless of whether online } requestJSON(url, function (connectionsData) { - // The back end doesn't include the profile picture data, but we can add that here. - // For our current purposes, there's no need to be fancy and try to reduce latency by doing some number of requests in parallel, - // so these requests are all sequential. - var users = connectionsData.users; - function addPicture(index) { - if (index >= users.length) { - return callback(users); - } - var user = users[index]; - getProfilePicture(user.username, function (url) { - user.profileUrl = url; - addPicture(index + 1); - }); - } - addPicture(0); + callback(connectionsData.users); }); } - -function getConnectionData(domain) { // Update all the usernames that I am entitled to see, using my login but not dependent on canKick. +function getInfoAboutUser(specificUsername, callback) { + url = METAVERSE_BASE + '/api/v1/users?filter=connections' + requestJSON(url, function (connectionsData) { + for (user in connectionsData.users) { + if (connectionsData.users[user].username === specificUsername) { + callback(connectionsData.users[user]); + return; + } + } + callback(false); + }); +} +function getConnectionData(specificUsername, domain) { // Update all the usernames that I am entitled to see, using my login but not dependent on canKick. 
function frob(user) { // get into the right format var formattedSessionId = user.location.node_id || ''; if (formattedSessionId !== '' && formattedSessionId.indexOf("{") != 0) { @@ -397,19 +394,29 @@ function getConnectionData(domain) { // Update all the usernames that I am entit sessionId: formattedSessionId, userName: user.username, connection: user.connection, - profileUrl: user.profileUrl, + profileUrl: user.images.thumbnail, placeName: (user.location.root || user.location.domain || {}).name || '' }; } - getAvailableConnections(domain, function (users) { - if (domain) { - users.forEach(function (user) { + if (specificUsername) { + getInfoAboutUser(specificUsername, function (user) { + if (user) { updateUser(frob(user)); - }); - } else { - sendToQml({ method: 'connections', params: users.map(frob) }); - } - }); + } else { + print('Error: Unable to find information about ' + specificUsername + ' in connectionsData!'); + } + }); + } else { + getAvailableConnections(domain, function (users) { + if (domain) { + users.forEach(function (user) { + updateUser(frob(user)); + }); + } else { + sendToQml({ method: 'connections', params: users.map(frob) }); + } + }); + } } // @@ -486,7 +493,7 @@ function populateNearbyUserList(selectData, oldAudioData) { data.push(avatarPalDatum); print('PAL data:', JSON.stringify(avatarPalDatum)); }); - getConnectionData(location.domainId); // Even admins don't get relationship data in requestUsernameFromID (which is still needed for admin status, which comes from domain). + getConnectionData(false, location.domainId); // Even admins don't get relationship data in requestUsernameFromID (which is still needed for admin status, which comes from domain). conserveResources = Object.keys(avatarsOfInterest).length > 20; sendToQml({ method: 'nearbyUsers', params: data }); if (selectData) { diff --git a/tutorial/ACAudioSearchAndInject_tutorial.js b/tutorial/ACAudioSearchAndInject_tutorial.js index 70e936bb1c..5e2998ff1e 100644 --- a/tutorial/ACAudioSearchAndInject_tutorial.js +++ b/tutorial/ACAudioSearchAndInject_tutorial.js @@ -1,4 +1,5 @@ "use strict"; + /*jslint nomen: true, plusplus: true, vars: true*/ /*global AvatarList, Entities, EntityViewer, Script, SoundCache, Audio, print, randFloat*/ // @@ -38,19 +39,27 @@ var DEFAULT_SOUND_DATA = { playbackGapRange: 0 // in ms }; +//var AGENT_AVATAR_POSITION = { x: -1.5327, y: 0.672515, z: 5.91573 }; +var AGENT_AVATAR_POSITION = { x: -2.83785, y: 1.45243, z: -13.6042 }; + //var isACScript = this.EntityViewer !== undefined; var isACScript = true; -Script.include("http://hifi-content.s3.amazonaws.com/ryan/development/utils_ryan.js"); if (isACScript) { Agent.isAvatar = true; // This puts a robot at 0,0,0, but is currently necessary in order to use AvatarList. Avatar.skeletonModelURL = "http://hifi-content.s3.amazonaws.com/ozan/dev/avatars/invisible_avatar/invisible_avatar.fst"; + Avatar.position = AGENT_AVATAR_POSITION; + Agent.isListeningToAudioStream = true; } function ignore() {} function debug() { // Display the arguments not just [Object object]. //print.apply(null, [].map.call(arguments, JSON.stringify)); } +function randFloat(low, high) { + return low + Math.random() * (high - low); +} + if (isACScript) { EntityViewer.setCenterRadius(QUERY_RADIUS); } @@ -93,6 +102,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n return; } var properties, soundData; // Latest data, pulled from local octree. 
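Editor's note: the tutorial script's randomizedNextPlay() spreads repeated injections out in time: for a non-looping sound with a playbackGap the base interval is the sound's duration plus that gap, otherwise a fixed recheck period, and a uniform jitter of up to playbackGapRange is added, clamped below so the next play is never scheduled in the past. The same arithmetic, parameterized rather than reading the script's globals:

```cpp
// Sketch of the scheduling arithmetic in randomizedNextPlay(); all values are in milliseconds.
#include <algorithm>
#include <random>

double randomizedNextPlayMsecs(double nowMsecs, bool repeating, double soundDurationMsecs,
                               double playbackGapMsecs, double playbackGapRangeMsecs,
                               double recheckTimeMsecs, std::mt19937& rng) {
    double base = repeating ? (soundDurationMsecs + playbackGapMsecs) : recheckTimeMsecs;
    // jitter is drawn from [-min(base, range), range], so the result never lands before nowMsecs
    std::uniform_real_distribution<double> jitter(-std::min(base, playbackGapRangeMsecs),
                                                  playbackGapRangeMsecs);
    return nowMsecs + base + jitter(rng);
}
```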
+ // getEntityProperties locks the tree, which competes with the asynchronous processing of queryOctree results. // Most entity updates are fast and only a very few do getEntityProperties. function ensureSoundData() { // We only getEntityProperities when we need to. @@ -115,43 +125,54 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n } } } + // Stumbling on big new pile of entities will do a lot of getEntityProperties. Once. if (that.lastUserDataUpdate < userDataCutoff) { // NO DATA => SOUND DATA ensureSoundData(); } + if (!that.url) { // NO DATA => NO DATA return that.stop(); } + if (!that.sound) { // SOUND DATA => DOWNLOADING that.sound = SoundCache.getSound(soundData.url); // SoundCache can manage duplicates better than we can. } + if (!that.sound.downloaded) { // DOWNLOADING => DOWNLOADING return; } + if (that.playAfter > now) { // DOWNLOADING | WAITING => WAITING return; } + ensureSoundData(); // We'll try to play/setOptions and will need position, so we might as well get soundData, too. if (soundData.url !== that.url) { // WAITING => NO DATA (update next time around) return that.stop(); } + var options = { position: properties.position, loop: soundData.loop || DEFAULT_SOUND_DATA.loop, volume: soundData.volume || DEFAULT_SOUND_DATA.volume }; + function repeat() { return !options.loop && (soundData.playbackGap >= 0); } + function randomizedNextPlay() { // time of next play or recheck, randomized to distribute the work var range = soundData.playbackGapRange || DEFAULT_SOUND_DATA.playbackGapRange, base = repeat() ? ((that.sound.duration * MSEC_PER_SEC) + (soundData.playbackGap || DEFAULT_SOUND_DATA.playbackGap)) : RECHECK_TIME; return now + base + randFloat(-Math.min(base, range), range); } + if (that.injector && soundData.playing === false) { that.injector.stop(); that.injector = null; } + if (!that.injector) { if (soundData.playing === false) { // WAITING => PLAYING | WAITING return; @@ -165,6 +186,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n } return; } + that.injector.setOptions(options); // PLAYING => UPDATE POSITION ETC if (!that.injector.playing) { // Subtle: a looping sound will not check playbackGap. if (repeat()) { // WAITING => PLAYING @@ -178,6 +200,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n } }; } + function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar) { ignore(avatarPosition, avatar); // We could use avatars and/or avatarPositions to prioritize which ones to play. var entitySound = entityCache[entityIdentifier]; @@ -186,7 +209,9 @@ function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar) } entitySound.timestamp = timestamp; // Might be updated for multiple avatars. That's fine. } + var nUpdates = UPDATES_PER_STATS_LOG, lastStats = Date.now(); + function updateAllEntityData() { // A fast update of all entities we know about. A few make sounds. var now = Date.now(), expirationCutoff = now - EXPIRATION_TIME,
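Editor's note: internEntityDatum() stamps each cached entity with the timestamp of the octree query that mentioned it, and updateAllEntityData() computes an expiration cutoff from EXPIRATION_TIME; the sweep itself continues past the end of this hunk. A minimal sketch of that stamp-and-expire pattern (the map layout and field names are illustrative, not the script's actual structures):

```cpp
// Sketch of a timestamp-and-expire cache sweep like the one driven by expirationCutoff above.
#include <cstdint>
#include <string>
#include <unordered_map>

struct EntityDatum {
    uint64_t timestampMsecs { 0 };   // last octree query that mentioned this entity
};

void expireStaleEntities(std::unordered_map<std::string, EntityDatum>& cache,
                         uint64_t nowMsecs, uint64_t expirationMsecs) {
    const uint64_t cutoff = nowMsecs - expirationMsecs;
    for (auto it = cache.begin(); it != cache.end(); ) {
        if (it->second.timestampMsecs < cutoff) {
            it = cache.erase(it);    // nothing has mentioned it recently; stop tracking it
        } else {
            ++it;
        }
    }
}
```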