Mirror of https://github.com/HifiExperiments/overte.git, synced 2025-05-07 23:38:19 +02:00

Commit 2328da3c48: Merge branch 'master' into audio-source-sync
24 changed files with 771 additions and 350 deletions

cmake/externals/wasapi/CMakeLists.txt (vendored), 4 lines changed
@@ -6,8 +6,8 @@ if (WIN32)
 include(ExternalProject)
 ExternalProject_Add(
   ${EXTERNAL_NAME}
-  URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi7.zip
-  URL_MD5 bc2861e50852dd590cdc773a14a041a7
+  URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi8.zip
+  URL_MD5 b01510437ea15527156bc25cdf733bd9
   CONFIGURE_COMMAND ""
   BUILD_COMMAND ""
   INSTALL_COMMAND ""
@@ -35,6 +35,11 @@
     { "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },

     { "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
-    { "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] }
+    { "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },
+    { "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot", "when": [ "Application.InHMD"] },
+    { "from": "Vive.RightFoot", "to" : "Standard.RightFoot", "when": [ "Application.InHMD"] },
+    { "from": "Vive.Hips", "to" : "Standard.Hips", "when": [ "Application.InHMD"] },
+    { "from": "Vive.Spine2", "to" : "Standard.Spine2", "when": [ "Application.InHMD"] },
+    { "from": "Vive.Head", "to" : "Standard.Head", "when" : [ "Application.InHMD"] }
 ]
 }
@@ -844,7 +844,7 @@ Rectangle {
     boxSize: 24;
     onClicked: {
         var newValue = model.connection !== "friend";
-        connectionsUserModel.setProperty(model.userIndex, styleData.role, newValue);
+        connectionsUserModel.setProperty(model.userIndex, styleData.role, (newValue ? "friend" : "connection"));
         connectionsUserModelData[model.userIndex][styleData.role] = newValue; // Defensive programming
         pal.sendToScript({method: newValue ? 'addFriend' : 'removeFriend', params: model.userName});
@@ -941,10 +941,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo

     // sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
     // The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
+    static const QString TESTER = "HIFI_TESTER";
     auto gpuIdent = GPUIdent::getInstance();
     auto glContextData = getGLContextData();
     QJsonObject properties = {
         { "version", applicationVersion() },
+        { "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
         { "previousSessionCrashed", _previousSessionCrashed },
         { "previousSessionRuntime", sessionRunTime.get() },
         { "cpu_architecture", QSysInfo::currentCpuArchitecture() },

@@ -1688,7 +1690,6 @@ void Application::updateHeartbeat() const {

 void Application::aboutToQuit() {
     emit beforeAboutToQuit();
-    DependencyManager::get<AudioClient>()->beforeAboutToQuit();

     foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
         if (inputPlugin->isActive()) {

@@ -1789,14 +1790,13 @@ void Application::cleanupBeforeQuit() {
         _snapshotSoundInjector->stop();
     }

-    // stop audio after QML, as there are unexplained audio crashes originating in qtwebengine
-    // stop the AudioClient, synchronously
+    // FIXME: something else is holding a reference to AudioClient,
+    // so it must be explicitly synchronously stopped here
     QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
-        "stop", Qt::BlockingQueuedConnection);
+        "cleanupBeforeQuit", Qt::BlockingQueuedConnection);


     // destroy Audio so it and its threads have a chance to go down safely
+    // this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
     DependencyManager::destroy<AudioClient>();
     DependencyManager::destroy<AudioInjectorManager>();
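Note on the invocation above: cleanupBeforeQuit() is called through QMetaObject::invokeMethod with Qt::BlockingQueuedConnection because AudioClient lives on its own thread; the call is queued to that thread, but the caller blocks until the slot returns, so audio is fully stopped before teardown continues. A minimal sketch of that pattern (Worker is a hypothetical stand-in for AudioClient, not code from this repository):

#include <QMetaObject>
#include <QObject>

class Worker : public QObject {
    Q_OBJECT
public slots:
    void shutdown() {
        // runs on the worker's thread: stop timers, close devices, flush buffers ...
    }
};

void stopWorkerSynchronously(Worker* worker) {
    // Queued onto the worker's thread; the calling thread blocks until the slot returns.
    // Must not be called from the worker's own thread, or it will deadlock.
    QMetaObject::invokeMethod(worker, "shutdown", Qt::BlockingQueuedConnection);
}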
@@ -37,7 +37,14 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     Head* head = _owningAvatar->getHead();

     // make sure lookAt is not too close to face (avoid crosseyes)
-    glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
+    glm::vec3 lookAt = head->getLookAtPosition();
+    glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
+    float focusDistance = glm::length(focusOffset);
+    const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
+    if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
+        lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
+    }

     MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

     Rig::HeadParameters headParams;

@@ -140,6 +147,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     auto orientation = myAvatar->getLocalOrientation();
     _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

+    // evaluate AnimGraph animation and update jointStates.
+    Model::updateRig(deltaTime, parentTransform);
+
     Rig::EyeParameters eyeParams;
     eyeParams.eyeLookAt = lookAt;
     eyeParams.eyeSaccade = head->getSaccade();

@@ -149,8 +159,5 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

     _rig->updateFromEyeParameters(eyeParams);

-    // evaluate AnimGraph animation and update jointStates.
-    Parent::updateRig(deltaTime, parentTransform);
 }
@@ -76,42 +76,58 @@ using Mutex = std::mutex;
 using Lock = std::unique_lock<Mutex>;
 static Mutex _deviceMutex;

-// background thread that continuously polls for device changes
-class CheckDevicesThread : public QThread {
-public:
-    const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000;
-
-    CheckDevicesThread(AudioClient* audioClient)
-        : _audioClient(audioClient) {
-    }
-
-    void beforeAboutToQuit() {
-        Lock lock(_checkDevicesMutex);
-        _quit = true;
-    }
-
-    void run() override {
-        while (true) {
-            {
-                Lock lock(_checkDevicesMutex);
-                if (_quit) {
-                    break;
-                }
-                _audioClient->checkDevices();
-            }
-            QThread::msleep(DEVICE_CHECK_INTERVAL_MSECS);
-        }
-    }
-
-private:
-    AudioClient* _audioClient { nullptr };
-    Mutex _checkDevicesMutex;
-    bool _quit { false };
-};
-
-void AudioInjectorsThread::prepare() {
-    _audio->prepareLocalAudioInjectors();
-}
+class BackgroundThread : public QThread {
+public:
+    BackgroundThread(AudioClient* client) : QThread((QObject*)client), _client(client) {}
+    virtual void join() = 0;
+protected:
+    AudioClient* _client;
+};
+
+// background thread continuously polling device changes
+class CheckDevicesThread : public BackgroundThread {
+public:
+    CheckDevicesThread(AudioClient* client) : BackgroundThread(client) {}
+
+    void join() override {
+        _shouldQuit = true;
+        std::unique_lock<std::mutex> lock(_joinMutex);
+        _joinCondition.wait(lock, [&]{ return !_isRunning; });
+    }
+
+protected:
+    void run() override {
+        while (!_shouldQuit) {
+            _client->checkDevices();
+
+            const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000;
+            QThread::msleep(DEVICE_CHECK_INTERVAL_MSECS);
+        }
+        std::lock_guard<std::mutex> lock(_joinMutex);
+        _isRunning = false;
+        _joinCondition.notify_one();
+    }
+
+private:
+    std::atomic<bool> _shouldQuit { false };
+    bool _isRunning { true };
+    std::mutex _joinMutex;
+    std::condition_variable _joinCondition;
+};
+
+// background thread buffering local injectors
+class LocalInjectorsThread : public BackgroundThread {
+    Q_OBJECT
+public:
+    LocalInjectorsThread(AudioClient* client) : BackgroundThread(client) {}
+
+    void join() override { return; }
+
+private slots:
+    void prepare() { _client->prepareLocalAudioInjectors(); }
+};
+
+#include "AudioClient.moc"

 static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
     for (int i = 0; i < numSamples/2; i++) {
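Note: the join() introduced above replaces the old mutex-guarded _quit flag; the caller flips an atomic flag and then blocks on a condition variable until run() confirms it has exited, so the thread object can be deleted safely afterwards. A self-contained sketch of the same pattern using a plain std::thread (names here are illustrative, not from the codebase):

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <thread>

class PollingWorker {
public:
    void start() { _thread = std::thread([this] { run(); }); }

    // Ask the loop to stop, then block until run() reports that it has finished.
    void join() {
        _shouldQuit = true;
        {
            std::unique_lock<std::mutex> lock(_joinMutex);
            _joinCondition.wait(lock, [this] { return !_isRunning; });
        }
        _thread.join();
    }

private:
    void run() {
        while (!_shouldQuit) {
            // ... poll for device changes ...
            std::this_thread::sleep_for(std::chrono::milliseconds(200));
        }
        std::lock_guard<std::mutex> lock(_joinMutex);
        _isRunning = false;
        _joinCondition.notify_one();
    }

    std::thread _thread;
    std::atomic<bool> _shouldQuit { false };
    bool _isRunning { true };
    std::mutex _joinMutex;
    std::condition_variable _joinCondition;
};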
@@ -179,7 +195,6 @@ AudioClient::AudioClient() :
     _inputToNetworkResampler(NULL),
     _networkToOutputResampler(NULL),
     _localToOutputResampler(NULL),
-    _localAudioThread(this),
     _audioLimiter(AudioConstants::SAMPLE_RATE, OUTPUT_CHANNEL_COUNT),
     _outgoingAvatarAudioSequenceNumber(0),
     _audioOutputIODevice(_localInjectorsStream, _receivedAudioStream, this),
@@ -210,13 +225,14 @@ AudioClient::AudioClient() :

     // start a thread to detect any device changes
     _checkDevicesThread = new CheckDevicesThread(this);
-    _checkDevicesThread->setObjectName("CheckDevices Thread");
+    _checkDevicesThread->setObjectName("AudioClient CheckDevices Thread");
     _checkDevicesThread->setPriority(QThread::LowPriority);
     _checkDevicesThread->start();

     // start a thread to process local injectors
-    _localAudioThread.setObjectName("LocalAudio Thread");
-    _localAudioThread.start();
+    _localInjectorsThread = new LocalInjectorsThread(this);
+    _localInjectorsThread->setObjectName("AudioClient LocalInjectors Thread");
+    _localInjectorsThread->start();

     configureReverb();
@@ -231,18 +247,32 @@ AudioClient::AudioClient() :
 }

 AudioClient::~AudioClient() {
-    delete _checkDevicesThread;
-    stop();
     if (_codec && _encoder) {
         _codec->releaseEncoder(_encoder);
         _encoder = nullptr;
     }
 }

-void AudioClient::beforeAboutToQuit() {
-    static_cast<CheckDevicesThread*>(_checkDevicesThread)->beforeAboutToQuit();
+void AudioClient::customDeleter() {
+    deleteLater();
 }

+void AudioClient::cleanupBeforeQuit() {
+    // FIXME: this should be put in customDeleter, but there is still a reference to this when it is called,
+    // so this must be explicitly, synchronously stopped
+
+    stop();
+
+    if (_checkDevicesThread) {
+        static_cast<BackgroundThread*>(_checkDevicesThread)->join();
+        delete _checkDevicesThread;
+    }
+
+    if (_localInjectorsThread) {
+        static_cast<BackgroundThread*>(_localInjectorsThread)->join();
+        delete _localInjectorsThread;
+    }
+}
+
 void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
     qCDebug(audioclient) << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
@@ -1097,11 +1127,19 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
     handleAudioInput(audioBuffer);
 }

-void AudioClient::prepareLocalAudioInjectors() {
+void AudioClient::prepareLocalAudioInjectors(std::unique_ptr<Lock> localAudioLock) {
+    bool doSynchronously = localAudioLock.operator bool();
+    if (!localAudioLock) {
+        localAudioLock.reset(new Lock(_localAudioMutex));
+    }
+
     int samplesNeeded = std::numeric_limits<int>::max();
     while (samplesNeeded > 0) {
-        // unlock between every write to allow device switching
-        Lock lock(_localAudioMutex);
+        if (!doSynchronously) {
+            // unlock between every write to allow device switching
+            localAudioLock->unlock();
+            localAudioLock->lock();
+        }

         // in case of a device switch, consider bufferCapacity volatile across iterations
         if (_outputPeriod == 0) {
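Note: prepareLocalAudioInjectors() now takes the lock as an optional std::unique_ptr<Lock>, so the same code path can run asynchronously (it creates and periodically cycles its own lock) or synchronously from a caller that already holds _localAudioMutex. A small illustration of that calling convention with hypothetical names:

#include <memory>
#include <mutex>

using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;

static Mutex gBufferMutex;

void fillBuffer(std::unique_ptr<Lock> lock = nullptr) {
    bool calledSynchronously = (bool)lock;      // caller already holds the mutex
    if (!lock) {
        lock.reset(new Lock(gBufferMutex));     // asynchronous path: take the lock ourselves
    }

    for (int chunk = 0; chunk < 4; ++chunk) {
        if (!calledSynchronously) {
            // release between chunks so a device switch on another thread can interleave
            lock->unlock();
            lock->lock();
        }
        // ... write one chunk while holding the lock ...
    }
}   // the Lock, and with it the mutex, is released when the unique_ptr is destroyed

// asynchronous use:  fillBuffer();
// synchronous use:   fillBuffer(std::unique_ptr<Lock>(new Lock(gBufferMutex)));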
@@ -1155,16 +1193,16 @@ void AudioClient::prepareLocalAudioInjectors() {
 }

 bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
-    QVector<AudioInjector*> injectorsToRemove;
-
-    // lock the injector vector
-    Lock lock(_injectorsMutex);
-
-    if (_activeLocalAudioInjectors.size() == 0) {
+    // check the flag for injectors before attempting to lock
+    if (!_localInjectorsAvailable.load(std::memory_order_acquire)) {
         return false;
     }

+    // lock the injectors
+    Lock lock(_injectorsMutex);
+
+    QVector<AudioInjector*> injectorsToRemove;
+
     memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));

     for (AudioInjector* injector : _activeLocalAudioInjectors) {
|
||||||
_activeLocalAudioInjectors.removeOne(injector);
|
_activeLocalAudioInjectors.removeOne(injector);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// update the flag
|
||||||
|
_localInjectorsAvailable.exchange(!_activeLocalAudioInjectors.empty(), std::memory_order_release);
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
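Note: _localInjectorsAvailable is read with memory_order_acquire before taking _injectorsMutex, and republished with memory_order_release after the injector list changes, so the real-time mix path can skip the lock entirely when there is nothing to mix. The shape of that pattern, reduced to a sketch:

#include <atomic>
#include <mutex>
#include <vector>

std::atomic<bool> gWorkAvailable { false };
std::mutex gWorkMutex;
std::vector<int> gWork;

bool drainWork() {
    // cheap lock-free early-out on the hot path
    if (!gWorkAvailable.load(std::memory_order_acquire)) {
        return false;
    }

    std::lock_guard<std::mutex> lock(gWorkMutex);
    // ... consume gWork ...
    gWork.clear();

    // publish the new state for the next lock-free check
    gWorkAvailable.store(!gWork.empty(), std::memory_order_release);
    return true;
}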
@@ -1329,11 +1370,14 @@ bool AudioClient::outputLocalInjector(AudioInjector* injector) {

             // move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop())
             injectorBuffer->setParent(nullptr);
-            injectorBuffer->moveToThread(&_localAudioThread);
+            injectorBuffer->moveToThread(_localInjectorsThread);
+
+            // update the flag
+            _localInjectorsAvailable.exchange(true, std::memory_order_release);
         } else {
             qCDebug(audioclient) << "injector exists in active list already";
         }

         return true;

     } else {
@@ -1457,7 +1501,7 @@ void AudioClient::outputNotify() {
 bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
     bool supportedFormat = false;

-    Lock lock(_localAudioMutex);
+    Lock localAudioLock(_localAudioMutex);
     _localSamplesAvailable.exchange(0, std::memory_order_release);

     // cleanup any previously initialized device
@@ -1528,14 +1572,23 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
         connect(_audioOutput, &QAudioOutput::stateChanged, [&, frameSize, requestedSize](QAudio::State state) {
             if (state == QAudio::ActiveState) {
                 // restrict device callback to _outputPeriod samples
-                _outputPeriod = (_audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE) * 2;
+                _outputPeriod = _audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE;
+                // device callback may exceed reported period, so double it to avoid stutter
+                _outputPeriod *= 2;
+
                 _outputMixBuffer = new float[_outputPeriod];
                 _outputScratchBuffer = new int16_t[_outputPeriod];

                 // size local output mix buffer based on resampled network frame size
-                _networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
-                _localOutputMixBuffer = new float[_networkPeriod];
+                int networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
+                _localOutputMixBuffer = new float[networkPeriod];

+                // local period should be at least twice the output period,
+                // in case two device reads happen before more data can be read (worst case)
                 int localPeriod = _outputPeriod * 2;
+                // round up to an exact multiple of networkPeriod
+                localPeriod = ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod;
+                // this ensures lowest latency without stutter from underrun
                 _localInjectorsStream.resizeForFrameSize(localPeriod);

                 int bufferSize = _audioOutput->bufferSize();
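Note on the localPeriod math above: ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod is the usual integer round-up-to-a-multiple idiom. With assumed example values of _outputPeriod * 2 = 1920 samples and networkPeriod = 735 samples, it yields ((1920 + 734) / 735) * 735 = 3 * 735 = 2205, the smallest multiple of 735 that is at least 1920.

// Round n up to the next multiple of m (integer math, m > 0); e.g. roundUpToMultiple(1920, 735) == 2205.
int roundUpToMultiple(int n, int m) {
    return ((n + m - 1) / m) * m;
}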
@@ -1550,6 +1603,9 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
                 qCDebug(audioclient) << "local buffer (samples):" << localPeriod;

                 disconnect(_audioOutput, &QAudioOutput::stateChanged, 0, 0);
+
+                // unlock to avoid a deadlock with the device callback (which always succeeds this initialization)
+                localAudioLock.unlock();
             }
         });
         connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);
@@ -1688,12 +1744,24 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
     int injectorSamplesPopped = 0;
     {
         bool append = networkSamplesPopped > 0;
-        // this does not require a lock as of the only two functions adding to _localSamplesAvailable (samples count):
+        // check the samples we have available locklessly; this is possible because only two functions add to the count:
         // - prepareLocalAudioInjectors will only increase samples count
-        // - switchOutputToAudioDevice will zero samples count
-        //   stop the device, so that readData will exhaust the existing buffer or see a zeroed samples count
-        //   and start the device, which can only see a zeroed samples count
-        samplesRequested = std::min(samplesRequested, _audio->_localSamplesAvailable.load(std::memory_order_acquire));
+        // - switchOutputToAudioDevice will zero samples count,
+        //   stop the device - so that readData will exhaust the existing buffer or see a zeroed samples count,
+        //   and start the device - which can then only see a zeroed samples count
+        int samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire);
+
+        // if we do not have enough samples buffered despite having injectors, buffer them synchronously
+        if (samplesAvailable < samplesRequested && _audio->_localInjectorsAvailable.load(std::memory_order_acquire)) {
+            // try_to_lock, in case the device is being shut down already
+            std::unique_ptr<Lock> localAudioLock(new Lock(_audio->_localAudioMutex, std::try_to_lock));
+            if (localAudioLock->owns_lock()) {
+                _audio->prepareLocalAudioInjectors(std::move(localAudioLock));
+                samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire);
+            }
+        }
+
+        samplesRequested = std::min(samplesRequested, samplesAvailable);
         if ((injectorSamplesPopped = _localInjectorsStream.appendSamples(mixBuffer, samplesRequested, append)) > 0) {
             _audio->_localSamplesAvailable.fetch_sub(injectorSamplesPopped, std::memory_order_release);
             qCDebug(audiostream, "Read %d samples from injectors (%d available, %d requested)", injectorSamplesPopped, _localInjectorsStream.samplesAvailable(), samplesRequested);
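Note: the std::try_to_lock in the new readData() path keeps the audio device callback from ever blocking; if shutdown or a device switch currently holds _localAudioMutex, the callback simply skips the synchronous top-up for that period. The pattern in isolation:

#include <mutex>

std::mutex gLocalAudioMutex;

void deviceCallback() {
    std::unique_lock<std::mutex> lock(gLocalAudioMutex, std::try_to_lock);
    if (!lock.owns_lock()) {
        // someone else (device switch, shutdown) holds the mutex; skip this round instead of blocking
        return;
    }
    // ... safely top up the local injector buffer while holding the lock ...
}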
@@ -1701,7 +1769,7 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
     }

     // prepare injectors for the next callback
-    QMetaObject::invokeMethod(&_audio->_localAudioThread, "prepare", Qt::QueuedConnection);
+    QMetaObject::invokeMethod(_audio->_localInjectorsThread, "prepare", Qt::QueuedConnection);

     int samplesPopped = std::max(networkSamplesPopped, injectorSamplesPopped);
     int framesPopped = samplesPopped / AudioConstants::STEREO;
@@ -71,19 +71,6 @@ class QIODevice;
 class Transform;
 class NLPacket;

-class AudioInjectorsThread : public QThread {
-    Q_OBJECT
-
-public:
-    AudioInjectorsThread(AudioClient* audio) : _audio(audio) {}
-
-public slots :
-    void prepare();
-
-private:
-    AudioClient* _audio;
-};
-
 class AudioClient : public AbstractAudioInterface, public Dependency {
     Q_OBJECT
     SINGLETON_DEPENDENCY
@@ -158,7 +145,7 @@ public:

     Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);

-    void checkDevices();
+    bool outputLocalInjector(AudioInjector* injector) override;

     static const float CALLBACK_ACCELERATOR_RATIO;
@@ -169,6 +156,7 @@ public:
 public slots:
     void start();
     void stop();
+    void cleanupBeforeQuit();

     void handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message);
     void handleAudioDataPacket(QSharedPointer<ReceivedMessage> message);
@@ -184,8 +172,6 @@ public slots:
     void audioMixerKilled();
     void toggleMute();

-    void beforeAboutToQuit();
-
     virtual void setIsStereoInput(bool stereo) override;

     void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }
@@ -198,8 +184,6 @@ public slots:

     int setOutputBufferSize(int numFrames, bool persist = true);

-    void prepareLocalAudioInjectors();
-    bool outputLocalInjector(AudioInjector* injector) override;
     bool shouldLoopbackInjectors() override { return _shouldEchoToServer; }

     bool switchInputToAudioDevice(const QString& inputDeviceName);
@@ -245,13 +229,16 @@ protected:
     AudioClient();
     ~AudioClient();

-    virtual void customDeleter() override {
-        deleteLater();
-    }
+    virtual void customDeleter() override;

 private:
+    friend class CheckDevicesThread;
+    friend class LocalInjectorsThread;
+
     void outputFormatChanged();
     void handleAudioInput(QByteArray& audioBuffer);
+    void checkDevices();
+    void prepareLocalAudioInjectors(std::unique_ptr<Lock> localAudioLock = nullptr);
     bool mixLocalAudioInjectors(float* mixBuffer);
     float azimuthForSource(const glm::vec3& relativePosition);
     float gainForSource(float distance, float volume);
@@ -298,8 +285,9 @@ private:
     AudioRingBuffer _inputRingBuffer;
     LocalInjectorsStream _localInjectorsStream;
     // In order to use _localInjectorsStream as a lock-free pipe,
-    // use it with a single producer/consumer, and track available samples
+    // use it with a single producer/consumer, and track available samples and injectors
     std::atomic<int> _localSamplesAvailable { 0 };
+    std::atomic<bool> _localInjectorsAvailable { false };
     MixedProcessedAudioStream _receivedAudioStream;
     bool _isStereoInput;
@@ -340,19 +328,17 @@ private:
     // for network audio (used by network audio thread)
     int16_t _networkScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];

-    // for local audio (used by audio injectors thread)
-    int _networkPeriod { 0 };
-    float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
-    int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
-    float* _localOutputMixBuffer { NULL };
-    AudioInjectorsThread _localAudioThread;
-    Mutex _localAudioMutex;
-
     // for output audio (used by this thread)
     int _outputPeriod { 0 };
     float* _outputMixBuffer { NULL };
     int16_t* _outputScratchBuffer { NULL };

+    // for local audio (used by audio injectors thread)
+    float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
+    int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
+    float* _localOutputMixBuffer { NULL };
+    Mutex _localAudioMutex;
+
     AudioLimiter _audioLimiter;

     // Adds Reverb
|
||||||
QString _selectedCodecName;
|
QString _selectedCodecName;
|
||||||
Encoder* _encoder { nullptr }; // for outbound mic stream
|
Encoder* _encoder { nullptr }; // for outbound mic stream
|
||||||
|
|
||||||
QThread* _checkDevicesThread { nullptr };
|
|
||||||
|
|
||||||
RateCounter<> _silentOutbound;
|
RateCounter<> _silentOutbound;
|
||||||
RateCounter<> _audioOutbound;
|
RateCounter<> _audioOutbound;
|
||||||
RateCounter<> _silentInbound;
|
RateCounter<> _silentInbound;
|
||||||
RateCounter<> _audioInbound;
|
RateCounter<> _audioInbound;
|
||||||
|
|
||||||
|
QThread* _checkDevicesThread { nullptr };
|
||||||
|
QThread* _localInjectorsThread { nullptr };
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@@ -32,12 +32,12 @@ public:
         const Transform& transform, glm::vec3 avatarBoundingBoxCorner, glm::vec3 avatarBoundingBoxScale,
         PacketType packetType, QString codecName = QString(""));

-public slots:
     // threadsafe
     // moves injector->getLocalBuffer() to another thread (so removes its parent)
     // take care to delete it when ~AudioInjector, as parenting Qt semantics will not work
     virtual bool outputLocalInjector(AudioInjector* injector) = 0;

+public slots:
     virtual bool shouldLoopbackInjectors() { return false; }

     virtual void setIsStereoInput(bool stereo) = 0;
@@ -369,23 +369,25 @@ void Avatar::simulate(float deltaTime, bool inView) {
     PerformanceTimer perfTimer("simulate");
     {
         PROFILE_RANGE(simulation, "updateJoints");
-        if (inView && _hasNewJointData) {
-            _skeletonModel->getRig()->copyJointsFromJointData(_jointData);
-            glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
-            _skeletonModel->getRig()->computeExternalPoses(rootTransform);
-            _jointDataSimulationRate.increment();
-
-            _skeletonModel->simulate(deltaTime, true);
-
-            locationChanged(); // joints changed, so if there are any children, update them.
-            _hasNewJointData = false;
-
-            glm::vec3 headPosition = getPosition();
-            if (!_skeletonModel->getHeadPosition(headPosition)) {
-                headPosition = getPosition();
-            }
+        if (inView) {
             Head* head = getHead();
-            head->setPosition(headPosition);
+            if (_hasNewJointData) {
+                _skeletonModel->getRig()->copyJointsFromJointData(_jointData);
+                glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
+                _skeletonModel->getRig()->computeExternalPoses(rootTransform);
+                _jointDataSimulationRate.increment();
+
+                _skeletonModel->simulate(deltaTime, true);
+
+                locationChanged(); // joints changed, so if there are any children, update them.
+                _hasNewJointData = false;
+
+                glm::vec3 headPosition = getPosition();
+                if (!_skeletonModel->getHeadPosition(headPosition)) {
+                    headPosition = getPosition();
+                }
+                head->setPosition(headPosition);
+            }
             head->setScale(getUniformScale());
             head->simulate(deltaTime);
         } else {
@@ -89,8 +89,7 @@ void Head::simulate(float deltaTime) {
     _timeWithoutTalking += deltaTime;
     if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) {
         _timeWithoutTalking = 0.0f;
-    } else if (_timeWithoutTalking < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
+    } else if (_timeWithoutTalking - deltaTime < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
         forceBlink = true;
     }
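Note: the old condition (_timeWithoutTalking < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) can never be true, so the forced blink after talking never fired. The replacement compares the accumulator's previous value (current minus deltaTime) against the threshold, so the branch fires exactly once, on the frame where the threshold is crossed. The general idiom:

// Fires exactly once: on the update where 'accumulated' first reaches 'threshold'.
bool crossedThreshold(float accumulated, float delta, float threshold) {
    return (accumulated - delta) < threshold && accumulated >= threshold;
}
// e.g. threshold 0.25f, delta 0.016f: false at 0.24f, true at 0.252f, false again at 0.268f.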
@@ -73,12 +73,13 @@ void SkeletonModel::initJointStates() {

 // Called within Model::simulate call, below.
 void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
+    assert(!_owningAvatar->isMyAvatar());
     const FBXGeometry& geometry = getFBXGeometry();

     Head* head = _owningAvatar->getHead();

     // make sure lookAt is not too close to face (avoid crosseyes)
-    glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
+    glm::vec3 lookAt = head->getCorrectedLookAtPosition();
     glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
     float focusDistance = glm::length(focusOffset);
     const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
@@ -86,41 +87,36 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
         lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
     }

-    if (!_owningAvatar->isMyAvatar()) {
-        // no need to call Model::updateRig() because otherAvatars get their joint state
-        // copied directly from AvtarData::_jointData (there are no Rig animations to blend)
-        _needsUpdateClusterMatrices = true;
+    // no need to call Model::updateRig() because otherAvatars get their joint state
+    // copied directly from AvtarData::_jointData (there are no Rig animations to blend)
+    _needsUpdateClusterMatrices = true;

     // This is a little more work than we really want.
     //
     // Other avatars joint, including their eyes, should already be set just like any other joints
     // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
     //
     // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
     // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
     // We will revisit that as priorities allow, and particularly after the new rig/animation/joints.

     // If the head is not positioned, updateEyeJoints won't get the math right
     glm::quat headOrientation;
     _rig->getJointRotation(geometry.headJointIndex, headOrientation);
     glm::vec3 eulers = safeEulerAngles(headOrientation);
     head->setBasePitch(glm::degrees(-eulers.x));
     head->setBaseYaw(glm::degrees(eulers.y));
     head->setBaseRoll(glm::degrees(-eulers.z));

     Rig::EyeParameters eyeParams;
     eyeParams.eyeLookAt = lookAt;
     eyeParams.eyeSaccade = glm::vec3(0.0f);
     eyeParams.modelRotation = getRotation();
     eyeParams.modelTranslation = getTranslation();
     eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
     eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

     _rig->updateFromEyeParameters(eyeParams);
-    }

-    // evaluate AnimGraph animation and update jointStates.
-    Parent::updateRig(deltaTime, parentTransform);
 }

 void SkeletonModel::updateAttitude() {
@@ -273,10 +273,9 @@ std::tuple<bool, QByteArray> requestData(QUrl& url) {
         return std::make_tuple(false, QByteArray());
     }

-    request->send();
-
     QEventLoop loop;
     QObject::connect(request, &ResourceRequest::finished, &loop, &QEventLoop::quit);
+    request->send();
     loop.exec();

     if (request->getResult() == ResourceRequest::Success) {
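Note: the reordering above matters because the request is wired to the local event loop before it is sent; if send() came first, a request that finished before the connect() could emit finished with nobody listening, and loop.exec() would never return. A minimal sketch of the connect-then-start, wait-with-QEventLoop pattern (using QTimer as a stand-in signal source):

#include <QEventLoop>
#include <QTimer>

void waitForTimeout(int msecs) {
    QEventLoop loop;
    QTimer timer;
    timer.setSingleShot(true);

    // connect first, so the quit cannot be missed ...
    QObject::connect(&timer, &QTimer::timeout, &loop, &QEventLoop::quit);
    // ... then start the asynchronous work and block this function until it signals
    timer.start(msecs);
    loop.exec();
}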
@@ -1,5 +1,5 @@
 set(TARGET_NAME gpu-gl)
-setup_hifi_library()
+setup_hifi_library(Concurrent)
 link_hifi_libraries(shared gl gpu)
 if (UNIX)
     target_link_libraries(${TARGET_NAME} pthread)
@@ -160,8 +160,6 @@ const uvec3 GLVariableAllocationSupport::INITIAL_MIP_TRANSFER_DIMENSIONS { 64, 6
 WorkQueue GLVariableAllocationSupport::_transferQueue;
 WorkQueue GLVariableAllocationSupport::_promoteQueue;
 WorkQueue GLVariableAllocationSupport::_demoteQueue;
-TexturePointer GLVariableAllocationSupport::_currentTransferTexture;
-TransferJobPointer GLVariableAllocationSupport::_currentTransferJob;
 size_t GLVariableAllocationSupport::_frameTexturesCreated { 0 };

 #define OVERSUBSCRIBED_PRESSURE_VALUE 0.95f
@@ -176,30 +174,19 @@ const uvec3 GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS { 1024, 1024, 1
 const size_t GLVariableAllocationSupport::MAX_TRANSFER_SIZE = GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.x * GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.y * 4;

 #if THREADED_TEXTURE_BUFFERING
-std::shared_ptr<std::thread> TransferJob::_bufferThread { nullptr };
-std::atomic<bool> TransferJob::_shutdownBufferingThread { false };
-Mutex TransferJob::_mutex;
-TransferJob::VoidLambdaQueue TransferJob::_bufferLambdaQueue;
-
-void TransferJob::startTransferLoop() {
-    if (_bufferThread) {
-        return;
-    }
-    _shutdownBufferingThread = false;
-    _bufferThread = std::make_shared<std::thread>([] {
-        TransferJob::bufferLoop();
+TexturePointer GLVariableAllocationSupport::_currentTransferTexture;
+TransferJobPointer GLVariableAllocationSupport::_currentTransferJob;
+QThreadPool* TransferJob::_bufferThreadPool { nullptr };
+
+void TransferJob::startBufferingThread() {
+    static std::once_flag once;
+    std::call_once(once, [&] {
+        _bufferThreadPool = new QThreadPool(qApp);
+        _bufferThreadPool->setMaxThreadCount(1);
     });
 }
-
-void TransferJob::stopTransferLoop() {
-    if (!_bufferThread) {
-        return;
-    }
-    _shutdownBufferingThread = true;
-    _bufferThread->join();
-    _bufferThread.reset();
-    _shutdownBufferingThread = false;
-}
 #endif

 TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t targetMip, uint8_t face, uint32_t lines, uint32_t lineOffset)
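Note: the rewrite above drops the hand-rolled buffering thread, its mutex and its lambda queue in favour of a QThreadPool capped at one thread, created once and parented to the application; jobs are then posted with QtConcurrent::run and tracked through the returned QFuture. A condensed sketch of that setup (function names here are illustrative):

#include <QCoreApplication>
#include <QThreadPool>
#include <QtConcurrent>
#include <functional>
#include <mutex>

static QThreadPool* bufferPool { nullptr };

void startBufferPool() {
    static std::once_flag once;
    std::call_once(once, [] {
        bufferPool = new QThreadPool(qApp);   // created exactly once, owned by the application
        bufferPool->setMaxThreadCount(1);     // jobs run one at a time, in submission order
    });
}

QFuture<void> submitBufferingJob(std::function<void()> job) {
    startBufferPool();
    return QtConcurrent::run(bufferPool, std::move(job));
}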
@@ -233,7 +220,6 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t
     // Buffering can invoke disk IO, so it should be off of the main and render threads
     _bufferingLambda = [=] {
         _mipData = _parent._gpuObject.accessStoredMipFace(sourceMip, face)->createView(_transferSize, _transferOffset);
-        _bufferingCompleted = true;
     };

     _transferLambda = [=] {
@@ -243,65 +229,66 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t
 }

 TransferJob::TransferJob(const GLTexture& parent, std::function<void()> transferLambda)
-    : _parent(parent), _bufferingCompleted(true), _transferLambda(transferLambda) {
+    : _parent(parent), _bufferingRequired(false), _transferLambda(transferLambda) {
 }

 TransferJob::~TransferJob() {
     Backend::updateTextureTransferPendingSize(_transferSize, 0);
 }

 bool TransferJob::tryTransfer() {
-    // Disable threaded texture transfer for now
 #if THREADED_TEXTURE_BUFFERING
     // Are we ready to transfer
-    if (_bufferingCompleted) {
-        _transferLambda();
+    if (!bufferingCompleted()) {
+        startBuffering();
+        return false;
+    }
+#else
+    if (_bufferingRequired) {
+        _bufferingLambda();
+    }
+#endif
+    _transferLambda();
+    return true;
+}
+
+#if THREADED_TEXTURE_BUFFERING
+bool TransferJob::bufferingRequired() const {
+    if (!_bufferingRequired) {
+        return false;
+    }
+
+    // The default state of a QFuture is with status Canceled | Started | Finished,
+    // so we have to check isCancelled before we check the actual state
+    if (_bufferingStatus.isCanceled()) {
         return true;
     }

-    startBuffering();
-    return false;
-#else
-    if (!_bufferingCompleted) {
-        _bufferingLambda();
-        _bufferingCompleted = true;
-    }
-    _transferLambda();
-    return true;
-#endif
+    return !_bufferingStatus.isStarted();
 }

-#if THREADED_TEXTURE_BUFFERING
+bool TransferJob::bufferingCompleted() const {
+    if (!_bufferingRequired) {
+        return true;
+    }
+
+    // The default state of a QFuture is with status Canceled | Started | Finished,
+    // so we have to check isCancelled before we check the actual state
+    if (_bufferingStatus.isCanceled()) {
+        return false;
+    }
+
+    return _bufferingStatus.isFinished();
+}
+
 void TransferJob::startBuffering() {
-    if (_bufferingStarted) {
-        return;
-    }
-    _bufferingStarted = true;
-    {
-        Lock lock(_mutex);
-        _bufferLambdaQueue.push(_bufferingLambda);
-    }
-}
-
-void TransferJob::bufferLoop() {
-    while (!_shutdownBufferingThread) {
-        VoidLambdaQueue workingQueue;
-        {
-            Lock lock(_mutex);
-            _bufferLambdaQueue.swap(workingQueue);
-        }
-
-        if (workingQueue.empty()) {
-            QThread::msleep(5);
-            continue;
-        }
-
-        while (!workingQueue.empty()) {
-            workingQueue.front()();
-            workingQueue.pop();
-        }
+    if (bufferingRequired()) {
+        assert(_bufferingStatus.isCanceled());
+        _bufferingStatus = QtConcurrent::run(_bufferThreadPool, [=] {
+            _bufferingLambda();
+        });
+        assert(!_bufferingStatus.isCanceled());
+        assert(_bufferingStatus.isStarted());
     }
 }
 #endif
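Note: the isCanceled() checks in bufferingRequired() and bufferingCompleted() exist because, as the comments say, a default-constructed QFuture reports Canceled | Started | Finished; without the check, a job that was never submitted would look finished. A small guard in the same spirit (an assumed helper, not from the repository):

#include <QFuture>

// 'submitted' tracks whether the future was ever assigned from QtConcurrent::run.
bool jobCompleted(const QFuture<void>& job, bool submitted) {
    if (!submitted) {
        return true;            // nothing was ever started, so nothing to wait for
    }
    if (job.isCanceled()) {
        return false;           // still the default-constructed state: the job has not really run
    }
    return job.isFinished();
}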
@@ -316,7 +303,9 @@ GLVariableAllocationSupport::~GLVariableAllocationSupport() {

 void GLVariableAllocationSupport::addMemoryManagedTexture(const TexturePointer& texturePointer) {
     _memoryManagedTextures.push_back(texturePointer);
-    addToWorkQueue(texturePointer);
+    if (MemoryPressureState::Idle != _memoryPressureState) {
+        addToWorkQueue(texturePointer);
+    }
 }

 void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePointer) {
@@ -345,10 +334,8 @@ void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePo
             break;

         case MemoryPressureState::Idle:
-            break;
-
-        default:
             Q_UNREACHABLE();
+            break;
     }
 }
@@ -364,10 +351,10 @@ WorkQueue& GLVariableAllocationSupport::getActiveWorkQueue() {
         case MemoryPressureState::Transfer:
             return _transferQueue;

-        default:
+        case MemoryPressureState::Idle:
+            Q_UNREACHABLE();
             break;
     }
-    Q_UNREACHABLE();
     return empty;
 }
@@ -460,16 +447,11 @@ void GLVariableAllocationSupport::updateMemoryPressure() {
     }

     if (newState != _memoryPressureState) {
+        _memoryPressureState = newState;
 #if THREADED_TEXTURE_BUFFERING
         if (MemoryPressureState::Transfer == _memoryPressureState) {
-            TransferJob::stopTransferLoop();
+            TransferJob::startBufferingThread();
         }
-        _memoryPressureState = newState;
-        if (MemoryPressureState::Transfer == _memoryPressureState) {
-            TransferJob::startTransferLoop();
-        }
-#else
-        _memoryPressureState = newState;
 #endif
         // Clear the existing queue
         _transferQueue = WorkQueue();
@ -487,49 +469,111 @@ void GLVariableAllocationSupport::updateMemoryPressure() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
+TexturePointer GLVariableAllocationSupport::getNextWorkQueueItem(WorkQueue& workQueue) {
+    while (!workQueue.empty()) {
+        auto workTarget = workQueue.top();
+
+        auto texture = workTarget.first.lock();
+        if (!texture) {
+            workQueue.pop();
+            continue;
+        }
+
+        // Check whether the resulting texture can actually have work performed
+        GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
+        GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
+        switch (_memoryPressureState) {
+            case MemoryPressureState::Oversubscribed:
+                if (vartexture->canDemote()) {
+                    return texture;
+                }
+                break;
+
+            case MemoryPressureState::Undersubscribed:
+                if (vartexture->canPromote()) {
+                    return texture;
+                }
+                break;
+
+            case MemoryPressureState::Transfer:
+                if (vartexture->hasPendingTransfers()) {
+                    return texture;
+                }
+                break;
+
+            case MemoryPressureState::Idle:
+                Q_UNREACHABLE();
+                break;
+        }
+
+        // If we got here, then the texture has no work to do in the current state,
+        // so pop it off the queue and continue
+        workQueue.pop();
+    }
+
+    return TexturePointer();
+}
+
+void GLVariableAllocationSupport::processWorkQueue(WorkQueue& workQueue) {
+    if (workQueue.empty()) {
+        return;
+    }
+
+    // Get the front of the work queue to perform work
+    auto texture = getNextWorkQueueItem(workQueue);
+    if (!texture) {
+        return;
+    }
+
+    // Grab the first item off the demote queue
+    PROFILE_RANGE(render_gpu_gl, __FUNCTION__);
+
+    GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
+    GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
+    switch (_memoryPressureState) {
+        case MemoryPressureState::Oversubscribed:
+            vartexture->demote();
+            workQueue.pop();
+            addToWorkQueue(texture);
+            break;
+
+        case MemoryPressureState::Undersubscribed:
+            vartexture->promote();
+            workQueue.pop();
+            addToWorkQueue(texture);
+            break;
+
+        case MemoryPressureState::Transfer:
+            if (vartexture->executeNextTransfer(texture)) {
+                workQueue.pop();
+                addToWorkQueue(texture);
+
+#if THREADED_TEXTURE_BUFFERING
+                // Eagerly start the next buffering job if possible
+                texture = getNextWorkQueueItem(workQueue);
+                if (texture) {
+                    gltexture = Backend::getGPUObject<GLTexture>(*texture);
+                    vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
+                    vartexture->executeNextBuffer(texture);
+                }
+#endif
+            }
+            break;

+        case MemoryPressureState::Idle:
+            Q_UNREACHABLE();
+            break;
+    }
+}

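A note for readers of the two helpers above: each WorkQueue entry is a weak texture reference paired with a priority value — getNextWorkQueueItem() calls workTarget.first.lock(), so the weak pointer is the first member of the pair. The queue type itself is declared in GLTexture.h and is not part of this diff; a minimal sketch of a compatible definition, assuming a float priority and largest-first ordering, would be:

#include <memory>
#include <queue>
#include <utility>
#include <vector>

// Hypothetical stand-ins for the gpu::Texture types used above.
struct Texture {};
using TexturePointer = std::shared_ptr<Texture>;
using TextureWeakPointer = std::weak_ptr<Texture>;

// One queue entry: a weak reference plus a priority score.
using WorkQueueEntry = std::pair<TextureWeakPointer, float>;

// Order entries so that the highest-priority texture is at top().
struct WorkQueueOrder {
    bool operator()(const WorkQueueEntry& a, const WorkQueueEntry& b) const {
        return a.second < b.second;
    }
};

using WorkQueue = std::priority_queue<WorkQueueEntry, std::vector<WorkQueueEntry>, WorkQueueOrder>;
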
 void GLVariableAllocationSupport::processWorkQueues() {
     if (MemoryPressureState::Idle == _memoryPressureState) {
         return;
     }

     auto& workQueue = getActiveWorkQueue();
-    PROFILE_RANGE(render_gpu_gl, __FUNCTION__);
-    while (!workQueue.empty()) {
-        auto workTarget = workQueue.top();
-        workQueue.pop();
-        auto texture = workTarget.first.lock();
-        if (!texture) {
-            continue;
-        }
-
-        // Grab the first item off the demote queue
-        GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
-        GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
-        if (MemoryPressureState::Oversubscribed == _memoryPressureState) {
-            if (!vartexture->canDemote()) {
-                continue;
-            }
-            vartexture->demote();
-            _memoryPressureStateStale = true;
-        } else if (MemoryPressureState::Undersubscribed == _memoryPressureState) {
-            if (!vartexture->canPromote()) {
-                continue;
-            }
-            vartexture->promote();
-            _memoryPressureStateStale = true;
-        } else if (MemoryPressureState::Transfer == _memoryPressureState) {
-            if (!vartexture->hasPendingTransfers()) {
-                continue;
-            }
-            vartexture->executeNextTransfer(texture);
-        } else {
-            Q_UNREACHABLE();
-        }
-
-        // Reinject into the queue if more work to be done
-        addToWorkQueue(texture);
-        break;
-    }
+    // Do work on the front of the queue
+    processWorkQueue(workQueue);

     if (workQueue.empty()) {
         _memoryPressureState = MemoryPressureState::Idle;

@ -543,28 +587,83 @@ void GLVariableAllocationSupport::manageMemory() {
     processWorkQueues();
 }

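getActiveWorkQueue() is referenced throughout this file but its body is outside the hunks shown. Based on the three pressure states and the three static queues declared in GLTexture.h further down, a plausible sketch of that mapping is the following (illustration only, under those assumptions; the real function may differ):

// Sketch only: pick the queue that matches the current memory pressure state.
WorkQueue& GLVariableAllocationSupport::getActiveWorkQueue() {
    static WorkQueue empty;
    switch (_memoryPressureState) {
        case MemoryPressureState::Oversubscribed:
            return _demoteQueue;      // textures that can give memory back
        case MemoryPressureState::Undersubscribed:
            return _promoteQueue;     // textures that can take more memory
        case MemoryPressureState::Transfer:
            return _transferQueue;    // textures with pending mip uploads
        case MemoryPressureState::Idle:
            break;
    }
    Q_UNREACHABLE();
    return empty;
}
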
+bool GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) {
+#if THREADED_TEXTURE_BUFFERING
+    // If a transfer job is active on the buffering thread, but has not completed it's buffering lambda,
+    // then we need to exit early, since we don't want to have the transfer job leave scope while it's
+    // being used in another thread -- See https://highfidelity.fogbugz.com/f/cases/4626
+    if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) {
+        return false;
+    }
+#endif
+
-void GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) {
     if (_populatedMip <= _allocatedMip) {
+#if THREADED_TEXTURE_BUFFERING
+        _currentTransferJob.reset();
+        _currentTransferTexture.reset();
+#endif
+        return true;
+    }
+
+    // If the transfer queue is empty, rebuild it
+    if (_pendingTransfers.empty()) {
+        populateTransferQueue();
+    }
+
+    bool result = false;
+    if (!_pendingTransfers.empty()) {
+#if THREADED_TEXTURE_BUFFERING
+        // If there is a current transfer, but it's not the top of the pending transfer queue, then it's an orphan, so we want to abandon it.
+        if (_currentTransferJob && _currentTransferJob != _pendingTransfers.front()) {
+            _currentTransferJob.reset();
+        }
+
+        if (!_currentTransferJob) {
+            // Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job
+            // doesn't leave scope, causing a crash in the buffering thread
+            _currentTransferJob = _pendingTransfers.front();
+
+            // Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture
+            _currentTransferTexture = currentTexture;
+        }
+
+        // transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we execute a try here to determine if the buffering
+        // is complete
+        if (_currentTransferJob->tryTransfer()) {
+            _pendingTransfers.pop();
+            // Once a given job is finished, release the shared pointers keeping them alive
+            _currentTransferTexture.reset();
+            _currentTransferJob.reset();
+            result = true;
+        }
+#else
+        if (_pendingTransfers.front()->tryTransfer()) {
+            _pendingTransfers.pop();
+            result = true;
+        }
+#endif
+    }
+    return result;
+}
+
+#if THREADED_TEXTURE_BUFFERING
+void GLVariableAllocationSupport::executeNextBuffer(const TexturePointer& currentTexture) {
+    if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) {
         return;
     }

+    // If the transfer queue is empty, rebuild it
     if (_pendingTransfers.empty()) {
         populateTransferQueue();
     }

     if (!_pendingTransfers.empty()) {
-        // Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture
-        _currentTransferTexture = currentTexture;
-        // Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job
-        // doesn't leave scope, causing a crash in the buffering thread
-        _currentTransferJob = _pendingTransfers.front();
-        // transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we execute a try here to determine if the buffering
-        // is complete
-        if (_currentTransferJob->tryTransfer()) {
-            _pendingTransfers.pop();
-            _currentTransferTexture.reset();
-            _currentTransferJob.reset();
-        }
+        if (!_currentTransferJob) {
+            _currentTransferJob = _pendingTransfers.front();
+            _currentTransferTexture = currentTexture;
+        }
+
+        _currentTransferJob->startBuffering();
     }
 }
+#endif

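The comments above are about object lifetime across threads: the buffering lambda runs asynchronously, so the transfer job and the texture it touches must be kept alive by strong pointers until bufferingCompleted() reports true. A minimal sketch of that pattern with Qt's thread pool, assuming a generic job type with a work lambda (an illustration of the idea, not the project's TransferJob implementation):

#include <QtCore/QThreadPool>
#include <QtConcurrent>
#include <functional>
#include <memory>

class AsyncJob {
public:
    explicit AsyncJob(std::function<void()> work) : _work(std::move(work)) {}

    // Launch the work on a pool thread; the QFuture lets the caller poll for completion.
    void start(QThreadPool* pool) {
        _status = QtConcurrent::run(pool, _work);
    }

    bool completed() const { return _status.isFinished(); }

private:
    std::function<void()> _work;
    QFuture<void> _status;
};

// Usage: hold a shared_ptr to the job (and to any data its lambda captures) until
// completed() is true, exactly as the code above holds _currentTransferJob and
// _currentTransferTexture before resetting them.
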
@ -8,6 +8,9 @@
 #ifndef hifi_gpu_gl_GLTexture_h
 #define hifi_gpu_gl_GLTexture_h

+#include <QtCore/QThreadPool>
+#include <QtConcurrent>
+
 #include "GLShared.h"
 #include "GLBackend.h"
 #include "GLTexelFormat.h"

@ -47,24 +50,19 @@ public:
 class TransferJob {
     using VoidLambda = std::function<void()>;
     using VoidLambdaQueue = std::queue<VoidLambda>;
-    using ThreadPointer = std::shared_ptr<std::thread>;
     const GLTexture& _parent;
     Texture::PixelsPointer _mipData;
     size_t _transferOffset { 0 };
     size_t _transferSize { 0 };

-    // Indicates if a transfer from backing storage to interal storage has started
-    bool _bufferingStarted { false };
-    bool _bufferingCompleted { false };
+    bool _bufferingRequired { true };
     VoidLambda _transferLambda;
     VoidLambda _bufferingLambda;

 #if THREADED_TEXTURE_BUFFERING
-    static Mutex _mutex;
-    static VoidLambdaQueue _bufferLambdaQueue;
-    static ThreadPointer _bufferThread;
-    static std::atomic<bool> _shutdownBufferingThread;
-    static void bufferLoop();
+    // Indicates if a transfer from backing storage to interal storage has started
+    QFuture<void> _bufferingStatus;
+    static QThreadPool* _bufferThreadPool;
 #endif

 public:

@ -75,14 +73,13 @@ public:
     bool tryTransfer();

 #if THREADED_TEXTURE_BUFFERING
-    static void startTransferLoop();
-    static void stopTransferLoop();
+    void startBuffering();
+    bool bufferingRequired() const;
+    bool bufferingCompleted() const;
+    static void startBufferingThread();
 #endif

 private:
-#if THREADED_TEXTURE_BUFFERING
-    void startBuffering();
-#endif
     void transfer();
 };

@ -100,8 +97,10 @@ protected:
     static WorkQueue _transferQueue;
     static WorkQueue _promoteQueue;
     static WorkQueue _demoteQueue;
+#if THREADED_TEXTURE_BUFFERING
     static TexturePointer _currentTransferTexture;
     static TransferJobPointer _currentTransferJob;
+#endif
     static const uvec3 INITIAL_MIP_TRANSFER_DIMENSIONS;
     static const uvec3 MAX_TRANSFER_DIMENSIONS;
     static const size_t MAX_TRANSFER_SIZE;

@ -109,6 +108,8 @@ protected:

     static void updateMemoryPressure();
     static void processWorkQueues();
+    static void processWorkQueue(WorkQueue& workQueue);
+    static TexturePointer getNextWorkQueueItem(WorkQueue& workQueue);
     static void addToWorkQueue(const TexturePointer& texture);
     static WorkQueue& getActiveWorkQueue();

@ -118,7 +119,10 @@ protected:
     bool canPromote() const { return _allocatedMip > _minAllocatedMip; }
     bool canDemote() const { return _allocatedMip < _maxAllocatedMip; }
     bool hasPendingTransfers() const { return _populatedMip > _allocatedMip; }
-    void executeNextTransfer(const TexturePointer& currentTexture);
+#if THREADED_TEXTURE_BUFFERING
+    void executeNextBuffer(const TexturePointer& currentTexture);
+#endif
+    bool executeNextTransfer(const TexturePointer& currentTexture);
     virtual void populateTransferQueue() = 0;
     virtual void promote() = 0;
     virtual void demote() = 0;

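The new static members _bufferThreadPool and startBufferingThread() suggest that buffering jobs are now serialized through a dedicated Qt thread pool instead of the old hand-rolled buffer thread. The function body is not part of this diff; a hedged sketch of what such a pool setup typically looks like (an assumption, not the project's implementation):

#include <QtCore/QThreadPool>

// Sketch: a dedicated single-threaded pool keeps texture buffering jobs strictly
// ordered and off the render thread, which is the role _bufferThreadPool appears
// to play in the class above.
static QThreadPool* createBufferThreadPool() {
    auto* pool = new QThreadPool();
    pool->setMaxThreadCount(1);     // serialize buffering jobs
    pool->setExpiryTimeout(-1);     // keep the worker thread alive between jobs
    return pool;
}
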
@ -17,7 +17,6 @@
 #include <thread>

 #define INCREMENTAL_TRANSFER 0
-#define THREADED_TEXTURE_BUFFERING 1
 #define GPU_SSBO_TRANSFORM_OBJECT 1

 namespace gpu { namespace gl45 {

@ -17,6 +17,7 @@
 #include <unordered_set>

 #include <QDir>
+#include <QSaveFile>

 #include <PathUtils.h>

@ -110,13 +111,14 @@ FilePointer FileCache::writeFile(const char* data, File::Metadata&& metadata) {
         return file;
     }

-    // write the new file
-    FILE* saveFile = fopen(filepath.c_str(), "wb");
-    if (saveFile != nullptr && fwrite(data, metadata.length, 1, saveFile) && fclose(saveFile) == 0) {
+    QSaveFile saveFile(QString::fromStdString(filepath));
+    if (saveFile.open(QIODevice::WriteOnly)
+        && saveFile.write(data, metadata.length) == static_cast<qint64>(metadata.length)
+        && saveFile.commit()) {
         file = addFile(std::move(metadata), filepath);
     } else {
-        qCWarning(file_cache, "[%s] Failed to write %s (%s)", _dirname.c_str(), metadata.key.c_str(), strerror(errno));
-        errno = 0;
+        qCWarning(file_cache, "[%s] Failed to write %s", _dirname.c_str(), metadata.key.c_str());
     }

     return file;

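QSaveFile replaces the fopen/fwrite path above because it writes into a temporary file and only renames it over the target on commit(), so a crash mid-write cannot leave a truncated cache entry behind. A small self-contained sketch of the same pattern (the file path and payload are placeholders):

#include <QtCore/QByteArray>
#include <QtCore/QIODevice>
#include <QtCore/QSaveFile>
#include <QtCore/QString>

bool writeAtomically(const QString& path, const QByteArray& payload) {
    QSaveFile saveFile(path);
    if (!saveFile.open(QIODevice::WriteOnly)) {
        return false;
    }
    if (saveFile.write(payload) != payload.size()) {
        saveFile.cancelWriting();   // discard the temporary file
        return false;
    }
    // commit() flushes and atomically renames the temporary file onto 'path'.
    return saveFile.commit();
}
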
@ -250,6 +250,10 @@ static void addButtonProxyToQmlTablet(QQuickItem* qmlTablet, TabletButtonProxy*
     if (QThread::currentThread() != qmlTablet->thread()) {
         connectionType = Qt::BlockingQueuedConnection;
     }
+    if (buttonProxy == NULL){
+        qCCritical(scriptengine) << "TabletScriptingInterface addButtonProxyToQmlTablet buttonProxy is NULL";
+        return;
+    }
     bool hasResult = QMetaObject::invokeMethod(qmlTablet, "addButtonProxy", connectionType,
                                                Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, buttonProxy->getProperties()));
     if (!hasResult) {

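The added null check matters because QMetaObject::invokeMethod with Qt::BlockingQueuedConnection would otherwise block the calling thread and then dereference a null proxy while building the argument list. A minimal sketch of the same guard-then-invoke pattern (the QML item and method name mirror the call above; the helper itself is illustrative):

#include <QtCore/QMetaObject>
#include <QtCore/QObject>
#include <QtCore/QThread>
#include <QtCore/QVariant>
#include <QtQuick/QQuickItem>

bool invokeOnQmlItem(QQuickItem* item, const QVariant& payload) {
    if (item == nullptr) {
        return false;   // never marshal arguments built from a null pointer
    }
    // Block only when we really are calling across threads.
    Qt::ConnectionType type = (QThread::currentThread() != item->thread())
        ? Qt::BlockingQueuedConnection
        : Qt::DirectConnection;
    QVariant result;
    return QMetaObject::invokeMethod(item, "addButtonProxy", type,
                                     Q_RETURN_ARG(QVariant, result),
                                     Q_ARG(QVariant, payload));
}
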
@ -10,6 +10,7 @@
 //

 #include "ViveControllerManager.h"
+
+#include <algorithm>

 #include <PerfStat.h>
 #include <PathUtils.h>

@ -20,7 +21,11 @@
 #include <NumericalConstants.h>
 #include <ui-plugins/PluginContainer.h>
 #include <UserActivityLogger.h>
+#include <NumericalConstants.h>
 #include <OffscreenUi.h>
+#include <GLMHelpers.h>
+#include <glm/ext.hpp>
+#include <glm/gtc/quaternion.hpp>

 #include <controllers/UserInputMapper.h>

@ -36,14 +41,32 @@ void releaseOpenVrSystem();

 static const char* CONTROLLER_MODEL_STRING = "vr_controller_05_wireless_b";
+const quint64 CALIBRATION_TIMELAPSE = 2 * USECS_PER_SECOND;

 static const char* MENU_PARENT = "Avatar";
 static const char* MENU_NAME = "Vive Controllers";
 static const char* MENU_PATH = "Avatar" ">" "Vive Controllers";
 static const char* RENDER_CONTROLLERS = "Render Hand Controllers";
+static const int MIN_PUCK_COUNT = 2;
+static const int MIN_FEET_AND_HIPS = 3;
+static const int MIN_FEET_HIPS_CHEST = 4;
+static const int FIRST_FOOT = 0;
+static const int SECOND_FOOT = 1;
+static const int HIP = 2;
+static const int CHEST = 3;

 const char* ViveControllerManager::NAME { "OpenVR" };

+static glm::mat4 computeOffset(glm::mat4 defaultToReferenceMat, glm::mat4 defaultJointMat, controller::Pose puckPose) {
+    glm::mat4 poseMat = createMatFromQuatAndPos(puckPose.rotation, puckPose.translation);
+    glm::mat4 referenceJointMat = defaultToReferenceMat * defaultJointMat;
+    return glm::inverse(poseMat) * referenceJointMat;
+}
+
+static bool sortPucksYPosition(std::pair<uint32_t, controller::Pose> firstPuck, std::pair<uint32_t, controller::Pose> secondPuck) {
+    return (firstPuck.second.translation.y < secondPuck.second.translation.y);
+}

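computeOffset() above answers the question "what constant transform takes a tracker puck's live pose to the avatar joint it is strapped to?" It composes the default joint matrix into the current reference frame and then removes the puck's own pose. The check below restates that algebra with plain glm matrices and made-up values, showing that applying the offset to the puck pose recovers the reference joint matrix (a sketch only; controller::Pose is replaced by a quaternion/translation pair):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>

// Build a 4x4 pose matrix from a rotation and a translation.
static glm::mat4 makePose(const glm::quat& q, const glm::vec3& t) {
    glm::mat4 m = glm::mat4_cast(q);
    m[3] = glm::vec4(t, 1.0f);
    return m;
}

int main() {
    glm::mat4 defaultToReference = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.1f, 0.0f));
    glm::mat4 defaultJoint = glm::translate(glm::mat4(1.0f), glm::vec3(0.1f, 0.9f, 0.0f));
    glm::mat4 puckPose = makePose(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), glm::vec3(0.12f, 1.0f, 0.05f));

    // offset = inverse(puckPose) * (defaultToReference * defaultJoint), as in computeOffset()
    glm::mat4 offset = glm::inverse(puckPose) * (defaultToReference * defaultJoint);

    // Applying the offset to the live puck pose recovers the reference joint pose,
    // which is what updateCalibratedLimbs()/addOffsetToPuckPose() rely on.
    glm::mat4 recovered = puckPose * offset;   // == defaultToReference * defaultJoint
    (void)recovered;
    return 0;
}
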
 bool ViveControllerManager::isSupported() const {
     return openVrSupported();
 }

@ -125,6 +148,7 @@ void ViveControllerManager::pluginUpdate(float deltaTime, const controller::Inpu
 void ViveControllerManager::InputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
     _poseStateMap.clear();
     _buttonPressedMap.clear();
+    _validTrackedObjects.clear();

     // While the keyboard is open, we defer strictly to the keyboard values
     if (isOpenVrKeyboardShown()) {

@ -143,6 +167,7 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
     // collect poses for all generic trackers
     for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
         handleTrackedObject(i, inputCalibrationData);
+        handleHmd(i, inputCalibrationData);
     }

     // handle haptics

@ -164,10 +189,27 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
         numTrackedControllers++;
     }
     _trackedControllers = numTrackedControllers;

+    if (checkForCalibrationEvent()) {
+        quint64 currentTime = usecTimestampNow();
+        if (!_timeTilCalibrationSet) {
+            _timeTilCalibrationSet = true;
+            _timeTilCalibration = currentTime + CALIBRATION_TIMELAPSE;
+        }
+
+        if (currentTime > _timeTilCalibration && !_triggersPressedHandled) {
+            _triggersPressedHandled = true;
+            calibrateOrUncalibrate(inputCalibrationData);
+        }
+    } else {
+        _triggersPressedHandled = false;
+        _timeTilCalibrationSet = false;
+    }
+
+    updateCalibratedLimbs();
 }

 void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) {

     uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex;

     if (_system->IsTrackedDeviceConnected(deviceIndex) &&

@ -185,12 +227,129 @@ void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceInde
         // transform into avatar frame
         glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
         _poseStateMap[poseIndex] = pose.transform(controllerToAvatar);
+        _validTrackedObjects.push_back(std::make_pair(poseIndex, _poseStateMap[poseIndex]));
     } else {
         controller::Pose invalidPose;
         _poseStateMap[poseIndex] = invalidPose;
     }
 }

+void ViveControllerManager::InputDevice::calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration) {
+    if (!_calibrated) {
+        calibrate(inputCalibration);
+    } else {
+        uncalibrate();
+    }
+}
+
+void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibrationData& inputCalibration) {
+    // convert the hmd head from sensor space to avatar space
+    glm::mat4 hmdSensorFlippedMat = inputCalibration.hmdSensorMat * Matrices::Y_180;
+    glm::mat4 sensorToAvatarMat = glm::inverse(inputCalibration.avatarMat) * inputCalibration.sensorToWorldMat;
+    glm::mat4 hmdAvatarMat = sensorToAvatarMat * hmdSensorFlippedMat;
+
+    // cancel the roll and pitch for the hmd head
+    glm::quat hmdRotation = cancelOutRollAndPitch(glmExtractRotation(hmdAvatarMat));
+    glm::vec3 hmdTranslation = extractTranslation(hmdAvatarMat);
+    glm::mat4 currentHmd = createMatFromQuatAndPos(hmdRotation, hmdTranslation);
+
+    // calculate the offset from the centerOfEye to defaultHeadMat
+    glm::mat4 defaultHeadOffset = glm::inverse(inputCalibration.defaultCenterEyeMat) * inputCalibration.defaultHeadMat;
+
+    glm::mat4 currentHead = currentHmd * defaultHeadOffset;
+
+    // calculate the defaultToRefrenceXform
+    glm::mat4 defaultToReferenceMat = currentHead * glm::inverse(inputCalibration.defaultHeadMat);
+
+    int puckCount = (int)_validTrackedObjects.size();
+    if (puckCount == MIN_PUCK_COUNT) {
+        _config = Config::Feet;
+    } else if (puckCount == MIN_FEET_AND_HIPS) {
+        _config = Config::FeetAndHips;
+    } else if (puckCount >= MIN_FEET_HIPS_CHEST) {
+        _config = Config::FeetHipsAndChest;
+    } else {
+        return;
+    }
+
+    std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksYPosition);
+
+    auto& firstFoot = _validTrackedObjects[FIRST_FOOT];
+    auto& secondFoot = _validTrackedObjects[SECOND_FOOT];
+    controller::Pose& firstFootPose = firstFoot.second;
+    controller::Pose& secondFootPose = secondFoot.second;
+
+    if (firstFootPose.translation.x < secondFootPose.translation.x) {
+        _jointToPuckMap[controller::LEFT_FOOT] = firstFoot.first;
+        _pucksOffset[firstFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultLeftFoot, firstFootPose);
+        _jointToPuckMap[controller::RIGHT_FOOT] = secondFoot.first;
+        _pucksOffset[secondFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultRightFoot, secondFootPose);
+    } else {
+        _jointToPuckMap[controller::LEFT_FOOT] = secondFoot.first;
+        _pucksOffset[secondFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultLeftFoot, secondFootPose);
+        _jointToPuckMap[controller::RIGHT_FOOT] = firstFoot.first;
+        _pucksOffset[firstFoot.first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultRightFoot, firstFootPose);
+    }
+
+    if (_config == Config::Feet) {
+        // done
+    } else if (_config == Config::FeetAndHips) {
+        _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first;
+        _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second);
+    } else if (_config == Config::FeetHipsAndChest) {
+        _jointToPuckMap[controller::HIPS] = _validTrackedObjects[HIP].first;
+        _pucksOffset[_validTrackedObjects[HIP].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultHips, _validTrackedObjects[HIP].second);
+        _jointToPuckMap[controller::SPINE2] = _validTrackedObjects[CHEST].first;
+        _pucksOffset[_validTrackedObjects[CHEST].first] = computeOffset(defaultToReferenceMat, inputCalibration.defaultSpine2, _validTrackedObjects[CHEST].second);
+    }
+    _calibrated = true;
+}
+
+void ViveControllerManager::InputDevice::uncalibrate() {
+    _pucksOffset.clear();
+    _jointToPuckMap.clear();
+    _calibrated = false;
+}
+
+void ViveControllerManager::InputDevice::updateCalibratedLimbs() {
+    _poseStateMap[controller::LEFT_FOOT] = addOffsetToPuckPose(controller::LEFT_FOOT);
+    _poseStateMap[controller::RIGHT_FOOT] = addOffsetToPuckPose(controller::RIGHT_FOOT);
+    _poseStateMap[controller::HIPS] = addOffsetToPuckPose(controller::HIPS);
+    _poseStateMap[controller::SPINE2] = addOffsetToPuckPose(controller::SPINE2);
+}
+
+controller::Pose ViveControllerManager::InputDevice::addOffsetToPuckPose(int joint) const {
+    auto puck = _jointToPuckMap.find(joint);
+    if (puck != _jointToPuckMap.end()) {
+        uint32_t puckIndex = puck->second;
+        auto puckPose = _poseStateMap.find(puckIndex);
+        auto puckOffset = _pucksOffset.find(puckIndex);
+
+        if ((puckPose != _poseStateMap.end()) && (puckOffset != _pucksOffset.end())) {
+            return puckPose->second.postTransform(puckOffset->second);
+        }
+    }
+    return controller::Pose();
+}
+
+void ViveControllerManager::InputDevice::handleHmd(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) {
+    uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex;
+
+    if (_system->IsTrackedDeviceConnected(deviceIndex) &&
+        _system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_HMD &&
+        _nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) {
+
+        const mat4& mat = _nextSimPoseData.poses[deviceIndex];
+        const vec3 linearVelocity = _nextSimPoseData.linearVelocities[deviceIndex];
+        const vec3 angularVelocity = _nextSimPoseData.angularVelocities[deviceIndex];
+
+        handleHeadPoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity);
+    }
+}
+
 void ViveControllerManager::InputDevice::handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand) {

     if (_system->IsTrackedDeviceConnected(deviceIndex) &&

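calibrate() above flattens the HMD orientation with cancelOutRollAndPitch() so that the reference frame keeps only the user's heading. That helper comes from GLMHelpers and is not shown in this diff; a common way to implement "keep only the yaw" is to project the rotated forward vector onto the horizontal plane — a sketch of that idea, not necessarily the project's exact implementation:

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch: build a yaw-only rotation from an arbitrary orientation by projecting the
// rotated -Z (forward) axis onto the XZ plane and rotating about +Y to face it.
glm::quat cancelRollAndPitchSketch(const glm::quat& rotation) {
    glm::vec3 forward = rotation * glm::vec3(0.0f, 0.0f, -1.0f);
    forward.y = 0.0f;
    if (glm::length(forward) < 1.0e-6f) {
        return glm::quat(1.0f, 0.0f, 0.0f, 0.0f);   // looking straight up/down: no usable yaw
    }
    forward = glm::normalize(forward);
    // Angle around +Y that rotates -Z onto 'forward'.
    float yaw = atan2f(-forward.x, -forward.z);
    return glm::angleAxis(yaw, glm::vec3(0.0f, 1.0f, 0.0f));
}
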
@ -262,7 +421,7 @@ void ViveControllerManager::InputDevice::handleAxisEvent(float deltaTime, uint32
         _axisStateMap[isLeftHand ? LY : RY] = stick.y;
     } else if (axis == vr::k_EButton_SteamVR_Trigger) {
         _axisStateMap[isLeftHand ? LT : RT] = x;
         // The click feeling on the Vive controller trigger represents a value of *precisely* 1.0,
         // so we can expose that as an additional button
         if (x >= 1.0f) {
             _buttonPressedMap.insert(isLeftHand ? LT_CLICK : RT_CLICK);

@ -276,6 +435,14 @@ enum ViveButtonChannel {
     RIGHT_APP_MENU
 };

+bool ViveControllerManager::InputDevice::checkForCalibrationEvent() {
+    auto& endOfMap = _buttonPressedMap.end();
+    auto& leftTrigger = _buttonPressedMap.find(controller::LT);
+    auto& rightTrigger = _buttonPressedMap.find(controller::RT);
+    auto& leftAppButton = _buttonPressedMap.find(LEFT_APP_MENU);
+    auto& rightAppButton = _buttonPressedMap.find(RIGHT_APP_MENU);
+    return ((leftTrigger != endOfMap && leftAppButton != endOfMap) && (rightTrigger != endOfMap && rightAppButton != endOfMap));
+}
+
 // These functions do translation from the Steam IDs to the standard controller IDs
 void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand) {

@ -305,6 +472,19 @@ void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint
     }
 }

+void ViveControllerManager::InputDevice::handleHeadPoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
+                                                             const vec3& linearVelocity, const vec3& angularVelocity) {
+
+    //perform a 180 flip to make the HMD face the +z instead of -z, beacuse the head faces +z
+    glm::mat4 matYFlip = mat * Matrices::Y_180;
+    controller::Pose pose(extractTranslation(matYFlip), glmExtractRotation(matYFlip), linearVelocity, angularVelocity);
+
+    glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
+    glm::mat4 defaultHeadOffset = glm::inverse(inputCalibrationData.defaultCenterEyeMat) * inputCalibrationData.defaultHeadMat;
+    controller::Pose hmdHeadPose = pose.transform(sensorToAvatar);
+    _poseStateMap[controller::HEAD] = hmdHeadPose.postTransform(defaultHeadOffset);
+}
+
 void ViveControllerManager::InputDevice::handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
                                                          const mat4& mat, const vec3& linearVelocity,
                                                          const vec3& angularVelocity, bool isLeftHand) {

@ -353,7 +533,7 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef
     float hapticTime = strength * MAX_HAPTIC_TIME;
     if (hapticTime < duration * 1000.0f) {
         _system->TriggerHapticPulse(deviceIndex, 0, hapticTime);
     }

     float remainingHapticTime = duration - (hapticTime / 1000.0f + deltaTime * 1000.0f); // in milliseconds
     if (leftHand) {

@ -404,6 +584,11 @@ controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableI
         // 3d location of controller
         makePair(LEFT_HAND, "LeftHand"),
         makePair(RIGHT_HAND, "RightHand"),
+        makePair(LEFT_FOOT, "LeftFoot"),
+        makePair(RIGHT_FOOT, "RightFoot"),
+        makePair(HIPS, "Hips"),
+        makePair(SPINE2, "Spine2"),
+        makePair(HEAD, "Head"),

         // 16 tracked poses
         makePair(TRACKED_OBJECT_00, "TrackedObject00"),

@ -14,9 +14,11 @@
 #include <QObject>
 #include <unordered_set>
+#include <vector>
+#include <map>
+#include <utility>

 #include <GLMHelpers.h>

 #include <model/Geometry.h>
 #include <gpu/Texture.h>
 #include <controllers/InputDevice.h>

@ -58,13 +60,21 @@ private:

     bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override;
     void hapticsHelper(float deltaTime, bool leftHand);
+    void calibrateOrUncalibrate(const controller::InputCalibrationData& inputCalibration);
+    void calibrate(const controller::InputCalibrationData& inputCalibration);
+    void uncalibrate();
+    controller::Pose addOffsetToPuckPose(int joint) const;
+    void updateCalibratedLimbs();
+    bool checkForCalibrationEvent();
     void handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand);
+    void handleHmd(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData);
     void handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData);
     void handleButtonEvent(float deltaTime, uint32_t button, bool pressed, bool touched, bool isLeftHand);
     void handleAxisEvent(float deltaTime, uint32_t axis, float x, float y, bool isLeftHand);
     void handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
                          const vec3& linearVelocity, const vec3& angularVelocity, bool isLeftHand);
+    void handleHeadPoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, const vec3& linearVelocity,
+                             const vec3& angularVelocity);
     void partitionTouchpad(int sButton, int xAxis, int yAxis, int centerPsuedoButton, int xPseudoButton, int yPseudoButton);

     class FilteredStick {

@ -90,10 +100,14 @@ private:
         float _timer { 0.0f };
         glm::vec2 _stick { 0.0f, 0.0f };
     };
+    enum class Config { Feet, FeetAndHips, FeetHipsAndChest, NoConfig };
+    Config _config { Config::NoConfig };
     FilteredStick _filteredLeftStick;
     FilteredStick _filteredRightStick;

+    std::vector<std::pair<uint32_t, controller::Pose>> _validTrackedObjects;
+    std::map<uint32_t, glm::mat4> _pucksOffset;
+    std::map<int, uint32_t> _jointToPuckMap;
     // perform an action when the InputDevice mutex is acquired.
     using Locker = std::unique_lock<std::recursive_mutex>;
     template <typename F>

@ -101,10 +115,14 @@ private:

     int _trackedControllers { 0 };
     vr::IVRSystem*& _system;
+    quint64 _timeTilCalibration { 0 };
     float _leftHapticStrength { 0.0f };
     float _leftHapticDuration { 0.0f };
     float _rightHapticStrength { 0.0f };
     float _rightHapticDuration { 0.0f };
+    bool _triggersPressedHandled { false };
+    bool _calibrated { false };
+    bool _timeTilCalibrationSet { false };
     mutable std::recursive_mutex _lock;

     friend class ViveControllerManager;

@ -1376,7 +1376,9 @@ function MyController(hand) {
                 visible: true,
                 alpha: 1,
                 parentID: AVATAR_SELF_ID,
-                parentJointIndex: this.controllerJointIndex,
+                parentJointIndex: MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
+                                                         "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND" :
+                                                         "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND"),
                 endParentID: farParentID
             };
             this.overlayLine = Overlays.addOverlay("line3d", lineProperties);

@ -122,7 +122,8 @@
     function debug() {
         var stateString = "<" + STATE_STRINGS[state] + ">";
         var connecting = "[" + connectingId + "/" + connectingHandJointIndex + "]";
-        print.apply(null, [].concat.apply([LABEL, stateString, JSON.stringify(waitingList), connecting],
+        var current = "[" + currentHand + "/" + currentHandJointIndex + "]";
+        print.apply(null, [].concat.apply([LABEL, stateString, current, JSON.stringify(waitingList), connecting],
             [].map.call(arguments, JSON.stringify)));
     }

@ -759,7 +760,10 @@
             break;
         case "done":
             delete waitingList[senderID];
-            if (state === STATES.CONNECTING && connectingId === senderID) {
+            if (connectingId !== senderID) {
+                break;
+            }
+            if (state === STATES.CONNECTING) {
                 // if they are done, and didn't connect us, terminate our
                 // connecting
                 if (message.connectionId !== MyAvatar.sessionUUID) {

@ -768,11 +772,20 @@
                     // value for isKeyboard, as we should not change the animation
                     // state anyways (if any)
                     startHandshake();
+                } else {
+                    // they just created a connection request to us, and we are connecting to
+                    // them, so lets just stop connecting and make connection..
+                    makeConnection(connectingId);
+                    stopConnecting();
                 }
             } else {
-                // if waiting or inactive, lets clear the connecting id. If in makingConnection,
-                // do nothing
-                if (state !== STATES.MAKING_CONNECTION && connectingId === senderID) {
+                if (state == STATES.MAKING_CONNECTION) {
+                    // we are making connection, they just started, so lets reset the
+                    // poll count just in case
+                    pollCount = 0;
+                } else {
+                    // if waiting or inactive, lets clear the connecting id. If in makingConnection,
+                    // do nothing
                     clearConnecting();
                     if (state !== STATES.INACTIVE) {
                         startHandshake();

@ -268,7 +268,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
             break;
         case 'refreshConnections':
             print('Refreshing Connections...');
-            getConnectionData();
+            getConnectionData(false);
             UserActivityLogger.palAction("refresh_connections", "");
             break;
         case 'removeConnection':

@ -281,25 +281,27 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
                     print("Error: unable to remove connection", connectionUserName, error || response.status);
                     return;
                 }
-                getConnectionData();
+                getConnectionData(false);
             });
             break

         case 'removeFriend':
             friendUserName = message.params;
+            print("Removing " + friendUserName + " from friends.");
             request({
                 uri: METAVERSE_BASE + '/api/v1/user/friends/' + friendUserName,
                 method: 'DELETE'
             }, function (error, response) {
                 if (error || (response.status !== 'success')) {
-                    print("Error: unable to unfriend", friendUserName, error || response.status);
+                    print("Error: unable to unfriend " + friendUserName, error || response.status);
                     return;
                 }
-                getConnectionData();
+                getConnectionData(friendUserName);
             });
             break
         case 'addFriend':
             friendUserName = message.params;
+            print("Adding " + friendUserName + " to friends.");
             request({
                 uri: METAVERSE_BASE + '/api/v1/user/friends',
                 method: 'POST',

@ -312,7 +314,7 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
                     print("Error: unable to friend " + friendUserName, error || response.status);
                     return;
                 }
-                getConnectionData(); // For now, just refresh all connection data. Later, just refresh the one friended row.
+                getConnectionData(friendUserName);
             }
         );
         break;

@ -360,8 +362,6 @@ function getProfilePicture(username, callback) { // callback(url) if successfull
     });
 }
 function getAvailableConnections(domain, callback) { // callback([{usename, location}...]) if successfull. (Logs otherwise)
-    // The back end doesn't do user connections yet. Fake it by getting all users that have made themselves accessible to us,
-    // and pretending that they are all connections.
     url = METAVERSE_BASE + '/api/v1/users?'
     if (domain) {
         url += 'status=' + domain.slice(1, -1); // without curly braces

@ -369,25 +369,22 @@ function getAvailableConnections(domain, callback) { // callback([{usename, loca
         url += 'filter=connections'; // regardless of whether online
     }
     requestJSON(url, function (connectionsData) {
-        // The back end doesn't include the profile picture data, but we can add that here.
-        // For our current purposes, there's no need to be fancy and try to reduce latency by doing some number of requests in parallel,
-        // so these requests are all sequential.
-        var users = connectionsData.users;
-        function addPicture(index) {
-            if (index >= users.length) {
-                return callback(users);
-            }
-            var user = users[index];
-            getProfilePicture(user.username, function (url) {
-                user.profileUrl = url;
-                addPicture(index + 1);
-            });
-        }
-        addPicture(0);
+        callback(connectionsData.users);
     });
 }
-function getConnectionData(domain) { // Update all the usernames that I am entitled to see, using my login but not dependent on canKick.
+function getInfoAboutUser(specificUsername, callback) {
+    url = METAVERSE_BASE + '/api/v1/users?filter=connections'
+    requestJSON(url, function (connectionsData) {
+        for (user in connectionsData.users) {
+            if (connectionsData.users[user].username === specificUsername) {
+                callback(connectionsData.users[user]);
+                return;
+            }
+        }
+        callback(false);
+    });
+}
+function getConnectionData(specificUsername, domain) { // Update all the usernames that I am entitled to see, using my login but not dependent on canKick.
     function frob(user) { // get into the right format
         var formattedSessionId = user.location.node_id || '';
         if (formattedSessionId !== '' && formattedSessionId.indexOf("{") != 0) {

@ -397,19 +394,29 @@ function getConnectionData(domain) { // Update all the usernames that I am entit
             sessionId: formattedSessionId,
             userName: user.username,
             connection: user.connection,
-            profileUrl: user.profileUrl,
+            profileUrl: user.images.thumbnail,
             placeName: (user.location.root || user.location.domain || {}).name || ''
         };
     }
-    getAvailableConnections(domain, function (users) {
-        if (domain) {
-            users.forEach(function (user) {
-                updateUser(frob(user));
-            });
-        } else {
-            sendToQml({ method: 'connections', params: users.map(frob) });
-        }
-    });
+    if (specificUsername) {
+        getInfoAboutUser(specificUsername, function (user) {
+            if (user) {
+                updateUser(frob(user));
+            } else {
+                print('Error: Unable to find information about ' + specificUsername + ' in connectionsData!');
+            }
+        });
+    } else {
+        getAvailableConnections(domain, function (users) {
+            if (domain) {
+                users.forEach(function (user) {
+                    updateUser(frob(user));
+                });
+            } else {
+                sendToQml({ method: 'connections', params: users.map(frob) });
+            }
+        });
+    }
 }

 //

@ -486,7 +493,7 @@ function populateNearbyUserList(selectData, oldAudioData) {
         data.push(avatarPalDatum);
         print('PAL data:', JSON.stringify(avatarPalDatum));
     });
-    getConnectionData(location.domainId); // Even admins don't get relationship data in requestUsernameFromID (which is still needed for admin status, which comes from domain).
+    getConnectionData(false, location.domainId); // Even admins don't get relationship data in requestUsernameFromID (which is still needed for admin status, which comes from domain).
     conserveResources = Object.keys(avatarsOfInterest).length > 20;
     sendToQml({ method: 'nearbyUsers', params: data });
     if (selectData) {

@ -1,4 +1,5 @@
 "use strict";

 /*jslint nomen: true, plusplus: true, vars: true*/
 /*global AvatarList, Entities, EntityViewer, Script, SoundCache, Audio, print, randFloat*/
 //

@ -38,19 +39,27 @@ var DEFAULT_SOUND_DATA = {
     playbackGapRange: 0 // in ms
 };

+//var AGENT_AVATAR_POSITION = { x: -1.5327, y: 0.672515, z: 5.91573 };
+var AGENT_AVATAR_POSITION = { x: -2.83785, y: 1.45243, z: -13.6042 };
+
 //var isACScript = this.EntityViewer !== undefined;
 var isACScript = true;

-Script.include("http://hifi-content.s3.amazonaws.com/ryan/development/utils_ryan.js");
 if (isACScript) {
     Agent.isAvatar = true; // This puts a robot at 0,0,0, but is currently necessary in order to use AvatarList.
     Avatar.skeletonModelURL = "http://hifi-content.s3.amazonaws.com/ozan/dev/avatars/invisible_avatar/invisible_avatar.fst";
+    Avatar.position = AGENT_AVATAR_POSITION;
+    Agent.isListeningToAudioStream = true;
 }
 function ignore() {}
 function debug() { // Display the arguments not just [Object object].
     //print.apply(null, [].map.call(arguments, JSON.stringify));
 }

+function randFloat(low, high) {
+    return low + Math.random() * (high - low);
+}
+
 if (isACScript) {
     EntityViewer.setCenterRadius(QUERY_RADIUS);
 }

@ -93,6 +102,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
         return;
     }
     var properties, soundData; // Latest data, pulled from local octree.

     // getEntityProperties locks the tree, which competes with the asynchronous processing of queryOctree results.
     // Most entity updates are fast and only a very few do getEntityProperties.
     function ensureSoundData() { // We only getEntityProperities when we need to.

@ -115,43 +125,54 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
             }
         }
     }

     // Stumbling on big new pile of entities will do a lot of getEntityProperties. Once.
     if (that.lastUserDataUpdate < userDataCutoff) { // NO DATA => SOUND DATA
         ensureSoundData();
     }

     if (!that.url) { // NO DATA => NO DATA
         return that.stop();
     }

     if (!that.sound) { // SOUND DATA => DOWNLOADING
         that.sound = SoundCache.getSound(soundData.url); // SoundCache can manage duplicates better than we can.
     }

     if (!that.sound.downloaded) { // DOWNLOADING => DOWNLOADING
         return;
     }

     if (that.playAfter > now) { // DOWNLOADING | WAITING => WAITING
         return;
     }

     ensureSoundData(); // We'll try to play/setOptions and will need position, so we might as well get soundData, too.
     if (soundData.url !== that.url) { // WAITING => NO DATA (update next time around)
         return that.stop();
     }

     var options = {
         position: properties.position,
         loop: soundData.loop || DEFAULT_SOUND_DATA.loop,
         volume: soundData.volume || DEFAULT_SOUND_DATA.volume
     };

     function repeat() {
         return !options.loop && (soundData.playbackGap >= 0);
     }

     function randomizedNextPlay() { // time of next play or recheck, randomized to distribute the work
         var range = soundData.playbackGapRange || DEFAULT_SOUND_DATA.playbackGapRange,
             base = repeat() ? ((that.sound.duration * MSEC_PER_SEC) + (soundData.playbackGap || DEFAULT_SOUND_DATA.playbackGap)) : RECHECK_TIME;
         return now + base + randFloat(-Math.min(base, range), range);
     }

     if (that.injector && soundData.playing === false) {
         that.injector.stop();
         that.injector = null;
     }

     if (!that.injector) {
         if (soundData.playing === false) { // WAITING => PLAYING | WAITING
             return;

@ -165,6 +186,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
         }
         return;
     }

     that.injector.setOptions(options); // PLAYING => UPDATE POSITION ETC
     if (!that.injector.playing) { // Subtle: a looping sound will not check playbackGap.
         if (repeat()) { // WAITING => PLAYING

@ -178,6 +200,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
         }
     };
 }

 function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar) {
     ignore(avatarPosition, avatar); // We could use avatars and/or avatarPositions to prioritize which ones to play.
     var entitySound = entityCache[entityIdentifier];

@ -186,7 +209,9 @@ function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar)
     }
     entitySound.timestamp = timestamp; // Might be updated for multiple avatars. That's fine.
 }

 var nUpdates = UPDATES_PER_STATS_LOG, lastStats = Date.now();

 function updateAllEntityData() { // A fast update of all entities we know about. A few make sounds.
     var now = Date.now(),
         expirationCutoff = now - EXPIRATION_TIME,
