Merge branch 'master' into feature/controller-display

This commit is contained in:
Anthony J. Thibault 2016-10-21 13:48:07 -07:00
commit c431c69d60
41 changed files with 988 additions and 793 deletions

View file

@ -49,7 +49,7 @@ AudioMixerClientData::~AudioMixerClientData() {
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
QReadLocker readLocker { &_streamsLock };
auto it = _audioStreams.find(QUuid());
if (it != _audioStreams.end()) {
return dynamic_cast<AvatarAudioStream*>(it->second.get());
@ -75,7 +75,7 @@ void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid&
int AudioMixerClientData::parseData(ReceivedMessage& message) {
PacketType packetType = message.getType();
if (packetType == PacketType::AudioStreamStats) {
// skip over header, appendFlag, and num stats packed
@ -218,11 +218,10 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
auto nodeList = DependencyManager::get<NodeList>();
// The append flag is a boolean value that will be packed right after the header. The first packet sent
// inside this method will have 0 for this flag, while every subsequent packet will have 1 for this flag.
// The sole purpose of this flag is so the client can clear its map of injected audio stream stats when
// it receives a packet with an appendFlag of 0. This prevents the buildup of dead audio stream stats in the client.
quint8 appendFlag = 0;
// The append flag is a boolean value that will be packed right after the header.
// This flag allows the client to know when it has received all stats packets, so it can group any downstream effects,
// and clear its cache of injector stream stats; it helps to prevent buildup of dead audio stream stats in the client.
quint8 appendFlag = AudioStreamStats::START;
auto streamsCopy = getAudioStreams();
@ -233,14 +232,21 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
while (numStreamStatsRemaining > 0) {
auto statsPacket = NLPacket::create(PacketType::AudioStreamStats);
// pack the append flag in this packet
statsPacket->writePrimitive(appendFlag);
appendFlag = 1;
int numStreamStatsRoomFor = (int)(statsPacket->size() - sizeof(quint8) - sizeof(quint16)) / sizeof(AudioStreamStats);
// calculate and pack the number of stream stats to follow
// calculate the number of stream stats to follow
quint16 numStreamStatsToPack = std::min(numStreamStatsRemaining, numStreamStatsRoomFor);
// is this the terminal packet?
if (numStreamStatsRemaining <= numStreamStatsToPack) {
appendFlag |= AudioStreamStats::END;
}
// pack the append flag in this packet
statsPacket->writePrimitive(appendFlag);
appendFlag = 0;
// pack the number of stream stats to follow
statsPacket->writePrimitive(numStreamStatsToPack);
// pack the calculated number of stream stats

View file

@ -51,9 +51,10 @@ OriginalDesktop.Desktop {
Toolbar {
id: sysToolbar;
objectName: "com.highfidelity.interface.toolbar.system";
// Magic: sysToolbar.x and y come from settings, and are bound before the properties specified here are applied.
x: sysToolbar.x;
y: sysToolbar.y;
// These values will be overridden by sysToolbar.x/y if there is a saved position in Settings
// On exit, the sysToolbar position is saved to settings
x: 30
y: 50
}
property var toolbars: (function (map) { // answer dictionary preloaded with sysToolbar
map[sysToolbar.objectName] = sysToolbar;

View file

@ -1729,6 +1729,7 @@ void Application::initializeUi() {
// though I can't find it. Hence, "ApplicationInterface"
rootContext->setContextProperty("ApplicationInterface", this);
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
rootContext->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
rootContext->setContextProperty("Controller", DependencyManager::get<controller::ScriptingInterface>().data());
rootContext->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
FileScriptingInterface* fileDownload = new FileScriptingInterface(engine);
@ -3759,12 +3760,6 @@ void Application::updateDialogs(float deltaTime) const {
PerformanceWarning warn(showWarnings, "Application::updateDialogs()");
auto dialogsManager = DependencyManager::get<DialogsManager>();
// Update audio stats dialog, if any
AudioStatsDialog* audioStatsDialog = dialogsManager->getAudioStatsDialog();
if(audioStatsDialog) {
audioStatsDialog->update();
}
// Update bandwidth dialog, if any
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
if (bandwidthDialog) {
@ -5013,6 +5008,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
scriptEngine->registerGlobalObject("Stats", Stats::getInstance());
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
// Caches
scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());

View file

@ -59,7 +59,6 @@
#include "scripting/ControllerScriptingInterface.h"
#include "scripting/DialogsManagerScriptingInterface.h"
#include "ui/ApplicationOverlay.h"
#include "ui/AudioStatsDialog.h"
#include "ui/BandwidthDialog.h"
#include "ui/LodToolsDialog.h"
#include "ui/LogDialog.h"

View file

@ -654,10 +654,6 @@ Menu::Menu() {
audioScopeFramesGroup->addAction(fiftyFrames);
}
// Developer > Audio > Audio Network Stats...
addActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioNetworkStats, 0,
dialogsManager.data(), SLOT(audioStatsDetails()));
// Developer > Physics >>>
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
{

View file

@ -37,7 +37,6 @@ namespace MenuOption {
const QString AssetMigration = "ATP Asset Migration";
const QString AssetServer = "Asset Browser";
const QString Attachments = "Attachments...";
const QString AudioNetworkStats = "Audio Network Stats";
const QString AudioNoiseReduction = "Audio Noise Reduction";
const QString AudioScope = "Show Scope";
const QString AudioScopeFiftyFrames = "Fifty";

View file

@ -98,9 +98,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
PROFILE_RANGE(__FUNCTION__);
if (!_uiTexture) {
_uiTexture = gpu::TexturePointer(gpu::Texture::createExternal2D([](uint32_t recycleTexture, void* recycleFence){
DependencyManager::get<OffscreenUi>()->releaseTexture({ recycleTexture, recycleFence });
}));
_uiTexture = gpu::TexturePointer(gpu::Texture::createExternal2D(OffscreenQmlSurface::getDiscardLambda()));
_uiTexture->setSource(__FUNCTION__);
}
// Once we move UI rendering and screen rendering to different

View file

@ -1,296 +0,0 @@
//
// AudioStatsDialog.cpp
// interface/src/ui
//
// Created by Bridget Went on 7/9/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AudioStatsDialog.h"
#include <cstdio>
#include <AudioClient.h>
#include <AudioConstants.h>
#include <AudioIOStats.h>
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <NodeList.h>
#include <Util.h>
const unsigned COLOR0 = 0x33cc99ff;
const unsigned COLOR1 = 0xffef40c0;
const unsigned COLOR2 = 0xd0d0d0a0;
const unsigned COLOR3 = 0x01DD7880;
// Builds one centered QLabel row in the given form, tinted with a dimmed
// version of colorRGBA (layout is 0xRRGGBBAA; the alpha byte is discarded).
AudioStatsDisplay::AudioStatsDisplay(QFormLayout* form,
    QString text, unsigned colorRGBA) :
    _text(text),
    _colorRGBA(colorRGBA)
{
    _label = new QLabel();
    _label->setAlignment(Qt::AlignCenter);
    QPalette palette = _label->palette();
    // Strip the alpha byte, then scale each 8-bit channel to ~5/8 brightness
    // (1/2 from the first term + 1/8 from the second) before applying it.
    unsigned rgb = colorRGBA >> 8;
    rgb = ((rgb & 0xfefefeu) >> 1) + ((rgb & 0xf8f8f8) >> 3);
    palette.setColor(QPalette::WindowText, QColor::fromRgb(rgb));
    _label->setPalette(palette);
    form->addRow(_label);
}
// Flushes the buffered stat string (set by updatedDisplay) into the label.
void AudioStatsDisplay::paint() {
    _label->setText(_strBuf);
}
// Stores the latest stat string; it becomes visible on the next paint().
void AudioStatsDisplay::updatedDisplay(QString str) {
    _strBuf = str;
}
// Builds the always-on-top stats window: grabs the AudioIOStats from the
// AudioClient, creates one label channel per stat group, and starts a 200 ms
// timer (5x/sec) that refreshes the values via renderStats().
AudioStatsDialog::AudioStatsDialog(QWidget* parent) :
    QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) {
    setWindowTitle("Audio Network Statistics");
    // Get statistics from the Audio Client
    _stats = &DependencyManager::get<AudioClient>()->getStats();
    // Create layout
    _form = new QFormLayout();
    _form->setSizeConstraint(QLayout::SetFixedSize);
    // Initialize channels' content (needed to correctly size channels)
    updateStats();
    // Create channels; start with a single empty channel — addChannel fills
    // the last one and then grows the vector for the next call.
    _audioDisplayChannels = QVector<QVector<AudioStatsDisplay*>>(1);
    _audioMixerID = addChannel(_form, _audioMixerStats, COLOR0);
    _upstreamClientID = addChannel(_form, _upstreamClientStats, COLOR1);
    _upstreamMixerID = addChannel(_form, _upstreamMixerStats, COLOR2);
    _downstreamID = addChannel(_form, _downstreamStats, COLOR3);
    _upstreamInjectedID = addChannel(_form, _upstreamInjectedStats, COLOR0);
    // Initialize channels
    updateChannels();
    // Future renders
    connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(renderStats()));
    averageUpdateTimer->start(200);
    // Initial render
    QDialog::setLayout(_form);
}
// Creates one AudioStatsDisplay row per string in `stats` and appends them to
// the current (last) channel. Returns that channel's index and grows
// _audioDisplayChannels by one so the next call fills a fresh channel.
int AudioStatsDialog::addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color) {
    int channelID = _audioDisplayChannels.size() - 1;
    for (int i = 0; i < stats.size(); i++)
        // Create new display label
        _audioDisplayChannels[channelID].push_back(new AudioStatsDisplay(form, stats.at(i), color));
    // Expand vector to fit next channel
    _audioDisplayChannels.resize(_audioDisplayChannels.size() + 1);
    return channelID;
}
// Timer slot (fires every 200 ms): recompute the stat strings, then push
// them into the display widgets.
void AudioStatsDialog::renderStats() {
    updateStats();
    updateChannels();
}
// Pushes every stat-group string vector into its matching display channel.
void AudioStatsDialog::updateChannels() {
    updateChannel(_audioMixerStats, _audioMixerID);
    updateChannel(_upstreamClientStats, _upstreamClientID);
    updateChannel(_upstreamMixerStats, _upstreamMixerID);
    updateChannel(_downstreamStats, _downstreamID);
    updateChannel(_upstreamInjectedStats, _upstreamInjectedID);
}
// Copies each stat string of one group into the display widget at the same
// index of the given channel.
void AudioStatsDialog::updateChannel(QVector<QString>& stats, int channelID) {
    auto& displays = _audioDisplayChannels[channelID];
    int idx = 0;
    while (idx < stats.size()) {
        displays.at(idx)->updatedDisplay(stats.at(idx));
        ++idx;
    }
}
// Rebuilds every stat string: clears all channel string vectors, samples the
// latency figures from AudioIOStats and the audio-mixer node (if connected),
// then refills the pipeline, upstream, mixer, downstream and injected-stream
// sections in display order.
void AudioStatsDialog::updateStats() {
    // Clear current stats from all vectors
    clearAllChannels();
    double audioInputBufferLatency{ 0.0 };
    double inputRingBufferLatency{ 0.0 };
    double networkRoundtripLatency{ 0.0 };
    double mixerRingBufferLatency{ 0.0 };
    double outputRingBufferLatency{ 0.0 };
    double audioOutputBufferLatency{ 0.0 };
    // All latencies stay zero unless an audio mixer is currently connected
    if (SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer)) {
        audioInputBufferLatency = (double)_stats->getInputMsRead().getWindowMax();
        inputRingBufferLatency = (double)_stats->getInputMsUnplayed().getWindowMax();
        networkRoundtripLatency = (double)audioMixerNodePointer->getPingMs();
        mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._unplayedMs;
        outputRingBufferLatency = (double)_stats->getMixerDownstreamStats()._unplayedMs;
        audioOutputBufferLatency = (double)_stats->getOutputMsUnplayed().getWindowMax();
    }
    double totalLatency = audioInputBufferLatency + inputRingBufferLatency + mixerRingBufferLatency
        + outputRingBufferLatency + audioOutputBufferLatency + networkRoundtripLatency;
    QString stats;
    _audioMixerStats.push_back("PIPELINE (averaged over the past 10s)");
    stats = "Input Read:\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(audioInputBufferLatency, 'f', 0)));
    stats = "Input Ring:\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(inputRingBufferLatency, 'f', 0)));
    // Round-trip ping is split evenly between the two network directions
    stats = "Network (client->mixer):\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(networkRoundtripLatency / 2, 'f', 0)));
    stats = "Mixer Ring:\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(mixerRingBufferLatency, 'f', 0)));
    stats = "Network (mixer->client):\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(networkRoundtripLatency / 2, 'f', 0)));
    stats = "Output Ring:\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(outputRingBufferLatency, 'f', 0)));
    stats = "Output Read:\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(audioOutputBufferLatency, 'f', 0)));
    stats = "TOTAL:\t%1 ms";
    _audioMixerStats.push_back(stats.arg(QString::number(totalLatency, 'f', 0)));
    // Gaps between microphone packets this client sent to the mixer
    const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketTimegaps();
    _upstreamClientStats.push_back("\nUpstream Mic Audio Packets Sent Gaps (by client):");
    stats = "Inter-packet timegaps";
    _upstreamClientStats.push_back(stats);
    stats = "overall min:\t%1, max:\t%2, avg:\t%3";
    stats = stats.arg(formatUsecTime(packetSentTimeGaps.getMin()),
        formatUsecTime(packetSentTimeGaps.getMax()),
        formatUsecTime(packetSentTimeGaps.getAverage()));
    _upstreamClientStats.push_back(stats);
    stats = "last window min:\t%1, max:\t%2, avg:\t%3";
    stats = stats.arg(formatUsecTime(packetSentTimeGaps.getWindowMin()),
        formatUsecTime(packetSentTimeGaps.getWindowMax()),
        formatUsecTime(packetSentTimeGaps.getWindowAverage()));
    _upstreamClientStats.push_back(stats);
    _upstreamMixerStats.push_back("\nMIXER STREAM");
    _upstreamMixerStats.push_back("(this client's remote mixer stream performance)");
    renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), &_upstreamMixerStats);
    _downstreamStats.push_back("\nCLIENT STREAM");
    AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
    renderAudioStreamStats(&downstreamStats, &_downstreamStats);
    // One section per injected stream, keyed by its stream identifier
    if (_shouldShowInjectedStreams) {
        foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
            stats = "\nINJECTED STREAM (ID: %1)";
            stats = stats.arg(injectedStreamAudioStats._streamIdentifier.toString());
            _upstreamInjectedStats.push_back(stats);
            renderAudioStreamStats(&injectedStreamAudioStats, &_upstreamInjectedStats);
        }
    }
}
// Appends the formatted packet-loss, ring-buffer and timegap figures of one
// audio stream to the given string vector (used for mixer, client and
// injected streams alike).
void AudioStatsDialog::renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats) {
    QString stats = "Packet Loss";
    audioStreamStats->push_back(stats);
    stats = "overall:\t%1%\t(%2 lost), window:\t%3%\t(%4 lost)";
    // Loss rates are fractions in [0,1]; scale to whole percent for display
    stats = stats.arg(QString::number((int)(streamStats->_packetStreamStats.getLostRate() * 100.0f)),
        QString::number((int)(streamStats->_packetStreamStats._lost)),
        QString::number((int)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f)),
        QString::number((int)(streamStats->_packetStreamWindowStats._lost)));
    audioStreamStats->push_back(stats);
    stats = "Ringbuffer";
    audioStreamStats->push_back(stats);
    stats = "available frames (avg):\t%1\t(%2), desired:\t%3";
    stats = stats.arg(QString::number(streamStats->_framesAvailable),
        QString::number(streamStats->_framesAvailableAverage),
        QString::number(streamStats->_desiredJitterBufferFrames));
    audioStreamStats->push_back(stats);
    stats = "starves:\t%1, last starve duration:\t%2, drops:\t%3, overflows:\t%4";
    stats = stats.arg(QString::number(streamStats->_starveCount),
        QString::number(streamStats->_consecutiveNotMixedCount),
        QString::number(streamStats->_framesDropped),
        QString::number(streamStats->_overflowCount));
    audioStreamStats->push_back(stats);
    stats = "Inter-packet timegaps";
    audioStreamStats->push_back(stats);
    stats = "overall min:\t%1, max:\t%2, avg:\t%3";
    stats = stats.arg(formatUsecTime(streamStats->_timeGapMin),
        formatUsecTime(streamStats->_timeGapMax),
        formatUsecTime(streamStats->_timeGapAverage));
    audioStreamStats->push_back(stats);
    stats = "last window min:\t%1, max:\t%2, avg:\t%3";
    stats = stats.arg(formatUsecTime(streamStats->_timeGapWindowMin),
        formatUsecTime(streamStats->_timeGapWindowMax),
        formatUsecTime(streamStats->_timeGapWindowAverage));
    audioStreamStats->push_back(stats);
}
// Empties every stat-group string vector (the display widgets themselves are
// untouched; they are refreshed separately by updateChannels()).
void AudioStatsDialog::clearAllChannels() {
    _audioMixerStats.clear();
    _upstreamClientStats.clear();
    _upstreamMixerStats.clear();
    _downstreamStats.clear();
    _upstreamInjectedStats.clear();
}
// On every paint, flush each display's buffered stat string into its label,
// then let QDialog do its normal painting.
void AudioStatsDialog::paintEvent(QPaintEvent* event) {
    for (auto& channel : _audioDisplayChannels) {
        for (auto* display : channel) {
            display->paint();
        }
    }
    QDialog::paintEvent(event);
}
// Treat ESC (reject) as an ordinary close so closeEvent/closed() still fire.
void AudioStatsDialog::reject() {
    // Just regularly close upon ESC
    QDialog::close();
}
// Emits closed() after the default close handling, so the owner (see
// DialogsManager) can schedule deletion of this dialog.
void AudioStatsDialog::closeEvent(QCloseEvent* event) {
    QDialog::closeEvent(event);
    emit closed();
}
// Tears down all per-channel stat displays.
// Fix: the previous code called _audioDisplayChannels[i].clear() BEFORE the
// inner loop that deletes the entries, so the delete loop iterated an empty
// vector and every AudioStatsDisplay (created without a QObject parent in
// addChannel) was leaked. Delete the entries first, then clear the vector.
AudioStatsDialog::~AudioStatsDialog() {
    clearAllChannels();
    for (int i = 0; i < _audioDisplayChannels.size(); i++) {
        for (int j = 0; j < _audioDisplayChannels[i].size(); j++) {
            delete _audioDisplayChannels[i].at(j);
        }
        _audioDisplayChannels[i].clear();
    }
}

View file

@ -1,112 +0,0 @@
//
// AudioStatsDialog.h
// hifi
//
// Created by Bridget Went on 7/9/15.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef __hifi__AudioStatsDialog__
#define __hifi__AudioStatsDialog__
#include <stdio.h>
#include <QDialog>
#include <QLabel>
#include <QFormLayout>
#include <QVector>
#include <QTimer>
#include <QString>
#include <QObject>
#include <DependencyManager.h>
class AudioIOStats;
class AudioStreamStats;
//display
// One line of audio statistics, rendered as a colored QLabel row in a form.
// NOTE(review): declared with SINGLETON_DEPENDENCY yet instantiated once per
// stat line by AudioStatsDialog::addChannel — confirm the macro is intended.
class AudioStatsDisplay : public QObject, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY
public:
    // form: receives the label row; text: initial caption;
    // colorRGBA: 0xRRGGBBAA color, dimmed before use.
    AudioStatsDisplay(QFormLayout* form, QString text, unsigned colorRGBA);
    // Buffer a new string; shown on the next paint().
    void updatedDisplay(QString str);
    // Push the buffered string into the label.
    void paint();
private:
    QString _strBuf;     // pending text, applied by paint()
    QLabel* _label;      // label widget added to the form
    QString _text;       // caption supplied at construction
    unsigned _colorRGBA; // original RGBA color
};
//dialog
// Always-on-top dialog showing audio pipeline latency, packet loss and
// jitter-buffer statistics, refreshed 5x/sec by an internal timer.
class AudioStatsDialog : public QDialog {
    Q_OBJECT
public:
    AudioStatsDialog(QWidget* parent);
    ~AudioStatsDialog();
    void paintEvent(QPaintEvent*) override;
private:
    // audio stats methods for rendering
    // One string vector per stat group; rebuilt from scratch by updateStats()
    QVector<QString> _audioMixerStats;
    QVector<QString> _upstreamClientStats;
    QVector<QString> _upstreamMixerStats;
    QVector<QString> _downstreamStats;
    QVector<QString> _upstreamInjectedStats;
    // Channel indices returned by addChannel(), parallel to the vectors above
    int _audioMixerID;
    int _upstreamClientID;
    int _upstreamMixerID;
    int _downstreamID;
    int _upstreamInjectedID;
    // channel -> list of display rows (one row per stat string)
    QVector<QVector<AudioStatsDisplay*>> _audioDisplayChannels;
    void updateStats();
    int addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color);
    void updateChannel(QVector<QString>& stats, const int channelID);
    void updateChannels();
    void clearAllChannels();
    void renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamstats);
    const AudioIOStats* _stats; // owned by AudioClient; not owned here
    QFormLayout* _form;
    bool _shouldShowInjectedStreams{ false };
signals:
    void closed();
public slots:
    void reject() override;
    // Timer slot: recompute and redisplay all stats
    void renderStats();
protected:
    // Emits a 'closed' signal when this dialog is closed.
    void closeEvent(QCloseEvent*) override;
private:
    QTimer* averageUpdateTimer = new QTimer(this); // 200 ms refresh timer
};
#endif /* defined(__hifi__AudioStatsDialog__) */

View file

@ -103,20 +103,6 @@ void DialogsManager::cachesSizeDialog() {
_cachesSizeDialog->raise();
}
// Shows (and raises) the Audio Network Statistics dialog, creating it lazily.
void DialogsManager::audioStatsDetails() {
    if (! _audioStatsDialog) {
        _audioStatsDialog = new AudioStatsDialog(qApp->getWindow());
        // Self-delete on close; _audioStatsDialog is a QPointer, so it nulls
        // itself and the dialog is recreated on the next call.
        connect(_audioStatsDialog, SIGNAL(closed()), _audioStatsDialog, SLOT(deleteLater()));
        if (_hmdToolsDialog) {
            // Let the HMD tools track this window while in HMD mode
            _hmdToolsDialog->watchWindow(_audioStatsDialog->windowHandle());
        }
        _audioStatsDialog->show();
    }
    _audioStatsDialog->raise();
}
void DialogsManager::bandwidthDetails() {
if (! _bandwidthDialog) {
_bandwidthDialog = new BandwidthDialog(qApp->getWindow());

View file

@ -20,7 +20,6 @@
class AnimationsDialog;
class AttachmentsDialog;
class AudioStatsDialog;
class BandwidthDialog;
class CachesSizeDialog;
class DiskCacheEditor;
@ -35,7 +34,6 @@ class DialogsManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
QPointer<AudioStatsDialog> getAudioStatsDialog() const { return _audioStatsDialog; }
QPointer<BandwidthDialog> getBandwidthDialog() const { return _bandwidthDialog; }
QPointer<HMDToolsDialog> getHMDToolsDialog() const { return _hmdToolsDialog; }
QPointer<LodToolsDialog> getLodToolsDialog() const { return _lodToolsDialog; }
@ -52,7 +50,6 @@ public slots:
void showLoginDialog();
void octreeStatsDetails();
void cachesSizeDialog();
void audioStatsDetails();
void bandwidthDetails();
void lodTools();
void hmdTools(bool showTools);
@ -78,7 +75,6 @@ private:
QPointer<AnimationsDialog> _animationsDialog;
QPointer<AttachmentsDialog> _attachmentsDialog;
QPointer<AudioStatsDialog> _audioStatsDialog;
QPointer<BandwidthDialog> _bandwidthDialog;
QPointer<CachesSizeDialog> _cachesSizeDialog;
QPointer<DiskCacheEditor> _diskCacheEditor;

View file

@ -28,8 +28,6 @@ class Stats : public QQuickItem {
Q_PROPERTY(bool expanded READ isExpanded WRITE setExpanded NOTIFY expandedChanged)
Q_PROPERTY(bool timingExpanded READ isTimingExpanded NOTIFY timingExpandedChanged)
Q_PROPERTY(QString monospaceFont READ monospaceFont CONSTANT)
Q_PROPERTY(float audioPacketlossUpstream READ getAudioPacketLossUpstream)
Q_PROPERTY(float audioPacketlossDownstream READ getAudioPacketLossDownstream)
STATS_PROPERTY(int, serverCount, 0)
// How often the app is creating new gpu::Frames
@ -102,9 +100,6 @@ public:
return _monospaceFont;
}
float getAudioPacketLossUpstream() { return _audioStats->getMixerAvatarStreamStats()._packetStreamStats.getLostRate(); }
float getAudioPacketLossDownstream() { return _audioStats->getMixerDownstreamStats()._packetStreamStats.getLostRate(); }
void updateStats(bool force = false);
bool isExpanded() { return _expanded; }

View file

@ -49,6 +49,8 @@ Web3DOverlay::~Web3DOverlay() {
if (_webSurface) {
_webSurface->pause();
_webSurface->disconnect(_connection);
// The lifetime of the QML surface MUST be managed by the main thread
// Additionally, we MUST use local variables copied by value, rather than
// member variables, since they would implicitly refer to a this that
@ -111,9 +113,7 @@ void Web3DOverlay::render(RenderArgs* args) {
if (!_texture) {
auto webSurface = _webSurface;
_texture = gpu::TexturePointer(gpu::Texture::createExternal2D([webSurface](uint32_t recycleTexture, void* recycleFence) {
webSurface->releaseTexture({ recycleTexture, recycleFence });
}));
_texture = gpu::TexturePointer(gpu::Texture::createExternal2D(OffscreenQmlSurface::getDiscardLambda()));
_texture->setSource(__FUNCTION__);
}
OffscreenQmlSurface::TextureAndFence newTextureAndFence;

View file

@ -148,7 +148,7 @@ public slots:
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
void handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec);
void sendDownstreamAudioStatsPacket() { _stats.sendDownstreamAudioStatsPacket(); }
void sendDownstreamAudioStatsPacket() { _stats.publish(); }
void handleAudioInput();
void handleRecordedAudioInput(const QByteArray& audio);
void reset();

View file

@ -18,22 +18,24 @@
#include "AudioIOStats.h"
// This is called 5x/sec (see AudioStatsDialog), and we want it to log the last 5s
static const int INPUT_READS_WINDOW = 25;
static const int INPUT_UNPLAYED_WINDOW = 25;
static const int OUTPUT_UNPLAYED_WINDOW = 25;
// This is called 1x/sec (see AudioClient) and we want it to log the last 5s
static const int INPUT_READS_WINDOW = 5;
static const int INPUT_UNPLAYED_WINDOW = 5;
static const int OUTPUT_UNPLAYED_WINDOW = 5;
static const int APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS = (int)(30.0f * 1000.0f / AudioConstants::NETWORK_FRAME_MSECS);
AudioIOStats::AudioIOStats(MixedProcessedAudioStream* receivedAudioStream) :
_receivedAudioStream(receivedAudioStream),
_inputMsRead(0, INPUT_READS_WINDOW),
_inputMsUnplayed(0, INPUT_UNPLAYED_WINDOW),
_outputMsUnplayed(0, OUTPUT_UNPLAYED_WINDOW),
_interface(new AudioStatsInterface(this)),
_inputMsRead(1, INPUT_READS_WINDOW),
_inputMsUnplayed(1, INPUT_UNPLAYED_WINDOW),
_outputMsUnplayed(1, OUTPUT_UNPLAYED_WINDOW),
_lastSentPacketTime(0),
_packetTimegaps(0, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS)
_packetTimegaps(1, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS),
_receivedAudioStream(receivedAudioStream)
{
}
void AudioIOStats::reset() {
@ -44,11 +46,13 @@ void AudioIOStats::reset() {
_outputMsUnplayed.reset();
_packetTimegaps.reset();
_mixerAvatarStreamStats = AudioStreamStats();
_mixerInjectedStreamStatsMap.clear();
_interface->updateLocalBuffers(_inputMsRead, _inputMsUnplayed, _outputMsUnplayed, _packetTimegaps);
_interface->updateMixerStream(AudioStreamStats());
_interface->updateClientStream(AudioStreamStats());
_interface->updateInjectorStreams(QHash<QUuid, AudioStreamStats>());
}
void AudioIOStats::sentPacket() {
void AudioIOStats::sentPacket() const {
// first time this is 0
if (_lastSentPacketTime == 0) {
_lastSentPacketTime = usecTimestampNow();
@ -60,37 +64,13 @@ void AudioIOStats::sentPacket() {
}
}
const MovingMinMaxAvg<float>& AudioIOStats::getInputMsRead() const {
_inputMsRead.currentIntervalComplete();
return _inputMsRead;
}
const MovingMinMaxAvg<float>& AudioIOStats::getInputMsUnplayed() const {
_inputMsUnplayed.currentIntervalComplete();
return _inputMsUnplayed;
}
const MovingMinMaxAvg<float>& AudioIOStats::getOutputMsUnplayed() const {
_outputMsUnplayed.currentIntervalComplete();
return _outputMsUnplayed;
}
const MovingMinMaxAvg<quint64>& AudioIOStats::getPacketTimegaps() const {
_packetTimegaps.currentIntervalComplete();
return _packetTimegaps;
}
const AudioStreamStats AudioIOStats::getMixerDownstreamStats() const {
return _receivedAudioStream->getAudioStreamStats();
}
void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// parse the appendFlag, clear injected audio stream stats if 0
quint8 appendFlag;
message->readPrimitive(&appendFlag);
if (!appendFlag) {
_mixerInjectedStreamStatsMap.clear();
if (appendFlag & AudioStreamStats::START) {
_injectorStreams.clear();
}
// parse the number of stream stats structs to follow
@ -103,14 +83,18 @@ void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> mess
message->readPrimitive(&streamStats);
if (streamStats._streamType == PositionalAudioStream::Microphone) {
_mixerAvatarStreamStats = streamStats;
_interface->updateMixerStream(streamStats);
} else {
_mixerInjectedStreamStatsMap[streamStats._streamIdentifier] = streamStats;
_injectorStreams[streamStats._streamIdentifier] = streamStats;
}
}
if (appendFlag & AudioStreamStats::END) {
_interface->updateInjectorStreams(_injectorStreams);
}
}
void AudioIOStats::sendDownstreamAudioStatsPacket() {
void AudioIOStats::publish() {
auto audioIO = DependencyManager::get<AudioClient>();
// call _receivedAudioStream's per-second callback
@ -122,10 +106,15 @@ void AudioIOStats::sendDownstreamAudioStatsPacket() {
return;
}
quint8 appendFlag = 0;
quint8 appendFlag = AudioStreamStats::START | AudioStreamStats::END;
quint16 numStreamStatsToPack = 1;
AudioStreamStats stats = _receivedAudioStream->getAudioStreamStats();
// update the interface
_interface->updateLocalBuffers(_inputMsRead, _inputMsUnplayed, _outputMsUnplayed, _packetTimegaps);
_interface->updateClientStream(stats);
// prepare a packet to the mixer
int statsPacketSize = sizeof(appendFlag) + sizeof(numStreamStatsToPack) + sizeof(stats);
auto statsPacket = NLPacket::create(PacketType::AudioStreamStats, statsPacketSize);
@ -137,7 +126,88 @@ void AudioIOStats::sendDownstreamAudioStatsPacket() {
// pack downstream audio stream stats
statsPacket->writePrimitive(stats);
// send packet
nodeList->sendPacket(std::move(statsPacket), *audioMixer);
}
// Passthrough QObject constructor; private — instances are created only by
// AudioStatsInterface (which is a friend).
AudioStreamStatsInterface::AudioStreamStatsInterface(QObject* parent) :
    QObject(parent) {}
// Copies one AudioStreamStats snapshot into the Qt properties. Each call is
// an AUDIO_PROPERTY setter, which only emits its Changed signal when the
// value actually differs. Timegap fields arrive in usecs and are converted
// to msecs here.
void AudioStreamStatsInterface::updateStream(const AudioStreamStats& stats) {
    lossRate(stats._packetStreamStats.getLostRate());
    lossCount(stats._packetStreamStats._lost);
    lossRateWindow(stats._packetStreamWindowStats.getLostRate());
    lossCountWindow(stats._packetStreamWindowStats._lost);
    framesDesired(stats._desiredJitterBufferFrames);
    framesAvailable(stats._framesAvailable);
    framesAvailableAvg(stats._framesAvailableAverage);
    unplayedMsMax(stats._unplayedMs);
    starveCount(stats._starveCount);
    lastStarveDurationCount(stats._consecutiveNotMixedCount);
    dropCount(stats._framesDropped);
    overflowCount(stats._overflowCount);
    timegapMsMax(stats._timeGapMax / USECS_PER_MSEC);
    timegapMsAvg(stats._timeGapAverage / USECS_PER_MSEC);
    timegapMsMaxWindow(stats._timeGapWindowMax / USECS_PER_MSEC);
    timegapMsAvgWindow(stats._timeGapWindowAverage / USECS_PER_MSEC);
}
// Creates the child stream interfaces; this object parents all of them, so
// Qt ownership handles their destruction. _injectors is a plain QObject whose
// dynamic properties hold one AudioStreamStatsInterface per injector UUID.
AudioStatsInterface::AudioStatsInterface(QObject* parent) :
    QObject(parent),
    _client(new AudioStreamStatsInterface(this)),
    _mixer(new AudioStreamStatsInterface(this)),
    _injectors(new QObject(this)) {}
// Refreshes the local-buffer properties from the client-side moving windows,
// plus the current ping to the audio mixer (left unchanged if no mixer is
// connected). Timegaps arrive in usecs and are converted to msecs.
void AudioStatsInterface::updateLocalBuffers(const MovingMinMaxAvg<float>& inputMsRead,
    const MovingMinMaxAvg<float>& inputMsUnplayed,
    const MovingMinMaxAvg<float>& outputMsUnplayed,
    const MovingMinMaxAvg<quint64>& timegaps) {
    if (SharedNodePointer audioNode = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer)) {
        pingMs(audioNode->getPingMs());
    }
    inputReadMsMax(inputMsRead.getWindowMax());
    inputUnplayedMsMax(inputMsUnplayed.getWindowMax());
    outputUnplayedMsMax(outputMsUnplayed.getWindowMax());
    sentTimegapMsMax(timegaps.getMax() / USECS_PER_MSEC);
    sentTimegapMsAvg(timegaps.getAverage() / USECS_PER_MSEC);
    sentTimegapMsMaxWindow(timegaps.getWindowMax() / USECS_PER_MSEC);
    sentTimegapMsAvgWindow(timegaps.getWindowAverage() / USECS_PER_MSEC);
}
// Syncs the _injectors QObject with the reported injector streams: each
// injector is a dynamic property keyed by its UUID string, holding an
// AudioStreamStatsInterface*. New injectors are added, existing ones updated,
// and any no longer reported are removed.
void AudioStatsInterface::updateInjectorStreams(const QHash<QUuid, AudioStreamStats>& stats) {
    // Get existing injectors
    auto injectorIds = _injectors->dynamicPropertyNames();
    // Go over reported injectors
    QHash<QUuid, AudioStreamStats>::const_iterator injector = stats.constBegin();
    while (injector != stats.constEnd()) {
        const auto id = injector.key().toByteArray();
        // Mark existing injector (those left will be removed)
        injectorIds.removeOne(id);
        auto injectorProperty = _injectors->property(id);
        // Add new injector
        if (!injectorProperty.isValid()) {
            injectorProperty = QVariant::fromValue(new AudioStreamStatsInterface(this));
            _injectors->setProperty(id, injectorProperty);
        }
        // Update property with reported injector
        injectorProperty.value<AudioStreamStatsInterface*>()->updateStream(injector.value());
        ++injector;
    }
    // Remove unreported injectors: deleteLater the interface object, then
    // clear the dynamic property (setting an invalid QVariant removes it)
    for (auto& id : injectorIds) {
        _injectors->property(id).value<AudioStreamStatsInterface*>()->deleteLater();
        _injectors->setProperty(id, QVariant());
    }
    emit injectorStreamsChanged();
}

View file

@ -22,44 +22,124 @@
class MixedProcessedAudioStream;
// Declares a Qt property NAME of TYPE with an inline getter, a setter that
// only emits NAME##Changed when the value actually changes, and a private
// _NAME member zero-initialized via (TYPE)0. NOTE: the macro leaves the
// enclosing class in a private: section.
#define AUDIO_PROPERTY(TYPE, NAME) \
Q_PROPERTY(TYPE NAME READ NAME NOTIFY NAME##Changed) \
public: \
TYPE NAME() const { return _##NAME; } \
void NAME(TYPE value) { \
if (_##NAME != value) { \
_##NAME = value; \
emit NAME##Changed(value); \
} \
} \
Q_SIGNAL void NAME##Changed(TYPE value); \
private: \
TYPE _##NAME{ (TYPE)0 };
// QML/script-facing view of one audio stream's statistics (mixer, client, or
// an injector). Properties are refreshed from an AudioStreamStats snapshot
// via updateStream(); each emits a Changed signal only on actual change.
class AudioStreamStatsInterface : public QObject {
    Q_OBJECT
    AUDIO_PROPERTY(float, lossRate)
    AUDIO_PROPERTY(float, lossCount)
    AUDIO_PROPERTY(float, lossRateWindow)
    AUDIO_PROPERTY(float, lossCountWindow)
    AUDIO_PROPERTY(int, framesDesired)
    AUDIO_PROPERTY(int, framesAvailable)
    AUDIO_PROPERTY(int, framesAvailableAvg)
    AUDIO_PROPERTY(float, unplayedMsMax)
    AUDIO_PROPERTY(int, starveCount)
    AUDIO_PROPERTY(int, lastStarveDurationCount)
    AUDIO_PROPERTY(int, dropCount)
    AUDIO_PROPERTY(int, overflowCount)
    AUDIO_PROPERTY(quint64, timegapMsMax)
    AUDIO_PROPERTY(quint64, timegapMsAvg)
    AUDIO_PROPERTY(quint64, timegapMsMaxWindow)
    AUDIO_PROPERTY(quint64, timegapMsAvgWindow)
public:
    // Copy a stats snapshot into the properties (timegaps converted to ms)
    void updateStream(const AudioStreamStats& stats);
private:
    // Constructible only by AudioStatsInterface
    friend class AudioStatsInterface;
    AudioStreamStatsInterface(QObject* parent);
};
// Top-level QML/script-facing audio statistics object (exposed as
// "AudioStats"): local buffer/ping properties plus child stream interfaces
// for the mixer stream, the client stream, and a dynamic-property container
// of injector streams. Populated only by AudioIOStats (friend).
class AudioStatsInterface : public QObject {
    Q_OBJECT
    AUDIO_PROPERTY(float, pingMs);
    AUDIO_PROPERTY(float, inputReadMsMax);
    AUDIO_PROPERTY(float, inputUnplayedMsMax);
    AUDIO_PROPERTY(float, outputUnplayedMsMax);
    AUDIO_PROPERTY(quint64, sentTimegapMsMax);
    AUDIO_PROPERTY(quint64, sentTimegapMsAvg);
    AUDIO_PROPERTY(quint64, sentTimegapMsMaxWindow);
    AUDIO_PROPERTY(quint64, sentTimegapMsAvgWindow);
    Q_PROPERTY(AudioStreamStatsInterface* mixerStream READ getMixerStream NOTIFY mixerStreamChanged);
    Q_PROPERTY(AudioStreamStatsInterface* clientStream READ getClientStream NOTIFY clientStreamChanged);
    // QObject whose dynamic properties map injector UUID -> stream interface
    Q_PROPERTY(QObject* injectorStreams READ getInjectorStreams NOTIFY injectorStreamsChanged);
public:
    AudioStreamStatsInterface* getMixerStream() const { return _mixer; }
    AudioStreamStatsInterface* getClientStream() const { return _client; }
    QObject* getInjectorStreams() const { return _injectors; }
    // Refresh local-buffer properties from the client-side moving windows
    void updateLocalBuffers(const MovingMinMaxAvg<float>& inputMsRead,
        const MovingMinMaxAvg<float>& inputMsUnplayed,
        const MovingMinMaxAvg<float>& outputMsUnplayed,
        const MovingMinMaxAvg<quint64>& timegaps);
    void updateMixerStream(const AudioStreamStats& stats) { _mixer->updateStream(stats); emit mixerStreamChanged(); }
    void updateClientStream(const AudioStreamStats& stats) { _client->updateStream(stats); emit clientStreamChanged(); }
    // Add/update/remove injector entries to match the reported hash
    void updateInjectorStreams(const QHash<QUuid, AudioStreamStats>& stats);
signals:
    void mixerStreamChanged();
    void clientStreamChanged();
    void injectorStreamsChanged();
private:
    // Constructible/updatable only by AudioIOStats
    friend class AudioIOStats;
    AudioStatsInterface(QObject* parent);
    AudioStreamStatsInterface* _client;  // this client's downstream stream
    AudioStreamStatsInterface* _mixer;   // this client's stream at the mixer
    QObject* _injectors;                 // dynamic-property container
};
// Collects interface audio statistics — local buffer timings, outbound
// packet timegaps, and mixer-reported stream stats — and publishes them
// through an AudioStatsInterface for QML consumption.
//
// NOTE(review): the original declaration contained merge-duplicated members
// (reset(), sentPacket(), the update*Ms helpers, _lastSentPacketTime and
// _receivedAudioStream each appeared twice, once in an old and once in a new
// form), which is ill-formed. The duplicates are collapsed here, keeping the
// const/mutable variants so stats can be recorded from const contexts.
// Confirm against callers before relying on the removed legacy accessors
// (getMixerDownstreamStats() et al.).
class AudioIOStats : public QObject {
    Q_OBJECT
public:
    AudioIOStats(MixedProcessedAudioStream* receivedAudioStream);

    // Reset all accumulated statistics.
    void reset();

    // QML-facing data object owned by this collector.
    AudioStatsInterface* data() const { return _interface; }

    // Record local buffer timings; const because the accumulators are
    // mutable, so holders of a const stats object can still record.
    void updateInputMsRead(float ms) const { _inputMsRead.update(ms); }
    void updateInputMsUnplayed(float ms) const { _inputMsUnplayed.update(ms); }
    void updateOutputMsUnplayed(float ms) const { _outputMsUnplayed.update(ms); }
    // Record the timegap since the previously sent packet.
    void sentPacket() const;

    // Push accumulated values into the AudioStatsInterface.
    void publish();

public slots:
    // Handle an AudioStreamStats packet arriving from the audio mixer.
    void processStreamStatsPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);

private:
    AudioStatsInterface* _interface;

    mutable MovingMinMaxAvg<float> _inputMsRead;
    mutable MovingMinMaxAvg<float> _inputMsUnplayed;
    mutable MovingMinMaxAvg<float> _outputMsUnplayed;
    mutable quint64 _lastSentPacketTime;
    mutable MovingMinMaxAvg<quint64> _packetTimegaps;

    MixedProcessedAudioStream* _receivedAudioStream;
    QHash<QUuid, AudioStreamStats> _injectorStreams;
};
#endif // hifi_AudioIOStats_h

View file

@ -16,6 +16,13 @@
class AudioStreamStats {
public:
// Intermediate packets should have no flag set
// Unique packets should have both flags set
enum AppendFlag : quint8 {
START = 1,
END = 2
};
AudioStreamStats()
: _streamType(-1),
_streamIdentifier(),

View file

@ -379,10 +379,6 @@ void HmdDisplayPlugin::updateFrameData() {
mat4 model = _presentHandPoses[i];
vec3 castStart = vec3(model[3]);
vec3 castDirection = glm::quat_cast(model) * laserDirection;
if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
castDirection = glm::normalize(castDirection);
castDirection = glm::inverse(_presentUiModelTransform.getRotation()) * castDirection;
}
// this offset needs to match GRAB_POINT_SPHERE_OFFSET in scripts/system/libraries/controllers.js:19
static const vec3 GRAB_POINT_SPHERE_OFFSET(0.04f, 0.13f, 0.039f); // x = upward, y = forward, z = lateral

View file

@ -201,10 +201,7 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
if (!_texture) {
auto webSurface = _webSurface;
auto recycler = [webSurface] (uint32_t recycleTexture, void* recycleFence) {
webSurface->releaseTexture({ recycleTexture, recycleFence });
};
_texture = gpu::TexturePointer(gpu::Texture::createExternal2D(recycler));
_texture = gpu::TexturePointer(gpu::Texture::createExternal2D(OffscreenQmlSurface::getDiscardLambda()));
_texture->setSource(__FUNCTION__);
}
OffscreenQmlSurface::TextureAndFence newTextureAndFence;

View file

@ -8,9 +8,8 @@
#include "OffscreenQmlSurface.h"
#include "Config.h"
#include <queue>
#include <set>
#include <map>
#include <unordered_set>
#include <unordered_map>
#include <QtWidgets/QWidget>
#include <QtQml/QtQml>
@ -37,9 +36,148 @@
#include "OffscreenGLCanvas.h"
#include "GLHelpers.h"
#include "GLLogging.h"
#include "TextureRecycler.h"
#include "Context.h"
// Per-size bookkeeping for offscreen QML textures: how many surfaces
// currently use this size, plus returned textures (with their fences)
// of this size awaiting reuse.
struct TextureSet {
// The number of surfaces with this size
size_t count { 0 };
std::list<OffscreenQmlSurface::TextureAndFence> returnedTextures;
};
// Pack a uvec2 into a single 64-bit map key: x in the high 32 bits,
// y in the low 32 bits.
uint64_t uvec2ToUint64(const uvec2& v) {
    return (static_cast<uint64_t>(v.x) << 32) | static_cast<uint64_t>(v.y);
}
// Process-wide pool of offscreen QML render textures, bucketed by size.
// getNextTexture()/acquireSize()/releaseSize() must run on the main (app)
// thread; releaseTexture() may be called from any thread and parks the
// texture for the next main-thread recycle() pass.
class OffscreenTextures {
public:
    // Hand out a texture of the given size, reusing a returned one when
    // available (after waiting on its fence), else creating a new one.
    GLuint getNextTexture(const uvec2& size) {
        assert(QThread::currentThread() == qApp->thread());
        recycle();

        ++_activeTextureCount;
        auto sizeKey = uvec2ToUint64(size);
        assert(_textures.count(sizeKey)); // acquireSize() must precede
        auto& textureSet = _textures[sizeKey];
        if (!textureSet.returnedTextures.empty()) {
            auto textureAndFence = textureSet.returnedTextures.front();
            textureSet.returnedTextures.pop_front();
            waitOnFence(static_cast<GLsync>(textureAndFence.second));
            return textureAndFence.first;
        }

        return createTexture(size);
    }

    // A surface of this size is going away; when the last one does,
    // destroy every pooled texture of that size.
    void releaseSize(const uvec2& size) {
        assert(QThread::currentThread() == qApp->thread());
        auto sizeKey = uvec2ToUint64(size);
        assert(_textures.count(sizeKey));
        auto& textureSet = _textures[sizeKey];
        if (0 == --textureSet.count) {
            for (const auto& textureAndFence : textureSet.returnedTextures) {
                destroy(textureAndFence);
            }
            _textures.erase(sizeKey);
        }
    }

    // A surface of this size now exists; creates the bucket on first use.
    void acquireSize(const uvec2& size) {
        assert(QThread::currentThread() == qApp->thread());
        auto sizeKey = uvec2ToUint64(size);
        auto& textureSet = _textures[sizeKey];
        ++textureSet.count;
    }

    // May be called on any thread
    void releaseTexture(const OffscreenQmlSurface::TextureAndFence & textureAndFence) {
        --_activeTextureCount;
        Lock lock(_mutex);
        _returnedTextures.push_back(textureAndFence);
    }

    // Throttled (every 5s) debug logging of pool occupancy.
    void report() {
        uint64_t now = usecTimestampNow();
        if ((now - _lastReport) > USECS_PER_SECOND * 5) {
            _lastReport = now;
            qCDebug(glLogging) << "Current offscreen texture count " << _allTextureCount;
            qCDebug(glLogging) << "Current offscreen active texture count " << _activeTextureCount;
        }
    }

private:
    // Block the GL server on the fence, then dispose of it.
    static void waitOnFence(GLsync fence) {
        glWaitSync(fence, 0, GL_TIMEOUT_IGNORED);
        glDeleteSync(fence);
    }

    void destroyTexture(GLuint texture) {
        --_allTextureCount;
        _textureSizes.erase(texture);
        glDeleteTextures(1, &texture);
    }

    void destroy(const OffscreenQmlSurface::TextureAndFence& textureAndFence) {
        waitOnFence(static_cast<GLsync>(textureAndFence.second));
        destroyTexture(textureAndFence.first);
    }

    GLuint createTexture(const uvec2& size) {
        // Need a new texture
        uint32_t newTexture;
        glGenTextures(1, &newTexture);
        ++_allTextureCount;
        _textureSizes[newTexture] = size;
        glBindTexture(GL_TEXTURE_2D, newTexture);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        // FIX: the WRAP_R and MAX_ANISOTROPY_EXT calls were each issued twice.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, 8.0f);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -0.2f);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, size.x, size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
        return newTexture;
    }

    void recycle() {
        assert(QThread::currentThread() == qApp->thread());
        // First handle any global returns
        std::list<OffscreenQmlSurface::TextureAndFence> returnedTextures;
        {
            Lock lock(_mutex);
            returnedTextures.swap(_returnedTextures);
        }

        for (const auto& textureAndFence : returnedTextures) {
            GLuint texture = textureAndFence.first;
            uvec2 size = _textureSizes[texture];
            auto sizeKey = uvec2ToUint64(size);
            // Textures can be returned after all surfaces of the given size have been destroyed,
            // in which case we just destroy the texture
            if (!_textures.count(sizeKey)) {
                destroy(textureAndFence);
                continue;
            }
            _textures[sizeKey].returnedTextures.push_back(textureAndFence);
        }
    }

    using Mutex = std::mutex;
    using Lock = std::unique_lock<Mutex>;
    // FIX: default-constructed std::atomic<int> is uninitialized before
    // C++20; report() would log indeterminate values without the { 0 }.
    std::atomic<int> _allTextureCount { 0 };
    std::atomic<int> _activeTextureCount { 0 };
    std::unordered_map<uint64_t, TextureSet> _textures;
    std::unordered_map<GLuint, uvec2> _textureSizes;
    Mutex _mutex;
    std::list<OffscreenQmlSurface::TextureAndFence> _returnedTextures;
    uint64_t _lastReport { 0 };
} offscreenTextures;
class UrlHandler : public QObject {
Q_OBJECT
@ -98,31 +236,10 @@ QNetworkAccessManager* QmlNetworkAccessManagerFactory::create(QObject* parent) {
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")
void OffscreenQmlSurface::setupFbo() {
_canvas->makeCurrent();
_textures.setSize(_size);
if (_depthStencil) {
glDeleteRenderbuffers(1, &_depthStencil);
_depthStencil = 0;
}
glGenRenderbuffers(1, &_depthStencil);
glBindRenderbuffer(GL_RENDERBUFFER, _depthStencil);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, _size.x, _size.y);
if (_fbo) {
glDeleteFramebuffers(1, &_fbo);
_fbo = 0;
}
glGenFramebuffers(1, &_fbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthStencil);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
_canvas->doneCurrent();
}
void OffscreenQmlSurface::cleanup() {
_canvas->makeCurrent();
_renderControl->invalidate();
delete _renderControl; // and invalidate
if (_depthStencil) {
@ -134,7 +251,8 @@ void OffscreenQmlSurface::cleanup() {
_fbo = 0;
}
_textures.clear();
offscreenTextures.releaseSize(_size);
_canvas->doneCurrent();
}
@ -148,26 +266,7 @@ void OffscreenQmlSurface::render() {
_renderControl->sync();
_quickWindow->setRenderTarget(_fbo, QSize(_size.x, _size.y));
// Clear out any pending textures to be returned
{
std::list<OffscreenQmlSurface::TextureAndFence> returnedTextures;
{
std::unique_lock<std::mutex> lock(_textureMutex);
returnedTextures.swap(_returnedTextures);
}
if (!returnedTextures.empty()) {
for (const auto& textureAndFence : returnedTextures) {
GLsync fence = static_cast<GLsync>(textureAndFence.second);
if (fence) {
glWaitSync(fence, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(fence);
}
_textures.recycleTexture(textureAndFence.first);
}
}
}
GLuint texture = _textures.getNextTexture();
GLuint texture = offscreenTextures.getNextTexture(_size);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, texture, 0);
PROFILE_RANGE("qml_render->rendercontrol")
@ -177,12 +276,11 @@ void OffscreenQmlSurface::render() {
glGenerateMipmap(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
{
std::unique_lock<std::mutex> lock(_textureMutex);
// If the most recent texture was unused, we can directly recycle it
if (_latestTextureAndFence.first) {
_textures.recycleTexture(_latestTextureAndFence.first);
glDeleteSync(static_cast<GLsync>(_latestTextureAndFence.second));
offscreenTextures.releaseTexture(_latestTextureAndFence);
_latestTextureAndFence = { 0, 0 };
}
@ -199,7 +297,6 @@ void OffscreenQmlSurface::render() {
bool OffscreenQmlSurface::fetchTexture(TextureAndFence& textureAndFence) {
textureAndFence = { 0, 0 };
std::unique_lock<std::mutex> lock(_textureMutex);
if (0 == _latestTextureAndFence.first) {
return false;
}
@ -210,20 +307,18 @@ bool OffscreenQmlSurface::fetchTexture(TextureAndFence& textureAndFence) {
return true;
}
void OffscreenQmlSurface::releaseTexture(const TextureAndFence& textureAndFence) {
std::unique_lock<std::mutex> lock(_textureMutex);
_returnedTextures.push_back(textureAndFence);
std::function<void(uint32_t, void*)> OffscreenQmlSurface::getDiscardLambda() {
return [](uint32_t texture, void* fence) {
offscreenTextures.releaseTexture({ texture, static_cast<GLsync>(fence) });
};
}
bool OffscreenQmlSurface::allowNewFrame(uint8_t fps) {
// If we already have a pending texture, don't render another one
// i.e. don't render faster than the consumer context, since it wastes
// GPU cycles on producing output that will never be seen
{
std::unique_lock<std::mutex> lock(_textureMutex);
if (0 != _latestTextureAndFence.first) {
return false;
}
if (0 != _latestTextureAndFence.first) {
return false;
}
auto minRenderInterval = USECS_PER_SECOND / fps;
@ -307,7 +402,6 @@ void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
}
_glData = ::getGLContextData();
_renderControl->initialize(_canvas->getContext());
setupFbo();
// When Quick says there is a need to render, we will not render immediately. Instead,
// a timer with a small interval is used to get better performance.
@ -367,9 +461,40 @@ void OffscreenQmlSurface::resize(const QSize& newSize_, bool forceResize) {
}
qCDebug(glLogging) << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
_canvas->makeCurrent();
// Release hold on the textures of the old size
if (uvec2() != _size) {
// If the most recent texture was unused, we can directly recycle it
if (_latestTextureAndFence.first) {
offscreenTextures.releaseTexture(_latestTextureAndFence);
_latestTextureAndFence = { 0, 0 };
}
offscreenTextures.releaseSize(_size);
}
_size = newOffscreenSize;
_textures.setSize(_size);
setupFbo();
// Acquire the new texture size
if (uvec2() != _size) {
offscreenTextures.acquireSize(_size);
if (_depthStencil) {
glDeleteRenderbuffers(1, &_depthStencil);
_depthStencil = 0;
}
glGenRenderbuffers(1, &_depthStencil);
glBindRenderbuffer(GL_RENDERBUFFER, _depthStencil);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, _size.x, _size.y);
if (!_fbo) {
glGenFramebuffers(1, &_fbo);
}
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthStencil);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
_canvas->doneCurrent();
}
QQuickItem* OffscreenQmlSurface::getRootItem() {
@ -421,7 +546,7 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
QString createGlobalEventBridgeStr = QTextStream(&createGlobalEventBridgeFile).readAll();
javaScriptToInject = webChannelStr + createGlobalEventBridgeStr;
} else {
qWarning() << "Unable to find qwebchannel.js or createGlobalEventBridge.js";
qCWarning(glLogging) << "Unable to find qwebchannel.js or createGlobalEventBridge.js";
}
QQmlContext* newContext = new QQmlContext(_qmlEngine, qApp);
@ -429,7 +554,7 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
if (_qmlComponent->isError()) {
QList<QQmlError> errorList = _qmlComponent->errors();
foreach(const QQmlError& error, errorList)
qWarning() << error.url() << error.line() << error;
qCWarning(glLogging) << error.url() << error.line() << error;
if (!_rootItem) {
qFatal("Unable to finish loading QML root");
}
@ -474,6 +599,7 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
}
void OffscreenQmlSurface::updateQuick() {
offscreenTextures.report();
// If we're
// a) not set up
// b) already rendering a frame

View file

@ -10,16 +10,16 @@
#define hifi_OffscreenQmlSurface_h
#include <atomic>
#include <queue>
#include <map>
#include <functional>
#include <QtCore/QJsonObject>
#include <QTimer>
#include <QUrl>
#include <QtCore/QTimer>
#include <QtCore/QUrl>
#include <GLMHelpers.h>
#include <ThreadHelpers.h>
#include "TextureRecycler.h"
class QWindow;
class QMyQuickRenderControl;
@ -30,6 +30,11 @@ class QQmlContext;
class QQmlComponent;
class QQuickWindow;
class QQuickItem;
// GPU resources are typically buffered for one copy being used by the renderer,
// one copy in flight, and one copy being used by the receiver
#define GPU_RESOURCE_BUFFER_SIZE 3
class OffscreenQmlSurface : public QObject {
Q_OBJECT
Q_PROPERTY(bool focusText READ isFocusText NOTIFY focusTextChanged)
@ -82,9 +87,8 @@ public:
// when the texture is safe to read.
// Returns false if no new texture is available
bool fetchTexture(TextureAndFence& textureAndFence);
// Release a previously acquired texture, along with a fence which indicates when reads from the
// texture have completed.
void releaseTexture(const TextureAndFence& textureAndFence);
static std::function<void(uint32_t, void*)> getDiscardLambda();
signals:
void focusObjectChanged(QObject* newFocus);
@ -133,14 +137,10 @@ private:
uint32_t _fbo { 0 };
uint32_t _depthStencil { 0 };
uint64_t _lastRenderTime { 0 };
uvec2 _size { 1920, 1080 };
TextureRecycler _textures { true };
uvec2 _size;
// Texture management
std::mutex _textureMutex;
TextureAndFence _latestTextureAndFence { 0, 0 };
std::list<TextureAndFence> _returnedTextures;
bool _render { false };
bool _polish { true };

View file

@ -1,91 +0,0 @@
//
// Created by Bradley Austin Davis on 2016-10-05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TextureRecycler.h"
#include "Config.h"
#include <set>
void TextureRecycler::setSize(const uvec2& size) {
if (size == _size) {
return;
}
_size = size;
while (!_readyTextures.empty()) {
_readyTextures.pop();
}
std::set<Map::key_type> toDelete;
std::for_each(_allTextures.begin(), _allTextures.end(), [&](Map::const_reference item) {
if (!item.second._active && item.second._size != _size) {
toDelete.insert(item.first);
}
});
std::for_each(toDelete.begin(), toDelete.end(), [&](Map::key_type key) {
_allTextures.erase(key);
});
}
void TextureRecycler::clear() {
while (!_readyTextures.empty()) {
_readyTextures.pop();
}
_allTextures.clear();
}
// Create one GL texture of the current size, register it in the tracking
// map, and push it onto the ready queue.
void TextureRecycler::addTexture() {
    uint32_t newTexture;
    glGenTextures(1, &newTexture);
    glBindTexture(GL_TEXTURE_2D, newTexture);
    if (_useMipmaps) {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    } else {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    }
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // FIX: the WRAP_R and MAX_ANISOTROPY_EXT calls were each issued twice.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, 8.0f);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -0.2f);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, _size.x, _size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
    // Construct the TexInfo in place: key is the GL name, value records
    // the name plus the size it was allocated at.
    _allTextures.emplace(std::piecewise_construct, std::forward_as_tuple(newTexture), std::forward_as_tuple(newTexture, _size));
    _readyTextures.push(newTexture);
}
// Hand out a ready texture of the current size, growing the pool to the
// configured minimum first and on demand afterwards.
uint32_t TextureRecycler::getNextTexture() {
    while (_allTextures.size() < _textureCount) {
        addTexture();
    }
    if (_readyTextures.empty()) {
        addTexture();
    }

    uint32_t tex = _readyTextures.front();
    _readyTextures.pop();
    _allTextures[tex]._active = true;
    return tex;
}
// Return a previously handed-out texture to the pool. Textures whose
// allocation size no longer matches the current size are dropped from
// the map instead of being requeued.
void TextureRecycler::recycleTexture(GLuint texture) {
Q_ASSERT(_allTextures.count(texture));
auto& item = _allTextures[texture];
Q_ASSERT(item._active);
item._active = false;
if (item._size != _size) {
// Buh-bye
_allTextures.erase(texture);
return;
}
_readyTextures.push(item._tex);
}

View file

@ -1,54 +0,0 @@
//
// Created by Bradley Austin Davis on 2015-04-04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_TextureRecycler_h
#define hifi_TextureRecycler_h
#include <atomic>
#include <queue>
#include <map>
#include <GLMHelpers.h>
// GPU resources are typically buffered for one copy being used by the renderer,
// one copy in flight, and one copy being used by the receiver
#define GPU_RESOURCE_BUFFER_SIZE 3
// Pool of reusable GL textures that all share one current size; keeps a
// minimum of GPU_RESOURCE_BUFFER_SIZE textures alive and grows on demand.
// NOTE(review): setTextureCount() is declared but no definition is visible
// in the accompanying .cpp — confirm it exists before calling.
class TextureRecycler {
public:
TextureRecycler(bool useMipmaps) : _useMipmaps(useMipmaps) {}
void setSize(const uvec2& size);
void setTextureCount(uint8_t textureCount);
void clear();
uint32_t getNextTexture();
void recycleTexture(uint32_t texture);
private:
void addTexture();
// Per-texture record: GL name, allocation size, and whether it is
// currently handed out to a consumer.
struct TexInfo {
const uint32_t _tex{ 0 };
const uvec2 _size;
bool _active { false };
TexInfo() {}
TexInfo(uint32_t tex, const uvec2& size) : _tex(tex), _size(size) {}
TexInfo(const TexInfo& other) : _tex(other._tex), _size(other._size) {}
};
using Map = std::map<uint32_t, TexInfo>;
using Queue = std::queue<uint32_t>;
Map _allTextures;
Queue _readyTextures;
uvec2 _size{ 1920, 1080 };
bool _useMipmaps;
uint8_t _textureCount { GPU_RESOURCE_BUFFER_SIZE };
};
#endif

View file

@ -115,6 +115,7 @@ namespace gpu {
GPUObject* getGPUObject() const { return _gpuObject.get(); }
friend class Backend;
friend class Texture;
};
namespace gl {

View file

@ -287,6 +287,22 @@ Texture::Texture():
// Destructor. For external (client-supplied) textures, drain any pending
// external updates back through the recycler, then destroy the backend GL
// object while the recycler is still alive.
Texture::~Texture()
{
_textureCPUCount--;
if (getUsage().isExternal()) {
// Swap the queued updates out under the lock, then invoke the
// recycler on each texture/fence pair outside the lock.
Texture::ExternalUpdates externalUpdates;
{
Lock lock(_externalMutex);
_externalUpdates.swap(externalUpdates);
}
for (const auto& update : externalUpdates) {
assert(_externalRecycler);
_externalRecycler(update.first, update.second);
}
// Force the GL object to be destroyed here
// If we let the normal destructor do it, then it will be
// cleared after the _externalRecycler has been destroyed,
// resulting in leaked texture memory
gpuObject.setGPUObject(nullptr);
}
}
Texture::Size Texture::resize(Type type, const Element& texelFormat, uint16 width, uint16 height, uint16 depth, uint16 numSamples, uint16 numSlices) {
@ -935,8 +951,20 @@ Vec3u Texture::evalMipDimensions(uint16 level) const {
return glm::max(dimensions, Vec3u(1));
}
// Install the callback used to hand external texture/fence pairs back to
// their producer; guarded by _externalMutex against concurrent access.
void Texture::setExternalRecycler(const ExternalRecycler& recycler) {
Lock lock(_externalMutex);
_externalRecycler = recycler;
}
// Return a copy of the recycler callback, taken under the lock so the
// copy is consistent even if setExternalRecycler() runs concurrently.
Texture::ExternalRecycler Texture::getExternalRecycler() const {
    Lock lock(_externalMutex);
    return _externalRecycler;
}
// Queue a new external texture (and its sync fence) for adoption; a
// recycler must already be installed to eventually return the old one.
void Texture::setExternalTexture(uint32 externalId, void* externalFence) {
Lock lock(_externalMutex);
assert(_externalRecycler);
_externalUpdates.push_back({ externalId, externalFence });
}

View file

@ -466,8 +466,8 @@ public:
void notifyMipFaceGPULoaded(uint16 level, uint8 face = 0) const { return _storage->notifyMipFaceGPULoaded(level, face); }
void setExternalTexture(uint32 externalId, void* externalFence);
// FIX: the old unlocked inline definitions were left alongside the new
// out-of-line declarations (merge artifact), producing duplicate member
// declarations. Keep declarations only; the mutex-guarded definitions
// live in Texture.cpp.
void setExternalRecycler(const ExternalRecycler& recycler);
ExternalRecycler getExternalRecycler() const;
const GPUObjectPointer gpuObject {};

View file

@ -23,7 +23,13 @@
#include "AccountManager.h"
const QString HIFI_URL_SCHEME = "hifi";
#if USE_STABLE_GLOBAL_SERVICES
const QString DEFAULT_HIFI_ADDRESS = "hifi://welcome";
#else
const QString DEFAULT_HIFI_ADDRESS = "hifi://dev-welcome";
#endif
const QString SANDBOX_HIFI_ADDRESS = "hifi://localhost";
const QString INDEX_PATH = "/";

View file

@ -76,7 +76,8 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::InjectAudio:
case PacketType::MicrophoneAudioNoEcho:
case PacketType::MicrophoneAudioWithEcho:
return static_cast<PacketVersion>(AudioVersion::Exactly10msAudioPackets);
case PacketType::AudioStreamStats:
return static_cast<PacketVersion>(AudioVersion::TerminatingStreamStats);
default:
return 17;

View file

@ -227,7 +227,8 @@ enum class DomainListVersion : PacketVersion {
// Audio wire-protocol versions; versionForPacketType() reports the newest
// applicable value per packet type.
// FIX: the merged view listed Exactly10msAudioPackets twice (the first
// occurrence missing its comma) — a syntax error and duplicate enumerator.
enum class AudioVersion : PacketVersion {
    HasCompressedAudio = 17,
    CodecNameInAudioPackets,
    Exactly10msAudioPackets,
    TerminatingStreamStats,
};
#endif // hifi_PacketHeaders_h

View file

@ -524,7 +524,7 @@ void CharacterController::computeNewVelocity(btScalar dt, glm::vec3& velocity) {
}
void CharacterController::preSimulation() {
if (_enabled && _dynamicsWorld) {
if (_enabled && _dynamicsWorld && _rigidBody) {
quint64 now = usecTimestampNow();
// slam body to where it is supposed to be
@ -632,9 +632,10 @@ void CharacterController::preSimulation() {
// Capture the rigid body's post-step velocity change.
// FIX: the merged view kept both the old unguarded read of _rigidBody and
// the new guarded copy; only the guarded version is valid — dereferencing
// _rigidBody when it is null (or the controller is disabled) would crash.
void CharacterController::postSimulation() {
    if (_enabled && _dynamicsWorld && _rigidBody) {
        btVector3 velocity = _rigidBody->getLinearVelocity();
        _velocityChange = velocity - _preSimulationVelocity;
    }
}

View file

@ -0,0 +1,105 @@
// webSpawnTool.js
//
// Stress tests the rendering of web surfaces over time
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Spawns NUM_ENTITIES Web entities in batches around the avatar to stress
// web-surface rendering, then despawns them when the script ends.
// Fixes: removed the unused `that`/thisArg in clear(), the unused
// keepAliveTimer, and switched loose == to === for the name comparison.
ENTITY_SPAWNER = function (properties) {
    properties = properties || {};
    var RADIUS = properties.radius || 5.0;                // Spawn within this radius (square)
    var TEST_ENTITY_NAME = properties.entityName || "WebEntitySpawnTest";
    var NUM_ENTITIES = properties.count || 10000;         // number of entities to spawn
    var ENTITY_SPAWN_LIMIT = properties.spawnLimit || 1;  // max entities per batch
    var ENTITY_SPAWN_INTERVAL = properties.spawnInterval || properties.interval || 2; // seconds between batches
    var ENTITY_LIFETIME = properties.lifetime || 10;      // Entity timeout (when/if we crash, we need the entities to delete themselves)

    // Wrap an entity id in a small handle with destroy/getAge helpers.
    function makeEntity(properties) {
        var entity = Entities.addEntity(properties);
        return {
            destroy: function () {
                Entities.deleteEntity(entity)
            },
            getAge: function () {
                return Entities.getEntityProperties(entity).age;
            }
        };
    }

    // Random point on the XZ plane within a square of half-width `radius`.
    function randomPositionXZ(center, radius) {
        return {
            x: center.x + (Math.random() * radius * 2.0) - radius,
            y: center.y,
            z: center.z + (Math.random() * radius * 2.0) - radius
        };
    }

    var entities = [];
    var entitiesToCreate = 0;
    var entitiesSpawned = 0;
    var spawnTimer = 0.0;

    // Delete any leftover test entities from a previous (possibly crashed) run.
    function clear() {
        var ids = Entities.findEntities(MyAvatar.position, 50);
        ids.forEach(function (id) {
            var properties = Entities.getEntityProperties(id);
            if (properties.name === TEST_ENTITY_NAME) {
                Entities.deleteEntity(id);
            }
        });
    }

    // Begin spawning on each script update tick.
    function createEntities() {
        entitiesToCreate = NUM_ENTITIES;
        Script.update.connect(spawnEntities);
    }

    // Per-tick: spawn up to ENTITY_SPAWN_LIMIT entities every
    // ENTITY_SPAWN_INTERVAL seconds until the quota is reached.
    function spawnEntities(dt) {
        if (entitiesToCreate <= 0) {
            Script.update.disconnect(spawnEntities);
            print("Finished spawning entities");
        } else if ((spawnTimer -= dt) < 0.0) {
            spawnTimer = ENTITY_SPAWN_INTERVAL;
            var n = Math.min(entitiesToCreate, ENTITY_SPAWN_LIMIT);
            print("Spawning " + n + " entities (" + (entitiesSpawned += n) + ")");
            entitiesToCreate -= n;

            // Center the batch in front of the avatar.
            var center = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: RADIUS * -1.5 }));
            for (; n > 0; --n) {
                entities.push(makeEntity({
                    type: "Web",
                    sourceUrl: "https://www.reddit.com/r/random/",
                    name: TEST_ENTITY_NAME,
                    position: randomPositionXZ(center, RADIUS),
                    rotation: MyAvatar.orientation,
                    dimensions: { x: .8 + Math.random() * 0.8, y: 0.45 + Math.random() * 0.45, z: 0.01 },
                    lifetime: ENTITY_LIFETIME
                }));
            }
        }
    }

    function despawnEntities() {
        print("despawning entities");
        entities.forEach(function (entity) {
            entity.destroy();
        });
        entities = [];
    }

    // One-shot init on the first update tick: clean up, start spawning,
    // and register the end-of-script teardown.
    function init() {
        Script.update.disconnect(init);
        clear();
        createEntities();
        Script.scriptEnding.connect(despawnEntities);
    }

    Script.update.connect(init);
};

ENTITY_SPAWNER();

View file

@ -0,0 +1,32 @@
//
// Jitter.qml
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.3
// Displays jitter as the spread between the window's max and average
// timegap (graphable), plus the bare average as a static value.
ColumnLayout {
id: jitter
property var max            // window maximum timegap
property var avg            // window average timegap
property bool showGraphs: false
MovingValue {
label: "Jitter"
color: "red"
source: max - avg
showGraphs: jitter.showGraphs
}
Value {
label: "Average"
source: avg
}
}

View file

@ -0,0 +1,51 @@
//
// MovingValue.qml
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.3
import "../lib/plotperf"
// A labeled numeric readout that swaps between a fixed-width text value
// and a PlotPerf graph depending on showGraphs.
RowLayout {
id: value
property string label
property var source            // numeric value to display or plot
property string unit: "ms"
property bool showGraphs: false
property color color: "darkslategrey"
property int decimals: 0
width: parent.width
// Row label
Label {
Layout.preferredWidth: 100
color: value.color
text: value.label
}
// Text readout (when graphs are hidden)
Label {
visible: !value.showGraphs
Layout.preferredWidth: 50
horizontalAlignment: Text.AlignRight
color: value.color
text: value.source.toFixed(decimals) + ' ' + unit
}
// Graph readout (when graphs are shown)
PlotPerf {
visible: value.showGraphs
Layout.fillWidth: true
height: 70
valueUnit: value.unit
valueNumDigits: 0
backgroundOpacity: 0.2
plots: [{ binding: "source", color: value.color }]
}
}

View file

@ -0,0 +1,56 @@
//
// Section.qml
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.3
// Bordered section container with a bold header; hovering the header
// swaps the label for the section's description. Body content is
// supplied through the `control` component alias.
Rectangle {
id: section
property string label: "Section"
property string description: "Description"
property alias control : loader.sourceComponent
width: parent.width
// content height plus the border and spacing on both sides
height: content.height + border.width * 2 + content.spacing * 2
border.color: "black"
border.width: 5
radius: border.width * 2
ColumnLayout {
id: content
x: section.radius; y: section.radius
spacing: section.border.width
width: section.width - 2 * x
// label
Label {
Layout.alignment: Qt.AlignCenter
text: hoverArea.containsMouse ? section.description : section.label
font.bold: true
MouseArea {
id: hoverArea
anchors.fill: parent
hoverEnabled: true
}
}
// spacer
Item { }
// control
Loader {
id: loader
Layout.preferredWidth: parent.width
}
}
}

View file

@ -0,0 +1,69 @@
//
// Stream.qml
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.3
// Detailed readout for a single audio stream: ring-buffer depth, jitter,
// and packet loss. `stream` is presumably an AudioStreamStatsInterface —
// it must expose the frames*/unplayedMsMax/timegap*/loss* properties read
// below.
ColumnLayout {
id: root
property var stream
property bool showGraphs: false
// Ring buffer section
Label {
Layout.alignment: Qt.AlignCenter
text: "Ring Buffer"
font.italic: true
}
MovingValue {
label: "Minimum Depth"
color: "limegreen"
source: stream.framesDesired
unit: "frames"
showGraphs: root.showGraphs
}
MovingValue {
label: "Buffer Depth"
color: "darkblue"
source: stream.unplayedMsMax
showGraphs: root.showGraphs
}
Value {
label: "Available (avg)"
source: stream.framesAvailable + " (" + stream.framesAvailableAvg + ") frames"
}
// Jitter section
Label {
Layout.alignment: Qt.AlignCenter
text: "Jitter"
font.italic: true
}
Jitter {
max: stream.timegapMsMaxWindow
avg: stream.timegapMsAvgWindow
showGraphs: root.showGraphs
}
// Packet loss section
Label {
Layout.alignment: Qt.AlignCenter
text: "Packet Loss"
font.italic: true
}
Value {
label: "Window"
source: stream.lossRateWindow.toFixed(2) + "% (" + stream.lossCountWindow + " lost)"
}
Value {
label: "Overall"
color: "dimgrey"
source: stream.lossRate.toFixed(2) + "% (" + stream.lossCount + " lost)"
}
}

View file

@ -0,0 +1,36 @@
//
// Value.qml
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.3
// Single labeled statistic row: a fixed-width caption followed by its value.
RowLayout {
    id: value
    // Caption shown in the left column.
    property string label
    // Value shown in the right column; var so callers may bind strings or numbers.
    property var source
    // Text color applied to both labels.
    property color color: "darkslategrey"
    width: parent.width
    // Fixed pixel width of the caption column, so values line up across rows.
    property int dataPixelWidth: 150
    Label {
        Layout.preferredWidth: dataPixelWidth
        color: value.color
        text: value.label
    }
    Label {
        // NOTE(review): preferredWidth of 0 with right alignment — presumably lets
        // the layout give this column only its implicit width; confirm intent.
        Layout.preferredWidth: 0
        horizontalAlignment: Text.AlignRight
        color: value.color
        text: value.source
    }
}

View file

@ -0,0 +1,25 @@
//
// stats.js
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// Window geometry. stats.qml may be too large for some screens, so the
// height is kept modest; the offset keeps the window away from the corner.
// (Fix: INITIAL_WIDTH was previously declared as 400 but never used — the
// width was hard-coded to 500 in the OverlayWindow call below.)
var INITIAL_WIDTH = 500;
var INITIAL_HEIGHT = 520;
var INITIAL_OFFSET = 50;

// Set up the qml ui
var qml = Script.resolvePath('stats.qml');
var window = new OverlayWindow({
    title: 'Audio Interface Statistics',
    source: qml,
    width: INITIAL_WIDTH, height: INITIAL_HEIGHT
});
window.setPosition(INITIAL_OFFSET, INITIAL_OFFSET);

// Stop the script when the window is closed so a reopen starts fresh.
window.closed.connect(function() { Script.stop(); });

View file

@ -0,0 +1,95 @@
//
// stats.qml
// scripts/developer/utilities/audio
//
// Created by Zach Pomerantz on 9/22/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.3
// Top-level audio statistics dashboard: a graphs toggle above a two-column
// grid of Sections (latency + upstream jitter on the left, per-stream
// mixer/client stats on the right). Reads from the AudioStats context object.
Column {
    id: stats
    width: parent.width
    height: parent.height
    // Mirrors the toggle button; propagated into every child that can graph.
    property bool showGraphs: toggleGraphs.checked
    RowLayout {
        width: parent.width
        height: 30
        Button {
            id: toggleGraphs
            // Hand-rolled checked state (Controls 1.4 Button has no 'checked'
            // unless checkable; this property backs the toggle manually).
            property bool checked: false
            Layout.alignment: Qt.AlignCenter
            text: checked ? "Hide graphs" : "Show graphs"
            onClicked: function() { checked = !checked; }
        }
    }
    Grid {
        width: parent.width
        height: parent.height - 30
        // Left column: end-to-end latency breakdown and upstream jitter.
        Column {
            width: parent.width / 2
            height: parent.height
            Section {
                label: "Latency"
                description: "Audio pipeline latency, broken out and summed"
                control: ColumnLayout {
                    MovingValue { label: "Input Read"; source: AudioStats.inputReadMsMax; showGraphs: stats.showGraphs }
                    MovingValue { label: "Input Ring"; source: AudioStats.inputUnplayedMsMax; showGraphs: stats.showGraphs }
                    // Round-trip ping halved to approximate one-way latency.
                    MovingValue { label: "Network (up)"; source: AudioStats.pingMs / 2; showGraphs: stats.showGraphs; decimals: 1 }
                    MovingValue { label: "Mixer Ring"; source: AudioStats.mixerStream.unplayedMsMax; showGraphs: stats.showGraphs }
                    MovingValue { label: "Network (down)"; source: AudioStats.pingMs / 2; showGraphs: stats.showGraphs; decimals: 1 }
                    MovingValue { label: "Output Ring"; source: AudioStats.clientStream.unplayedMsMax; showGraphs: stats.showGraphs }
                    MovingValue { label: "Output Read"; source: AudioStats.outputUnplayedMsMax; showGraphs: stats.showGraphs }
                    // Sum of every stage above (full pingMs covers both network legs).
                    MovingValue { label: "TOTAL"; color: "black"; showGraphs: stats.showGraphs
                        source: AudioStats.inputReadMsMax +
                                AudioStats.inputUnplayedMsMax +
                                AudioStats.outputUnplayedMsMax +
                                AudioStats.mixerStream.unplayedMsMax +
                                AudioStats.clientStream.unplayedMsMax +
                                AudioStats.pingMs
                    }
                }
            }
            Section {
                label: "Upstream Jitter"
                description: "Timegaps in packets sent to the mixer"
                control: Jitter {
                    max: AudioStats.sentTimegapMsMaxWindow
                    avg: AudioStats.sentTimegapMsAvgWindow
                    showGraphs: stats.showGraphs
                }
            }
        }
        // Right column: full Stream readouts for the mixer and client streams.
        Column {
            width: parent.width / 2
            height: parent.height
            Section {
                label: "Mixer (upstream)"
                description: "This client's remote audio stream, as seen by the server's mixer"
                control: Stream { stream: AudioStats.mixerStream; showGraphs: stats.showGraphs }
            }
            Section {
                label: "Client (downstream)"
                description: "This client's received audio stream, between the network and the OS"
                control: Stream { stream: AudioStats.clientStream; showGraphs: stats.showGraphs }
            }
        }
    }
}

View file

@ -16,14 +16,14 @@ Item {
width: parent.width
height: 100
property int hitboxExtension : 20
// The title of the graph
property string title
// The object used as the default source object for the prop plots
property var object
property var backgroundOpacity: 0.6
// Plots is an array of plot descriptor
// a default plot descriptor expects the following object:
// prop: [ {
@ -55,9 +55,17 @@ Item {
function createValues() {
for (var i =0; i < plots.length; i++) {
var plot = plots[i];
var object = plot["object"] || root.object;
var value = plot["prop"];
var isBinding = plot["binding"];
if (isBinding) {
object = root.parent;
value = isBinding;
}
_values.push( {
object: (plot["object"] !== undefined ? plot["object"] : root.object),
value: plot["prop"],
object: object,
value: value,
fromBinding: isBinding,
valueMax: 1,
numSamplesConstantMax: 0,
valueHistory: new Array(),
@ -179,7 +187,7 @@ Item {
ctx.fillText(text, 0, lineHeight);
}
function displayBackground(ctx) {
ctx.fillStyle = Qt.rgba(0, 0, 0, 0.6);
ctx.fillStyle = Qt.rgba(0, 0, 0, root.backgroundOpacity);
ctx.fillRect(0, 0, width, height);
ctx.strokeStyle= "grey";
@ -210,15 +218,9 @@ Item {
MouseArea {
id: hitbox
anchors.fill:mycanvas
anchors.topMargin: -hitboxExtension
anchors.bottomMargin: -hitboxExtension
anchors.leftMargin: -hitboxExtension
anchors.rightMargin: -hitboxExtension
anchors.fill: mycanvas
onClicked: {
print("PerfPlot clicked!")
resetMax();
}
}

View file

@ -54,7 +54,6 @@ var AWAY_INTRO = {
var isEnabled = true;
var wasMuted; // unknonwn?
var isAway = false; // we start in the un-away state
var wasOverlaysVisible = Menu.isOptionChecked("Overlays");
var eventMappingName = "io.highfidelity.away"; // goActive on hand controller button events, too.
var eventMapping = Controller.newMapping(eventMappingName);
var avatarPosition = MyAvatar.position;
@ -180,12 +179,6 @@ function goAway(fromStartup) {
HMD.requestShowHandControllers();
// remember the View > Overlays state...
wasOverlaysVisible = Menu.isOptionChecked("Overlays");
// show overlays so that people can see the "Away" message
Menu.setIsOptionChecked("Overlays", true);
// tell the Reticle, we want to stop capturing the mouse until we come back
Reticle.allowMouseCapture = false;
// Allow users to find their way to other applications, our menus, etc.
@ -237,9 +230,6 @@ function goActive() {
hideOverlay();
// restore overlays state to what it was when we went "away"
Menu.setIsOptionChecked("Overlays", wasOverlaysVisible);
// tell the Reticle, we are ready to capture the mouse again and it should be visible
Reticle.allowMouseCapture = true;
Reticle.visible = true;

View file

@ -16,6 +16,8 @@ HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
SPACE_LOCAL = "local";
SPACE_WORLD = "world";
Script.include("./controllers.js");
function objectTranslationPlanePoint(position, dimensions) {
var newPosition = { x: position.x, y: position.y, z: position.z };
newPosition.y -= dimensions.y / 2.0;
@ -1046,12 +1048,11 @@ SelectionDisplay = (function() {
that.triggerMapping.from(Controller.Standard.RT).peek().to(makeTriggerHandler(Controller.Standard.RightHand));
that.triggerMapping.from(Controller.Standard.LT).peek().to(makeTriggerHandler(Controller.Standard.LeftHand));
function controllerComputePickRay() {
var controllerPose = Controller.getPoseValue(activeHand);
var controllerPose = getControllerWorldLocation(activeHand, true);
if (controllerPose.valid && that.triggered) {
var controllerPosition = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, controllerPose.translation),
MyAvatar.position);
var controllerPosition = controllerPose.translation;
// This gets point direction right, but if you want general quaternion it would be more complicated:
var controllerDirection = Quat.getUp(Quat.multiply(MyAvatar.orientation, controllerPose.rotation));
var controllerDirection = Quat.getUp(controllerPose.rotation);
return {origin: controllerPosition, direction: controllerDirection};
}
}