Merge branch 'master' of github.com:highfidelity/hifi into qml-keyboard

This commit is contained in:
Seth Alves 2016-09-20 13:28:20 -07:00
commit 2468b56079
53 changed files with 735 additions and 524 deletions

View file

@ -270,6 +270,7 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
downstreamStats["desired"] = streamStats._desiredJitterBufferFrames;
downstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
downstreamStats["available"] = (double) streamStats._framesAvailable;
downstreamStats["unplayed"] = (double) streamStats._unplayedMs;
downstreamStats["starves"] = (double) streamStats._starveCount;
downstreamStats["not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
downstreamStats["overflows"] = (double) streamStats._overflowCount;
@ -294,6 +295,7 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
upstreamStats["desired_calc"] = avatarAudioStream->getCalculatedJitterBufferFrames();
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
upstreamStats["available"] = (double) streamStats._framesAvailable;
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
upstreamStats["starves"] = (double) streamStats._starveCount;
upstreamStats["not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
upstreamStats["overflows"] = (double) streamStats._overflowCount;
@ -323,6 +325,7 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
upstreamStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
upstreamStats["available"] = (double) streamStats._framesAvailable;
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
upstreamStats["starves"] = (double) streamStats._starveCount;
upstreamStats["not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
upstreamStats["overflows"] = (double) streamStats._overflowCount;

View file

@ -18,6 +18,13 @@ macro(SET_PACKAGING_PARAMETERS)
set(RELEASE_TYPE $ENV{RELEASE_TYPE})
set(RELEASE_NUMBER $ENV{RELEASE_NUMBER})
string(TOLOWER "$ENV{BRANCH}" BUILD_BRANCH)
set(BUILD_GLOBAL_SERVICES "DEVELOPMENT")
set(USE_STABLE_GLOBAL_SERVICES FALSE)
message(STATUS "The BUILD_BRANCH variable is: ${BUILD_BRANCH}")
message(STATUS "The BRANCH environment variable is: $ENV{BRANCH}")
message(STATUS "The RELEASE_TYPE variable is: ${RELEASE_TYPE}")
if (RELEASE_TYPE STREQUAL "PRODUCTION")
set(DEPLOY_PACKAGE TRUE)
@ -31,6 +38,14 @@ macro(SET_PACKAGING_PARAMETERS)
# add definition for this release type
add_definitions(-DPRODUCTION_BUILD)
# if the build is a PRODUCTION_BUILD from the "stable" branch
# then use the STABLE global services
if (BUILD_BRANCH STREQUAL "stable")
message(STATUS "The RELEASE_TYPE is PRODUCTION and the BUILD_BRANCH is stable...")
set(BUILD_GLOBAL_SERVICES "STABLE")
set(USE_STABLE_GLOBAL_SERVICES TRUE)
endif()
elseif (RELEASE_TYPE STREQUAL "PR")
set(DEPLOY_PACKAGE TRUE)
set(PR_BUILD 1)
@ -132,6 +147,10 @@ macro(SET_PACKAGING_PARAMETERS)
set(CLIENT_COMPONENT client)
set(SERVER_COMPONENT server)
# print out some results for testing this new build feature
message(STATUS "The BUILD_GLOBAL_SERVICES variable is: ${BUILD_GLOBAL_SERVICES}")
message(STATUS "The USE_STABLE_GLOBAL_SERVICES variable is: ${USE_STABLE_GLOBAL_SERVICES}")
# create a header file our targets can use to find out the application version
file(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/includes")
configure_file("${HF_CMAKE_DIR}/templates/BuildInfo.h.in" "${CMAKE_BINARY_DIR}/includes/BuildInfo.h")

View file

@ -9,7 +9,12 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#define USE_STABLE_GLOBAL_SERVICES @USE_STABLE_GLOBAL_SERVICES@
namespace BuildInfo {
const QString MODIFIED_ORGANIZATION = "@BUILD_ORGANIZATION@";
const QString VERSION = "@BUILD_VERSION@";
const QString BUILD_BRANCH = "@BUILD_BRANCH@";
const QString BUILD_GLOBAL_SERVICES = "@BUILD_GLOBAL_SERVICES@";
}

View file

@ -600,6 +600,9 @@ Section "-Core installation"
Delete "$INSTDIR\version"
Delete "$INSTDIR\xinput1_3.dll"
;Delete old Qt files
Delete "$INSTDIR\audio\qtaudio_windows.dll"
; Delete old desktop shortcuts before they were renamed during Sandbox rename
Delete "$DESKTOP\@PRE_SANDBOX_INTERFACE_SHORTCUT_NAME@.lnk"
Delete "$DESKTOP\@PRE_SANDBOX_CONSOLE_SHORTCUT_NAME@.lnk"

View file

@ -45,7 +45,11 @@
int const DomainServer::EXIT_CODE_REBOOT = 234923;
#if USE_STABLE_GLOBAL_SERVICES
const QString ICE_SERVER_DEFAULT_HOSTNAME = "ice.highfidelity.com";
#else
const QString ICE_SERVER_DEFAULT_HOSTNAME = "dev-ice.highfidelity.com";
#endif
DomainServer::DomainServer(int argc, char* argv[]) :
QCoreApplication(argc, argv),
@ -79,6 +83,14 @@ DomainServer::DomainServer(int argc, char* argv[]) :
qDebug() << "Setting up domain-server";
qDebug() << "[VERSION] Build sequence:" << qPrintable(applicationVersion());
qDebug() << "[VERSION] MODIFIED_ORGANIZATION:" << BuildInfo::MODIFIED_ORGANIZATION;
qDebug() << "[VERSION] VERSION:" << BuildInfo::VERSION;
qDebug() << "[VERSION] BUILD_BRANCH:" << BuildInfo::BUILD_BRANCH;
qDebug() << "[VERSION] BUILD_GLOBAL_SERVICES:" << BuildInfo::BUILD_GLOBAL_SERVICES;
qDebug() << "[VERSION] We will be using this default ICE server:" << ICE_SERVER_DEFAULT_HOSTNAME;
// make sure we have a fresh AccountManager instance
// (need this since domain-server can restart itself and maintain static variables)
DependencyManager::set<AccountManager>();

View file

@ -139,7 +139,10 @@ Window {
buttonState: 1
defaultState: 1
hoverState: 2
onClicked: addressBarDialog.loadHome();
onClicked: {
addressBarDialog.loadHome();
root.shown = false;
}
anchors {
left: parent.left
leftMargin: homeButton.width / 2

View file

@ -199,7 +199,6 @@ ScrollingWindow {
return i;
}
}
console.warn("Could not find tab for " + source);
return -1;
}
@ -234,7 +233,6 @@ ScrollingWindow {
return i;
}
}
console.warn("Could not find free tab");
return -1;
}
@ -261,7 +259,6 @@ ScrollingWindow {
var existingTabIndex = findIndexForUrl(properties.source);
if (existingTabIndex >= 0) {
console.log("Existing tab " + existingTabIndex + " found with URL " + properties.source);
var tab = tabView.getTab(existingTabIndex);
return tab.item;
}
@ -284,16 +281,13 @@ ScrollingWindow {
var tab = tabView.getTab(freeTabIndex);
tab.title = properties.title || "Unknown";
tab.enabled = true;
console.log("New tab URL: " + properties.source)
tab.originalUrl = properties.source;
var eventBridge = properties.eventBridge;
console.log("Event bridge: " + eventBridge);
var result = tab.item;
result.enabled = true;
tabView.tabCount++;
console.log("Setting event bridge: " + eventBridge);
result.eventBridgeWrapper.eventBridge = eventBridge;
result.url = properties.source;
return result;

View file

@ -44,7 +44,6 @@ Item {
webChannel.registeredObjects: [eventBridgeWrapper]
Component.onCompleted: {
console.log("Connecting JS messaging to Hifi Logging");
// Ensure the JS from the web-engine makes it to our logging
root.javaScriptConsoleMessage.connect(function(level, message, lineNumber, sourceID) {
console.log("Web Entity JS message: " + sourceID + " " + lineNumber + " " + message);

View file

@ -264,7 +264,7 @@ public:
auto elapsedMovingAverage = _movingAverage.getAverage();
if (elapsedMovingAverage > _maxElapsedAverage) {
qDebug() << "DEADLOCK WATCHDOG WARNING:"
qCDebug(interfaceapp_deadlock) << "DEADLOCK WATCHDOG WARNING:"
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "elapsedMovingAverage:" << elapsedMovingAverage
<< "maxElapsed:" << _maxElapsed
@ -274,7 +274,7 @@ public:
_maxElapsedAverage = elapsedMovingAverage;
}
if (lastHeartbeatAge > _maxElapsed) {
qDebug() << "DEADLOCK WATCHDOG WARNING:"
qCDebug(interfaceapp_deadlock) << "DEADLOCK WATCHDOG WARNING:"
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "elapsedMovingAverage:" << elapsedMovingAverage
<< "PREVIOUS maxElapsed:" << _maxElapsed
@ -284,7 +284,7 @@ public:
_maxElapsed = lastHeartbeatAge;
}
if (elapsedMovingAverage > WARNING_ELAPSED_HEARTBEAT) {
qDebug() << "DEADLOCK WATCHDOG WARNING:"
qCDebug(interfaceapp_deadlock) << "DEADLOCK WATCHDOG WARNING:"
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "elapsedMovingAverage:" << elapsedMovingAverage << "** OVER EXPECTED VALUE **"
<< "maxElapsed:" << _maxElapsed
@ -293,7 +293,7 @@ public:
}
if (lastHeartbeatAge > MAX_HEARTBEAT_AGE_USECS) {
qDebug() << "DEADLOCK DETECTED -- "
qCDebug(interfaceapp_deadlock) << "DEADLOCK DETECTED -- "
<< "lastHeartbeatAge:" << lastHeartbeatAge
<< "[ lastHeartbeat :" << lastHeartbeat
<< "now:" << now << " ]"
@ -562,6 +562,16 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
_deadlockWatchdogThread->start();
qCDebug(interfaceapp) << "[VERSION] Build sequence:" << qPrintable(applicationVersion());
qCDebug(interfaceapp) << "[VERSION] MODIFIED_ORGANIZATION:" << BuildInfo::MODIFIED_ORGANIZATION;
qCDebug(interfaceapp) << "[VERSION] VERSION:" << BuildInfo::VERSION;
qCDebug(interfaceapp) << "[VERSION] BUILD_BRANCH:" << BuildInfo::BUILD_BRANCH;
qCDebug(interfaceapp) << "[VERSION] BUILD_GLOBAL_SERVICES:" << BuildInfo::BUILD_GLOBAL_SERVICES;
#if USE_STABLE_GLOBAL_SERVICES
qCDebug(interfaceapp) << "[VERSION] We will use STABLE global services.";
#else
qCDebug(interfaceapp) << "[VERSION] We will use DEVELOPMENT global services.";
#endif
_bookmarks = new Bookmarks(); // Before setting up the menu
@ -2005,7 +2015,7 @@ void Application::resizeGL() {
static qreal lastDevicePixelRatio = 0;
qreal devicePixelRatio = _window->devicePixelRatio();
if (offscreenUi->size() != fromGlm(uiSize) || devicePixelRatio != lastDevicePixelRatio) {
qDebug() << "Device pixel ratio changed, triggering resize to " << uiSize;
qCDebug(interfaceapp) << "Device pixel ratio changed, triggering resize to " << uiSize;
offscreenUi->resize(fromGlm(uiSize), true);
_offscreenContext->makeCurrent();
lastDevicePixelRatio = devicePixelRatio;
@ -3260,17 +3270,17 @@ void Application::init() {
Setting::Handle<bool> firstRun { Settings::firstRun, true };
if (addressLookupString.isEmpty() && firstRun.get()) {
qDebug() << "First run and no URL passed... attempting to go to Home or Entry...";
qCDebug(interfaceapp) << "First run and no URL passed... attempting to go to Home or Entry...";
DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([](){
qDebug() << "Home sandbox appears to be running, going to Home.";
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
DependencyManager::get<AddressManager>()->goToLocalSandbox();
},
[](){
qDebug() << "Home sandbox does not appear to be running, going to Entry.";
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
DependencyManager::get<AddressManager>()->goToEntry();
});
} else {
qDebug() << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
}
@ -5617,7 +5627,7 @@ void Application::setActiveDisplayPlugin(const QString& pluginName) {
void Application::handleLocalServerConnection() const {
auto server = qobject_cast<QLocalServer*>(sender());
qDebug() << "Got connection on local server from additional instance - waiting for parameters";
qCDebug(interfaceapp) << "Got connection on local server from additional instance - waiting for parameters";
auto socket = server->nextPendingConnection();
@ -5633,7 +5643,7 @@ void Application::readArgumentsFromLocalSocket() const {
auto message = socket->readAll();
socket->deleteLater();
qDebug() << "Read from connection: " << message;
qCDebug(interfaceapp) << "Read from connection: " << message;
// If we received a message, try to open it as a URL
if (message.length() > 0) {
@ -5735,8 +5745,8 @@ void Application::updateThreadPoolCount() const {
auto reservedThreads = UI_RESERVED_THREADS + OS_RESERVED_THREADS + _displayPlugin->getRequiredThreadCount();
auto availableThreads = QThread::idealThreadCount() - reservedThreads;
auto threadPoolSize = std::max(MIN_PROCESSING_THREAD_POOL_SIZE, availableThreads);
qDebug() << "Ideal Thread Count " << QThread::idealThreadCount();
qDebug() << "Reserved threads " << reservedThreads;
qDebug() << "Setting thread pool size to " << threadPoolSize;
qCDebug(interfaceapp) << "Ideal Thread Count " << QThread::idealThreadCount();
qCDebug(interfaceapp) << "Reserved threads " << reservedThreads;
qCDebug(interfaceapp) << "Setting thread pool size to " << threadPoolSize;
QThreadPool::globalInstance()->setMaxThreadCount(threadPoolSize);
}

View file

@ -13,3 +13,4 @@
Q_LOGGING_CATEGORY(interfaceapp, "hifi.interface")
Q_LOGGING_CATEGORY(interfaceapp_timing, "hifi.interface.timing")
Q_LOGGING_CATEGORY(interfaceapp_deadlock, "hifi.interface.deadlock")

View file

@ -16,5 +16,6 @@
Q_DECLARE_LOGGING_CATEGORY(interfaceapp)
Q_DECLARE_LOGGING_CATEGORY(interfaceapp_timing)
Q_DECLARE_LOGGING_CATEGORY(interfaceapp_deadlock)
#endif // hifi_InterfaceLogging_h

View file

@ -244,14 +244,19 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
// 3 -- ignore i of 0 1 2
// 4 -- ignore i of 1 2 3
// 5 -- ignore i of 2 3 4
if ((i + 1) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
(i + 2) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
(i + 3) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex) {
continue;
}
// This code is now disabled, but I'm leaving it commented-out because I suspect it will come back.
// if ((i + 1) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
// (i + 2) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
// (i + 3) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex) {
// continue;
// }
measuredLinearVelocity += _measuredLinearVelocities[i];
}
measuredLinearVelocity /= (float)(AvatarActionHold::velocitySmoothFrames - 3); // 3 because of the 3 we skipped, above
measuredLinearVelocity /= (float)(AvatarActionHold::velocitySmoothFrames
// - 3 // 3 because of the 3 we skipped, above
);
if (_kinematicSetVelocity) {
rigidBody->setLinearVelocity(glmToBullet(measuredLinearVelocity));

View file

@ -58,21 +58,19 @@ void AudioStatsDisplay::updatedDisplay(QString str) {
AudioStatsDialog::AudioStatsDialog(QWidget* parent) :
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) {
_shouldShowInjectedStreams = false;
setWindowTitle("Audio Network Statistics");
// Get statistics from the Audio Client
_stats = &DependencyManager::get<AudioClient>()->getStats();
// Create layout
_form = new QFormLayout();
_form->setSizeConstraint(QLayout::SetFixedSize);
QDialog::setLayout(_form);
// Load and initialize all channels
renderStats();
// Initialize channels' content (needed to correctly size channels)
updateStats();
// Create channels
_audioDisplayChannels = QVector<QVector<AudioStatsDisplay*>>(1);
_audioMixerID = addChannel(_form, _audioMixerStats, COLOR0);
@ -80,9 +78,16 @@ AudioStatsDialog::AudioStatsDialog(QWidget* parent) :
_upstreamMixerID = addChannel(_form, _upstreamMixerStats, COLOR2);
_downstreamID = addChannel(_form, _downstreamStats, COLOR3);
_upstreamInjectedID = addChannel(_form, _upstreamInjectedStats, COLOR0);
connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(updateTimerTimeout()));
averageUpdateTimer->start(1000);
// Initialize channels
updateChannels();
// Future renders
connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(renderStats()));
averageUpdateTimer->start(200);
// Initial render
QDialog::setLayout(_form);
}
int AudioStatsDialog::addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color) {
@ -99,148 +104,152 @@ int AudioStatsDialog::addChannel(QFormLayout* form, QVector<QString>& stats, con
return channelID;
}
void AudioStatsDialog::updateStats(QVector<QString>& stats, int channelID) {
void AudioStatsDialog::renderStats() {
updateStats();
updateChannels();
}
void AudioStatsDialog::updateChannels() {
updateChannel(_audioMixerStats, _audioMixerID);
updateChannel(_upstreamClientStats, _upstreamClientID);
updateChannel(_upstreamMixerStats, _upstreamMixerID);
updateChannel(_downstreamStats, _downstreamID);
updateChannel(_upstreamInjectedStats, _upstreamInjectedID);
}
void AudioStatsDialog::updateChannel(QVector<QString>& stats, int channelID) {
// Update all stat displays at specified channel
for (int i = 0; i < stats.size(); i++)
_audioDisplayChannels[channelID].at(i)->updatedDisplay(stats.at(i));
}
void AudioStatsDialog::renderStats() {
void AudioStatsDialog::updateStats() {
// Clear current stats from all vectors
clearAllChannels();
double audioInputBufferLatency = 0.0,
inputRingBufferLatency = 0.0,
networkRoundtripLatency = 0.0,
mixerRingBufferLatency = 0.0,
outputRingBufferLatency = 0.0,
audioOutputBufferLatency = 0.0;
double audioInputBufferLatency{ 0.0 };
double inputRingBufferLatency{ 0.0 };
double networkRoundtripLatency{ 0.0 };
double mixerRingBufferLatency{ 0.0 };
double outputRingBufferLatency{ 0.0 };
double audioOutputBufferLatency{ 0.0 };
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
if (!audioMixerNodePointer.isNull()) {
audioInputBufferLatency = (double)_stats->getAudioInputMsecsReadStats().getWindowAverage();
inputRingBufferLatency = (double)_stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
networkRoundtripLatency = (double) audioMixerNodePointer->getPingMs();
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._framesAvailableAverage *
(double)AudioConstants::NETWORK_FRAME_MSECS;
outputRingBufferLatency = (double)downstreamAudioStreamStats._framesAvailableAverage *
(double)AudioConstants::NETWORK_FRAME_MSECS;
audioOutputBufferLatency = (double)_stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
if (SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer)) {
audioInputBufferLatency = (double)_stats->getInputMsRead().getWindowMax();
inputRingBufferLatency = (double)_stats->getInputMsUnplayed().getWindowMax();
networkRoundtripLatency = (double)audioMixerNodePointer->getPingMs();
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._unplayedMs;
outputRingBufferLatency = (double)_stats->getMixerDownstreamStats()._unplayedMs;
audioOutputBufferLatency = (double)_stats->getOutputMsUnplayed().getWindowMax();
}
double totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency
+ outputRingBufferLatency + audioOutputBufferLatency;
QString stats = "Audio input buffer: %1ms - avg msecs of samples read to the audio input buffer in last 10s";
_audioMixerStats.push_back(stats.arg(QString::number(audioInputBufferLatency, 'f', 2)));
double totalLatency = audioInputBufferLatency + inputRingBufferLatency + mixerRingBufferLatency
+ outputRingBufferLatency + audioOutputBufferLatency + networkRoundtripLatency;
stats = "Input ring buffer: %1ms - avg msecs of samples read to the input ring buffer in last 10s";
_audioMixerStats.push_back(stats.arg(QString::number(inputRingBufferLatency, 'f', 2)));
stats = "Network to mixer: %1ms - half of last ping value calculated by the node list";
_audioMixerStats.push_back(stats.arg(QString::number((networkRoundtripLatency / 2.0), 'f', 2)));
stats = "Network to client: %1ms - half of last ping value calculated by the node list";
_audioMixerStats.push_back(stats.arg(QString::number((mixerRingBufferLatency / 2.0),'f', 2)));
stats = "Output ring buffer: %1ms - avg msecs of samples in output ring buffer in last 10s";
_audioMixerStats.push_back(stats.arg(QString::number(outputRingBufferLatency,'f', 2)));
stats = "Audio output buffer: %1ms - avg msecs of samples in audio output buffer in last 10s";
_audioMixerStats.push_back(stats.arg(QString::number(mixerRingBufferLatency,'f', 2)));
stats = "TOTAL: %1ms - avg msecs of samples in audio output buffer in last 10s";
_audioMixerStats.push_back(stats.arg(QString::number(totalLatency, 'f', 2)));
QString stats;
_audioMixerStats.push_back("PIPELINE (averaged over the past 10s)");
stats = "Input Read:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(audioInputBufferLatency, 'f', 0)));
stats = "Input Ring:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(inputRingBufferLatency, 'f', 0)));
stats = "Network (client->mixer):\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(networkRoundtripLatency / 2, 'f', 0)));
stats = "Mixer Ring:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(mixerRingBufferLatency, 'f', 0)));
stats = "Network (mixer->client):\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(networkRoundtripLatency / 2, 'f', 0)));
stats = "Output Ring:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(outputRingBufferLatency, 'f', 0)));
stats = "Output Read:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(audioOutputBufferLatency, 'f', 0)));
stats = "TOTAL:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(totalLatency, 'f', 0)));
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketTimegaps();
_upstreamClientStats.push_back("\nUpstream Mic Audio Packets Sent Gaps (by client):");
stats = "Inter-packet timegaps (overall) | min: %1, max: %2, avg: %3";
stats = "Inter-packet timegaps";
_upstreamClientStats.push_back(stats);
stats = "overall min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(packetSentTimeGaps.getMin()),
formatUsecTime(packetSentTimeGaps.getMax()),
formatUsecTime(packetSentTimeGaps.getAverage()));
_upstreamClientStats.push_back(stats);
stats = "Inter-packet timegaps (last 30s) | min: %1, max: %2, avg: %3";
stats = "last window min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(packetSentTimeGaps.getWindowMin()),
formatUsecTime(packetSentTimeGaps.getWindowMax()),
formatUsecTime(packetSentTimeGaps.getWindowAverage()));
_upstreamClientStats.push_back(stats);
_upstreamMixerStats.push_back("\nUpstream mic audio stats (received and reported by audio-mixer):");
_upstreamMixerStats.push_back("\nMIXER STREAM");
_upstreamMixerStats.push_back("(this client's remote mixer stream performance)");
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), &_upstreamMixerStats, true);
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), &_upstreamMixerStats);
_downstreamStats.push_back("\nDownstream mixed audio stats:");
_downstreamStats.push_back("\nCLIENT STREAM");
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
renderAudioStreamStats(&downstreamStats, &_downstreamStats, true);
renderAudioStreamStats(&downstreamStats, &_downstreamStats);
if (_shouldShowInjectedStreams) {
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
stats = "\nUpstream injected audio stats: stream ID: %1";
stats = "\nINJECTED STREAM (ID: %1)";
stats = stats.arg(injectedStreamAudioStats._streamIdentifier.toString());
_upstreamInjectedStats.push_back(stats);
renderAudioStreamStats(&injectedStreamAudioStats, &_upstreamInjectedStats, true);
renderAudioStreamStats(&injectedStreamAudioStats, &_upstreamInjectedStats);
}
}
}
void AudioStatsDialog::renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats, bool isDownstreamStats) {
void AudioStatsDialog::renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats) {
QString stats = "Packet loss | overall: %1% (%2 lost), last_30s: %3% (%4 lost)";
QString stats = "Packet Loss";
audioStreamStats->push_back(stats);
stats = "overall:\t%1%\t(%2 lost), window:\t%3%\t(%4 lost)";
stats = stats.arg(QString::number((int)(streamStats->_packetStreamStats.getLostRate() * 100.0f)),
QString::number((int)(streamStats->_packetStreamStats._lost)),
QString::number((int)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f)),
QString::number((int)(streamStats->_packetStreamWindowStats._lost)));
QString::number((int)(streamStats->_packetStreamStats._lost)),
QString::number((int)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f)),
QString::number((int)(streamStats->_packetStreamWindowStats._lost)));
audioStreamStats->push_back(stats);
if (isDownstreamStats) {
stats = "Ringbuffer frames | desired: %1, avg_available(10s): %2 + %3, available: %4 + %5";
stats = stats.arg(QString::number(streamStats->_desiredJitterBufferFrames),
QString::number(streamStats->_framesAvailableAverage),
QString::number((int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() /
AudioConstants::NETWORK_FRAME_MSECS)),
QString::number(streamStats->_framesAvailable),
QString::number((int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample() /
AudioConstants::NETWORK_FRAME_MSECS)));
audioStreamStats->push_back(stats);
} else {
stats = "Ringbuffer frames | desired: %1, avg_available(10s): %2, available: %3";
stats = stats.arg(QString::number(streamStats->_desiredJitterBufferFrames),
QString::number(streamStats->_framesAvailableAverage),
QString::number(streamStats->_framesAvailable));
audioStreamStats->push_back(stats);
}
stats = "Ringbuffer stats | starves: %1, prev_starve_lasted: %2, frames_dropped: %3, overflows: %4";
stats = "Ringbuffer";
audioStreamStats->push_back(stats);
stats = "available frames (avg):\t%1\t(%2), desired:\t%3";
stats = stats.arg(QString::number(streamStats->_framesAvailable),
QString::number(streamStats->_framesAvailableAverage),
QString::number(streamStats->_desiredJitterBufferFrames));
audioStreamStats->push_back(stats);
stats = "starves:\t%1, last starve duration:\t%2, drops:\t%3, overflows:\t%4";
stats = stats.arg(QString::number(streamStats->_starveCount),
QString::number(streamStats->_consecutiveNotMixedCount),
QString::number(streamStats->_framesDropped),
QString::number(streamStats->_overflowCount));
QString::number(streamStats->_consecutiveNotMixedCount),
QString::number(streamStats->_framesDropped),
QString::number(streamStats->_overflowCount));
audioStreamStats->push_back(stats);
stats = "Inter-packet timegaps";
audioStreamStats->push_back(stats);
stats = "Inter-packet timegaps (overall) | min: %1, max: %2, avg: %3";
stats = "overall min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(streamStats->_timeGapMin),
formatUsecTime(streamStats->_timeGapMax),
formatUsecTime(streamStats->_timeGapAverage));
formatUsecTime(streamStats->_timeGapMax),
formatUsecTime(streamStats->_timeGapAverage));
audioStreamStats->push_back(stats);
stats = "Inter-packet timegaps (last 30s) | min: %1, max: %2, avg: %3";
stats = "last window min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(streamStats->_timeGapWindowMin),
formatUsecTime(streamStats->_timeGapWindowMax),
formatUsecTime(streamStats->_timeGapWindowAverage));
formatUsecTime(streamStats->_timeGapWindowMax),
formatUsecTime(streamStats->_timeGapWindowAverage));
audioStreamStats->push_back(stats);
}
void AudioStatsDialog::clearAllChannels() {
@ -251,21 +260,6 @@ void AudioStatsDialog::clearAllChannels() {
_upstreamInjectedStats.clear();
}
void AudioStatsDialog::updateTimerTimeout() {
renderStats();
// Update all audio stats
updateStats(_audioMixerStats, _audioMixerID);
updateStats(_upstreamClientStats, _upstreamClientID);
updateStats(_upstreamMixerStats, _upstreamMixerID);
updateStats(_downstreamStats, _downstreamID);
updateStats(_upstreamInjectedStats, _upstreamInjectedID);
}
void AudioStatsDialog::paintEvent(QPaintEvent* event) {
// Repaint each stat in each channel

View file

@ -70,18 +70,18 @@ private:
QVector<QVector<AudioStatsDisplay*>> _audioDisplayChannels;
void updateStats();
int addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color);
void updateStats(QVector<QString>& stats, const int channelID);
void renderStats();
void updateChannel(QVector<QString>& stats, const int channelID);
void updateChannels();
void clearAllChannels();
void renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamstats, bool isDownstreamStats);
void renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamstats);
const AudioIOStats* _stats;
QFormLayout* _form;
bool _isEnabled;
bool _shouldShowInjectedStreams;
bool _shouldShowInjectedStreams{ false };
signals:
@ -93,7 +93,7 @@ signals:
void reject() override;
void updateTimerTimeout();
void renderStats();
protected:

View file

@ -47,6 +47,7 @@
#include "PositionalAudioStream.h"
#include "AudioClientLogging.h"
#include "AudioLogging.h"
#include "AudioClient.h"
@ -122,12 +123,11 @@ AudioClient::AudioClient() :
_outputBufferSizeFrames("audioOutputBufferSizeFrames", DEFAULT_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES),
_sessionOutputBufferSizeFrames(_outputBufferSizeFrames.get()),
_outputStarveDetectionEnabled("audioOutputBufferStarveDetectionEnabled",
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED),
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED),
_outputStarveDetectionPeriodMsec("audioOutputStarveDetectionPeriod",
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD),
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD),
_outputStarveDetectionThreshold("audioOutputStarveDetectionThreshold",
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_THRESHOLD),
_averagedLatency(0.0f),
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_THRESHOLD),
_lastInputLoudness(0.0f),
_timeSinceLastClip(-1.0f),
_muted(false),
@ -146,9 +146,6 @@ AudioClient::AudioClient() :
_positionGetter(DEFAULT_POSITION_GETTER),
_orientationGetter(DEFAULT_ORIENTATION_GETTER)
{
// clear the array of locally injected samples
memset(_localProceduralSamples, 0, AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples,
this, &AudioClient::processReceivedSamples, Qt::DirectConnection);
connect(this, &AudioClient::changeDevice, this, [=](const QAudioDeviceInfo& outputDeviceInfo) { switchOutputToAudioDevice(outputDeviceInfo); });
@ -185,7 +182,7 @@ AudioClient::~AudioClient() {
}
void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
qDebug() << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
qCDebug(audioclient) << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
selectAudioFormat(recievedCodec);
}
@ -374,7 +371,8 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
adjustedAudioFormat = desiredAudioFormat;
#ifdef Q_OS_ANDROID
adjustedAudioFormat.setSampleRate(44100);
// FIXME: query the native sample rate of the device?
adjustedAudioFormat.setSampleRate(48000);
#else
//
@ -443,7 +441,7 @@ void possibleResampling(AudioSRC* resampler,
if (!sampleChannelConversion(sourceSamples, destinationSamples, numSourceSamples,
sourceAudioFormat, destinationAudioFormat)) {
// no conversion, we can copy the samples directly across
memcpy(destinationSamples, sourceSamples, numSourceSamples * sizeof(int16_t));
memcpy(destinationSamples, sourceSamples, numSourceSamples * AudioConstants::SAMPLE_SIZE);
}
} else {
@ -634,7 +632,7 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
_selectedCodecName = selectedCodecName;
qDebug() << "Selected Codec:" << _selectedCodecName;
qCDebug(audioclient) << "Selected Codec:" << _selectedCodecName;
// release any old codec encoder/decoder first...
if (_codec && _encoder) {
@ -650,7 +648,7 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
_codec = plugin;
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
qDebug() << "Selected Codec Plugin:" << _codec.get();
qCDebug(audioclient) << "Selected Codec Plugin:" << _codec.get();
break;
}
}
@ -683,8 +681,8 @@ bool AudioClient::switchOutputToAudioDevice(const QString& outputDeviceName) {
void AudioClient::configureReverb() {
ReverbParameters p;
p.sampleRate = _outputFormat.sampleRate();
p.sampleRate = AudioConstants::SAMPLE_RATE;
p.bandwidth = _reverbOptions->getBandwidth();
p.preDelay = _reverbOptions->getPreDelay();
p.lateDelay = _reverbOptions->getLateDelay();
@ -710,6 +708,7 @@ void AudioClient::configureReverb() {
_listenerReverb.setParameters(&p);
// used only for adding self-reverb to loopback audio
p.sampleRate = _outputFormat.sampleRate();
p.wetDryMix = 100.0f;
p.preDelay = 0.0f;
p.earlyGain = -96.0f; // disable ER
@ -816,23 +815,22 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
static QByteArray loopBackByteArray;
int numInputSamples = inputByteArray.size() / sizeof(int16_t);
int numInputSamples = inputByteArray.size() / AudioConstants::SAMPLE_SIZE;
int numLoopbackSamples = numDestinationSamplesRequired(_inputFormat, _outputFormat, numInputSamples);
loopBackByteArray.resize(numLoopbackSamples * sizeof(int16_t));
loopBackByteArray.resize(numLoopbackSamples * AudioConstants::SAMPLE_SIZE);
int16_t* inputSamples = reinterpret_cast<int16_t*>(inputByteArray.data());
int16_t* loopbackSamples = reinterpret_cast<int16_t*>(loopBackByteArray.data());
auto NO_RESAMPLER = nullptr;
possibleResampling(NO_RESAMPLER,
inputSamples, loopbackSamples,
numInputSamples, numLoopbackSamples,
_inputFormat, _outputFormat);
// upmix mono to stereo
if (!sampleChannelConversion(inputSamples, loopbackSamples, numInputSamples, _inputFormat, _outputFormat)) {
// no conversion, just copy the samples
memcpy(loopbackSamples, inputSamples, numInputSamples * AudioConstants::SAMPLE_SIZE);
}
// apply stereo reverb at the source, to the loopback audio
if (!_shouldEchoLocally && hasReverb) {
assert(_outputFormat.channelCount() == 2);
updateReverbOptions();
_sourceReverb.render(loopbackSamples, loopbackSamples, numLoopbackSamples/2);
}
@ -841,8 +839,12 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
}
void AudioClient::handleAudioInput() {
// input samples required to produce exactly NETWORK_FRAME_SAMPLES of output
const int inputSamplesRequired = _inputFormat.channelCount() * _inputToNetworkResampler->getMinInput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
const int inputSamplesRequired = (_inputToNetworkResampler ?
_inputToNetworkResampler->getMinInput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) :
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) * _inputFormat.channelCount();
const auto inputAudioSamples = std::unique_ptr<int16_t[]>(new int16_t[inputSamplesRequired]);
QByteArray inputByteArray = _inputDevice->readAll();
@ -851,7 +853,7 @@ void AudioClient::handleAudioInput() {
_inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());
float audioInputMsecsRead = inputByteArray.size() / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
_stats.updateInputMsecsRead(audioInputMsecsRead);
_stats.updateInputMsRead(audioInputMsecsRead);
const int numNetworkBytes = _isStereoInput
? AudioConstants::NETWORK_FRAME_BYTES_STEREO
@ -929,16 +931,20 @@ void AudioClient::handleAudioInput() {
audioTransform.setRotation(_orientationGetter());
// FIXME find a way to properly handle both playback audio and user audio concurrently
QByteArray decocedBuffer(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
QByteArray decodedBuffer(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(decocedBuffer, encodedBuffer);
_encoder->encode(decodedBuffer, encodedBuffer);
} else {
encodedBuffer = decocedBuffer;
encodedBuffer = decodedBuffer;
}
emitAudioPacket(encodedBuffer.constData(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, packetType, _selectedCodecName);
_stats.sentPacket();
int bytesInInputRingBuffer = _inputRingBuffer.samplesAvailable() * AudioConstants::SAMPLE_SIZE;
float msecsInInputRingBuffer = bytesInInputRingBuffer / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
_stats.updateInputMsUnplayed(msecsInInputRingBuffer);
}
}
@ -958,13 +964,9 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, PacketType::MicrophoneAudioWithEcho, _selectedCodecName);
}
void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
void AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
memset(_hrtfBuffer, 0, sizeof(_hrtfBuffer));
QVector<AudioInjector*> injectorsToRemove;
static const float INT16_TO_FLOAT_SCALE_FACTOR = 1/32768.0f;
bool injectorsHaveData = false;
// lock the injector vector
Lock lock(_injectorsMutex);
@ -972,19 +974,17 @@ void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
for (AudioInjector* injector : getActiveLocalAudioInjectors()) {
if (injector->getLocalBuffer()) {
qint64 samplesToRead = injector->isStereo() ?
AudioConstants::NETWORK_FRAME_BYTES_STEREO :
AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
qint64 samplesToRead = injector->isStereo() ? AudioConstants::NETWORK_FRAME_BYTES_STEREO : AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
// get one frame from the injector (mono or stereo)
memset(_scratchBuffer, 0, sizeof(_scratchBuffer));
if (0 < injector->getLocalBuffer()->readData((char*)_scratchBuffer, samplesToRead)) {
injectorsHaveData = true;
if (injector->isStereo()) {
if (injector->isStereo() ) {
for(int i=0; i<AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
_hrtfBuffer[i] += (float)(_scratchBuffer[i]) * INT16_TO_FLOAT_SCALE_FACTOR;
// stereo gets directly mixed into mixBuffer
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
mixBuffer[i] += (float)_scratchBuffer[i] * (1/32768.0f);
}
} else {
@ -995,73 +995,66 @@ void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
float gain = gainForSource(distance, injector->getVolume());
float azimuth = azimuthForSource(relativePosition);
injector->getLocalHRTF().render(_scratchBuffer, _hrtfBuffer, 1, azimuth, distance, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// mono gets spatialized into mixBuffer
injector->getLocalHRTF().render(_scratchBuffer, mixBuffer, 1, azimuth, distance, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
} else {
qDebug() << "injector has no more data, marking finished for removal";
qCDebug(audioclient) << "injector has no more data, marking finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
} else {
qDebug() << "injector has no local buffer, marking as finished for removal";
qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
}
if(injectorsHaveData) {
// mix network into the hrtfBuffer
for(int i=0; i<AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
_hrtfBuffer[i] += (float)(inputBuffer[i]) * INT16_TO_FLOAT_SCALE_FACTOR;
}
// now, use limiter to write back to the inputBuffer
_audioLimiter.render(_hrtfBuffer, inputBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
for(AudioInjector* injector : injectorsToRemove) {
qDebug() << "removing injector";
for (AudioInjector* injector : injectorsToRemove) {
qCDebug(audioclient) << "removing injector";
getActiveLocalAudioInjectors().removeOne(injector);
}
}
void AudioClient::processReceivedSamples(const QByteArray& decodedBuffer, QByteArray& outputBuffer) {
const int numDecodecSamples = decodedBuffer.size() / sizeof(int16_t);
const int numDeviceOutputSamples = _outputFrameSize;
Q_ASSERT(_outputFrameSize == numDecodecSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount()));
outputBuffer.resize(numDeviceOutputSamples * sizeof(int16_t));
const int16_t* decodedSamples;
int16_t* outputSamples = reinterpret_cast<int16_t*>(outputBuffer.data());
QByteArray decodedBufferCopy = decodedBuffer;
const int16_t* decodedSamples = reinterpret_cast<const int16_t*>(decodedBuffer.data());
assert(decodedBuffer.size() == AudioConstants::NETWORK_FRAME_BYTES_STEREO);
if(getActiveLocalAudioInjectors().size() > 0) {
mixLocalAudioInjectors((int16_t*)decodedBufferCopy.data());
decodedSamples = reinterpret_cast<const int16_t*>(decodedBufferCopy.data());
} else {
decodedSamples = reinterpret_cast<const int16_t*>(decodedBuffer.data());
outputBuffer.resize(_outputFrameSize * AudioConstants::SAMPLE_SIZE);
int16_t* outputSamples = reinterpret_cast<int16_t*>(outputBuffer.data());
// convert network audio to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
_mixBuffer[i] = (float)decodedSamples[i] * (1/32768.0f);
}
// mix in active injectors
if (getActiveLocalAudioInjectors().size() > 0) {
mixLocalAudioInjectors(_mixBuffer);
}
// copy the packet from the RB to the output
possibleResampling(_networkToOutputResampler, decodedSamples, outputSamples,
numDecodecSamples, numDeviceOutputSamples,
_desiredOutputFormat, _outputFormat);
// apply stereo reverb at the listener, to the received audio
// apply stereo reverb
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
if (hasReverb) {
assert(_outputFormat.channelCount() == 2);
updateReverbOptions();
_listenerReverb.render(outputSamples, outputSamples, numDeviceOutputSamples/2);
_listenerReverb.render(_mixBuffer, _mixBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
if (_networkToOutputResampler) {
// resample to output sample rate
_audioLimiter.render(_mixBuffer, _scratchBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
_networkToOutputResampler->render(_scratchBuffer, outputSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
} else {
// no resampling needed
_audioLimiter.render(_mixBuffer, outputSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
}
@ -1117,10 +1110,10 @@ bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
// Since this is invoked with invokeMethod, there _should_ be
// no reason to lock access to the vector of injectors.
if (!_activeLocalAudioInjectors.contains(injector)) {
qDebug() << "adding new injector";
qCDebug(audioclient) << "adding new injector";
_activeLocalAudioInjectors.append(injector);
} else {
qDebug() << "injector exists in active list already";
qCDebug(audioclient) << "injector exists in active list already";
}
return true;
@ -1215,7 +1208,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
void AudioClient::outputNotify() {
int recentUnfulfilled = _audioOutputIODevice.getRecentUnfulfilledReads();
if (recentUnfulfilled > 0) {
qCInfo(audioclient, "Starve detected, %d new unfulfilled reads", recentUnfulfilled);
qCDebug(audioclient, "Starve detected, %d new unfulfilled reads", recentUnfulfilled);
if (_outputStarveDetectionEnabled.get()) {
quint64 now = usecTimestampNow() / 1000;
@ -1230,7 +1223,8 @@ void AudioClient::outputNotify() {
int newOutputBufferSizeFrames = setOutputBufferSize(oldOutputBufferSizeFrames + 1, false);
if (newOutputBufferSizeFrames > oldOutputBufferSizeFrames) {
qCInfo(audioclient, "Starve threshold surpassed (%d starves in %d ms)", _outputStarveDetectionCount, dt);
qCDebug(audioclient,
"Starve threshold surpassed (%d starves in %d ms)", _outputStarveDetectionCount, dt);
}
_outputStarveDetectionStartTimeMsec = now;
@ -1290,7 +1284,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
// setup our general output device for audio-mixer audio
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
int osDefaultBufferSize = _audioOutput->bufferSize();
int requestedSize = _sessionOutputBufferSizeFrames *_outputFrameSize * sizeof(int16_t);
int requestedSize = _sessionOutputBufferSizeFrames *_outputFrameSize * AudioConstants::SAMPLE_SIZE;
_audioOutput->setBufferSize(requestedSize);
connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);
@ -1302,7 +1296,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
_audioOutput->start(&_audioOutputIODevice);
lock.unlock();
qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize <<
qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / AudioConstants::SAMPLE_SIZE / (float)_outputFrameSize <<
"requested bytes:" << requestedSize << "actual bytes:" << _audioOutput->bufferSize() <<
"os default:" << osDefaultBufferSize << "period size:" << _audioOutput->periodSize();
@ -1364,26 +1358,10 @@ int AudioClient::calculateNumberOfInputCallbackBytes(const QAudioFormat& format)
}
int AudioClient::calculateNumberOfFrameSamples(int numBytes) const {
int frameSamples = (int)(numBytes * CALLBACK_ACCELERATOR_RATIO + 0.5f) / sizeof(int16_t);
int frameSamples = (int)(numBytes * CALLBACK_ACCELERATOR_RATIO + 0.5f) / AudioConstants::SAMPLE_SIZE;
return frameSamples;
}
float AudioClient::getInputRingBufferMsecsAvailable() const {
int bytesInInputRingBuffer = _inputRingBuffer.samplesAvailable() * sizeof(int16_t);
float msecsInInputRingBuffer = bytesInInputRingBuffer / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
return msecsInInputRingBuffer;
}
float AudioClient::getAudioOutputMsecsUnplayed() const {
if (!_audioOutput) {
return 0.0f;
}
int bytesAudioOutputUnplayed = _audioOutput->bufferSize() - _audioOutput->bytesFree();
float msecsAudioOutputUnplayed = bytesAudioOutputUnplayed / (float)_outputFormat.bytesForDuration(USECS_PER_MSEC);
return msecsAudioOutputUnplayed;
}
float AudioClient::azimuthForSource(const glm::vec3& relativePosition) {
// copied from AudioMixer, more or less
glm::quat inverseOrientation = glm::inverse(_orientationGetter());
@ -1424,14 +1402,15 @@ float AudioClient::gainForSource(float distance, float volume) {
}
qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
auto samplesRequested = maxSize / sizeof(int16_t);
auto samplesRequested = maxSize / AudioConstants::SAMPLE_SIZE;
int samplesPopped;
int bytesWritten;
if ((samplesPopped = _receivedAudioStream.popSamples((int)samplesRequested, false)) > 0) {
qCDebug(audiostream, "Read %d samples from buffer (%d available)", samplesPopped, _receivedAudioStream.getSamplesAvailable());
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
lastPopOutput.readSamples((int16_t*)data, samplesPopped);
bytesWritten = samplesPopped * sizeof(int16_t);
bytesWritten = samplesPopped * AudioConstants::SAMPLE_SIZE;
} else {
// nothing on network, don't grab anything from injectors, and just return 0s
// this will flood the log: qCDebug(audioclient, "empty/partial network buffer");
@ -1439,8 +1418,11 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
bytesWritten = maxSize;
}
bool wasBufferStarved = _audio->_audioOutput->bufferSize() == _audio->_audioOutput->bytesFree();
if (wasBufferStarved) {
int bytesAudioOutputUnplayed = _audio->_audioOutput->bufferSize() - _audio->_audioOutput->bytesFree();
float msecsAudioOutputUnplayed = bytesAudioOutputUnplayed / (float)_audio->_outputFormat.bytesForDuration(USECS_PER_MSEC);
_audio->_stats.updateOutputMsUnplayed(msecsAudioOutputUnplayed);
if (bytesAudioOutputUnplayed == 0) {
_unfulfilledReads++;
}
@ -1471,10 +1453,10 @@ void AudioClient::loadSettings() {
_receivedAudioStream.setWindowSecondsForDesiredReduction(windowSecondsForDesiredReduction.get());
_receivedAudioStream.setRepetitionWithFade(repetitionWithFade.get());
qDebug() << "---- Initializing Audio Client ----";
qCDebug(audioclient) << "---- Initializing Audio Client ----";
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
for (auto& plugin : codecPlugins) {
qDebug() << "Codec available:" << plugin->getName();
qCDebug(audioclient) << "Codec available:" << plugin->getName();
}
}

View file

@ -121,9 +121,6 @@ public:
const AudioIOStats& getStats() const { return _stats; }
float getInputRingBufferMsecsAvailable() const;
float getAudioOutputMsecsUnplayed() const;
int getOutputBufferSize() { return _outputBufferSizeFrames.get(); }
bool getOutputStarveDetectionEnabled() { return _outputStarveDetectionEnabled.get(); }
@ -227,7 +224,7 @@ protected:
private:
void outputFormatChanged();
void mixLocalAudioInjectors(int16_t* inputBuffer);
void mixLocalAudioInjectors(float* mixBuffer);
float azimuthForSource(const glm::vec3& relativePosition);
float gainForSource(float distance, float volume);
@ -253,18 +250,15 @@ private:
Gate _gate;
Mutex _injectorsMutex;
QByteArray firstInputFrame;
QAudioInput* _audioInput;
QAudioFormat _desiredInputFormat;
QAudioFormat _inputFormat;
QIODevice* _inputDevice;
int _numInputCallbackBytes;
int16_t _localProceduralSamples[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL];
QAudioOutput* _audioOutput;
QAudioFormat _desiredOutputFormat;
QAudioFormat _outputFormat;
int _outputFrameSize;
int16_t _outputProcessingBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int _numOutputCallbackBytes;
QAudioOutput* _loopbackAudioOutput;
QIODevice* _loopbackOutputDevice;
@ -287,7 +281,6 @@ private:
StDev _stdev;
QElapsedTimer _timeSinceLastReceived;
float _averagedLatency;
float _lastInputLoudness;
float _timeSinceLastClip;
int _totalInputAudioSamples;
@ -309,7 +302,7 @@ private:
AudioSRC* _networkToOutputResampler;
// for local hrtf-ing
float _hrtfBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
float _mixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _scratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
AudioLimiter _audioLimiter;

View file

@ -18,54 +18,73 @@
#include "AudioIOStats.h"
const int FRAMES_AVAILABLE_STATS_WINDOW_SECONDS = 10;
// This is called 5x/sec (see AudioStatsDialog), and we want it to log the last 5s
static const int INPUT_READS_WINDOW = 25;
static const int INPUT_UNPLAYED_WINDOW = 25;
static const int OUTPUT_UNPLAYED_WINDOW = 25;
const int APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS = (int)(30.0f * 1000.0f / AudioConstants::NETWORK_FRAME_MSECS);
static const int APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS = (int)(30.0f * 1000.0f / AudioConstants::NETWORK_FRAME_MSECS);
AudioIOStats::AudioIOStats(MixedProcessedAudioStream* receivedAudioStream) :
_receivedAudioStream(receivedAudioStream),
_audioInputMsecsReadStats(MSECS_PER_SECOND / (float)AudioConstants::NETWORK_FRAME_MSECS * AudioClient::CALLBACK_ACCELERATOR_RATIO, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_inputRingBufferMsecsAvailableStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_lastSentAudioPacket(0),
_packetSentTimeGaps(1, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS)
_inputMsRead(0, INPUT_READS_WINDOW),
_inputMsUnplayed(0, INPUT_UNPLAYED_WINDOW),
_outputMsUnplayed(0, OUTPUT_UNPLAYED_WINDOW),
_lastSentPacketTime(0),
_packetTimegaps(0, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS)
{
}
AudioStreamStats AudioIOStats::getMixerDownstreamStats() const {
return _receivedAudioStream->getAudioStreamStats();
}
void AudioIOStats::reset() {
_receivedAudioStream->resetStats();
_inputMsRead.reset();
_inputMsUnplayed.reset();
_outputMsUnplayed.reset();
_packetTimegaps.reset();
_mixerAvatarStreamStats = AudioStreamStats();
_mixerInjectedStreamStatsMap.clear();
_audioInputMsecsReadStats.reset();
_inputRingBufferMsecsAvailableStats.reset();
_audioOutputMsecsUnplayedStats.reset();
_packetSentTimeGaps.reset();
}
void AudioIOStats::sentPacket() {
// first time this is 0
if (_lastSentAudioPacket == 0) {
_lastSentAudioPacket = usecTimestampNow();
if (_lastSentPacketTime == 0) {
_lastSentPacketTime = usecTimestampNow();
} else {
quint64 now = usecTimestampNow();
quint64 gap = now - _lastSentAudioPacket;
_packetSentTimeGaps.update(gap);
_lastSentAudioPacket = now;
quint64 gap = now - _lastSentPacketTime;
_lastSentPacketTime = now;
_packetTimegaps.update(gap);
}
}
void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
const MovingMinMaxAvg<float>& AudioIOStats::getInputMsRead() const {
_inputMsRead.currentIntervalComplete();
return _inputMsRead;
}
const MovingMinMaxAvg<float>& AudioIOStats::getInputMsUnplayed() const {
_inputMsUnplayed.currentIntervalComplete();
return _inputMsUnplayed;
}
const MovingMinMaxAvg<float>& AudioIOStats::getOutputMsUnplayed() const {
_outputMsUnplayed.currentIntervalComplete();
return _outputMsUnplayed;
}
const MovingMinMaxAvg<quint64>& AudioIOStats::getPacketTimegaps() const {
_packetTimegaps.currentIntervalComplete();
return _packetTimegaps;
}
const AudioStreamStats AudioIOStats::getMixerDownstreamStats() const {
return _receivedAudioStream->getAudioStreamStats();
}
void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// parse the appendFlag, clear injected audio stream stats if 0
quint8 appendFlag;
message->readPrimitive(&appendFlag);
@ -92,14 +111,9 @@ void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> mess
}
void AudioIOStats::sendDownstreamAudioStatsPacket() {
auto audioIO = DependencyManager::get<AudioClient>();
// since this function is called every second, we'll sample for some of our stats here
_inputRingBufferMsecsAvailableStats.update(audioIO->getInputRingBufferMsecsAvailable());
_audioOutputMsecsUnplayedStats.update(audioIO->getAudioOutputMsecsUnplayed());
// also, call _receivedAudioStream's per-second callback
// call _receivedAudioStream's per-second callback
_receivedAudioStream->perSecondCallbackForUpdatingStats();
auto nodeList = DependencyManager::get<NodeList>();

View file

@ -29,19 +29,20 @@ public:
void reset();
void updateInputMsecsRead(float msecsRead) { _audioInputMsecsReadStats.update(msecsRead); }
void updateInputMsRead(float ms) { _inputMsRead.update(ms); }
void updateInputMsUnplayed(float ms) { _inputMsUnplayed.update(ms); }
void updateOutputMsUnplayed(float ms) { _outputMsUnplayed.update(ms); }
void sentPacket();
AudioStreamStats getMixerDownstreamStats() const;
const MovingMinMaxAvg<float>& getInputMsRead() const;
const MovingMinMaxAvg<float>& getInputMsUnplayed() const;
const MovingMinMaxAvg<float>& getOutputMsUnplayed() const;
const MovingMinMaxAvg<quint64>& getPacketTimegaps() const;
const AudioStreamStats getMixerDownstreamStats() const;
const AudioStreamStats& getMixerAvatarStreamStats() const { return _mixerAvatarStreamStats; }
const QHash<QUuid, AudioStreamStats>& getMixerInjectedStreamStatsMap() const { return _mixerInjectedStreamStatsMap; }
const MovingMinMaxAvg<float>& getAudioInputMsecsReadStats() const { return _audioInputMsecsReadStats; }
const MovingMinMaxAvg<float>& getInputRungBufferMsecsAvailableStats() const { return _inputRingBufferMsecsAvailableStats; }
const MovingMinMaxAvg<float>& getAudioOutputMsecsUnplayedStats() const { return _audioOutputMsecsUnplayedStats; }
const MovingMinMaxAvg<quint64>& getPacketSentTimeGaps() const { return _packetSentTimeGaps; }
void sendDownstreamAudioStatsPacket();
public slots:
@ -49,17 +50,16 @@ public slots:
private:
MixedProcessedAudioStream* _receivedAudioStream;
MovingMinMaxAvg<float> _audioInputMsecsReadStats;
MovingMinMaxAvg<float> _inputRingBufferMsecsAvailableStats;
MovingMinMaxAvg<float> _audioOutputMsecsUnplayedStats;
mutable MovingMinMaxAvg<float> _inputMsRead;
mutable MovingMinMaxAvg<float> _inputMsUnplayed;
mutable MovingMinMaxAvg<float> _outputMsUnplayed;
quint64 _lastSentPacketTime;
mutable MovingMinMaxAvg<quint64> _packetTimegaps;
AudioStreamStats _mixerAvatarStreamStats;
QHash<QUuid, AudioStreamStats> _mixerInjectedStreamStatsMap;
quint64 _lastSentAudioPacket;
MovingMinMaxAvg<quint64> _packetSentTimeGaps;
};
#endif // hifi_AudioIOStats_h

View file

@ -23,15 +23,16 @@ namespace AudioConstants {
typedef int16_t AudioSample;
const int SAMPLE_SIZE = sizeof(AudioSample);
inline const char* getAudioFrameName() { return "com.highfidelity.recording.Audio"; }
const int MAX_CODEC_NAME_LENGTH = 30;
const int MAX_CODEC_NAME_LENGTH_ON_WIRE = MAX_CODEC_NAME_LENGTH + sizeof(uint32_t);
const int NETWORK_FRAME_BYTES_STEREO = 960;
const int NETWORK_FRAME_SAMPLES_STEREO = NETWORK_FRAME_BYTES_STEREO / sizeof(AudioSample);
const int NETWORK_FRAME_SAMPLES_STEREO = NETWORK_FRAME_BYTES_STEREO / SAMPLE_SIZE;
const int NETWORK_FRAME_BYTES_PER_CHANNEL = NETWORK_FRAME_BYTES_STEREO / 2;
const int NETWORK_FRAME_SAMPLES_PER_CHANNEL = NETWORK_FRAME_BYTES_PER_CHANNEL / sizeof(AudioSample);
const int NETWORK_FRAME_SAMPLES_PER_CHANNEL = NETWORK_FRAME_BYTES_PER_CHANNEL / SAMPLE_SIZE;
const float NETWORK_FRAME_SECS = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL / float(AudioConstants::SAMPLE_RATE));
const float NETWORK_FRAME_MSECS = NETWORK_FRAME_SECS * 1000.0f;
const float NETWORK_FRAMES_PER_SEC = 1.0f / NETWORK_FRAME_SECS;

View file

@ -26,7 +26,7 @@
#include "SoundCache.h"
#include "AudioSRC.h"
//int audioInjectorPtrMetaTypeId = qRegisterMetaType<AudioInjector*>();
int audioInjectorPtrMetaTypeId = qRegisterMetaType<AudioInjector*>();
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
return static_cast<AudioInjectorState>(static_cast<uint8_t>(lhs) & static_cast<uint8_t>(rhs));

View file

@ -14,7 +14,7 @@
Q_LOGGING_CATEGORY(audio, "hifi.audio")
#if DEV_BUILD || PR_BUILD
Q_LOGGING_CATEGORY(audiostream, "hifi.audio-stream", QtDebugMsg)
#else
Q_LOGGING_CATEGORY(audiostream, "hifi.audio-stream", QtInfoMsg)
#else
Q_LOGGING_CATEGORY(audiostream, "hifi.audio-stream", QtWarningMsg)
#endif

View file

@ -48,6 +48,7 @@ public:
quint32 _framesAvailable;
quint16 _framesAvailableAverage;
quint16 _unplayedMs;
quint16 _desiredJitterBufferFrames;
quint32 _starveCount;
quint32 _consecutiveNotMixedCount;

View file

@ -18,7 +18,10 @@
#include "InboundAudioStream.h"
#include "AudioLogging.h"
const int STARVE_HISTORY_CAPACITY = 50;
static const int STARVE_HISTORY_CAPACITY = 50;
// This is called 1x/s, and we want it to log the last 5s
static const int UNPLAYED_MS_WINDOW_SECS = 5;
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings) :
_ringBuffer(numFrameSamples, numFramesCapacity),
@ -46,6 +49,7 @@ InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacit
_starveHistory(STARVE_HISTORY_CAPACITY),
_starveThreshold(settings._windowStarveThreshold),
_framesAvailableStat(),
_unplayedMs(0, UNPLAYED_MS_WINDOW_SECS),
_currentJitterBufferFrames(0),
_timeGapStatsForStatsPacket(0, STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
_repetitionWithFade(settings._repetitionWithFade),
@ -82,6 +86,7 @@ void InboundAudioStream::resetStats() {
_framesAvailableStat.reset();
_currentJitterBufferFrames = 0;
_timeGapStatsForStatsPacket.reset();
_unplayedMs.reset();
}
void InboundAudioStream::clearBuffer() {
@ -101,6 +106,7 @@ void InboundAudioStream::perSecondCallbackForUpdatingStats() {
_timeGapStatsForDesiredCalcOnTooManyStarves.currentIntervalComplete();
_timeGapStatsForDesiredReduction.currentIntervalComplete();
_timeGapStatsForStatsPacket.currentIntervalComplete();
_unplayedMs.currentIntervalComplete();
}
int InboundAudioStream::parseData(ReceivedMessage& message) {
@ -163,6 +169,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
int framesAvailable = _ringBuffer.framesAvailable();
// if this stream was starved, check if we're still starved.
if (_isStarved && framesAvailable >= _desiredJitterBufferFrames) {
qCInfo(audiostream, "Starve ended");
_isStarved = false;
}
// if the ringbuffer exceeds the desired size by more than the threshold specified,
@ -176,8 +183,8 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
_oldFramesDropped += framesToDrop;
qCDebug(audiostream, "Dropped %d frames", framesToDrop);
qCDebug(audiostream, "Resetted current jitter frames");
qCInfo(audiostream, "Dropped %d frames", framesToDrop);
qCInfo(audiostream, "Reset current jitter frames");
}
framesAvailableChanged();
@ -232,8 +239,8 @@ int InboundAudioStream::writeDroppableSilentSamples(int silentSamples) {
_currentJitterBufferFrames -= numSilentFramesToDrop;
_silentFramesDropped += numSilentFramesToDrop;
qCDebug(audiostream, "Dropped %d silent frames", numSilentFramesToDrop);
qCDebug(audiostream, "Set current jitter frames to %d", _currentJitterBufferFrames);
qCInfo(audiostream, "Dropped %d silent frames", numSilentFramesToDrop);
qCInfo(audiostream, "Set current jitter frames to %d (dropped)", _currentJitterBufferFrames);
_framesAvailableStat.reset();
}
@ -302,6 +309,9 @@ int InboundAudioStream::popFrames(int maxFrames, bool allOrNothing, bool starveI
}
void InboundAudioStream::popSamplesNoCheck(int samples) {
float unplayedMs = (_ringBuffer.samplesAvailable() / (float)_ringBuffer.getNumFrameSamples()) * AudioConstants::NETWORK_FRAME_MSECS;
_unplayedMs.update(unplayedMs);
_lastPopOutput = _ringBuffer.nextOutput();
_ringBuffer.shiftReadPosition(samples);
framesAvailableChanged();
@ -315,13 +325,17 @@ void InboundAudioStream::framesAvailableChanged() {
if (_framesAvailableStat.getElapsedUsecs() >= FRAMES_AVAILABLE_STAT_WINDOW_USECS) {
_currentJitterBufferFrames = (int)ceil(_framesAvailableStat.getAverage());
qCDebug(audiostream, "Set current jitter frames to %d", _currentJitterBufferFrames);
qCInfo(audiostream, "Set current jitter frames to %d (changed)", _currentJitterBufferFrames);
_framesAvailableStat.reset();
}
}
void InboundAudioStream::setToStarved() {
if (!_isStarved) {
qCInfo(audiostream, "Starved");
}
_consecutiveNotMixedCount = 0;
_starveCount++;
// if we have more than the desired frames when setToStarved() is called, then we'll immediately
@ -364,7 +378,7 @@ void InboundAudioStream::setToStarved() {
// make sure _desiredJitterBufferFrames does not become lower here
if (calculatedJitterBufferFrames >= _desiredJitterBufferFrames) {
_desiredJitterBufferFrames = calculatedJitterBufferFrames;
qCDebug(audiostream, "Set desired jitter frames to %d", _desiredJitterBufferFrames);
qCInfo(audiostream, "Set desired jitter frames to %d (starved)", _desiredJitterBufferFrames);
}
}
}
@ -420,7 +434,7 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() {
// update our timegap stats and desired jitter buffer frames if necessary
// discard the first few packets we receive since they usually have gaps that aren't represensative of normal jitter
const quint32 NUM_INITIAL_PACKETS_DISCARD = 3;
const quint32 NUM_INITIAL_PACKETS_DISCARD = 1000; // 10s
quint64 now = usecTimestampNow();
if (_incomingSequenceNumberStats.getReceived() > NUM_INITIAL_PACKETS_DISCARD) {
quint64 gap = now - _lastPacketReceivedTime;
@ -454,7 +468,7 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() {
/ (float)AudioConstants::NETWORK_FRAME_USECS);
if (calculatedJitterBufferFrames < _desiredJitterBufferFrames) {
_desiredJitterBufferFrames = calculatedJitterBufferFrames;
qCDebug(audiostream, "Set desired jitter frames to %d", _desiredJitterBufferFrames);
qCInfo(audiostream, "Set desired jitter frames to %d (reduced)", _desiredJitterBufferFrames);
}
_timeGapStatsForDesiredReduction.clearNewStatsAvailableFlag();
}
@ -502,6 +516,7 @@ AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
streamStats._framesAvailable = _ringBuffer.framesAvailable();
streamStats._framesAvailableAverage = _framesAvailableStat.getAverage();
streamStats._unplayedMs = (quint16)_unplayedMs.getWindowMax();
streamStats._desiredJitterBufferFrames = _desiredJitterBufferFrames;
streamStats._starveCount = _starveCount;
streamStats._consecutiveNotMixedCount = _consecutiveNotMixedCount;

View file

@ -161,6 +161,7 @@ public:
int getFrameCapacity() const { return _ringBuffer.getFrameCapacity(); }
int getFramesAvailable() const { return _ringBuffer.framesAvailable(); }
double getFramesAvailableAverage() const { return _framesAvailableStat.getAverage(); }
int getSamplesAvailable() const { return _ringBuffer.samplesAvailable(); }
bool isStarved() const { return _isStarved; }
bool hasStarted() const { return _hasStarted; }
@ -264,6 +265,7 @@ protected:
int _starveThreshold;
TimeWeightedAvg<int> _framesAvailableStat;
MovingMinMaxAvg<float> _unplayedMs;
// this value is periodically updated with the time-weighted avg from _framesAvailableStat. it is only used for
// dropping silent frames right now.

View file

@ -10,6 +10,7 @@
//
#include "MixedProcessedAudioStream.h"
#include "AudioLogging.h"
static const int STEREO_FACTOR = 2;
@ -56,6 +57,7 @@ int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray&
emit processSamples(decodedBuffer, outputBuffer);
_ringBuffer.writeData(outputBuffer.data(), outputBuffer.size());
qCDebug(audiostream, "Wrote %d samples to buffer (%d available)", outputBuffer.size() / (int)sizeof(int16_t), getSamplesAvailable());
return packetAfterStreamProperties.size();
}

View file

@ -665,7 +665,7 @@ Mapping::Pointer UserInputMapper::newMapping(const QString& mappingName) {
if (_mappingsByName.count(mappingName)) {
qCWarning(controllers) << "Refusing to recreate mapping named " << mappingName;
}
qDebug() << "Creating new Mapping " << mappingName;
qCDebug(controllers) << "Creating new Mapping " << mappingName;
auto mapping = std::make_shared<Mapping>(mappingName);
_mappingsByName[mappingName] = mapping;
return mapping;
@ -1121,15 +1121,15 @@ Mapping::Pointer UserInputMapper::parseMapping(const QString& json) {
QJsonDocument doc = QJsonDocument::fromJson(json.toUtf8(), &error);
// check validity of the document
if (doc.isNull()) {
qDebug() << "Invalid JSON...\n";
qDebug() << error.errorString();
qDebug() << "JSON was:\n" << json << endl;
qCDebug(controllers) << "Invalid JSON...\n";
qCDebug(controllers) << error.errorString();
qCDebug(controllers) << "JSON was:\n" << json << endl;
return Mapping::Pointer();
}
if (!doc.isObject()) {
qWarning() << "Mapping json Document is not an object" << endl;
qDebug() << "JSON was:\n" << json << endl;
qCDebug(controllers) << "JSON was:\n" << json << endl;
return Mapping::Pointer();
}
return parseMapping(doc.object());

View file

@ -277,27 +277,37 @@ void HmdDisplayPlugin::updateFrameData() {
continue;
}
const auto& laserDirection = handLaser.direction;
auto model = _presentHandPoses[i];
auto castDirection = glm::quat_cast(model) * laserDirection;
const vec3& laserDirection = handLaser.direction;
mat4 model = _presentHandPoses[i];
vec3 castStart = vec3(model[3]);
vec3 castDirection = glm::quat_cast(model) * laserDirection;
if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
castDirection = glm::normalize(castDirection);
castDirection = glm::inverse(_presentUiModelTransform.getRotation()) * castDirection;
}
// this offset needs to match GRAB_POINT_SPHERE_OFFSET in scripts/system/libraries/controllers.js
static const vec3 GRAB_POINT_SPHERE_OFFSET = vec3(0.1f, 0.04f, -0.32f);
vec3 grabPointOffset = GRAB_POINT_SPHERE_OFFSET;
if (i == 0) {
grabPointOffset.x *= -1.0f; // this changes between left and right hands
}
castStart += glm::quat_cast(model) * grabPointOffset;
// FIXME fetch the actual UI radius from... somewhere?
float uiRadius = 1.0f;
// Find the intersection of the laser with the UI and use it to scale the model matrix
float distance;
if (!glm::intersectRaySphere(vec3(_presentHandPoses[i][3]), castDirection, _presentUiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {
if (!glm::intersectRaySphere(castStart, castDirection,
_presentUiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {
continue;
}
_presentHandLaserPoints[i].first = vec3(_presentHandPoses[i][3]);
_presentHandLaserPoints[i].first = castStart;
_presentHandLaserPoints[i].second = _presentHandLaserPoints[i].first + (castDirection * distance);
vec3 intersectionPosition = vec3(_presentHandPoses[i][3]) + (castDirection * distance) - _presentUiModelTransform.getTranslation();
vec3 intersectionPosition = castStart + (castDirection * distance) - _presentUiModelTransform.getTranslation();
intersectionPosition = glm::inverse(_presentUiModelTransform.getRotation()) * intersectionPosition;
// Take the intersection normal and convert it to a texture coordinate

View file

@ -22,6 +22,7 @@
#include <QtCore/QFileInfo>
#include <shared/NsightHelpers.h>
#include "ModelFormatLogging.h"
template<class T> int streamSize() {
return sizeof(T);
@ -356,7 +357,7 @@ FBXNode FBXReader::parseFBX(QIODevice* device) {
quint32 fileVersion;
in >> fileVersion;
position += sizeof(fileVersion);
qDebug() << "fileVersion:" << fileVersion;
qCDebug(modelformat) << "fileVersion:" << fileVersion;
bool has64BitPositions = (fileVersion >= VERSION_FBX2016);
// parse the top-level node

View file

@ -20,6 +20,8 @@
#include <QtGui/QGuiApplication>
#include <GLMHelpers.h>
#include "GLLogging.h"
#ifdef Q_OS_WIN
@ -111,7 +113,7 @@ void GLAPIENTRY debugMessageCallback(GLenum source, GLenum type, GLuint id, GLen
if (GL_DEBUG_SEVERITY_NOTIFICATION == severity) {
return;
}
qDebug() << "QQQ " << message;
qCDebug(glLogging) << "QQQ " << message;
}
// FIXME build the PFD based on the

View file

@ -65,7 +65,7 @@ class Context {
QWindow* _window { nullptr };
public:
virtual ~OffscreenContext();
virtual void create();
void create() override;
};
}

View file

@ -0,0 +1,14 @@
//
// GLLogging.cpp
// libraries/gl/src/gl/
//
// Created by Seth Alves on 2016-9-14.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GLLogging.h"
// Defines the "hifi.glLogging" Qt logging category declared in GLLogging.h.
Q_LOGGING_CATEGORY(glLogging, "hifi.glLogging")

View file

@ -0,0 +1,19 @@
//
// GLLogging.h
// libraries/gl/src/gl/
//
// Created by Seth Alves on 2016-9-14.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GLLogging_h
#define hifi_GLLogging_h
#include <QLoggingCategory>
// Logging category for GL-related messages; the matching definition
// (Q_LOGGING_CATEGORY) lives in GLLogging.cpp.
Q_DECLARE_LOGGING_CATEGORY(glLogging)
#endif // hifi_GLLogging_h

View file

@ -12,6 +12,7 @@
#include <QtGui/QOpenGLContext>
#include "GLHelpers.h"
#include "GLLogging.h"
void GLWindow::createContext(QOpenGLContext* shareContext) {
createContext(getDefaultOpenGLSurfaceFormat(), shareContext);
@ -41,10 +42,10 @@ bool GLWindow::makeCurrent() {
Q_ASSERT(makeCurrentResult);
std::call_once(_reportOnce, []{
qDebug() << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
qDebug() << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
qDebug() << "GL Vendor: " << QString((const char*) glGetString(GL_VENDOR));
qDebug() << "GL Renderer: " << QString((const char*) glGetString(GL_RENDERER));
qCDebug(glLogging) << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
qCDebug(glLogging) << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
qCDebug(glLogging) << "GL Vendor: " << QString((const char*) glGetString(GL_VENDOR));
qCDebug(glLogging) << "GL Renderer: " << QString((const char*) glGetString(GL_RENDERER));
});
Q_ASSERT(_context == QOpenGLContext::currentContext());

View file

@ -18,6 +18,8 @@
#include <QtGui/QOpenGLContext>
#include "GLHelpers.h"
#include "GLLogging.h"
OffscreenGLCanvas::OffscreenGLCanvas() : _context(new QOpenGLContext), _offscreenSurface(new QOffscreenSurface){
}
@ -56,10 +58,10 @@ bool OffscreenGLCanvas::makeCurrent() {
Q_ASSERT(result);
std::call_once(_reportOnce, [this]{
qDebug() << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
qDebug() << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
qDebug() << "GL Vendor: " << QString((const char*) glGetString(GL_VENDOR));
qDebug() << "GL Renderer: " << QString((const char*) glGetString(GL_RENDERER));
qCDebug(glLogging) << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
qCDebug(glLogging) << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
qCDebug(glLogging) << "GL Vendor: " << QString((const char*) glGetString(GL_VENDOR));
qCDebug(glLogging) << "GL Renderer: " << QString((const char*) glGetString(GL_RENDERER));
});
return result;

View file

@ -32,6 +32,7 @@
#include "OffscreenGLCanvas.h"
#include "GLEscrow.h"
#include "GLHelpers.h"
#include "GLLogging.h"
QString fixupHifiUrl(const QString& urlString) {
@ -196,7 +197,7 @@ QEvent* OffscreenQmlRenderThread::Queue::take() {
OffscreenQmlRenderThread::OffscreenQmlRenderThread(OffscreenQmlSurface* surface, QOpenGLContext* shareContext) : _surface(surface) {
_canvas.setObjectName("OffscreenQmlRenderCanvas");
qDebug() << "Building QML Renderer";
qCDebug(glLogging) << "Building QML Renderer";
if (!_canvas.create(shareContext)) {
qWarning("Failed to create OffscreenGLCanvas");
_quit = true;
@ -223,7 +224,7 @@ OffscreenQmlRenderThread::OffscreenQmlRenderThread(OffscreenQmlSurface* surface,
}
void OffscreenQmlRenderThread::run() {
qDebug() << "Starting QML Renderer thread";
qCDebug(glLogging) << "Starting QML Renderer thread";
while (!_quit) {
QEvent* e = _queue.take();
@ -282,7 +283,7 @@ QJsonObject OffscreenQmlRenderThread::getGLContextData() {
}
void OffscreenQmlRenderThread::init() {
qDebug() << "Initializing QML Renderer";
qCDebug(glLogging) << "Initializing QML Renderer";
if (!_canvas.makeCurrent()) {
qWarning("Failed to make context current on QML Renderer Thread");
@ -341,7 +342,7 @@ void OffscreenQmlRenderThread::resize() {
return;
}
qDebug() << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;
qCDebug(glLogging) << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;
_size = newOffscreenSize;
}
@ -427,7 +428,7 @@ OffscreenQmlSurface::~OffscreenQmlSurface() {
QObject::disconnect(&_updateTimer);
QObject::disconnect(qApp);
qDebug() << "Stopping QML Renderer Thread " << _renderer->currentThreadId();
qCDebug(glLogging) << "Stopping QML Renderer Thread " << _renderer->currentThreadId();
_renderer->_queue.add(STOP);
if (!_renderer->wait(MAX_SHUTDOWN_WAIT_SECS * USECS_PER_SECOND)) {
qWarning() << "Failed to shut down the QML Renderer Thread";
@ -444,7 +445,7 @@ void OffscreenQmlSurface::onAboutToQuit() {
}
void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
qDebug() << "Building QML surface";
qCDebug(glLogging) << "Building QML surface";
_renderer = new OffscreenQmlRenderThread(this, shareContext);
_renderer->moveToThread(_renderer);

View file

@ -10,6 +10,7 @@
#include <set>
#include <oglplus/shapes/plane.hpp>
#include <oglplus/shapes/sky_box.hpp>
#include "GLLogging.h"
using namespace oglplus;
using namespace oglplus::shapes;
@ -190,7 +191,7 @@ public:
const int stacks_) {
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
if (fov >= PI) {
qDebug() << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
qCDebug(glLogging) << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
}
int gridSize = std::max(slices_, stacks_);

View file

@ -44,10 +44,10 @@ BackendPointer GLBackend::createBackend() {
auto version = QOpenGLContextWrapper::currentContextVersion();
std::shared_ptr<GLBackend> result;
if (!disableOpenGL45 && version >= 0x0405) {
qDebug() << "Using OpenGL 4.5 backend";
qCDebug(gpugllogging) << "Using OpenGL 4.5 backend";
result = std::make_shared<gpu::gl45::GL45Backend>();
} else {
qDebug() << "Using OpenGL 4.1 backend";
qCDebug(gpugllogging) << "Using OpenGL 4.1 backend";
result = std::make_shared<gpu::gl41::GL41Backend>();
}
result->initInput();

View file

@ -765,7 +765,7 @@ void AddressManager::handleShareableNameAPIResponse(QNetworkReply& requestReply)
}
if (shareableNameChanged) {
qDebug() << "AddressManager shareable name changed to" << _shareablePlaceName;
qCDebug(networking) << "AddressManager shareable name changed to" << _shareablePlaceName;
}
}
}

View file

@ -60,7 +60,7 @@ void AssetClient::init() {
cache->setMaximumCacheSize(MAXIMUM_CACHE_SIZE);
cache->setCacheDirectory(cachePath);
networkAccessManager.setCache(cache);
qDebug() << "ResourceManager disk cache setup at" << cachePath
qInfo() << "ResourceManager disk cache setup at" << cachePath
<< "(size:" << MAXIMUM_CACHE_SIZE / BYTES_PER_GIGABYTES << "GB)";
}
}
@ -91,7 +91,7 @@ void AssetClient::clearCache() {
}
if (auto cache = NetworkAccessManager::getInstance().cache()) {
qDebug() << "AssetClient::clearCache(): Clearing disk cache.";
qInfo() << "AssetClient::clearCache(): Clearing disk cache.";
cache->clear();
} else {
qCWarning(asset_client) << "No disk cache to clear.";

View file

@ -351,7 +351,7 @@ qint64 LimitedNodeList::sendPacket(std::unique_ptr<NLPacket> packet, const Node&
return sendPacket(std::move(packet), *activeSocket, destinationNode.getConnectionSecret());
} else {
qDebug() << "LimitedNodeList::sendPacket called without active socket for node" << destinationNode << "- not sending";
qCDebug(networking) << "LimitedNodeList::sendPacket called without active socket for node" << destinationNode << "- not sending";
return 0;
}
}
@ -389,7 +389,7 @@ qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& des
emit dataSent(destinationNode.getType(), bytesSent);
return bytesSent;
} else {
qDebug() << "LimitedNodeList::sendPacketList called without active socket for node" << destinationNode
qCDebug(networking) << "LimitedNodeList::sendPacketList called without active socket for node" << destinationNode
<< " - not sending.";
return 0;
}

View file

@ -15,6 +15,7 @@
#include <EntityItemProperties.h>
#include <EntityEditPacketSender.h>
#include <PhysicsCollisionGroups.h>
#include <LogHandler.h>
#include "BulletUtil.h"
#include "EntityMotionState.h"
@ -230,11 +231,17 @@ void EntityMotionState::setWorldTransform(const btTransform& worldTrans) {
bool positionSuccess;
_entity->setPosition(bulletToGLM(worldTrans.getOrigin()) + ObjectMotionState::getWorldOffset(), positionSuccess, false);
if (!positionSuccess) {
static QString repeatedMessage =
LogHandler::getInstance().addRepeatedMessageRegex("EntityMotionState::setWorldTransform "
"setPosition failed.*");
qDebug() << "EntityMotionState::setWorldTransform setPosition failed" << _entity->getID();
}
bool orientationSuccess;
_entity->setOrientation(bulletToGLM(worldTrans.getRotation()), orientationSuccess, false);
if (!orientationSuccess) {
static QString repeatedMessage =
LogHandler::getInstance().addRepeatedMessageRegex("EntityMotionState::setWorldTransform "
"setOrientation failed.*");
qDebug() << "EntityMotionState::setWorldTransform setOrientation failed" << _entity->getID();
}
_entity->setVelocity(getBodyLinearVelocity());

View file

@ -322,7 +322,8 @@ void PhysicalEntitySimulation::addAction(EntityActionPointer action) {
QMutexLocker lock(&_mutex);
const QUuid& actionID = action->getID();
if (_physicsEngine->getActionByID(actionID)) {
qDebug() << "warning -- PhysicalEntitySimulation::addAction -- adding an action that was already in _physicsEngine";
qCDebug(physics) << "warning -- PhysicalEntitySimulation::addAction -- adding an "
"action that was already in _physicsEngine";
}
}
EntitySimulation::addAction(action);

View file

@ -0,0 +1,14 @@
//
// PluginLogging.cpp
// libraries/plugins/src/plugins/
//
// Created by Seth Alves on 2016-9-14.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "PluginLogging.h"
// Defines the "hifi.plugins" Qt logging category declared in PluginLogging.h.
Q_LOGGING_CATEGORY(plugins, "hifi.plugins")

View file

@ -0,0 +1,19 @@
//
// PluginLogging.h
// libraries/plugins/src/plugins/
//
// Created by Seth Alves on 2016-9-14.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_PluginLogging_h
#define hifi_PluginLogging_h
#include <QLoggingCategory>
// Logging category for plugin load/init messages; the matching definition
// (Q_LOGGING_CATEGORY) lives in PluginLogging.cpp.
Q_DECLARE_LOGGING_CATEGORY(plugins)
#endif // hifi_PluginLogging_h

View file

@ -21,6 +21,7 @@
#include "CodecPlugin.h"
#include "DisplayPlugin.h"
#include "InputPlugin.h"
#include "PluginLogging.h"
PluginManager* PluginManager::getInstance() {
@ -87,10 +88,10 @@ const LoaderList& getLoadedPlugins() {
QDir pluginDir(pluginPath);
pluginDir.setFilter(QDir::Files);
if (pluginDir.exists()) {
qDebug() << "Loading runtime plugins from " << pluginPath;
qInfo() << "Loading runtime plugins from " << pluginPath;
auto candidates = pluginDir.entryList();
for (auto plugin : candidates) {
qDebug() << "Attempting plugin" << qPrintable(plugin);
qCDebug(plugins) << "Attempting plugin" << qPrintable(plugin);
QSharedPointer<QPluginLoader> loader(new QPluginLoader(pluginPath + plugin));
if (isDisabled(loader->metaData())) {
@ -100,11 +101,11 @@ const LoaderList& getLoadedPlugins() {
}
if (loader->load()) {
qDebug() << "Plugin" << qPrintable(plugin) << "loaded successfully";
qCDebug(plugins) << "Plugin" << qPrintable(plugin) << "loaded successfully";
loadedPlugins.push_back(loader);
} else {
qDebug() << "Plugin" << qPrintable(plugin) << "failed to load:";
qDebug() << " " << qPrintable(loader->errorString());
qCDebug(plugins) << "Plugin" << qPrintable(plugin) << "failed to load:";
qCDebug(plugins) << " " << qPrintable(loader->errorString());
}
}
}
@ -139,7 +140,7 @@ const CodecPluginList& PluginManager::getCodecPlugins() {
plugin->setContainer(_container);
plugin->init();
qDebug() << "init codec:" << plugin->getName();
qCDebug(plugins) << "init codec:" << plugin->getName();
}
});
return codecPlugins;
@ -157,11 +158,11 @@ static DisplayPluginList displayPlugins;
const DisplayPluginList& PluginManager::getDisplayPlugins() {
static std::once_flag once;
static auto deviceAddedCallback = [](QString deviceName) {
qDebug() << "Added device: " << deviceName;
qCDebug(plugins) << "Added device: " << deviceName;
UserActivityLogger::getInstance().connectedDevice("display", deviceName);
};
static auto subdeviceAddedCallback = [](QString pluginName, QString deviceName) {
qDebug() << "Added subdevice: " << deviceName;
qCDebug(plugins) << "Added subdevice: " << deviceName;
UserActivityLogger::getInstance().connectedDevice("display", pluginName + " | " + deviceName);
};
@ -204,11 +205,11 @@ const InputPluginList& PluginManager::getInputPlugins() {
static InputPluginList inputPlugins;
static std::once_flag once;
static auto deviceAddedCallback = [](QString deviceName) {
qDebug() << "Added device: " << deviceName;
qCDebug(plugins) << "Added device: " << deviceName;
UserActivityLogger::getInstance().connectedDevice("input", deviceName);
};
static auto subdeviceAddedCallback = [](QString pluginName, QString deviceName) {
qDebug() << "Added subdevice: " << deviceName;
qCDebug(plugins) << "Added subdevice: " << deviceName;
UserActivityLogger::getInstance().connectedDevice("input", pluginName + " | " + deviceName);
};

View file

@ -121,7 +121,9 @@ bool Procedural::parseShader(const QUrl& shaderPath) {
if (_shaderUrl.isLocalFile()) {
_shaderPath = _shaderUrl.toLocalFile();
#if WANT_DEBUG
qDebug() << "Shader path: " << _shaderPath;
#endif
if (!QFile(_shaderPath).exists()) {
_networkShader.reset();
return false;;

View file

@ -767,72 +767,79 @@ void disableQtBearerPoll() {
void printSystemInformation() {
// Write system information to log
qDebug() << "Build Information";
qDebug().noquote() << "\tBuild ABI: " << QSysInfo::buildAbi();
qDebug().noquote() << "\tBuild CPU Architecture: " << QSysInfo::buildCpuArchitecture();
qCDebug(shared) << "Build Information";
qCDebug(shared).noquote() << "\tBuild ABI: " << QSysInfo::buildAbi();
qCDebug(shared).noquote() << "\tBuild CPU Architecture: " << QSysInfo::buildCpuArchitecture();
qDebug().noquote() << "System Information";
qDebug().noquote() << "\tProduct Name: " << QSysInfo::prettyProductName();
qDebug().noquote() << "\tCPU Architecture: " << QSysInfo::currentCpuArchitecture();
qDebug().noquote() << "\tKernel Type: " << QSysInfo::kernelType();
qDebug().noquote() << "\tKernel Version: " << QSysInfo::kernelVersion();
qCDebug(shared).noquote() << "System Information";
qCDebug(shared).noquote() << "\tProduct Name: " << QSysInfo::prettyProductName();
qCDebug(shared).noquote() << "\tCPU Architecture: " << QSysInfo::currentCpuArchitecture();
qCDebug(shared).noquote() << "\tKernel Type: " << QSysInfo::kernelType();
qCDebug(shared).noquote() << "\tKernel Version: " << QSysInfo::kernelVersion();
auto macVersion = QSysInfo::macVersion();
if (macVersion != QSysInfo::MV_None) {
qDebug() << "\tMac Version: " << macVersion;
qCDebug(shared) << "\tMac Version: " << macVersion;
}
auto windowsVersion = QSysInfo::windowsVersion();
if (windowsVersion != QSysInfo::WV_None) {
qDebug() << "\tWindows Version: " << windowsVersion;
qCDebug(shared) << "\tWindows Version: " << windowsVersion;
}
#ifdef Q_OS_WIN
SYSTEM_INFO si;
GetNativeSystemInfo(&si);
qDebug() << "SYSTEM_INFO";
qDebug().noquote() << "\tOEM ID: " << si.dwOemId;
qDebug().noquote() << "\tProcessor Architecture: " << si.wProcessorArchitecture;
qDebug().noquote() << "\tProcessor Type: " << si.dwProcessorType;
qDebug().noquote() << "\tProcessor Level: " << si.wProcessorLevel;
qDebug().noquote() << "\tProcessor Revision: "
qCDebug(shared) << "SYSTEM_INFO";
qCDebug(shared).noquote() << "\tOEM ID: " << si.dwOemId;
qCDebug(shared).noquote() << "\tProcessor Architecture: " << si.wProcessorArchitecture;
qCDebug(shared).noquote() << "\tProcessor Type: " << si.dwProcessorType;
qCDebug(shared).noquote() << "\tProcessor Level: " << si.wProcessorLevel;
qCDebug(shared).noquote() << "\tProcessor Revision: "
<< QString("0x%1").arg(si.wProcessorRevision, 4, 16, QChar('0'));
qDebug().noquote() << "\tNumber of Processors: " << si.dwNumberOfProcessors;
qDebug().noquote() << "\tPage size: " << si.dwPageSize << " Bytes";
qDebug().noquote() << "\tMin Application Address: "
qCDebug(shared).noquote() << "\tNumber of Processors: " << si.dwNumberOfProcessors;
qCDebug(shared).noquote() << "\tPage size: " << si.dwPageSize << " Bytes";
qCDebug(shared).noquote() << "\tMin Application Address: "
<< QString("0x%1").arg(qulonglong(si.lpMinimumApplicationAddress), 16, 16, QChar('0'));
qDebug().noquote() << "\tMax Application Address: "
qCDebug(shared).noquote() << "\tMax Application Address: "
<< QString("0x%1").arg(qulonglong(si.lpMaximumApplicationAddress), 16, 16, QChar('0'));
const double BYTES_TO_MEGABYTE = 1.0 / (1024 * 1024);
qDebug() << "MEMORYSTATUSEX";
qCDebug(shared) << "MEMORYSTATUSEX";
MEMORYSTATUSEX ms;
ms.dwLength = sizeof(ms);
if (GlobalMemoryStatusEx(&ms)) {
qDebug().noquote() << QString("\tCurrent System Memory Usage: %1%").arg(ms.dwMemoryLoad);
qDebug().noquote() << QString("\tAvail Physical Memory: %1 MB").arg(ms.ullAvailPhys * BYTES_TO_MEGABYTE, 20, 'f', 2);
qDebug().noquote() << QString("\tTotal Physical Memory: %1 MB").arg(ms.ullTotalPhys * BYTES_TO_MEGABYTE, 20, 'f', 2);
qDebug().noquote() << QString("\tAvail in Page File: %1 MB").arg(ms.ullAvailPageFile * BYTES_TO_MEGABYTE, 20, 'f', 2);
qDebug().noquote() << QString("\tTotal in Page File: %1 MB").arg(ms.ullTotalPageFile * BYTES_TO_MEGABYTE, 20, 'f', 2);
qDebug().noquote() << QString("\tAvail Virtual Memory: %1 MB").arg(ms.ullAvailVirtual * BYTES_TO_MEGABYTE, 20, 'f', 2);
qDebug().noquote() << QString("\tTotal Virtual Memory: %1 MB").arg(ms.ullTotalVirtual * BYTES_TO_MEGABYTE, 20, 'f', 2);
qCDebug(shared).noquote()
<< QString("\tCurrent System Memory Usage: %1%").arg(ms.dwMemoryLoad);
qCDebug(shared).noquote()
<< QString("\tAvail Physical Memory: %1 MB").arg(ms.ullAvailPhys * BYTES_TO_MEGABYTE, 20, 'f', 2);
qCDebug(shared).noquote()
<< QString("\tTotal Physical Memory: %1 MB").arg(ms.ullTotalPhys * BYTES_TO_MEGABYTE, 20, 'f', 2);
qCDebug(shared).noquote()
<< QString("\tAvail in Page File: %1 MB").arg(ms.ullAvailPageFile * BYTES_TO_MEGABYTE, 20, 'f', 2);
qCDebug(shared).noquote()
<< QString("\tTotal in Page File: %1 MB").arg(ms.ullTotalPageFile * BYTES_TO_MEGABYTE, 20, 'f', 2);
qCDebug(shared).noquote()
<< QString("\tAvail Virtual Memory: %1 MB").arg(ms.ullAvailVirtual * BYTES_TO_MEGABYTE, 20, 'f', 2);
qCDebug(shared).noquote()
<< QString("\tTotal Virtual Memory: %1 MB").arg(ms.ullTotalVirtual * BYTES_TO_MEGABYTE, 20, 'f', 2);
} else {
qDebug() << "\tFailed to retrieve memory status: " << GetLastError();
qCDebug(shared) << "\tFailed to retrieve memory status: " << GetLastError();
}
qDebug() << "CPUID";
qCDebug(shared) << "CPUID";
qDebug() << "\tCPU Vendor: " << CPUIdent::Vendor().c_str();
qDebug() << "\tCPU Brand: " << CPUIdent::Brand().c_str();
qCDebug(shared) << "\tCPU Vendor: " << CPUIdent::Vendor().c_str();
qCDebug(shared) << "\tCPU Brand: " << CPUIdent::Brand().c_str();
for (auto& feature : CPUIdent::getAllFeatures()) {
qDebug().nospace().noquote() << "\t[" << (feature.supported ? "x" : " ") << "] " << feature.name.c_str();
qCDebug(shared).nospace().noquote() << "\t[" << (feature.supported ? "x" : " ") << "] " << feature.name.c_str();
}
#endif
qDebug() << "Environment Variables";
qCDebug(shared) << "Environment Variables";
// List of env variables to include in the log. For privacy reasons we don't send all env variables.
const QStringList envWhitelist = {
"QTWEBENGINE_REMOTE_DEBUGGING"
@ -840,7 +847,7 @@ void printSystemInformation() {
auto envVariables = QProcessEnvironment::systemEnvironment();
for (auto& env : envWhitelist)
{
qDebug().noquote().nospace() << "\t" <<
qCDebug(shared).noquote().nospace() << "\t" <<
(envVariables.contains(env) ? " = " + envVariables.value(env) : " NOT FOUND");
}
}
@ -869,4 +876,4 @@ bool getMemoryInfo(MemoryInfo& info) {
#endif
return false;
}
}

View file

@ -11,12 +11,13 @@
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
/* global setEntityCustomData, getEntityCustomData, flatten, Xform, Script, Quat, Vec3, MyAvatar, Entities, Overlays, Settings, Reticle, Controller, Camera, Messages, Mat4 */
/* global setEntityCustomData, getEntityCustomData, flatten, Xform, Script, Quat, Vec3, MyAvatar, Entities, Overlays, Settings, Reticle, Controller, Camera, Messages, Mat4, getControllerWorldLocation, getGrabPointSphereOffset */
(function() { // BEGIN LOCAL_SCOPE
Script.include("/~/system/libraries/utils.js");
Script.include("/~/system/libraries/Xform.js");
Script.include("/~/system/libraries/controllers.js");
//
// add lines where the hand ray picking is happening
@ -55,12 +56,6 @@ var HAND_HEAD_MIX_RATIO = 0.0; // 0 = only use hands for search/move. 1 = only
var PICK_WITH_HAND_RAY = true;
var EQUIP_SPHERE_COLOR = {
red: 116,
green: 90,
blue: 238
};
var EQUIP_SPHERE_ALPHA = 0.15;
var EQUIP_SPHERE_SCALE_FACTOR = 0.65;
@ -106,19 +101,16 @@ var MAX_EQUIP_HOTSPOT_RADIUS = 1.0;
var NEAR_GRABBING_ACTION_TIMEFRAME = 0.05; // how quickly objects move to their new position
var NEAR_GRAB_RADIUS = 0.07; // radius used for palm vs object for near grabbing.
var NEAR_GRAB_RADIUS = 0.04; // radius used for palm vs object for near grabbing.
var NEAR_GRAB_MAX_DISTANCE = 1.0; // you cannot grab objects that are this far away from your hand
var NEAR_GRAB_PICK_RADIUS = 0.25; // radius used for search ray vs object for near grabbing.
var PICK_BACKOFF_DISTANCE = 0.2; // helps when hand is intersecting the grabbable object
var NEAR_GRABBING_KINEMATIC = true; // force objects to be kinematic when near-grabbed
// if an equipped item is "adjusted" to be too far from the hand it's in, it will be unequipped.
var CHECK_TOO_FAR_UNEQUIP_TIME = 0.3; // seconds, duration between checks
var GRAB_POINT_SPHERE_OFFSET = { x: 0.0, y: 0.2, z: 0.0 };
var GRAB_POINT_SPHERE_RADIUS = NEAR_GRAB_RADIUS;
var GRAB_POINT_SPHERE_COLOR = { red: 20, green: 90, blue: 238 };
var GRAB_POINT_SPHERE_ALPHA = 0.85;
@ -207,6 +199,8 @@ var CONTROLLER_STATE_MACHINE = {};
var mostRecentSearchingHand = RIGHT_HAND;
var DEFAULT_SPHERE_MODEL_URL = "http://hifi-content.s3.amazonaws.com/alan/dev/equip-Fresnel-3.fbx";
CONTROLLER_STATE_MACHINE[STATE_OFF] = {
name: "off",
enterMethod: "offEnter",
@ -607,43 +601,21 @@ EquipHotspotBuddy.prototype.updateHotspot = function(hotspot, timestamp) {
var diameter = hotspot.radius * 2;
if (hotspot.modelURL) {
// override default sphere with a user specified model
overlayInfoSet.overlays.push(Overlays.addOverlay("model", {
url: hotspot.modelURL,
position: hotspot.worldPosition,
rotation: {
x: 0,
y: 0,
z: 0,
w: 1
},
dimensions: diameter * EQUIP_SPHERE_SCALE_FACTOR,
scale: hotspot.modelScale,
ignoreRayIntersection: true
}));
overlayInfoSet.type = "model";
} else {
// default sphere overlay
overlayInfoSet.overlays.push(Overlays.addOverlay("sphere", {
position: hotspot.worldPosition,
rotation: {
x: 0,
y: 0,
z: 0,
w: 1
},
dimensions: diameter * EQUIP_SPHERE_SCALE_FACTOR,
color: EQUIP_SPHERE_COLOR,
alpha: EQUIP_SPHERE_ALPHA,
solid: true,
visible: true,
ignoreRayIntersection: true,
drawInFront: false
}));
overlayInfoSet.type = "sphere";
}
// override default sphere with a user specified model, if it exists.
overlayInfoSet.overlays.push(Overlays.addOverlay("model", {
url: hotspot.modelURL ? hotspot.modelURL : DEFAULT_SPHERE_MODEL_URL,
position: hotspot.worldPosition,
rotation: {
x: 0,
y: 0,
z: 0,
w: 1
},
dimensions: diameter * EQUIP_SPHERE_SCALE_FACTOR,
scale: hotspot.modelScale,
ignoreRayIntersection: true
}));
overlayInfoSet.type = "model";
this.map[hotspot.key] = overlayInfoSet;
} else {
overlayInfoSet.timestamp = timestamp;
@ -722,6 +694,7 @@ var equipHotspotBuddy = new EquipHotspotBuddy();
function MyController(hand) {
this.hand = hand;
this.autoUnequipCounter = 0;
this.grabPointIntersectsEntity = false;
// handPosition is where the avatar's hand appears to be, in-world.
this.getHandPosition = function () {
@ -738,19 +711,9 @@ function MyController(hand) {
return MyAvatar.getLeftPalmRotation();
}
};
// controllerLocation is where the controller would be, in-world.
this.getControllerLocation = function (doOffset) {
var standardControllerValue = (hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
var pose = Controller.getPoseValue(standardControllerValue);
var orientation = Quat.multiply(MyAvatar.orientation, pose.rotation);
var position = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, pose.translation), MyAvatar.position);
// add to the real position so the grab-point is out in front of the hand, a bit
if (doOffset) {
position = Vec3.sum(position, Vec3.multiplyQbyV(orientation, GRAB_POINT_SPHERE_OFFSET));
}
return {position: position, orientation: orientation};
this.handToController = function() {
return (hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
};
this.actionID = null; // action this script created...
@ -866,7 +829,7 @@ function MyController(hand) {
}
if (!this.grabPointSphere) {
this.grabPointSphere = Overlays.addOverlay("sphere", {
localPosition: GRAB_POINT_SPHERE_OFFSET,
localPosition: getGrabPointSphereOffset(this.handToController()),
localRotation: { x: 0, y: 0, z: 0, w: 1 },
dimensions: GRAB_POINT_SPHERE_RADIUS,
color: GRAB_POINT_SPHERE_COLOR,
@ -1094,20 +1057,28 @@ function MyController(hand) {
}
if (!this.waitForTriggerRelease && this.triggerSmoothedSqueezed()) {
this.lastPickTime = 0;
this.startingHandRotation = this.getControllerLocation(true).orientation;
this.startingHandRotation = getControllerWorldLocation(this.handToController(), true).orientation;
if (this.triggerSmoothedSqueezed()) {
this.setState(STATE_SEARCHING, "trigger squeeze detected");
return;
}
}
this.grabPointSphereOn();
var candidateEntities = Entities.findEntities(this.getControllerLocation(true).position, MAX_EQUIP_HOTSPOT_RADIUS);
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var worldHandPosition = controllerLocation.position;
if (controllerLocation.valid) {
this.grabPointSphereOn();
} else {
this.grabPointSphereOff();
}
var candidateEntities = Entities.findEntities(worldHandPosition, MAX_EQUIP_HOTSPOT_RADIUS);
entityPropertiesCache.addEntities(candidateEntities);
var potentialEquipHotspot = this.chooseBestEquipHotspot(candidateEntities);
if (!this.waitForTriggerRelease) {
this.updateEquipHaptics(potentialEquipHotspot, this.getControllerLocation(true).position);
this.updateEquipHaptics(potentialEquipHotspot, worldHandPosition);
}
var nearEquipHotspots = this.chooseNearEquipHotspots(candidateEntities, EQUIP_HOTSPOT_RENDER_RADIUS);
@ -1115,6 +1086,20 @@ function MyController(hand) {
if (potentialEquipHotspot) {
equipHotspotBuddy.highlightHotspot(potentialEquipHotspot);
}
// when the grab-point enters a grabable entity, give a haptic pulse
candidateEntities = Entities.findEntities(worldHandPosition, NEAR_GRAB_RADIUS);
var grabbableEntities = candidateEntities.filter(function(entity) {
return _this.entityIsNearGrabbable(entity, worldHandPosition, NEAR_GRAB_MAX_DISTANCE);
});
if (grabbableEntities.length > 0) {
if (!this.grabPointIntersectsEntity) {
Controller.triggerHapticPulse(1, 20, this.hand);
this.grabPointIntersectsEntity = true;
}
} else {
this.grabPointIntersectsEntity = false;
}
};
this.clearEquipHaptics = function() {
@ -1144,7 +1129,7 @@ function MyController(hand) {
// @returns {object} returns object with two keys entityID and distance
//
this.calcRayPickInfo = function(hand) {
var controllerLocation = this.getControllerLocation(true);
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var worldHandPosition = controllerLocation.position;
var worldHandRotation = controllerLocation.orientation;
@ -1168,18 +1153,11 @@ function MyController(hand) {
}
this.lastPickTime = now;
var directionNormalized = Vec3.normalize(pickRay.direction);
var directionBacked = Vec3.multiply(directionNormalized, PICK_BACKOFF_DISTANCE);
var pickRayBacked = {
origin: Vec3.subtract(pickRay.origin, directionBacked),
direction: pickRay.direction
};
var intersection;
if (USE_BLACKLIST === true && blacklist.length !== 0) {
intersection = findRayIntersection(pickRayBacked, true, [], blacklist);
intersection = findRayIntersection(pickRay, true, [], blacklist);
} else {
intersection = findRayIntersection(pickRayBacked, true);
intersection = findRayIntersection(pickRay, true);
}
if (intersection.intersects) {
@ -1392,7 +1370,8 @@ function MyController(hand) {
return _this.collectEquipHotspots(entityID);
})).filter(function(hotspot) {
return (_this.hotspotIsEquippable(hotspot) &&
Vec3.distance(hotspot.worldPosition, _this.getControllerLocation(true).position) < hotspot.radius + distance);
Vec3.distance(hotspot.worldPosition, getControllerWorldLocation(_this.handToController(), true).position) <
hotspot.radius + distance);
});
return equippableHotspots;
};
@ -1403,8 +1382,9 @@ function MyController(hand) {
if (equippableHotspots.length > 0) {
// sort by distance
equippableHotspots.sort(function(a, b) {
var aDistance = Vec3.distance(a.worldPosition, this.getControllerLocation(true).position);
var bDistance = Vec3.distance(b.worldPosition, this.getControllerLocation(true).position);
var handControllerLocation = getControllerWorldLocation(this.handToController(), true);
var aDistance = Vec3.distance(a.worldPosition, handControllerLocation.position);
var bDistance = Vec3.distance(b.worldPosition, handControllerLocation.position);
return aDistance - bDistance;
});
return equippableHotspots[0];
@ -1430,8 +1410,6 @@ function MyController(hand) {
this.isInitialGrab = false;
this.shouldResetParentOnRelease = false;
this.grabPointSphereOn();
this.checkForStrayChildren();
if (this.triggerSmoothedReleased()) {
@ -1439,7 +1417,14 @@ function MyController(hand) {
return;
}
var handPosition = this.getControllerLocation(true).position;
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var handPosition = controllerLocation.position;
if (controllerLocation.valid) {
this.grabPointSphereOn();
} else {
this.grabPointSphereOff();
}
var rayPickInfo = this.calcRayPickInfo(this.hand);
@ -1624,7 +1609,7 @@ function MyController(hand) {
this.clearEquipHaptics();
this.grabPointSphereOff();
var worldControllerPosition = this.getControllerLocation(true).position;
var worldControllerPosition = getControllerWorldLocation(this.handToController(), true).position;
// transform the position into room space
var worldToSensorMat = Mat4.inverse(MyAvatar.getSensorToWorldMatrix());
@ -1642,7 +1627,8 @@ function MyController(hand) {
this.grabRadius = Vec3.distance(this.currentObjectPosition, worldControllerPosition);
this.grabRadialVelocity = 0.0;
// compute a constant based on the initial conditions which we use below to exagerate hand motion onto the held object
// compute a constant based on the initial conditions which we use below to exaggerate hand motion
// onto the held object
this.radiusScalar = Math.log(this.grabRadius + 1.0);
if (this.radiusScalar < 1.0) {
this.radiusScalar = 1.0;
@ -1668,7 +1654,7 @@ function MyController(hand) {
this.actionTimeout = now + (ACTION_TTL * MSECS_PER_SEC);
if (this.actionID !== null) {
this.activateEntity(this.grabbedEntity, grabbedProperties, false);
this.activateEntity(this.grabbedEntity, grabbedProperties, false, true);
this.callEntityMethodOnGrabbed("startDistanceGrab");
}
@ -1683,14 +1669,22 @@ function MyController(hand) {
if (!this.triggerClicked) {
this.callEntityMethodOnGrabbed("releaseGrab");
// if we distance hold something and keep it very still before releasing it, it ends up
// non-dynamic in bullet. If it's too still, give it a little bounce so it will fall.
var velocity = Entities.getEntityProperties(this.grabbedEntity, ["velocity"]).velocity;
if (Vec3.length(velocity) < 0.05) { // see EntityMotionState.cpp DYNAMIC_LINEAR_VELOCITY_THRESHOLD
velocity = { x: 0.0, y: 0.2, z:0.0 };
Entities.editEntity(this.grabbedEntity, { velocity: velocity });
}
this.setState(STATE_OFF, "trigger released");
return;
}
this.heartBeat(this.grabbedEntity);
var controllerLocation = this.getControllerLocation(true);
var controllerLocation = getControllerWorldLocation(this.handToController(), true);
var worldControllerPosition = controllerLocation.position;
var worldControllerRotation = controllerLocation.orientation;
@ -1730,13 +1724,24 @@ function MyController(hand) {
var newRadialVelocity = Vec3.dot(lastVelocity, delta);
var VELOCITY_AVERAGING_TIME = 0.016;
this.grabRadialVelocity = (deltaObjectTime / VELOCITY_AVERAGING_TIME) * newRadialVelocity +
(1.0 - (deltaObjectTime / VELOCITY_AVERAGING_TIME)) * this.grabRadialVelocity;
var blendFactor = deltaObjectTime / VELOCITY_AVERAGING_TIME;
if (blendFactor < 0.0) {
blendFactor = 0.0;
} else if (blendFactor > 1.0) {
blendFactor = 1.0;
}
this.grabRadialVelocity = blendFactor * newRadialVelocity + (1.0 - blendFactor) * this.grabRadialVelocity;
var RADIAL_GRAB_AMPLIFIER = 10.0;
if (Math.abs(this.grabRadialVelocity) > 0.0) {
this.grabRadius = this.grabRadius + (this.grabRadialVelocity * deltaObjectTime *
this.grabRadius * RADIAL_GRAB_AMPLIFIER);
this.grabRadius * RADIAL_GRAB_AMPLIFIER);
}
// don't let grabRadius go all the way to zero, because it can't come back from that
var MINIMUM_GRAB_RADIUS = 0.1;
if (this.grabRadius < MINIMUM_GRAB_RADIUS) {
this.grabRadius = MINIMUM_GRAB_RADIUS;
}
var newTargetPosition = Vec3.multiply(this.grabRadius, Quat.getUp(worldControllerRotation));
@ -1827,7 +1832,7 @@ function MyController(hand) {
};
this.dropGestureProcess = function(deltaTime) {
var worldHandRotation = this.getControllerLocation(true).orientation;
var worldHandRotation = getControllerWorldLocation(this.handToController(), true).orientation;
var localHandUpAxis = this.hand === RIGHT_HAND ? {
x: 1,
y: 0,
@ -1893,7 +1898,7 @@ function MyController(hand) {
}
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
this.activateEntity(this.grabbedEntity, grabbedProperties, false);
this.activateEntity(this.grabbedEntity, grabbedProperties, false, false);
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
if (FORCE_IGNORE_IK) {
@ -1905,7 +1910,7 @@ function MyController(hand) {
var handRotation;
var handPosition;
if (this.ignoreIK) {
var controllerLocation = this.getControllerLocation(false);
var controllerLocation = getControllerWorldLocation(this.handToController(), false);
handRotation = controllerLocation.orientation;
handPosition = controllerLocation.position;
} else {
@ -2085,7 +2090,7 @@ function MyController(hand) {
if (props.parentID == MyAvatar.sessionUUID) {
var handPosition;
if (this.ignoreIK) {
handPosition = this.getControllerLocation(false).position;
handPosition = getControllerWorldLocation(this.handToController(), false).position;
} else {
handPosition = this.getHandPosition();
}
@ -2201,8 +2206,8 @@ function MyController(hand) {
}
var pickRay = {
origin: this.getControllerLocation().position,
direction: Quat.getUp(this.getControllerLocation().orientation)
origin: getControllerWorldLocation(this.handToController(), false).position,
direction: Quat.getUp(getControllerWorldLocation(this.handToController(), false).orientation)
};
var now = Date.now();
@ -2231,7 +2236,8 @@ function MyController(hand) {
this.entityTouchingEnter = function() {
// test for intersection between controller laser and web entity plane.
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.getControllerLocation(true));
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity,
getControllerWorldLocation(this.handToController(), true));
if (intersectInfo) {
var pointerEvent = {
type: "Press",
@ -2256,7 +2262,8 @@ function MyController(hand) {
this.entityTouchingExit = function() {
// test for intersection between controller laser and web entity plane.
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.getControllerLocation(true));
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity,
getControllerWorldLocation(this.handToController(), true));
if (intersectInfo) {
var pointerEvent;
if (this.deadspotExpired) {
@ -2295,7 +2302,8 @@ function MyController(hand) {
}
// test for intersection between controller laser and web entity plane.
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.getControllerLocation(true));
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity,
getControllerWorldLocation(this.handToController(), true));
if (intersectInfo) {
if (Entities.keyboardFocusEntity != this.grabbedEntity) {
@ -2400,7 +2408,7 @@ function MyController(hand) {
this.deactivateEntity(entityID, false);
};
this.activateEntity = function(entityID, grabbedProperties, wasLoaded) {
this.activateEntity = function(entityID, grabbedProperties, wasLoaded, collideWithStatic) {
this.autoUnequipCounter = 0;
if (this.entityActivated) {
@ -2441,15 +2449,10 @@ function MyController(hand) {
data.parentJointIndex = grabbedProperties.parentJointIndex;
var whileHeldProperties = {
gravity: {
x: 0,
y: 0,
z: 0
},
// bummer, it isn't easy to do bitwise collisionMask operations like this:
// "collisionMask": COLLISION_MASK_WHILE_GRABBED | grabbedProperties.collisionMask
// when using string values
"collidesWith": COLLIDES_WITH_WHILE_GRABBED
gravity: { x: 0, y: 0, z: 0 },
"collidesWith": collideWithStatic ?
COLLIDES_WITH_WHILE_GRABBED + ",static" :
COLLIDES_WITH_WHILE_GRABBED
};
Entities.editEntity(entityID, whileHeldProperties);
} else if (data.refCount > 1) {
@ -2458,7 +2461,7 @@ function MyController(hand) {
// deactivate it before grabbing.
this.resetAbandonedGrab(entityID);
grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
return this.activateEntity(entityID, grabbedProperties, wasLoaded);
return this.activateEntity(entityID, grabbedProperties, wasLoaded, false);
}
this.isInitialGrab = false;

View file

@ -20,6 +20,7 @@
// When partially squeezing over a HUD element, a laser or the reticle is shown where the active hand
// controller beam intersects the HUD.
Script.include("/~/system/libraries/controllers.js");
// UTILITIES -------------
//
@ -203,16 +204,13 @@ function overlayFromWorldPoint(point) {
}
function activeHudPoint2d(activeHand) { // if controller is valid, update reticle position and answer 2d point. Otherwise falsey.
var controllerPose = Controller.getPoseValue(activeHand);
// Valid if any plugged-in hand controller is "on". (uncradled Hydra, green-lighted Vive...)
var controllerPose = getControllerWorldLocation(activeHand, true);
if (!controllerPose.valid) {
return; // Controller is cradled.
}
var controllerPosition = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, controllerPose.translation),
MyAvatar.position);
// This gets point direction right, but if you want general quaternion it would be more complicated:
var controllerDirection = Quat.getUp(Quat.multiply(MyAvatar.orientation, controllerPose.rotation));
var controllerPosition = controllerPose.position;
var controllerDirection = Quat.getUp(controllerPose.rotation);
var hudPoint3d = calculateRayUICollisionPoint(controllerPosition, controllerDirection);
if (!hudPoint3d) {
if (Menu.isOptionChecked("Overlays")) { // With our hud resetting strategy, hudPoint3d should be valid here

View file

@ -0,0 +1,46 @@
// controllers.js
//
// Created by Seth Alves on 2016-9-7
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
/* global MyAvatar, Vec3, Controller, Quat */

// Previously-tried grab-point offsets, kept for reference:
// var GRAB_POINT_SPHERE_OFFSET = { x: 0, y: 0.2, z: 0 };
// var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.175, z: 0.04 };

// Offset (in meters, controller-local space) from the controller pose to the
// grab point, for the RIGHT hand; the left hand mirrors x (see
// getGrabPointSphereOffset below).
// this offset needs to match the one in libraries/display-plugins/src/display-plugins/hmd/HmdDisplayPlugin.cpp
var GRAB_POINT_SPHERE_OFFSET = { x: 0.1, y: 0.32, z: 0.04 };
// Returns the grab-point offset for the given hand controller.
// The canonical offset is defined for the right hand; the left hand
// gets the same offset mirrored across the x axis.
getGrabPointSphereOffset = function(handController) {
    var offset = GRAB_POINT_SPHERE_OFFSET;
    if (handController !== Controller.Standard.RightHand) {
        // mirror for the left hand
        offset = {
            x: -offset.x,
            y: offset.y,
            z: offset.z
        };
    }
    return offset;
};
// controllerWorldLocation is where the controller would be, in-world, with an added offset
getControllerWorldLocation = function (handController, doOffset) {
var orientation;
var position;
var pose = Controller.getPoseValue(handController);
if (pose.valid) {
orientation = Quat.multiply(MyAvatar.orientation, pose.rotation);
position = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, pose.translation), MyAvatar.position);
// add to the real position so the grab-point is out in front of the hand, a bit
if (doOffset) {
position = Vec3.sum(position, Vec3.multiplyQbyV(orientation, getGrabPointSphereOffset(handController)));
}
}
return {position: position,
translation: position,
orientation: orientation,
rotation: orientation,
valid: pose.valid};
};

View file

@ -1022,6 +1022,9 @@ SelectionDisplay = (function() {
// No switching while the other is already triggered, so no need to release.
activeHand = (activeHand === Controller.Standard.RightHand) ? Controller.Standard.LeftHand : Controller.Standard.RightHand;
}
if (Reticle.pointingAtSystemOverlay || Overlays.getOverlayAtPoint(Reticle.position)) {
return;
}
var eventResult = that.mousePressEvent({});
if (!eventResult || (eventResult === 'selectionBox')) {
var pickRay = controllerComputePickRay();

View file

@ -10,9 +10,13 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* global Toolbars, Script, Users, Overlays, AvatarList, Controller, Camera, getControllerWorldLocation */
(function() { // BEGIN LOCAL_SCOPE
Script.include("/~/system/libraries/controllers.js");
// grab the toolbar
var toolbar = Toolbars.getToolbar("com.highfidelity.interface.toolbar.system");
@ -144,7 +148,7 @@ AvatarList.avatarRemovedEvent.connect(function(avatarID){
function handleSelectedOverlay(clickedOverlay) {
// see this is one of our mod overlays
var modOverlayKeys = Object.keys(modOverlays)
var modOverlayKeys = Object.keys(modOverlays);
for (var i = 0; i < modOverlayKeys.length; ++i) {
var avatarID = modOverlayKeys[i];
var modOverlay = modOverlays[avatarID];
@ -187,13 +191,9 @@ Controller.mousePressEvent.connect(function(event){
var triggerMapping = Controller.newMapping(Script.resolvePath('') + '-click');
function controllerComputePickRay(hand) {
var controllerPose = Controller.getPoseValue(hand);
var controllerPose = getControllerWorldLocation(hand, true);
if (controllerPose.valid) {
var controllerPosition = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, controllerPose.translation),
MyAvatar.position);
// This gets point direction right, but if you want general quaternion it would be more complicated:
var controllerDirection = Quat.getUp(Quat.multiply(MyAvatar.orientation, controllerPose.rotation));
return { origin: controllerPosition, direction: controllerDirection };
return { origin: controllerPose.position, direction: controllerPose.orientation };
}
}

View file

@ -17,7 +17,7 @@
<script type="text/javascript" src="dat.gui.min.js"></script>
<script type="text/javascript" src="underscore-min.js"></script>
<script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script type="text/javascript" src="../html/eventBridgeLoader.js"></script>
<script type="text/javascript" src="../html/js/eventBridgeLoader.js"></script>
<script type="text/javascript" src="particleExplorer.js"></script>
<script>
function loaded() {