Mirror of https://thingvellir.net/git/overte (synced 2025-03-27 23:52:03 +01:00)

Commit e0f0cc2102: Fix the unsupported KTX format; it was coming from the bump map case.

66 changed files with 1440 additions and 1715 deletions
@@ -17,6 +17,7 @@ module.exports = {
     "Clipboard": false,
     "Controller": false,
     "DialogsManager": false,
+    "DebugDraw": false,
     "Entities": false,
     "FaceTracker": false,
     "GlobalServices": false,
@@ -24,7 +24,7 @@
 #include <QtCore/QString>

 #include <SharedUtil.h>
-#include <ServerPathUtils.h>
+#include <PathUtils.h>

 #include "NetworkLogging.h"
 #include "NodeType.h"
@@ -162,7 +162,7 @@ void AssetServer::completeSetup() {
     if (assetsPath.isRelative()) {
         // if the domain settings passed us a relative path, make an absolute path that is relative to the
         // default data directory
-        absoluteFilePath = ServerPathUtils::getDataFilePath("assets/" + assetsPathString);
+        absoluteFilePath = PathUtils::getAppDataFilePath("assets/" + assetsPathString);
     }

     _resourcesDirectory = QDir(absoluteFilePath);
@@ -241,6 +241,7 @@ void AudioMixer::sendStatsPacket() {

     statsObject["avg_streams_per_frame"] = (float)_stats.sumStreams / (float)_numStatFrames;
     statsObject["avg_listeners_per_frame"] = (float)_stats.sumListeners / (float)_numStatFrames;
+    statsObject["avg_listeners_(silent)_per_frame"] = (float)_stats.sumListenersSilent / (float)_numStatFrames;

     statsObject["silent_packets_per_frame"] = (float)_numSilentPackets / (float)_numStatFrames;
@@ -106,6 +106,7 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {

         sendMixPacket(node, *data, encodedBuffer);
     } else {
+        ++stats.sumListenersSilent;
         sendSilentPacket(node, *data);
     }

@@ -221,17 +222,19 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
     stats.mixTime += mixTime.count();
 #endif

-    // use the per listener AudioLimiter to render the mixed data...
-    listenerData->audioLimiter.render(_mixSamples, _bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
-
-    // check for silent audio after the peak limiter has converted the samples
+    // check for silent audio before limiting
+    // limiting uses a dither and can only guarantee abs(sample) <= 1
     bool hasAudio = false;
     for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; ++i) {
-        if (_bufferSamples[i] != 0) {
+        if (_mixSamples[i] != 0.0f) {
             hasAudio = true;
             break;
         }
     }

+    // use the per listener AudioLimiter to render the mixed data
+    listenerData->audioLimiter.render(_mixSamples, _bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+
     return hasAudio;
 }
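The reordering above matters because the limiter dithers when it converts the float mix to int16, so even a perfectly silent mix can leave nonzero integer samples; the silence test therefore has to look at the float mix before rendering. A minimal sketch of that check (illustrative names, not the shipped code):

// Sketch only: an untouched float mix stays exactly 0.0f, so an exact compare is safe here.
bool mixHasAudio(const float* mixSamples, int numSamples) {
    for (int i = 0; i < numSamples; ++i) {
        if (mixSamples[i] != 0.0f) {
            return true;
        }
    }
    return false;
}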
@@ -14,6 +14,7 @@
 void AudioMixerStats::reset() {
     sumStreams = 0;
     sumListeners = 0;
+    sumListenersSilent = 0;
     totalMixes = 0;
     hrtfRenders = 0;
     hrtfSilentRenders = 0;
@@ -28,6 +29,7 @@ void AudioMixerStats::reset() {
 void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
     sumStreams += otherStats.sumStreams;
     sumListeners += otherStats.sumListeners;
+    sumListenersSilent += otherStats.sumListenersSilent;
     totalMixes += otherStats.totalMixes;
     hrtfRenders += otherStats.hrtfRenders;
     hrtfSilentRenders += otherStats.hrtfSilentRenders;
@@ -19,6 +19,7 @@
 struct AudioMixerStats {
     int sumStreams { 0 };
     int sumListeners { 0 };
+    int sumListenersSilent { 0 };

     int totalMixes { 0 };

@ -37,7 +37,6 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
|
|||
|
||||
// FIXME - what we'd actually like to do is send to users at ~50% of their present rate down to 30hz. Assume 90 for now.
|
||||
const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
|
||||
const unsigned int AVATAR_DATA_SEND_INTERVAL_MSECS = (1.0f / (float) AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND) * 1000;
|
||||
|
||||
AvatarMixer::AvatarMixer(ReceivedMessage& message) :
|
||||
ThreadedAssignment(message)
|
||||
|
|
|
@@ -29,7 +29,7 @@
 #include "OctreeQueryNode.h"
 #include "OctreeServerConsts.h"
 #include <QtCore/QStandardPaths>
-#include <ServerPathUtils.h>
+#include <PathUtils.h>
 #include <QtCore/QDir>

 int OctreeServer::_clientCount = 0;
@@ -279,8 +279,7 @@ OctreeServer::~OctreeServer() {

 void OctreeServer::initHTTPManager(int port) {
     // setup the embedded web server
-
-    QString documentRoot = QString("%1/web").arg(ServerPathUtils::getDataDirectory());
+    QString documentRoot = QString("%1/web").arg(PathUtils::getAppDataPath());

     // setup an httpManager with us as the request handler and the parent
     _httpManager = new HTTPManager(QHostAddress::AnyIPv4, port, documentRoot, this, this);
@@ -1179,7 +1178,7 @@ void OctreeServer::domainSettingsRequestComplete() {
     if (persistPath.isRelative()) {
         // if the domain settings passed us a relative path, make an absolute path that is relative to the
         // default data directory
-        persistAbsoluteFilePath = QDir(ServerPathUtils::getDataFilePath("entities/")).absoluteFilePath(_persistFilePath);
+        persistAbsoluteFilePath = QDir(PathUtils::getAppDataFilePath("entities/")).absoluteFilePath(_persistFilePath);
     }

     static const QString ENTITY_PERSIST_EXTENSION = ".json.gz";
@@ -1245,7 +1244,7 @@ void OctreeServer::domainSettingsRequestComplete() {
     QDir backupDirectory { _backupDirectoryPath };
     QString absoluteBackupDirectory;
     if (backupDirectory.isRelative()) {
-        absoluteBackupDirectory = QDir(ServerPathUtils::getDataFilePath("entities/")).absoluteFilePath(_backupDirectoryPath);
+        absoluteBackupDirectory = QDir(PathUtils::getAppDataFilePath("entities/")).absoluteFilePath(_backupDirectoryPath);
         absoluteBackupDirectory = QDir(absoluteBackupDirectory).absolutePath();
     } else {
         absoluteBackupDirectory = backupDirectory.absolutePath();
@@ -38,7 +38,7 @@
 #include <ShutdownEventListener.h>
 #include <UUID.h>
 #include <LogHandler.h>
-#include <ServerPathUtils.h>
+#include <PathUtils.h>
 #include <NumericalConstants.h>

 #include "DomainServerNodeData.h"
@@ -1618,7 +1618,7 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
 QDir pathForAssignmentScriptsDirectory() {
     static const QString SCRIPTS_DIRECTORY_NAME = "/scripts/";

-    QDir directory(ServerPathUtils::getDataDirectory() + SCRIPTS_DIRECTORY_NAME);
+    QDir directory(PathUtils::getAppDataPath() + SCRIPTS_DIRECTORY_NAME);
     if (!directory.exists()) {
         directory.mkpath(".");
         qInfo() << "Created path to " << directory.path();
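All of the server-side hunks above follow the same pattern: a path taken from domain settings is kept as-is when absolute and otherwise resolved under the application data directory. A condensed sketch of that pattern (the helper name and its behavior are read off the call sites in this diff, so treat it as illustrative rather than the PathUtils source):

// Sketch, assuming PathUtils::getAppDataFilePath(subdir) returns <app data dir>/<subdir>
// as the call sites above suggest.
QString resolveAgainstAppData(const QString& settingsPath, const QString& subdir) {
    if (QDir(settingsPath).isRelative()) {
        return QDir(PathUtils::getAppDataFilePath(subdir)).absoluteFilePath(settingsPath);
    }
    return settingsPath;
}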
interface/resources/icons/tablet-icons/scope-auto.svg (new file, 46 lines)
@ -0,0 +1,46 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<g id="Layer_2">
|
||||
</g>
|
||||
<g>
|
||||
<path class="st0" d="M25.3,23.7c-0.6,0-1.2,0.2-1.6,0.7S23,25.4,23,26c0,0.6,0.2,1.1,0.7,1.6c0.5,0.5,1,0.7,1.6,0.7
|
||||
c0.6,0,1.2-0.2,1.6-0.7c0.5-0.5,0.7-1,0.7-1.6c0-0.6-0.2-1.2-0.7-1.6C26.5,23.9,26,23.7,25.3,23.7z"/>
|
||||
<path class="st0" d="M25.3,17.2c-5,0-9,4-9,9c0,5,4,9,9,9s9-4,9-9C34.4,21.2,30.3,17.2,25.3,17.2z M31.5,26c0,0.3-0.1,0.6-0.3,0.8
|
||||
c-0.2,0.2-0.5,0.3-0.8,0.3c-0.2,0-0.3,0-0.5-0.1c-0.1,0-0.3,0-0.5-0.1c-0.2,0.6-0.3,1.1-0.6,1.4c0.2,0.1,0.3,0.2,0.4,0.3h0.1
|
||||
c0.2,0.1,0.4,0.2,0.4,0.2c0.5,0.5,0.5,1.1,0,1.6c-0.2,0.2-0.5,0.3-0.8,0.3c-0.3,0-0.6-0.1-0.8-0.3c0,0-0.1-0.2-0.2-0.4
|
||||
c0,0,0-0.1-0.1-0.1c-0.1-0.1-0.2-0.2-0.2-0.4c-0.4,0.3-0.9,0.5-1.4,0.6c0.1,0.2,0.1,0.4,0.1,0.5c0.1,0.2,0.1,0.3,0.1,0.5
|
||||
c0,0.3-0.1,0.6-0.3,0.8c-0.2,0.2-0.5,0.3-0.8,0.3c-0.3,0-0.6-0.1-0.8-0.3c-0.2-0.2-0.3-0.5-0.3-0.8c0-0.2,0-0.3,0.1-0.5
|
||||
c0-0.1,0-0.3,0.1-0.5c-0.5-0.1-1-0.3-1.4-0.6c-0.1,0.2-0.2,0.3-0.2,0.4L22.8,30c0,0.1-0.1,0.2-0.2,0.4c-0.2,0.2-0.5,0.3-0.8,0.3
|
||||
c-0.3,0-0.6-0.1-0.8-0.3c-0.5-0.5-0.5-1.1,0-1.6c0.1-0.1,0.2-0.2,0.5-0.2c0-0.1,0.2-0.2,0.4-0.3c-0.2-0.3-0.4-0.8-0.6-1.4
|
||||
C21.1,27,20.9,27,20.7,27V27c-0.1,0.1-0.2,0.1-0.5,0.1c-0.3,0-0.6-0.1-0.8-0.3c-0.2-0.2-0.3-0.5-0.3-0.8c0-0.3,0.1-0.6,0.3-0.8
|
||||
c0.2-0.2,0.5-0.3,0.8-0.3c0.2,0,0.3,0,0.5,0.1c0.1,0,0.3,0,0.5,0.1c0.1-0.5,0.3-1,0.6-1.4c-0.1,0-0.2-0.1-0.4-0.2h-0.1
|
||||
c-0.2-0.1-0.3-0.2-0.4-0.3c-0.5-0.5-0.5-1,0-1.5c0.5-0.5,1.1-0.5,1.6,0c0.1,0.1,0.2,0.2,0.2,0.4c0.1,0.1,0.2,0.2,0.3,0.5
|
||||
c0.4-0.3,0.9-0.5,1.4-0.6c-0.1-0.2-0.1-0.3-0.1-0.5v-0.1c-0.1-0.2-0.1-0.3-0.1-0.5c0-0.3,0.1-0.6,0.3-0.8c0.2-0.2,0.5-0.3,0.8-0.3
|
||||
c0.3,0,0.6,0.1,0.8,0.3c0.2,0.2,0.3,0.5,0.3,0.8c0,0.2,0,0.3-0.1,0.5v0.1c0,0.2,0,0.3-0.1,0.5c0.4,0.1,0.9,0.3,1.4,0.6
|
||||
c0-0.2,0.1-0.3,0.2-0.5c0-0.1,0.1-0.2,0.3-0.4c0.2-0.2,0.4-0.3,0.8-0.3c0.3,0,0.6,0.1,0.8,0.3c0.5,0.5,0.5,1.1,0,1.6
|
||||
c-0.2,0.2-0.3,0.2-0.4,0.3c-0.1,0.1-0.2,0.2-0.5,0.2c0.3,0.5,0.5,1,0.6,1.4c0.2-0.1,0.3-0.1,0.5-0.1H30c0.2-0.1,0.3-0.1,0.5-0.1
|
||||
c0.3,0,0.6,0.1,0.8,0.3C31.4,25.5,31.5,25.7,31.5,26L31.5,26z"/>
|
||||
</g>
|
||||
<path class="st0" d="M22.3,15.4v-2.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v3.5C20.7,15.9,21.5,15.6,22.3,15.4z"/>
|
||||
<path class="st0" d="M25.3,15c0.4,0,0.8,0,1.1,0.1V8.6c0-0.6-0.5-1.2-1.2-1.2S24.1,8,24.1,8.6V15C24.5,15,24.9,15,25.3,15z"/>
|
||||
<path class="st0" d="M30.6,16.3V3.6c0-0.6-0.5-1.2-1.2-1.2S28.3,3,28.3,3.6v11.7C29.1,15.6,29.9,15.9,30.6,16.3z"/>
|
||||
<path class="st0" d="M48,24.9h-0.6v-2.1c0-0.6-0.5-1.2-1.2-1.2S45,22.2,45,22.9v2.1h-1.8V12c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2
|
||||
v12.9H39v-9.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2V36c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-8.7h1.8v12.9
|
||||
c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V27.3H45v2.9c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.9H48c0.6,0,1.2-0.5,1.2-1.2
|
||||
S48.6,24.9,48,24.9z"/>
|
||||
<path class="st0" d="M13.9,12c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v12.9H9.7v-4.6c0-0.6-0.5-1.2-1.2-1.2
|
||||
c-0.6,0-1.2,0.5-1.2,1.2v4.6H5.5v-2.1c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2v2.1H2.6c-0.6,0-1.2,0.5-1.2,1.2s0.5,1.2,1.2,1.2h0.6
|
||||
v2.1c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.1h1.8v6.2c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-6.2h1.8v12.1
|
||||
c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V12z"/>
|
||||
<path class="st0" d="M28.3,37v9.8c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V36.1C29.9,36.5,29.1,36.8,28.3,37z"/>
|
||||
<path class="st0" d="M25.3,37.5c-0.4,0-0.8,0-1.2-0.1v4.5c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-4.5
|
||||
C26.1,37.4,25.7,37.5,25.3,37.5z"/>
|
||||
<path class="st0" d="M19.9,36.1v3.3c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V37C21.5,36.8,20.7,36.5,19.9,36.1z"/>
|
||||
<rect x="12" y="24.6" class="st0" width="6.9" height="3"/>
|
||||
<rect x="32.9" y="24.6" class="st0" width="5.1" height="3"/>
|
||||
</svg>
|
After: SVG image, 4 KiB
interface/resources/icons/tablet-icons/scope-pause.svg (new file, 30 lines)
@ -0,0 +1,30 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<g id="Layer_2">
|
||||
</g>
|
||||
<path class="st0" d="M22.3,15.4v-2.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v3.5C20.7,15.9,21.5,15.6,22.3,15.4z"/>
|
||||
<path class="st0" d="M25.3,15c0.4,0,0.8,0,1.1,0.1V8.6c0-0.6-0.5-1.2-1.2-1.2S24.1,8,24.1,8.6V15C24.5,15,24.9,15,25.3,15z"/>
|
||||
<path class="st0" d="M30.6,16.3V3.6c0-0.6-0.5-1.2-1.2-1.2S28.3,3,28.3,3.6v11.7C29.1,15.6,29.9,15.9,30.6,16.3z"/>
|
||||
<path class="st0" d="M48,24.9h-0.6v-2.1c0-0.6-0.5-1.2-1.2-1.2S45,22.2,45,22.9v2.1h-1.8V12c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2
|
||||
v12.9H39v-9.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2V36c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-8.7h1.8v12.9
|
||||
c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V27.3H45v2.9c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.9H48c0.6,0,1.2-0.5,1.2-1.2
|
||||
S48.6,24.9,48,24.9z"/>
|
||||
<path class="st0" d="M13.9,12c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v12.9H9.7v-4.6c0-0.6-0.5-1.2-1.2-1.2
|
||||
c-0.6,0-1.2,0.5-1.2,1.2v4.6H5.5v-2.1c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2v2.1H2.6c-0.6,0-1.2,0.5-1.2,1.2s0.5,1.2,1.2,1.2h0.6
|
||||
v2.1c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.1h1.8v6.2c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-6.2h1.8v12.1
|
||||
c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V12z"/>
|
||||
<path class="st0" d="M28.3,37v9.8c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V36.1C29.9,36.5,29.1,36.8,28.3,37z"/>
|
||||
<path class="st0" d="M25.3,37.5c-0.4,0-0.8,0-1.2-0.1v4.5c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-4.5
|
||||
C26.1,37.4,25.7,37.5,25.3,37.5z"/>
|
||||
<path class="st0" d="M19.9,36.1v3.3c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V37C21.5,36.8,20.7,36.5,19.9,36.1z"/>
|
||||
<rect x="12" y="24.6" class="st0" width="7.3" height="3"/>
|
||||
<rect x="32.9" y="24.6" class="st0" width="5.3" height="3"/>
|
||||
<path class="st0" d="M25.3,17c-5,0-9,4-9,9c0,5,4,9,9,9s9-4,9-9C34.4,21,30.3,17,25.3,17z M24.1,29.7c0,0.5-0.6,1-1.4,1
|
||||
s-1.4-0.4-1.4-1v-7.3c0-0.5,0.6-1,1.4-1s1.4,0.4,1.4,1V29.7z M29.3,29.7c0,0.5-0.6,1-1.4,1c-0.8,0-1.4-0.4-1.4-1v-7.3
|
||||
c0-0.5,0.6-1,1.4-1c0.8,0,1.4,0.4,1.4,1V29.7z"/>
|
||||
</svg>
|
After: SVG image, 2.3 KiB
interface/resources/icons/tablet-icons/scope-play.svg (new file, 30 lines)
@ -0,0 +1,30 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<g id="Layer_2">
|
||||
</g>
|
||||
<path class="st0" d="M22.3,15.4v-2.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v3.5C20.7,15.9,21.5,15.6,22.3,15.4z"/>
|
||||
<path class="st0" d="M25.3,15c0.4,0,0.8,0,1.1,0.1V8.6c0-0.6-0.5-1.2-1.2-1.2S24.1,8,24.1,8.6V15C24.5,15,24.9,15,25.3,15z"/>
|
||||
<path class="st0" d="M30.6,16.3V3.6c0-0.6-0.5-1.2-1.2-1.2S28.3,3,28.3,3.6v11.7C29.1,15.6,29.9,15.9,30.6,16.3z"/>
|
||||
<path class="st0" d="M48,24.9h-0.6v-2.1c0-0.6-0.5-1.2-1.2-1.2S45,22.2,45,22.9v2.1h-1.8V12c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2
|
||||
v12.9H39v-9.6c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2V36c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-8.7h1.8v12.9
|
||||
c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V27.3H45v2.9c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.9H48c0.6,0,1.2-0.5,1.2-1.2
|
||||
S48.6,24.9,48,24.9z"/>
|
||||
<path class="st0" d="M13.9,12c0-0.6-0.5-1.2-1.2-1.2c-0.6,0-1.2,0.5-1.2,1.2v12.9H9.7v-4.6c0-0.6-0.5-1.2-1.2-1.2
|
||||
c-0.6,0-1.2,0.5-1.2,1.2v4.6H5.5v-2.1c0-0.6-0.5-1.2-1.2-1.2s-1.2,0.5-1.2,1.2v2.1H2.6c-0.6,0-1.2,0.5-1.2,1.2s0.5,1.2,1.2,1.2h0.6
|
||||
v2.1c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-2.1h1.8v6.2c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2v-6.2h1.8v12.1
|
||||
c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V12z"/>
|
||||
<path class="st0" d="M28.3,37v9.8c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2V36.1C29.9,36.5,29.1,36.8,28.3,37z"/>
|
||||
<path class="st0" d="M25.3,37.5c-0.4,0-0.8,0-1.2-0.1v4.5c0,0.6,0.5,1.2,1.2,1.2s1.2-0.5,1.2-1.2v-4.5
|
||||
C26.1,37.4,25.7,37.5,25.3,37.5z"/>
|
||||
<path class="st0" d="M19.9,36.1v3.3c0,0.6,0.5,1.2,1.2,1.2c0.6,0,1.2-0.5,1.2-1.2V37C21.5,36.8,20.7,36.5,19.9,36.1z"/>
|
||||
<path class="st0" d="M25.3,17.2c-5,0-9,4-9,9c0,5,4,9,9,9s9-4,9-9C34.4,21.2,30.3,17.2,25.3,17.2z M30.3,26.1l-6,5.4
|
||||
c-0.1,0.1-0.2,0.1-0.4,0.1c-0.1,0-0.3,0-0.3-0.1c-0.3-0.1-0.5-0.4-0.5-0.5V20.9c0-0.1,0.2-0.4,0.4-0.5c0.1,0,0.2-0.1,0.3-0.1
|
||||
c0.2,0,0.3,0,0.4,0.1l6,4.8c0.1,0.1,0.2,0.3,0.2,0.4C30.5,25.8,30.4,26,30.3,26.1z"/>
|
||||
<rect x="12" y="24.6" class="st0" width="7.3" height="3"/>
|
||||
<rect x="32.9" y="24.6" class="st0" width="7.3" height="3"/>
|
||||
</svg>
|
After: SVG image, 2.3 KiB
|
@ -181,6 +181,31 @@ Item {
|
|||
root.avatarMixerOutPps + "pps, " +
|
||||
root.myAvatarSendRate.toFixed(2) + "hz";
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Audio Mixer In: " + root.audioMixerInKbps + " kbps, " +
|
||||
root.audioMixerInPps + "pps";
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Audio In Audio: " + root.audioAudioInboundPPS + " pps, " +
|
||||
"Silent: " + root.audioSilentInboundPPS + " pps";
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Audio Mixer Out: " + root.audioMixerOutKbps + " kbps, " +
|
||||
root.audioMixerOutPps + "pps";
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Audio Out Mic: " + root.audioMicOutboundPPS + " pps, " +
|
||||
"Silent: " + root.audioSilentOutboundPPS + " pps";
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Audio Codec: " + root.audioCodec + " Noise Gate: " +
|
||||
root.audioNoiseGate;
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Downloads: " + root.downloads + "/" + root.downloadLimit +
|
||||
|
|
|
@ -608,6 +608,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
}
|
||||
|
||||
// make sure the debug draw singleton is initialized on the main thread.
|
||||
DebugDraw::getInstance().removeMarker("");
|
||||
|
||||
_runningMarker.startRunningMarker();
|
||||
|
||||
|
@ -1182,6 +1184,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// set the local loopback interface for local sounds
|
||||
AudioInjector::setLocalAudioInterface(audioIO.data());
|
||||
AudioScriptingInterface::getInstance().setLocalAudioInterface(audioIO.data());
|
||||
connect(audioIO.data(), &AudioClient::noiseGateOpened, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateOpened);
|
||||
connect(audioIO.data(), &AudioClient::noiseGateClosed, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::noiseGateClosed);
|
||||
connect(audioIO.data(), &AudioClient::inputReceived, &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::inputReceived);
|
||||
|
||||
|
||||
this->installEventFilter(this);
|
||||
|
||||
|
@ -1439,7 +1445,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
scriptEngines->loadScript(testScript, false);
|
||||
} else {
|
||||
// Get sandbox content set version, if available
|
||||
auto acDirPath = PathUtils::getRootDataDirectory() + BuildInfo::MODIFIED_ORGANIZATION + "/assignment-client/";
|
||||
auto acDirPath = PathUtils::getAppDataPath() + "../../" + BuildInfo::MODIFIED_ORGANIZATION + "/assignment-client/";
|
||||
auto contentVersionPath = acDirPath + "content-version.txt";
|
||||
qCDebug(interfaceapp) << "Checking " << contentVersionPath << " for content version";
|
||||
auto contentVersion = 0;
|
||||
|
@ -1947,6 +1953,8 @@ void Application::initializeUi() {
|
|||
rootContext->setContextProperty("ApplicationInterface", this);
|
||||
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
|
||||
rootContext->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
rootContext->setContextProperty("AudioScope", DependencyManager::get<AudioScope>().data());
|
||||
|
||||
rootContext->setContextProperty("Controller", DependencyManager::get<controller::ScriptingInterface>().data());
|
||||
rootContext->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
|
||||
_fileDownload = new FileScriptingInterface(engine);
|
||||
|
@ -3174,7 +3182,23 @@ void Application::mousePressEvent(QMouseEvent* event) {
|
|||
}
|
||||
}
|
||||
|
||||
void Application::mouseDoublePressEvent(QMouseEvent* event) const {
|
||||
void Application::mouseDoublePressEvent(QMouseEvent* event) {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
auto eventPosition = getApplicationCompositor().getMouseEventPosition(event);
|
||||
QPointF transformedPos = offscreenUi->mapToVirtualScreen(eventPosition, _glWidget);
|
||||
QMouseEvent mappedEvent(event->type(),
|
||||
transformedPos,
|
||||
event->screenPos(), event->button(),
|
||||
event->buttons(), event->modifiers());
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
getOverlays().mouseDoublePressEvent(&mappedEvent);
|
||||
if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
|
||||
getEntities()->mouseDoublePressEvent(&mappedEvent);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// if one of our scripts have asked to capture this event, then stop processing it
|
||||
if (_controllerScriptingInterface->isMouseCaptured()) {
|
||||
return;
|
||||
|
@ -5521,6 +5545,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
scriptEngine->registerGlobalObject("AudioScope", DependencyManager::get<AudioScope>().data());
|
||||
|
||||
// Caches
|
||||
scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());
|
||||
|
|
|
@@ -494,7 +494,7 @@

     void mouseMoveEvent(QMouseEvent* event);
     void mousePressEvent(QMouseEvent* event);
-    void mouseDoublePressEvent(QMouseEvent* event) const;
+    void mouseDoublePressEvent(QMouseEvent* event);
     void mouseReleaseEvent(QMouseEvent* event);

     void touchBeginEvent(QTouchEvent* event);
@@ -52,12 +52,14 @@ AudioScope::AudioScope() :
     connect(audioIO.data(), &AudioClient::inputReceived, this, &AudioScope::addInputToScope);
 }

-void AudioScope::toggle() {
-    _isEnabled = !_isEnabled;
-    if (_isEnabled) {
-        allocateScope();
-    } else {
-        freeScope();
+void AudioScope::setVisible(bool visible) {
+    if (_isEnabled != visible) {
+        _isEnabled = visible;
+        if (_isEnabled) {
+            allocateScope();
+        } else {
+            freeScope();
+        }
     }
 }

@@ -34,8 +34,14 @@ public:
     void render(RenderArgs* renderArgs, int width, int height);

 public slots:
-    void toggle();
+    void toggle() { setVisible(!_isEnabled); }
+    void setVisible(bool visible);
+    bool getVisible() const { return _isEnabled; }

     void togglePause() { _isPaused = !_isPaused; }
+    void setPause(bool paused) { _isPaused = paused; }
+    bool getPause() { return _isPaused; }

     void selectAudioScopeFiveFrames();
     void selectAudioScopeTwentyFrames();
     void selectAudioScopeFiftyFrames();
@@ -74,7 +80,6 @@ private:
     int _inputID;
     int _outputLeftID;
     int _outputRightD;

 };

 #endif // hifi_AudioScope_h
|
|
|
@ -81,6 +81,10 @@ void HMDScriptingInterface::closeTablet() {
|
|||
_showTablet = false;
|
||||
}
|
||||
|
||||
void HMDScriptingInterface::openTablet() {
|
||||
_showTablet = true;
|
||||
}
|
||||
|
||||
QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
|
||||
glm::vec3 hudIntersection;
|
||||
auto instance = DependencyManager::get<HMDScriptingInterface>();
|
||||
|
|
|
@ -76,6 +76,8 @@ public:
|
|||
|
||||
Q_INVOKABLE void closeTablet();
|
||||
|
||||
Q_INVOKABLE void openTablet();
|
||||
|
||||
signals:
|
||||
bool shouldShowHandControllersChanged();
|
||||
|
||||
|
|
|
@ -200,15 +200,16 @@ void Stats::updateStats(bool force) {
|
|||
STAT_UPDATE(avatarMixerInPps, roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AvatarMixer)));
|
||||
STAT_UPDATE(avatarMixerOutKbps, roundf(bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AvatarMixer)));
|
||||
STAT_UPDATE(avatarMixerOutPps, roundf(bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AvatarMixer)));
|
||||
STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
|
||||
} else {
|
||||
STAT_UPDATE(avatarMixerInKbps, -1);
|
||||
STAT_UPDATE(avatarMixerInPps, -1);
|
||||
STAT_UPDATE(avatarMixerOutKbps, -1);
|
||||
STAT_UPDATE(avatarMixerOutPps, -1);
|
||||
STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
|
||||
}
|
||||
STAT_UPDATE(myAvatarSendRate, avatarManager->getMyAvatarSendRate());
|
||||
|
||||
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||
auto audioClient = DependencyManager::get<AudioClient>();
|
||||
if (audioMixerNode || force) {
|
||||
STAT_UPDATE(audioMixerKbps, roundf(
|
||||
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
|
||||
|
@ -216,10 +217,30 @@ void Stats::updateStats(bool force) {
|
|||
STAT_UPDATE(audioMixerPps, roundf(
|
||||
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer) +
|
||||
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
|
||||
|
||||
STAT_UPDATE(audioMixerInKbps, roundf(bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer)));
|
||||
STAT_UPDATE(audioMixerInPps, roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer)));
|
||||
STAT_UPDATE(audioMixerOutKbps, roundf(bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer)));
|
||||
STAT_UPDATE(audioMixerOutPps, roundf(bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
|
||||
STAT_UPDATE(audioMicOutboundPPS, audioClient->getMicAudioOutboundPPS());
|
||||
STAT_UPDATE(audioSilentOutboundPPS, audioClient->getSilentOutboundPPS());
|
||||
STAT_UPDATE(audioAudioInboundPPS, audioClient->getAudioInboundPPS());
|
||||
STAT_UPDATE(audioSilentInboundPPS, audioClient->getSilentInboundPPS());
|
||||
} else {
|
||||
STAT_UPDATE(audioMixerKbps, -1);
|
||||
STAT_UPDATE(audioMixerPps, -1);
|
||||
STAT_UPDATE(audioMixerInKbps, -1);
|
||||
STAT_UPDATE(audioMixerInPps, -1);
|
||||
STAT_UPDATE(audioMixerOutKbps, -1);
|
||||
STAT_UPDATE(audioMixerOutPps, -1);
|
||||
STAT_UPDATE(audioMicOutboundPPS, -1);
|
||||
STAT_UPDATE(audioSilentOutboundPPS, -1);
|
||||
STAT_UPDATE(audioAudioInboundPPS, -1);
|
||||
STAT_UPDATE(audioSilentInboundPPS, -1);
|
||||
}
|
||||
STAT_UPDATE(audioCodec, audioClient->getSelectedAudioFormat());
|
||||
STAT_UPDATE(audioNoiseGate, audioClient->getNoiseGateOpen() ? "Open" : "Closed");
|
||||
|
||||
|
||||
auto loadingRequests = ResourceCache::getLoadingRequests();
|
||||
STAT_UPDATE(downloads, loadingRequests.size());
|
||||
|
|
|
@ -70,8 +70,20 @@ class Stats : public QQuickItem {
|
|||
STATS_PROPERTY(int, avatarMixerOutKbps, 0)
|
||||
STATS_PROPERTY(int, avatarMixerOutPps, 0)
|
||||
STATS_PROPERTY(float, myAvatarSendRate, 0)
|
||||
|
||||
STATS_PROPERTY(int, audioMixerInKbps, 0)
|
||||
STATS_PROPERTY(int, audioMixerInPps, 0)
|
||||
STATS_PROPERTY(int, audioMixerOutKbps, 0)
|
||||
STATS_PROPERTY(int, audioMixerOutPps, 0)
|
||||
STATS_PROPERTY(int, audioMixerKbps, 0)
|
||||
STATS_PROPERTY(int, audioMixerPps, 0)
|
||||
STATS_PROPERTY(int, audioMicOutboundPPS, 0)
|
||||
STATS_PROPERTY(int, audioSilentOutboundPPS, 0)
|
||||
STATS_PROPERTY(int, audioAudioInboundPPS, 0)
|
||||
STATS_PROPERTY(int, audioSilentInboundPPS, 0)
|
||||
STATS_PROPERTY(QString, audioCodec, QString())
|
||||
STATS_PROPERTY(QString, audioNoiseGate, QString())
|
||||
|
||||
STATS_PROPERTY(int, downloads, 0)
|
||||
STATS_PROPERTY(int, downloadLimit, 0)
|
||||
STATS_PROPERTY(int, downloadsPending, 0)
|
||||
|
@ -182,8 +194,19 @@ signals:
|
|||
void avatarMixerOutKbpsChanged();
|
||||
void avatarMixerOutPpsChanged();
|
||||
void myAvatarSendRateChanged();
|
||||
void audioMixerInKbpsChanged();
|
||||
void audioMixerInPpsChanged();
|
||||
void audioMixerOutKbpsChanged();
|
||||
void audioMixerOutPpsChanged();
|
||||
void audioMixerKbpsChanged();
|
||||
void audioMixerPpsChanged();
|
||||
void audioMicOutboundPPSChanged();
|
||||
void audioSilentOutboundPPSChanged();
|
||||
void audioAudioInboundPPSChanged();
|
||||
void audioSilentInboundPPSChanged();
|
||||
void audioCodecChanged();
|
||||
void audioNoiseGateChanged();
|
||||
|
||||
void downloadsChanged();
|
||||
void downloadLimitChanged();
|
||||
void downloadsPendingChanged();
|
||||
|
|
|
@ -258,8 +258,3 @@ QVariant Line3DOverlay::getProperty(const QString& property) {
|
|||
Line3DOverlay* Line3DOverlay::createClone() const {
|
||||
return new Line3DOverlay(this);
|
||||
}
|
||||
|
||||
|
||||
void Line3DOverlay::locationChanged(bool tellPhysics) {
|
||||
// do nothing
|
||||
}
|
||||
|
|
|
@ -48,8 +48,6 @@ public:
|
|||
|
||||
virtual Line3DOverlay* createClone() const override;
|
||||
|
||||
virtual void locationChanged(bool tellPhysics = true) override;
|
||||
|
||||
glm::vec3 getDirection() const { return _direction; }
|
||||
float getLength() const { return _length; }
|
||||
glm::vec3 getLocalStart() const { return getLocalPosition(); }
|
||||
|
|
|
@ -769,6 +769,26 @@ bool Overlays::mousePressEvent(QMouseEvent* event) {
|
|||
return false;
|
||||
}
|
||||
|
||||
bool Overlays::mouseDoublePressEvent(QMouseEvent* event) {
|
||||
PerformanceTimer perfTimer("Overlays::mouseDoublePressEvent");
|
||||
|
||||
PickRay ray = qApp->computePickRay(event->x(), event->y());
|
||||
RayToOverlayIntersectionResult rayPickResult = findRayIntersectionForMouseEvent(ray);
|
||||
if (rayPickResult.intersects) {
|
||||
_currentClickingOnOverlayID = rayPickResult.overlayID;
|
||||
|
||||
// Only Web overlays can have focus.
|
||||
auto thisOverlay = std::dynamic_pointer_cast<Web3DOverlay>(getOverlay(_currentClickingOnOverlayID));
|
||||
if (thisOverlay) {
|
||||
auto pointerEvent = calculatePointerEvent(thisOverlay, ray, rayPickResult, event, PointerEvent::Press);
|
||||
emit mouseDoublePressOnOverlay(_currentClickingOnOverlayID, pointerEvent);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
emit mouseDoublePressOffOverlay();
|
||||
return false;
|
||||
}
|
||||
|
||||
bool Overlays::mouseReleaseEvent(QMouseEvent* event) {
|
||||
PerformanceTimer perfTimer("Overlays::mouseReleaseEvent");
|
||||
|
||||
|
|
|
@ -101,6 +101,7 @@ public:
|
|||
OverlayID addOverlay(Overlay::Pointer overlay);
|
||||
|
||||
bool mousePressEvent(QMouseEvent* event);
|
||||
bool mouseDoublePressEvent(QMouseEvent* event);
|
||||
bool mouseReleaseEvent(QMouseEvent* event);
|
||||
bool mouseMoveEvent(QMouseEvent* event);
|
||||
|
||||
|
@ -300,9 +301,11 @@ signals:
|
|||
void panelDeleted(OverlayID id);
|
||||
|
||||
void mousePressOnOverlay(OverlayID overlayID, const PointerEvent& event);
|
||||
void mouseDoublePressOnOverlay(OverlayID overlayID, const PointerEvent& event);
|
||||
void mouseReleaseOnOverlay(OverlayID overlayID, const PointerEvent& event);
|
||||
void mouseMoveOnOverlay(OverlayID overlayID, const PointerEvent& event);
|
||||
void mousePressOffOverlay();
|
||||
void mouseDoublePressOffOverlay();
|
||||
|
||||
void hoverEnterOverlay(OverlayID overlayID, const PointerEvent& event);
|
||||
void hoverOverOverlay(OverlayID overlayID, const PointerEvent& event);
|
||||
|
|
|
@ -1310,17 +1310,18 @@ void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
|
|||
if (!_animSkeleton) {
|
||||
return;
|
||||
}
|
||||
if (jointDataVec.size() != (int)_internalPoseSet._relativePoses.size()) {
|
||||
// animations haven't fully loaded yet.
|
||||
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
|
||||
int numJoints = jointDataVec.size();
|
||||
const AnimPoseVec& absoluteDefaultPoses = _animSkeleton->getAbsoluteDefaultPoses();
|
||||
if (numJoints != (int)absoluteDefaultPoses.size()) {
|
||||
// jointData is incompatible
|
||||
return;
|
||||
}
|
||||
|
||||
// make a vector of rotations in absolute-geometry-frame
|
||||
const AnimPoseVec& absoluteDefaultPoses = _animSkeleton->getAbsoluteDefaultPoses();
|
||||
std::vector<glm::quat> rotations;
|
||||
rotations.reserve(absoluteDefaultPoses.size());
|
||||
rotations.reserve(numJoints);
|
||||
const glm::quat rigToGeometryRot(glmExtractRotation(_rigToGeometryTransform));
|
||||
for (int i = 0; i < jointDataVec.size(); i++) {
|
||||
for (int i = 0; i < numJoints; i++) {
|
||||
const JointData& data = jointDataVec.at(i);
|
||||
if (data.rotationSet) {
|
||||
// JointData rotations are in absolute rig-frame so we rotate them to absolute geometry-frame
|
||||
|
@ -1334,8 +1335,11 @@ void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
|
|||
_animSkeleton->convertAbsoluteRotationsToRelative(rotations);
|
||||
|
||||
// store new relative poses
|
||||
if (numJoints != (int)_internalPoseSet._relativePoses.size()) {
|
||||
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
|
||||
}
|
||||
const AnimPoseVec& relativeDefaultPoses = _animSkeleton->getRelativeDefaultPoses();
|
||||
for (int i = 0; i < jointDataVec.size(); i++) {
|
||||
for (int i = 0; i < numJoints; i++) {
|
||||
const JointData& data = jointDataVec.at(i);
|
||||
_internalPoseSet._relativePoses[i].scale() = Vectors::ONE;
|
||||
_internalPoseSet._relativePoses[i].rot() = rotations[i];
|
||||
|
|
|
@ -608,6 +608,13 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessag
|
|||
}
|
||||
|
||||
void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message) {
|
||||
|
||||
if (message->getType() == PacketType::SilentAudioFrame) {
|
||||
_silentInbound.increment();
|
||||
} else {
|
||||
_audioInbound.increment();
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);
|
||||
|
||||
|
@ -1024,6 +1031,7 @@ void AudioClient::handleAudioInput() {
|
|||
if (_inputGate.clippedInLastFrame()) {
|
||||
_timeSinceLastClip = 0.0f;
|
||||
}
|
||||
|
||||
} else {
|
||||
float loudness = 0.0f;
|
||||
|
||||
|
@ -1041,6 +1049,13 @@ void AudioClient::handleAudioInput() {
|
|||
|
||||
emit inputReceived({ reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes });
|
||||
|
||||
if (_inputGate.openedInLastFrame()) {
|
||||
emit noiseGateOpened();
|
||||
}
|
||||
if (_inputGate.closedInLastFrame()) {
|
||||
emit noiseGateClosed();
|
||||
}
|
||||
|
||||
} else {
|
||||
// our input loudness is 0, since we're muted
|
||||
_lastInputLoudness = 0;
|
||||
|
@@ -1052,9 +1067,18 @@ void AudioClient::handleAudioInput() {
         auto packetType = _shouldEchoToServer ?
             PacketType::MicrophoneAudioWithEcho : PacketType::MicrophoneAudioNoEcho;

-        if (_lastInputLoudness == 0) {
+        // if the _inputGate closed in this last frame, then we don't actually want
+        // to send a silent packet, instead, we want to go ahead and encode and send
+        // the output from the input gate (eventually, this could be crossfaded)
+        // and allow the codec to properly encode down to silent/zero. If we still
+        // have _lastInputLoudness of 0 in our NEXT frame, we will send a silent packet
+        if (_lastInputLoudness == 0 && !_inputGate.closedInLastFrame()) {
             packetType = PacketType::SilentAudioFrame;
+            _silentOutbound.increment();
+        } else {
+            _micAudioOutbound.increment();
         }

         Transform audioTransform;
         audioTransform.setTranslation(_positionGetter());
         audioTransform.setRotation(_orientationGetter());
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME - should this go through the noise gate and honor mute and echo?
|
||||
void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
|
||||
Transform audioTransform;
|
||||
audioTransform.setTranslation(_positionGetter());
|
||||
|
@ -1091,6 +1116,8 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
|
|||
encodedBuffer = audio;
|
||||
}
|
||||
|
||||
_micAudioOutbound.increment();
|
||||
|
||||
// FIXME check a flag to see if we should echo audio?
|
||||
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber,
|
||||
audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
|
||||
|
|
|
@ -46,6 +46,8 @@
|
|||
#include <AudioLimiter.h>
|
||||
#include <AudioConstants.h>
|
||||
|
||||
#include <shared/RateCounter.h>
|
||||
|
||||
#include <plugins/CodecPlugin.h>
|
||||
|
||||
#include "AudioIOStats.h"
|
||||
|
@ -121,6 +123,13 @@ public:
|
|||
void negotiateAudioFormat();
|
||||
void selectAudioFormat(const QString& selectedCodecName);
|
||||
|
||||
Q_INVOKABLE QString getSelectedAudioFormat() const { return _selectedCodecName; }
|
||||
Q_INVOKABLE bool getNoiseGateOpen() const { return _inputGate.isOpen(); }
|
||||
Q_INVOKABLE float getSilentOutboundPPS() const { return _silentOutbound.rate(); }
|
||||
Q_INVOKABLE float getMicAudioOutboundPPS() const { return _micAudioOutbound.rate(); }
|
||||
Q_INVOKABLE float getSilentInboundPPS() const { return _silentInbound.rate(); }
|
||||
Q_INVOKABLE float getAudioInboundPPS() const { return _audioInbound.rate(); }
|
||||
|
||||
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
|
||||
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
|
||||
|
||||
|
@ -218,6 +227,8 @@ signals:
|
|||
void inputReceived(const QByteArray& inputSamples);
|
||||
void outputBytesToNetwork(int numBytes);
|
||||
void inputBytesFromNetwork(int numBytes);
|
||||
void noiseGateOpened();
|
||||
void noiseGateClosed();
|
||||
|
||||
void changeDevice(const QAudioDeviceInfo& outputDeviceInfo);
|
||||
void deviceChanged();
|
||||
|
@ -382,6 +393,11 @@ private:
|
|||
Encoder* _encoder { nullptr }; // for outbound mic stream
|
||||
|
||||
QThread* _checkDevicesThread { nullptr };
|
||||
|
||||
RateCounter<> _silentOutbound;
|
||||
RateCounter<> _micAudioOutbound;
|
||||
RateCounter<> _silentInbound;
|
||||
RateCounter<> _audioInbound;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -58,7 +58,6 @@ void AudioNoiseGate::removeDCOffset(int16_t* samples, int numSamples) {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
|
||||
//
|
||||
// Impose Noise Gate
|
||||
|
@ -77,8 +76,7 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
|
|||
// NOISE_GATE_FRAMES_TO_AVERAGE: How many audio frames should we average together to compute noise floor.
|
||||
// More means better rejection but also can reject continuous things like singing.
|
||||
// NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?
|
||||
|
||||
|
||||
|
||||
float loudness = 0;
|
||||
int thisSample = 0;
|
||||
int samplesOverNoiseGate = 0;
|
||||
|
@@ -142,16 +140,38 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
         _sampleCounter = 0;

     }

+    _closedInLastFrame = false;
+    _openedInLastFrame = false;
+
     if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
+        _openedInLastFrame = !_isOpen;
         _isOpen = true;
         _framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
     } else {
         if (--_framesToClose == 0) {
+            _closedInLastFrame = _isOpen;
             _isOpen = false;
         }
     }
     if (!_isOpen) {
-        memset(samples, 0, numSamples * sizeof(int16_t));
+        if (_closedInLastFrame) {
+            // would be nice to do a little crossfade to silence
+            for (int i = 0; i < numSamples; i++) {
+                float fadedSample = (1.0f - (float)i / (float)numSamples) * (float)samples[i];
+                samples[i] = (int16_t)fadedSample;
+            }
+        } else {
+            memset(samples, 0, numSamples * sizeof(int16_t));
+        }
         _lastLoudness = 0;
     }
+
+    if (_openedInLastFrame) {
+        // would be nice to do a little crossfade from silence
+        for (int i = 0; i < numSamples; i++) {
+            float fadedSample = ((float)i / (float)numSamples) * (float)samples[i];
+            samples[i] = (int16_t)fadedSample;
+        }
+    }
 }
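The gate-close path above fades the frame out instead of hard-muting it, and the gate-open path fades it in, which removes the click at the gate boundary. A self-contained sketch of that linear fade on 16-bit samples:

#include <cstdint>

// fadeOut=true ramps the gain 1 -> 0 across the frame (gate just closed);
// fadeOut=false ramps 0 -> 1 (gate just opened).
void linearFade(int16_t* samples, int numSamples, bool fadeOut) {
    for (int i = 0; i < numSamples; i++) {
        float t = (float)i / (float)numSamples;
        float gain = fadeOut ? (1.0f - t) : t;
        samples[i] = (int16_t)(gain * (float)samples[i]);
    }
}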
|
|
|
@ -24,6 +24,9 @@ public:
|
|||
void removeDCOffset(int16_t* samples, int numSamples);
|
||||
|
||||
bool clippedInLastFrame() const { return _didClipInLastFrame; }
|
||||
bool closedInLastFrame() const { return _closedInLastFrame; }
|
||||
bool openedInLastFrame() const { return _openedInLastFrame; }
|
||||
bool isOpen() const { return _isOpen; }
|
||||
float getMeasuredFloor() const { return _measuredFloor; }
|
||||
float getLastLoudness() const { return _lastLoudness; }
|
||||
|
||||
|
@ -40,6 +43,8 @@ private:
|
|||
float _sampleFrames[NUMBER_OF_NOISE_SAMPLE_FRAMES];
|
||||
int _sampleCounter;
|
||||
bool _isOpen;
|
||||
bool _closedInLastFrame { false };
|
||||
bool _openedInLastFrame { false };
|
||||
int _framesToClose;
|
||||
};
|
||||
|
||||
|
|
|
@ -136,9 +136,10 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
|
|||
break;
|
||||
}
|
||||
case SequenceNumberStats::Early: {
|
||||
// Packet is early; write droppable silent samples for each of the skipped packets.
|
||||
// NOTE: we assume that each dropped packet contains the same number of samples
|
||||
// as the packet we just received.
|
||||
// Packet is early. Treat the packets as if all the packets between the last
|
||||
// OnTime packet and this packet were lost. If we're using a codec this will
|
||||
// also result in allowing the codec to interpolate lost data. Then
|
||||
// fall through to the "on time" logic to actually handle this packet
|
||||
int packetsDropped = arrivalInfo._seqDiffFromExpected;
|
||||
lostAudioData(packetsDropped);
|
||||
|
||||
|
@ -147,7 +148,8 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
|
|||
case SequenceNumberStats::OnTime: {
|
||||
// Packet is on time; parse its data to the ringbuffer
|
||||
if (message.getType() == PacketType::SilentAudioFrame) {
|
||||
// FIXME - Some codecs need to know about these silent frames... and can produce better output
|
||||
// If we recieved a SilentAudioFrame from our sender, we might want to drop
|
||||
// some of the samples in order to catch up to our desired jitter buffer size.
|
||||
writeDroppableSilentFrames(networkFrames);
|
||||
} else {
|
||||
// note: PCM and no codec are identical
|
||||
|
@ -158,7 +160,12 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
|
|||
parseAudioData(message.getType(), afterProperties);
|
||||
} else {
|
||||
qDebug(audio) << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket << "writing silence";
|
||||
writeDroppableSilentFrames(networkFrames);
|
||||
|
||||
// Since the data in the stream is using a codec that we aren't prepared for,
|
||||
// we need to let the codec know that we don't have data for it, this will
|
||||
// allow the codec to interpolate missing data and produce a fade to silence.
|
||||
lostAudioData(1);
|
||||
|
||||
// inform others of the mismatch
|
||||
auto sendingNode = DependencyManager::get<NodeList>()->nodeWithUUID(message.getSourceID());
|
||||
emit mismatchedAudioCodec(sendingNode, _selectedCodecName, codecInPacket);
|
||||
|
@@ -240,6 +247,25 @@ int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packet

 int InboundAudioStream::writeDroppableSilentFrames(int silentFrames) {
+
+    // We can't guarentee that all clients have faded the stream down
+    // to silence and encoded that silence before sending us a
+    // SilentAudioFrame. If the encoder has truncated the stream it will
+    // leave the decoder holding some unknown loud state. To handle this
+    // case we will call the decoder's lostFrame() method, which indicates
+    // that it should interpolate from its last known state down toward
+    // silence.
+    if (_decoder) {
+        // FIXME - We could potentially use the output from the codec, in which
+        // case we might get a cleaner fade toward silence. NOTE: The below logic
+        // attempts to catch up in the event that the jitter buffers have grown.
+        // The better long term fix is to use the output from the decode, detect
+        // when it actually reaches silence, and then delete the silent portions
+        // of the jitter buffers. Or petentially do a cross fade from the decode
+        // output to silence.
+        QByteArray decodedBuffer;
+        _decoder->lostFrame(decodedBuffer);
+    }
+
     // calculate how many silent frames we should drop.
     int silentSamples = silentFrames * _numChannels;
     int samplesPerFrame = _ringBuffer.getNumFrameSamples();
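The comment block above is the key idea: before substituting droppable silence, poke the decoder once so it can ramp its internal state down on its own. A stripped-down sketch of that pattern (illustrative; it omits the jitter-buffer catch-up logic that follows in the real method):

// Sketch: let the codec fade itself out before we write silent frames.
// Decoder::lostFrame(QByteArray&) is the call used in the diff above.
void notifyDecoderBeforeSilence(Decoder* decoder) {
    if (decoder) {
        QByteArray decodedBuffer;
        decoder->lostFrame(decodedBuffer);  // decoder interpolates from its last state toward silence
    }
}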
|
|
|
@ -2000,6 +2000,11 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
|
|||
}
|
||||
}
|
||||
|
||||
auto currentBasis = getRecordingBasis();
|
||||
if (!currentBasis) {
|
||||
currentBasis = std::make_shared<Transform>(Transform::fromJson(json[JSON_AVATAR_BASIS]));
|
||||
}
|
||||
|
||||
if (json.contains(JSON_AVATAR_RELATIVE)) {
|
||||
// During playback you can either have the recording basis set to the avatar current state
|
||||
// meaning that all playback is relative to this avatars starting position, or
|
||||
|
@ -2008,15 +2013,14 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
|
|||
// The first is more useful for playing back recordings on your own avatar, while
|
||||
// the latter is more useful for playing back other avatars within your scene.
|
||||
|
||||
auto currentBasis = getRecordingBasis();
|
||||
if (!currentBasis) {
|
||||
currentBasis = std::make_shared<Transform>(Transform::fromJson(json[JSON_AVATAR_BASIS]));
|
||||
}
|
||||
|
||||
auto relativeTransform = Transform::fromJson(json[JSON_AVATAR_RELATIVE]);
|
||||
auto worldTransform = currentBasis->worldTransform(relativeTransform);
|
||||
setPosition(worldTransform.getTranslation());
|
||||
setOrientation(worldTransform.getRotation());
|
||||
} else {
|
||||
// We still set the position in the case that there is no movement.
|
||||
setPosition(currentBasis->getTranslation());
|
||||
setOrientation(currentBasis->getRotation());
|
||||
}
|
||||
|
||||
if (json.contains(JSON_AVATAR_SCALE)) {
|
||||
|
|
|
@ -735,6 +735,52 @@ void EntityTreeRenderer::mousePressEvent(QMouseEvent* event) {
|
|||
}
|
||||
}
|
||||
|
||||
void EntityTreeRenderer::mouseDoublePressEvent(QMouseEvent* event) {
|
||||
// If we don't have a tree, or we're in the process of shutting down, then don't
|
||||
// process these events.
|
||||
if (!_tree || _shuttingDown) {
|
||||
return;
|
||||
}
|
||||
PerformanceTimer perfTimer("EntityTreeRenderer::mouseDoublePressEvent");
|
||||
PickRay ray = _viewState->computePickRay(event->x(), event->y());
|
||||
|
||||
bool precisionPicking = !_dontDoPrecisionPicking;
|
||||
RayToEntityIntersectionResult rayPickResult = findRayIntersectionWorker(ray, Octree::Lock, precisionPicking);
|
||||
if (rayPickResult.intersects) {
|
||||
//qCDebug(entitiesrenderer) << "mouseDoublePressEvent over entity:" << rayPickResult.entityID;
|
||||
|
||||
QString urlString = rayPickResult.properties.getHref();
|
||||
QUrl url = QUrl(urlString, QUrl::StrictMode);
|
||||
if (url.isValid() && !url.isEmpty()){
|
||||
DependencyManager::get<AddressManager>()->handleLookupString(urlString);
|
||||
}
|
||||
|
||||
glm::vec2 pos2D = projectOntoEntityXYPlane(rayPickResult.entity, ray, rayPickResult);
|
||||
PointerEvent pointerEvent(PointerEvent::Press, MOUSE_POINTER_ID,
|
||||
pos2D, rayPickResult.intersection,
|
||||
rayPickResult.surfaceNormal, ray.direction,
|
||||
toPointerButton(*event), toPointerButtons(*event));
|
||||
|
||||
emit mouseDoublePressOnEntity(rayPickResult.entityID, pointerEvent);
|
||||
|
||||
if (_entitiesScriptEngine) {
|
||||
_entitiesScriptEngine->callEntityScriptMethod(rayPickResult.entityID, "mouseDoublePressOnEntity", pointerEvent);
|
||||
}
|
||||
|
||||
_currentClickingOnEntityID = rayPickResult.entityID;
|
||||
emit clickDownOnEntity(_currentClickingOnEntityID, pointerEvent);
|
||||
if (_entitiesScriptEngine) {
|
||||
_entitiesScriptEngine->callEntityScriptMethod(_currentClickingOnEntityID, "doubleclickOnEntity", pointerEvent);
|
||||
}
|
||||
|
||||
_lastPointerEvent = pointerEvent;
|
||||
_lastPointerEventValid = true;
|
||||
|
||||
} else {
|
||||
emit mouseDoublePressOffEntity();
|
||||
}
|
||||
}
|
||||
|
||||
void EntityTreeRenderer::mouseReleaseEvent(QMouseEvent* event) {
|
||||
// If we don't have a tree, or we're in the process of shutting down, then don't
|
||||
// process these events.
|
||||
|
|
|
@ -90,6 +90,7 @@ public:
|
|||
// event handles which may generate entity related events
|
||||
void mouseReleaseEvent(QMouseEvent* event);
|
||||
void mousePressEvent(QMouseEvent* event);
|
||||
void mouseDoublePressEvent(QMouseEvent* event);
|
||||
void mouseMoveEvent(QMouseEvent* event);
|
||||
|
||||
/// connect our signals to anEntityScriptingInterface for firing of events related clicking,
|
||||
|
@ -103,9 +104,11 @@ public:
|
|||
|
||||
signals:
|
||||
void mousePressOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
|
||||
void mouseDoublePressOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
|
||||
void mouseMoveOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
|
||||
void mouseReleaseOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
|
||||
void mousePressOffEntity();
|
||||
void mouseDoublePressOffEntity();
|
||||
|
||||
void clickDownOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
|
||||
void holdingClickOnEntity(const EntityItemID& entityItemID, const PointerEvent& event);
|
||||
|
|
|
@@ -1,4 +1,4 @@
 set(TARGET_NAME model-networking)
 setup_hifi_library()
-link_hifi_libraries(shared networking model fbx)
+link_hifi_libraries(shared networking model fbx ktx)

||||
|
|
libraries/model-networking/src/model-networking/KTXCache.cpp (new file, 44 lines)
@@ -0,0 +1,44 @@
+//
+//  KTXCache.cpp
+//  libraries/model-networking/src
+//
+//  Created by Zach Pomerantz on 2/22/2017.
+//  Copyright 2017 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#include "KTXCache.h"
+
+#include <ktx/KTX.h>
+
+using File = cache::File;
+using FilePointer = cache::FilePointer;
+
+KTXCache::KTXCache(const std::string& dir, const std::string& ext) :
+    FileCache(dir, ext) {
+    initialize();
+}
+
+KTXFilePointer KTXCache::writeFile(const char* data, Metadata&& metadata) {
+    FilePointer file = FileCache::writeFile(data, std::move(metadata));
+    return std::static_pointer_cast<KTXFile>(file);
+}
+
+KTXFilePointer KTXCache::getFile(const Key& key) {
+    return std::static_pointer_cast<KTXFile>(FileCache::getFile(key));
+}
+
+std::unique_ptr<File> KTXCache::createFile(Metadata&& metadata, const std::string& filepath) {
+    qCInfo(file_cache) << "Wrote KTX" << metadata.key.c_str();
+    return std::unique_ptr<File>(new KTXFile(std::move(metadata), filepath));
+}
+
+KTXFile::KTXFile(Metadata&& metadata, const std::string& filepath) :
+    cache::File(std::move(metadata), filepath) {}
+
+std::unique_ptr<ktx::KTX> KTXFile::getKTX() const {
+    ktx::StoragePointer storage = std::make_shared<storage::FileStorage>(getFilepath().c_str());
+    return ktx::KTX::create(storage);
+}
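To make the new cache's role concrete, here is a hedged usage sketch. The directory and extension values match the TextureCache constructor later in this diff; the Metadata construction and the serialized-KTX source are assumptions (FileCache's Metadata layout is not shown here), so the write/read calls are kept as comments:

// Sketch only: illustrates the writeFile/getFile/getKTX flow of the new class.
KTXCache ktxCache("ktx_cache", "ktx");  // same dir/ext that TextureCache uses below

// Write: hand the cache serialized KTX bytes plus metadata, get a KTXFilePointer back.
// KTXFilePointer written = ktxCache.writeFile(serializedKtx.data(), std::move(metadata));

// Read: look the file up later by its key (e.g. a hash of the source texture) and reopen
// it as a ktx::KTX backed by file storage, without re-decoding the original image.
// if (KTXFilePointer cached = ktxCache.getFile(key)) {
//     std::unique_ptr<ktx::KTX> ktx = cached->getKTX();
// }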
libraries/model-networking/src/model-networking/KTXCache.h (new file, 51 lines)
@ -0,0 +1,51 @@
|
|||
//
|
||||
// KTXCache.h
|
||||
// libraries/model-networking/src
|
||||
//
|
||||
// Created by Zach Pomerantz 2/22/2017.
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_KTXCache_h
|
||||
#define hifi_KTXCache_h
|
||||
|
||||
#include <QUrl>
|
||||
|
||||
#include <FileCache.h>
|
||||
|
||||
namespace ktx {
|
||||
class KTX;
|
||||
}
|
||||
|
||||
class KTXFile;
|
||||
using KTXFilePointer = std::shared_ptr<KTXFile>;
|
||||
|
||||
class KTXCache : public cache::FileCache {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
KTXCache(const std::string& dir, const std::string& ext);
|
||||
|
||||
KTXFilePointer writeFile(const char* data, Metadata&& metadata);
|
||||
KTXFilePointer getFile(const Key& key);
|
||||
|
||||
protected:
|
||||
std::unique_ptr<cache::File> createFile(Metadata&& metadata, const std::string& filepath) override final;
|
||||
};
|
||||
|
||||
class KTXFile : public cache::File {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
std::unique_ptr<ktx::KTX> getKTX() const;
|
||||
|
||||
protected:
|
||||
friend class KTXCache;
|
||||
|
||||
KTXFile(Metadata&& metadata, const std::string& filepath);
|
||||
};
|
||||
|
||||
#endif // hifi_KTXCache_h
|
|
@ -18,27 +18,37 @@
|
|||
#include <QRunnable>
|
||||
#include <QThreadPool>
|
||||
#include <QImageReader>
|
||||
|
||||
#if DEBUG_DUMP_TEXTURE_LOADS
|
||||
#include <QtCore/QFile>
|
||||
#include <QtCore/QFileInfo>
|
||||
#endif
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/random.hpp>
|
||||
|
||||
#include <gpu/Batch.h>
|
||||
|
||||
#include <ktx/KTX.h>
|
||||
|
||||
#include <NumericalConstants.h>
|
||||
#include <shared/NsightHelpers.h>
|
||||
|
||||
#include <Finally.h>
|
||||
#include <PathUtils.h>
|
||||
|
||||
#include "ModelNetworkingLogging.h"
|
||||
#include <Trace.h>
|
||||
#include <StatTracker.h>
|
||||
|
||||
Q_LOGGING_CATEGORY(trace_resource_parse_image, "trace.resource.parse.image")
|
||||
Q_LOGGING_CATEGORY(trace_resource_parse_image_raw, "trace.resource.parse.image.raw")
|
||||
Q_LOGGING_CATEGORY(trace_resource_parse_image_ktx, "trace.resource.parse.image.ktx")
|
||||
|
||||
TextureCache::TextureCache() {
|
||||
const std::string TextureCache::KTX_DIRNAME { "ktx_cache" };
|
||||
const std::string TextureCache::KTX_EXT { "ktx" };
|
||||
|
||||
TextureCache::TextureCache() :
|
||||
_ktxCache(KTX_DIRNAME, KTX_EXT) {
|
||||
setUnusedResourceCacheSize(0);
|
||||
setObjectName("TextureCache");
|
||||
|
||||
|
@ -61,7 +71,7 @@ TextureCache::~TextureCache() {
|
|||
// this list taken from Ken Perlin's Improved Noise reference implementation (orig. in Java) at
|
||||
// http://mrl.nyu.edu/~perlin/noise/
|
||||
|
||||
const int permutation[256] =
|
||||
const int permutation[256] =
|
||||
{
|
||||
151, 160, 137, 91, 90, 15, 131, 13, 201, 95, 96, 53, 194, 233, 7, 225,
|
||||
140, 36, 103, 30, 69, 142, 8, 99, 37, 240, 21, 10, 23, 190, 6, 148,
|
||||
|
@ -266,7 +276,7 @@ NetworkTexture::TextureLoaderFunc getTextureLoaderForType(NetworkTexture::Type t
|
|||
return NetworkTexture::TextureLoaderFunc();
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
case Type::DEFAULT_TEXTURE:
|
||||
default: {
|
||||
return model::TextureUsage::create2DTextureFromImage;
|
||||
|
@ -288,8 +298,8 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSh
|
|||
auto type = textureExtra ? textureExtra->type : Type::DEFAULT_TEXTURE;
|
||||
auto content = textureExtra ? textureExtra->content : QByteArray();
|
||||
auto maxNumPixels = textureExtra ? textureExtra->maxNumPixels : ABSOLUTE_MAX_TEXTURE_NUM_PIXELS;
|
||||
return QSharedPointer<Resource>(new NetworkTexture(url, type, content, maxNumPixels),
|
||||
&Resource::deleter);
|
||||
NetworkTexture* texture = new NetworkTexture(url, type, content, maxNumPixels);
|
||||
return QSharedPointer<Resource>(texture, &Resource::deleter);
|
||||
}
|
||||
|
||||
NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels) :
|
||||
|
@ -303,7 +313,6 @@ NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& con
|
|||
_loaded = true;
|
||||
}
|
||||
|
||||
std::string theName = url.toString().toStdString();
|
||||
// if we have content, load it after we have our self pointer
|
||||
if (!content.isEmpty()) {
|
||||
_startedLoading = true;
|
||||
|
@ -311,12 +320,6 @@ NetworkTexture::NetworkTexture(const QUrl& url, Type type, const QByteArray& con
|
|||
}
|
||||
}
|
||||
|
||||
NetworkTexture::NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content) :
|
||||
NetworkTexture(url, CUSTOM_TEXTURE, content, ABSOLUTE_MAX_TEXTURE_NUM_PIXELS)
|
||||
{
|
||||
_textureLoader = textureLoader;
|
||||
}
|
||||
|
||||
NetworkTexture::TextureLoaderFunc NetworkTexture::getTextureLoader() const {
|
||||
if (_type == CUSTOM_TEXTURE) {
|
||||
return _textureLoader;
|
||||
|
@ -324,160 +327,6 @@ NetworkTexture::TextureLoaderFunc NetworkTexture::getTextureLoader() const {
|
|||
return getTextureLoaderForType(_type);
|
||||
}
|
||||
|
||||
gpu::TexturePointer NetworkTexture::getFallbackTexture() const {
|
||||
if (_type == CUSTOM_TEXTURE) {
|
||||
return gpu::TexturePointer();
|
||||
}
|
||||
return getFallbackTextureForType(_type);
|
||||
}
|
||||
|
||||
|
||||
class ImageReader : public QRunnable {
|
||||
public:
|
||||
|
||||
ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data,
|
||||
const QUrl& url = QUrl(), int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
|
||||
|
||||
virtual void run() override;
|
||||
|
||||
private:
|
||||
static void listSupportedImageFormats();
|
||||
|
||||
QWeakPointer<Resource> _resource;
|
||||
QUrl _url;
|
||||
QByteArray _content;
|
||||
int _maxNumPixels;
|
||||
};
|
||||
|
||||
void NetworkTexture::downloadFinished(const QByteArray& data) {
|
||||
// send the reader off to the thread pool
|
||||
QThreadPool::globalInstance()->start(new ImageReader(_self, data, _url));
|
||||
}
|
||||
|
||||
void NetworkTexture::loadContent(const QByteArray& content) {
|
||||
QThreadPool::globalInstance()->start(new ImageReader(_self, content, _url, _maxNumPixels));
|
||||
}
|
||||
|
||||
ImageReader::ImageReader(const QWeakPointer<Resource>& resource, const QByteArray& data,
|
||||
const QUrl& url, int maxNumPixels) :
|
||||
_resource(resource),
|
||||
_url(url),
|
||||
_content(data),
|
||||
_maxNumPixels(maxNumPixels)
|
||||
{
|
||||
#if DEBUG_DUMP_TEXTURE_LOADS
|
||||
static auto start = usecTimestampNow() / USECS_PER_MSEC;
|
||||
auto now = usecTimestampNow() / USECS_PER_MSEC - start;
|
||||
QString urlStr = _url.toString();
|
||||
auto dot = urlStr.lastIndexOf(".");
|
||||
QString outFileName = QString(QCryptographicHash::hash(urlStr.toLocal8Bit(), QCryptographicHash::Md5).toHex()) + urlStr.right(urlStr.length() - dot);
|
||||
QFile loadRecord("h:/textures/loads.txt");
|
||||
loadRecord.open(QFile::Text | QFile::Append | QFile::ReadWrite);
|
||||
loadRecord.write(QString("%1 %2\n").arg(now).arg(outFileName).toLocal8Bit());
|
||||
outFileName = "h:/textures/" + outFileName;
|
||||
QFileInfo outInfo(outFileName);
|
||||
if (!outInfo.exists()) {
|
||||
QFile outFile(outFileName);
|
||||
outFile.open(QFile::WriteOnly | QFile::Truncate);
|
||||
outFile.write(data);
|
||||
outFile.close();
|
||||
}
|
||||
#endif
|
||||
DependencyManager::get<StatTracker>()->incrementStat("PendingProcessing");
|
||||
}
|
||||
|
||||
void ImageReader::listSupportedImageFormats() {
|
||||
static std::once_flag once;
|
||||
std::call_once(once, []{
|
||||
auto supportedFormats = QImageReader::supportedImageFormats();
|
||||
qCDebug(modelnetworking) << "List of supported Image formats:" << supportedFormats.join(", ");
|
||||
});
|
||||
}
|
||||
|
||||
void ImageReader::run() {
|
||||
DependencyManager::get<StatTracker>()->decrementStat("PendingProcessing");
|
||||
|
||||
CounterStat counter("Processing");
|
||||
|
||||
PROFILE_RANGE_EX(resource_parse_image, __FUNCTION__, 0xffff0000, 0, { { "url", _url.toString() } });
|
||||
auto originalPriority = QThread::currentThread()->priority();
|
||||
if (originalPriority == QThread::InheritPriority) {
|
||||
originalPriority = QThread::NormalPriority;
|
||||
}
|
||||
QThread::currentThread()->setPriority(QThread::LowPriority);
|
||||
Finally restorePriority([originalPriority]{
|
||||
QThread::currentThread()->setPriority(originalPriority);
|
||||
});
|
||||
|
||||
if (!_resource.data()) {
|
||||
qCWarning(modelnetworking) << "Abandoning load of" << _url << "; could not get strong ref";
|
||||
return;
|
||||
}
|
||||
listSupportedImageFormats();
|
||||
|
||||
// Help the QImage loader by extracting the image file format from the url filename ext.
|
||||
// Some tga are not created properly without it.
|
||||
auto filename = _url.fileName().toStdString();
|
||||
auto filenameExtension = filename.substr(filename.find_last_of('.') + 1);
|
||||
QImage image = QImage::fromData(_content, filenameExtension.c_str());
|
||||
|
||||
// Note that QImage.format is the pixel format which is different from the "format" of the image file...
|
||||
auto imageFormat = image.format();
|
||||
int imageWidth = image.width();
|
||||
int imageHeight = image.height();
|
||||
|
||||
if (imageWidth == 0 || imageHeight == 0 || imageFormat == QImage::Format_Invalid) {
|
||||
if (filenameExtension.empty()) {
|
||||
qCDebug(modelnetworking) << "QImage failed to create from content, no file extension:" << _url;
|
||||
} else {
|
||||
qCDebug(modelnetworking) << "QImage failed to create from content" << _url;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (imageWidth * imageHeight > _maxNumPixels) {
|
||||
float scaleFactor = sqrtf(_maxNumPixels / (float)(imageWidth * imageHeight));
|
||||
int originalWidth = imageWidth;
|
||||
int originalHeight = imageHeight;
|
||||
imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f);
|
||||
imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f);
|
||||
QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
|
||||
image.swap(newImage);
|
||||
qCDebug(modelnetworking) << "Downscale image" << _url
|
||||
<< "from" << originalWidth << "x" << originalHeight
|
||||
<< "to" << imageWidth << "x" << imageHeight;
|
||||
}
|
||||
|
||||
gpu::TexturePointer texture = nullptr;
|
||||
{
|
||||
// Double-check the resource still exists between long operations.
|
||||
auto resource = _resource.toStrongRef();
|
||||
if (!resource) {
|
||||
qCWarning(modelnetworking) << "Abandoning load of" << _url << "; could not get strong ref";
|
||||
return;
|
||||
}
|
||||
|
||||
auto url = _url.toString().toStdString();
|
||||
|
||||
PROFILE_RANGE_EX(resource_parse_image, __FUNCTION__, 0xffffff00, 0);
|
||||
auto networkTexture = resource.dynamicCast<NetworkTexture>();
|
||||
texture.reset(networkTexture->getTextureLoader()(image, url));
|
||||
if (texture) {
|
||||
texture->setFallbackTexture(networkTexture->getFallbackTexture());
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure the resource has not been deleted
|
||||
auto resource = _resource.toStrongRef();
|
||||
if (!resource) {
|
||||
qCWarning(modelnetworking) << "Abandoning load of" << _url << "; could not get strong ref";
|
||||
} else {
|
||||
QMetaObject::invokeMethod(resource.data(), "setImage",
|
||||
Q_ARG(gpu::TexturePointer, texture),
|
||||
Q_ARG(int, imageWidth), Q_ARG(int, imageHeight));
|
||||
}
|
||||
}
|
||||
|
||||
void NetworkTexture::setImage(gpu::TexturePointer texture, int originalWidth,
|
||||
int originalHeight) {
|
||||
_originalWidth = originalWidth;
|
||||
|
@ -500,3 +349,236 @@ void NetworkTexture::setImage(gpu::TexturePointer texture, int originalWidth,
|
|||
|
||||
emit networkTextureCreated(qWeakPointerCast<NetworkTexture, Resource> (_self));
|
||||
}
|
||||
|
||||
gpu::TexturePointer NetworkTexture::getFallbackTexture() const {
|
||||
if (_type == CUSTOM_TEXTURE) {
|
||||
return gpu::TexturePointer();
|
||||
}
|
||||
return getFallbackTextureForType(_type);
|
||||
}
|
||||
|
||||
class Reader : public QRunnable {
|
||||
public:
|
||||
Reader(const QWeakPointer<Resource>& resource, const QUrl& url);
|
||||
void run() override final;
|
||||
virtual void read() = 0;
|
||||
|
||||
protected:
|
||||
QWeakPointer<Resource> _resource;
|
||||
QUrl _url;
|
||||
};
|
||||
|
||||
class ImageReader : public Reader {
|
||||
public:
|
||||
ImageReader(const QWeakPointer<Resource>& resource, const QUrl& url,
|
||||
const QByteArray& data, const std::string& hash, int maxNumPixels);
|
||||
void read() override final;
|
||||
|
||||
private:
|
||||
static void listSupportedImageFormats();
|
||||
|
||||
QByteArray _content;
|
||||
std::string _hash;
|
||||
int _maxNumPixels;
|
||||
};
|
||||
|
||||
class KTXReader : public Reader {
|
||||
public:
|
||||
KTXReader(const QWeakPointer<Resource>& resource, const QUrl& url, const KTXFilePointer& ktxFile);
|
||||
void read() override final;
|
||||
|
||||
private:
|
||||
KTXFilePointer _file;
|
||||
};
|
||||
|
||||
void NetworkTexture::downloadFinished(const QByteArray& data) {
|
||||
loadContent(data);
|
||||
}
|
||||
|
||||
void NetworkTexture::loadContent(const QByteArray& content) {
|
||||
// Hash the source image to for KTX caching
|
||||
std::string hash;
|
||||
{
|
||||
QCryptographicHash hasher(QCryptographicHash::Md5);
|
||||
hasher.addData(content);
|
||||
hash = hasher.result().toHex().toStdString();
|
||||
}
|
||||
|
||||
std::unique_ptr<Reader> reader;
|
||||
KTXFilePointer ktxFile;
|
||||
if (!_cache.isNull() && (ktxFile = static_cast<TextureCache*>(_cache.data())->_ktxCache.getFile(hash))) {
|
||||
reader.reset(new KTXReader(_self, _url, ktxFile));
|
||||
} else {
|
||||
reader.reset(new ImageReader(_self, _url, content, hash, _maxNumPixels));
|
||||
}
|
||||
|
||||
QThreadPool::globalInstance()->start(reader.release());
|
||||
}
|
||||
|
||||
Reader::Reader(const QWeakPointer<Resource>& resource, const QUrl& url) :
|
||||
_resource(resource), _url(url) {
|
||||
DependencyManager::get<StatTracker>()->incrementStat("PendingProcessing");
|
||||
}
|
||||
|
||||
void Reader::run() {
|
||||
PROFILE_RANGE_EX(resource_parse_image, __FUNCTION__, 0xffff0000, 0, { { "url", _url.toString() } });
|
||||
DependencyManager::get<StatTracker>()->decrementStat("PendingProcessing");
|
||||
CounterStat counter("Processing");
|
||||
|
||||
auto originalPriority = QThread::currentThread()->priority();
|
||||
if (originalPriority == QThread::InheritPriority) {
|
||||
originalPriority = QThread::NormalPriority;
|
||||
}
|
||||
QThread::currentThread()->setPriority(QThread::LowPriority);
|
||||
Finally restorePriority([originalPriority]{ QThread::currentThread()->setPriority(originalPriority); });
|
||||
|
||||
if (!_resource.data()) {
|
||||
qCWarning(modelnetworking) << "Abandoning load of" << _url << "; could not get strong ref";
|
||||
return;
|
||||
}
|
||||
|
||||
read();
|
||||
}
|
||||
|
||||
ImageReader::ImageReader(const QWeakPointer<Resource>& resource, const QUrl& url,
|
||||
const QByteArray& data, const std::string& hash, int maxNumPixels) :
|
||||
Reader(resource, url), _content(data), _hash(hash), _maxNumPixels(maxNumPixels) {
|
||||
listSupportedImageFormats();
|
||||
|
||||
#if DEBUG_DUMP_TEXTURE_LOADS
|
||||
static auto start = usecTimestampNow() / USECS_PER_MSEC;
|
||||
auto now = usecTimestampNow() / USECS_PER_MSEC - start;
|
||||
QString urlStr = _url.toString();
|
||||
auto dot = urlStr.lastIndexOf(".");
|
||||
QString outFileName = QString(QCryptographicHash::hash(urlStr.toLocal8Bit(), QCryptographicHash::Md5).toHex()) + urlStr.right(urlStr.length() - dot);
|
||||
QFile loadRecord("h:/textures/loads.txt");
|
||||
loadRecord.open(QFile::Text | QFile::Append | QFile::ReadWrite);
|
||||
loadRecord.write(QString("%1 %2\n").arg(now).arg(outFileName).toLocal8Bit());
|
||||
outFileName = "h:/textures/" + outFileName;
|
||||
QFileInfo outInfo(outFileName);
|
||||
if (!outInfo.exists()) {
|
||||
QFile outFile(outFileName);
|
||||
outFile.open(QFile::WriteOnly | QFile::Truncate);
|
||||
outFile.write(data);
|
||||
outFile.close();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void ImageReader::listSupportedImageFormats() {
|
||||
static std::once_flag once;
|
||||
std::call_once(once, []{
|
||||
auto supportedFormats = QImageReader::supportedImageFormats();
|
||||
qCDebug(modelnetworking) << "List of supported Image formats:" << supportedFormats.join(", ");
|
||||
});
|
||||
}
|
||||
|
||||
void ImageReader::read() {
|
||||
// Help the QImage loader by extracting the image file format from the url filename ext.
|
||||
// Some tga are not created properly without it.
|
||||
auto filename = _url.fileName().toStdString();
|
||||
auto filenameExtension = filename.substr(filename.find_last_of('.') + 1);
|
||||
QImage image = QImage::fromData(_content, filenameExtension.c_str());
|
||||
int imageWidth = image.width();
|
||||
int imageHeight = image.height();
|
||||
|
||||
// Validate that the image loaded
|
||||
if (imageWidth == 0 || imageHeight == 0 || image.format() == QImage::Format_Invalid) {
|
||||
QString reason(filenameExtension.empty() ? "" : "(no file extension)");
|
||||
qCWarning(modelnetworking) << "Failed to load" << _url << reason;
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate the image is less than _maxNumPixels, and downscale if necessary
|
||||
if (imageWidth * imageHeight > _maxNumPixels) {
|
||||
float scaleFactor = sqrtf(_maxNumPixels / (float)(imageWidth * imageHeight));
|
||||
int originalWidth = imageWidth;
|
||||
int originalHeight = imageHeight;
|
||||
imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f);
|
||||
imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f);
|
||||
QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
|
||||
image.swap(newImage);
|
||||
qCDebug(modelnetworking).nospace() << "Downscaled " << _url << " (" <<
|
||||
QSize(originalWidth, originalHeight) << " to " <<
|
||||
QSize(imageWidth, imageHeight) << ")";
|
||||
}
|
||||
|
||||
gpu::TexturePointer texture = nullptr;
|
||||
{
|
||||
auto resource = _resource.lock(); // to ensure the resource is still needed
|
||||
if (!resource) {
|
||||
qCDebug(modelnetworking) << _url << "loading stopped; resource out of scope";
|
||||
return;
|
||||
}
|
||||
|
||||
auto url = _url.toString().toStdString();
|
||||
|
||||
PROFILE_RANGE_EX(resource_parse_image_raw, __FUNCTION__, 0xffff0000, 0);
|
||||
// Load the image into a gpu::Texture
|
||||
auto networkTexture = resource.staticCast<NetworkTexture>();
|
||||
texture.reset(networkTexture->getTextureLoader()(image, url));
|
||||
texture->setSource(url);
|
||||
if (texture) {
|
||||
texture->setFallbackTexture(networkTexture->getFallbackTexture());
|
||||
}
|
||||
|
||||
// Save the image into a KTXFile
|
||||
auto ktx = gpu::Texture::serialize(*texture);
|
||||
if (ktx) {
|
||||
const char* data = reinterpret_cast<const char*>(ktx->_storage->data());
|
||||
size_t length = ktx->_storage->size();
|
||||
KTXFilePointer file;
|
||||
auto& ktxCache = DependencyManager::get<TextureCache>()->_ktxCache;
|
||||
if (!ktx || !(file = ktxCache.writeFile(data, KTXCache::Metadata(_hash, length)))) {
|
||||
qCWarning(modelnetworking) << _url << "file cache failed";
|
||||
} else {
|
||||
resource.staticCast<NetworkTexture>()->_file = file;
|
||||
auto ktx = file->getKTX();
|
||||
texture->setKtxBacking(ktx);
|
||||
}
|
||||
} else {
|
||||
qCWarning(modelnetworking) << "Unable to serialize texture to KTX " << _url;
|
||||
}
|
||||
}
|
||||
|
||||
auto resource = _resource.lock(); // to ensure the resource is still needed
|
||||
if (resource) {
|
||||
QMetaObject::invokeMethod(resource.data(), "setImage",
|
||||
Q_ARG(gpu::TexturePointer, texture),
|
||||
Q_ARG(int, imageWidth), Q_ARG(int, imageHeight));
|
||||
} else {
|
||||
qCDebug(modelnetworking) << _url << "loading stopped; resource out of scope";
|
||||
}
|
||||
}
|
||||
|
||||
KTXReader::KTXReader(const QWeakPointer<Resource>& resource, const QUrl& url,
|
||||
const KTXFilePointer& ktxFile) :
|
||||
Reader(resource, url), _file(ktxFile) {}
|
||||
|
||||
void KTXReader::read() {
|
||||
PROFILE_RANGE_EX(resource_parse_image_ktx, __FUNCTION__, 0xffff0000, 0, { { "url", _url.toString() } });
|
||||
|
||||
gpu::TexturePointer texture;
|
||||
{
|
||||
auto resource = _resource.lock(); // to ensure the resource is still needed
|
||||
if (!resource) {
|
||||
qCDebug(modelnetworking) << _url << "loading stopped; resource out of scope";
|
||||
return;
|
||||
}
|
||||
|
||||
resource.staticCast<NetworkTexture>()->_file = _file;
|
||||
auto ktx = _file->getKTX();
|
||||
texture.reset(gpu::Texture::unserialize(ktx));
|
||||
texture->setKtxBacking(ktx);
|
||||
}
|
||||
|
||||
auto resource = _resource.lock(); // to ensure the resource is still needed
|
||||
if (resource) {
|
||||
QMetaObject::invokeMethod(resource.data(), "setImage",
|
||||
Q_ARG(gpu::TexturePointer, texture),
|
||||
Q_ARG(int, texture->getWidth()), Q_ARG(int, texture->getHeight()));
|
||||
} else {
|
||||
qCDebug(modelnetworking) << _url << "loading stopped; resource out of scope";
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -23,6 +23,8 @@
|
|||
#include <ResourceCache.h>
|
||||
#include <model/TextureMap.h>
|
||||
|
||||
#include "KTXCache.h"
|
||||
|
||||
const int ABSOLUTE_MAX_TEXTURE_NUM_PIXELS = 8192 * 8192;
|
||||
|
||||
namespace gpu {
|
||||
|
@ -64,7 +66,6 @@ public:
|
|||
using TextureLoaderFunc = std::function<TextureLoader>;
|
||||
|
||||
NetworkTexture(const QUrl& url, Type type, const QByteArray& content, int maxNumPixels);
|
||||
NetworkTexture(const QUrl& url, const TextureLoaderFunc& textureLoader, const QByteArray& content);
|
||||
|
||||
QString getType() const override { return "NetworkTexture"; }
|
||||
|
||||
|
@ -81,7 +82,6 @@ signals:
|
|||
void networkTextureCreated(const QWeakPointer<NetworkTexture>& self);
|
||||
|
||||
protected:
|
||||
|
||||
virtual bool isCacheable() const override { return _loaded; }
|
||||
|
||||
virtual void downloadFinished(const QByteArray& data) override;
|
||||
|
@ -90,8 +90,12 @@ protected:
|
|||
Q_INVOKABLE void setImage(gpu::TexturePointer texture, int originalWidth, int originalHeight);
|
||||
|
||||
private:
|
||||
friend class KTXReader;
|
||||
friend class ImageReader;
|
||||
|
||||
Type _type;
|
||||
TextureLoaderFunc _textureLoader { [](const QImage&, const std::string&){ return nullptr; } };
|
||||
KTXFilePointer _file;
|
||||
int _originalWidth { 0 };
|
||||
int _originalHeight { 0 };
|
||||
int _width { 0 };
|
||||
|
@ -141,9 +145,16 @@ protected:
|
|||
const void* extra) override;
|
||||
|
||||
private:
|
||||
friend class ImageReader;
|
||||
friend class NetworkTexture;
|
||||
friend class DilatableNetworkTexture;
|
||||
|
||||
TextureCache();
|
||||
virtual ~TextureCache();
|
||||
friend class DilatableNetworkTexture;
|
||||
|
||||
static const std::string KTX_DIRNAME;
|
||||
static const std::string KTX_EXT;
|
||||
KTXCache _ktxCache;
|
||||
|
||||
gpu::TexturePointer _permutationNormalTexture;
|
||||
gpu::TexturePointer _whiteTexture;
|
||||
|
|
|
@ -85,85 +85,6 @@ QImage processSourceImage(const QImage& srcImage, bool cubemap) {
|
|||
return srcImage;
|
||||
}
|
||||
|
||||
gpu::Texture* cacheTexture(const std::string& name, gpu::Texture* srcTexture, bool write = true, bool read = false) {
|
||||
if (!srcTexture) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
static QString ktxCacheFolder;
|
||||
static std::once_flag once;
|
||||
std::call_once(once, [&] {
|
||||
// Prepare cache directory
|
||||
static const QString HIFI_KTX_FOLDER("hifi_ktx");
|
||||
QString docsLocation = QStandardPaths::writableLocation(QStandardPaths::AppDataLocation);
|
||||
ktxCacheFolder = docsLocation + "/" + HIFI_KTX_FOLDER + "/";
|
||||
QFileInfo info(ktxCacheFolder);
|
||||
if (!info.exists()) {
|
||||
QDir(docsLocation).mkpath(HIFI_KTX_FOLDER);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
std::string cleanedName = QCryptographicHash::hash(QUrl::toPercentEncoding(name.c_str()), QCryptographicHash::Sha1).toHex().toStdString();
|
||||
std::string cacheFilename(ktxCacheFolder.toStdString());
|
||||
cacheFilename += "/";
|
||||
cacheFilename += cleanedName;
|
||||
cacheFilename += ".ktx";
|
||||
|
||||
gpu::Texture* returnedTexture = srcTexture;
|
||||
{
|
||||
if (write && !QFileInfo(cacheFilename.c_str()).exists()) {
|
||||
auto ktxMemory = gpu::Texture::serialize(*srcTexture);
|
||||
if (ktxMemory) {
|
||||
const auto& ktxStorage = ktxMemory->getStorage();
|
||||
QFile outFile(cacheFilename.c_str());
|
||||
if (!outFile.open(QFile::Truncate | QFile::ReadWrite)) {
|
||||
throw std::runtime_error("Unable to open file");
|
||||
}
|
||||
auto ktxSize = ktxStorage->size();
|
||||
outFile.resize(ktxSize);
|
||||
auto dest = outFile.map(0, ktxSize);
|
||||
memcpy(dest, ktxStorage->data(), ktxSize);
|
||||
outFile.unmap(dest);
|
||||
outFile.close();
|
||||
}
|
||||
}
|
||||
|
||||
if (read && QFileInfo(cacheFilename.c_str()).exists()) {
|
||||
#define DEBUG_KTX_LOADING 0
|
||||
#if DEBUG_KTX_LOADING
|
||||
{
|
||||
FILE* file = fopen(cacheFilename.c_str(), "rb");
|
||||
if (file != nullptr) {
|
||||
// obtain file size:
|
||||
fseek (file , 0 , SEEK_END);
|
||||
auto size = ftell(file);
|
||||
rewind(file);
|
||||
|
||||
auto storage = std::make_shared<storage::MemoryStorage>(size);
|
||||
fread(storage->data(), 1, storage->size(), file);
|
||||
fclose (file);
|
||||
|
||||
//then create a new texture out of the ktx
|
||||
auto theNewTexure = Texture::unserialize(ktx::KTX::create(std::static_pointer_cast<storage::Storage>(storage)),
|
||||
srcTexture->getUsageType(), srcTexture->getUsage(), srcTexture->getSampler().getDesc());
|
||||
|
||||
if (theNewTexure) {
|
||||
returnedTexture = theNewTexure;
|
||||
delete srcTexture;
|
||||
}
|
||||
}
|
||||
}
|
||||
#else
|
||||
ktx::StoragePointer storage = std::make_shared<storage::FileStorage>(cacheFilename.c_str());
|
||||
auto ktxFile = ktx::KTX::create(storage);
|
||||
returnedTexture->setKtxBacking(ktxFile);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
return returnedTexture;
|
||||
}
|
||||
|
||||
void TextureMap::setTextureSource(TextureSourcePointer& textureSource) {
|
||||
_textureSource = textureSource;
|
||||
}
|
||||
|
@ -233,7 +154,7 @@ const QImage TextureUsage::process2DImageColor(const QImage& srcImage, bool& val
|
|||
return image;
|
||||
}
|
||||
|
||||
void TextureUsage::defineColorTexelFormats(gpu::Element& formatGPU, gpu::Element& formatMip,
|
||||
void TextureUsage::defineColorTexelFormats(gpu::Element& formatGPU, gpu::Element& formatMip,
|
||||
const QImage& image, bool isLinear, bool doCompress) {
|
||||
|
||||
#ifdef COMPRESS_TEXTURES
|
||||
|
@ -300,6 +221,7 @@ void generateMips(gpu::Texture* texture, QImage& image, bool fastResize) {
|
|||
texture->assignStoredMip(level, mipImage.byteCount(), mipImage.constBits());
|
||||
}
|
||||
}
|
||||
|
||||
#else
|
||||
texture->autoGenerateMips(-1);
|
||||
#endif
|
||||
|
@ -353,7 +275,6 @@ gpu::Texture* TextureUsage::process2DTextureColorFromImage(const QImage& srcImag
|
|||
::generateMips(theTexture, image, false);
|
||||
}
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -388,7 +309,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& src
|
|||
if (image.format() != QImage::Format_ARGB32) {
|
||||
image = image.convertToFormat(QImage::Format_ARGB32);
|
||||
}
|
||||
|
||||
|
||||
|
||||
gpu::Texture* theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
|
@ -403,7 +324,6 @@ gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& src
|
|||
generateMips(theTexture, image, true);
|
||||
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture, true, true);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -434,16 +354,18 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
const double pStrength = 2.0;
|
||||
int width = image.width();
|
||||
int height = image.height();
|
||||
QImage result(width, height, QImage::Format_RGB888);
|
||||
|
||||
// QImage result(width, height, QImage::Format_RGB888);
|
||||
|
||||
QImage result(width, height, QImage::Format_ARGB32);
|
||||
|
||||
for (int i = 0; i < width; i++) {
|
||||
const int iNextClamped = clampPixelCoordinate(i + 1, width - 1);
|
||||
const int iPrevClamped = clampPixelCoordinate(i - 1, width - 1);
|
||||
|
||||
|
||||
for (int j = 0; j < height; j++) {
|
||||
const int jNextClamped = clampPixelCoordinate(j + 1, height - 1);
|
||||
const int jPrevClamped = clampPixelCoordinate(j - 1, height - 1);
|
||||
|
||||
|
||||
// surrounding pixels
|
||||
const QRgb topLeft = image.pixel(iPrevClamped, jPrevClamped);
|
||||
const QRgb top = image.pixel(iPrevClamped, j);
|
||||
|
@ -453,7 +375,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
const QRgb bottom = image.pixel(iNextClamped, j);
|
||||
const QRgb bottomLeft = image.pixel(iNextClamped, jPrevClamped);
|
||||
const QRgb left = image.pixel(i, jPrevClamped);
|
||||
|
||||
|
||||
// take their gray intensities
|
||||
// since it's a grayscale image, the value of each component RGB is the same
|
||||
const double tl = qRed(topLeft);
|
||||
|
@ -464,15 +386,15 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
const double b = qRed(bottom);
|
||||
const double bl = qRed(bottomLeft);
|
||||
const double l = qRed(left);
|
||||
|
||||
|
||||
// apply the sobel filter
|
||||
const double dX = (tr + pStrength * r + br) - (tl + pStrength * l + bl);
|
||||
const double dY = (bl + pStrength * b + br) - (tl + pStrength * t + tr);
|
||||
const double dZ = RGBA_MAX / pStrength;
|
||||
|
||||
|
||||
glm::vec3 v(dX, dY, dZ);
|
||||
glm::normalize(v);
|
||||
|
||||
|
||||
// convert to rgb from the value obtained computing the filter
|
||||
QRgb qRgbValue = qRgba(mapComponent(v.x), mapComponent(v.y), mapComponent(v.z), 1.0);
|
||||
result.setPixel(i, j, qRgbValue);
|
||||
|
@ -480,17 +402,19 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
|
|||
}
|
||||
|
||||
gpu::Texture* theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::NUINT8, gpu::RGB);
|
||||
gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::NUINT8, gpu::RGB);
|
||||
if ((result.width() > 0) && (result.height() > 0)) {
|
||||
|
||||
theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
|
||||
gpu::Element formatMip = gpu::Element::COLOR_BGRA_32;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_RGBA_32;
|
||||
|
||||
|
||||
theTexture = (gpu::Texture::create2D(formatGPU, result.width(), result.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture->setStoredMipFormat(formatMip);
|
||||
theTexture->assignStoredMip(0, image.byteCount(), image.constBits());
|
||||
generateMips(theTexture, image, true);
|
||||
theTexture->assignStoredMip(0, result.byteCount(), result.constBits());
|
||||
generateMips(theTexture, result, true);
|
||||
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture, true, false);
|
||||
theTexture->setSource(srcImageName);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -525,9 +449,8 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromImage(const QImage& srcIma
|
|||
theTexture->setStoredMipFormat(formatMip);
|
||||
theTexture->assignStoredMip(0, image.byteCount(), image.constBits());
|
||||
generateMips(theTexture, image, true);
|
||||
|
||||
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -548,12 +471,12 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
|
|||
|
||||
// Gloss turned into Rough
|
||||
image.invertPixels(QImage::InvertRgba);
|
||||
|
||||
|
||||
image = image.convertToFormat(QImage::Format_Grayscale8);
|
||||
|
||||
|
||||
gpu::Texture* theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
|
||||
|
||||
#ifdef COMPRESS_TEXTURES
|
||||
gpu::Element formatGPU = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::COMPRESSED_R);
|
||||
#else
|
||||
|
@ -568,9 +491,8 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
|
|||
generateMips(theTexture, image, true);
|
||||
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture);
|
||||
}
|
||||
|
||||
|
||||
return theTexture;
|
||||
}
|
||||
|
||||
|
@ -606,7 +528,6 @@ gpu::Texture* TextureUsage::createMetallicTextureFromImage(const QImage& srcImag
|
|||
generateMips(theTexture, image, true);
|
||||
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -629,18 +550,18 @@ public:
|
|||
int _y = 0;
|
||||
bool _horizontalMirror = false;
|
||||
bool _verticalMirror = false;
|
||||
|
||||
|
||||
Face() {}
|
||||
Face(int x, int y, bool horizontalMirror, bool verticalMirror) : _x(x), _y(y), _horizontalMirror(horizontalMirror), _verticalMirror(verticalMirror) {}
|
||||
};
|
||||
|
||||
|
||||
Face _faceXPos;
|
||||
Face _faceXNeg;
|
||||
Face _faceYPos;
|
||||
Face _faceYNeg;
|
||||
Face _faceZPos;
|
||||
Face _faceZNeg;
|
||||
|
||||
|
||||
CubeLayout(int wr, int hr, Face fXP, Face fXN, Face fYP, Face fYN, Face fZP, Face fZN) :
|
||||
_type(FLAT),
|
||||
_widthRatio(wr),
|
||||
|
@ -883,7 +804,7 @@ gpu::Texture* TextureUsage::processCubeTextureColorFromImage(const QImage& srcIm
|
|||
defineColorTexelFormats(formatGPU, formatMip, image, isLinear, doCompress);
|
||||
|
||||
// Find the layout of the cubemap in the 2D image
|
||||
// Use the original image size since processSourceImage may have altered the size / aspect ratio
|
||||
// Use the original image size since processSourceImage may have altered the size / aspect ratio
|
||||
int foundLayout = CubeLayout::findLayout(srcImage.width(), srcImage.height());
|
||||
|
||||
std::vector<QImage> faces;
|
||||
|
@ -940,7 +861,6 @@ gpu::Texture* TextureUsage::processCubeTextureColorFromImage(const QImage& srcIm
|
|||
}
|
||||
|
||||
theTexture->setSource(srcImageName);
|
||||
theTexture = cacheTexture(theTexture->source(), theTexture);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
#include "udt/PacketHeaders.h"
|
||||
#include "SharedUtil.h"
|
||||
#include "UUID.h"
|
||||
#include "ServerPathUtils.h"
|
||||
|
||||
#include <QtCore/QDataStream>
|
||||
|
||||
|
|
243
libraries/networking/src/FileCache.cpp
Normal file
243
libraries/networking/src/FileCache.cpp
Normal file
|
@ -0,0 +1,243 @@
|
|||
//
|
||||
// FileCache.cpp
|
||||
// libraries/model-networking/src
|
||||
//
|
||||
// Created by Zach Pomerantz on 2/21/2017.
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "FileCache.h"
|
||||
|
||||
#include <cstdio>
|
||||
#include <cassert>
|
||||
#include <fstream>
|
||||
#include <unordered_set>
|
||||
|
||||
#include <QDir>
|
||||
|
||||
#include <PathUtils.h>
|
||||
|
||||
Q_LOGGING_CATEGORY(file_cache, "hifi.file_cache", QtWarningMsg)
|
||||
|
||||
using namespace cache;
|
||||
|
||||
static const std::string MANIFEST_NAME = "manifest";
|
||||
|
||||
static const size_t BYTES_PER_MEGABYTES = 1024 * 1024;
|
||||
static const size_t BYTES_PER_GIGABYTES = 1024 * BYTES_PER_MEGABYTES;
|
||||
const size_t FileCache::DEFAULT_UNUSED_MAX_SIZE = 5 * BYTES_PER_GIGABYTES; // 5GB
|
||||
const size_t FileCache::MAX_UNUSED_MAX_SIZE = 100 * BYTES_PER_GIGABYTES; // 100GB
|
||||
const size_t FileCache::DEFAULT_OFFLINE_MAX_SIZE = 2 * BYTES_PER_GIGABYTES; // 2GB
|
||||
|
||||
void FileCache::setUnusedFileCacheSize(size_t unusedFilesMaxSize) {
|
||||
_unusedFilesMaxSize = std::min(unusedFilesMaxSize, MAX_UNUSED_MAX_SIZE);
|
||||
reserve(0);
|
||||
emit dirty();
|
||||
}
|
||||
|
||||
void FileCache::setOfflineFileCacheSize(size_t offlineFilesMaxSize) {
|
||||
_offlineFilesMaxSize = std::min(offlineFilesMaxSize, MAX_UNUSED_MAX_SIZE);
|
||||
}
|
||||
|
||||
FileCache::FileCache(const std::string& dirname, const std::string& ext, QObject* parent) :
|
||||
QObject(parent),
|
||||
_ext(ext),
|
||||
_dirname(dirname),
|
||||
_dirpath(PathUtils::getAppLocalDataFilePath(dirname.c_str()).toStdString()) {}
|
||||
|
||||
FileCache::~FileCache() {
|
||||
clear();
|
||||
}
|
||||
|
||||
void fileDeleter(File* file) {
|
||||
file->deleter();
|
||||
}
|
||||
|
||||
void FileCache::initialize() {
|
||||
QDir dir(_dirpath.c_str());
|
||||
|
||||
if (dir.exists()) {
|
||||
auto nameFilters = QStringList(("*." + _ext).c_str());
|
||||
auto filters = QDir::Filters(QDir::NoDotAndDotDot | QDir::Files);
|
||||
auto sort = QDir::SortFlags(QDir::Time);
|
||||
auto files = dir.entryList(nameFilters, filters, sort);
|
||||
|
||||
// load persisted files
|
||||
foreach(QString filename, files) {
|
||||
const Key key = filename.section('.', 0, 1).toStdString();
|
||||
const std::string filepath = dir.filePath(filename).toStdString();
|
||||
const size_t length = std::ifstream(filepath, std::ios::binary | std::ios::ate).tellg();
|
||||
addFile(Metadata(key, length), filepath);
|
||||
}
|
||||
|
||||
qCDebug(file_cache, "[%s] Initialized %s", _dirname.c_str(), _dirpath.c_str());
|
||||
} else {
|
||||
dir.mkpath(_dirpath.c_str());
|
||||
qCDebug(file_cache, "[%s] Created %s", _dirname.c_str(), _dirpath.c_str());
|
||||
}
|
||||
|
||||
_initialized = true;
|
||||
}
|
||||
|
||||
FilePointer FileCache::addFile(Metadata&& metadata, const std::string& filepath) {
|
||||
FilePointer file(createFile(std::move(metadata), filepath).release(), &fileDeleter);
|
||||
if (file) {
|
||||
_numTotalFiles += 1;
|
||||
_totalFilesSize += file->getLength();
|
||||
file->_cache = this;
|
||||
emit dirty();
|
||||
|
||||
Lock lock(_filesMutex);
|
||||
_files[file->getKey()] = file;
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
FilePointer FileCache::writeFile(const char* data, File::Metadata&& metadata) {
|
||||
assert(_initialized);
|
||||
|
||||
std::string filepath = getFilepath(metadata.key);
|
||||
|
||||
Lock lock(_filesMutex);
|
||||
|
||||
// if file already exists, return it
|
||||
FilePointer file = getFile(metadata.key);
|
||||
if (file) {
|
||||
qCWarning(file_cache, "[%s] Attempted to overwrite %s", _dirname.c_str(), metadata.key.c_str());
|
||||
return file;
|
||||
}
|
||||
|
||||
// write the new file
|
||||
FILE* saveFile = fopen(filepath.c_str(), "wb");
|
||||
if (saveFile != nullptr && fwrite(data, metadata.length, 1, saveFile) && fclose(saveFile) == 0) {
|
||||
file = addFile(std::move(metadata), filepath);
|
||||
} else {
|
||||
qCWarning(file_cache, "[%s] Failed to write %s (%s)", _dirname.c_str(), metadata.key.c_str(), strerror(errno));
|
||||
errno = 0;
|
||||
}
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
FilePointer FileCache::getFile(const Key& key) {
|
||||
assert(_initialized);
|
||||
|
||||
FilePointer file;
|
||||
|
||||
Lock lock(_filesMutex);
|
||||
|
||||
// check if file exists
|
||||
const auto it = _files.find(key);
|
||||
if (it != _files.cend()) {
|
||||
file = it->second.lock();
|
||||
if (file) {
|
||||
// if it exists, it is active - remove it from the cache
|
||||
removeUnusedFile(file);
|
||||
qCDebug(file_cache, "[%s] Found %s", _dirname.c_str(), key.c_str());
|
||||
emit dirty();
|
||||
} else {
|
||||
// if not, remove the weak_ptr
|
||||
_files.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
std::string FileCache::getFilepath(const Key& key) {
|
||||
return _dirpath + '/' + key + '.' + _ext;
|
||||
}
|
||||
|
||||
void FileCache::addUnusedFile(const FilePointer file) {
|
||||
{
|
||||
Lock lock(_filesMutex);
|
||||
_files[file->getKey()] = file;
|
||||
}
|
||||
|
||||
reserve(file->getLength());
|
||||
file->_LRUKey = ++_lastLRUKey;
|
||||
|
||||
{
|
||||
Lock lock(_unusedFilesMutex);
|
||||
_unusedFiles.insert({ file->_LRUKey, file });
|
||||
_numUnusedFiles += 1;
|
||||
_unusedFilesSize += file->getLength();
|
||||
}
|
||||
|
||||
emit dirty();
|
||||
}
|
||||
|
||||
void FileCache::removeUnusedFile(const FilePointer file) {
|
||||
Lock lock(_unusedFilesMutex);
|
||||
const auto it = _unusedFiles.find(file->_LRUKey);
|
||||
if (it != _unusedFiles.cend()) {
|
||||
_unusedFiles.erase(it);
|
||||
_numUnusedFiles -= 1;
|
||||
_unusedFilesSize -= file->getLength();
|
||||
}
|
||||
}
|
||||
|
||||
void FileCache::reserve(size_t length) {
|
||||
Lock unusedLock(_unusedFilesMutex);
|
||||
while (!_unusedFiles.empty() &&
|
||||
_unusedFilesSize + length > _unusedFilesMaxSize) {
|
||||
auto it = _unusedFiles.begin();
|
||||
auto file = it->second;
|
||||
auto length = file->getLength();
|
||||
|
||||
unusedLock.unlock();
|
||||
{
|
||||
file->_cache = nullptr;
|
||||
Lock lock(_filesMutex);
|
||||
_files.erase(file->getKey());
|
||||
}
|
||||
unusedLock.lock();
|
||||
|
||||
_unusedFiles.erase(it);
|
||||
_numTotalFiles -= 1;
|
||||
_numUnusedFiles -= 1;
|
||||
_totalFilesSize -= length;
|
||||
_unusedFilesSize -= length;
|
||||
}
|
||||
}
|
||||
|
||||
void FileCache::clear() {
|
||||
Lock unusedFilesLock(_unusedFilesMutex);
|
||||
for (const auto& pair : _unusedFiles) {
|
||||
auto& file = pair.second;
|
||||
file->_cache = nullptr;
|
||||
|
||||
if (_totalFilesSize > _offlineFilesMaxSize) {
|
||||
_totalFilesSize -= file->getLength();
|
||||
} else {
|
||||
file->_shouldPersist = true;
|
||||
qCDebug(file_cache, "[%s] Persisting %s", _dirname.c_str(), file->getKey().c_str());
|
||||
}
|
||||
}
|
||||
_unusedFiles.clear();
|
||||
}
|
||||
|
||||
void File::deleter() {
|
||||
if (_cache) {
|
||||
FilePointer self(this, &fileDeleter);
|
||||
_cache->addUnusedFile(self);
|
||||
} else {
|
||||
deleteLater();
|
||||
}
|
||||
}
|
||||
|
||||
File::File(Metadata&& metadata, const std::string& filepath) :
|
||||
_key(std::move(metadata.key)),
|
||||
_length(metadata.length),
|
||||
_filepath(filepath) {}
|
||||
|
||||
File::~File() {
|
||||
QFile file(getFilepath().c_str());
|
||||
if (file.exists() && !_shouldPersist) {
|
||||
qCInfo(file_cache, "Unlinked %s", getFilepath().c_str());
|
||||
file.remove();
|
||||
}
|
||||
}
|
158
libraries/networking/src/FileCache.h
Normal file
158
libraries/networking/src/FileCache.h
Normal file
|
@ -0,0 +1,158 @@
|
|||
//
|
||||
// FileCache.h
|
||||
// libraries/networking/src
|
||||
//
|
||||
// Created by Zach Pomerantz on 2/21/2017.
|
||||
// Copyright 2017 High Fidelity, Inc. // // Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_FileCache_h
|
||||
#define hifi_FileCache_h
|
||||
|
||||
#include <atomic>
|
||||
#include <memory>
|
||||
#include <cstddef>
|
||||
#include <map>
|
||||
#include <mutex>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
|
||||
#include <QObject>
|
||||
#include <QLoggingCategory>
|
||||
|
||||
Q_DECLARE_LOGGING_CATEGORY(file_cache)
|
||||
|
||||
namespace cache {
|
||||
|
||||
class File;
|
||||
using FilePointer = std::shared_ptr<File>;
|
||||
|
||||
class FileCache : public QObject {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(size_t numTotal READ getNumTotalFiles NOTIFY dirty)
|
||||
Q_PROPERTY(size_t numCached READ getNumCachedFiles NOTIFY dirty)
|
||||
Q_PROPERTY(size_t sizeTotal READ getSizeTotalFiles NOTIFY dirty)
|
||||
Q_PROPERTY(size_t sizeCached READ getSizeCachedFiles NOTIFY dirty)
|
||||
|
||||
static const size_t DEFAULT_UNUSED_MAX_SIZE;
|
||||
static const size_t MAX_UNUSED_MAX_SIZE;
|
||||
static const size_t DEFAULT_OFFLINE_MAX_SIZE;
|
||||
|
||||
public:
|
||||
size_t getNumTotalFiles() const { return _numTotalFiles; }
|
||||
size_t getNumCachedFiles() const { return _numUnusedFiles; }
|
||||
size_t getSizeTotalFiles() const { return _totalFilesSize; }
|
||||
size_t getSizeCachedFiles() const { return _unusedFilesSize; }
|
||||
|
||||
void setUnusedFileCacheSize(size_t unusedFilesMaxSize);
|
||||
size_t getUnusedFileCacheSize() const { return _unusedFilesSize; }
|
||||
|
||||
void setOfflineFileCacheSize(size_t offlineFilesMaxSize);
|
||||
|
||||
// initialize FileCache with a directory name (not a path, ex.: "temp_jpgs") and an ext (ex.: "jpg")
|
||||
FileCache(const std::string& dirname, const std::string& ext, QObject* parent = nullptr);
|
||||
virtual ~FileCache();
|
||||
|
||||
using Key = std::string;
|
||||
struct Metadata {
|
||||
Metadata(const Key& key, size_t length) :
|
||||
key(key), length(length) {}
|
||||
Key key;
|
||||
size_t length;
|
||||
};
|
||||
|
||||
// derived classes should implement a setter/getter, for example, for a FileCache backing a network cache:
|
||||
//
|
||||
// DerivedFilePointer writeFile(const char* data, DerivedMetadata&& metadata) {
|
||||
// return writeFile(data, std::forward(metadata));
|
||||
// }
|
||||
//
|
||||
// DerivedFilePointer getFile(const QUrl& url) {
|
||||
// auto key = lookup_hash_for(url); // assuming hashing url in create/evictedFile overrides
|
||||
// return getFile(key);
|
||||
// }
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
|
||||
protected:
|
||||
/// must be called after construction to create the cache on the fs and restore persisted files
|
||||
void initialize();
|
||||
|
||||
FilePointer writeFile(const char* data, Metadata&& metadata);
|
||||
FilePointer getFile(const Key& key);
|
||||
|
||||
/// create a file
|
||||
virtual std::unique_ptr<File> createFile(Metadata&& metadata, const std::string& filepath) = 0;
|
||||
|
||||
private:
|
||||
using Mutex = std::recursive_mutex;
|
||||
using Lock = std::unique_lock<Mutex>;
|
||||
|
||||
friend class File;
|
||||
|
||||
std::string getFilepath(const Key& key);
|
||||
|
||||
FilePointer addFile(Metadata&& metadata, const std::string& filepath);
|
||||
void addUnusedFile(const FilePointer file);
|
||||
void removeUnusedFile(const FilePointer file);
|
||||
void reserve(size_t length);
|
||||
void clear();
|
||||
|
||||
std::atomic<size_t> _numTotalFiles { 0 };
|
||||
std::atomic<size_t> _numUnusedFiles { 0 };
|
||||
std::atomic<size_t> _totalFilesSize { 0 };
|
||||
std::atomic<size_t> _unusedFilesSize { 0 };
|
||||
|
||||
std::string _ext;
|
||||
std::string _dirname;
|
||||
std::string _dirpath;
|
||||
bool _initialized { false };
|
||||
|
||||
std::unordered_map<Key, std::weak_ptr<File>> _files;
|
||||
Mutex _filesMutex;
|
||||
|
||||
std::map<int, FilePointer> _unusedFiles;
|
||||
Mutex _unusedFilesMutex;
|
||||
size_t _unusedFilesMaxSize { DEFAULT_UNUSED_MAX_SIZE };
|
||||
int _lastLRUKey { 0 };
|
||||
|
||||
size_t _offlineFilesMaxSize { DEFAULT_OFFLINE_MAX_SIZE };
|
||||
};
|
||||
|
||||
class File : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
using Key = FileCache::Key;
|
||||
using Metadata = FileCache::Metadata;
|
||||
|
||||
Key getKey() const { return _key; }
|
||||
size_t getLength() const { return _length; }
|
||||
std::string getFilepath() const { return _filepath; }
|
||||
|
||||
virtual ~File();
|
||||
/// overrides should call File::deleter to maintain caching behavior
|
||||
virtual void deleter();
|
||||
|
||||
protected:
|
||||
/// when constructed, the file has already been created/written
|
||||
File(Metadata&& metadata, const std::string& filepath);
|
||||
|
||||
private:
|
||||
friend class FileCache;
|
||||
|
||||
const Key _key;
|
||||
const size_t _length;
|
||||
const std::string _filepath;
|
||||
|
||||
FileCache* _cache;
|
||||
int _LRUKey { 0 };
|
||||
|
||||
bool _shouldPersist { false };
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // hifi_FileCache_h
|
|
@ -346,7 +346,9 @@ void AnimDebugDraw::update() {
|
|||
numVerts += (int)markerMap.size() * VERTICES_PER_BONE;
|
||||
auto myAvatarMarkerMap = DebugDraw::getInstance().getMyAvatarMarkerMap();
|
||||
numVerts += (int)myAvatarMarkerMap.size() * VERTICES_PER_BONE;
|
||||
numVerts += (int)DebugDraw::getInstance().getRays().size() * VERTICES_PER_RAY;
|
||||
auto rays = DebugDraw::getInstance().getRays();
|
||||
DebugDraw::getInstance().clearRays();
|
||||
numVerts += (int)rays.size() * VERTICES_PER_RAY;
|
||||
|
||||
// allocate verts!
|
||||
std::vector<AnimDebugDrawData::Vertex> vertices;
|
||||
|
@ -398,7 +400,7 @@ void AnimDebugDraw::update() {
|
|||
}
|
||||
|
||||
// draw rays from shared DebugDraw singleton
|
||||
for (auto& iter : DebugDraw::getInstance().getRays()) {
|
||||
for (auto& iter : rays) {
|
||||
addLine(std::get<0>(iter), std::get<1>(iter), std::get<2>(iter), v);
|
||||
}
|
||||
DebugDraw::getInstance().clearRays();
|
||||
|
|
|
@ -32,10 +32,13 @@ protected:
|
|||
Q_INVOKABLE void setStereoInput(bool stereo);
|
||||
|
||||
signals:
|
||||
void mutedByMixer();
|
||||
void environmentMuted();
|
||||
void receivedFirstPacket();
|
||||
void disconnected();
|
||||
void mutedByMixer(); /// the client has been muted by the mixer
|
||||
void environmentMuted(); /// the entire environment has been muted by the mixer
|
||||
void receivedFirstPacket(); /// the client has received its first packet from the audio mixer
|
||||
void disconnected(); /// the client has been disconnected from the audio mixer
|
||||
void noiseGateOpened(); /// the noise gate has opened
|
||||
void noiseGateClosed(); /// the noise gate has closed
|
||||
void inputReceived(const QByteArray& inputSamples); /// a frame of mic input audio has been received and processed
|
||||
|
||||
private:
|
||||
AudioScriptingInterface();
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
#include <QtCore/QStandardPaths>
|
||||
#include <QtCore/QVariant>
|
||||
|
||||
#include "ServerPathUtils.h"
|
||||
#include "PathUtils.h"
|
||||
#include "SharedLogging.h"
|
||||
|
||||
QVariantMap HifiConfigVariantMap::mergeCLParametersWithJSONConfig(const QStringList& argumentList) {
|
||||
|
@ -127,7 +127,7 @@ void HifiConfigVariantMap::loadConfig(const QStringList& argumentList) {
|
|||
_userConfigFilename = argumentList[userConfigIndex + 1];
|
||||
} else {
|
||||
// we weren't passed a user config path
|
||||
_userConfigFilename = ServerPathUtils::getDataFilePath(USER_CONFIG_FILE_NAME);
|
||||
_userConfigFilename = PathUtils::getAppDataFilePath(USER_CONFIG_FILE_NAME);
|
||||
|
||||
// as of 1/19/2016 this path was moved so we attempt a migration for first run post migration here
|
||||
|
||||
|
@ -153,7 +153,7 @@ void HifiConfigVariantMap::loadConfig(const QStringList& argumentList) {
|
|||
// we have the old file and not the new file - time to copy the file
|
||||
|
||||
// make the destination directory if it doesn't exist
|
||||
auto dataDirectory = ServerPathUtils::getDataDirectory();
|
||||
auto dataDirectory = PathUtils::getAppDataPath();
|
||||
if (QDir().mkpath(dataDirectory)) {
|
||||
if (oldConfigFile.copy(_userConfigFilename)) {
|
||||
qCDebug(shared) << "Migrated config file from" << oldConfigFilename << "to" << _userConfigFilename;
|
||||
|
|
|
@ -30,18 +30,20 @@ const QString& PathUtils::resourcesPath() {
|
|||
return staticResourcePath;
|
||||
}
|
||||
|
||||
QString PathUtils::getRootDataDirectory() {
|
||||
auto dataPath = QStandardPaths::writableLocation(QStandardPaths::HomeLocation);
|
||||
QString PathUtils::getAppDataPath() {
|
||||
return QStandardPaths::writableLocation(QStandardPaths::AppDataLocation) + "/";
|
||||
}
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
dataPath += "/AppData/Roaming/";
|
||||
#elif defined(Q_OS_OSX)
|
||||
dataPath += "/Library/Application Support/";
|
||||
#else
|
||||
dataPath += "/.local/share/";
|
||||
#endif
|
||||
QString PathUtils::getAppLocalDataPath() {
|
||||
return QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation) + "/";
|
||||
}
|
||||
|
||||
return dataPath;
|
||||
QString PathUtils::getAppDataFilePath(const QString& filename) {
|
||||
return QDir(getAppDataPath()).absoluteFilePath(filename);
|
||||
}
|
||||
|
||||
QString PathUtils::getAppLocalDataFilePath(const QString& filename) {
|
||||
return QDir(getAppLocalDataPath()).absoluteFilePath(filename);
|
||||
}
|
||||
|
||||
QString fileNameWithoutExtension(const QString& fileName, const QVector<QString> possibleExtensions) {
|
||||
|
|
|
@ -27,7 +27,12 @@ class PathUtils : public QObject, public Dependency {
|
|||
Q_PROPERTY(QString resources READ resourcesPath)
|
||||
public:
|
||||
static const QString& resourcesPath();
|
||||
static QString getRootDataDirectory();
|
||||
|
||||
static QString getAppDataPath();
|
||||
static QString getAppLocalDataPath();
|
||||
|
||||
static QString getAppDataFilePath(const QString& filename);
|
||||
static QString getAppLocalDataFilePath(const QString& filename);
|
||||
|
||||
static Qt::CaseSensitivity getFSCaseSensitivity();
|
||||
static QString stripFilename(const QUrl& url);
|
||||
|
|
|
@ -47,6 +47,9 @@ QScriptValue PointerEvent::toScriptValue(QScriptEngine* engine, const PointerEve
|
|||
case Press:
|
||||
obj.setProperty("type", "Press");
|
||||
break;
|
||||
case DoublePress:
|
||||
obj.setProperty("type", "DoublePress");
|
||||
break;
|
||||
case Release:
|
||||
obj.setProperty("type", "Release");
|
||||
break;
|
||||
|
@ -128,6 +131,8 @@ void PointerEvent::fromScriptValue(const QScriptValue& object, PointerEvent& eve
|
|||
QString typeStr = type.isString() ? type.toString() : "Move";
|
||||
if (typeStr == "Press") {
|
||||
event._type = Press;
|
||||
} else if (typeStr == "DoublePress") {
|
||||
event._type = DoublePress;
|
||||
} else if (typeStr == "Release") {
|
||||
event._type = Release;
|
||||
} else {
|
||||
|
|
|
@ -26,9 +26,10 @@ public:
|
|||
};
|
||||
|
||||
enum EventType {
|
||||
Press, // A button has just been pressed
|
||||
Release, // A button has just been released
|
||||
Move // The pointer has just moved
|
||||
Press, // A button has just been pressed
|
||||
DoublePress, // A button has just been double pressed
|
||||
Release, // A button has just been released
|
||||
Move // The pointer has just moved
|
||||
};
|
||||
|
||||
PointerEvent();
|
||||
|
|
|
@ -1,31 +0,0 @@
|
|||
//
|
||||
// ServerPathUtils.cpp
|
||||
// libraries/shared/src
|
||||
//
|
||||
// Created by Ryan Huffman on 01/12/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "ServerPathUtils.h"
|
||||
|
||||
#include <QStandardPaths>
|
||||
#include <QtCore/QDir>
|
||||
#include <QtWidgets/qapplication.h>
|
||||
#include <QDebug>
|
||||
|
||||
#include "PathUtils.h"
|
||||
|
||||
QString ServerPathUtils::getDataDirectory() {
|
||||
auto dataPath = PathUtils::getRootDataDirectory();
|
||||
|
||||
dataPath += qApp->organizationName() + "/" + qApp->applicationName();
|
||||
|
||||
return QDir::cleanPath(dataPath);
|
||||
}
|
||||
|
||||
QString ServerPathUtils::getDataFilePath(QString filename) {
|
||||
return QDir(getDataDirectory()).absoluteFilePath(filename);
|
||||
}
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
//
|
||||
// ServerPathUtils.h
|
||||
// libraries/shared/src
|
||||
//
|
||||
// Created by Ryan Huffman on 01/12/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_ServerPathUtils_h
|
||||
#define hifi_ServerPathUtils_h
|
||||
|
||||
#include <QString>
|
||||
|
||||
namespace ServerPathUtils {
|
||||
QString getDataDirectory();
|
||||
QString getDataFilePath(QString filename);
|
||||
}
|
||||
|
||||
#endif // hifi_ServerPathUtils_h
|
|
@ -24,29 +24,34 @@ public:
|
|||
RateCounter() { _rate = 0; } // avoid use of std::atomic copy ctor
|
||||
|
||||
void increment(size_t count = 1) {
|
||||
auto now = usecTimestampNow();
|
||||
float currentIntervalMs = (now - _start) / (float) USECS_PER_MSEC;
|
||||
if (currentIntervalMs > (float) INTERVAL) {
|
||||
float currentCount = _count;
|
||||
float intervalSeconds = currentIntervalMs / (float) MSECS_PER_SECOND;
|
||||
_rate = roundf(currentCount / intervalSeconds * _scale) / _scale;
|
||||
_start = now;
|
||||
_count = 0;
|
||||
};
|
||||
checkRate();
|
||||
_count += count;
|
||||
}
|
||||
|
||||
float rate() const { return _rate; }
|
||||
float rate() const { checkRate(); return _rate; }
|
||||
|
||||
uint8_t precision() const { return PRECISION; }
|
||||
|
||||
uint32_t interval() const { return INTERVAL; }
|
||||
|
||||
private:
|
||||
uint64_t _start { usecTimestampNow() };
|
||||
size_t _count { 0 };
|
||||
mutable uint64_t _start { usecTimestampNow() };
|
||||
mutable size_t _count { 0 };
|
||||
const float _scale { powf(10, PRECISION) };
|
||||
std::atomic<float> _rate;
|
||||
mutable std::atomic<float> _rate;
|
||||
|
||||
void checkRate() const {
|
||||
auto now = usecTimestampNow();
|
||||
float currentIntervalMs = (now - _start) / (float)USECS_PER_MSEC;
|
||||
if (currentIntervalMs > (float)INTERVAL) {
|
||||
float currentCount = _count;
|
||||
float intervalSeconds = currentIntervalMs / (float)MSECS_PER_SECOND;
|
||||
_rate = roundf(currentCount / intervalSeconds * _scale) / _scale;
|
||||
_start = now;
|
||||
_count = 0;
|
||||
};
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
19
script-archive/entityScripts/doubleClickExample.js
Normal file
19
script-archive/entityScripts/doubleClickExample.js
Normal file
|
@ -0,0 +1,19 @@
|
|||
(function() {
|
||||
var _this;
|
||||
function DoubleClickExample() {
|
||||
_this = this;
|
||||
return;
|
||||
}
|
||||
|
||||
DoubleClickExample.prototype = {
|
||||
clickDownOnEntity: function() {
|
||||
print("clickDownOnEntity");
|
||||
},
|
||||
|
||||
doubleclickOnEntity: function() {
|
||||
print("doubleclickOnEntity");
|
||||
}
|
||||
|
||||
};
|
||||
return new DoubleClickExample();
|
||||
});
|
|
@ -72,6 +72,9 @@ tablet.screenChanged.connect(onScreenChanged);
|
|||
AudioDevice.muteToggled.connect(onMuteToggled);
|
||||
|
||||
Script.scriptEnding.connect(function () {
|
||||
if (onAudioScreen) {
|
||||
tablet.gotoHomeScreen();
|
||||
}
|
||||
button.clicked.disconnect(onClicked);
|
||||
tablet.screenChanged.disconnect(onScreenChanged);
|
||||
AudioDevice.muteToggled.disconnect(onMuteToggled);
|
||||
|
|
95
scripts/system/audioScope.js
Normal file
95
scripts/system/audioScope.js
Normal file
|
@@ -0,0 +1,95 @@
"use strict";
//
// audioScope.js
// scripts/system/
//
// Created by Brad Hefta-Gaub on 3/10/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* global Script, Tablet, AudioScope, Audio */

(function () { // BEGIN LOCAL_SCOPE

    var scopeVisibile = AudioScope.getVisible();
    var scopePaused = AudioScope.getPause();
    var autoPause = false;

    var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
    var showScopeButton = tablet.addButton({
        icon: "icons/tablet-icons/scope.svg",
        text: "Audio Scope",
        isActive: scopeVisibile
    });

    var scopePauseImage = "icons/tablet-icons/scope-pause.svg";
    var scopePlayImage = "icons/tablet-icons/scope-play.svg";

    var pauseScopeButton = tablet.addButton({
        icon: scopePaused ? scopePlayImage : scopePauseImage,
        text: scopePaused ? "Unpause" : "Pause",
        isActive: scopePaused
    });

    var autoPauseScopeButton = tablet.addButton({
        icon: "icons/tablet-icons/scope-auto.svg",
        text: "Auto Pause",
        isActive: autoPause
    });

    function setScopePause(paused) {
        scopePaused = paused;
        pauseScopeButton.editProperties({
            isActive: scopePaused,
            icon: scopePaused ? scopePlayImage : scopePauseImage,
            text: scopePaused ? "Unpause" : "Pause"
        });
        AudioScope.setPause(scopePaused);
    }

    showScopeButton.clicked.connect(function () {
        // toggle button active state
        scopeVisibile = !scopeVisibile;
        showScopeButton.editProperties({
            isActive: scopeVisibile
        });

        AudioScope.setVisible(scopeVisibile);
    });

    pauseScopeButton.clicked.connect(function () {
        // toggle button active state
        setScopePause(!scopePaused);
    });

    autoPauseScopeButton.clicked.connect(function () {
        // toggle button active state
        autoPause = !autoPause;
        autoPauseScopeButton.editProperties({
            isActive: autoPause,
            text: autoPause ? "Auto Pause" : "Manual"
        });
    });

    Script.scriptEnding.connect(function () {
        tablet.removeButton(showScopeButton);
        tablet.removeButton(pauseScopeButton);
        tablet.removeButton(autoPauseScopeButton);
    });

    Audio.noiseGateOpened.connect(function(){
        if (autoPause) {
            setScopePause(false);
        }
    });

    Audio.noiseGateClosed.connect(function(){
        // noise gate closed
        if (autoPause) {
            setScopePause(true);
        }
    });

}()); // END LOCAL_SCOPE
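The buttons above are thin wrappers over the AudioScope interface, so the same state can also be driven directly from another script using only the calls that appear in this file. A sketch, not part of the commit:

// Illustrative only: show the scope, make sure it is running, and report its state.
AudioScope.setVisible(true);
AudioScope.setPause(false);
print("scope visible: " + AudioScope.getVisible() + ", paused: " + AudioScope.getPause());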
@@ -18,13 +18,14 @@ var button;
var buttonName = "GOTO";
var toolBar = null;
var tablet = null;

var onGotoScreen = false;
function onAddressBarShown(visible) {
    button.editProperties({isActive: visible});
}

function onClicked(){
    DialogsManager.toggleAddressBar();
    onGotoScreen = !onGotoScreen;
}

if (Settings.getValue("HUDUIEnabled")) {
@@ -49,6 +50,9 @@ button.clicked.connect(onClicked);
DialogsManager.addressBarShown.connect(onAddressBarShown);

Script.scriptEnding.connect(function () {
    if (onGotoScreen) {
        DialogsManager.toggleAddressBar();
    }
    button.clicked.disconnect(onClicked);
    if (tablet) {
        tablet.removeButton(button);
@@ -48,6 +48,9 @@
}, POLL_RATE);

Script.scriptEnding.connect(function () {
    if (enabled) {
        Menu.closeInfoView('InfoView_html/help.html');
    }
    button.clicked.disconnect(onClicked);
    Script.clearInterval(interval);
    if (tablet) {
@@ -45,6 +45,7 @@ var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var desktopOnlyViews = ['Mirror', 'Independent Mode', 'Entity Mode'];

function onHmdChanged(isHmd) {
    HMD.closeTablet();
    if (isHmd) {
        button.editProperties({
            icon: "icons/tablet-icons/switch-desk-i.svg",
@@ -121,6 +121,7 @@ function onClick() {
    if (onMarketplaceScreen) {
        // for toolbar-mode: go back to home screen, this will close the window.
        tablet.gotoHomeScreen();
        onMarketplaceScreen = false;
    } else {
        var entity = HMD.tabletID;
        Entities.editEntity(entity, {textures: JSON.stringify({"tex.close": HOME_BUTTON_TEXTURE})});
@@ -140,6 +141,9 @@ tablet.screenChanged.connect(onScreenChanged);
Entities.canWriteAssetsChanged.connect(onCanWriteAssetsChanged);

Script.scriptEnding.connect(function () {
    if (onMarketplaceScreen) {
        tablet.gotoHomeScreen();
    }
    tablet.removeButton(marketplaceButton);
    tablet.screenChanged.disconnect(onScreenChanged);
    Entities.canWriteAssetsChanged.disconnect(onCanWriteAssetsChanged);
@@ -48,6 +48,9 @@ var HOME_BUTTON_TEXTURE = "http://hifi-content.s3.amazonaws.com/alan/dev/tablet-
tablet.screenChanged.connect(onScreenChanged);

Script.scriptEnding.connect(function () {
    if (onMenuScreen) {
        tablet.gotoHomeScreen();
    }
    button.clicked.disconnect(onClicked);
    tablet.removeButton(button);
    tablet.screenChanged.disconnect(onScreenChanged);
@@ -696,6 +696,9 @@ function clearLocalQMLDataAndClosePAL() {
}

function shutdown() {
    if (onPalScreen) {
        tablet.gotoHomeScreen();
    }
    button.clicked.disconnect(onTabletButtonClicked);
    tablet.removeButton(button);
    tablet.screenChanged.disconnect(onTabletScreenChanged);
@@ -191,12 +191,12 @@ function resetButtons(pathStillSnapshot, pathAnimatedSnapshot, notify) {
    if (clearOverlayWhenMoving) {
        MyAvatar.setClearOverlayWhenMoving(true); // not until after the share dialog
    }
    HMD.openTablet();
}

function processingGif() {
    // show hud
    Reticle.visible = reticleVisible;

    button.clicked.disconnect(onClicked);
    buttonConnected = false;
    // show overlays if they were on
@@ -211,8 +211,10 @@ Window.snapshotShared.connect(snapshotShared);
Window.processingGif.connect(processingGif);

Script.scriptEnding.connect(function () {
    button.clicked.disconnect(onClicked);
    buttonConnected = false;
    if (buttonConnected) {
        button.clicked.disconnect(onClicked);
        buttonConnected = false;
    }
    if (tablet) {
        tablet.removeButton(button);
    }
@@ -131,7 +131,9 @@
    }

    Script.scriptEnding.connect(function () {
        Entities.deleteEntity(HMD.tabletID);
        var tabletID = HMD.tabletID;
        Entities.deleteEntity(tabletID);
        Overlays.deleteOverlay(tabletID)
        HMD.tabletID = null;
        HMD.homeButtonID = null;
        HMD.tabletScreenID = null;
@@ -115,6 +115,9 @@
tablet.screenChanged.connect(onScreenChanged);

function cleanup() {
    if (onUsersScreen) {
        tablet.gotoHomeScreen();
    }
    button.clicked.disconnect(onClicked);
    tablet.removeButton(button);
}
File diff suppressed because it is too large