Mirror of https://thingvellir.net/git/overte, synced 2025-03-27 23:52:03 +01:00
Merge branch 'master' of https://github.com/highfidelity/hifi into entityListReorderColumns
Commit: e4efc90605
320 changed files with 4801 additions and 3126 deletions

@@ -656,6 +656,8 @@ void Agent::queryAvatars() {
ViewFrustum view;
view.setPosition(scriptedAvatar->getWorldPosition());
view.setOrientation(scriptedAvatar->getHeadOrientation());
view.setProjection(DEFAULT_FIELD_OF_VIEW_DEGREES, DEFAULT_ASPECT_RATIO,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP);
view.calculate();
ConicalViewFrustum conicalView { view };
@@ -876,18 +878,30 @@ void Agent::aboutToFinish() {
DependencyManager::destroy<AudioInjectorManager>();

// destroy all other created dependencies
DependencyManager::destroy<ScriptCache>();

DependencyManager::destroy<ResourceCacheSharedItems>();
DependencyManager::destroy<SoundCacheScriptingInterface>();
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AudioScriptingInterface>();

DependencyManager::destroy<RecordingScriptingInterface>();
DependencyManager::destroy<AnimationCacheScriptingInterface>();
DependencyManager::destroy<EntityScriptingInterface>();
DependencyManager::destroy<ResourceScriptingInterface>();
DependencyManager::destroy<UserActivityLoggerScriptingInterface>();

DependencyManager::destroy<ScriptCache>();
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AnimationCache>();

DependencyManager::destroy<recording::Deck>();
DependencyManager::destroy<recording::Recorder>();
DependencyManager::destroy<recording::ClipCache>();

DependencyManager::destroy<AvatarHashMap>();
DependencyManager::destroy<AssignmentParentFinder>();
DependencyManager::destroy<MessagesClient>();
DependencyManager::destroy<ResourceManager>();

DependencyManager::destroy<ResourceCacheSharedItems>();

// drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
// this ensures that the ScriptEngine goes down before ScriptEngines
_scriptEngine.clear();
@@ -129,17 +129,12 @@ void AssignmentClient::stopAssignmentClient() {
QThread* currentAssignmentThread = _currentAssignment->thread();

// ask the current assignment to stop
BLOCKING_INVOKE_METHOD(_currentAssignment, "stop");
QMetaObject::invokeMethod(_currentAssignment, "stop");

// ask the current assignment to delete itself on its thread
_currentAssignment->deleteLater();

// when this thread is destroyed we don't need to run our assignment complete method
disconnect(currentAssignmentThread, &QThread::destroyed, this, &AssignmentClient::assignmentCompleted);

// wait on the thread from that assignment - it will be gone once the current assignment deletes
currentAssignmentThread->quit();
currentAssignmentThread->wait();
auto PROCESS_EVENTS_INTERVAL_MS = 100;
while (!currentAssignmentThread->wait(PROCESS_EVENTS_INTERVAL_MS)) {
QCoreApplication::processEvents();
}
}
}
@@ -435,7 +435,11 @@ void AudioMixer::start() {
QCoreApplication::processEvents();
}

int numToRetain = nodeList->size() * (1 - _throttlingRatio);
int numToRetain = -1;
assert(_throttlingRatio >= 0.0f && _throttlingRatio <= 1.0f);
if (_throttlingRatio > EPSILON) {
numToRetain = nodeList->size() * (1.0f - _throttlingRatio);
}
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// mix across slave threads
auto mixTimer = _mixTiming.timer();
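Note on the hunk above: numToRetain is now left at its -1 sentinel (no retention cap) until _throttlingRatio rises above EPSILON, instead of always being computed from the node count. As a worked example, with 80 nodes on the node list and a throttling ratio of 0.25, numToRetain = 80 * (1.0f - 0.25f) = 60.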
@@ -488,11 +492,8 @@ void AudioMixer::throttle(chrono::microseconds duration, int frame) {

// target different mix and backoff ratios (they also have different backoff rates)
// this is to prevent oscillation, and encourage throttling to find a steady state
const float TARGET = 0.9f;
// on a "regular" machine with 100 avatars, this is the largest value where
// - overthrottling can be recovered
// - oscillations will not occur after the recovery
const float BACKOFF_TARGET = 0.44f;
const float TARGET = _throttleStartTarget;
const float BACKOFF_TARGET = _throttleBackoffTarget;

// the mixer is known to struggle at about 80 on a "regular" machine
// so throttle 2/80 the streams to ensure smooth audio (throttling is linear)
@@ -551,6 +552,24 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
_slavePool.setNumThreads(numThreads);
}
}

const QString THROTTLE_START_KEY = "throttle_start";
const QString THROTTLE_BACKOFF_KEY = "throttle_backoff";

float settingsThrottleStart = audioThreadingGroupObject[THROTTLE_START_KEY].toDouble(_throttleStartTarget);
float settingsThrottleBackoff = audioThreadingGroupObject[THROTTLE_BACKOFF_KEY].toDouble(_throttleBackoffTarget);

if (settingsThrottleBackoff > settingsThrottleStart) {
qCWarning(audio) << "Throttle backoff target cannot be higher than throttle start target. Using default values.";
} else if (settingsThrottleBackoff < 0.0f || settingsThrottleStart > 1.0f) {
qCWarning(audio) << "Throttle start and backoff targets must be greater than or equal to 0.0"
<< "and lesser than or equal to 1.0. Using default values.";
} else {
_throttleStartTarget = settingsThrottleStart;
_throttleBackoffTarget = settingsThrottleBackoff;
}

qCDebug(audio) << "Throttle Start:" << _throttleStartTarget << "Throttle Backoff:" << _throttleBackoffTarget;
}

if (settingsObject.contains(AUDIO_BUFFER_GROUP_KEY)) {
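Note on the hunk above: the two keys read here correspond to the throttle_start and throttle_backoff entries added to the audio threading settings group later in this diff (defaults 0.9 and 0.44). A minimal sketch of the group as this parser would consume it — the group name is an assumption; only the two keys and defaults come from the diff:

    "audio_threading": {
        "throttle_start": 0.9,
        "throttle_backoff": 0.44
    }

A backoff target above the start target, or either value outside 0.0–1.0, is rejected with a warning and the defaults are kept.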
@@ -144,11 +144,13 @@ private:
static std::map<QString, CodecPluginPointer> _availableCodecs;
static QStringList _codecPreferenceOrder;

static std::vector<ZoneDescription> _audioZones;
static std::vector<ZoneSettings> _zoneSettings;
static std::vector<ReverbSettings> _zoneReverbSettings;

float _throttleStartTarget = 0.9f;
float _throttleBackoffTarget = 0.44f;

AudioMixerSlave::SharedData _workerSharedData;
};
@@ -337,6 +337,13 @@ void AudioMixerClientData::removeAgentAvatarAudioStream() {

if (it != _audioStreams.end()) {
_audioStreams.erase(it);

// Clear mixing structures so that they get recreated with up to date
// data if the stream comes back
setHasReceivedFirstMix(false);
_streams.skipped.clear();
_streams.inactive.clear();
_streams.active.clear();
}
}
@@ -152,6 +152,8 @@ void AvatarMixerClientData::processSetTraitsMessage(ReceivedMessage& message,
if (packetTraitVersion > instanceVersionRef) {
if (traitSize == AvatarTraits::DELETED_TRAIT_SIZE) {
_avatar->processDeletedTraitInstance(traitType, instanceID);
// Mixer doesn't need deleted IDs.
_avatar->getAndClearRecentlyDetachedIDs();

// to track a deleted instance but keep version information
// the avatar mixer uses the negative value of the sent version
@@ -416,7 +416,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// NOTE: Here's where we determine if we are over budget and drop remaining avatars,
// or send minimal avatar data in uncommon case of PALIsOpen.
int minimRemainingAvatarBytes = minimumBytesPerAvatar * remainingAvatars;
bool overBudget = (identityBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes) > maxAvatarBytesPerFrame;
auto frameByteEstimate = identityBytesSent + traitBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes;
bool overBudget = frameByteEstimate > maxAvatarBytesPerFrame;
if (overBudget) {
if (PALIsOpen) {
_stats.overBudgetAvatars++;
@@ -497,8 +498,11 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
_stats.avatarDataPackingElapsedTime +=
(quint64) chrono::duration_cast<chrono::microseconds>(endAvatarDataPacking - startAvatarDataPacking).count();

// use helper to add any changed traits to our packet list
traitBytesSent += addChangedTraitsToBulkPacket(nodeData, otherNodeData, *traitsPacketList);
if (!overBudget) {
// use helper to add any changed traits to our packet list
traitBytesSent += addChangedTraitsToBulkPacket(nodeData, otherNodeData, *traitsPacketList);
}

remainingAvatars--;
}
@@ -21,7 +21,7 @@
#include <GLMHelpers.h>

ScriptableAvatar::ScriptableAvatar() {
_clientTraitsHandler = std::unique_ptr<ClientTraitsHandler>(new ClientTraitsHandler(this));
_clientTraitsHandler.reset(new ClientTraitsHandler(this));
}

QByteArray ScriptableAvatar::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
@@ -583,15 +583,29 @@ void EntityScriptServer::handleOctreePacket(QSharedPointer<ReceivedMessage> mess
void EntityScriptServer::aboutToFinish() {
shutdownScriptEngine();

DependencyManager::get<EntityScriptingInterface>()->setEntityTree(nullptr);
DependencyManager::get<ResourceManager>()->cleanup();

DependencyManager::destroy<AudioScriptingInterface>();
DependencyManager::destroy<SoundCacheScriptingInterface>();
DependencyManager::destroy<ResourceScriptingInterface>();
DependencyManager::destroy<EntityScriptingInterface>();

DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<ScriptCache>();

DependencyManager::destroy<ResourceManager>();
DependencyManager::destroy<ResourceCacheSharedItems>();

DependencyManager::destroy<MessagesClient>();

DependencyManager::destroy<AssignmentDynamicFactory>();
DependencyManager::destroy<AssignmentParentFinder>();
DependencyManager::destroy<AvatarHashMap>();

DependencyManager::get<ResourceManager>()->cleanup();

DependencyManager::destroy<PluginManager>();

DependencyManager::destroy<ResourceScriptingInterface>();
DependencyManager::destroy<EntityScriptingInterface>();

// cleanup the AudioInjectorManager (and any still running injectors)
DependencyManager::destroy<AudioInjectorManager>();
@@ -16,9 +16,9 @@ if (HIFI_MEMORY_DEBUGGING)
if (UNIX)
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# for clang on Linux
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer -shared-libasan -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -shared-libasan -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -shared-libasan -fsanitize=undefined -fsanitize=address -fsanitize-recover=address")
else ()
# for gcc on Linux
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined -fsanitize=address -U_FORTIFY_SOURCE -fno-stack-protector -fno-omit-frame-pointer")
@@ -1012,6 +1012,24 @@
"placeholder": "1",
"default": "1",
"advanced": true
},
{
"name": "throttle_start",
"type": "double",
"label": "Throttle Start Target",
"help": "Target percentage of frame time to start throttling",
"placeholder": "0.9",
"default": 0.9,
"advanced": true
},
{
"name": "throttle_backoff",
"type": "double",
"label": "Throttle Backoff Target",
"help": "Target percentage of frame time to backoff throttling",
"placeholder": "0.44",
"default": 0.44,
"advanced": true
}
]
},
@@ -10,10 +10,85 @@ $(document).ready(function(){
function progressBarHTML(extraClass, label) {
var html = "<div class='progress'>";
html += "<div class='" + extraClass + " progress-bar progress-bar-success progress-bar-striped active' role='progressbar' aria-valuemin='0' aria-valuemax='100'>";
html += label + "<span class='sr-only'></span></div></div>";
html += "<span class='ongoing-msg'></span></div></div>";
return html;
}

function showUploadProgress(title) {
swal({
title: title,
text: progressBarHTML('upload-content-progress', 'Upload'),
html: true,
showConfirmButton: false,
allowEscapeKey: false
});
}

function uploadNextChunk(file, offset, id) {
if (offset == undefined) {
offset = 0;
}
if (id == undefined) {
// Identify this upload session
id = Math.round(Math.random() * 2147483647);
}

var fileSize = file.size;
var filename = file.name;

var CHUNK_SIZE = 1048576; // 1 MiB

var isFinal = Boolean(fileSize - offset <= CHUNK_SIZE);
var nextChunkSize = Math.min(fileSize - offset, CHUNK_SIZE);
var chunk = file.slice(offset, offset + nextChunkSize, file.type);
var chunkFormData = new FormData();

var formItemName = 'restore-file-chunk';
if (offset == 0) {
formItemName = isFinal ? 'restore-file-chunk-only' : 'restore-file-chunk-initial';
} else if (isFinal) {
formItemName = 'restore-file-chunk-final';
}

chunkFormData.append(formItemName, chunk, filename);
var ajaxParams = {
url: '/content/upload',
type: 'POST',
timeout: 30000, // 30 s
headers: {"X-Session-Id": id},
cache: false,
processData: false,
contentType: false,
data: chunkFormData
};

var ajaxObject = $.ajax(ajaxParams);
ajaxObject.fail(function (jqXHR, textStatus, errorThrown) {
showErrorMessage(
"Error",
"There was a problem restoring domain content.\n"
+ "Please ensure that the content archive or entity file is valid and try again."
);
});

updateProgressBars($('.upload-content-progress'), (offset + nextChunkSize) * 100 / fileSize);

if (!isFinal) {
ajaxObject.done(function (data, textStatus, jqXHR)
{ uploadNextChunk(file, offset + CHUNK_SIZE, id); });
} else {
ajaxObject.done(function(data, textStatus, jqXHR) {
isRestoring = true;

// immediately reload backup information since one should be restoring now
reloadBackupInformation();

swal.close();
});
}

}

function setupBackupUpload() {
// construct the HTML needed for the settings backup panel
var html = "<div class='form-group'><div id='" + UPLOAD_CONTENT_ALLOWED_DIV_ID + "'>";
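Note on uploadNextChunk() above: the form item name encodes where a chunk sits in the upload, and the whole session is keyed on the X-Session-Id header (see processPendingContent further down in this diff). A minimal sketch of the naming rule, restated from the code above:

    // Restated from uploadNextChunk(): chunk position -> form item name.
    // A single-chunk upload is "-only"; multi-chunk uploads go
    // "-initial", then plain "restore-file-chunk", then "-final".
    function chunkItemName(offset, isFinal) {
        if (offset === 0) {
            return isFinal ? 'restore-file-chunk-only' : 'restore-file-chunk-initial';
        }
        return isFinal ? 'restore-file-chunk-final' : 'restore-file-chunk';
    }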
@@ -50,34 +125,10 @@ $(document).ready(function(){
"Restore content",
function() {
var files = $('#' + RESTORE_SETTINGS_FILE_ID).prop('files');
var file = files[0];

var fileFormData = new FormData();
fileFormData.append('restore-file', files[0]);

showSpinnerAlert("Uploading content to restore");

$.ajax({
url: '/content/upload',
type: 'POST',
timeout: 3600000, // Set timeout to 1h
cache: false,
processData: false,
contentType: false,
data: fileFormData
}).done(function(data, textStatus, jqXHR) {
isRestoring = true;

// immediately reload backup information since one should be restoring now
reloadBackupInformation();

swal.close();
}).fail(function(jqXHR, textStatus, errorThrown) {
showErrorMessage(
"Error",
"There was a problem restoring domain content.\n"
+ "Please ensure that the content archive or entity file is valid and try again."
);
});
showUploadProgress("Uploading " + file.name);
uploadNextChunk(file);
}
);
});
@@ -168,6 +219,11 @@ $(document).ready(function(){
checkBackupStatus();
});

function updateProgressBars($progressBar, value) {
$progressBar.attr('aria-valuenow', value).attr('style', 'width: ' + value + '%');
$progressBar.find('.ongoing-msg').html(" " + Math.round(value) + "%");
}

function reloadBackupInformation() {
// make a GET request to get backup information to populate the table
$.ajax({
@@ -204,11 +260,6 @@ $(document).ready(function(){
+ "<li><a class='" + BACKUP_DELETE_LINK_CLASS + "' href='#' target='_blank'>Delete</a></li></ul></div></td>";
}

function updateProgressBars($progressBar, value) {
$progressBar.attr('aria-valuenow', value).attr('style', 'width: ' + value + '%');
$progressBar.find('.sr-only').html(value + "% Complete");
}

// before we add any new rows and update existing ones
// remove our flag for active rows
$('.' + ACTIVE_BACKUP_ROW_CLASS).removeClass(ACTIVE_BACKUP_ROW_CLASS);
@@ -348,6 +348,27 @@ void DomainContentBackupManager::recoverFromUploadedBackup(MiniPromise::Promise
});
}

void DomainContentBackupManager::recoverFromUploadedFile(MiniPromise::Promise promise, QString uploadedFilename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "recoverFromUploadedFile", Q_ARG(MiniPromise::Promise, promise),
Q_ARG(QString, uploadedFilename));
return;
}

qDebug() << "Recovering from uploaded file -" << uploadedFilename;

QFile uploadedFile(uploadedFilename);
QuaZip uploadedZip { &uploadedFile };

QString backupName = MANUAL_BACKUP_PREFIX + "uploaded.zip";

bool success = recoverFromBackupZip(backupName, uploadedZip);

promise->resolve({
{ "success", success }
});
}

std::vector<BackupItemInfo> DomainContentBackupManager::getAllBackups() {

QDir backupDir { _backupDirectory };
@@ -86,6 +86,7 @@ public slots:
void createManualBackup(MiniPromise::Promise promise, const QString& name);
void recoverFromBackup(MiniPromise::Promise promise, const QString& backupName);
void recoverFromUploadedBackup(MiniPromise::Promise promise, QByteArray uploadedBackup);
void recoverFromUploadedFile(MiniPromise::Promise promise, QString uploadedFilename);
void deleteBackup(MiniPromise::Promise promise, const QString& backupName);

signals:
@@ -2258,46 +2258,18 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
// check the file extension to see what kind of file this is
// to make sure we handle this filetype for a content restore
auto dispositionValue = QString(firstFormData.first.value("Content-Disposition"));
auto formDataFilenameRegex = QRegExp("filename=\"(.+)\"");
auto matchIndex = formDataFilenameRegex.indexIn(dispositionValue);
QRegExp formDataFieldsRegex(R":(name="(restore-file.*)".*filename="(.+)"):");
auto matchIndex = formDataFieldsRegex.indexIn(dispositionValue);

QString formItemName = "";
QString uploadedFilename = "";
if (matchIndex != -1) {
uploadedFilename = formDataFilenameRegex.cap(1);
}

if (uploadedFilename.endsWith(".json", Qt::CaseInsensitive)
|| uploadedFilename.endsWith(".json.gz", Qt::CaseInsensitive)) {
// invoke our method to hand the new octree file off to the octree server
QMetaObject::invokeMethod(this, "handleOctreeFileReplacement",
Qt::QueuedConnection, Q_ARG(QByteArray, firstFormData.second));

// respond with a 200 for success
connection->respond(HTTPConnection::StatusCode200);
} else if (uploadedFilename.endsWith(".zip", Qt::CaseInsensitive)) {
auto deferred = makePromise("recoverFromUploadedBackup");

deferred->then([connectionPtr, JSON_MIME_TYPE](QString error, QVariantMap result) {
if (!connectionPtr) {
return;
}

QJsonObject rootJSON;
auto success = result["success"].toBool();
rootJSON["success"] = success;
QJsonDocument docJSON(rootJSON);
connectionPtr->respond(success ? HTTPConnection::StatusCode200 : HTTPConnection::StatusCode400, docJSON.toJson(),
JSON_MIME_TYPE.toUtf8());
});

_contentManager->recoverFromUploadedBackup(deferred, firstFormData.second);

return true;
} else {
// we don't have handling for this filetype, send back a 400 for failure
connection->respond(HTTPConnection::StatusCode400);
formItemName = formDataFieldsRegex.cap(1);
uploadedFilename = formDataFieldsRegex.cap(2);
}

// Received a chunk
processPendingContent(connection, formItemName, uploadedFilename, firstFormData.second);
} else {
// respond with a 400 for failure
connection->respond(HTTPConnection::StatusCode400);
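Note on the hunk above: the widened QRegExp captures the form item name as well as the filename. As a worked example, for a part whose header reads

    Content-Disposition: form-data; name="restore-file-chunk-initial"; filename="backup.zip"

cap(1) is "restore-file-chunk-initial" and cap(2) is "backup.zip", and both are passed on to processPendingContent() along with the chunk data.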
@@ -2546,6 +2518,72 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
}
}

bool DomainServer::processPendingContent(HTTPConnection* connection, QString itemName, QString filename, QByteArray dataChunk) {
static const QString UPLOAD_SESSION_KEY { "X-Session-Id" };
QByteArray sessionIdBytes = connection->requestHeader(UPLOAD_SESSION_KEY);
int sessionId = sessionIdBytes.toInt();

bool newUpload = itemName == "restore-file" || itemName == "restore-file-chunk-initial" || itemName == "restore-file-chunk-only";

if (filename.endsWith(".zip", Qt::CaseInsensitive)) {
static const QString TEMPORARY_CONTENT_FILEPATH { QDir::tempPath() + "/hifiUploadContent_XXXXXX.zip" };

if (_pendingContentFiles.find(sessionId) == _pendingContentFiles.end()) {
if (!newUpload) {
return false;
}
std::unique_ptr<QTemporaryFile> newTemp(new QTemporaryFile(TEMPORARY_CONTENT_FILEPATH));
_pendingContentFiles[sessionId] = std::move(newTemp);
} else if (newUpload) {
qCDebug(domain_server) << "New upload received using existing session ID";
_pendingContentFiles[sessionId]->resize(0);
}

QTemporaryFile& _pendingFileContent = *_pendingContentFiles[sessionId];
if (!_pendingFileContent.open()) {
_pendingContentFiles.erase(sessionId);
connection->respond(HTTPConnection::StatusCode400);
return false;
}
_pendingFileContent.seek(_pendingFileContent.size());
_pendingFileContent.write(dataChunk);
_pendingFileContent.close();

// Respond immediately - will timeout if we wait for restore.
connection->respond(HTTPConnection::StatusCode200);
if (itemName == "restore-file" || itemName == "restore-file-chunk-final" || itemName == "restore-file-chunk-only") {
auto deferred = makePromise("recoverFromUploadedBackup");

deferred->then([this, sessionId](QString error, QVariantMap result) {
_pendingContentFiles.erase(sessionId);
});

_contentManager->recoverFromUploadedFile(deferred, _pendingFileContent.fileName());
}
} else if (filename.endsWith(".json", Qt::CaseInsensitive)
|| filename.endsWith(".json.gz", Qt::CaseInsensitive)) {
if (_pendingUploadedContents.find(sessionId) == _pendingUploadedContents.end() && !newUpload) {
qCDebug(domain_server) << "Json upload with invalid session ID received";
return false;
}
QByteArray& _pendingUploadedContent = _pendingUploadedContents[sessionId];
_pendingUploadedContent += dataChunk;
connection->respond(HTTPConnection::StatusCode200);

if (itemName == "restore-file" || itemName == "restore-file-chunk-final" || itemName == "restore-file-chunk-only") {
// invoke our method to hand the new octree file off to the octree server
QMetaObject::invokeMethod(this, "handleOctreeFileReplacement",
Qt::QueuedConnection, Q_ARG(QByteArray, _pendingUploadedContent));
_pendingUploadedContents.erase(sessionId);
}
} else {
connection->respond(HTTPConnection::StatusCode400);
return false;
}

return true;
}

HTTPSConnection* DomainServer::connectionFromReplyWithState(QNetworkReply* reply) {
// grab the UUID state property from the reply
QUuid stateUUID = reply->property(STATE_QUERY_KEY.toLocal8Bit()).toUuid();
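Note on processPendingContent() above: .zip chunks are appended to a per-session QTemporaryFile and a restore is kicked off once the final (or only) chunk arrives, while .json/.json.gz chunks are accumulated in a QByteArray and handed to handleOctreeFileReplacement. A minimal single-request usage sketch against this handler, assuming it runs from the same origin as the domain-server's web interface (the /content/upload path and form-item name come from content.js earlier in this diff):

    // One-shot upload: a single chunk marked "-only" starts and finishes the session.
    async function uploadBackup(file) {
        const formData = new FormData();
        formData.append('restore-file-chunk-only', file, file.name);
        const response = await fetch('/content/upload', {
            method: 'POST',
            headers: { 'X-Session-Id': String(Math.round(Math.random() * 2147483647)) },
            body: formData
        });
        return response.ok; // the server responds 200 as soon as the chunk is accepted
    }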
@@ -20,6 +20,7 @@
#include <QtCore/QStringList>
#include <QtCore/QThread>
#include <QtCore/QUrl>
#include <QHostAddress>
#include <QAbstractNativeEventFilter>

#include <Assignment.h>
@@ -209,6 +210,8 @@ private:

HTTPSConnection* connectionFromReplyWithState(QNetworkReply* reply);

bool processPendingContent(HTTPConnection* connection, QString itemName, QString filename, QByteArray dataChunk);

bool forwardMetaverseAPIRequest(HTTPConnection* connection,
const QString& metaversePath,
const QString& requestSubobject,
@@ -281,6 +284,9 @@ private:

QHash<QUuid, QPointer<HTTPSConnection>> _pendingOAuthConnections;

std::unordered_map<int, QByteArray> _pendingUploadedContents;
std::unordered_map<int, std::unique_ptr<QTemporaryFile>> _pendingContentFiles;

QThread _assetClientThread;
};
@ -701,9 +701,9 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.53203323516845703,
|
||||
"x": -0.59333323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.07286686894893646
|
||||
"z": 0.037454843521118164
|
||||
},
|
||||
"modelURL": "meshes/keyboard/SM_key.fbx",
|
||||
"texture": {
|
||||
|
@ -752,7 +752,7 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.59333323516845703,
|
||||
"x": -0.65333323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": 0.037454843521118164
|
||||
},
|
||||
|
@ -777,9 +777,9 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.5103323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.127054843521118164
|
||||
"x": -0.5503323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.07282185554504395
|
||||
},
|
||||
"modelURL": "meshes/keyboard/SM_enter.fbx",
|
||||
"texture": {
|
||||
|
@ -1479,9 +1479,9 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.53203323516845703,
|
||||
"x": -0.59333323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.07286686894893646
|
||||
"z": 0.037454843521118164
|
||||
},
|
||||
"modelURL": "meshes/keyboard/SM_key.fbx",
|
||||
"texture": {
|
||||
|
@ -1530,7 +1530,7 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.59333323516845703,
|
||||
"x": -0.65333323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": 0.037454843521118164
|
||||
},
|
||||
|
@ -1555,9 +1555,9 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.5103323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.127054843521118164
|
||||
"x": -0.5503323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.07282185554504395
|
||||
},
|
||||
"modelURL": "meshes/keyboard/SM_enter.fbx",
|
||||
"texture": {
|
||||
|
@ -2305,9 +2305,9 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.53203323516845703,
|
||||
"x": -0.59333323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.07286686894893646
|
||||
"z": 0.037454843521118164
|
||||
},
|
||||
"modelURL": "meshes/keyboard/SM_key.fbx",
|
||||
"texture": {
|
||||
|
@ -2356,7 +2356,7 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.59333323516845703,
|
||||
"x": -0.65333323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": 0.037454843521118164
|
||||
},
|
||||
|
@ -2381,9 +2381,9 @@
|
|||
"y": 0.04787999764084816
|
||||
},
|
||||
"position": {
|
||||
"x": -0.5103323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.127054843521118164
|
||||
"x": -0.5503323516845703,
|
||||
"y": 0.019300000742077827,
|
||||
"z": -0.07282185554504395
|
||||
},
|
||||
"modelURL": "meshes/keyboard/SM_enter.fbx",
|
||||
"texture": {
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
window.isKeyboardRaised = false;
|
||||
window.isNumericKeyboard = false;
|
||||
window.isPasswordField = false;
|
||||
window.lastActiveElement = null;
|
||||
window.lastActiveInputElement = null;
|
||||
|
||||
function getActiveElement() {
|
||||
return document.activeElement;
|
||||
|
@ -70,11 +70,15 @@
|
|||
var keyboardRaised = shouldRaiseKeyboard();
|
||||
var numericKeyboard = shouldSetNumeric();
|
||||
var passwordField = shouldSetPasswordField();
|
||||
var activeElement = getActiveElement();
|
||||
var activeInputElement = null;
|
||||
// Only set the active input element when there is an input element focussed, otherwise it will scroll on body focus as well.
|
||||
if (keyboardRaised) {
|
||||
activeInputElement = getActiveElement();
|
||||
}
|
||||
|
||||
if (isWindowFocused &&
|
||||
(keyboardRaised !== window.isKeyboardRaised || numericKeyboard !== window.isNumericKeyboard
|
||||
|| passwordField !== window.isPasswordField || activeElement !== window.lastActiveElement)) {
|
||||
|| passwordField !== window.isPasswordField || activeInputElement !== window.lastActiveInputElement)) {
|
||||
|
||||
if (typeof EventBridge !== "undefined" && EventBridge !== null) {
|
||||
EventBridge.emitWebEvent(
|
||||
|
@ -96,7 +100,7 @@
|
|||
window.isKeyboardRaised = keyboardRaised;
|
||||
window.isNumericKeyboard = numericKeyboard;
|
||||
window.isPasswordField = passwordField;
|
||||
window.lastActiveElement = activeElement;
|
||||
window.lastActiveInputElement = activeInputElement;
|
||||
}
|
||||
}, POLL_FREQUENCY);
|
||||
|
||||
|
|
|
@ -66,7 +66,7 @@
|
|||
<script>
|
||||
var handControllerImageURL = null;
|
||||
var index = 0;
|
||||
var count = 5;
|
||||
var count = 3;
|
||||
|
||||
function showKbm() {
|
||||
document.getElementById("main_image").setAttribute("src", "img/tablet-help-keyboard.jpg");
|
||||
|
@ -94,24 +94,14 @@
|
|||
switch (index)
|
||||
{
|
||||
case 0:
|
||||
handControllerImageURL = "img/tablet-help-oculus.jpg";
|
||||
showHandControllers();
|
||||
break;
|
||||
case 1:
|
||||
handControllerImageURL = "img/tablet-help-vive.jpg";
|
||||
showHandControllers();
|
||||
break;
|
||||
case 2:
|
||||
handControllerImageURL = "img/tablet-help-windowsMR.jpg";
|
||||
showHandControllers();
|
||||
break;
|
||||
case 3:
|
||||
showGamepad();
|
||||
break;
|
||||
case 4:
|
||||
case 1:
|
||||
showKbm();
|
||||
break;
|
||||
|
||||
case 2:
|
||||
showHandControllers();
|
||||
break;
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
@ -144,34 +134,33 @@
|
|||
}
|
||||
|
||||
switch (params.handControllerName) {
|
||||
case "oculus":
|
||||
handControllerImageURL = "img/tablet-help-oculus.jpg";
|
||||
index = 0;
|
||||
break;
|
||||
case "windowsMR":
|
||||
handControllerImageURL = "img/tablet-help-windowsMR.jpg";
|
||||
index = 2;
|
||||
break;
|
||||
case "vive":
|
||||
default:
|
||||
handControllerImageURL = "img/tablet-help-vive.jpg";
|
||||
index = 1;
|
||||
break;
|
||||
case "oculus":
|
||||
handControllerImageURL = "img/tablet-help-oculus.jpg";
|
||||
break;
|
||||
default:
|
||||
handControllerImageURL = "";
|
||||
count = 2;
|
||||
}
|
||||
|
||||
switch (params.defaultTab) {
|
||||
case "gamepad":
|
||||
showGamepad();
|
||||
index = 3;
|
||||
break;
|
||||
|
||||
case "handControllers":
|
||||
showHandControllers();
|
||||
index = 2;
|
||||
break;
|
||||
case "gamepad":
|
||||
showGamepad();
|
||||
index = 0;
|
||||
break;
|
||||
|
||||
case "kbm":
|
||||
default:
|
||||
showKbm();
|
||||
index = 4;
|
||||
index = 1;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
|
Binary file not shown.
|
@ -23,6 +23,8 @@ Item {
|
|||
width: root.pane.width
|
||||
property bool failAfterSignUp: false
|
||||
|
||||
onWidthChanged: d.resize();
|
||||
|
||||
function login() {
|
||||
flavorText.visible = false
|
||||
mainTextContainer.visible = false
|
||||
|
@ -127,7 +129,7 @@ Item {
|
|||
Column {
|
||||
id: form
|
||||
width: parent.width
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
onHeightChanged: d.resize();
|
||||
|
||||
anchors {
|
||||
top: mainTextContainer.bottom
|
||||
|
|
|
@ -44,14 +44,14 @@ Rectangle {
|
|||
|
||||
|
||||
onPasswordChanged: {
|
||||
var use3DKeyboard = (typeof MenuInterface === "undefined") ? false : MenuInterface.isOptionChecked("Use 3D Keyboard");
|
||||
var use3DKeyboard = (typeof KeyboardScriptingInterface === "undefined") ? false : KeyboardScriptingInterface.use3DKeyboard;
|
||||
if (use3DKeyboard) {
|
||||
KeyboardScriptingInterface.password = password;
|
||||
}
|
||||
}
|
||||
|
||||
onRaisedChanged: {
|
||||
var use3DKeyboard = (typeof MenuInterface === "undefined") ? false : MenuInterface.isOptionChecked("Use 3D Keyboard");
|
||||
var use3DKeyboard = (typeof KeyboardScriptingInterface === "undefined") ? false : KeyboardScriptingInterface.use3DKeyboard;
|
||||
if (!use3DKeyboard) {
|
||||
keyboardBase.height = raised ? raisedHeight : 0;
|
||||
keyboardBase.visible = raised;
|
||||
|
|
|
@ -141,6 +141,7 @@ TabletModalWindow {
|
|||
|
||||
Component.onDestruction: {
|
||||
loginKeyboard.raised = false;
|
||||
KeyboardScriptingInterface.raised = false;
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
|
|
|
@ -70,8 +70,8 @@ OriginalDesktop.Desktop {
|
|||
anchors.horizontalCenter: settings.constrainToolbarToCenterX ? desktop.horizontalCenter : undefined;
|
||||
// Literal 50 is overwritten by settings from previous session, and sysToolbar.x comes from settings when not constrained.
|
||||
x: sysToolbar.x
|
||||
buttonModel: tablet.buttons;
|
||||
shown: tablet.toolbarMode;
|
||||
buttonModel: tablet ? tablet.buttons : null;
|
||||
shown: tablet ? tablet.toolbarMode : false;
|
||||
}
|
||||
|
||||
Settings {
|
||||
|
|
|
@ -19,6 +19,7 @@ import controlsUit 1.0 as HifiControlsUit
|
|||
import "../../../controls" as HifiControls
|
||||
import "../wallet" as HifiWallet
|
||||
import "../common" as HifiCommerceCommon
|
||||
import "../.." as HifiCommon
|
||||
|
||||
// references XXX from root context
|
||||
|
||||
|
@ -31,6 +32,7 @@ Rectangle {
|
|||
property bool ownershipStatusReceived: false;
|
||||
property bool balanceReceived: false;
|
||||
property bool availableUpdatesReceived: false;
|
||||
property bool itemInfoReceived: false;
|
||||
property string baseItemName: "";
|
||||
property string itemName;
|
||||
property string itemId;
|
||||
|
@ -181,11 +183,14 @@ Rectangle {
|
|||
|
||||
onItemIdChanged: {
|
||||
root.ownershipStatusReceived = false;
|
||||
root.itemInfoReceived = false;
|
||||
Commerce.alreadyOwned(root.itemId);
|
||||
root.availableUpdatesReceived = false;
|
||||
root.currentUpdatesPage = 1;
|
||||
Commerce.getAvailableUpdates(root.itemId);
|
||||
itemPreviewImage.source = "https://hifi-metaverse.s3-us-west-1.amazonaws.com/marketplace/previews/" + itemId + "/thumbnail/hifi-mp-" + itemId + ".jpg";
|
||||
|
||||
var MARKETPLACE_API_URL = Account.metaverseServerURL + "/api/v1/marketplace/items/";
|
||||
http.request({uri: MARKETPLACE_API_URL + root.itemId}, updateCheckoutQMLFromHTTP);
|
||||
}
|
||||
|
||||
onItemTypeChanged: {
|
||||
|
@ -279,6 +284,7 @@ Rectangle {
|
|||
ownershipStatusReceived = false;
|
||||
balanceReceived = false;
|
||||
availableUpdatesReceived = false;
|
||||
itemInfoReceived = false;
|
||||
Commerce.getWalletStatus();
|
||||
}
|
||||
}
|
||||
|
@ -355,7 +361,7 @@ Rectangle {
|
|||
Rectangle {
|
||||
id: loading;
|
||||
z: 997;
|
||||
visible: !root.ownershipStatusReceived || !root.balanceReceived || !root.availableUpdatesReceived;
|
||||
visible: !root.ownershipStatusReceived || !root.balanceReceived || !root.availableUpdatesReceived || !root.itemInfoReceived;
|
||||
anchors.fill: parent;
|
||||
color: hifi.colors.white;
|
||||
|
||||
|
@ -1063,10 +1069,33 @@ Rectangle {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
HifiCommon.RootHttpRequest {
|
||||
id: http;
|
||||
}
|
||||
|
||||
//
|
||||
// FUNCTION DEFINITIONS START
|
||||
//
|
||||
|
||||
function updateCheckoutQMLFromHTTP(error, result) {
|
||||
if (error || (result.status !== 'success')) {
|
||||
// The QML will display a loading spinner forever if the user is stuck here.
|
||||
console.log("Error in Checkout.qml when getting marketplace item info!");
|
||||
return;
|
||||
}
|
||||
|
||||
root.itemInfoReceived = true;
|
||||
root.itemName = result.data.title;
|
||||
root.itemPrice = result.data.cost;
|
||||
root.itemHref = Account.metaverseServerURL + result.data.path;
|
||||
root.itemAuthor = result.data.creator;
|
||||
root.itemType = result.data.item_type || "unknown";
|
||||
itemPreviewImage.source = result.data.thumbnail_url;
|
||||
refreshBuyUI();
|
||||
}
|
||||
|
||||
//
|
||||
// Function Name: fromScript()
|
||||
//
|
||||
|
@ -1080,18 +1109,24 @@ Rectangle {
|
|||
// Description:
|
||||
// Called when a message is received from a script.
|
||||
//
|
||||
|
||||
function fromScript(message) {
|
||||
switch (message.method) {
|
||||
case 'updateCheckoutQML':
|
||||
root.itemId = message.params.itemId;
|
||||
root.itemName = message.params.itemName.trim();
|
||||
root.itemPrice = message.params.itemPrice;
|
||||
root.itemHref = message.params.itemHref;
|
||||
root.referrer = message.params.referrer;
|
||||
root.itemAuthor = message.params.itemAuthor;
|
||||
case 'updateCheckoutQMLItemID':
|
||||
if (!message.params.itemId) {
|
||||
console.log("A message with method 'updateCheckoutQMLItemID' was sent without an itemId!");
|
||||
return;
|
||||
}
|
||||
|
||||
// If we end up following the referrer (i.e. in case the wallet "isn't set up" or the user cancels),
|
||||
// we want the user to be placed back on the individual item's page - thus we set the
|
||||
// default of the referrer in this case to "itemPage".
|
||||
root.referrer = message.params.referrer || "itemPage";
|
||||
root.itemEdition = message.params.itemEdition || -1;
|
||||
root.itemType = message.params.itemType || "unknown";
|
||||
refreshBuyUI();
|
||||
root.itemId = message.params.itemId;
|
||||
break;
|
||||
case 'http.response':
|
||||
http.handleHttpResponse(message);
|
||||
break;
|
||||
default:
|
||||
console.log('Checkout.qml: Unrecognized message from marketplaces.js');
|
||||
|
|
|
@ -25,14 +25,15 @@ Item {
|
|||
|
||||
id: root;
|
||||
|
||||
property bool isDisplayingNearby; // as opposed to 'connections'
|
||||
// true when sending to 'nearby' or when a script raises the send asset dialog
|
||||
property bool multiLineDisplay;
|
||||
property string displayName;
|
||||
property string userName;
|
||||
property string profilePic;
|
||||
property string textColor: hifi.colors.white;
|
||||
|
||||
Item {
|
||||
visible: root.isDisplayingNearby;
|
||||
visible: root.multiLineDisplay;
|
||||
anchors.fill: parent;
|
||||
|
||||
RalewaySemiBold {
|
||||
|
@ -71,7 +72,7 @@ Item {
|
|||
}
|
||||
|
||||
Item {
|
||||
visible: !root.isDisplayingNearby;
|
||||
visible: !root.multiLineDisplay;
|
||||
anchors.fill: parent;
|
||||
|
||||
Image {
|
||||
|
|
|
@ -39,7 +39,7 @@ Item {
|
|||
property string sendingPubliclyEffectImage;
|
||||
property var http;
|
||||
property var listModelName;
|
||||
property var keyboardContainer: nil;
|
||||
property var keyboardContainer;
|
||||
|
||||
// This object is always used in a popup or full-screen Wallet section.
|
||||
// This MouseArea is used to prevent a user from being
|
||||
|
@ -56,7 +56,7 @@ Item {
|
|||
// Background
|
||||
Rectangle {
|
||||
z: 1;
|
||||
visible: root.assetName !== "" && sendAssetStep.visible;
|
||||
visible: root.assetCertID !== "" && sendAssetStep.referrer !== "payIn" && sendAssetStep.visible;
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: root.parentAppTitleBarHeight;
|
||||
anchors.left: parent.left;
|
||||
|
@ -84,7 +84,6 @@ Item {
|
|||
if (sendPubliclyCheckbox.checked && sendAssetStep.referrer === "nearby") {
|
||||
sendSignalToParent({
|
||||
method: 'sendAsset_sendPublicly',
|
||||
assetName: root.assetName,
|
||||
recipient: sendAssetStep.selectedRecipientNodeID,
|
||||
amount: parseInt(amountTextField.text),
|
||||
effectImage: root.sendingPubliclyEffectImage
|
||||
|
@ -108,6 +107,14 @@ Item {
|
|||
root.nextActiveView = 'paymentFailure';
|
||||
}
|
||||
}
|
||||
|
||||
onCertificateInfoResult: {
|
||||
if (result.status !== 'success') {
|
||||
console.log("Failed to get certificate info", result.data.message);
|
||||
} else {
|
||||
root.assetName = result.data.marketplace_item_name;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Connections {
|
||||
|
@ -155,7 +162,7 @@ Item {
|
|||
|
||||
Item {
|
||||
id: userInfoContainer;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "";
|
||||
anchors.top: parent.top;
|
||||
anchors.left: parent.left;
|
||||
anchors.right: parent.right;
|
||||
|
@ -251,7 +258,7 @@ Item {
|
|||
|
||||
LinearGradient {
|
||||
anchors.fill: parent;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "";
|
||||
start: Qt.point(0, 0);
|
||||
end: Qt.point(0, height);
|
||||
gradient: Gradient {
|
||||
|
@ -262,7 +269,7 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: sendAssetText;
|
||||
text: root.assetName === "" ? "Send Money To:" : "Gift \"" + root.assetName + "\" To:";
|
||||
text: root.assetCertID === "" ? "Send Money To:" : "Gift \"" + root.assetName + "\" To:";
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 26;
|
||||
|
@ -405,7 +412,7 @@ Item {
|
|||
HifiModels.PSFListModel {
|
||||
id: connectionsModel;
|
||||
http: root.http;
|
||||
listModelName: root.listModelName;
|
||||
listModelName: root.listModelName || "";
|
||||
endpoint: "/api/v1/users?filter=connections";
|
||||
itemsPerPage: 9;
|
||||
listView: connectionsList;
|
||||
|
@ -441,7 +448,7 @@ Item {
|
|||
HiFiGlyphs {
|
||||
id: closeGlyphButton_connections;
|
||||
text: hifi.glyphs.close;
|
||||
color: root.assetName === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
|
||||
color: root.assetCertID === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
|
||||
size: 26;
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 10;
|
||||
|
@ -684,7 +691,7 @@ Item {
|
|||
HiFiGlyphs {
|
||||
id: closeGlyphButton_nearby;
|
||||
text: hifi.glyphs.close;
|
||||
color: root.assetName === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
|
||||
color: root.assetCertID === "" ? hifi.colors.lightGrayText : hifi.colors.baseGray;
|
||||
size: 26;
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 10;
|
||||
|
@ -760,7 +767,7 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: sendToText;
|
||||
text: root.assetName === "" ? "Send to:" : "Gift to:";
|
||||
text: root.assetCertID === "" ? "Send to:" : "Gift to:";
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 36;
|
||||
|
@ -853,7 +860,7 @@ Item {
|
|||
id: sendAssetStep;
|
||||
z: 996;
|
||||
|
||||
property string referrer; // either "connections" or "nearby"
|
||||
property string referrer; // either "connections", "nearby", or "payIn"
|
||||
property string selectedRecipientNodeID;
|
||||
property string selectedRecipientDisplayName;
|
||||
property string selectedRecipientUserName;
|
||||
|
@ -865,7 +872,8 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: sendAssetText_sendAssetStep;
|
||||
text: root.assetName === "" ? "Send Money" : "Gift \"" + root.assetName + "\"";
|
||||
text: sendAssetStep.referrer === "payIn" && root.assetCertID !== "" ? "Send \"" + root.assetName + "\":" :
|
||||
(root.assetCertID === "" ? "Send Money To:" : "Gift \"" + root.assetName + "\" To:");
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 26;
|
||||
|
@ -878,7 +886,7 @@ Item {
|
|||
// Text size
|
||||
size: 22;
|
||||
// Style
|
||||
color: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
|
||||
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
|
||||
}
|
||||
|
||||
Item {
|
||||
|
@ -893,7 +901,7 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: sendToText_sendAssetStep;
|
||||
text: root.assetName === "" ? "Send to:" : "Gift to:";
|
||||
text: (root.assetCertID === "" || sendAssetStep.referrer === "payIn") ? "Send to:" : "Gift to:";
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.left: parent.left;
|
||||
|
@ -902,7 +910,7 @@ Item {
|
|||
// Text size
|
||||
size: 18;
|
||||
// Style
|
||||
color: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
|
||||
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
|
||||
|
@ -912,25 +920,26 @@ Item {
|
|||
anchors.right: changeButton.left;
|
||||
anchors.rightMargin: 12;
|
||||
height: parent.height;
|
||||
textColor: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
|
||||
textColor: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
|
||||
|
||||
displayName: sendAssetStep.selectedRecipientDisplayName;
|
||||
userName: sendAssetStep.selectedRecipientUserName;
|
||||
profilePic: sendAssetStep.selectedRecipientProfilePic !== "" ? ((0 === sendAssetStep.selectedRecipientProfilePic.indexOf("http")) ?
|
||||
sendAssetStep.selectedRecipientProfilePic : (Account.metaverseServerURL + sendAssetStep.selectedRecipientProfilePic)) : "";
|
||||
isDisplayingNearby: sendAssetStep.referrer === "nearby";
|
||||
multiLineDisplay: sendAssetStep.referrer === "nearby" || sendAssetStep.referrer === "payIn";
|
||||
}
|
||||
|
||||
// "CHANGE" button
|
||||
HifiControlsUit.Button {
|
||||
id: changeButton;
|
||||
color: root.assetName === "" ? hifi.buttons.none : hifi.buttons.white;
|
||||
color: root.assetCertID === "" ? hifi.buttons.none : hifi.buttons.white;
|
||||
colorScheme: hifi.colorSchemes.dark;
|
||||
anchors.right: parent.right;
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
height: 35;
|
||||
width: 100;
|
||||
text: "CHANGE";
|
||||
visible: sendAssetStep.referrer !== "payIn";
|
||||
onClicked: {
|
||||
if (sendAssetStep.referrer === "connections") {
|
||||
root.nextActiveView = "chooseRecipientConnection";
|
||||
|
@ -944,7 +953,7 @@ Item {
|
|||
|
||||
Item {
|
||||
id: amountContainer;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "";
|
||||
anchors.top: sendToContainer.bottom;
|
||||
anchors.topMargin: 2;
|
||||
anchors.left: parent.left;
|
||||
|
@ -970,8 +979,9 @@ Item {
|
|||
|
||||
HifiControlsUit.TextField {
|
||||
id: amountTextField;
|
||||
text: root.assetName === "" ? "" : "1";
|
||||
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
readOnly: sendAssetStep.referrer === "payIn";
|
||||
text: root.assetCertID === "" ? "" : "1";
|
||||
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
inputMethodHints: Qt.ImhDigitsOnly;
|
||||
// Anchors
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
|
@ -980,8 +990,8 @@ Item {
|
|||
height: 50;
|
||||
// Style
|
||||
leftPermanentGlyph: hifi.glyphs.hfc;
|
||||
activeFocusOnPress: true;
|
||||
activeFocusOnTab: true;
|
||||
activeFocusOnPress: !amountTextField.readOnly;
|
||||
activeFocusOnTab: !amountTextField.readOnly;
|
||||
|
||||
validator: IntValidator { bottom: 0; }
|
||||
|
||||
|
@ -1071,6 +1081,7 @@ Item {
|
|||
|
||||
TextArea {
|
||||
id: optionalMessage;
|
||||
readOnly: sendAssetStep.referrer === "payIn";
|
||||
property int maximumLength: 72;
|
||||
property string previousText: text;
|
||||
placeholderText: "<i>Optional Public Message (" + maximumLength + " character limit)</i>";
|
||||
|
@ -1081,12 +1092,13 @@ Item {
|
|||
// Style
|
||||
background: Rectangle {
|
||||
anchors.fill: parent;
|
||||
color: root.assetName === "" ? (optionalMessage.activeFocus ? hifi.colors.black : hifi.colors.baseGrayShadow) :
|
||||
color: (root.assetCertID === "" || sendAssetStep.referrer === "payIn") ?
|
||||
(optionalMessage.activeFocus && !optionalMessage.readOnly ? hifi.colors.black : hifi.colors.baseGrayShadow) :
|
||||
(optionalMessage.activeFocus ? "#EFEFEF" : "#EEEEEE");
|
||||
border.width: optionalMessage.activeFocus ? 1 : 0;
|
||||
border.color: optionalMessage.activeFocus ? hifi.colors.primaryHighlight : hifi.colors.textFieldLightBackground;
|
||||
border.width: optionalMessage.activeFocus && !optionalMessage.readOnly ? 1 : 0;
|
||||
border.color: optionalMessage.activeFocus && !optionalMessage.readOnly ? hifi.colors.primaryHighlight : hifi.colors.textFieldLightBackground;
|
||||
}
|
||||
color: root.assetName === "" ? hifi.colors.white : hifi.colors.black;
|
||||
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.white : hifi.colors.black;
|
||||
textFormat: TextEdit.PlainText;
|
||||
wrapMode: TextEdit.Wrap;
|
||||
activeFocusOnPress: true;
|
||||
|
@ -1122,7 +1134,8 @@ Item {
|
|||
// Text size
|
||||
size: 16;
|
||||
// Style
|
||||
color: optionalMessage.text.length === optionalMessage.maximumLength ? "#ea89a5" : (root.assetName === "" ? hifi.colors.lightGrayText : hifi.colors.baseGrayHighlight);
|
||||
color: optionalMessage.text.length === optionalMessage.maximumLength ? "#ea89a5" :
|
||||
(root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colors.lightGrayText : hifi.colors.baseGrayHighlight);
|
||||
verticalAlignment: Text.AlignTop;
|
||||
horizontalAlignment: Text.AlignRight;
|
||||
}
|
||||
|
@ -1167,7 +1180,7 @@ Item {
|
|||
parent.color = hifi.colors.blueAccent;
|
||||
}
|
||||
onClicked: {
|
||||
lightboxPopup.titleText = (root.assetName === "" ? "Send Effect" : "Gift Effect");
|
||||
lightboxPopup.titleText = (root.assetCertID === "" ? "Send Effect" : "Gift Effect");
|
||||
lightboxPopup.bodyImageSource = "sendAsset/images/send-money-effect-sm.jpg"; // Path relative to CommerceLightbox.qml
|
||||
lightboxPopup.bodyText = "Enabling this option will create a particle effect between you and " +
|
||||
"your recipient that is visible to everyone nearby.";
|
||||
|
@ -1196,7 +1209,7 @@ Item {
|
|||
// "CANCEL" button
|
||||
HifiControlsUit.Button {
|
||||
id: cancelButton_sendAssetStep;
|
||||
color: root.assetName === "" ? hifi.buttons.noneBorderlessWhite : hifi.buttons.noneBorderlessGray;
|
||||
color: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.buttons.noneBorderlessWhite : hifi.buttons.noneBorderlessGray;
|
||||
colorScheme: hifi.colorSchemes.dark;
|
||||
anchors.right: sendButton.left;
|
||||
anchors.rightMargin: 24;
|
||||
|
@ -1205,8 +1218,12 @@ Item {
|
|||
width: 100;
|
||||
text: "CANCEL";
|
||||
onClicked: {
|
||||
resetSendAssetData();
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
if (sendAssetStep.referrer === "payIn") {
|
||||
sendToScript({method: "closeSendAsset"});
|
||||
} else {
|
||||
resetSendAssetData();
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1214,7 +1231,7 @@ Item {
|
|||
HifiControlsUit.Button {
|
||||
id: sendButton;
|
||||
color: hifi.buttons.blue;
|
||||
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
anchors.right: parent.right;
|
||||
anchors.rightMargin: 0;
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
|
@ -1222,11 +1239,11 @@ Item {
|
|||
width: 100;
|
||||
text: "SUBMIT";
|
||||
onClicked: {
|
||||
if (root.assetName === "" && parseInt(amountTextField.text) > parseInt(balanceText.text)) {
|
||||
if (root.assetCertID === "" && parseInt(amountTextField.text) > parseInt(balanceText.text)) {
|
||||
amountTextField.focus = true;
|
||||
amountTextField.error = true;
|
||||
amountTextFieldError.text = "<i>amount exceeds available funds</i>";
|
||||
} else if (root.assetName === "" && (amountTextField.text === "" || parseInt(amountTextField.text) < 1)) {
|
||||
} else if (root.assetCertID === "" && (amountTextField.text === "" || parseInt(amountTextField.text) < 1)) {
|
||||
amountTextField.focus = true;
|
||||
amountTextField.error = true;
|
||||
amountTextFieldError.text = "<i>invalid amount</i>";
|
||||
|
@ -1236,7 +1253,7 @@ Item {
|
|||
root.isCurrentlySendingAsset = true;
|
||||
amountTextField.focus = false;
|
||||
optionalMessage.focus = false;
|
||||
if (sendAssetStep.referrer === "connections") {
|
||||
if (sendAssetStep.referrer === "connections" || sendAssetStep.referrer === "payIn") {
|
||||
Commerce.transferAssetToUsername(sendAssetStep.selectedRecipientUserName,
|
||||
root.assetCertID,
|
||||
parseInt(amountTextField.text),
|
||||
|
@ -1317,18 +1334,18 @@ Item {
|
|||
|
||||
Rectangle {
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: root.assetName === "" ? 15 : 125;
|
||||
anchors.topMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 125;
|
||||
anchors.left: parent.left;
|
||||
anchors.leftMargin: root.assetName === "" ? 15 : 50;
|
||||
anchors.leftMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
|
||||
anchors.right: parent.right;
|
||||
anchors.rightMargin: root.assetName === "" ? 15 : 50;
|
||||
anchors.rightMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: root.assetName === "" ? 15 : 125;
|
||||
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 125;
|
||||
color: "#FFFFFF";
|
||||
|
||||
RalewaySemiBold {
|
||||
id: paymentSentText;
|
||||
text: root.assetName === "" ? "Payment Sent" : "Gift Sent";
|
||||
text: root.assetCertID === "" ? "Payment Sent" : (sendAssetStep.referrer === "payIn" ? "Item Sent" : "Gift Sent");
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 26;
|
||||
|
@ -1346,7 +1363,7 @@ Item {
|
|||
|
||||
HiFiGlyphs {
|
||||
id: closeGlyphButton_paymentSuccess;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "" && sendAssetStep.referrer !== "payIn";
|
||||
text: hifi.glyphs.close;
|
||||
color: hifi.colors.lightGrayText;
|
||||
size: 26;
|
||||
|
@ -1364,10 +1381,14 @@ Item {
|
|||
parent.text = hifi.glyphs.close;
|
||||
}
|
||||
onClicked: {
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
resetSendAssetData();
|
||||
if (root.assetName !== "") {
|
||||
sendSignalToParent({method: "closeSendAsset"});
|
||||
if (sendAssetStep.referrer === "payIn") {
|
||||
sendToScript({method: "closeSendAsset"});
|
||||
} else {
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
resetSendAssetData();
|
||||
if (root.assetName !== "") {
|
||||
sendSignalToParent({method: "closeSendAsset"});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1409,14 +1430,14 @@ Item {
|
|||
userName: sendAssetStep.selectedRecipientUserName;
|
||||
profilePic: sendAssetStep.selectedRecipientProfilePic !== "" ? ((0 === sendAssetStep.selectedRecipientProfilePic.indexOf("http")) ?
|
||||
sendAssetStep.selectedRecipientProfilePic : (Account.metaverseServerURL + sendAssetStep.selectedRecipientProfilePic)) : "";
|
||||
isDisplayingNearby: sendAssetStep.referrer === "nearby";
|
||||
multiLineDisplay: sendAssetStep.referrer === "nearby" || sendAssetStep.referrer === "payIn";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Item {
|
||||
id: giftContainer_paymentSuccess;
|
||||
visible: root.assetName !== "";
|
||||
visible: root.assetCertID !== "";
|
||||
anchors.top: sendToContainer_paymentSuccess.bottom;
|
||||
anchors.topMargin: 8;
|
||||
anchors.left: parent.left;
|
||||
|
@ -1427,7 +1448,7 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: gift_paymentSuccess;
|
||||
text: "Gift:";
|
||||
text: sendAssetStep.referrer === "payIn" ? "Item:" : "Gift:";
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.left: parent.left;
|
||||
|
@ -1458,7 +1479,7 @@ Item {
|
|||
|
||||
Item {
|
||||
id: amountContainer_paymentSuccess;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "";
|
||||
anchors.top: sendToContainer_paymentSuccess.bottom;
|
||||
anchors.topMargin: 16;
|
||||
anchors.left: parent.left;
|
||||
|
@ -1513,7 +1534,7 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: optionalMessage_paymentSuccess;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "";
|
||||
text: optionalMessage.text;
|
||||
// Anchors
|
||||
anchors.top: amountContainer_paymentSuccess.visible ? amountContainer_paymentSuccess.bottom : sendToContainer_paymentSuccess.bottom;
|
||||
|
@ -1535,18 +1556,22 @@ Item {
|
|||
HifiControlsUit.Button {
|
||||
id: closeButton;
|
||||
color: hifi.buttons.blue;
|
||||
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
anchors.horizontalCenter: parent.horizontalCenter;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: root.assetName === "" ? 80 : 30;
|
||||
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 80 : 30;
|
||||
height: 50;
|
||||
width: 120;
|
||||
text: "Close";
|
||||
onClicked: {
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
resetSendAssetData();
|
||||
if (root.assetName !== "") {
|
||||
sendSignalToParent({method: "closeSendAsset"});
|
||||
if (sendAssetStep.referrer === "payIn") {
|
||||
sendToScript({method: "closeSendAsset"});
|
||||
} else {
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
resetSendAssetData();
|
||||
if (root.assetName !== "") {
|
||||
sendSignalToParent({method: "closeSendAsset"});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1574,18 +1599,18 @@ Item {
|
|||
|
||||
Rectangle {
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: root.assetName === "" ? 15 : 150;
|
||||
anchors.topMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 150;
|
||||
anchors.left: parent.left;
|
||||
anchors.leftMargin: root.assetName === "" ? 15 : 50;
|
||||
anchors.leftMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
|
||||
anchors.right: parent.right;
|
||||
anchors.rightMargin: root.assetName === "" ? 15 : 50;
|
||||
anchors.rightMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 50;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: root.assetName === "" ? 15 : 300;
|
||||
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 15 : 300;
|
||||
color: "#FFFFFF";
|
||||
|
||||
RalewaySemiBold {
|
||||
id: paymentFailureText;
|
||||
text: root.assetName === "" ? "Payment Failed" : "Failed";
|
||||
text: root.assetCertID === "" && sendAssetStep.referrer !== "payIn" ? "Payment Failed" : "Failed";
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 26;
|
||||
|
@ -1603,7 +1628,7 @@ Item {
|
|||
|
||||
HiFiGlyphs {
|
||||
id: closeGlyphButton_paymentFailure;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "" && sendAssetStep.referrer !== "payIn";
|
||||
text: hifi.glyphs.close;
|
||||
color: hifi.colors.lightGrayText;
|
||||
size: 26;
|
||||
|
@ -1632,7 +1657,8 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: paymentFailureDetailText;
|
||||
text: "The recipient you specified was unable to receive your " + (root.assetName === "" ? "payment." : "gift.");
|
||||
text: "The recipient you specified was unable to receive your " +
|
||||
(root.assetCertID === "" ? "payment." : (sendAssetStep.referrer === "payIn" ? "item." : "gift."));
|
||||
anchors.top: paymentFailureText.bottom;
|
||||
anchors.topMargin: 20;
|
||||
anchors.left: parent.left;
|
||||
|
@ -1650,7 +1676,7 @@ Item {
|
|||
|
||||
Item {
|
||||
id: sendToContainer_paymentFailure;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "" || sendAssetStep.referrer === "payIn";
|
||||
anchors.top: paymentFailureDetailText.bottom;
|
||||
anchors.topMargin: 8;
|
||||
anchors.left: parent.left;
|
||||
|
@ -1685,13 +1711,13 @@ Item {
|
|||
userName: sendAssetStep.selectedRecipientUserName;
|
||||
profilePic: sendAssetStep.selectedRecipientProfilePic !== "" ? ((0 === sendAssetStep.selectedRecipientProfilePic.indexOf("http")) ?
|
||||
sendAssetStep.selectedRecipientProfilePic : (Account.metaverseServerURL + sendAssetStep.selectedRecipientProfilePic)) : "";
|
||||
isDisplayingNearby: sendAssetStep.referrer === "nearby";
|
||||
multiLineDisplay: sendAssetStep.referrer === "nearby" || sendAssetStep.referrer === "payIn";
|
||||
}
|
||||
}
|
||||
|
||||
Item {
|
||||
id: amountContainer_paymentFailure;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "";
|
||||
anchors.top: sendToContainer_paymentFailure.bottom;
|
||||
anchors.topMargin: 16;
|
||||
anchors.left: parent.left;
|
||||
|
@ -1746,7 +1772,7 @@ Item {
|
|||
|
||||
RalewaySemiBold {
|
||||
id: optionalMessage_paymentFailure;
|
||||
visible: root.assetName === "";
|
||||
visible: root.assetCertID === "" || sendAssetStep.referrer === "payIn";
|
||||
text: optionalMessage.text;
|
||||
// Anchors
|
||||
anchors.top: amountContainer_paymentFailure.visible ? amountContainer_paymentFailure.bottom : sendToContainer_paymentFailure.bottom;
|
||||
|
@ -1768,19 +1794,23 @@ Item {
|
|||
HifiControlsUit.Button {
|
||||
id: closeButton_paymentFailure;
|
||||
color: hifi.buttons.noneBorderless;
|
||||
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
anchors.right: retryButton_paymentFailure.left;
|
||||
anchors.rightMargin: 12;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: root.assetName === "" ? 80 : 30;
|
||||
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 80 : 30;
|
||||
height: 50;
|
||||
width: 120;
|
||||
text: "Cancel";
|
||||
onClicked: {
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
resetSendAssetData();
|
||||
if (root.assetName !== "") {
|
||||
sendSignalToParent({method: "closeSendAsset"});
|
||||
if (sendAssetStep.referrer === "payIn") {
|
||||
sendToScript({method: "closeSendAsset"});
|
||||
} else {
|
||||
root.nextActiveView = "sendAssetHome";
|
||||
resetSendAssetData();
|
||||
if (root.assetName !== "") {
|
||||
sendSignalToParent({method: "closeSendAsset"});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1789,17 +1819,17 @@ Item {
|
|||
HifiControlsUit.Button {
|
||||
id: retryButton_paymentFailure;
|
||||
color: hifi.buttons.blue;
|
||||
colorScheme: root.assetName === "" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
colorScheme: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? hifi.colorSchemes.dark : hifi.colorSchemes.light;
|
||||
anchors.right: parent.right;
|
||||
anchors.rightMargin: 12;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: root.assetName === "" ? 80 : 30;
|
||||
anchors.bottomMargin: root.assetCertID === "" || sendAssetStep.referrer === "payIn" ? 80 : 30;
|
||||
height: 50;
|
||||
width: 120;
|
||||
text: "Retry";
|
||||
onClicked: {
|
||||
root.isCurrentlySendingAsset = true;
|
||||
if (sendAssetStep.referrer === "connections") {
|
||||
if (sendAssetStep.referrer === "connections" || sendAssetStep.referrer === "payIn") {
|
||||
Commerce.transferAssetToUsername(sendAssetStep.selectedRecipientUserName,
|
||||
root.assetCertID,
|
||||
parseInt(amountTextField.text),
|
||||
|
@@ -1866,11 +1896,32 @@ Item {
case 'updateSelectedRecipientUsername':
sendAssetStep.selectedRecipientUserName = message.userName;
break;
case 'updateSendAssetQML':
root.assetName = "";
root.assetCertID = message.assetCertID || "";
if (root.assetCertID === "") {
amountTextField.text = message.amount || 1;
} else {
amountTextField.text = "";
Commerce.certificateInfo(root.assetCertID);
}
sendAssetStep.referrer = "payIn";
sendAssetStep.selectedRecipientNodeID = "";
sendAssetStep.selectedRecipientDisplayName = "Determined by script:";
sendAssetStep.selectedRecipientUserName = message.username;
optionalMessage.text = message.message || "No Message Provided";

root.nextActiveView = "sendAssetStep";
break;
case 'inspectionCertificate_resetCert':
// NOP
break;
default:
console.log('SendAsset: Unrecognized message from wallet.js');
}
}
signal sendSignalToParent(var msg);
signal sendToScript(var message);
//
// FUNCTION DEFINITIONS END
//
@@ -256,7 +256,7 @@ Rectangle {
color: hifi.colors.baseGrayHighlight;

HifiStylesUit.RalewaySemiBold {
text: "Wallet";
text: "Secure Transactions";
anchors.fill: parent;
anchors.leftMargin: 20;
color: hifi.colors.white;

@@ -287,7 +287,7 @@ Rectangle {

HifiStylesUit.RalewaySemiBold {
id: securityPictureText;
text: "Wallet Security Picture";
text: "Security Picture";
// Anchors
anchors.top: parent.top;
anchors.bottom: parent.bottom;
@@ -207,12 +207,12 @@ Flickable {
width: 112
label: "Y Offset"
suffix: " cm"
minimumValue: -10
minimumValue: -50
maximumValue: 50
realStepSize: 1
realValue: -5
colorScheme: hifi.colorSchemes.dark

onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}

@@ -223,14 +223,14 @@ Flickable {
id: headZOffset
width: 112
label: "Z Offset"
minimumValue: -10
minimumValue: -50
maximumValue: 50
realStepSize: 1
decimals: 1
suffix: " cm"
realValue: -5
colorScheme: hifi.colorSchemes.dark

onEditingFinished: {
onRealValueChanged: {
sendConfigurationSettings();
openVrConfiguration.forceActiveFocus();
}
@ -319,11 +319,12 @@ Flickable {
|
|||
width: 112
|
||||
suffix: " cm"
|
||||
label: "Y Offset"
|
||||
minimumValue: -10
|
||||
minimumValue: -30
|
||||
maximumValue: 30
|
||||
realStepSize: 1
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
||||
onEditingFinished: {
|
||||
onRealValueChanged: {
|
||||
sendConfigurationSettings();
|
||||
openVrConfiguration.forceActiveFocus();
|
||||
}
|
||||
|
@ -335,12 +336,13 @@ Flickable {
|
|||
width: 112
|
||||
label: "Z Offset"
|
||||
suffix: " cm"
|
||||
minimumValue: -10
|
||||
minimumValue: -30
|
||||
maximumValue: 30
|
||||
realStepSize: 1
|
||||
decimals: 1
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
||||
onEditingFinished: {
|
||||
onRealValueChanged: {
|
||||
sendConfigurationSettings();
|
||||
openVrConfiguration.forceActiveFocus();
|
||||
}
|
||||
|
@ -574,7 +576,7 @@ Flickable {
|
|||
colorScheme: hifi.colorSchemes.dark
|
||||
realValue: 33.0
|
||||
|
||||
onEditingFinished: {
|
||||
onRealValueChanged: {
|
||||
sendConfigurationSettings();
|
||||
openVrConfiguration.forceActiveFocus();
|
||||
}
|
||||
|
@ -592,7 +594,7 @@ Flickable {
|
|||
colorScheme: hifi.colorSchemes.dark
|
||||
realValue: 48
|
||||
|
||||
onEditingFinished: {
|
||||
onRealValueChanged: {
|
||||
sendConfigurationSettings();
|
||||
openVrConfiguration.forceActiveFocus();
|
||||
}
|
||||
|
@ -771,7 +773,7 @@ Flickable {
|
|||
realStepSize: 1.0
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
||||
onEditingFinished: {
|
||||
onRealValueChanged: {
|
||||
calibrationTimer.interval = realValue * 1000;
|
||||
openVrConfiguration.countDown = realValue;
|
||||
numberAnimation.duration = calibrationTimer.interval;
|
||||
|
@@ -977,6 +979,13 @@ Flickable {
var configurationType = settings["trackerConfiguration"];
displayTrackerConfiguration(configurationType);

// default offset for a user wearing the puck on the center of their forehead.
headYOffset.realValue = 4; // (cm), puck is above the head joint.
headZOffset.realValue = 8; // (cm), puck is in front of the head joint.

// defaults for a user wearing the pucks on the backs of their palms.
handYOffset.realValue = 8; // (cm), puck is past the hand joint. (set this to zero if the puck is on the wrist)
handZOffset.realValue = -4; // (cm), puck is above the hand joint.

var HmdHead = settings["HMDHead"];
var viveController = settings["handController"];
@@ -56,7 +56,7 @@ StackView {
Qt.callLater(function() {
addressBarDialog.keyboardEnabled = HMD.active;
addressLine.forceActiveFocus();
addressBarDialog.raised = true;
addressBarDialog.keyboardRaised = true;
})
}

@@ -61,7 +61,7 @@ Item {
RalewaySemiBold {
text: Account.loggedIn ? qsTr("Log out") : qsTr("Log in")
horizontalAlignment: Text.AlignRight
anchors.right: parent.right
Layout.alignment: Qt.AlignRight
font.pixelSize: 20
color: "#afafaf"
}

@@ -71,7 +71,7 @@ Item {
height: Account.loggedIn ? parent.height/2 - parent.spacing/2 : 0
text: Account.loggedIn ? "[" + tabletRoot.usernameShort + "]" : ""
horizontalAlignment: Text.AlignRight
anchors.right: parent.right
Layout.alignment: Qt.AlignRight
font.pixelSize: 20
color: "#afafaf"
}

@@ -115,9 +115,9 @@ Item {
property int previousIndex: -1
Repeater {
id: pageRepeater
model: Math.ceil(tabletProxy.buttons.rowCount() / TabletEnums.ButtonsOnPage)
model: tabletProxy != null ? Math.ceil(tabletProxy.buttons.rowCount() / TabletEnums.ButtonsOnPage) : 0
onItemAdded: {
item.proxyModel.sourceModel = tabletProxy.buttons;
item.proxyModel.sourceModel = tabletProxy != null ? tabletProxy.buttons : null;
item.proxyModel.pageIndex = index;
}
BIN  interface/resources/sounds/keyboardPress.mp3 (new file; binary file not shown)
File diff suppressed because it is too large.
@@ -70,11 +70,11 @@
#include "ui/overlays/Overlays.h"

#include "workload/GameWorkload.h"
#include "graphics/GraphicsEngine.h"

#include <procedural/ProceduralSkybox.h>
#include <graphics/Skybox.h>
#include <ModelScriptingInterface.h>
#include "FrameTimingsScriptingInterface.h"

#include "Sound.h"

@@ -153,7 +153,6 @@ public:
void updateSecondaryCameraViewFrustum();

void updateCamera(RenderArgs& renderArgs, float deltaTime);
void paintGL();
void resizeGL();

bool event(QEvent* event) override;

@@ -203,8 +202,8 @@ public:

Overlays& getOverlays() { return _overlays; }

size_t getRenderFrameCount() const { return _renderFrameCount; }
float getRenderLoopRate() const { return _renderLoopCounter.rate(); }
size_t getRenderFrameCount() const { return _graphicsEngine.getRenderFrameCount(); }
float getRenderLoopRate() const { return _graphicsEngine.getRenderLoopRate(); }
float getNumCollisionObjects() const;
float getTargetRenderFrameRate() const; // frames/second
@ -275,10 +274,10 @@ public:
|
|||
void setMaxOctreePacketsPerSecond(int maxOctreePPS);
|
||||
int getMaxOctreePacketsPerSecond() const;
|
||||
|
||||
render::ScenePointer getMain3DScene() override { return _main3DScene; }
|
||||
const render::ScenePointer& getMain3DScene() const { return _main3DScene; }
|
||||
render::EnginePointer getRenderEngine() override { return _renderEngine; }
|
||||
gpu::ContextPointer getGPUContext() const { return _gpuContext; }
|
||||
render::ScenePointer getMain3DScene() override { return _graphicsEngine.getRenderScene(); }
|
||||
render::EnginePointer getRenderEngine() override { return _graphicsEngine.getRenderEngine(); }
|
||||
gpu::ContextPointer getGPUContext() const { return _graphicsEngine.getGPUContext(); }
|
||||
|
||||
|
||||
const GameWorkload& getGameWorkload() const { return _gameWorkload; }
|
||||
|
||||
|
@ -310,6 +309,7 @@ public:
|
|||
|
||||
bool isServerlessMode() const;
|
||||
bool isInterstitialMode() const { return _interstitialMode; }
|
||||
bool failedToConnectToEntityServer() const { return _failedToConnectToEntityServer; }
|
||||
|
||||
void replaceDomainContent(const QString& url);
|
||||
|
||||
|
@ -467,6 +467,7 @@ private slots:
|
|||
|
||||
void loadSettings();
|
||||
void saveSettings() const;
|
||||
void setFailedToConnectToEntityServer() { _failedToConnectToEntityServer = true; }
|
||||
|
||||
bool acceptSnapshot(const QString& urlString);
|
||||
bool askToSetAvatarUrl(const QString& url);
|
||||
|
@ -513,7 +514,6 @@ private:
|
|||
bool handleFileOpenEvent(QFileOpenEvent* event);
|
||||
void cleanupBeforeQuit();
|
||||
|
||||
bool shouldPaint() const;
|
||||
void idle();
|
||||
void update(float deltaTime);
|
||||
|
||||
|
@ -533,8 +533,6 @@ private:
|
|||
|
||||
void initializeAcceptedFiles();
|
||||
|
||||
void runRenderFrame(RenderArgs* renderArgs/*, Camera& whichCamera, bool selfAvatarOnly = false*/);
|
||||
|
||||
bool importJSONFromURL(const QString& urlString);
|
||||
bool importSVOFromURL(const QString& urlString);
|
||||
bool importFromZIP(const QString& filePath);
|
||||
|
@ -584,18 +582,12 @@ private:
|
|||
|
||||
bool _activatingDisplayPlugin { false };
|
||||
|
||||
uint32_t _renderFrameCount { 0 };
|
||||
|
||||
// Frame Rate Measurement
|
||||
RateCounter<500> _renderLoopCounter;
|
||||
RateCounter<500> _gameLoopCounter;
|
||||
|
||||
FrameTimingsScriptingInterface _frameTimingsScriptingInterface;
|
||||
|
||||
QTimer _minimizedWindowTimer;
|
||||
QElapsedTimer _timerStart;
|
||||
QElapsedTimer _lastTimeUpdated;
|
||||
QElapsedTimer _lastTimeRendered;
|
||||
|
||||
int _minimumGPUTextureMemSizeStabilityCount { 30 };
|
||||
|
||||
|
@ -681,29 +673,9 @@ private:
|
|||
|
||||
quint64 _lastFaceTrackerUpdate;
|
||||
|
||||
render::ScenePointer _main3DScene{ new render::Scene(glm::vec3(-0.5f * (float)TREE_SCALE), (float)TREE_SCALE) };
|
||||
render::EnginePointer _renderEngine{ new render::RenderEngine() };
|
||||
gpu::ContextPointer _gpuContext; // initialized during window creation
|
||||
|
||||
GameWorkload _gameWorkload;
|
||||
|
||||
mutable QMutex _renderArgsMutex{ QMutex::Recursive };
|
||||
struct AppRenderArgs {
|
||||
render::Args _renderArgs;
|
||||
glm::mat4 _eyeToWorld;
|
||||
glm::mat4 _view;
|
||||
glm::mat4 _eyeOffsets[2];
|
||||
glm::mat4 _eyeProjections[2];
|
||||
glm::mat4 _headPose;
|
||||
glm::mat4 _sensorToWorld;
|
||||
float _sensorToWorldScale { 1.0f };
|
||||
bool _isStereo{ false };
|
||||
};
|
||||
AppRenderArgs _appRenderArgs;
|
||||
|
||||
|
||||
using RenderArgsEditor = std::function <void (AppRenderArgs&)>;
|
||||
void editRenderArgs(RenderArgsEditor editor);
|
||||
GraphicsEngine _graphicsEngine;
|
||||
void updateRenderArgs(float deltaTime);
|
||||
|
||||
|
||||
|
@ -719,6 +691,7 @@ private:
|
|||
bool _isForeground = true; // starts out assumed to be in foreground
|
||||
bool _isGLInitialized { false };
|
||||
bool _physicsEnabled { false };
|
||||
bool _failedToConnectToEntityServer { false };
|
||||
|
||||
bool _reticleClickPressed { false };
|
||||
|
||||
|
@ -748,8 +721,6 @@ private:
|
|||
|
||||
bool _keyboardDeviceHasFocus { true };
|
||||
|
||||
QString _returnFromFullScreenMirrorTo;
|
||||
|
||||
ConnectionMonitor _connectionMonitor;
|
||||
|
||||
QTimer _addAssetToWorldResizeTimer;
|
||||
|
@ -765,6 +736,7 @@ private:
|
|||
QStringList _addAssetToWorldInfoMessages; // Info message
|
||||
QTimer _addAssetToWorldInfoTimer;
|
||||
QTimer _addAssetToWorldErrorTimer;
|
||||
mutable QTimer _entityServerConnectionTimer;
|
||||
|
||||
FileScriptingInterface* _fileDownload;
|
||||
AudioInjectorPointer _snapshotSoundInjector;
|
||||
|
@ -782,12 +754,8 @@ private:
|
|||
|
||||
QUrl _avatarOverrideUrl;
|
||||
bool _saveAvatarOverrideUrl { false };
|
||||
QObject* _renderEventHandler{ nullptr };
|
||||
|
||||
friend class RenderEventHandler;
|
||||
|
||||
std::atomic<bool> _pendingIdleEvent { true };
|
||||
std::atomic<bool> _pendingRenderEvent { true };
|
||||
|
||||
bool quitWhenFinished { false };
|
||||
|
||||
|
|
|
@ -19,218 +19,207 @@
|
|||
#include "Util.h"
|
||||
|
||||
|
||||
// Statically provided display and input plugins
|
||||
extern DisplayPluginList getDisplayPlugins();
|
||||
|
||||
void Application::editRenderArgs(RenderArgsEditor editor) {
|
||||
QMutexLocker renderLocker(&_renderArgsMutex);
|
||||
editor(_appRenderArgs);
|
||||
|
||||
}
|
||||
|
||||
void Application::paintGL() {
|
||||
// Some plugins process message events, allowing paintGL to be called reentrantly.
|
||||
|
||||
_renderFrameCount++;
|
||||
_lastTimeRendered.start();
|
||||
|
||||
auto lastPaintBegin = usecTimestampNow();
|
||||
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
|
||||
PerformanceTimer perfTimer("paintGL");
|
||||
|
||||
if (nullptr == _displayPlugin) {
|
||||
return;
|
||||
}
|
||||
|
||||
DisplayPluginPointer displayPlugin;
|
||||
{
|
||||
PROFILE_RANGE(render, "/getActiveDisplayPlugin");
|
||||
displayPlugin = getActiveDisplayPlugin();
|
||||
}
|
||||
|
||||
{
|
||||
PROFILE_RANGE(render, "/pluginBeginFrameRender");
|
||||
// If a display plugin loses it's underlying support, it
|
||||
// needs to be able to signal us to not use it
|
||||
if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
|
||||
QMetaObject::invokeMethod(this, "updateDisplayMode");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
RenderArgs renderArgs;
|
||||
glm::mat4 HMDSensorPose;
|
||||
glm::mat4 eyeToWorld;
|
||||
glm::mat4 sensorToWorld;
|
||||
|
||||
bool isStereo;
|
||||
glm::mat4 stereoEyeOffsets[2];
|
||||
glm::mat4 stereoEyeProjections[2];
|
||||
|
||||
{
|
||||
QMutexLocker viewLocker(&_renderArgsMutex);
|
||||
renderArgs = _appRenderArgs._renderArgs;
|
||||
|
||||
// don't render if there is no context.
|
||||
if (!_appRenderArgs._renderArgs._context) {
|
||||
return;
|
||||
}
|
||||
|
||||
HMDSensorPose = _appRenderArgs._headPose;
|
||||
eyeToWorld = _appRenderArgs._eyeToWorld;
|
||||
sensorToWorld = _appRenderArgs._sensorToWorld;
|
||||
isStereo = _appRenderArgs._isStereo;
|
||||
for_each_eye([&](Eye eye) {
|
||||
stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
|
||||
stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
PROFILE_RANGE(render, "/gpuContextReset");
|
||||
_gpuContext->beginFrame(_appRenderArgs._view, HMDSensorPose);
|
||||
// Reset the gpu::Context Stages
|
||||
// Back to the default framebuffer;
|
||||
gpu::doInBatch("Application_render::gpuContextReset", _gpuContext, [&](gpu::Batch& batch) {
|
||||
batch.resetStages();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
{
|
||||
PROFILE_RANGE(render, "/renderOverlay");
|
||||
PerformanceTimer perfTimer("renderOverlay");
|
||||
// NOTE: There is no batch associated with this renderArgs
|
||||
// the ApplicationOverlay class assumes it's viewport is setup to be the device size
|
||||
renderArgs._viewport = glm::ivec4(0, 0, getDeviceSize());
|
||||
_applicationOverlay.renderOverlay(&renderArgs);
|
||||
}
|
||||
|
||||
{
|
||||
PROFILE_RANGE(render, "/updateCompositor");
|
||||
getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer finalFramebuffer;
|
||||
QSize finalFramebufferSize;
|
||||
{
|
||||
PROFILE_RANGE(render, "/getOutputFramebuffer");
|
||||
// Primary rendering pass
|
||||
auto framebufferCache = DependencyManager::get<FramebufferCache>();
|
||||
finalFramebufferSize = framebufferCache->getFrameBufferSize();
|
||||
// Final framebuffer that will be handed to the display-plugin
|
||||
finalFramebuffer = framebufferCache->getFramebuffer();
|
||||
}
|
||||
|
||||
{
|
||||
if (isStereo) {
|
||||
renderArgs._context->enableStereo(true);
|
||||
renderArgs._context->setStereoProjections(stereoEyeProjections);
|
||||
renderArgs._context->setStereoViews(stereoEyeOffsets);
|
||||
}
|
||||
|
||||
renderArgs._hudOperator = displayPlugin->getHUDOperator();
|
||||
renderArgs._hudTexture = _applicationOverlay.getOverlayTexture();
|
||||
renderArgs._blitFramebuffer = finalFramebuffer;
|
||||
runRenderFrame(&renderArgs);
|
||||
}
|
||||
|
||||
auto frame = _gpuContext->endFrame();
|
||||
frame->frameIndex = _renderFrameCount;
|
||||
frame->framebuffer = finalFramebuffer;
|
||||
frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
|
||||
auto frameBufferCache = DependencyManager::get<FramebufferCache>();
|
||||
if (frameBufferCache) {
|
||||
frameBufferCache->releaseFramebuffer(framebuffer);
|
||||
}
|
||||
};
|
||||
// deliver final scene rendering commands to the display plugin
|
||||
{
|
||||
PROFILE_RANGE(render, "/pluginOutput");
|
||||
PerformanceTimer perfTimer("pluginOutput");
|
||||
_renderLoopCounter.increment();
|
||||
displayPlugin->submitFrame(frame);
|
||||
}
|
||||
|
||||
// Reset the framebuffer and stereo state
|
||||
renderArgs._blitFramebuffer.reset();
|
||||
renderArgs._context->enableStereo(false);
|
||||
|
||||
{
|
||||
auto stats = Stats::getInstance();
|
||||
if (stats) {
|
||||
stats->setRenderDetails(renderArgs._details);
|
||||
}
|
||||
}
|
||||
|
||||
uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
|
||||
_frameTimingsScriptingInterface.addValue(lastPaintDuration);
|
||||
}
|
||||
//void Application::paintGL() {
|
||||
// // Some plugins process message events, allowing paintGL to be called reentrantly.
|
||||
//
|
||||
// _renderFrameCount++;
|
||||
// // SG: Moved into the RenderEventHandler
|
||||
// //_lastTimeRendered.start();
|
||||
//
|
||||
// auto lastPaintBegin = usecTimestampNow();
|
||||
// PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
|
||||
// PerformanceTimer perfTimer("paintGL");
|
||||
//
|
||||
// if (nullptr == _displayPlugin) {
|
||||
// return;
|
||||
// }
|
||||
//
|
||||
// DisplayPluginPointer displayPlugin;
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/getActiveDisplayPlugin");
|
||||
// displayPlugin = getActiveDisplayPlugin();
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/pluginBeginFrameRender");
|
||||
// // If a display plugin loses it's underlying support, it
|
||||
// // needs to be able to signal us to not use it
|
||||
// if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
|
||||
// QMetaObject::invokeMethod(this, "updateDisplayMode");
|
||||
// return;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// RenderArgs renderArgs;
|
||||
// glm::mat4 HMDSensorPose;
|
||||
// glm::mat4 eyeToWorld;
|
||||
// glm::mat4 sensorToWorld;
|
||||
//
|
||||
// bool isStereo;
|
||||
// glm::mat4 stereoEyeOffsets[2];
|
||||
// glm::mat4 stereoEyeProjections[2];
|
||||
//
|
||||
// {
|
||||
// QMutexLocker viewLocker(&_renderArgsMutex);
|
||||
// renderArgs = _appRenderArgs._renderArgs;
|
||||
//
|
||||
// // don't render if there is no context.
|
||||
// if (!_appRenderArgs._renderArgs._context) {
|
||||
// return;
|
||||
// }
|
||||
//
|
||||
// HMDSensorPose = _appRenderArgs._headPose;
|
||||
// eyeToWorld = _appRenderArgs._eyeToWorld;
|
||||
// sensorToWorld = _appRenderArgs._sensorToWorld;
|
||||
// isStereo = _appRenderArgs._isStereo;
|
||||
// for_each_eye([&](Eye eye) {
|
||||
// stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
|
||||
// stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
|
||||
// });
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/gpuContextReset");
|
||||
// _graphicsEngine.getGPUContext()->beginFrame(_appRenderArgs._view, HMDSensorPose);
|
||||
// // Reset the gpu::Context Stages
|
||||
// // Back to the default framebuffer;
|
||||
// gpu::doInBatch("Application_render::gpuContextReset", _graphicsEngine.getGPUContext(), [&](gpu::Batch& batch) {
|
||||
// batch.resetStages();
|
||||
// });
|
||||
// }
|
||||
//
|
||||
//
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/renderOverlay");
|
||||
// PerformanceTimer perfTimer("renderOverlay");
|
||||
// // NOTE: There is no batch associated with this renderArgs
|
||||
// // the ApplicationOverlay class assumes it's viewport is setup to be the device size
|
||||
// renderArgs._viewport = glm::ivec4(0, 0, getDeviceSize() * getRenderResolutionScale());
|
||||
// _applicationOverlay.renderOverlay(&renderArgs);
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/updateCompositor");
|
||||
// getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
|
||||
// }
|
||||
//
|
||||
// gpu::FramebufferPointer finalFramebuffer;
|
||||
// QSize finalFramebufferSize;
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/getOutputFramebuffer");
|
||||
// // Primary rendering pass
|
||||
// auto framebufferCache = DependencyManager::get<FramebufferCache>();
|
||||
// finalFramebufferSize = framebufferCache->getFrameBufferSize();
|
||||
// // Final framebuffer that will be handled to the display-plugin
|
||||
// finalFramebuffer = framebufferCache->getFramebuffer();
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// if (isStereo) {
|
||||
// renderArgs._context->enableStereo(true);
|
||||
// renderArgs._context->setStereoProjections(stereoEyeProjections);
|
||||
// renderArgs._context->setStereoViews(stereoEyeOffsets);
|
||||
// }
|
||||
//
|
||||
// renderArgs._hudOperator = displayPlugin->getHUDOperator();
|
||||
// renderArgs._hudTexture = _applicationOverlay.getOverlayTexture();
|
||||
// renderArgs._blitFramebuffer = finalFramebuffer;
|
||||
// _graphicsEngine.render_runRenderFrame(&renderArgs);
|
||||
// }
|
||||
//
|
||||
// auto frame = _graphicsEngine.getGPUContext()->endFrame();
|
||||
// frame->frameIndex = _renderFrameCount;
|
||||
// frame->framebuffer = finalFramebuffer;
|
||||
// frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
|
||||
// auto frameBufferCache = DependencyManager::get<FramebufferCache>();
|
||||
// if (frameBufferCache) {
|
||||
// frameBufferCache->releaseFramebuffer(framebuffer);
|
||||
// }
|
||||
// };
|
||||
// // deliver final scene rendering commands to the display plugin
|
||||
// {
|
||||
// PROFILE_RANGE(render, "/pluginOutput");
|
||||
// PerformanceTimer perfTimer("pluginOutput");
|
||||
// _renderLoopCounter.increment();
|
||||
// displayPlugin->submitFrame(frame);
|
||||
// }
|
||||
//
|
||||
// // Reset the framebuffer and stereo state
|
||||
// renderArgs._blitFramebuffer.reset();
|
||||
// renderArgs._context->enableStereo(false);
|
||||
//
|
||||
// {
|
||||
// Stats::getInstance()->setRenderDetails(renderArgs._details);
|
||||
// }
|
||||
//
|
||||
// uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
|
||||
// _frameTimingsScriptingInterface.addValue(lastPaintDuration);
|
||||
//}
|
||||
|
||||
|
||||
// WorldBox Render Data & rendering functions
|
||||
|
||||
class WorldBoxRenderData {
|
||||
public:
|
||||
typedef render::Payload<WorldBoxRenderData> Payload;
|
||||
typedef Payload::DataPointer Pointer;
|
||||
|
||||
int _val = 0;
|
||||
static render::ItemID _item; // unique WorldBoxRenderData
|
||||
};
|
||||
|
||||
render::ItemID WorldBoxRenderData::_item{ render::Item::INVALID_ITEM_ID };
|
||||
|
||||
namespace render {
|
||||
template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape().withTagBits(ItemKey::TAG_BITS_0 | ItemKey::TAG_BITS_1); }
|
||||
template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
|
||||
template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
|
||||
PerformanceTimer perfTimer("worldBox");
|
||||
|
||||
auto& batch = *args->_batch;
|
||||
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
|
||||
renderWorldBox(args, batch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Application::runRenderFrame(RenderArgs* renderArgs) {
|
||||
PROFILE_RANGE(render, __FUNCTION__);
|
||||
PerformanceTimer perfTimer("display");
|
||||
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::runRenderFrame()");
|
||||
|
||||
// The pending changes collecting the changes here
|
||||
render::Transaction transaction;
|
||||
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
|
||||
// render models...
|
||||
PerformanceTimer perfTimer("entities");
|
||||
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
|
||||
"Application::runRenderFrame() ... entities...");
|
||||
|
||||
RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE;
|
||||
|
||||
renderArgs->_debugFlags = renderDebugFlags;
|
||||
}
|
||||
|
||||
// Make sure the WorldBox is in the scene
|
||||
// For the record, this one RenderItem is the first one we created and added to the scene.
|
||||
// We could move that code elsewhere but you know...
|
||||
if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
|
||||
auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
|
||||
auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);
|
||||
|
||||
WorldBoxRenderData::_item = _main3DScene->allocateID();
|
||||
|
||||
transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
|
||||
_main3DScene->enqueueTransaction(transaction);
|
||||
}
|
||||
|
||||
{
|
||||
PerformanceTimer perfTimer("EngineRun");
|
||||
_renderEngine->getRenderContext()->args = renderArgs;
|
||||
_renderEngine->run();
|
||||
}
|
||||
}
|
||||
//
|
||||
//class WorldBoxRenderData {
|
||||
//public:
|
||||
// typedef render::Payload<WorldBoxRenderData> Payload;
|
||||
// typedef Payload::DataPointer Pointer;
|
||||
//
|
||||
// int _val = 0;
|
||||
// static render::ItemID _item; // unique WorldBoxRenderData
|
||||
//};
|
||||
//
|
||||
//render::ItemID WorldBoxRenderData::_item{ render::Item::INVALID_ITEM_ID };
|
||||
//
|
||||
//namespace render {
|
||||
// template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape().withTagBits(ItemKey::TAG_BITS_0 | ItemKey::TAG_BITS_1); }
|
||||
// template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
|
||||
// template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
|
||||
// if (Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
|
||||
// PerformanceTimer perfTimer("worldBox");
|
||||
//
|
||||
// auto& batch = *args->_batch;
|
||||
// DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
|
||||
// renderWorldBox(args, batch);
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//void Application::runRenderFrame(RenderArgs* renderArgs) {
|
||||
// PROFILE_RANGE(render, __FUNCTION__);
|
||||
// PerformanceTimer perfTimer("display");
|
||||
// PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::runRenderFrame()");
|
||||
//
|
||||
// // The pending changes collecting the changes here
|
||||
// render::Transaction transaction;
|
||||
//
|
||||
// if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
|
||||
// // render models...
|
||||
// PerformanceTimer perfTimer("entities");
|
||||
// PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
|
||||
// "Application::runRenderFrame() ... entities...");
|
||||
//
|
||||
// RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE;
|
||||
//
|
||||
// renderArgs->_debugFlags = renderDebugFlags;
|
||||
// }
|
||||
//
|
||||
// // Make sure the WorldBox is in the scene
|
||||
// // For the record, this one RenderItem is the first one we created and added to the scene.
|
||||
// // We could move that code elsewhere but you know...
|
||||
// if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
|
||||
// auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
|
||||
// auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);
|
||||
//
|
||||
// WorldBoxRenderData::_item = _main3DScene->allocateID();
|
||||
//
|
||||
// transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
|
||||
// _main3DScene->enqueueTransaction(transaction);
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// PerformanceTimer perfTimer("EngineRun");
|
||||
// _renderEngine->getRenderContext()->args = renderArgs;
|
||||
// _renderEngine->run();
|
||||
// }
|
||||
//}
|
||||
|
||||
|
|
|
@@ -247,25 +247,35 @@ QVariantMap AvatarBookmarks::getAvatarDataToBookmark() {
bookmark.insert(ENTRY_AVATAR_URL, avatarUrl);
bookmark.insert(ENTRY_AVATAR_SCALE, avatarScale);

QScriptEngine scriptEngine;
QVariantList wearableEntities;
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
auto avatarEntities = myAvatar->getAvatarEntityData();
for (auto entityID : avatarEntities.keys()) {
auto entity = entityTree->findEntityByID(entityID);
if (!entity || !isWearableEntity(entity)) {
continue;

if (entityTree) {
QScriptEngine scriptEngine;
auto avatarEntities = myAvatar->getAvatarEntityData();
for (auto entityID : avatarEntities.keys()) {
auto entity = entityTree->findEntityByID(entityID);
if (!entity || !isWearableEntity(entity)) {
continue;
}

QVariantMap avatarEntityData;

EncodeBitstreamParams params;
auto desiredProperties = entity->getEntityProperties(params);
desiredProperties += PROP_LOCAL_POSITION;
desiredProperties += PROP_LOCAL_ROTATION;
desiredProperties -= PROP_JOINT_ROTATIONS_SET;
desiredProperties -= PROP_JOINT_ROTATIONS;
desiredProperties -= PROP_JOINT_TRANSLATIONS_SET;
desiredProperties -= PROP_JOINT_TRANSLATIONS;

EntityItemProperties entityProperties = entity->getProperties(desiredProperties);
QScriptValue scriptProperties = EntityItemPropertiesToScriptValue(&scriptEngine, entityProperties);
avatarEntityData["properties"] = scriptProperties.toVariant();
wearableEntities.append(QVariant(avatarEntityData));
}
QVariantMap avatarEntityData;
EncodeBitstreamParams params;
auto desiredProperties = entity->getEntityProperties(params);
desiredProperties += PROP_LOCAL_POSITION;
desiredProperties += PROP_LOCAL_ROTATION;
EntityItemProperties entityProperties = entity->getProperties(desiredProperties);
QScriptValue scriptProperties = EntityItemPropertiesToScriptValue(&scriptEngine, entityProperties);
avatarEntityData["properties"] = scriptProperties.toVariant();
wearableEntities.append(QVariant(avatarEntityData));
}
bookmark.insert(ENTRY_AVATAR_ENTITIES, wearableEntities);
return bookmark;
@@ -48,7 +48,9 @@ void ConnectionMonitor::init() {
emit setRedirectErrorState(REDIRECT_HIFI_ADDRESS, "", 5);
} else {
qDebug() << "ConnectionMonitor: Showing connection failure window";
#if !defined(DISABLE_QML)
DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(true);
#endif
}
});
}

@@ -59,8 +61,10 @@ void ConnectionMonitor::startTimer() {

void ConnectionMonitor::stopTimer() {
_timer.stop();
#if !defined(DISABLE_QML)
bool enableInterstitial = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
if (!enableInterstitial) {
DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(false);
}
#endif
}
@@ -141,7 +141,7 @@ Menu::Menu() {
assetServerAction->setEnabled(nodeList->getThisNodeCanWriteAssets());
}

// Edit > Package Model as .fst...
// Edit > Package Avatar as .fst...
addActionToQMenuAndActionHash(editMenu, MenuOption::PackageModel, 0,
qApp, SLOT(packageModel()));

@@ -364,8 +364,6 @@ Menu::Menu() {
qApp->setHmdTabletBecomesToolbarSetting(action->isChecked());
});

addCheckableActionToQMenuAndActionHash(uiOptionsMenu, MenuOption::Use3DKeyboard, 0, true);

// Developer > Render >>>
MenuWrapper* renderOptionsMenu = developerMenu->addMenu("Render");
@@ -117,7 +117,7 @@ namespace MenuOption {
const QString FrameTimer = "Show Timer";
const QString FullscreenMirror = "Mirror";
const QString Help = "Help...";
const QString HomeLocation = "Home";
const QString HomeLocation = "Home ";
const QString IncreaseAvatarSize = "Increase Avatar Size";
const QString IndependentMode = "Independent Mode";
const QString ActionMotorControl = "Enable Default Motor Control";

@@ -141,7 +141,7 @@ namespace MenuOption {
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit";
const QString OutputMenu = "Display";
const QString Overlays = "Show Overlays";
const QString PackageModel = "Package Model as .fst...";
const QString PackageModel = "Package Avatar as .fst...";
const QString Pair = "Pair";
const QString PhysicsShowOwned = "Highlight Simulation Ownership";
const QString VerboseLogging = "Verbose Logging";

@@ -213,7 +213,6 @@ namespace MenuOption {
const QString TurnWithHead = "Turn using Head";
const QString UseAudioForMouth = "Use Audio for Mouth";
const QString UseCamera = "Use Camera";
const QString Use3DKeyboard = "Use 3D Keyboard";
const QString VelocityFilter = "Velocity Filter";
const QString VisibleToEveryone = "Everyone";
const QString VisibleToFriends = "Friends";
@ -17,6 +17,7 @@
|
|||
#include <QTemporaryDir>
|
||||
|
||||
#include <FSTReader.h>
|
||||
#include <FBXSerializer.h>
|
||||
#include <OffscreenUi.h>
|
||||
|
||||
#include "ModelSelector.h"
|
||||
|
@ -68,7 +69,6 @@ bool ModelPackager::selectModel() {
|
|||
ModelSelector selector;
|
||||
if(selector.exec() == QDialog::Accepted) {
|
||||
_modelFile = selector.getFileInfo();
|
||||
_modelType = selector.getModelType();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
@ -109,7 +109,7 @@ bool ModelPackager::loadModel() {
|
|||
qCDebug(interfaceapp) << "Reading FBX file : " << _fbxInfo.filePath();
|
||||
QByteArray fbxContents = fbx.readAll();
|
||||
|
||||
_hfmModel.reset(readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath()));
|
||||
_hfmModel = FBXSerializer().read(fbxContents, QVariantHash(), _fbxInfo.filePath());
|
||||
|
||||
// make sure we have some basic mappings
|
||||
populateBasicMapping(_mapping, _fbxInfo.filePath(), *_hfmModel);
|
||||
|
@ -122,28 +122,26 @@ bool ModelPackager::loadModel() {
|
|||
|
||||
bool ModelPackager::editProperties() {
|
||||
// open the dialog to configure the rest
|
||||
ModelPropertiesDialog properties(_modelType, _mapping, _modelFile.path(), *_hfmModel);
|
||||
ModelPropertiesDialog properties(_mapping, _modelFile.path(), *_hfmModel);
|
||||
if (properties.exec() == QDialog::Rejected) {
|
||||
return false;
|
||||
}
|
||||
_mapping = properties.getMapping();
|
||||
|
||||
if (_modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL) {
|
||||
// Make sure that a mapping for the root joint has been specified
|
||||
QVariantHash joints = _mapping.value(JOINT_FIELD).toHash();
|
||||
if (!joints.contains("jointRoot")) {
|
||||
qWarning() << "root joint not configured for skeleton.";
|
||||
// Make sure that a mapping for the root joint has been specified
|
||||
QVariantHash joints = _mapping.value(JOINT_FIELD).toHash();
|
||||
if (!joints.contains("jointRoot")) {
|
||||
qWarning() << "root joint not configured for skeleton.";
|
||||
|
||||
QString message = "Your did not configure a root joint for your skeleton model.\n\nPackaging will be canceled.";
|
||||
QMessageBox msgBox;
|
||||
msgBox.setWindowTitle("Model Packager");
|
||||
msgBox.setText(message);
|
||||
msgBox.setStandardButtons(QMessageBox::Ok);
|
||||
msgBox.setIcon(QMessageBox::Warning);
|
||||
msgBox.exec();
|
||||
QString message = "Your did not configure a root joint for your skeleton model.\n\nPackaging will be canceled.";
|
||||
QMessageBox msgBox;
|
||||
msgBox.setWindowTitle("Model Packager");
|
||||
msgBox.setText(message);
|
||||
msgBox.setStandardButtons(QMessageBox::Ok);
|
||||
msgBox.setIcon(QMessageBox::Warning);
|
||||
msgBox.exec();
|
||||
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
|
@ -237,8 +235,6 @@ bool ModelPackager::zipModel() {
|
|||
|
||||
void ModelPackager::populateBasicMapping(QVariantHash& mapping, QString filename, const hfm::Model& hfmModel) {
|
||||
|
||||
bool isBodyType = _modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL;
|
||||
|
||||
// mixamo files - in the event that a mixamo file was edited by some other tool, it's likely the applicationName will
|
||||
// be rewritten, so we detect the existence of several different blendshapes which indicate we're likely a mixamo file
|
||||
bool likelyMixamoFile = hfmModel.applicationName == "mixamo.com" ||
|
||||
|
@ -279,19 +275,17 @@ void ModelPackager::populateBasicMapping(QVariantHash& mapping, QString filename
|
|||
joints.insert("jointNeck", hfmModel.jointIndices.contains("jointNeck") ? "jointNeck" : "Neck");
|
||||
}
|
||||
|
||||
if (isBodyType) {
|
||||
if (!joints.contains("jointRoot")) {
|
||||
joints.insert("jointRoot", "Hips");
|
||||
}
|
||||
if (!joints.contains("jointLean")) {
|
||||
joints.insert("jointLean", "Spine");
|
||||
}
|
||||
if (!joints.contains("jointLeftHand")) {
|
||||
joints.insert("jointLeftHand", "LeftHand");
|
||||
}
|
||||
if (!joints.contains("jointRightHand")) {
|
||||
joints.insert("jointRightHand", "RightHand");
|
||||
}
|
||||
if (!joints.contains("jointRoot")) {
|
||||
joints.insert("jointRoot", "Hips");
|
||||
}
|
||||
if (!joints.contains("jointLean")) {
|
||||
joints.insert("jointLean", "Spine");
|
||||
}
|
||||
if (!joints.contains("jointLeftHand")) {
|
||||
joints.insert("jointLeftHand", "LeftHand");
|
||||
}
|
||||
if (!joints.contains("jointRightHand")) {
|
||||
joints.insert("jointRightHand", "RightHand");
|
||||
}
|
||||
|
||||
if (!joints.contains("jointHead")) {
|
||||
|
@ -301,13 +295,11 @@ void ModelPackager::populateBasicMapping(QVariantHash& mapping, QString filename
|
|||
|
||||
mapping.insert(JOINT_FIELD, joints);
|
||||
|
||||
if (isBodyType) {
|
||||
if (!mapping.contains(FREE_JOINT_FIELD)) {
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "LeftArm");
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "LeftForeArm");
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "RightArm");
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "RightForeArm");
|
||||
}
|
||||
if (!mapping.contains(FREE_JOINT_FIELD)) {
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "LeftArm");
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "LeftForeArm");
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "RightArm");
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, "RightForeArm");
|
||||
}
|
||||
|
||||
// If there are no blendshape mappings, and we detect that this is likely a mixamo file,
|
||||
|
|
|
@ -41,12 +41,11 @@ private:
|
|||
|
||||
QFileInfo _modelFile;
|
||||
QFileInfo _fbxInfo;
|
||||
FSTReader::ModelType _modelType;
|
||||
QString _texDir;
|
||||
QString _scriptDir;
|
||||
|
||||
QVariantHash _mapping;
|
||||
std::unique_ptr<hfm::Model> _hfmModel;
|
||||
std::shared_ptr<hfm::Model> _hfmModel;
|
||||
QStringList _textures;
|
||||
QStringList _scripts;
|
||||
};
|
||||
|
|
|
@ -26,9 +26,8 @@
|
|||
#include <OffscreenUi.h>
|
||||
|
||||
|
||||
ModelPropertiesDialog::ModelPropertiesDialog(FSTReader::ModelType modelType, const QVariantHash& originalMapping,
|
||||
ModelPropertiesDialog::ModelPropertiesDialog(const QVariantHash& originalMapping,
|
||||
const QString& basePath, const HFMModel& hfmModel) :
|
||||
_modelType(modelType),
|
||||
_originalMapping(originalMapping),
|
||||
_basePath(basePath),
|
||||
_hfmModel(hfmModel)
|
||||
|
@ -50,36 +49,19 @@ _hfmModel(hfmModel)
|
|||
_scale->setMaximum(FLT_MAX);
|
||||
_scale->setSingleStep(0.01);
|
||||
|
||||
if (_modelType != FSTReader::ENTITY_MODEL) {
|
||||
if (_modelType == FSTReader::ATTACHMENT_MODEL) {
|
||||
QHBoxLayout* translation = new QHBoxLayout();
|
||||
form->addRow("Translation:", translation);
|
||||
translation->addWidget(_translationX = createTranslationBox());
|
||||
translation->addWidget(_translationY = createTranslationBox());
|
||||
translation->addWidget(_translationZ = createTranslationBox());
|
||||
form->addRow("Pivot About Center:", _pivotAboutCenter = new QCheckBox());
|
||||
form->addRow("Pivot Joint:", _pivotJoint = createJointBox());
|
||||
connect(_pivotAboutCenter, SIGNAL(toggled(bool)), SLOT(updatePivotJoint()));
|
||||
_pivotAboutCenter->setChecked(true);
|
||||
form->addRow("Left Eye Joint:", _leftEyeJoint = createJointBox());
|
||||
form->addRow("Right Eye Joint:", _rightEyeJoint = createJointBox());
|
||||
form->addRow("Neck Joint:", _neckJoint = createJointBox());
|
||||
form->addRow("Root Joint:", _rootJoint = createJointBox());
|
||||
form->addRow("Lean Joint:", _leanJoint = createJointBox());
|
||||
form->addRow("Head Joint:", _headJoint = createJointBox());
|
||||
form->addRow("Left Hand Joint:", _leftHandJoint = createJointBox());
|
||||
form->addRow("Right Hand Joint:", _rightHandJoint = createJointBox());
|
||||
|
||||
} else {
|
||||
form->addRow("Left Eye Joint:", _leftEyeJoint = createJointBox());
|
||||
form->addRow("Right Eye Joint:", _rightEyeJoint = createJointBox());
|
||||
form->addRow("Neck Joint:", _neckJoint = createJointBox());
|
||||
}
|
||||
if (_modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL) {
|
||||
form->addRow("Root Joint:", _rootJoint = createJointBox());
|
||||
form->addRow("Lean Joint:", _leanJoint = createJointBox());
|
||||
form->addRow("Head Joint:", _headJoint = createJointBox());
|
||||
form->addRow("Left Hand Joint:", _leftHandJoint = createJointBox());
|
||||
form->addRow("Right Hand Joint:", _rightHandJoint = createJointBox());
|
||||
|
||||
form->addRow("Free Joints:", _freeJoints = new QVBoxLayout());
|
||||
QPushButton* newFreeJoint = new QPushButton("New Free Joint");
|
||||
_freeJoints->addWidget(newFreeJoint);
|
||||
connect(newFreeJoint, SIGNAL(clicked(bool)), SLOT(createNewFreeJoint()));
|
||||
}
|
||||
}
|
||||
form->addRow("Free Joints:", _freeJoints = new QVBoxLayout());
|
||||
QPushButton* newFreeJoint = new QPushButton("New Free Joint");
|
||||
_freeJoints->addWidget(newFreeJoint);
|
||||
connect(newFreeJoint, SIGNAL(clicked(bool)), SLOT(createNewFreeJoint()));
|
||||
|
||||
QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok |
|
||||
QDialogButtonBox::Cancel | QDialogButtonBox::Reset);
|
||||
|
@ -93,14 +75,9 @@ _hfmModel(hfmModel)
|
|||
reset();
|
||||
}
|
||||
|
||||
|
||||
QString ModelPropertiesDialog::getType() const {
|
||||
return FSTReader::getNameFromType(_modelType);
|
||||
}
|
||||
|
||||
QVariantHash ModelPropertiesDialog::getMapping() const {
|
||||
QVariantHash mapping = _originalMapping;
|
||||
mapping.insert(TYPE_FIELD, getType());
|
||||
mapping.insert(TYPE_FIELD, FSTReader::getNameFromType(FSTReader::HEAD_AND_BODY_MODEL));
|
||||
mapping.insert(NAME_FIELD, _name->text());
|
||||
mapping.insert(TEXDIR_FIELD, _textureDirectory->text());
|
||||
mapping.insert(SCRIPT_FIELD, _scriptDirectory->text());
|
||||
|
@ -113,42 +90,24 @@ QVariantHash ModelPropertiesDialog::getMapping() const {
|
|||
}
|
||||
mapping.insert(JOINT_INDEX_FIELD, jointIndices);
|
||||
|
||||
if (_modelType != FSTReader::ENTITY_MODEL) {
|
||||
QVariantHash joints = mapping.value(JOINT_FIELD).toHash();
|
||||
if (_modelType == FSTReader::ATTACHMENT_MODEL) {
|
||||
glm::vec3 pivot;
|
||||
if (_pivotAboutCenter->isChecked()) {
|
||||
pivot = (_hfmModel.meshExtents.minimum + _hfmModel.meshExtents.maximum) * 0.5f;
|
||||
|
||||
} else if (_pivotJoint->currentIndex() != 0) {
|
||||
pivot = extractTranslation(_hfmModel.joints.at(_pivotJoint->currentIndex() - 1).transform);
|
||||
}
|
||||
mapping.insert(TRANSLATION_X_FIELD, -pivot.x * (float)_scale->value() + (float)_translationX->value());
|
||||
mapping.insert(TRANSLATION_Y_FIELD, -pivot.y * (float)_scale->value() + (float)_translationY->value());
|
||||
mapping.insert(TRANSLATION_Z_FIELD, -pivot.z * (float)_scale->value() + (float)_translationZ->value());
|
||||
|
||||
} else {
|
||||
insertJointMapping(joints, "jointEyeLeft", _leftEyeJoint->currentText());
|
||||
insertJointMapping(joints, "jointEyeRight", _rightEyeJoint->currentText());
|
||||
insertJointMapping(joints, "jointNeck", _neckJoint->currentText());
|
||||
}
|
||||
QVariantHash joints = mapping.value(JOINT_FIELD).toHash();
|
||||
insertJointMapping(joints, "jointEyeLeft", _leftEyeJoint->currentText());
|
||||
insertJointMapping(joints, "jointEyeRight", _rightEyeJoint->currentText());
|
||||
insertJointMapping(joints, "jointNeck", _neckJoint->currentText());
|
||||
|
||||
|
||||
if (_modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL) {
|
||||
insertJointMapping(joints, "jointRoot", _rootJoint->currentText());
|
||||
insertJointMapping(joints, "jointLean", _leanJoint->currentText());
|
||||
insertJointMapping(joints, "jointHead", _headJoint->currentText());
|
||||
insertJointMapping(joints, "jointLeftHand", _leftHandJoint->currentText());
|
||||
insertJointMapping(joints, "jointRightHand", _rightHandJoint->currentText());
|
||||
insertJointMapping(joints, "jointRoot", _rootJoint->currentText());
|
||||
insertJointMapping(joints, "jointLean", _leanJoint->currentText());
|
||||
insertJointMapping(joints, "jointHead", _headJoint->currentText());
|
||||
insertJointMapping(joints, "jointLeftHand", _leftHandJoint->currentText());
|
||||
insertJointMapping(joints, "jointRightHand", _rightHandJoint->currentText());
|
||||
|
||||
mapping.remove(FREE_JOINT_FIELD);
|
||||
for (int i = 0; i < _freeJoints->count() - 1; i++) {
|
||||
QComboBox* box = static_cast<QComboBox*>(_freeJoints->itemAt(i)->widget()->layout()->itemAt(0)->widget());
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, box->currentText());
|
||||
}
|
||||
}
|
||||
mapping.insert(JOINT_FIELD, joints);
|
||||
mapping.remove(FREE_JOINT_FIELD);
|
||||
for (int i = 0; i < _freeJoints->count() - 1; i++) {
|
||||
QComboBox* box = static_cast<QComboBox*>(_freeJoints->itemAt(i)->widget()->layout()->itemAt(0)->widget());
|
||||
mapping.insertMulti(FREE_JOINT_FIELD, box->currentText());
|
||||
}
|
||||
mapping.insert(JOINT_FIELD, joints);
|
||||
|
||||
return mapping;
|
||||
}

@@ -165,36 +124,23 @@ void ModelPropertiesDialog::reset() {
QVariantHash jointHash = _originalMapping.value(JOINT_FIELD).toHash();
if (_modelType != FSTReader::ENTITY_MODEL) {
if (_modelType == FSTReader::ATTACHMENT_MODEL) {
_translationX->setValue(_originalMapping.value(TRANSLATION_X_FIELD).toDouble());
_translationY->setValue(_originalMapping.value(TRANSLATION_Y_FIELD).toDouble());
_translationZ->setValue(_originalMapping.value(TRANSLATION_Z_FIELD).toDouble());
_pivotAboutCenter->setChecked(true);
_pivotJoint->setCurrentIndex(0);
setJointText(_leftEyeJoint, jointHash.value("jointEyeLeft").toString());
setJointText(_rightEyeJoint, jointHash.value("jointEyeRight").toString());
setJointText(_neckJoint, jointHash.value("jointNeck").toString());
} else {
setJointText(_leftEyeJoint, jointHash.value("jointEyeLeft").toString());
setJointText(_rightEyeJoint, jointHash.value("jointEyeRight").toString());
setJointText(_neckJoint, jointHash.value("jointNeck").toString());
}
setJointText(_rootJoint, jointHash.value("jointRoot").toString());
setJointText(_leanJoint, jointHash.value("jointLean").toString());
setJointText(_headJoint, jointHash.value("jointHead").toString());
setJointText(_leftHandJoint, jointHash.value("jointLeftHand").toString());
setJointText(_rightHandJoint, jointHash.value("jointRightHand").toString());
if (_modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL) {
setJointText(_rootJoint, jointHash.value("jointRoot").toString());
setJointText(_leanJoint, jointHash.value("jointLean").toString());
setJointText(_headJoint, jointHash.value("jointHead").toString());
setJointText(_leftHandJoint, jointHash.value("jointLeftHand").toString());
setJointText(_rightHandJoint, jointHash.value("jointRightHand").toString());
while (_freeJoints->count() > 1) {
delete _freeJoints->itemAt(0)->widget();
}
foreach (const QVariant& joint, _originalMapping.values(FREE_JOINT_FIELD)) {
QString jointName = joint.toString();
if (_hfmModel.jointIndices.contains(jointName)) {
createNewFreeJoint(jointName);
}
}
while (_freeJoints->count() > 1) {
delete _freeJoints->itemAt(0)->widget();
}
foreach (const QVariant& joint, _originalMapping.values(FREE_JOINT_FIELD)) {
QString jointName = joint.toString();
if (_hfmModel.jointIndices.contains(jointName)) {
createNewFreeJoint(jointName);
}
}
}

@@ -14,7 +14,7 @@
#include <QDialog>
#include <FBXReader.h>
#include <hfm/HFM.h>
#include <FSTReader.h>
#include "ui/ModelsBrowser.h"

@@ -29,7 +29,7 @@ class ModelPropertiesDialog : public QDialog {
Q_OBJECT
public:
ModelPropertiesDialog(FSTReader::ModelType modelType, const QVariantHash& originalMapping,
ModelPropertiesDialog(const QVariantHash& originalMapping,
const QString& basePath, const HFMModel& hfmModel);
QVariantHash getMapping() const;

@@ -45,9 +45,7 @@ private:
QComboBox* createJointBox(bool withNone = true) const;
QDoubleSpinBox* createTranslationBox() const;
void insertJointMapping(QVariantHash& joints, const QString& joint, const QString& name) const;
QString getType() const;
FSTReader::ModelType _modelType;
QVariantHash _originalMapping;
QString _basePath;
HFMModel _hfmModel;

@@ -71,4 +69,4 @@ private:
QVBoxLayout* _freeJoints = nullptr;
};
#endif // hifi_ModelPropertiesDialog_h
#endif // hifi_ModelPropertiesDialog_h

@@ -19,7 +19,6 @@
#include <QStandardPaths>
static const QString AVATAR_HEAD_AND_BODY_STRING = "Avatar Body with Head";
static const QString AVATAR_ATTACHEMENT_STRING = "Avatar Attachment";
static const QString ENTITY_MODEL_STRING = "Entity Model";
ModelSelector::ModelSelector() {

@@ -27,18 +26,11 @@ ModelSelector::ModelSelector() {
setWindowTitle("Select Model");
setLayout(form);
setWindowFlags(windowFlags() & ~Qt::WindowContextHelpButtonHint);
_browseButton = new QPushButton("Browse", this);
connect(_browseButton, &QPushButton::clicked, this, &ModelSelector::browse);
form->addRow("Model File:", _browseButton);
_modelType = new QComboBox(this);
_modelType->addItem(AVATAR_HEAD_AND_BODY_STRING);
_modelType->addItem(AVATAR_ATTACHEMENT_STRING);
_modelType->addItem(ENTITY_MODEL_STRING);
form->addRow("Model Type:", _modelType);
QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok | QDialogButtonBox::Cancel, this);
connect(buttons, &QDialogButtonBox::accepted, this, &ModelSelector::accept);
connect(buttons, &QDialogButtonBox::rejected, this, &QDialog::reject);

@@ -49,19 +41,6 @@ QFileInfo ModelSelector::getFileInfo() const {
return _modelFile;
}
FSTReader::ModelType ModelSelector::getModelType() const {
QString text = _modelType->currentText();
if (text == AVATAR_HEAD_AND_BODY_STRING) {
return FSTReader::HEAD_AND_BODY_MODEL;
} else if (text == AVATAR_ATTACHEMENT_STRING) {
return FSTReader::ATTACHMENT_MODEL;
} else if (text == ENTITY_MODEL_STRING) {
return FSTReader::ENTITY_MODEL;
}
Q_UNREACHABLE();
}
void ModelSelector::accept() {
if (!_modelFile.isFile()) {
return;

@@ -29,7 +29,6 @@ public:
ModelSelector();
QFileInfo getFileInfo() const;
FSTReader::ModelType getModelType() const;
public slots:
virtual void accept() override;

@@ -40,7 +39,6 @@ public:
private:
QFileInfo _modelFile;
QPushButton* _browseButton;
QComboBox* _modelType;
};
#endif // hifi_ModelSelector_h

@@ -171,7 +171,7 @@ void SecondaryCameraJobConfig::setOrientation(glm::quat orient) {
}
void SecondaryCameraJobConfig::enableSecondaryCameraRenderConfigs(bool enabled) {
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->setEnabled(enabled);
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>("SecondaryCameraJob")->setEnabled(enabled);
setEnabled(enabled);
}

@@ -187,11 +187,13 @@ public:
void run(const render::RenderContextPointer& renderContext, const RenderArgsPointer& cachedArgs) {
auto args = renderContext->args;
if (cachedArgs) {
args->_blitFramebuffer = cachedArgs->_blitFramebuffer;
args->_viewport = cachedArgs->_viewport;
args->popViewFrustum();
args->_displayMode = cachedArgs->_displayMode;
args->_renderMode = cachedArgs->_renderMode;
}
args->popViewFrustum();
gpu::doInBatch("EndSecondaryCameraFrame::run", args->_context, [&](gpu::Batch& batch) {
batch.restoreContextStereo();

@@ -36,114 +36,6 @@
using namespace std;
void renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show center of world
static const glm::vec3 RED(1.0f, 0.0f, 0.0f);
static const glm::vec3 GREEN(0.0f, 1.0f, 0.0f);
static const glm::vec3 BLUE(0.0f, 0.0f, 1.0f);
static const glm::vec3 GREY(0.5f, 0.5f, 0.5f);
static const glm::vec4 GREY4(0.5f, 0.5f, 0.5f, 1.0f);
static const glm::vec4 DASHED_RED(1.0f, 0.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_GREEN(0.0f, 1.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_BLUE(0.0f, 0.0f, 1.0f, 1.0f);
static const float DASH_LENGTH = 1.0f;
static const float GAP_LENGTH = 1.0f;
auto transform = Transform{};
static std::array<int, 18> geometryIds;
static std::once_flag initGeometryIds;
std::call_once(initGeometryIds, [&] {
for (size_t i = 0; i < geometryIds.size(); ++i) {
geometryIds[i] = geometryCache->allocateID();
}
});
batch.setModelTransform(transform);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED, geometryIds[0]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(-HALF_TREE_SCALE, 0.0f, 0.0f), DASHED_RED, DASH_LENGTH, GAP_LENGTH, geometryIds[1]);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN, geometryIds[2]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -HALF_TREE_SCALE, 0.0f), DASHED_GREEN, DASH_LENGTH, GAP_LENGTH, geometryIds[3]);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE, geometryIds[4]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, -HALF_TREE_SCALE), DASHED_BLUE, DASH_LENGTH, GAP_LENGTH, geometryIds[5]);
// X center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY, geometryIds[6]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY, geometryIds[7]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY, geometryIds[8]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY, geometryIds[9]);
// Z center boundaries
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE), glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY, geometryIds[10]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE), glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY, geometryIds[11]);
geometryCache->renderLine(batch, glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY, geometryIds[12]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY, geometryIds[13]);
// Center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY, geometryIds[14]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY, geometryIds[15]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY, geometryIds[16]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY, geometryIds[17]);
geometryCache->renderWireCubeInstance(args, batch, GREY4);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;
transform = Transform().setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREEN);
transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, BLUE);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREY);
}
// Do some basic timing tests and report the results
void runTimingTests() {
// How long does it take to make a call to get the time?

@@ -15,14 +15,9 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <gpu/Batch.h>
#include <render/Forward.h>
class ShapeEntityItem;
class ShapeInfo;
void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
void runTimingTests();
void runUnitTests();

@@ -139,7 +139,7 @@ MyAvatar::MyAvatar(QThread* thread) :
_flyingHMDSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "flyingHMD", _flyingPrefHMD),
_avatarEntityCountSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "avatarEntityData" << "size", 0)
{
_clientTraitsHandler = std::unique_ptr<ClientTraitsHandler>(new ClientTraitsHandler(this));
_clientTraitsHandler.reset(new ClientTraitsHandler(this));
// give the pointer to our head to inherited _headData variable from AvatarData
_headData = new MyHead(this);

@@ -807,46 +807,6 @@ void MyAvatar::simulate(float deltaTime) {
// before we perform rig animations and IK.
updateSensorToWorldMatrix();
// if we detect the hand controller is at rest, i.e. lying on the table, or the hand is too far away from the hmd
// disable the associated hand controller input.
{
// NOTE: all poses are in sensor space.
auto leftHandIter = _controllerPoseMap.find(controller::Action::LEFT_HAND);
if (leftHandIter != _controllerPoseMap.end() && leftHandIter->second.isValid()) {
_leftHandAtRestDetector.update(leftHandIter->second.getTranslation(), leftHandIter->second.getRotation());
if (_leftHandAtRestDetector.isAtRest()) {
leftHandIter->second.valid = false;
}
} else {
_leftHandAtRestDetector.invalidate();
}
auto rightHandIter = _controllerPoseMap.find(controller::Action::RIGHT_HAND);
if (rightHandIter != _controllerPoseMap.end() && rightHandIter->second.isValid()) {
_rightHandAtRestDetector.update(rightHandIter->second.getTranslation(), rightHandIter->second.getRotation());
if (_rightHandAtRestDetector.isAtRest()) {
rightHandIter->second.valid = false;
}
} else {
_rightHandAtRestDetector.invalidate();
}
auto headIter = _controllerPoseMap.find(controller::Action::HEAD);
// The 99th percentile man has a spine-to-fingertip to height ratio of 0.45. Let's increase that by about 10% to 0.5,
// then measure the distance from the center of the eyes to the fingertips to come up with this ratio.
// From "The Measure of Man and Woman: Human Factors in Design, Revised Edition" by Alvin R. Tilley, Henry Dreyfuss Associates
const float MAX_HEAD_TO_HAND_DISTANCE_RATIO = 0.52f;
float maxHeadHandDistance = getUserHeight() * MAX_HEAD_TO_HAND_DISTANCE_RATIO;
if (glm::length(headIter->second.getTranslation() - leftHandIter->second.getTranslation()) > maxHeadHandDistance) {
leftHandIter->second.valid = false;
}
if (glm::length(headIter->second.getTranslation() - rightHandIter->second.getTranslation()) > maxHeadHandDistance) {
rightHandIter->second.valid = false;
}
}
{
PerformanceTimer perfTimer("skeleton");
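
The block removed above relied on a simple distance heuristic. As a standalone illustration of that check (the free-function wrapper and parameter names are simplifications, not the engine's API), the test amounts to:

    #include <glm/glm.hpp>

    // Sketch of the heuristic in the removed block: a hand pose is treated as implausible
    // (and its controller input ignored) when the hand is farther from the head than roughly
    // half the user's height. The constant matches the diff; the wrapper is illustrative only.
    bool isHandPosePlausible(const glm::vec3& headPos, const glm::vec3& handPos, float userHeight) {
        const float MAX_HEAD_TO_HAND_DISTANCE_RATIO = 0.52f;   // ~0.45 anthropometric ratio + ~10% margin
        const float maxHeadHandDistance = userHeight * MAX_HEAD_TO_HAND_DISTANCE_RATIO;
        return glm::length(headPos - handPos) <= maxHeadHandDistance;
    }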
interface/src/graphics/GraphicsEngine.cpp (new file, 301 lines)

@@ -0,0 +1,301 @@
//
//  GraphicsEngine.cpp
//
//  Created by Sam Gateau on 29/6/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GraphicsEngine.h"

#include <shared/GlobalAppProperties.h>

#include "WorldBox.h"
#include "LODManager.h"

#include <GeometryCache.h>
#include <TextureCache.h>
#include <FramebufferCache.h>
#include <UpdateSceneTask.h>
#include <RenderViewTask.h>
#include <SecondaryCamera.h>

#include "RenderEventHandler.h"

#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/gl/GLBackend.h>
#include <display-plugins/DisplayPlugin.h>

#include <display-plugins/CompositorHelper.h>
#include <QMetaObject>
#include "ui/Stats.h"
#include "Application.h"

GraphicsEngine::GraphicsEngine() {
}

GraphicsEngine::~GraphicsEngine() {
}

void GraphicsEngine::initializeGPU(GLWidget* glwidget) {
_renderEventHandler = new RenderEventHandler(
[this]() { return this->shouldPaint(); },
[this]() { this->render_performFrame(); }
);

// Requires the window context, because that's what's used in the actual rendering
// and the GPU backend will make things like the VAO which cannot be shared across
// contexts
glwidget->makeCurrent();
gpu::Context::init<gpu::gl::GLBackend>();
glwidget->makeCurrent();
_gpuContext = std::make_shared<gpu::Context>();

DependencyManager::get<TextureCache>()->setGPUContext(_gpuContext);
}

void GraphicsEngine::initializeRender(bool disableDeferred) {
// Set up the render engine
render::CullFunctor cullFunctor = LODManager::shouldRender;
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
#ifndef Q_OS_ANDROID
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor, !disableDeferred);
#endif
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, !disableDeferred, render::ItemKey::TAG_BITS_0, render::ItemKey::TAG_BITS_0);
_renderEngine->load();
_renderEngine->registerScene(_renderScene);

// Now that OpenGL is initialized, we are sure we have a valid context and can create the various pipeline shaders with success.
DependencyManager::get<GeometryCache>()->initializeShapePipelines();
}

void GraphicsEngine::startup() {
static_cast<RenderEventHandler*>(_renderEventHandler)->resumeThread();
}

void GraphicsEngine::shutdown() {
// The cleanup process enqueues the transactions but does not process them. Calling this here will force the actual
// removal of the items.
// See https://highfidelity.fogbugz.com/f/cases/5328
_renderScene->enqueueFrame(); // flush all the transactions
_renderScene->processTransactionQueue(); // process and apply deletions

_gpuContext->shutdown();

// shutdown render engine
_renderScene = nullptr;
_renderEngine = nullptr;

_renderEventHandler->deleteLater();
}

void GraphicsEngine::render_runRenderFrame(RenderArgs* renderArgs) {
PROFILE_RANGE(render, __FUNCTION__);
PerformanceTimer perfTimer("render");

// Make sure the WorldBox is in the scene
// For the record, this one RenderItem is the first one we created and added to the scene.
// We could move that code elsewhere but you know...
if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
render::Transaction transaction;
auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);

WorldBoxRenderData::_item = _renderScene->allocateID();

transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
_renderScene->enqueueTransaction(transaction);
}

{
_renderEngine->getRenderContext()->args = renderArgs;
_renderEngine->run();
}
}

static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;

bool GraphicsEngine::shouldPaint() const {
auto displayPlugin = qApp->getActiveDisplayPlugin();

#ifdef DEBUG_PAINT_DELAY
static uint64_t paintDelaySamples{ 0 };
static uint64_t paintDelayUsecs{ 0 };

paintDelayUsecs += displayPlugin->getPaintDelayUsecs();

static const int PAINT_DELAY_THROTTLE = 1000;
if (++paintDelaySamples % PAINT_DELAY_THROTTLE == 0) {
qCDebug(interfaceapp).nospace() <<
"Paint delay (" << paintDelaySamples << " samples): " <<
(float)paintDelaySamples / paintDelayUsecs << "us";
}
#endif

// Throttle if requested
//if (displayPlugin->isThrottled() && (_graphicsEngine._renderEventHandler->_lastTimeRendered.elapsed() < THROTTLED_SIM_FRAME_PERIOD_MS)) {
if (displayPlugin->isThrottled() &&
(static_cast<RenderEventHandler*>(_renderEventHandler)->_lastTimeRendered.elapsed() < THROTTLED_SIM_FRAME_PERIOD_MS)) {
return false;
}

return true;
}
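
A minimal standalone sketch of the throttling check above, using Qt's QElapsedTimer directly and leaving out the surrounding engine types (the MSECS_PER_SECOND value is an assumption about the engine-wide constant):

    #include <QElapsedTimer>

    // Sketch of the throttle in GraphicsEngine::shouldPaint(): skip a frame while the display
    // plugin asks to be throttled and the previous render happened less than one throttled
    // frame period ago. isThrottled stands in for the display plugin query.
    bool shouldPaintSketch(bool isThrottled, const QElapsedTimer& lastTimeRendered) {
        static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
        static const int MSECS_PER_SECOND = 1000;   // assumed value of the engine constant
        static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;
        if (isThrottled && lastTimeRendered.elapsed() < THROTTLED_SIM_FRAME_PERIOD_MS) {
            return false;   // too soon since the last frame; stay idle
        }
        return true;
    }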

bool GraphicsEngine::checkPendingRenderEvent() {
bool expected = false;
return (_renderEventHandler && static_cast<RenderEventHandler*>(_renderEventHandler)->_pendingRenderEvent.compare_exchange_strong(expected, true));
}

void GraphicsEngine::render_performFrame() {
// Some plugins process message events, allowing paintGL to be called reentrantly.

_renderFrameCount++;

auto lastPaintBegin = usecTimestampNow();
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
PerformanceTimer perfTimer("paintGL");

DisplayPluginPointer displayPlugin;
{
PROFILE_RANGE(render, "/getActiveDisplayPlugin");
displayPlugin = qApp->getActiveDisplayPlugin();
}

{
PROFILE_RANGE(render, "/pluginBeginFrameRender");
// If a display plugin loses its underlying support, it
// needs to be able to signal us to not use it
if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
QMetaObject::invokeMethod(qApp, "updateDisplayMode");
return;
}
}

RenderArgs renderArgs;
glm::mat4 HMDSensorPose;
glm::mat4 eyeToWorld;
glm::mat4 sensorToWorld;

bool isStereo;
glm::mat4 stereoEyeOffsets[2];
glm::mat4 stereoEyeProjections[2];

{
QMutexLocker viewLocker(&_renderArgsMutex);
renderArgs = _appRenderArgs._renderArgs;

// don't render if there is no context.
if (!_appRenderArgs._renderArgs._context) {
return;
}

HMDSensorPose = _appRenderArgs._headPose;
eyeToWorld = _appRenderArgs._eyeToWorld;
sensorToWorld = _appRenderArgs._sensorToWorld;
isStereo = _appRenderArgs._isStereo;
for_each_eye([&](Eye eye) {
stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
});
}

{
PROFILE_RANGE(render, "/gpuContextReset");
getGPUContext()->beginFrame(_appRenderArgs._view, HMDSensorPose);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch("Application_render::gpuContextReset", getGPUContext(), [&](gpu::Batch& batch) {
batch.resetStages();
});
}

{
PROFILE_RANGE(render, "/renderOverlay");
PerformanceTimer perfTimer("renderOverlay");
// NOTE: There is no batch associated with this renderArgs
// the ApplicationOverlay class assumes its viewport is set up to be the device size
renderArgs._viewport = glm::ivec4(0, 0, qApp->getDeviceSize());
qApp->getApplicationOverlay().renderOverlay(&renderArgs);
}

{
PROFILE_RANGE(render, "/updateCompositor");
qApp->getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
}

gpu::FramebufferPointer finalFramebuffer;
QSize finalFramebufferSize;
{
PROFILE_RANGE(render, "/getOutputFramebuffer");
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
finalFramebufferSize = framebufferCache->getFrameBufferSize();
// Final framebuffer that will be handed to the display-plugin
finalFramebuffer = framebufferCache->getFramebuffer();
}

{
if (isStereo) {
renderArgs._context->enableStereo(true);
renderArgs._context->setStereoProjections(stereoEyeProjections);
renderArgs._context->setStereoViews(stereoEyeOffsets);
}

renderArgs._hudOperator = displayPlugin->getHUDOperator();
renderArgs._hudTexture = qApp->getApplicationOverlay().getOverlayTexture();
renderArgs._blitFramebuffer = finalFramebuffer;
render_runRenderFrame(&renderArgs);
}

auto frame = getGPUContext()->endFrame();
frame->frameIndex = _renderFrameCount;
frame->framebuffer = finalFramebuffer;
frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
auto frameBufferCache = DependencyManager::get<FramebufferCache>();
if (frameBufferCache) {
frameBufferCache->releaseFramebuffer(framebuffer);
}
};
// deliver final scene rendering commands to the display plugin
{
PROFILE_RANGE(render, "/pluginOutput");
PerformanceTimer perfTimer("pluginOutput");
_renderLoopCounter.increment();
displayPlugin->submitFrame(frame);
}

// Reset the framebuffer and stereo state
renderArgs._blitFramebuffer.reset();
renderArgs._context->enableStereo(false);

{
auto stats = Stats::getInstance();
if (stats) {
stats->setRenderDetails(renderArgs._details);
}
}

uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
_frameTimingsScriptingInterface.addValue(lastPaintDuration);
}

void GraphicsEngine::editRenderArgs(RenderArgsEditor editor) {
QMutexLocker renderLocker(&_renderArgsMutex);
editor(_appRenderArgs);
}
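
The editRenderArgs/RenderArgsEditor pair is a small "edit under lock" pattern: the application mutates the shared AppRenderArgs only through a lambda run while the mutex is held, and the render thread copies the struct out under the same mutex. A generic sketch of the idea, with simplified types rather than the engine's classes:

    #include <QMutex>
    #include <QMutexLocker>
    #include <functional>

    // Generic sketch of the pattern behind GraphicsEngine::editRenderArgs().
    // FrameState stands in for AppRenderArgs; the class name is illustrative.
    struct FrameState { int frameIndex = 0; bool stereo = false; };

    class SharedFrameState {
    public:
        using Editor = std::function<void(FrameState&)>;

        // Writer side: mutate the state only inside the lambda, while the mutex is held.
        void edit(Editor editor) {
            QMutexLocker locker(&_mutex);
            editor(_state);
        }

        // Reader side (e.g. the render thread): take a consistent copy under the same mutex.
        FrameState snapshot() const {
            QMutexLocker locker(&_mutex);
            return _state;
        }

    private:
        mutable QMutex _mutex;
        FrameState _state;
    };

    // Usage, mirroring how per-frame data might be fed to the renderer:
    //   shared.edit([&](FrameState& s) { s.frameIndex++; s.stereo = hmdActive; });
    //   FrameState copy = shared.snapshot();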

interface/src/graphics/GraphicsEngine.h (new file, 90 lines)

@@ -0,0 +1,90 @@
//
//  GraphicsEngine.h
//
//  Created by Sam Gateau on 29/6/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GraphicsEngine_h
#define hifi_GraphicsEngine_h

#include <gl/OffscreenGLCanvas.h>
#include <gl/GLWidget.h>
#include <qmutex.h>

#include <render/Engine.h>

#include <OctreeConstants.h>
#include <shared/RateCounter.h>

#include "FrameTimingsScriptingInterface.h"

struct AppRenderArgs {
render::Args _renderArgs;
glm::mat4 _eyeToWorld;
glm::mat4 _view;
glm::mat4 _eyeOffsets[2];
glm::mat4 _eyeProjections[2];
glm::mat4 _headPose;
glm::mat4 _sensorToWorld;
float _sensorToWorldScale{ 1.0f };
bool _isStereo{ false };
};

using RenderArgsEditor = std::function <void(AppRenderArgs&)>;

class GraphicsEngine {
public:
GraphicsEngine();
~GraphicsEngine();

void initializeGPU(GLWidget*);
void initializeRender(bool disableDeferred);
void startup();
void shutdown();

render::ScenePointer getRenderScene() const { return _renderScene; }
render::EnginePointer getRenderEngine() const { return _renderEngine; }
gpu::ContextPointer getGPUContext() const { return _gpuContext; }

// Same as the one in application
bool shouldPaint() const;
bool checkPendingRenderEvent();

size_t getRenderFrameCount() const { return _renderFrameCount; }
float getRenderLoopRate() const { return _renderLoopCounter.rate(); }

// Feed Graphics Engine with new frame configuration
void editRenderArgs(RenderArgsEditor editor);

private:
// Thread specific calls
void render_performFrame();
void render_runRenderFrame(RenderArgs* renderArgs);

protected:
mutable QMutex _renderArgsMutex{ QMutex::Recursive };
AppRenderArgs _appRenderArgs;

RateCounter<500> _renderLoopCounter;

uint32_t _renderFrameCount{ 0 };
render::ScenePointer _renderScene{ new render::Scene(glm::vec3(-0.5f * (float)TREE_SCALE), (float)TREE_SCALE) };
render::EnginePointer _renderEngine{ new render::RenderEngine() };

gpu::ContextPointer _gpuContext; // initialized during window creation

QObject* _renderEventHandler{ nullptr };
friend class RenderEventHandler;

FrameTimingsScriptingInterface _frameTimingsScriptingInterface;

friend class Application;
};

#endif // hifi_GraphicsEngine_h

interface/src/graphics/RenderEventHandler.cpp (new file, 58 lines)

@@ -0,0 +1,58 @@
//
//  RenderEventHandler.cpp
//
//  Created by Bradley Austin Davis on 29/6/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderEventHandler.h"

#include "Application.h"
#include <shared/GlobalAppProperties.h>
#include <shared/QtHelpers.h>

#include "CrashHandler.h"

RenderEventHandler::RenderEventHandler(CheckCall checkCall, RenderCall renderCall) :
_checkCall(checkCall),
_renderCall(renderCall)
{
// Transfer to a new thread
moveToNewNamedThread(this, "RenderThread", [this](QThread* renderThread) {
hifi::qt::addBlockingForbiddenThread("Render", renderThread);
_lastTimeRendered.start();
}, std::bind(&RenderEventHandler::initialize, this), QThread::HighestPriority);
}

void RenderEventHandler::initialize() {
setObjectName("Render");
PROFILE_SET_THREAD_NAME("Render");
setCrashAnnotation("render_thread_id", std::to_string((size_t)QThread::currentThreadId()));
}

void RenderEventHandler::resumeThread() {
_pendingRenderEvent = false;
}

void RenderEventHandler::render() {
if (_checkCall()) {
_lastTimeRendered.start();
_renderCall();
}
}

bool RenderEventHandler::event(QEvent* event) {
switch ((int)event->type()) {
case ApplicationEvent::Render:
render();
_pendingRenderEvent.store(false);
return true;

default:
break;
}
return Parent::event(event);
}
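
checkPendingRenderEvent() on the GraphicsEngine side uses compare_exchange_strong on _pendingRenderEvent so that at most one Render event is outstanding, and the handler clears the flag once the frame has been serviced. A self-contained sketch of that handshake with plain std::atomic, leaving out the Qt event machinery:

    #include <atomic>

    // Sketch of the single-outstanding-request handshake between checkPendingRenderEvent()
    // and RenderEventHandler::event(). The producer posts a new "render" request only when
    // the flag flips from false to true; the consumer resets it after the frame is done.
    struct RenderRequestGate {
        std::atomic<bool> pending{ false };

        // Producer side: returns true at most once per serviced frame.
        bool tryRequest() {
            bool expected = false;
            return pending.compare_exchange_strong(expected, true);
        }

        // Consumer side: call after the frame has been rendered.
        void markServiced() {
            pending.store(false);
        }
    };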

interface/src/graphics/RenderEventHandler.h (new file, 52 lines)

@@ -0,0 +1,52 @@
//
//  RenderEventHandler.h
//
//  Created by Bradley Austin Davis on 29/6/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RenderEventHandler_h
#define hifi_RenderEventHandler_h

#include <QEvent>
#include <QElapsedTimer>
#include "gl/OffscreenGLCanvas.h"

enum ApplicationEvent {
// Execute a lambda function
Lambda = QEvent::User + 1,
// Trigger the next render
Render,
// Trigger the next idle
Idle,
};

class RenderEventHandler : public QObject {
using Parent = QObject;
Q_OBJECT
public:

using CheckCall = std::function <bool()>;
using RenderCall = std::function <void()>;

CheckCall _checkCall;
RenderCall _renderCall;

RenderEventHandler(CheckCall checkCall, RenderCall renderCall);

QElapsedTimer _lastTimeRendered;
std::atomic<bool> _pendingRenderEvent{ true };

void resumeThread();

private:
void initialize();

void render();

bool event(QEvent* event) override;
};

#endif // #include hifi_RenderEventHandler_h

interface/src/graphics/WorldBox.cpp (new file, 138 lines)

@@ -0,0 +1,138 @@
//
//  WorldBox.cpp
//
//  Created by Sam Gateau on 01/07/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "WorldBox.h"

#include "OctreeConstants.h"

render::ItemID WorldBoxRenderData::_item{ render::Item::INVALID_ITEM_ID };

namespace render {
template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape().withTagBits(ItemKey::TAG_BITS_0 | ItemKey::TAG_BITS_1); }
template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
if (Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
PerformanceTimer perfTimer("worldBox");

auto& batch = *args->_batch;
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
WorldBoxRenderData::renderWorldBox(args, batch);
}
}
}

void WorldBoxRenderData::renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();

// Show center of world
static const glm::vec3 RED(1.0f, 0.0f, 0.0f);
static const glm::vec3 GREEN(0.0f, 1.0f, 0.0f);
static const glm::vec3 BLUE(0.0f, 0.0f, 1.0f);
static const glm::vec3 GREY(0.5f, 0.5f, 0.5f);
static const glm::vec4 GREY4(0.5f, 0.5f, 0.5f, 1.0f);

static const glm::vec4 DASHED_RED(1.0f, 0.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_GREEN(0.0f, 1.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_BLUE(0.0f, 0.0f, 1.0f, 1.0f);
static const float DASH_LENGTH = 1.0f;
static const float GAP_LENGTH = 1.0f;
auto transform = Transform{};
static std::array<int, 18> geometryIds;
static std::once_flag initGeometryIds;
std::call_once(initGeometryIds, [&] {
for (size_t i = 0; i < geometryIds.size(); ++i) {
geometryIds[i] = geometryCache->allocateID();
}
});

batch.setModelTransform(transform);

geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED, geometryIds[0]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(-HALF_TREE_SCALE, 0.0f, 0.0f), DASHED_RED, DASH_LENGTH, GAP_LENGTH, geometryIds[1]);

geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN, geometryIds[2]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -HALF_TREE_SCALE, 0.0f), DASHED_GREEN, DASH_LENGTH, GAP_LENGTH, geometryIds[3]);

geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE, geometryIds[4]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, -HALF_TREE_SCALE), DASHED_BLUE, DASH_LENGTH, GAP_LENGTH, geometryIds[5]);

// X center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY, geometryIds[6]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY, geometryIds[7]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY, geometryIds[8]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY, geometryIds[9]);

// Z center boundaries
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE), glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY, geometryIds[10]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE), glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY, geometryIds[11]);
geometryCache->renderLine(batch, glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY, geometryIds[12]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY, geometryIds[13]);

// Center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY, geometryIds[14]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY, geometryIds[15]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY, geometryIds[16]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY, geometryIds[17]);

geometryCache->renderWireCubeInstance(args, batch, GREY4);

// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;

transform = Transform().setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);

transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, RED);

transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREEN);

transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, BLUE);

transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(args, batch, GREY);
}
interface/src/graphics/WorldBox.h (new file, 43 lines)

@@ -0,0 +1,43 @@
//
//  WorldBox.h
//
//  Created by Sam Gateau on 01/07/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_WorldBox_h
#define hifi_WorldBox_h

#include <PerfStat.h>

#include <gpu/Batch.h>
#include <render/Forward.h>

#include <render/Item.h>
#include <GeometryCache.h>
#include "Menu.h"

class WorldBoxRenderData {
public:
typedef render::Payload<WorldBoxRenderData> Payload;
typedef Payload::DataPointer Pointer;

int _val = 0;
static render::ItemID _item; // unique WorldBoxRenderData

static void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
};

namespace render {
template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff);
template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff);
template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args);
}

#endif // hifi_WorldBox_h
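
The WorldBox files show the engine's render-payload pattern: a plain data class plus template specializations of payloadGetKey, payloadGetBound and payloadRender, which the scene drives once the item has been allocated and reset in a transaction. The registration step, condensed from the GraphicsEngine::render_runRenderFrame() hunk above (renderScene is whatever render::ScenePointer the caller already holds; the wrapper function name is illustrative):

    // Condensed from render_runRenderFrame(): add the WorldBox item to the scene exactly once.
    void ensureWorldBoxItem(const render::ScenePointer& renderScene) {
        if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
            render::Transaction transaction;
            auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
            auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);

            WorldBoxRenderData::_item = renderScene->allocateID();

            transaction.resetItem(WorldBoxRenderData::_item, worldBoxRenderPayload);
            renderScene->enqueueTransaction(transaction);
        }
    }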

@@ -107,7 +107,7 @@ void SafeLanding::noteReceivedsequenceNumber(int sequenceNumber) {
}

bool SafeLanding::isLoadSequenceComplete() {
if (isEntityLoadingComplete() && isSequenceNumbersComplete()) {
if ((isEntityLoadingComplete() && isSequenceNumbersComplete()) || qApp->failedToConnectToEntityServer()) {
Locker lock(_lock);
_initialStart = INVALID_SEQUENCE;
_initialEnd = INVALID_SEQUENCE;

@@ -149,7 +149,7 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
uint32_t numIndices = (uint32_t)meshPart.triangleIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

for (uint32_t j = 0; j < numIndices; j += TRIANGLE_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.triangleIndices[j]];

@@ -170,7 +170,7 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
numIndices = (uint32_t)meshPart.quadIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % QUAD_STRIDE == 0);
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

for (uint32_t j = 0; j < numIndices; j += QUAD_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.quadIndices[j]];

@@ -305,7 +305,7 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
auto numIndices = meshPart.triangleIndices.count();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices% TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

auto indexItr = meshPart.triangleIndices.cbegin();
while (indexItr != meshPart.triangleIndices.cend()) {
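
The repeated WORKAROUND lines simply truncate the index count to a whole number of primitives before the loop strides over it. A tiny illustration (the stride values, 3 for triangles and 4 for quads, are assumptions about the engine constants):

    #include <cstdint>

    // Illustration of the WORKAROUND above: drop any trailing indices that do not form a
    // complete primitive, so the j += stride loop never reads past the end of the buffer.
    uint32_t truncateToWholePrimitives(uint32_t numIndices, uint32_t stride) {
        return numIndices - (numIndices % stride);
    }

    // e.g. truncateToWholePrimitives(11, 3) == 9, so the triangle loop visits indices 0..8 only.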

@@ -32,3 +32,7 @@ void KeyboardScriptingInterface::setPassword(bool password) {
void KeyboardScriptingInterface::loadKeyboardFile(const QString& keyboardFile) {
DependencyManager::get<Keyboard>()->loadKeyboardFile(keyboardFile);
}

bool KeyboardScriptingInterface::getUse3DKeyboard() {
return DependencyManager::get<Keyboard>()->getUse3DKeyboard();
}

@@ -30,6 +30,7 @@ class KeyboardScriptingInterface : public QObject, public Dependency {
Q_OBJECT
Q_PROPERTY(bool raised READ isRaised WRITE setRaised)
Q_PROPERTY(bool password READ isPassword WRITE setPassword)
Q_PROPERTY(bool use3DKeyboard READ getUse3DKeyboard);

public:
Q_INVOKABLE void loadKeyboardFile(const QString& string);

@@ -39,5 +40,7 @@ private:
bool isPassword();
void setPassword(bool password);

bool getUse3DKeyboard();
};
#endif

@@ -198,4 +198,14 @@ void TestScriptingInterface::setOtherAvatarsReplicaCount(int count) {
int TestScriptingInterface::getOtherAvatarsReplicaCount() {
return qApp->getOtherAvatarsReplicaCount();
}
}

QString TestScriptingInterface::getOperatingSystemType() {
#ifdef Q_OS_WIN
return "WINDOWS";
#elif defined Q_OS_MAC
return "MACOS";
#else
return "UNKNOWN";
#endif
}

@@ -163,6 +163,13 @@ public slots:
*/
Q_INVOKABLE int getOtherAvatarsReplicaCount();

/**jsdoc
* Returns the Operating System type
* @function Test.getOperatingSystemType
* @returns {string} "WINDOWS", "MACOS" or "UNKNOWN"
*/
QString getOperatingSystemType();

private:
bool waitForCondition(qint64 maxWaitMs, std::function<bool()> condition);
QString _testResultsLocation;

@@ -55,8 +55,6 @@ ApplicationOverlay::~ApplicationOverlay() {
// Renders the overlays either to a texture or to the screen
void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
PROFILE_RANGE(render, __FUNCTION__);
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");

buildFramebufferObject();

if (!_overlayFramebuffer) {

@@ -83,7 +81,9 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
// Now render the overlay components together into a single texture
renderDomainConnectionStatusBorder(renderArgs); // renders the connected domain line
renderOverlays(renderArgs); // renders Scripts Overlay and AudioScope
#if !defined(DISABLE_QML)
renderQmlUi(renderArgs); // renders a unit quad with the QML UI texture, and the text overlays from scripts
#endif
});

renderArgs->_batch = nullptr; // so future users of renderArgs don't try to use our batch

@@ -45,6 +45,7 @@
#include "scripting/HMDScriptingInterface.h"
#include "scripting/WindowScriptingInterface.h"
#include "scripting/SelectionScriptingInterface.h"
#include "scripting/HMDScriptingInterface.h"
#include "DependencyManager.h"

#include "raypick/StylusPointer.h"

@@ -54,25 +55,25 @@
static const int LEFT_HAND_CONTROLLER_INDEX = 0;
static const int RIGHT_HAND_CONTROLLER_INDEX = 1;

static const float MALLET_LENGTH = 0.2f;
static const float MALLET_TOUCH_Y_OFFSET = 0.052f;
static const float MALLET_Y_OFFSET = 0.180f;
static const float MALLET_LENGTH = 0.18f;
static const float MALLET_TOUCH_Y_OFFSET = 0.050f;
static const float MALLET_Y_OFFSET = 0.160f;

static const glm::quat MALLET_ROTATION_OFFSET{0.70710678f, 0.0f, -0.70710678f, 0.0f};
static const glm::vec3 MALLET_MODEL_DIMENSIONS{0.03f, MALLET_LENGTH, 0.03f};
static const glm::vec3 MALLET_MODEL_DIMENSIONS{0.01f, MALLET_LENGTH, 0.01f};
static const glm::vec3 MALLET_POSITION_OFFSET{0.0f, -MALLET_Y_OFFSET / 2.0f, 0.0f};
static const glm::vec3 MALLET_TIP_OFFSET{0.0f, MALLET_LENGTH - MALLET_TOUCH_Y_OFFSET, 0.0f};

static const glm::vec3 Z_AXIS {0.0f, 0.0f, 1.0f};
static const glm::vec3 KEYBOARD_TABLET_OFFSET{0.28f, -0.3f, -0.05f};
static const glm::vec3 KEYBOARD_TABLET_OFFSET{0.30f, -0.38f, -0.04f};
static const glm::vec3 KEYBOARD_TABLET_DEGREES_OFFSET{-45.0f, 0.0f, 0.0f};
static const glm::vec3 KEYBOARD_TABLET_LANDSCAPE_OFFSET{-0.2f, -0.27f, -0.05f};
static const glm::vec3 KEYBOARD_TABLET_LANDSCAPE_DEGREES_OFFSET{-45.0f, 0.0f, -90.0f};
static const glm::vec3 KEYBOARD_AVATAR_OFFSET{-0.6f, 0.3f, -0.7f};
static const glm::vec3 KEYBOARD_AVATAR_DEGREES_OFFSET{0.0f, 180.0f, 0.0f};

static const QString SOUND_FILE = PathUtils::resourcesUrl() + "sounds/keyboard_key.mp3";
static const QString SOUND_FILE = PathUtils::resourcesUrl() + "sounds/keyboardPress.mp3";
static const QString MALLET_MODEL_URL = PathUtils::resourcesUrl() + "meshes/drumstick.fbx";

static const float PULSE_STRENGTH = 0.6f;

@@ -221,6 +222,7 @@ Keyboard::Keyboard() {
auto pointerManager = DependencyManager::get<PointerManager>();
auto windowScriptingInterface = DependencyManager::get<WindowScriptingInterface>();
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto hmdScriptingInterface = DependencyManager::get<HMDScriptingInterface>();
connect(pointerManager.data(), &PointerManager::triggerBeginOverlay, this, &Keyboard::handleTriggerBegin, Qt::QueuedConnection);
connect(pointerManager.data(), &PointerManager::triggerContinueOverlay, this, &Keyboard::handleTriggerContinue, Qt::QueuedConnection);
connect(pointerManager.data(), &PointerManager::triggerEndOverlay, this, &Keyboard::handleTriggerEnd, Qt::QueuedConnection);

@@ -228,6 +230,7 @@ Keyboard::Keyboard() {
connect(pointerManager.data(), &PointerManager::hoverEndOverlay, this, &Keyboard::handleHoverEnd, Qt::QueuedConnection);
connect(myAvatar.get(), &MyAvatar::sensorToWorldScaleChanged, this, &Keyboard::scaleKeyboard, Qt::QueuedConnection);
connect(windowScriptingInterface.data(), &WindowScriptingInterface::domainChanged, [&]() { setRaised(false); });
connect(hmdScriptingInterface.data(), &HMDScriptingInterface::displayModeChanged, [&]() { setRaised(false); });
}

void Keyboard::registerKeyboardHighlighting() {

@@ -238,6 +241,17 @@ void Keyboard::registerKeyboardHighlighting() {
selection->enableListToScene(KEY_PRESSED_HIGHLIGHT);
}

bool Keyboard::getUse3DKeyboard() const {
return _use3DKeyboardLock.resultWithReadLock<bool>([&] {
return _use3DKeyboard.get();
});
}

void Keyboard::setUse3DKeyboard(bool use) {
_use3DKeyboardLock.withWriteLock([&] {
_use3DKeyboard.set(use);
});
}
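
The getter/setter pair above wraps a persisted Setting::Handle in a ReadWriteLockable so scripts and UI code can query it concurrently; the same shape works for any boolean preference. A condensed sketch of the pattern using only the calls visible in this diff (the class and setting names here are placeholders):

    #include <shared/ReadWriteLockable.h>
    #include <SettingHandle.h>

    // Condensed from Keyboard::getUse3DKeyboard()/setUse3DKeyboard(); placeholder names,
    // same locking and setting calls as the diff.
    class MyFeature {
    public:
        bool isEnabled() const {
            return _enabledLock.resultWithReadLock<bool>([&] {
                return _enabled.get();      // read the persisted value under a shared lock
            });
        }

        void setEnabled(bool enabled) {
            _enabledLock.withWriteLock([&] {
                _enabled.set(enabled);      // persist the new value under an exclusive lock
            });
        }

    private:
        mutable ReadWriteLockable _enabledLock;
        Setting::Handle<bool> _enabled { "myFeatureEnabled", true };   // hypothetical setting key
    };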

void Keyboard::createKeyboard() {
auto pointerManager = DependencyManager::get<PointerManager>();

@@ -483,7 +497,7 @@ void Keyboard::handleTriggerBegin(const OverlayID& overlayID, const PointerEvent
AudioInjectorOptions audioOptions;
audioOptions.localOnly = true;
audioOptions.position = keyWorldPosition;
audioOptions.volume = 0.1f;
audioOptions.volume = 0.05f;

AudioInjector::playSoundAndDelete(_keySound, audioOptions);

@@ -835,8 +849,8 @@ void Keyboard::loadKeyboardFile(const QString& keyboardFile) {
_textDisplay = textDisplay;

_ignoreItemsLock.withWriteLock([&] {
_itemsToIgnore.push_back(_textDisplay.overlayID);
_itemsToIgnore.push_back(_anchor.overlayID);
_itemsToIgnore.append(_textDisplay.overlayID);
_itemsToIgnore.append(_anchor.overlayID);
});
_layerIndex = 0;
auto pointerManager = DependencyManager::get<PointerManager>();

@@ -23,6 +23,7 @@
#include <Sound.h>
#include <AudioInjector.h>
#include <shared/ReadWriteLockable.h>
#include <SettingHandle.h>

#include "ui/overlays/Overlay.h"

@@ -97,6 +98,9 @@ public:
bool isPassword() const;
void setPassword(bool password);

bool getUse3DKeyboard() const;
void setUse3DKeyboard(bool use);

void loadKeyboardFile(const QString& keyboardFile);
QVector<OverlayID> getKeysID();

@@ -143,6 +147,9 @@ private:
SharedSoundPointer _keySound { nullptr };
std::shared_ptr<QTimer> _layerSwitchTimer { std::make_shared<QTimer>() };

mutable ReadWriteLockable _use3DKeyboardLock;
Setting::Handle<bool> _use3DKeyboard { "use3DKeyboard", true };

QString _typedCharacters;
TextDisplay _textDisplay;
Anchor _anchor;

@@ -74,6 +74,7 @@ void OverlayConductor::centerUI() {
}

void OverlayConductor::update(float dt) {
#if !defined(DISABLE_QML)
auto offscreenUi = DependencyManager::get<OffscreenUi>();
if (!offscreenUi) {
return;

@@ -115,4 +116,5 @@ void OverlayConductor::update(float dt) {
if (shouldRecenter && !_suppressedByHead) {
centerUI();
}
#endif
}

@@ -25,8 +25,10 @@ private:
bool headOutsideOverlay() const;
bool updateAvatarIsAtRest();

#if !defined(DISABLE_QML)
bool _suppressedByHead { false };
bool _hmdMode { false };
#endif

// used by updateAvatarIsAtRest
uint64_t _desiredAtRestTimer { 0 };

@@ -24,6 +24,7 @@
#include "Snapshot.h"
#include "SnapshotAnimated.h"
#include "UserActivityLogger.h"
#include "ui/Keyboard.h"

void setupPreferences() {
auto preferences = DependencyManager::get<Preferences>();

@@ -119,6 +120,12 @@ void setupPreferences() {
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Use reticle cursor instead of arrow", getter, setter));
}

{
auto getter = []()->bool { return DependencyManager::get<Keyboard>()->getUse3DKeyboard(); };
auto setter = [](bool value) { DependencyManager::get<Keyboard>()->setUse3DKeyboard(value); };
preferences->addPreference(new CheckPreference(UI_CATEGORY, "Use Virtual Keyboard", getter, setter));
}

{
auto getter = []()->bool { return qApp->getMiniTabletEnabled(); };
auto setter = [](bool value) { qApp->setMiniTabletEnabled(value); };

@@ -232,11 +232,15 @@ OverlayID Overlays::addOverlay(const QString& type, const QVariant& properties)
*/

if (type == ImageOverlay::TYPE) {
#if !defined(DISABLE_QML)
thisOverlay = Overlay::Pointer(new ImageOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
#endif
} else if (type == Image3DOverlay::TYPE || type == "billboard") { // "billboard" for backwards compatibility
thisOverlay = Overlay::Pointer(new Image3DOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
} else if (type == TextOverlay::TYPE) {
#if !defined(DISABLE_QML)
thisOverlay = Overlay::Pointer(new TextOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
#endif
} else if (type == Text3DOverlay::TYPE) {
thisOverlay = Overlay::Pointer(new Text3DOverlay(), [](Overlay* ptr) { ptr->deleteLater(); });
} else if (type == Shape3DOverlay::TYPE) {

@@ -535,7 +539,7 @@ RayToOverlayIntersectionResult Overlays::findRayIntersectionVector(const PickRay
bool bestIsFront = false;
bool bestIsTablet = false;
auto tabletIDs = qApp->getTabletIDs();

const QVector<OverlayID> keyboardKeysToDiscard = DependencyManager::get<Keyboard>()->getKeysID();
QMutexLocker locker(&_mutex);
RayToOverlayIntersectionResult result;
QMapIterator<OverlayID, Overlay::Pointer> i(_overlaysWorld);

@@ -545,7 +549,8 @@ RayToOverlayIntersectionResult Overlays::findRayIntersectionVector(const PickRay
auto thisOverlay = std::dynamic_pointer_cast<Base3DOverlay>(i.value());

if ((overlaysToDiscard.size() > 0 && overlaysToDiscard.contains(thisID)) ||
(overlaysToInclude.size() > 0 && !overlaysToInclude.contains(thisID))) {
(overlaysToInclude.size() > 0 && !overlaysToInclude.contains(thisID)) ||
(keyboardKeysToDiscard.size() > 0 && keyboardKeysToDiscard.contains(thisID))) {
continue;
}
@ -23,11 +23,39 @@ AnimSkeleton::AnimSkeleton(const HFMModel& hfmModel) {
for (auto& joint : hfmModel.joints) {
joints.push_back(joint);
}
buildSkeletonFromJoints(joints);
buildSkeletonFromJoints(joints, hfmModel.jointRotationOffsets);

// we make a copy of the inverseBindMatrices in order to prevent mutating the model bind pose
// when we are dealing with a joint offset in the model
for (int i = 0; i < (int)hfmModel.meshes.size(); i++) {
const HFMMesh& mesh = hfmModel.meshes.at(i);
std::vector<HFMCluster> dummyClustersList;

for (int j = 0; j < mesh.clusters.size(); j++) {
std::vector<glm::mat4> bindMatrices;
// cast into a non-const reference, so we can mutate the FBXCluster
HFMCluster& cluster = const_cast<HFMCluster&>(mesh.clusters.at(j));

HFMCluster localCluster;
localCluster.jointIndex = cluster.jointIndex;
localCluster.inverseBindMatrix = cluster.inverseBindMatrix;
localCluster.inverseBindTransform.evalFromRawMatrix(localCluster.inverseBindMatrix);

// if we have a joint offset in the fst file then multiply its inverse by the
// model cluster inverse bind matrix
if (hfmModel.jointRotationOffsets.contains(cluster.jointIndex)) {
AnimPose localOffset(hfmModel.jointRotationOffsets[cluster.jointIndex], glm::vec3());
localCluster.inverseBindMatrix = (glm::mat4)localOffset.inverse() * cluster.inverseBindMatrix;
localCluster.inverseBindTransform.evalFromRawMatrix(localCluster.inverseBindMatrix);
}
dummyClustersList.push_back(localCluster);
}
_clusterBindMatrixOriginalValues.push_back(dummyClustersList);
}
}

AnimSkeleton::AnimSkeleton(const std::vector<HFMJoint>& joints) {
buildSkeletonFromJoints(joints);
AnimSkeleton::AnimSkeleton(const std::vector<HFMJoint>& joints, const QMap<int, glm::quat> jointOffsets) {
buildSkeletonFromJoints(joints, jointOffsets);
}

int AnimSkeleton::nameToJointIndex(const QString& jointName) const {

@ -166,7 +194,8 @@ void AnimSkeleton::mirrorAbsolutePoses(AnimPoseVec& poses) const {
}
}

void AnimSkeleton::buildSkeletonFromJoints(const std::vector<HFMJoint>& joints) {
void AnimSkeleton::buildSkeletonFromJoints(const std::vector<HFMJoint>& joints, const QMap<int, glm::quat> jointOffsets) {

_joints = joints;
_jointsSize = (int)joints.size();
// build a cache of bind poses

@ -189,7 +218,7 @@ void AnimSkeleton::buildSkeletonFromJoints(const std::vector<HFMJoint>& joints)
// build relative and absolute default poses
glm::mat4 relDefaultMat = glm::translate(_joints[i].translation) * preRotationTransform * glm::mat4_cast(_joints[i].rotation) * postRotationTransform;
AnimPose relDefaultPose(relDefaultMat);
_relativeDefaultPoses.push_back(relDefaultPose);

int parentIndex = getParentIndex(i);
if (parentIndex >= 0) {
_absoluteDefaultPoses.push_back(_absoluteDefaultPoses[parentIndex] * relDefaultPose);

@ -198,6 +227,16 @@ void AnimSkeleton::buildSkeletonFromJoints(const std::vector<HFMJoint>& joints)
}
}

for (int k = 0; k < _jointsSize; k++) {
if (jointOffsets.contains(k)) {
AnimPose localOffset(jointOffsets[k], glm::vec3());
_absoluteDefaultPoses[k] = _absoluteDefaultPoses[k] * localOffset;
}
}
// re-compute relative poses
_relativeDefaultPoses = _absoluteDefaultPoses;
convertAbsolutePosesToRelative(_relativeDefaultPoses);

for (int i = 0; i < _jointsSize; i++) {
_jointIndicesByName[_joints[i].name] = i;
}
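Note: the hunks above fold a per-joint rotation offset (read from the FST mapping) into both the cluster inverse bind matrices and the absolute default poses. A minimal sketch of that composition using plain glm instead of the engine's AnimPose, with names invented here for illustration only:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Sketch: compose a joint rotation offset with an inverse bind matrix the way the diff does.
    // offsetRot stands in for one entry of hfmModel.jointRotationOffsets.
    glm::mat4 applyJointOffsetToInverseBind(const glm::quat& offsetRot,
                                            const glm::mat4& inverseBindMatrix) {
        // The offset is a pure rotation; its inverse is pre-multiplied so the skinning
        // matrix ends up rotated by the offset in joint-local space.
        glm::mat4 localOffset = glm::mat4_cast(offsetRot);
        return glm::inverse(localOffset) * inverseBindMatrix;
    }

    // The default poses are adjusted the other way around: post-multiply the absolute pose
    // by the offset, then recompute the relative poses from the adjusted absolutes.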
@ -15,7 +15,7 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

#include <FBXReader.h>
#include <FBXSerializer.h>
#include "AnimPose.h"

class AnimSkeleton {

@ -24,7 +24,8 @@ public:
using ConstPointer = std::shared_ptr<const AnimSkeleton>;

explicit AnimSkeleton(const HFMModel& hfmModel);
explicit AnimSkeleton(const std::vector<HFMJoint>& joints);
explicit AnimSkeleton(const std::vector<HFMJoint>& joints, const QMap<int, glm::quat> jointOffsets);

int nameToJointIndex(const QString& jointName) const;
const QString& getJointName(int jointIndex) const;
int getNumJoints() const;

@ -62,9 +63,10 @@ public:
void dump(const AnimPoseVec& poses) const;

std::vector<int> lookUpJointIndices(const std::vector<QString>& jointNames) const;
const HFMCluster getClusterBindMatricesOriginalValues(const int meshIndex, const int clusterIndex) const { return _clusterBindMatrixOriginalValues[meshIndex][clusterIndex]; }

protected:
void buildSkeletonFromJoints(const std::vector<HFMJoint>& joints);
void buildSkeletonFromJoints(const std::vector<HFMJoint>& joints, const QMap<int, glm::quat> jointOffsets);

std::vector<HFMJoint> _joints;
int _jointsSize { 0 };

@ -76,6 +78,7 @@ protected:
std::vector<int> _nonMirroredIndices;
std::vector<int> _mirrorMap;
QHash<QString, int> _jointIndicesByName;
std::vector<std::vector<HFMCluster>> _clusterBindMatrixOriginalValues;

// no copies
AnimSkeleton(const AnimSkeleton&) = delete;
@ -71,7 +71,7 @@ void AnimationReader::run() {
// Parse the FBX directly from the QNetworkReply
HFMModel::Pointer hfmModel;
if (_url.path().toLower().endsWith(".fbx")) {
hfmModel.reset(readFBX(_data, QVariantHash(), _url.path()));
hfmModel = FBXSerializer().read(_data, QVariantHash(), _url.path());
} else {
QString errorStr("usupported format");
emit onError(299, errorStr);

@ -17,7 +17,7 @@
#include <QtScript/QScriptValue>

#include <DependencyManager.h>
#include <FBXReader.h>
#include <FBXSerializer.h>
#include <ResourceCache.h>

class Animation;

@ -15,7 +15,7 @@
#include <QObject>
#include <QScriptable>

#include <FBXReader.h>
#include <FBXSerializer.h>

class QScriptEngine;
@ -360,8 +360,10 @@ void Rig::initJointStates(const HFMModel& hfmModel, const glm::mat4& modelOffset
void Rig::reset(const HFMModel& hfmModel) {
_geometryOffset = AnimPose(hfmModel.offset);
_invGeometryOffset = _geometryOffset.inverse();

_animSkeleton = std::make_shared<AnimSkeleton>(hfmModel);

_internalPoseSet._relativePoses.clear();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();

@ -302,7 +302,6 @@ void AudioClient::customDeleter() {
#if defined(Q_OS_ANDROID)
_shouldRestartInputSetup = false;
#endif
stop();
deleteLater();
}
@ -51,7 +51,7 @@
#include <xmmintrin.h>
// convert float to int using round-to-nearest
FORCEINLINE static int32_t floatToInt(float x) {
return _mm_cvt_ss2si(_mm_load_ss(&x));
return _mm_cvt_ss2si(_mm_set_ss(x));
}

#else

@ -150,7 +150,7 @@ static const int IEEE754_EXPN_BIAS = 127;
//
// Peak detection and -log2(x) for float input (mono)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
// x > 2^LOG2_HEADROOM returns 0
//
FORCEINLINE static int32_t peaklog2(float* input) {

@ -161,12 +161,12 @@ FORCEINLINE static int32_t peaklog2(float* input) {
uint32_t peak = u & IEEE754_FABS_MASK;

// split into e and x - 1.0
int e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << IEEE754_EXPN_BITS) & 0x7fffffff;

// saturate
if (e > 31) {
return 0x7fffffff;
// saturate when e > 31 or e < 0
if ((uint32_t)e > 31) {
return 0x7fffffff & ~(e >> 31);
}

int k = x >> (31 - LOG2_TABBITS);

@ -186,7 +186,7 @@ FORCEINLINE static int32_t peaklog2(float* input) {
//
// Peak detection and -log2(x) for float input (stereo)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
// x > 2^LOG2_HEADROOM returns 0
//
FORCEINLINE static int32_t peaklog2(float* input0, float* input1) {

@ -200,12 +200,12 @@ FORCEINLINE static int32_t peaklog2(float* input0, float* input1) {
uint32_t peak = MAX(u0, u1);

// split into e and x - 1.0
int e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << IEEE754_EXPN_BITS) & 0x7fffffff;

// saturate
if (e > 31) {
return 0x7fffffff;
// saturate when e > 31 or e < 0
if ((uint32_t)e > 31) {
return 0x7fffffff & ~(e >> 31);
}

int k = x >> (31 - LOG2_TABBITS);

@ -225,7 +225,7 @@ FORCEINLINE static int32_t peaklog2(float* input0, float* input1) {
//
// Peak detection and -log2(x) for float input (quad)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
// x > 2^LOG2_HEADROOM returns 0
//
FORCEINLINE static int32_t peaklog2(float* input0, float* input1, float* input2, float* input3) {

@ -243,12 +243,12 @@ FORCEINLINE static int32_t peaklog2(float* input0, float* input1, float* input2,
uint32_t peak = MAX(MAX(u0, u1), MAX(u2, u3));

// split into e and x - 1.0
int e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << IEEE754_EXPN_BITS) & 0x7fffffff;

// saturate
if (e > 31) {
return 0x7fffffff;
// saturate when e > 31 or e < 0
if ((uint32_t)e > 31) {
return 0x7fffffff & ~(e >> 31);
}

int k = x >> (31 - LOG2_TABBITS);
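Note: the new saturation path replaces the single `e > 31` test with an unsigned compare plus a sign mask, so both out-of-range cases are handled without a second branch: `(uint32_t)e > 31` is true for e > 31 and for negative e, and `0x7fffffff & ~(e >> 31)` keeps 0x7fffffff when e was merely too large but collapses to 0 when e was negative (input above the headroom). A standalone check of that identity, assuming arithmetic right shift on negative ints as the original code does; this is illustration only, not part of the library:

    #include <cassert>
    #include <cstdint>

    // Sketch of the branchless saturation used in peaklog2():
    //   e > 31 -> clamp to 0x7fffffff (very small input)
    //   e < 0  -> clamp to 0          (input above 2^LOG2_HEADROOM)
    static int32_t saturateLog2(int32_t e) {
        if ((uint32_t)e > 31) {
            // e >> 31 is 0 for non-negative e and all-ones for negative e,
            // so the mask keeps 0x7fffffff in the first case and zeroes it in the second.
            return 0x7fffffff & ~(e >> 31);
        }
        return e;   // in-range exponents continue to the table lookup in the real code
    }

    int main() {
        assert(saturateLog2(40) == 0x7fffffff);
        assert(saturateLog2(-3) == 0);
        assert(saturateLog2(5) == 5);
        return 0;
    }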
@ -447,9 +447,9 @@ AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const A
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit to 4 octaves
const int pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = SAMPLE_RATE / pitch;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);

auto audioData = sound->getAudioData();
auto numChannels = audioData->getNumChannels();

@ -499,9 +499,9 @@ AudioInjectorPointer AudioInjector::playSound(AudioDataPointer audioData, const
using AudioConstants::AudioSample;
using AudioConstants::SAMPLE_RATE;
const int standardRate = SAMPLE_RATE;
// limit to 4 octaves
const int pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = SAMPLE_RATE / pitch;
// limit pitch to 4 octaves
const float pitch = glm::clamp(options.pitch, 1 / 16.0f, 16.0f);
const int resampledRate = glm::round(SAMPLE_RATE / pitch);

auto numChannels = audioData->getNumChannels();
auto numFrames = audioData->getNumFrames();
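Note: making `pitch` a float before dividing into the sample rate is the substantive fix here; with the previous `const int pitch`, any requested pitch below 1.0 truncated to 0 or 1 and low pitches collapsed. A quick worked example of the new mapping; the 48000 constant below is a placeholder for illustration, not the engine's actual AudioConstants::SAMPLE_RATE:

    #include <glm/glm.hpp>
    #include <iostream>

    int main() {
        const int SAMPLE_RATE = 48000;   // illustrative value only
        for (float requested : { 0.25f, 0.5f, 1.0f, 2.0f, 20.0f }) {
            // clamp to 4 octaves either way, then resample instead of pitch-shifting
            const float pitch = glm::clamp(requested, 1 / 16.0f, 16.0f);
            const int resampledRate = (int)glm::round(SAMPLE_RATE / pitch);
            std::cout << requested << " -> " << resampledRate << "\n";
            // 0.25 -> 192000, 0.5 -> 96000, 1 -> 48000, 2 -> 24000, 20 (clamped to 16) -> 3000
        }
        return 0;
    }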
@ -2103,8 +2103,9 @@ void AvatarData::setJointMappingsFromNetworkReply() {

// before we process this update, make sure that the skeleton model URL hasn't changed
// since we made the FST request
if (networkReply->url() != _skeletonModelURL) {
if (networkReply->error() != QNetworkReply::NoError || networkReply->url() != _skeletonModelURL) {
qCDebug(avatars) << "Refusing to set joint mappings for FST URL that does not match the current URL";
networkReply->deleteLater();
return;
}
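Note: the added `networkReply->error()` guard means a failed FST download is now rejected the same way as a stale URL. The general Qt pattern, sketched outside AvatarData with a hypothetical handler name:

    #include <QNetworkReply>

    // Sketch: always check QNetworkReply::error() before trusting the payload,
    // and release the reply with deleteLater() on every path.
    void handleFstReply(QNetworkReply* reply) {
        if (reply->error() != QNetworkReply::NoError) {
            reply->deleteLater();   // discard failed downloads instead of parsing them
            return;
        }
        QByteArray fstData = reply->readAll();
        // ... parse joint mappings from fstData ...
        reply->deleteLater();
    }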
@ -1490,7 +1490,7 @@ protected:
bool _isClientAvatar { false };

// null unless MyAvatar or ScriptableAvatar sending traits data to mixer
std::unique_ptr<ClientTraitsHandler> _clientTraitsHandler;
std::unique_ptr<ClientTraitsHandler, LaterDeleter> _clientTraitsHandler;

template <typename T, typename F>
T readLockWithNamedJointIndex(const QString& name, const T& defaultValue, F f) const {
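Note: `_clientTraitsHandler` now carries a custom deleter type so the handler is destroyed through the Qt event loop rather than immediately. Assuming `LaterDeleter` is the usual "call deleteLater()" functor (its actual definition lives elsewhere in the codebase; the names below are stand-ins), the pattern looks like this:

    #include <QObject>
    #include <memory>

    // Hypothetical stand-in for the codebase's LaterDeleter: defer destruction to the event loop.
    struct LaterDeleterSketch {
        void operator()(QObject* ptr) const { ptr->deleteLater(); }
    };

    class ClientTraitsHandlerLike : public QObject { /* ... */ };

    // unique_ptr still owns the object, but "deleting" it only queues deleteLater(),
    // which is safe even if queued signals targeting the object are still pending.
    using ClientTraitsHandlerPtr = std::unique_ptr<ClientTraitsHandlerLike, LaterDeleterSketch>;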
@ -22,7 +22,7 @@ ClientTraitsHandler::ClientTraitsHandler(AvatarData* owningAvatar) :
_owningAvatar(owningAvatar)
{
auto nodeList = DependencyManager::get<NodeList>();
QObject::connect(nodeList.data(), &NodeList::nodeAdded, [this](SharedNodePointer addedNode){
QObject::connect(nodeList.data(), &NodeList::nodeAdded, this, [this](SharedNodePointer addedNode) {
if (addedNode->getType() == NodeType::AvatarMixer) {
resetForNewMixer();
}
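Note: passing `this` as the context argument ties the lambda's lifetime to the ClientTraitsHandler: when the handler is destroyed, Qt disconnects the lambda automatically, so it can no longer fire with a dangling `this`. A minimal illustration of the two connect forms with generic Qt types (not the engine's):

    #include <QObject>
    #include <QTimer>

    void connectExamples(QTimer* timer, QObject* owner) {
        // Without a context object: the lambda stays connected even after 'owner' dies.
        QObject::connect(timer, &QTimer::timeout, [owner] { /* may dangle */ });

        // With 'owner' as context: Qt disconnects automatically when 'owner' is destroyed,
        // and the lambda runs in owner's thread.
        QObject::connect(timer, &QTimer::timeout, owner, [owner] { /* safe */ });
    }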
@ -27,7 +27,7 @@

#include <PathUtils.h>

#include <FBXReader.h>
#include <FBXSerializer.h>
#include <FBXWriter.h>

#include "ModelBakingLoggingCategory.h"

@ -187,10 +187,10 @@ void FBXBaker::importScene() {
return;
}

FBXReader reader;
FBXSerializer fbxSerializer;

qCDebug(model_baking) << "Parsing" << _modelURL;
_rootNode = reader._rootNode = reader.parseFBX(&fbxFile);
_rootNode = fbxSerializer._rootNode = fbxSerializer.parseFBX(&fbxFile);

#ifdef HIFI_DUMP_FBX
{

@ -206,8 +206,8 @@ void FBXBaker::importScene() {
}
#endif

_hfmModel = reader.extractHFMModel({}, _modelURL.toString());
_textureContentMap = reader._textureContent;
_hfmModel = fbxSerializer.extractHFMModel({}, _modelURL.toString());
_textureContentMap = fbxSerializer._textureContent;
}

void FBXBaker::rewriteAndBakeSceneModels() {

@ -232,7 +232,7 @@ void FBXBaker::rewriteAndBakeSceneModels() {
if (objectChild.name == "Geometry") {

// TODO Pull this out of _hfmModel instead so we don't have to reprocess it
auto extractedMesh = FBXReader::extractMesh(objectChild, meshIndex, false);
auto extractedMesh = FBXSerializer::extractMesh(objectChild, meshIndex, false);

// Callback to get MaterialID
GetMaterialIDCallback materialIDcallback = [&extractedMesh](int partIndex) {

@ -13,7 +13,6 @@

#include <PathUtils.h>

#include <FBXReader.h>
#include <FBXWriter.h>

#ifdef _WIN32
@ -14,7 +14,7 @@
#include <PathUtils.h>
#include <NetworkAccessManager.h>

#include "OBJReader.h"
#include "OBJSerializer.h"
#include "FBXWriter.h"

const double UNIT_SCALE_FACTOR = 100.0;

@ -143,9 +143,10 @@ void OBJBaker::bakeOBJ() {

QByteArray objData = objFile.readAll();

bool combineParts = true; // set true so that OBJReader reads material info from material library
OBJReader reader;
auto geometry = reader.readOBJ(objData, QVariantHash(), combineParts, _modelURL);
OBJSerializer serializer;
QVariantHash mapping;
mapping["combineParts"] = true; // set true so that OBJSerializer reads material info from material library
auto geometry = serializer.read(objData, mapping, _modelURL);

// Write OBJ Data as FBX tree nodes
createFBXNodeTree(_rootNode, *geometry);

@ -219,7 +220,7 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, HFMModel& hfmModel) {
FBXNode materialNode;
materialNode.name = MATERIAL_NODE_NAME;
if (hfmModel.materials.size() == 1) {
// case when no material information is provided, OBJReader considers it as a single default material
// case when no material information is provided, OBJSerializer considers it as a single default material
for (auto& materialID : hfmModel.materials.keys()) {
setMaterialNodeProperties(materialNode, materialID, hfmModel);
}
@ -14,8 +14,6 @@ void main(void) {
ivec2 texCoord = ivec2(floor(varTexCoord0 * vec2(textureData.textureSize)));
texCoord.x /= 2;
int row = int(floor(gl_FragCoord.y));
if (row % 2 > 0) {
texCoord.x += (textureData.textureSize.x / 2);
}
texCoord.x += int(row % 2 > 0) * (textureData.textureSize.x / 2);
outFragColor = vec4(pow(texelFetch(colorMap, texCoord, 0).rgb, vec3(2.2)), 1.0);
}
@ -534,18 +534,26 @@ void OpenGLDisplayPlugin::updateFrameData() {
}

std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> OpenGLDisplayPlugin::getHUDOperator() {
return [this](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (_hudPipeline && hudTexture) {
auto hudPipeline = _hudPipeline;
auto hudMirrorPipeline = _mirrorHUDPipeline;
auto hudStereo = isStereo();
auto hudCompositeFramebufferSize = _compositeFramebuffer->getSize();
std::array<glm::ivec4, 2> hudEyeViewports;
for_each_eye([&](Eye eye) {
hudEyeViewports[eye] = eyeViewport(eye);
});
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.enableStereo(false);
batch.setPipeline(mirror ? _mirrorHUDPipeline : _hudPipeline);
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
batch.setResourceTexture(0, hudTexture);
if (isStereo()) {
if (hudStereo) {
for_each_eye([&](Eye eye) {
batch.setViewportTransform(eyeViewport(eye));
batch.setViewportTransform(hudEyeViewports[eye]);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), _compositeFramebuffer->getSize()));
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
}
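Note: both HUD-operator changes in this commit follow the same pattern: instead of capturing `this` and reading members when the returned lambda eventually runs, the function snapshots everything it needs into locals and captures those by value, so the callback stays valid even if the plugin is torn down first. A generic sketch of that refactor with stand-in types:

    #include <functional>
    #include <memory>

    struct PipelineLike {};   // stand-in for the shared GPU resource

    class PluginLike {
    public:
        // Risky: the lambda holds 'this'; it dangles if the plugin is destroyed
        // before the renderer finally invokes the operator.
        std::function<void()> hudOperatorUnsafe() {
            return [this] { use(_pipeline); };
        }

        // Safer: copy the shared state up front and capture the copies by value.
        std::function<void()> hudOperatorSafe() {
            auto pipeline = _pipeline;          // shared_ptr copy keeps the resource alive
            return [pipeline] { use(pipeline); };
        }

    private:
        static void use(const std::shared_ptr<PipelineLike>&) {}
        std::shared_ptr<PipelineLike> _pipeline { std::make_shared<PipelineLike>() };
    };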
@ -9,7 +9,7 @@ layout(location=0) out vec4 outFragColor;
float sRGBFloatToLinear(float value) {
const float SRGB_ELBOW = 0.04045;

return (value <= SRGB_ELBOW) ? value / 12.92 : pow((value + 0.055) / 1.055, 2.4);
return mix(pow((value + 0.055) / 1.055, 2.4), value / 12.92, float(value <= SRGB_ELBOW));
}

vec3 colorToLinearRGB(vec3 srgb) {
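Note: the rewritten shader evaluates both branches and selects with `mix(..., float(value <= SRGB_ELBOW))`, avoiding divergent branching per fragment; the piecewise function itself is unchanged. For reference, the same standard sRGB-to-linear conversion written on the CPU side (standard constants, not project-specific):

    #include <cmath>

    // Standard sRGB electro-optical transfer: linear segment below the elbow,
    // gamma-2.4 segment above it. Matches the shader's math.
    float srgbToLinear(float value) {
        const float SRGB_ELBOW = 0.04045f;
        float gammaPart = std::pow((value + 0.055f) / 1.055f, 2.4f);
        float linearPart = value / 12.92f;
        return (value <= SRGB_ELBOW) ? linearPart : gammaPart;
    }
    // e.g. srgbToLinear(0.5f) is roughly 0.214, srgbToLinear(0.04f) is roughly 0.0031.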
@ -420,18 +420,26 @@ void HmdDisplayPlugin::HUDRenderer::updatePipeline() {

std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::HUDRenderer::render(HmdDisplayPlugin& plugin) {
updatePipeline();
return [this](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (pipeline && hudTexture) {
batch.setPipeline(pipeline);

batch.setInputFormat(format);
gpu::BufferView posView(vertices, VERTEX_OFFSET, vertices->getSize(), VERTEX_STRIDE, format->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(vertices, TEXTURE_OFFSET, vertices->getSize(), VERTEX_STRIDE, format->getAttributes().at(gpu::Stream::TEXCOORD)._element);
auto hudPipeline = pipeline;
auto hudFormat = format;
auto hudVertices = vertices;
auto hudIndices = indices;
auto hudUniformBuffer = uniformsBuffer;
auto hudUniforms = uniforms;
auto hudIndexCount = indexCount;
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.setPipeline(hudPipeline);

batch.setInputFormat(hudFormat);
gpu::BufferView posView(hudVertices, VERTEX_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(hudVertices, TEXTURE_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
batch.setInputBuffer(gpu::Stream::POSITION, posView);
batch.setInputBuffer(gpu::Stream::TEXCOORD, uvView);
batch.setIndexBuffer(gpu::UINT16, indices, 0);
uniformsBuffer->setSubData(0, uniforms);
batch.setUniformBuffer(0, uniformsBuffer);
batch.setIndexBuffer(gpu::UINT16, hudIndices, 0);
hudUniformBuffer->setSubData(0, hudUniforms);
batch.setUniformBuffer(0, hudUniformBuffer);

auto compositorHelper = DependencyManager::get<CompositorHelper>();
glm::mat4 modelTransform = compositorHelper->getUiTransform();

@ -441,7 +449,7 @@ std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDis
batch.setModelTransform(modelTransform);
batch.setResourceTexture(0, hudTexture);

batch.drawIndexed(gpu::TRIANGLES, indexCount);
batch.drawIndexed(gpu::TRIANGLES, hudIndexCount);
}
};
}
@ -126,7 +126,7 @@ void MaterialEntityRenderer::doRender(RenderArgs* args) {
batch.setModelTransform(renderTransform);

if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {
drawMaterial->setTextureTransforms(textureTransform);
drawMaterial->setTextureTransforms(textureTransform, MaterialMappingMode::UV, true);

// bind the material
RenderPipelines::bindMaterial(drawMaterial, batch, args->_enableTexturing);

@ -421,7 +421,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
uint32_t numIndices = (uint32_t)meshPart.triangleIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

for (uint32_t j = 0; j < numIndices; j += TRIANGLE_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.triangleIndices[j]];

@ -442,7 +442,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
numIndices = (uint32_t)meshPart.quadIndices.size();
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % QUAD_STRIDE == 0);
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

for (uint32_t j = 0; j < numIndices; j += QUAD_STRIDE) {
glm::vec3 p0 = mesh.vertices[meshPart.quadIndices[j]];

@ -595,7 +595,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
if (partItr->_topology == graphics::Mesh::TRIANGLES) {
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices % TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

auto indexItr = indices.cbegin<const gpu::BufferView::Index>() + partItr->_startIndex;
auto indexEnd = indexItr + numIndices;

@ -652,7 +652,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
if (partItr->_topology == graphics::Mesh::TRIANGLES) {
// TODO: assert rather than workaround after we start sanitizing HFMMesh higher up
//assert(numIndices% TRIANGLE_STRIDE == 0);
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXSerializer

auto indexItr = indices.cbegin<const gpu::BufferView::Index>() + partItr->_startIndex;
auto indexEnd = indexItr + numIndices;
@ -80,10 +80,11 @@ float interpolate3Points(float y1, float y2, float y3, float u) {
halfSlope = (y3 - y1) / 2.0f;
float slope12 = y2 - y1;
float slope23 = y3 - y2;
if (abs(halfSlope) > abs(slope12)) {
halfSlope = slope12;
} else if (abs(halfSlope) > abs(slope23)) {
halfSlope = slope23;

{
float check = float(abs(halfSlope) > abs(slope12));
halfSlope = mix(halfSlope, slope12, check);
halfSlope = mix(halfSlope, slope23, (1.0 - check) * float(abs(halfSlope) > abs(slope23)));
}
}
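Note: this hunk and the deinterleaving shader earlier in the diff apply the same trick: an if/else that only chooses between two values is replaced by `float(condition)` feeding `mix()`, so every fragment executes the same instructions. The equivalence, checked on the CPU with glm purely for illustration:

    #include <glm/glm.hpp>
    #include <cassert>

    // mix(a, b, t) with t in {0.0, 1.0} is exactly "select b when the condition holds".
    float selectBranchless(float a, float b, bool cond) {
        return glm::mix(a, b, float(cond));
    }

    int main() {
        assert(selectBranchless(1.0f, 2.0f, false) == 1.0f);
        assert(selectBranchless(1.0f, 2.0f, true) == 2.0f);
        return 0;
    }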
@ -385,6 +385,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_MAPPING_SCALE, materialMappingScale);
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_MAPPING_ROT, materialMappingRot);
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_DATA, materialData);
CHECK_PROPERTY_CHANGE(PROP_MATERIAL_REPEAT, materialRepeat);
CHECK_PROPERTY_CHANGE(PROP_VISIBLE_IN_SECONDARY_CAMERA, isVisibleInSecondaryCamera);
CHECK_PROPERTY_CHANGE(PROP_PARTICLE_SPIN, particleSpin);
CHECK_PROPERTY_CHANGE(PROP_SPIN_SPREAD, spinSpread);

@ -754,7 +755,8 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* Otherwise the property value is parsed as an unsigned integer, specifying the mesh index to modify. Invalid values are
* parsed to <code>0</code>.
* @property {string} materialMappingMode="uv" - How the material is mapped to the entity. Either <code>"uv"</code> or
* <code>"projected"</code>. <em>Currently, only <code>"uv"</code> is supported.
* <code>"projected"</code>. In "uv" mode, the material will be evaluated within the UV space of the mesh it is applied to. In
* "projected" mode, the 3D transform of the Material Entity will be used to evaluate the texture coordinates for the material.
* @property {Vec2} materialMappingPos=0,0 - Offset position in UV-space of the top left of the material, range
* <code>{ x: 0, y: 0 }</code> – <code>{ x: 1, y: 1 }</code>.
* @property {Vec2} materialMappingScale=1,1 - How much to scale the material within the parent's UV-space.

@ -762,6 +764,8 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* @property {string} materialData="" - Used to store {@link MaterialResource} data as a JSON string. You can use
* <code>JSON.parse()</code> to parse the string into a JavaScript object which you can manipulate the properties of, and
* use <code>JSON.stringify()</code> to convert the object into a string to put in the property.
* @property {boolean} materialRepeat=true - If true, the material will repeat. If false, fragments outside of texCoord 0 - 1 will be discarded.
* Works in both "uv" and "projected" modes.
* @example <caption>Color a sphere using a Material entity.</caption>
* var entityID = Entities.addEntity({
* type: "Sphere",

@ -1485,6 +1489,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_MAPPING_SCALE, materialMappingScale);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_MAPPING_ROT, materialMappingRot);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_DATA, materialData);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_REPEAT, materialRepeat);
}

/**jsdoc

@ -1666,6 +1671,7 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialMappingScale, vec2, setMaterialMappingScale);
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialMappingRot, float, setMaterialMappingRot);
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialData, QString, setMaterialData);
COPY_PROPERTY_FROM_QSCRIPTVALUE(materialRepeat, bool, setMaterialRepeat);
COPY_PROPERTY_FROM_QSCRIPTVALUE(isVisibleInSecondaryCamera, bool, setIsVisibleInSecondaryCamera);
COPY_PROPERTY_FROM_QSCRIPTVALUE(particleSpin, float, setParticleSpin);
COPY_PROPERTY_FROM_QSCRIPTVALUE(spinSpread, float, setSpinSpread);

@ -2061,6 +2067,7 @@ void EntityItemProperties::entityPropertyFlagsFromScriptValue(const QScriptValue
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_MAPPING_SCALE, MaterialMappingScale, materialMappingScale, vec2);
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_MAPPING_ROT, MaterialMappingRot, materialMappingRot, float);
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_DATA, MaterialData, materialData, QString);
ADD_PROPERTY_TO_MAP(PROP_MATERIAL_REPEAT, MaterialRepeat, materialRepeat, bool);

ADD_PROPERTY_TO_MAP(PROP_VISIBLE_IN_SECONDARY_CAMERA, IsVisibleInSecondaryCamera, isVisibleInSecondaryCamera, bool);

@ -2511,6 +2518,7 @@ OctreeElement::AppendState EntityItemProperties::encodeEntityEditPacket(PacketTy
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_SCALE, properties.getMaterialMappingScale());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_ROT, properties.getMaterialMappingRot());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_DATA, properties.getMaterialData());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_REPEAT, properties.getMaterialRepeat());
}

APPEND_ENTITY_PROPERTY(PROP_NAME, properties.getName());

@ -2898,6 +2906,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_MAPPING_SCALE, vec2, setMaterialMappingScale);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_MAPPING_ROT, float, setMaterialMappingRot);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_DATA, QString, setMaterialData);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MATERIAL_REPEAT, bool, setMaterialRepeat);
}

READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_NAME, QString, setName);

@ -3137,6 +3146,7 @@ void EntityItemProperties::markAllChanged() {
_materialMappingScaleChanged = true;
_materialMappingRotChanged = true;
_materialDataChanged = true;
_materialRepeatChanged = true;

// Certifiable Properties
_itemNameChanged = true;

@ -3587,6 +3597,9 @@ QList<QString> EntityItemProperties::listChangedProperties() {
if (materialDataChanged()) {
out += "materialData";
}
if (materialRepeatChanged()) {
out += "materialRepeat";
}
if (isVisibleInSecondaryCameraChanged()) {
out += "isVisibleInSecondaryCamera";
}
@ -241,6 +241,7 @@ public:
DEFINE_PROPERTY_REF(PROP_MATERIAL_MAPPING_SCALE, MaterialMappingScale, materialMappingScale, glm::vec2, glm::vec2(1.0f));
DEFINE_PROPERTY_REF(PROP_MATERIAL_MAPPING_ROT, MaterialMappingRot, materialMappingRot, float, 0);
DEFINE_PROPERTY_REF(PROP_MATERIAL_DATA, MaterialData, materialData, QString, "");
DEFINE_PROPERTY_REF(PROP_MATERIAL_REPEAT, MaterialRepeat, materialRepeat, bool, true);

DEFINE_PROPERTY(PROP_VISIBLE_IN_SECONDARY_CAMERA, IsVisibleInSecondaryCamera, isVisibleInSecondaryCamera, bool, ENTITY_ITEM_DEFAULT_VISIBLE_IN_SECONDARY_CAMERA);

@ -175,6 +175,7 @@ QDebug& operator<<(QDebug& dbg, const EntityPropertyFlags& f) {
result = f.getHasProperty(PROP_MATERIAL_MAPPING_SCALE) ? result + "materialMappingScale " : result;
result = f.getHasProperty(PROP_MATERIAL_MAPPING_ROT) ? result + "materialMappingRot " : result;
result = f.getHasProperty(PROP_MATERIAL_DATA) ? result + "materialData " : result;
result = f.getHasProperty(PROP_MATERIAL_REPEAT) ? result + "materialRepeat " : result;
result = f.getHasProperty(PROP_VISIBLE_IN_SECONDARY_CAMERA) ? result + "visibleInSecondaryCamera " : result;
result = f.getHasProperty(PROP_PARTICLE_SPIN) ? result + "particleSpin " : result;
result = f.getHasProperty(PROP_SPIN_START) ? result + "spinStart " : result;

@ -275,6 +275,8 @@ enum EntityPropertyList {
PROP_GRAB_EQUIPPABLE_INDICATOR_SCALE,
PROP_GRAB_EQUIPPABLE_INDICATOR_OFFSET,

PROP_MATERIAL_REPEAT,

////////////////////////////////////////////////////////////////////////////////////////////////////
// ATTENTION: add new properties to end of list just ABOVE this line
PROP_AFTER_LAST_ITEM,
@ -486,6 +486,8 @@ QUuid EntityScriptingInterface::addEntity(const EntityItemProperties& properties

propertiesWithSimID.setLastEditedBy(sessionID);

propertiesWithSimID.setActionData(QByteArray());

bool scalesWithParent = propertiesWithSimID.getScalesWithParent();

propertiesWithSimID = convertPropertiesFromScriptSemantics(propertiesWithSimID, scalesWithParent);

@ -830,6 +832,8 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
properties.setClientOnly(entity->getClientOnly());
properties.setOwningAvatarID(entity->getOwningAvatarID());

properties.setActionData(entity->getDynamicData());

// make sure the properties has a type, so that the encode can know which properties to include
properties.setType(entity->getType());

@ -954,11 +958,6 @@ void EntityScriptingInterface::deleteEntity(QUuid id) {
const QUuid myNodeID = nodeList->getSessionUUID();
if (entity->getClientOnly() && entity->getOwningAvatarID() != myNodeID) {
// don't delete other avatar's avatarEntities
// If you actually own the entity but the onwership property is not set because of a domain switch
// The lines below makes sure the entity is deleted once its properties are set.
auto avatarHashMap = DependencyManager::get<AvatarHashMap>();
AvatarSharedPointer myAvatar = avatarHashMap->getAvatarBySessionID(myNodeID);
myAvatar->insertDetachedEntityID(id);
shouldSendDeleteToServer = false;
return;
}
@ -41,6 +41,7 @@ EntityItemProperties MaterialEntityItem::getProperties(const EntityPropertyFlags
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialMappingScale, getMaterialMappingScale);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialMappingRot, getMaterialMappingRot);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialData, getMaterialData);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(materialRepeat, getMaterialRepeat);
return properties;
}

@ -55,6 +56,7 @@ bool MaterialEntityItem::setProperties(const EntityItemProperties& properties) {
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialMappingScale, setMaterialMappingScale);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialMappingRot, setMaterialMappingRot);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialData, setMaterialData);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(materialRepeat, setMaterialRepeat);

if (somethingChanged) {
bool wantDebug = false;

@ -85,6 +87,7 @@ int MaterialEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* da
READ_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_SCALE, glm::vec2, setMaterialMappingScale);
READ_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_ROT, float, setMaterialMappingRot);
READ_ENTITY_PROPERTY(PROP_MATERIAL_DATA, QString, setMaterialData);
READ_ENTITY_PROPERTY(PROP_MATERIAL_REPEAT, bool, setMaterialRepeat);

return bytesRead;
}

@ -99,6 +102,7 @@ EntityPropertyFlags MaterialEntityItem::getEntityProperties(EncodeBitstreamParam
requestedProperties += PROP_MATERIAL_MAPPING_SCALE;
requestedProperties += PROP_MATERIAL_MAPPING_ROT;
requestedProperties += PROP_MATERIAL_DATA;
requestedProperties += PROP_MATERIAL_REPEAT;
return requestedProperties;
}

@ -119,6 +123,7 @@ void MaterialEntityItem::appendSubclassData(OctreePacketData* packetData, Encode
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_SCALE, getMaterialMappingScale());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_MAPPING_ROT, getMaterialMappingRot());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_DATA, getMaterialData());
APPEND_ENTITY_PROPERTY(PROP_MATERIAL_REPEAT, getMaterialRepeat());
}

void MaterialEntityItem::debugDump() const {

@ -128,6 +133,7 @@ void MaterialEntityItem::debugDump() const {
qCDebug(entities) << " material url:" << _materialURL;
qCDebug(entities) << " current material name:" << _currentMaterialName.c_str();
qCDebug(entities) << " material mapping mode:" << _materialMappingMode;
qCDebug(entities) << " material repeat:" << _materialRepeat;
qCDebug(entities) << " priority:" << _priority;
qCDebug(entities) << " parent material name:" << _parentMaterialName;
qCDebug(entities) << " material mapping pos:" << _materialMappingPos;

@ -140,7 +146,12 @@ void MaterialEntityItem::debugDump() const {
}

void MaterialEntityItem::setUnscaledDimensions(const glm::vec3& value) {
EntityItem::setUnscaledDimensions(ENTITY_ITEM_DEFAULT_DIMENSIONS);
_desiredDimensions = value;
if (_materialMappingMode == MaterialMappingMode::UV) {
EntityItem::setUnscaledDimensions(ENTITY_ITEM_DEFAULT_DIMENSIONS);
} else if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
EntityItem::setUnscaledDimensions(value);
}
}

std::shared_ptr<NetworkMaterial> MaterialEntityItem::getMaterial() const {

@ -208,6 +219,23 @@ void MaterialEntityItem::setMaterialData(const QString& materialData) {
}
}

void MaterialEntityItem::setMaterialMappingMode(MaterialMappingMode mode) {
if (_materialMappingMode != mode) {
removeMaterial();
_materialMappingMode = mode;
setUnscaledDimensions(_desiredDimensions);
applyMaterial();
}
}

void MaterialEntityItem::setMaterialRepeat(bool repeat) {
if (_materialRepeat != repeat) {
removeMaterial();
_materialRepeat = repeat;
applyMaterial();
}
}

void MaterialEntityItem::setMaterialMappingPos(const glm::vec2& materialMappingPos) {
if (_materialMappingPos != materialMappingPos) {
removeMaterial();

@ -256,6 +284,22 @@ void MaterialEntityItem::setParentID(const QUuid& parentID) {
}
}

void MaterialEntityItem::locationChanged(bool tellPhysics) {
EntityItem::locationChanged();
if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
removeMaterial();
applyMaterial();
}
}

void MaterialEntityItem::dimensionsChanged() {
EntityItem::dimensionsChanged();
if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
removeMaterial();
applyMaterial();
}
}

void MaterialEntityItem::removeMaterial() {
graphics::MaterialPointer material = getMaterial();
if (!material) {

@ -289,11 +333,19 @@ void MaterialEntityItem::applyMaterial() {
if (!material || parentID.isNull()) {
return;
}

Transform textureTransform;
textureTransform.setTranslation(glm::vec3(_materialMappingPos, 0.0f));
textureTransform.setRotation(glm::vec3(0.0f, 0.0f, glm::radians(_materialMappingRot)));
textureTransform.setScale(glm::vec3(_materialMappingScale, 1.0f));
material->setTextureTransforms(textureTransform);
if (_materialMappingMode == MaterialMappingMode::UV) {
textureTransform.setTranslation(glm::vec3(_materialMappingPos, 0.0f));
textureTransform.setRotation(glm::vec3(0.0f, 0.0f, glm::radians(_materialMappingRot)));
textureTransform.setScale(glm::vec3(_materialMappingScale, 1.0f));
} else if (_materialMappingMode == MaterialMappingMode::PROJECTED) {
textureTransform = getTransform();
textureTransform.postScale(getUnscaledDimensions());
// Pass the inverse transform here so we don't need to compute it in the shaders
textureTransform.evalFromRawMatrix(textureTransform.getInverseMatrix());
}
material->setTextureTransforms(textureTransform, _materialMappingMode, _materialRepeat);

graphics::MaterialLayer materialLayer = graphics::MaterialLayer(material, getPriority());
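Note: in projected mode the texture transform is no longer built from the UV offset/rotation/scale properties; it becomes the inverse of the entity's own transform scaled by its dimensions, so the shader can map world positions straight into material space. A rough sketch of that construction with plain glm (the engine uses its own Transform class; the function and parameter names below are illustrative only):

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    // Sketch: inverse entity transform for projecting world-space points into a
    // box-shaped Material entity's local space. Assumes translation * rotation * scale.
    glm::mat4 projectedTextureTransform(const glm::vec3& position,
                                        const glm::quat& rotation,
                                        const glm::vec3& dimensions) {
        glm::mat4 entity = glm::translate(glm::mat4(1.0f), position) *
                           glm::mat4_cast(rotation) *
                           glm::scale(glm::mat4(1.0f), dimensions);
        // The diff stores the inverse so shaders don't have to invert it per fragment.
        return glm::inverse(entity);
    }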
@ -58,7 +58,10 @@ public:
void setCurrentMaterialName(const std::string& currentMaterialName);

MaterialMappingMode getMaterialMappingMode() const { return _materialMappingMode; }
void setMaterialMappingMode(MaterialMappingMode mode) { _materialMappingMode = mode; }
void setMaterialMappingMode(MaterialMappingMode mode);

bool getMaterialRepeat() const { return _materialRepeat; }
void setMaterialRepeat(bool repeat);

quint16 getPriority() const { return _priority; }
void setPriority(quint16 priority);

@ -80,6 +83,9 @@ public:

void setParentID(const QUuid& parentID) override;

void locationChanged(bool tellPhysics) override;
void dimensionsChanged() override;

void applyMaterial();
void removeMaterial();

@ -104,8 +110,10 @@ private:
// emissiveMap, albedoMap (set opacityMap = albedoMap for transparency), metallicMap or specularMap, roughnessMap or glossMap,
// normalMap or bumpMap, occlusionMap, lightmapMap (broken, FIXME), scatteringMap (only works if normal mapped)
QString _materialURL;
// Type of material. "uv" or "projected". NOT YET IMPLEMENTED, only UV is used
// Type of material. "uv" or "projected".
MaterialMappingMode _materialMappingMode { UV };
bool _materialRepeat { true };
glm::vec3 _desiredDimensions;
// Priority for this material when applying it to its parent. Only the highest priority material will be used. Materials with the same priority are (essentially) randomly sorted.
// Base materials that come with models always have priority 0.
quint16 _priority { 0 };

@ -33,7 +33,7 @@ using NormalType = glm::vec3;
#define FBX_NORMAL_ELEMENT gpu::Element::VEC3F_XYZ
#endif

// See comment in FBXReader::parseFBX().
// See comment in FBXSerializer::parseFBX().
static const int FBX_HEADER_BYTES_BEFORE_VERSION = 23;
static const QByteArray FBX_BINARY_PROLOG("Kaydara FBX Binary ");
static const QByteArray FBX_BINARY_PROLOG2("\0\x1a\0", 3);
@ -1,6 +1,6 @@
//
// FBXReader.cpp
// interface/src/renderer
// FBXSerializer.cpp
// libraries/fbx/src
//
// Created by Andrzej Kapolka on 9/18/13.
// Copyright 2013 High Fidelity, Inc.

@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "FBXReader.h"
#include "FBXSerializer.h"

#include <iostream>
#include <QBuffer>

@ -36,7 +36,7 @@
#include <hfm/ModelFormatLogging.h>

// TOOL: Uncomment the following line to enable the filtering of all the unkwnon fields of a node so we can break point easily while loading a model with problems...
//#define DEBUG_FBXREADER
//#define DEBUG_FBXSERIALIZER

using namespace std;

@ -254,13 +254,13 @@ HFMBlendshape extractBlendshape(const FBXNode& object) {
HFMBlendshape blendshape;
foreach (const FBXNode& data, object.children) {
if (data.name == "Indexes") {
blendshape.indices = FBXReader::getIntVector(data);
blendshape.indices = FBXSerializer::getIntVector(data);

} else if (data.name == "Vertices") {
blendshape.vertices = FBXReader::createVec3Vector(FBXReader::getDoubleVector(data));
blendshape.vertices = FBXSerializer::createVec3Vector(FBXSerializer::getDoubleVector(data));

} else if (data.name == "Normals") {
blendshape.normals = FBXReader::createVec3Vector(FBXReader::getDoubleVector(data));
blendshape.normals = FBXSerializer::createVec3Vector(FBXSerializer::getDoubleVector(data));
}
}
return blendshape;

@ -384,7 +384,7 @@ HFMLight extractLight(const FBXNode& object) {
if (propname == "Intensity") {
light.intensity = 0.01f * property.properties.at(valIndex).value<float>();
} else if (propname == "Color") {
light.color = FBXReader::getVec3(property.properties, valIndex);
light.color = FBXSerializer::getVec3(property.properties, valIndex);
}
}
}

@ -392,7 +392,7 @@ HFMLight extractLight(const FBXNode& object) {
|| subobject.name == "TypeFlags") {
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)

QString type = object.properties.at(0).toString();
type = object.properties.at(1).toString();

@ -417,7 +417,31 @@ QByteArray fileOnUrl(const QByteArray& filepath, const QString& url) {
return filepath.mid(filepath.lastIndexOf('/') + 1);
}

HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString& url) {
QMap<QString, glm::quat> getJointRotationOffsets(const QVariantHash& mapping) {
QMap<QString, glm::quat> jointRotationOffsets;
static const QString JOINT_ROTATION_OFFSET_FIELD = "jointRotationOffset";
if (!mapping.isEmpty() && mapping.contains(JOINT_ROTATION_OFFSET_FIELD) && mapping[JOINT_ROTATION_OFFSET_FIELD].type() == QVariant::Hash) {
auto offsets = mapping[JOINT_ROTATION_OFFSET_FIELD].toHash();
for (auto itr = offsets.begin(); itr != offsets.end(); itr++) {
QString jointName = itr.key();
QString line = itr.value().toString();
auto quatCoords = line.split(',');
if (quatCoords.size() == 4) {
float quatX = quatCoords[0].mid(1).toFloat();
float quatY = quatCoords[1].toFloat();
float quatZ = quatCoords[2].toFloat();
float quatW = quatCoords[3].mid(0, quatCoords[3].size() - 1).toFloat();
if (!isNaN(quatX) && !isNaN(quatY) && !isNaN(quatZ) && !isNaN(quatW)) {
glm::quat rotationOffset = glm::quat(quatW, quatX, quatY, quatZ);
jointRotationOffsets.insert(jointName, rotationOffset);
}
}
}
}
return jointRotationOffsets;
}

HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QString& url) {
const FBXNode& node = _rootNode;
QMap<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;

@ -488,7 +512,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
QMultiHash<QString, WeightedIndex> blendshapeChannelIndices;
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
int unknown = 0;
#endif
HFMModel* hfmModelPtr = new HFMModel;

@ -736,7 +760,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
extractBlendshape(subobject) };
blendshapes.append(blendshape);
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else if (subobject.name == "TypeFlags") {
QString attributetype = subobject.properties.at(0).toString();
if (!attributetype.empty()) {

@ -862,7 +886,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
tex.scaling.z = 1.0f;
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString propName = v;
unknown++;

@ -871,7 +895,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
if (subobject.name == "Type") {
} else if (subobject.name == "Version") {

@ -1044,7 +1068,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString propname = subobject.name.data();
int unknown = 0;

@ -1061,7 +1085,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&


} else if (object.name == "NodeAttribute") {
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
std::vector<QString> properties;
foreach(const QVariant& v, object.properties) {
properties.push_back(v.toString());

@ -1124,7 +1148,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
animationCurves.insert(getID(object.properties), curve);

}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString objectname = object.name.data();
if ( objectname == "Pose"

@ -1215,7 +1239,7 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}
#if defined(DEBUG_FBXREADER)
#if defined(DEBUG_FBXSERIALIZER)
else {
QString objectname = child.name.data();
if ( objectname == "Pose"

@ -1793,20 +1817,27 @@ HFMModel* FBXReader::extractHFMModel(const QVariantHash& mapping, const QString&
}
}
}

auto offsets = getJointRotationOffsets(mapping);
hfmModel.jointRotationOffsets.clear();
for (auto itr = offsets.begin(); itr != offsets.end(); itr++) {
QString jointName = itr.key();
glm::quat rotationOffset = itr.value();
int jointIndex = hfmModel.getJointIndex(jointName);
if (jointIndex != -1) {
hfmModel.jointRotationOffsets.insert(jointIndex, rotationOffset);
}
qCDebug(modelformat) << "Joint Rotation Offset added to Rig._jointRotationOffsets : " << " jointName: " << jointName << " jointIndex: " << jointIndex << " rotation offset: " << rotationOffset;
}

return hfmModelPtr;
}

HFMModel* readFBX(const QByteArray& data, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
HFMModel::Pointer FBXSerializer::read(const QByteArray& data, const QVariantHash& mapping, const QUrl& url) {
QBuffer buffer(const_cast<QByteArray*>(&data));
buffer.open(QIODevice::ReadOnly);
return readFBX(&buffer, mapping, url, loadLightmaps, lightmapLevel);
}

HFMModel* readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
FBXReader reader;
reader._rootNode = FBXReader::parseFBX(device);
reader._loadLightmaps = loadLightmaps;
reader._lightmapLevel = lightmapLevel;
_rootNode = parseFBX(&buffer);

return reader.extractHFMModel(mapping, url);
return HFMModel::Pointer(extractHFMModel(mapping, url.toString()));
}
Some files were not shown because too many files have changed in this diff.