Merge branch 'master' into M22052

This commit is contained in:
David Rowe 2019-04-11 10:24:27 +12:00
commit 617617736e
155 changed files with 4666 additions and 2958 deletions

View file

@ -33,7 +33,7 @@
#include <NodeType.h>
#include <SharedUtil.h>
#include <PathUtils.h>
#include <image/Image.h>
#include <image/TextureProcessing.h>
#include "AssetServerLogging.h"
#include "BakeAssetTask.h"

View file

@ -98,7 +98,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
PacketType::RequestsDomainListData,
PacketType::PerAvatarGainSet,
PacketType::InjectorGainSet,
PacketType::AudioSoloRequest },
PacketType::AudioSoloRequest,
PacketType::StopInjector },
this, "queueAudioPacket");
// packets whose consequences are global should be processed on the main thread
@ -246,7 +247,8 @@ void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
if (injectorClientData) {
// stage the removal of this stream, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(), injectorClientData->getNodeLocalID(),
_workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(),
injectorClientData->getNodeLocalID(),
streamID);
}
}

View file

@ -104,6 +104,9 @@ int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
case PacketType::AudioSoloRequest:
parseSoloRequest(packet, node);
break;
case PacketType::StopInjector:
parseStopInjectorPacket(packet);
break;
default:
Q_UNREACHABLE();
}
@ -574,6 +577,19 @@ int AudioMixerClientData::checkBuffersBeforeFrameSend() {
return (int)_audioStreams.size();
}
void AudioMixerClientData::parseStopInjectorPacket(QSharedPointer<ReceivedMessage> packet) {
auto streamID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto it = std::find_if(std::begin(_audioStreams), std::end(_audioStreams), [&](auto stream) {
return streamID == stream->getStreamIdentifier();
});
if (it != std::end(_audioStreams)) {
_audioStreams.erase(it);
emit injectorStreamFinished(streamID);
}
}
bool AudioMixerClientData::shouldSendStats(int frameNumber) {
return frameNumber == _frameToSendStats;
}

View file

@ -67,12 +67,11 @@ public:
void parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseStopInjectorPacket(QSharedPointer<ReceivedMessage> packet);
// attempt to pop a frame from each audio stream, and return the number of streams from this client
int checkBuffersBeforeFrameSend();
void removeDeadInjectedStreams();
QJsonObject getAudioStreamStats();
void sendAudioStreamStatsPackets(const SharedNodePointer& destinationNode);
@ -163,7 +162,7 @@ public:
// end of methods called non-concurrently from single AudioMixerSlave
signals:
void injectorStreamFinished(const QUuid& streamIdentifier);
void injectorStreamFinished(const QUuid& streamID);
public slots:
void handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec);

View file

@ -53,10 +53,5 @@ macro(add_crashpad)
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy ${CRASHPAD_HANDLER_EXE_PATH} "$<TARGET_FILE_DIR:${TARGET_NAME}>/"
)
install(
PROGRAMS ${CRASHPAD_HANDLER_EXE_PATH}
DESTINATION ${INTERFACE_INSTALL_DIR}
COMPONENT ${CLIENT_COMPONENT}
)
endif ()
endmacro()

View file

@ -0,0 +1,74 @@
#
# Copyright 2015 High Fidelity, Inc.
# Created by Olivier Prat on 2019/03/26
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_OPENEXR)
if (NOT ANDROID)
set(openexr_config_file "${VCPKG_INSTALL_ROOT}/include/OpenEXR/OpenEXRConfig.h")
if(EXISTS ${openexr_config_file})
file(STRINGS
${openexr_config_file}
TMP
REGEX "#define OPENEXR_VERSION_STRING.*$")
string(REGEX MATCHALL "[0-9.]+" OPENEXR_VERSION ${TMP})
file(STRINGS
${openexr_config_file}
TMP
REGEX "#define OPENEXR_VERSION_MAJOR.*$")
string(REGEX MATCHALL "[0-9]" OPENEXR_MAJOR_VERSION ${TMP})
file(STRINGS
${openexr_config_file}
TMP
REGEX "#define OPENEXR_VERSION_MINOR.*$")
string(REGEX MATCHALL "[0-9]" OPENEXR_MINOR_VERSION ${TMP})
endif()
foreach(OPENEXR_LIB
IlmImf
IlmImfUtil
Half
Iex
IexMath
Imath
IlmThread)
# OpenEXR libraries may be suffixed with the version number, so we search
# using both versioned and unversioned names.
find_library(OPENEXR_${OPENEXR_LIB}_LIBRARY_RELEASE
NAMES
${OPENEXR_LIB}-${OPENEXR_MAJOR_VERSION}_${OPENEXR_MINOR_VERSION}_s
${OPENEXR_LIB}_s
PATHS ${VCPKG_INSTALL_ROOT}/lib NO_DEFAULT_PATH
)
#mark_as_advanced(OPENEXR_${OPENEXR_LIB}_LIBRARY)
if(OPENEXR_${OPENEXR_LIB}_LIBRARY_RELEASE)
list(APPEND OPENEXR_LIBRARY_RELEASE ${OPENEXR_${OPENEXR_LIB}_LIBRARY_RELEASE})
endif()
# OpenEXR libraries may be suffixed with the version number, so we search
# using both versioned and unversioned names.
find_library(OPENEXR_${OPENEXR_LIB}_LIBRARY_DEBUG
NAMES
${OPENEXR_LIB}-${OPENEXR_MAJOR_VERSION}_${OPENEXR_MINOR_VERSION}_s_d
${OPENEXR_LIB}_s_d
PATHS ${VCPKG_INSTALL_ROOT}/debug/lib NO_DEFAULT_PATH
)
#mark_as_advanced(OPENEXR_${OPENEXR_LIB}_DEBUG_LIBRARY)
if(OPENEXR_${OPENEXR_LIB}_LIBRARY_DEBUG)
list(APPEND OPENEXR_LIBRARY_DEBUG ${OPENEXR_${OPENEXR_LIB}_LIBRARY_DEBUG})
endif()
endforeach(OPENEXR_LIB)
select_library_configurations(OPENEXR)
target_link_libraries(${TARGET_NAME} ${OPENEXR_LIBRARY})
endif()
endmacro()

View file

@ -1,4 +1,4 @@
Source: hifi-deps
Version: 0
Version: 0.1
Description: Collected dependencies for High Fidelity applications
Build-Depends: bullet3, draco, etc2comp, glm, nvtt, openssl (windows), tbb (!android&!osx), zlib
Build-Depends: bullet3, draco, etc2comp, glm, nvtt, openexr (!android), openssl (windows), tbb (!android&!osx), zlib

View file

@ -0,0 +1,4 @@
Source: openexr
Version: 2.3.0-2
Description: OpenEXR is a high dynamic-range (HDR) image file format developed by Industrial Light & Magic for use in computer imaging applications
Build-Depends: zlib

View file

@ -0,0 +1,87 @@
include(FindPackageHandleStandardArgs)
find_path(OpenEXR_INCLUDE_DIRS OpenEXR/OpenEXRConfig.h)
find_path(OPENEXR_INCLUDE_PATHS NAMES ImfRgbaFile.h PATH_SUFFIXES OpenEXR)
file(STRINGS "${OpenEXR_INCLUDE_DIRS}/OpenEXR/OpenEXRConfig.h" OPENEXR_CONFIG_H)
string(REGEX REPLACE "^.*define OPENEXR_VERSION_MAJOR ([0-9]+).*$" "\\1" OpenEXR_VERSION_MAJOR "${OPENEXR_CONFIG_H}")
string(REGEX REPLACE "^.*define OPENEXR_VERSION_MINOR ([0-9]+).*$" "\\1" OpenEXR_VERSION_MINOR "${OPENEXR_CONFIG_H}")
set(OpenEXR_LIB_SUFFIX "${OpenEXR_VERSION_MAJOR}_${OpenEXR_VERSION_MINOR}")
include(SelectLibraryConfigurations)
if(NOT OpenEXR_BASE_LIBRARY)
find_library(OpenEXR_BASE_LIBRARY_RELEASE NAMES IlmImf-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_BASE_LIBRARY_DEBUG NAMES IlmImf-${OpenEXR_LIB_SUFFIX}_d)
select_library_configurations(OpenEXR_BASE)
endif()
if(NOT OpenEXR_UTIL_LIBRARY)
find_library(OpenEXR_UTIL_LIBRARY_RELEASE NAMES IlmImfUtil-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_UTIL_LIBRARY_DEBUG NAMES IlmImfUtil-${OpenEXR_LIB_SUFFIX}_d)
select_library_configurations(OpenEXR_UTIL)
endif()
if(NOT OpenEXR_HALF_LIBRARY)
find_library(OpenEXR_HALF_LIBRARY_RELEASE NAMES Half-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_HALF_LIBRARY_DEBUG NAMES Half-${OpenEXR_LIB_SUFFIX}_d)
select_library_configurations(OpenEXR_HALF)
endif()
if(NOT OpenEXR_IEX_LIBRARY)
find_library(OpenEXR_IEX_LIBRARY_RELEASE NAMES Iex-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_IEX_LIBRARY_DEBUG NAMES Iex-${OpenEXR_LIB_SUFFIX}_d)
select_library_configurations(OpenEXR_IEX)
endif()
if(NOT OpenEXR_MATH_LIBRARY)
find_library(OpenEXR_MATH_LIBRARY_RELEASE NAMES Imath-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_MATH_LIBRARY_DEBUG NAMES Imath-${OpenEXR_LIB_SUFFIX}_d)
select_library_configurations(OpenEXR_MATH)
endif()
if(NOT OpenEXR_THREAD_LIBRARY)
find_library(OpenEXR_THREAD_LIBRARY_RELEASE NAMES IlmThread-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_THREAD_LIBRARY_DEBUG NAMES IlmThread-${OpenEXR_LIB_SUFFIX}_d)
select_library_configurations(OpenEXR_THREAD)
endif()
if(NOT OpenEXR_IEXMATH_LIBRARY)
find_library(OpenEXR_IEXMATH_LIBRARY_RELEASE NAMES IexMath-${OpenEXR_LIB_SUFFIX})
find_library(OpenEXR_IEXMATH_LIBRARY_DEBUG NAMES IexMath-${OpenEXR_LIB_SUFFIX}d)
select_library_configurations(OpenEXR_IEXMATH)
endif()
set(OPENEXR_HALF_LIBRARY "${OpenEXR_HALF_LIBRARY}")
set(OPENEXR_IEX_LIBRARY "${OpenEXR_IEX_LIBRARY}")
set(OPENEXR_IMATH_LIBRARY "${OpenEXR_MATH_LIBRARY}")
set(OPENEXR_ILMIMF_LIBRARY "${OpenEXR_BASE_LIBRARY}")
set(OPENEXR_ILMIMFUTIL_LIBRARY "${OpenEXR_UTIL_LIBRARY}")
set(OPENEXR_ILMTHREAD_LIBRARY "${OpenEXR_THREAD_LIBRARY}")
set(OpenEXR_LIBRARY "${OpenEXR_BASE_LIBRARY}")
set(OpenEXR_LIBRARIES
${OpenEXR_LIBRARY}
${OpenEXR_MATH_LIBRARY}
${OpenEXR_IEXMATH_LIBRARY}
${OpenEXR_UTIL_LIBRARY}
${OpenEXR_HALF_LIBRARY}
${OpenEXR_IEX_LIBRARY}
${OpenEXR_THREAD_LIBRARY}
)
set(OPENEXR_LIBRARIES
${OPENEXR_HALF_LIBRARY}
${OPENEXR_IEX_LIBRARY}
${OPENEXR_IMATH_LIBRARY}
${OPENEXR_ILMIMF_LIBRARY}
${OPENEXR_ILMTHREAD_LIBRARY}
)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OpenEXR REQUIRED_VARS OpenEXR_LIBRARIES OpenEXR_INCLUDE_DIRS)
if(OpenEXR_FOUND)
set(OPENEXR_FOUND 1)
endif()

View file

@ -0,0 +1,19 @@
diff --git a/OpenEXR/IlmImf/CMakeLists.txt b/OpenEXR/IlmImf/CMakeLists.txt
index e1a8740..d31cf68 100644
--- a/OpenEXR/IlmImf/CMakeLists.txt
+++ b/OpenEXR/IlmImf/CMakeLists.txt
@@ -2,14 +2,6 @@
SET(CMAKE_INCLUDE_CURRENT_DIR 1)
-IF (WIN32)
- SET(RUNTIME_DIR ${OPENEXR_PACKAGE_PREFIX}/bin)
- SET(WORKING_DIR ${RUNTIME_DIR})
-ELSE ()
- SET(RUNTIME_DIR ${OPENEXR_PACKAGE_PREFIX}/lib)
- SET(WORKING_DIR .)
-ENDIF ()
-
SET(BUILD_B44EXPLOGTABLE OFF)
IF (NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}/b44ExpLogTable.h")
SET(BUILD_B44EXPLOGTABLE ON)

View file

@ -0,0 +1,74 @@
include(vcpkg_common_functions)
set(OPENEXR_VERSION 2.3.0)
set(OPENEXR_HASH 268ae64b40d21d662f405fba97c307dad1456b7d996a447aadafd41b640ca736d4851d9544b4741a94e7b7c335fe6e9d3b16180e710671abfc0c8b2740b147b2)
vcpkg_from_github(
OUT_SOURCE_PATH SOURCE_PATH
REPO openexr/openexr
REF v${OPENEXR_VERSION}
SHA512 ${OPENEXR_HASH}
HEAD_REF master
PATCHES "fix_install_ilmimf.patch"
)
set(OPENEXR_STATIC ON)
set(OPENEXR_SHARED OFF)
vcpkg_configure_cmake(SOURCE_PATH ${SOURCE_PATH}
PREFER_NINJA
OPTIONS
-DOPENEXR_BUILD_PYTHON_LIBS=OFF
-DOPENEXR_BUILD_VIEWERS=OFF
-DOPENEXR_ENABLE_TESTS=OFF
-DOPENEXR_RUN_FUZZ_TESTS=OFF
-DOPENEXR_BUILD_SHARED=${OPENEXR_SHARED}
-DOPENEXR_BUILD_STATIC=${OPENEXR_STATIC}
OPTIONS_DEBUG
-DILMBASE_PACKAGE_PREFIX=${CURRENT_INSTALLED_DIR}/debug
OPTIONS_RELEASE
-DILMBASE_PACKAGE_PREFIX=${CURRENT_INSTALLED_DIR})
vcpkg_install_cmake()
file(REMOVE_RECURSE ${CURRENT_PACKAGES_DIR}/debug/include)
file(REMOVE_RECURSE ${CURRENT_PACKAGES_DIR}/debug/share)
# NOTE: Only use ".exe" extension on Windows executables.
# Is there a cleaner way to do this?
if(WIN32)
set(EXECUTABLE_SUFFIX ".exe")
else()
set(EXECUTABLE_SUFFIX "")
endif()
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrenvmap${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrheader${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrmakepreview${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrmaketiled${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrmultipart${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrmultiview${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/debug/bin/exrstdattr${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrenvmap${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrheader${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrmakepreview${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrmaketiled${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrmultipart${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrmultiview${EXECUTABLE_SUFFIX})
file(REMOVE ${CURRENT_PACKAGES_DIR}/bin/exrstdattr${EXECUTABLE_SUFFIX})
vcpkg_copy_pdbs()
if (OPENEXR_STATIC)
file(REMOVE_RECURSE ${CURRENT_PACKAGES_DIR}/bin ${CURRENT_PACKAGES_DIR}/debug/bin)
endif()
if (VCPKG_CMAKE_SYSTEM_NAME STREQUAL "Linux")
set(OPENEXR_PORT_DIR "openexr")
else()
set(OPENEXR_PORT_DIR "OpenEXR")
endif()
file(COPY ${SOURCE_PATH}/LICENSE DESTINATION ${CURRENT_PACKAGES_DIR}/share/${OPENEXR_PORT_DIR})
file(RENAME ${CURRENT_PACKAGES_DIR}/share/${OPENEXR_PORT_DIR}/LICENSE ${CURRENT_PACKAGES_DIR}/share/${OPENEXR_PORT_DIR}/copyright)
file(COPY ${CMAKE_CURRENT_LIST_DIR}/FindOpenEXR.cmake DESTINATION ${CURRENT_PACKAGES_DIR}/share/${OPENEXR_PORT_DIR})

View file

@ -481,3 +481,15 @@ function prepareAccessTokenPrompt(callback) {
swal.close();
});
}
function getMetaverseUrl(callback) {
$.ajax('/api/metaverse_info', {
success: function(data) {
callback(data.metaverse_url);
},
error: function() {
callback(URLs.METAVERSE_URL);
}
});
}

View file

@ -16,47 +16,55 @@ $(document).ready(function(){
Settings.extraGroupsAtEnd = Settings.extraDomainGroupsAtEnd;
Settings.extraGroupsAtIndex = Settings.extraDomainGroupsAtIndex;
var METAVERSE_URL = URLs.METAVERSE_URL;
Settings.afterReloadActions = function() {
// call our method to setup the HF account button
setupHFAccountButton();
// call our method to setup the place names table
setupPlacesTable();
getMetaverseUrl(function(metaverse_url) {
METAVERSE_URL = metaverse_url;
setupDomainNetworkingSettings();
// setupDomainLabelSetting();
// call our method to setup the HF account button
setupHFAccountButton();
setupSettingsBackup();
// call our method to setup the place names table
setupPlacesTable();
if (domainIDIsSet()) {
// now, ask the API for what places, if any, point to this domain
reloadDomainInfo();
setupDomainNetworkingSettings();
// setupDomainLabelSetting();
// we need to ask the API what a shareable name for this domain is
getShareName(function(success, shareName) {
if (success) {
var shareLink = "https://hifi.place/" + shareName;
$('#visit-domain-link').attr("href", shareLink).show();
}
});
}
setupSettingsBackup();
if (Settings.data.values.wizard.cloud_domain) {
$('#manage-cloud-domains-link').show();
if (domainIDIsSet()) {
// now, ask the API for what places, if any, point to this domain
reloadDomainInfo();
var cloudWizardExit = qs["cloud-wizard-exit"];
if (cloudWizardExit != undefined) {
$('#cloud-domains-alert').show();
// we need to ask the API what a shareable name for this domain is
getShareName(function(success, shareName) {
if (success) {
var shareLink = "https://hifi.place/" + shareName;
$('#visit-domain-link').attr("href", shareLink).show();
}
});
} else if (accessTokenIsSet()) {
$('#' + Settings.GET_TEMPORARY_NAME_BTN_ID).show();
}
$(Settings.DOMAIN_ID_SELECTOR).siblings('span').append("</br><strong>Changing the domain ID for a Cloud Domain may result in an incorrect status for the domain on your Cloud Domains page.</strong>");
} else {
// append the domain selection modal
appendDomainIDButtons();
}
if (Settings.data.values.wizard.cloud_domain) {
$('#manage-cloud-domains-link').show();
handleAction();
var cloudWizardExit = qs["cloud-wizard-exit"];
if (cloudWizardExit != undefined) {
$('#cloud-domains-alert').show();
}
$(Settings.DOMAIN_ID_SELECTOR).siblings('span').append("</br><strong>Changing the domain ID for a Cloud Domain may result in an incorrect status for the domain on your Cloud Domains page.</strong>");
} else {
// append the domain selection modal
appendDomainIDButtons();
}
handleAction();
});
}
Settings.handlePostSettings = function(formJSON) {
@ -258,7 +266,7 @@ $(document).ready(function(){
buttonSetting.button_label = "Connect High Fidelity Account";
buttonSetting.html_id = Settings.CONNECT_ACCOUNT_BTN_ID;
buttonSetting.href = URLs.METAVERSE_URL + "/user/tokens/new?for_domain_server=true";
buttonSetting.href = METAVERSE_URL + "/user/tokens/new?for_domain_server=true";
// since we do not have an access token we change hide domain ID and auto networking settings
// without an access token niether of them can do anything
@ -645,7 +653,7 @@ $(document).ready(function(){
label: 'Places',
html_id: Settings.PLACES_TABLE_ID,
help: "The following places currently point to this domain.</br>To point places to this domain, "
+ " go to the <a href='" + URLs.METAVERSE_URL + "/user/places'>My Places</a> "
+ " go to the <a href='" + METAVERSE_URL + "/user/places'>My Places</a> "
+ "page in your High Fidelity Metaverse account.",
read_only: true,
can_add_new_rows: false,
@ -678,12 +686,9 @@ $(document).ready(function(){
var errorEl = createDomainLoadingError("There was an error retrieving your places.");
$("#" + Settings.PLACES_TABLE_ID).after(errorEl);
// do we have a domain ID?
if (!domainIDIsSet()) {
// we don't have a domain ID - add a button to offer the user a chance to get a temporary one
var temporaryPlaceButton = dynamicButton(Settings.GET_TEMPORARY_NAME_BTN_ID, 'Get a temporary place name');
$('#' + Settings.PLACES_TABLE_ID).after(temporaryPlaceButton);
}
var temporaryPlaceButton = dynamicButton(Settings.GET_TEMPORARY_NAME_BTN_ID, 'Get a temporary place name');
temporaryPlaceButton.hide();
$('#' + Settings.PLACES_TABLE_ID).after(temporaryPlaceButton);
if (accessTokenIsSet()) {
appendAddButtonToPlacesTable();
}
@ -774,8 +779,9 @@ $(document).ready(function(){
// check if we have owner_places (for a real domain) or a name (for a temporary domain)
if (data.status == "success") {
$('#' + Settings.GET_TEMPORARY_NAME_BTN_ID).hide();
$('.domain-loading-hide').show();
if (data.domain.owner_places) {
if (data.domain.owner_places && data.domain.owner_places.length > 0) {
// add a table row for each of these names
_.each(data.domain.owner_places, function(place){
$('#' + Settings.PLACES_TABLE_ID + " tbody").append(placeTableRowForPlaceObject(place));
@ -783,8 +789,9 @@ $(document).ready(function(){
} else if (data.domain.name) {
// add a table row for this temporary domain name
$('#' + Settings.PLACES_TABLE_ID + " tbody").append(placeTableRow(data.domain.name, '/', true));
} else {
$('#' + Settings.GET_TEMPORARY_NAME_BTN_ID).show();
}
// Update label
if (showOrHideLabel()) {
var label = data.domain.label;
@ -953,7 +960,7 @@ $(document).ready(function(){
modal_buttons["success"] = {
label: 'Create new domain',
callback: function() {
window.open(URLs.METAVERSE_URL + "/user/domains", '_blank');
window.open(METAVERSE_URL + "/user/domains", '_blank');
}
}
modal_body = "<p>You do not have any domains in your High Fidelity account." +
@ -1001,7 +1008,7 @@ $(document).ready(function(){
showSpinnerAlert('Creating temporary place name');
// make a get request to get a temporary domain
$.post(URLs.METAVERSE_URL + '/api/v1/domains/temporary', function(data){
$.post(METAVERSE_URL + '/api/v1/domains/temporary', function(data){
if (data.status == "success") {
var domain = data.data.domain;

View file

@ -1916,6 +1916,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
const QString URI_SETTINGS = "/settings";
const QString URI_CONTENT_UPLOAD = "/content/upload";
const QString URI_RESTART = "/restart";
const QString URI_API_METAVERSE_INFO = "/api/metaverse_info";
const QString URI_API_PLACES = "/api/places";
const QString URI_API_DOMAINS = "/api/domains";
const QString URI_API_DOMAINS_ID = "/api/domains/";
@ -2164,6 +2165,15 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
} else if (url.path() == URI_RESTART) {
connection->respond(HTTPConnection::StatusCode200);
restart();
return true;
} else if (url.path() == URI_API_METAVERSE_INFO) {
QJsonObject rootJSON {
{ "metaverse_url", NetworkingConstants::METAVERSE_SERVER_URL().toString() }
};
QJsonDocument docJSON{ rootJSON };
connectionPtr->respond(HTTPConnection::StatusCode200, docJSON.toJson(), JSON_MIME_TYPE.toUtf8());
return true;
} else if (url.path() == URI_API_DOMAINS) {
return forwardMetaverseAPIRequest(connection, "/api/v1/domains", "");

View file

@ -13,11 +13,11 @@
{ "from": "OculusTouch.LY", "to": "Standard.LY",
"filters": [
{ "type": "deadZone", "min": 0.7 },
{ "type": "deadZone", "min": 0.15 },
"invert"
]
},
{ "from": "OculusTouch.LX", "filters": { "type": "deadZone", "min": 0.7 }, "to": "Standard.LX" },
{ "from": "OculusTouch.LX", "filters": { "type": "deadZone", "min": 0.15 }, "to": "Standard.LX" },
{ "from": "OculusTouch.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
@ -29,11 +29,11 @@
{ "from": "OculusTouch.RY", "to": "Standard.RY",
"filters": [
{ "type": "deadZone", "min": 0.7 },
{ "type": "deadZone", "min": 0.15 },
"invert"
]
},
{ "from": "OculusTouch.RX", "filters": { "type": "deadZone", "min": 0.7 }, "to": "Standard.RX" },
{ "from": "OculusTouch.RX", "filters": { "type": "deadZone", "min": 0.15 }, "to": "Standard.RX" },
{ "from": "OculusTouch.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]

View file

@ -1,11 +1,14 @@
{
"name": "Standard to Action",
"channels": [
{ "from": "Standard.LY", "to": "Actions.TranslateZ" },
{ "from": "Standard.LY",
"when": ["Application.RightHandDominant", "!Standard.RY"],
"to": "Actions.TranslateZ"
},
{ "from": "Standard.LX",
"when": [
"Application.InHMD", "!Application.AdvancedMovement",
"Application.InHMD", "!Application.AdvancedMovement", "Application.RightHandDominant",
"Application.SnapTurn", "!Standard.RX"
],
"to": "Actions.StepYaw",
@ -18,14 +21,14 @@
]
},
{ "from": "Standard.LX", "to": "Actions.TranslateX",
"when": [ "Application.AdvancedMovement" ]
"when": [ "Application.AdvancedMovement", "Application.StrafeEnabled", "Application.RightHandDominant" ]
},
{ "from": "Standard.LX", "to": "Actions.Yaw",
"when": [ "!Application.AdvancedMovement", "!Application.SnapTurn" ]
"when": [ "!Application.AdvancedMovement", "!Application.SnapTurn", "Application.RightHandDominant" ]
},
{ "from": "Standard.RX",
"when": [ "Application.SnapTurn" ],
"when": [ "Application.SnapTurn", "Application.RightHandDominant" ],
"to": "Actions.StepYaw",
"filters":
[
@ -36,20 +39,69 @@
]
},
{ "from": "Standard.RX", "to": "Actions.Yaw",
"when": [ "!Application.SnapTurn" ]
"when": [ "!Application.SnapTurn", "Application.RightHandDominant" ]
},
{ "from": "Standard.LeftSecondaryThumb",
"when": [ "Application.Grounded", "Application.LeftHandDominant" ],
"to": "Actions.Up"
},
{ "from": "Standard.LeftSecondaryThumb",
"when": "Application.LeftHandDominant",
"to": "Actions.Up"
},
{ "from": "Standard.RY",
"when": "Application.Grounded",
"to": "Actions.Up",
"when": ["Application.LeftHandDominant", "!Standard.LY"],
"to": "Actions.TranslateZ"
},
{ "from": "Standard.RX",
"when": [
"Application.InHMD", "!Application.AdvancedMovement", "Application.LeftHandDominant",
"Application.SnapTurn", "!Standard.RX"
],
"to": "Actions.StepYaw",
"filters":
[
{ "type": "deadZone", "min": 0.6 },
"invert"
{ "type": "deadZone", "min": 0.15 },
"constrainToInteger",
{ "type": "pulse", "interval": 0.25 },
{ "type": "scale", "scale": 22.5 }
]
},
{ "from": "Standard.RX", "to": "Actions.TranslateX",
"when": [ "Application.AdvancedMovement", "Application.StrafeEnabled", "Application.LeftHandDominant" ]
},
{ "from": "Standard.RX", "to": "Actions.Yaw",
"when": [ "!Application.AdvancedMovement", "!Application.SnapTurn", "Application.LeftHandDominant" ]
},
{ "from": "Standard.RY", "to": "Actions.Up", "filters": "invert"},
{ "from": "Standard.LX",
"when": [ "Application.SnapTurn", "Application.LeftHandDominant" ],
"to": "Actions.StepYaw",
"filters":
[
{ "type": "deadZone", "min": 0.15 },
"constrainToInteger",
{ "type": "pulse", "interval": 0.25 },
{ "type": "scale", "scale": 22.5 }
]
},
{ "from": "Standard.LX", "to": "Actions.Yaw",
"when": [ "!Application.SnapTurn", "Application.LeftHandDominant" ]
},
{ "from": "Standard.RightSecondaryThumb",
"when": [ "Application.Grounded", "Application.RightHandDominant" ],
"to": "Actions.Up"
},
{ "from": "Standard.RightSecondaryThumb",
"when": "Application.RightHandDominant",
"to": "Actions.Up"
},
{ "from": "Standard.Back", "to": "Actions.CycleCamera" },
{ "from": "Standard.Start", "to": "Actions.ContextMenu" },
@ -128,4 +180,4 @@
{ "from": "Standard.TrackedObject14", "to" : "Actions.TrackedObject14" },
{ "from": "Standard.TrackedObject15", "to" : "Actions.TrackedObject15" }
]
}
}

View file

@ -10,8 +10,9 @@
"filters": [ { "type": "hysteresis", "min": 0.7, "max": 0.75 } ]
},
{ "from": "Vive.LY", "when": "Vive.LSY", "filters": ["invert"], "to": "Standard.LY" },
{ "from": "Vive.LX", "when": "Vive.LSX", "to": "Standard.LX" },
{ "from": "Vive.LY", "when": "Vive.LS", "filters": [ { "type": "deadZone", "min": 0.15 }, "invert" ], "to": "Standard.LY" },
{ "from": "Vive.LX", "when": ["Vive.LS", "Application.RightHandDominant"], "filters": { "type": "deadZone", "min": 0.15 }, "to": "Standard.LX" },
{ "from": "Vive.LX", "when": ["Vive.LS", "Vive.LSX", "!Vive.LSY", "Application.LeftHandDominant"], "filters": { "type": "deadZone", "min": 0.15 }, "to": "Standard.LX" },
{
"from": "Vive.LT", "to": "Standard.LT",
"filters": [
@ -28,8 +29,9 @@
},
{ "from": "Vive.LSTouch", "to": "Standard.LSTouch" },
{ "from": "Vive.RY", "when": "Vive.RSY", "filters": ["invert"], "to": "Standard.RY" },
{ "from": "Vive.RX", "when": "Vive.RSX", "to": "Standard.RX" },
{ "from": "Vive.RY", "when": "Vive.RS", "filters": [ { "type": "deadZone", "min": 0.15 }, "invert" ], "to": "Standard.RY" },
{ "from": "Vive.RX", "when": ["Vive.RS", "Application.LeftHandDominant"], "filters": { "type": "deadZone", "min": 0.15 }, "to": "Standard.RX" },
{ "from": "Vive.RX", "when": ["Vive.RS", "Vive.RSX", "!Vive.RSY", "Application.RightHandDominant"], "filters": { "type": "deadZone", "min": 0.15 }, "to": "Standard.RX" },
{
"from": "Vive.RT", "to": "Standard.RT",
"filters": [

Binary file not shown.

Before

Width:  |  Height:  |  Size: 91 KiB

After

Width:  |  Height:  |  Size: 246 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 82 KiB

After

Width:  |  Height:  |  Size: 331 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 107 KiB

After

Width:  |  Height:  |  Size: 308 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 92 KiB

After

Width:  |  Height:  |  Size: 229 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 94 KiB

After

Width:  |  Height:  |  Size: 267 KiB

View file

@ -53,6 +53,16 @@
position: absolute;
top: 0; left: 0; bottom: 0; right: 0;
}
#image_button {
position: absolute;
width: 463;
height: 410;
top: 155;
left: 8;
right: 8;
bottom: 146;
}
#report_problem {
position: fixed;
@ -67,17 +77,23 @@
var handControllerImageURL = null;
var index = 0;
var count = 3;
var handControllerRefURL = "https://docs.highfidelity.com/en/rc81/explore/get-started/vr-controls.html#vr-controls";
var keyboardRefURL = "https://docs.highfidelity.com/en/rc81/explore/get-started/desktop.html#movement-controls";
var gamepadRefURL = "https://docs.highfidelity.com/en/rc81/explore/get-started/vr-controls.html#gamepad";
function showKbm() {
document.getElementById("main_image").setAttribute("src", "img/tablet-help-keyboard.jpg");
document.getElementById("image_button").setAttribute("href", keyboardRefURL);
}
function showHandControllers() {
document.getElementById("main_image").setAttribute("src", handControllerImageURL);
document.getElementById("image_button").setAttribute("href", handControllerRefURL);
}
function showGamepad() {
document.getElementById("main_image").setAttribute("src", "img/tablet-help-gamepad.jpg");
document.getElementById("image_button").setAttribute("href", gamepadRefURL);
}
function cycleRight() {
@ -171,6 +187,7 @@
<img id="main_image" src="img/tablet-help-keyboard.jpg" width="480px" height="720px"></img>
<a href="#" id="left_button" onmousedown="cycleLeft()"></a>
<a href="#" id="right_button" onmousedown="cycleRight()"></a>
<a href="#" id="image_button"></a>
</div>
<a href="mailto:support@highfidelity.com" id="report_problem">Report Problem</a>
</body>

View file

@ -336,6 +336,8 @@ Rectangle {
case Qt.Key_Return:
case Qt.Key_Enter:
event.accepted = true;
keypressTimer.stop();
root.searchString = searchField.text;
searchField.text = "";
getMarketplaceItems();

View file

@ -680,6 +680,8 @@ private:
* <tr><td><code>InHMD</code></td><td>number</td><td>number</td><td>The user is in HMD mode.</td></tr>
* <tr><td><code>AdvancedMovement</code></td><td>number</td><td>number</td><td>Advanced movement controls are enabled.
* </td></tr>
* <tr><td><code>LeftHandDominant</code></td><td>number</td><td>number</td><td>Dominant hand set to left.</td></tr>
* <tr><td><code>RightHandDominant</code></td><td>number</td><td>number</td><td>Dominant hand set to right.</td></tr>
* <tr><td><code>SnapTurn</code></td><td>number</td><td>number</td><td>Snap turn is enabled.</td></tr>
* <tr><td><code>Grounded</code></td><td>number</td><td>number</td><td>The user's avatar is on the ground.</td></tr>
* <tr><td><code>NavigationFocused</code></td><td>number</td><td>number</td><td><em>Not used.</em></td></tr>
@ -701,6 +703,9 @@ static const QString STATE_NAV_FOCUSED = "NavigationFocused";
static const QString STATE_PLATFORM_WINDOWS = "PlatformWindows";
static const QString STATE_PLATFORM_MAC = "PlatformMac";
static const QString STATE_PLATFORM_ANDROID = "PlatformAndroid";
static const QString STATE_LEFT_HAND_DOMINANT = "LeftHandDominant";
static const QString STATE_RIGHT_HAND_DOMINANT = "RightHandDominant";
static const QString STATE_STRAFE_ENABLED = "StrafeEnabled";
// Statically provided display and input plugins
extern DisplayPluginList getDisplayPlugins();
@ -902,7 +907,7 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
controller::StateController::setStateVariables({ { STATE_IN_HMD, STATE_CAMERA_FULL_SCREEN_MIRROR,
STATE_CAMERA_FIRST_PERSON, STATE_CAMERA_THIRD_PERSON, STATE_CAMERA_ENTITY, STATE_CAMERA_INDEPENDENT,
STATE_SNAP_TURN, STATE_ADVANCED_MOVEMENT_CONTROLS, STATE_GROUNDED, STATE_NAV_FOCUSED,
STATE_PLATFORM_WINDOWS, STATE_PLATFORM_MAC, STATE_PLATFORM_ANDROID } });
STATE_PLATFORM_WINDOWS, STATE_PLATFORM_MAC, STATE_PLATFORM_ANDROID, STATE_LEFT_HAND_DOMINANT, STATE_RIGHT_HAND_DOMINANT, STATE_STRAFE_ENABLED } });
DependencyManager::set<UserInputMapper>();
DependencyManager::set<controller::ScriptingInterface, ControllerScriptingInterface>();
DependencyManager::set<InterfaceParentFinder>();
@ -1740,6 +1745,15 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_applicationStateDevice->setInputVariant(STATE_ADVANCED_MOVEMENT_CONTROLS, []() -> float {
return qApp->getMyAvatar()->useAdvancedMovementControls() ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_LEFT_HAND_DOMINANT, []() -> float {
return qApp->getMyAvatar()->getDominantHand() == "left" ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_RIGHT_HAND_DOMINANT, []() -> float {
return qApp->getMyAvatar()->getDominantHand() == "right" ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_STRAFE_ENABLED, []() -> float {
return qApp->getMyAvatar()->getStrafeEnabled() ? 1 : 0;
});
_applicationStateDevice->setInputVariant(STATE_GROUNDED, []() -> float {
return qApp->getMyAvatar()->getCharacterController()->onGround() ? 1 : 0;

View file

@ -116,7 +116,7 @@ Menu::Menu() {
// Edit > Delete
auto deleteAction = addActionToQMenuAndActionHash(editMenu, "Delete", QKeySequence::Delete);
connect(deleteAction, &QAction::triggered, [] {
QKeyEvent* keyEvent = new QKeyEvent(QEvent::KeyPress, Qt::Key_Delete, Qt::ControlModifier);
QKeyEvent* keyEvent = new QKeyEvent(QEvent::KeyPress, Qt::Key_Delete, Qt::NoModifier);
QCoreApplication::postEvent(QCoreApplication::instance(), keyEvent);
});

View file

@ -497,7 +497,7 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
// it might not fire until after we create a new instance for the same remote avatar, which creates a race
// on the creation of entities for that avatar instance and the deletion of entities for this instance
avatar->removeAvatarEntitiesFromTree();
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble) {
if (removalReason != KillAvatarReason::AvatarDisconnected) {
emit AvatarInputs::getInstance()->avatarEnteredIgnoreRadius(avatar->getSessionUUID());
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
@ -509,7 +509,7 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
render::Transaction transaction;
avatar->removeFromScene(avatar, scene, transaction);
scene->enqueueTransaction(transaction);
} else if (removalReason == KillAvatarReason::AvatarDisconnected) {
} else {
// remove from node sets, if present
DependencyManager::get<NodeList>()->removeFromIgnoreMuteSets(avatar->getSessionUUID());
DependencyManager::get<UsersScriptingInterface>()->avatarDisconnected(avatar->getSessionUUID());

View file

@ -155,6 +155,7 @@ MyAvatar::MyAvatar(QThread* thread) :
_prevShouldDrawHead(true),
_audioListenerMode(FROM_HEAD),
_dominantHandSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "dominantHand", DOMINANT_RIGHT_HAND),
_strafeEnabledSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "strafeEnabled", DEFAULT_STRAFE_ENABLED),
_hmdAvatarAlignmentTypeSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "hmdAvatarAlignmentType", DEFAULT_HMD_AVATAR_ALIGNMENT_TYPE),
_headPitchSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "", 0.0f),
_scaleSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "scale", _targetScale),
@ -169,7 +170,16 @@ MyAvatar::MyAvatar(QThread* thread) :
_useSnapTurnSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "useSnapTurn", _useSnapTurn),
_userHeightSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "userHeight", DEFAULT_AVATAR_HEIGHT),
_flyingHMDSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "flyingHMD", _flyingPrefHMD),
_movementReferenceSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "movementReference", _movementReference),
_avatarEntityCountSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "avatarEntityData" << "size", 0),
_driveGear1Setting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "driveGear1", _driveGear1),
_driveGear2Setting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "driveGear2", _driveGear2),
_driveGear3Setting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "driveGear3", _driveGear3),
_driveGear4Setting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "driveGear4", _driveGear4),
_driveGear5Setting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "driveGear5", _driveGear5),
_analogWalkSpeedSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "analogWalkSpeed", _analogWalkSpeed.get()),
_analogPlusWalkSpeedSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "analogPlusWalkSpeed", _analogPlusWalkSpeed.get()),
_controlSchemeIndexSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "controlSchemeIndex", _controlSchemeIndex),
_userRecenterModelSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "userRecenterModel", USER_RECENTER_MODEL_AUTO)
{
_clientTraitsHandler.reset(new ClientTraitsHandler(this));
@ -322,6 +332,14 @@ QString MyAvatar::getDominantHand() const {
return _dominantHand.get();
}
void MyAvatar::setStrafeEnabled(bool enabled) {
_strafeEnabled.set(enabled);
}
bool MyAvatar::getStrafeEnabled() const {
return _strafeEnabled.get();
}
void MyAvatar::setDominantHand(const QString& hand) {
if (hand == DOMINANT_LEFT_HAND || hand == DOMINANT_RIGHT_HAND) {
bool changed = (hand != _dominantHand.get());
@ -1256,6 +1274,7 @@ void MyAvatar::resizeAvatarEntitySettingHandles(uint32_t maxIndex) {
void MyAvatar::saveData() {
_dominantHandSetting.set(getDominantHand());
_strafeEnabledSetting.set(getStrafeEnabled());
_hmdAvatarAlignmentTypeSetting.set(getHmdAvatarAlignmentType());
_headPitchSetting.set(getHead()->getBasePitch());
_scaleSetting.set(_targetScale);
@ -1279,6 +1298,15 @@ void MyAvatar::saveData() {
_useSnapTurnSetting.set(_useSnapTurn);
_userHeightSetting.set(getUserHeight());
_flyingHMDSetting.set(getFlyingHMDPref());
_movementReferenceSetting.set(getMovementReference());
_driveGear1Setting.set(getDriveGear1());
_driveGear2Setting.set(getDriveGear2());
_driveGear3Setting.set(getDriveGear3());
_driveGear4Setting.set(getDriveGear4());
_driveGear5Setting.set(getDriveGear5());
_analogWalkSpeedSetting.set(getAnalogWalkSpeed());
_analogPlusWalkSpeedSetting.set(getAnalogPlusWalkSpeed());
_controlSchemeIndexSetting.set(getControlSchemeIndex());
_userRecenterModelSetting.set(userRecenterModelToString(getUserRecenterModel()));
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
@ -1856,12 +1884,22 @@ void MyAvatar::loadData() {
// Flying preferences must be loaded before calling setFlyingEnabled()
Setting::Handle<bool> firstRunVal { Settings::firstRun, true };
setFlyingHMDPref(firstRunVal.get() ? false : _flyingHMDSetting.get());
setMovementReference(firstRunVal.get() ? false : _movementReferenceSetting.get());
setDriveGear1(firstRunVal.get() ? DEFAULT_GEAR_1 : _driveGear1Setting.get());
setDriveGear2(firstRunVal.get() ? DEFAULT_GEAR_2 : _driveGear2Setting.get());
setDriveGear3(firstRunVal.get() ? DEFAULT_GEAR_3 : _driveGear3Setting.get());
setDriveGear4(firstRunVal.get() ? DEFAULT_GEAR_4 : _driveGear4Setting.get());
setDriveGear5(firstRunVal.get() ? DEFAULT_GEAR_5 : _driveGear5Setting.get());
setControlSchemeIndex(firstRunVal.get() ? LocomotionControlsMode::CONTROLS_DEFAULT : _controlSchemeIndexSetting.get());
setAnalogWalkSpeed(firstRunVal.get() ? ANALOG_AVATAR_MAX_WALKING_SPEED : _analogWalkSpeedSetting.get());
setAnalogPlusWalkSpeed(firstRunVal.get() ? ANALOG_PLUS_AVATAR_MAX_WALKING_SPEED : _analogPlusWalkSpeedSetting.get());
setFlyingEnabled(getFlyingEnabled());
setDisplayName(_displayNameSetting.get());
setCollisionSoundURL(_collisionSoundURLSetting.get(QUrl(DEFAULT_AVATAR_COLLISION_SOUND_URL)).toString());
setSnapTurn(_useSnapTurnSetting.get());
setDominantHand(_dominantHandSetting.get(DOMINANT_RIGHT_HAND).toLower());
setStrafeEnabled(_strafeEnabledSetting.get(DEFAULT_STRAFE_ENABLED));
setHmdAvatarAlignmentType(_hmdAvatarAlignmentTypeSetting.get(DEFAULT_HMD_AVATAR_ALIGNMENT_TYPE).toLower());
setUserHeight(_userHeightSetting.get(DEFAULT_AVATAR_HEIGHT));
setTargetScale(_scaleSetting.get());
@ -2519,6 +2557,12 @@ controller::Pose MyAvatar::getControllerPoseInAvatarFrame(controller::Action act
}
}
// Returns the avatar-frame rotation of the controller in the user's
// non-dominant ("off") hand, used as the movement-reference basis.
glm::quat MyAvatar::getOffHandRotation() const {
    // The off hand is the opposite of the configured dominant hand.
    controller::Action offHand = (getDominantHand() == DOMINANT_RIGHT_HAND)
        ? controller::Action::LEFT_HAND
        : controller::Action::RIGHT_HAND;
    return getControllerPoseInAvatarFrame(offHand).rotation;
}
void MyAvatar::updateMotors() {
_characterController.clearMotors();
glm::quat motorRotation;
@ -3285,21 +3329,131 @@ void MyAvatar::updateOrientation(float deltaTime) {
}
}
static float scaleSpeedByDirection(const glm::vec2 velocityDirection, const float forwardSpeed, const float backwardSpeed) {
// for the elipse function --> (x^2)/(backwardSpeed*backwardSpeed) + y^2/(forwardSpeed*forwardSpeed) = 1, scale == y^2 when x is 0
float fwdScale = forwardSpeed * forwardSpeed;
float backScale = backwardSpeed * backwardSpeed;
float scaledX = velocityDirection.x * backwardSpeed;
float scaledSpeed = forwardSpeed;
if (velocityDirection.y < 0.0f) {
if (backScale > 0.0f) {
float yValue = sqrtf(fwdScale * (1.0f - ((scaledX * scaledX) / backScale)));
scaledSpeed = sqrtf((scaledX * scaledX) + (yValue * yValue));
// Maps an analog drive-key deflection to a stepped ("geared") speed fraction
// in [-1, 1], preserving the input's sign. The gear thresholds come from the
// active control scheme (see getDriveGear1..getDriveGear5).
// @param driveKey raw analog deflection, expected in [-1, 1].
// @return signed speed fraction: 0.0, +/-0.2, 0.4, 0.6, 0.8 or 1.0.
float MyAvatar::calculateGearedSpeed(const float driveKey) {
    // Compute |driveKey| explicitly: unqualified abs() can resolve to the
    // C integer overload and silently truncate the fractional deflection.
    const float absDriveKey = (driveKey < 0.0f) ? -driveKey : driveKey;
    const float sign = (driveKey < 0.0f) ? -1.0f : 1.0f;
    if (absDriveKey > getDriveGear5()) {
        return sign * 1.0f;
    }
    if (absDriveKey > getDriveGear4()) {
        return sign * 0.8f;
    }
    if (absDriveKey > getDriveGear3()) {
        return sign * 0.6f;
    }
    if (absDriveKey > getDriveGear2()) {
        return sign * 0.4f;
    }
    if (absDriveKey > getDriveGear1()) {
        return sign * 0.2f;
    }
    return 0.0f;
}
glm::vec3 MyAvatar::scaleMotorSpeed(const glm::vec3 forward, const glm::vec3 right) {
float stickFullOn = 0.85f;
auto zSpeed = getDriveKey(TRANSLATE_Z);
auto xSpeed = getDriveKey(TRANSLATE_X);
glm::vec3 direction;
if (!useAdvancedMovementControls() && qApp->isHMDMode()) {
// Walking disabled in settings.
return Vectors::ZERO;
} else if (qApp->isHMDMode()) {
// HMD advanced movement controls.
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_DEFAULT:
// No acceleration curve for this one, constant speed.
if (zSpeed || xSpeed) {
direction = (zSpeed * forward) + (xSpeed * right);
// Normalize direction.
auto length = glm::length(direction);
if (length > EPSILON) {
direction /= length;
}
return getSensorToWorldScale() * direction * getSprintSpeed() * _walkSpeedScalar;
} else {
return Vectors::ZERO;
}
case LocomotionControlsMode::CONTROLS_ANALOG:
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
if (zSpeed || xSpeed) {
glm::vec3 scaledForward = getSensorToWorldScale() * calculateGearedSpeed(zSpeed) * _walkSpeedScalar * ((zSpeed >= stickFullOn) ? getSprintSpeed() : getWalkSpeed()) * forward;
glm::vec3 scaledRight = getSensorToWorldScale() * calculateGearedSpeed(xSpeed) * _walkSpeedScalar * ((xSpeed > stickFullOn) ? getSprintSpeed() : getWalkSpeed()) * right;
direction = scaledForward + scaledRight;
return direction;
} else {
return Vectors::ZERO;
}
default:
qDebug() << "Invalid control scheme index.";
return Vectors::ZERO;
}
} else {
scaledSpeed = backwardSpeed;
// Desktop mode.
direction = (zSpeed * forward) + (xSpeed * right);
auto length = glm::length(direction);
if (length > EPSILON) {
direction /= length;
}
direction *= getWalkSpeed() * _walkSpeedScalar;
return direction;
}
return scaledSpeed;
}
// Builds the speed-scaled drive direction for this frame. In HMD mode the
// forward/right basis may be HMD-relative or relative to the off-hand
// controller's orientation (optionally flattened onto the horizontal plane);
// on desktop it is always the avatar's identity axes.
glm::vec3 MyAvatar::calculateScaledDirection(){
    CharacterController::State state = _characterController.getState();
    // compute action input
    // Determine if we're head or controller relative...
    glm::vec3 forward, right;
    if (qApp->isHMDMode()) {
        // Basis vectors expressed in controller space; the right vector's sign
        // flips with handedness so "right" stays consistent for either off hand.
        auto handRotation = getOffHandRotation();
        glm::vec3 controllerForward(0.0f, 1.0f, 0.0f);
        glm::vec3 controllerRight(0.0f, 0.0f, (getDominantHand() == DOMINANT_RIGHT_HAND ? 1.0f : -1.0f));
        glm::vec3 transform;
        switch (getMovementReference()) {
            case LocomotionRelativeMovementMode::MOVEMENT_HAND_RELATIVE:
                forward = (handRotation * controllerForward);
                right = (handRotation * controllerRight);
                break;
            case LocomotionRelativeMovementMode::MOVEMENT_HAND_RELATIVE_LEVELED:
                // Strip the world-up component so movement stays level; fall
                // back to zero when the controller points straight up/down and
                // the horizontal projection vanishes (length <= EPSILON).
                forward = (handRotation * controllerForward);
                transform = forward - (glm::dot(forward, Vectors::UNIT_Y) * Vectors::UNIT_Y);
                if (glm::length(transform) > EPSILON) {
                    forward = glm::normalize(transform);
                } else {
                    forward = Vectors::ZERO;
                }
                right = (handRotation * controllerRight);
                transform = right - (glm::dot(right, Vectors::UNIT_Y) * Vectors::UNIT_Y);
                if (glm::length(transform) > EPSILON) {
                    right = glm::normalize(transform);
                } else {
                    right = Vectors::ZERO;
                }
                break;
            case LocomotionRelativeMovementMode::MOVEMENT_HMD_RELATIVE:
            default:
                forward = IDENTITY_FORWARD;
                right = IDENTITY_RIGHT;
        }
    } else {
        // Desktop: input is always interpreted in the avatar's own frame.
        forward = IDENTITY_FORWARD;
        right = IDENTITY_RIGHT;
    }

    glm::vec3 direction = scaleMotorSpeed(forward, right);

    // While hovering or collisionless, vertical drive input contributes too.
    if (state == CharacterController::State::Hover ||
        _characterController.computeCollisionMask() == BULLET_COLLISION_MASK_COLLISIONLESS) {
        glm::vec3 up = (getDriveKey(TRANSLATE_Y)) * IDENTITY_UP;
        direction += up;
    }

    return direction;
}
void MyAvatar::updateActionMotor(float deltaTime) {
@ -3319,25 +3473,13 @@ void MyAvatar::updateActionMotor(float deltaTime) {
CharacterController::State state = _characterController.getState();
// compute action input
glm::vec3 forward = (getDriveKey(TRANSLATE_Z)) * IDENTITY_FORWARD;
glm::vec3 right = (getDriveKey(TRANSLATE_X)) * IDENTITY_RIGHT;
glm::vec3 direction = forward + right;
if (state == CharacterController::State::Hover ||
_characterController.computeCollisionMask() == BULLET_COLLISION_MASK_COLLISIONLESS) {
glm::vec3 up = (getDriveKey(TRANSLATE_Y)) * IDENTITY_UP;
direction += up;
}
glm::vec3 direction = calculateScaledDirection();
_wasPushing = _isPushing;
float directionLength = glm::length(direction);
_isPushing = directionLength > EPSILON;
// normalize direction
if (_isPushing) {
direction /= directionLength;
} else {
if (!_isPushing) {
direction = Vectors::ZERO;
}
@ -3353,6 +3495,7 @@ void MyAvatar::updateActionMotor(float deltaTime) {
const float maxBoostSpeed = sensorToWorldScale * MAX_BOOST_SPEED;
if (_isPushing) {
direction /= directionLength;
if (motorSpeed < maxBoostSpeed) {
// an active action motor should never be slower than this
float boostCoefficient = (maxBoostSpeed - motorSpeed) / maxBoostSpeed;
@ -3363,11 +3506,17 @@ void MyAvatar::updateActionMotor(float deltaTime) {
}
_actionMotorVelocity = motorSpeed * direction;
} else {
// we're interacting with a floor --> simple horizontal speed and exponential decay
const glm::vec2 currentVel = { direction.x, direction.z };
float scaledSpeed = scaleSpeedByDirection(currentVel, _walkSpeed.get(), _walkBackwardSpeed.get());
// _walkSpeedScalar is a multiplier if we are in sprint mode, otherwise 1.0
_actionMotorVelocity = sensorToWorldScale * (scaledSpeed * _walkSpeedScalar) * direction;
_actionMotorVelocity = direction;
}
float previousBoomLength = _boomLength;
float boomChange = getDriveKey(ZOOM);
_boomLength += 2.0f * _boomLength * boomChange + boomChange * boomChange;
_boomLength = glm::clamp<float>(_boomLength, ZOOM_MIN, ZOOM_MAX);
// May need to change view if boom length has changed
if (previousBoomLength != _boomLength) {
qApp->changeViewAsNeeded(_boomLength);
}
}
@ -3880,6 +4029,136 @@ void MyAvatar::setFlyingHMDPref(bool enabled) {
_flyingPrefHMD = enabled;
}
void MyAvatar::setMovementReference(int enabled) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setMovementReference", Q_ARG(bool, enabled));
return;
}
_movementReference = enabled;
}
// Returns the current movement-reference mode (a LocomotionRelativeMovementMode
// value: 0 = HMD-relative, 1 = hand-relative, 2 = hand-relative leveled).
int MyAvatar::getMovementReference() {
    return _movementReference;
}
void MyAvatar::setControlSchemeIndex(int index){
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setControlSchemeIndex", Q_ARG(int, index));
return;
}
// Need to add checks for valid indices.
_controlSchemeIndex = index;
}
// Returns the active locomotion control scheme index
// (a LocomotionControlsMode value).
int MyAvatar::getControlSchemeIndex() {
    return _controlSchemeIndex;
}
// Sets the first analog-acceleration shift point; the new value must be
// normalized ([0, 1]) and keep the gears ordered (strictly below gear 2),
// otherwise the call is ignored.
void MyAvatar::setDriveGear1(float shiftPoint) {
    // Marshal onto the owning thread when called from elsewhere.
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setDriveGear1", Q_ARG(float, shiftPoint));
        return;
    }
    bool normalized = (shiftPoint >= 0.0f) && (shiftPoint <= 1.0f);
    if (normalized && shiftPoint < _driveGear2) {
        _driveGear1 = shiftPoint;
    }
}
// Returns the first gear shift point for the active control scheme: a fixed
// constant for Analog, the user-tuned value for Analog++, and 1.0 otherwise
// (which effectively disables gearing for the default scheme).
float MyAvatar::getDriveGear1() {
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG) {
        return ANALOG_AVATAR_GEAR_1;
    }
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG_PLUS) {
        return _driveGear1;
    }
    return 1.0f;
}
// Sets the second analog-acceleration shift point; the new value must be
// normalized ([0, 1]) and keep the gears ordered (>= gear 1, strictly below
// gear 3), otherwise the call is ignored.
void MyAvatar::setDriveGear2(float shiftPoint) {
    // Marshal onto the owning thread when called from elsewhere.
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setDriveGear2", Q_ARG(float, shiftPoint));
        return;
    }
    bool normalized = (shiftPoint >= 0.0f) && (shiftPoint <= 1.0f);
    if (normalized && shiftPoint >= _driveGear1 && shiftPoint < _driveGear3) {
        _driveGear2 = shiftPoint;
    }
}
// Returns the second gear shift point for the active control scheme: a fixed
// constant for Analog, the user-tuned value for Analog++, and 1.0 otherwise.
float MyAvatar::getDriveGear2() {
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG) {
        return ANALOG_AVATAR_GEAR_2;
    }
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG_PLUS) {
        return _driveGear2;
    }
    return 1.0f;
}
// Sets the third analog-acceleration shift point; the new value must be
// normalized ([0, 1]) and keep the gears ordered (>= gear 2, strictly below
// gear 4), otherwise the call is ignored.
void MyAvatar::setDriveGear3(float shiftPoint) {
    // Marshal onto the owning thread when called from elsewhere.
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setDriveGear3", Q_ARG(float, shiftPoint));
        return;
    }
    bool normalized = (shiftPoint >= 0.0f) && (shiftPoint <= 1.0f);
    if (normalized && shiftPoint >= _driveGear2 && shiftPoint < _driveGear4) {
        _driveGear3 = shiftPoint;
    }
}
// Returns the third gear shift point for the active control scheme: a fixed
// constant for Analog, the user-tuned value for Analog++, and 1.0 otherwise.
float MyAvatar::getDriveGear3() {
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG) {
        return ANALOG_AVATAR_GEAR_3;
    }
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG_PLUS) {
        return _driveGear3;
    }
    return 1.0f;
}
// Sets the fourth analog-acceleration shift point; the new value must be
// normalized ([0, 1]) and keep the gears ordered (>= gear 3, strictly below
// gear 5), otherwise the call is ignored.
void MyAvatar::setDriveGear4(float shiftPoint) {
    // Marshal onto the owning thread when called from elsewhere.
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setDriveGear4", Q_ARG(float, shiftPoint));
        return;
    }
    bool normalized = (shiftPoint >= 0.0f) && (shiftPoint <= 1.0f);
    if (normalized && shiftPoint >= _driveGear3 && shiftPoint < _driveGear5) {
        _driveGear4 = shiftPoint;
    }
}
// Returns the fourth gear shift point for the active control scheme: a fixed
// constant for Analog, the user-tuned value for Analog++, and 1.0 otherwise.
float MyAvatar::getDriveGear4() {
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG) {
        return ANALOG_AVATAR_GEAR_4;
    }
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG_PLUS) {
        return _driveGear4;
    }
    return 1.0f;
}
// Sets the fifth (topmost) analog-acceleration shift point; the new value must
// be normalized ([0, 1]) and strictly above gear 4, otherwise the call is
// ignored.
void MyAvatar::setDriveGear5(float shiftPoint) {
    // Marshal onto the owning thread when called from elsewhere.
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setDriveGear5", Q_ARG(float, shiftPoint));
        return;
    }
    bool normalized = (shiftPoint >= 0.0f) && (shiftPoint <= 1.0f);
    if (normalized && shiftPoint > _driveGear4) {
        _driveGear5 = shiftPoint;
    }
}
// Returns the fifth gear shift point for the active control scheme: a fixed
// constant for Analog, the user-tuned value for Analog++, and 1.0 otherwise.
float MyAvatar::getDriveGear5() {
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG) {
        return ANALOG_AVATAR_GEAR_5;
    }
    if (_controlSchemeIndex == LocomotionControlsMode::CONTROLS_ANALOG_PLUS) {
        return _driveGear5;
    }
    return 1.0f;
}
// Returns the user's saved preference for being able to fly while in HMD mode.
bool MyAvatar::getFlyingHMDPref() {
    return _flyingPrefHMD;
}
@ -4488,11 +4767,37 @@ bool MyAvatar::getIsSitStandStateLocked() const {
}
float MyAvatar::getWalkSpeed() const {
return _walkSpeed.get() * _walkSpeedScalar;
if (qApp->isHMDMode()) {
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_ANALOG:
return _analogWalkSpeed.get();
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
return _analogPlusWalkSpeed.get();
case LocomotionControlsMode::CONTROLS_DEFAULT:
default:
return _defaultWalkSpeed.get();
}
} else {
return _defaultWalkSpeed.get();
}
}
float MyAvatar::getWalkBackwardSpeed() const {
return _walkSpeed.get() * _walkSpeedScalar;
if (qApp->isHMDMode()) {
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_ANALOG:
return _analogWalkBackwardSpeed.get();
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
return _analogPlusWalkBackwardSpeed.get();
case LocomotionControlsMode::CONTROLS_DEFAULT:
default:
return _defaultWalkBackwardSpeed.get();
}
} else {
return _defaultWalkBackwardSpeed.get();
}
}
bool MyAvatar::isReadyForPhysics() const {
@ -4500,7 +4805,7 @@ bool MyAvatar::isReadyForPhysics() const {
}
void MyAvatar::setSprintMode(bool sprint) {
_walkSpeedScalar = sprint ? _sprintSpeed.get() : AVATAR_WALK_SPEED_SCALAR;
_walkSpeedScalar = sprint ? AVATAR_SPRINT_SPEED_SCALAR : AVATAR_WALK_SPEED_SCALAR;
}
void MyAvatar::setIsInWalkingState(bool isWalking) {
@ -4563,19 +4868,103 @@ void MyAvatar::setIsSitStandStateLocked(bool isLocked) {
}
void MyAvatar::setWalkSpeed(float value) {
_walkSpeed.set(value);
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_DEFAULT:
_defaultWalkSpeed.set(value);
break;
case LocomotionControlsMode::CONTROLS_ANALOG:
_analogWalkSpeed.set(value);
break;
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
_analogPlusWalkSpeed.set(value);
break;
default:
break;
}
}
void MyAvatar::setWalkBackwardSpeed(float value) {
_walkBackwardSpeed.set(value);
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_DEFAULT:
_defaultWalkBackwardSpeed.set(value);
break;
case LocomotionControlsMode::CONTROLS_ANALOG:
_analogWalkBackwardSpeed.set(value);
break;
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
_analogPlusWalkBackwardSpeed.set(value);
break;
default:
break;
}
}
void MyAvatar::setSprintSpeed(float value) {
_sprintSpeed.set(value);
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_DEFAULT:
_defaultSprintSpeed.set(value);
break;
case LocomotionControlsMode::CONTROLS_ANALOG:
_analogSprintSpeed.set(value);
break;
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
_analogPlusSprintSpeed.set(value);
break;
default:
break;
}
}
float MyAvatar::getSprintSpeed() const {
return _sprintSpeed.get();
if (qApp->isHMDMode()) {
switch (_controlSchemeIndex) {
case LocomotionControlsMode::CONTROLS_ANALOG:
return _analogSprintSpeed.get();
case LocomotionControlsMode::CONTROLS_ANALOG_PLUS:
return _analogPlusSprintSpeed.get();
case LocomotionControlsMode::CONTROLS_DEFAULT:
default:
return _defaultSprintSpeed.get();
}
} else {
return _defaultSprintSpeed.get();
}
}
// --- Analog / Analog++ locomotion speed accessors ---------------------------
// NOTE(review): the walk-speed setters also derive the matching sprint speed
// as 2x the walk speed, so an earlier explicit set*SprintSpeed() call is
// overwritten by any later walk-speed change — confirm this is intended.

void MyAvatar::setAnalogWalkSpeed(float value) {
    _analogWalkSpeed.set(value);
    // Sprint speed for Analog should be double walk speed.
    _analogSprintSpeed.set(value * 2.0f);
}

float MyAvatar::getAnalogWalkSpeed() const {
    return _analogWalkSpeed.get();
}

void MyAvatar::setAnalogSprintSpeed(float value) {
    _analogSprintSpeed.set(value);
}

float MyAvatar::getAnalogSprintSpeed() const {
    return _analogSprintSpeed.get();
}

void MyAvatar::setAnalogPlusWalkSpeed(float value) {
    _analogPlusWalkSpeed.set(value);
    // Sprint speed for Analog Plus should be double walk speed.
    _analogPlusSprintSpeed.set(value * 2.0f);
}

float MyAvatar::getAnalogPlusWalkSpeed() const {
    return _analogPlusWalkSpeed.get();
}

void MyAvatar::setAnalogPlusSprintSpeed(float value) {
    _analogPlusSprintSpeed.set(value);
}

float MyAvatar::getAnalogPlusSprintSpeed() const {
    return _analogPlusSprintSpeed.get();
}
void MyAvatar::setSitStandStateChange(bool stateChanged) {

View file

@ -39,6 +39,18 @@ class ModelItemID;
class MyHead;
class DetailedMotionState;
// Locomotion control schemes selectable in the VR movement preferences:
// DEFAULT moves at constant speed; ANALOG uses fixed acceleration "gears";
// ANALOG_PLUS uses user-tunable gears (see MyAvatar::setDriveGear1..5).
enum LocomotionControlsMode {
    CONTROLS_DEFAULT = 0,
    CONTROLS_ANALOG,
    CONTROLS_ANALOG_PLUS
};
// Frame of reference used to interpret HMD locomotion input: relative to the
// HMD, to the off-hand controller's orientation, or to that orientation
// flattened onto the horizontal plane (see MyAvatar::calculateScaledDirection).
enum LocomotionRelativeMovementMode {
    MOVEMENT_HMD_RELATIVE = 0,
    MOVEMENT_HAND_RELATIVE,
    MOVEMENT_HAND_RELATIVE_LEVELED
};
enum eyeContactTarget {
LEFT_EYE,
RIGHT_EYE,
@ -371,6 +383,13 @@ class MyAvatar : public Avatar {
using Clock = std::chrono::system_clock;
using TimePoint = Clock::time_point;
const float DEFAULT_GEAR_1 = 0.2f;
const float DEFAULT_GEAR_2 = 0.4f;
const float DEFAULT_GEAR_3 = 0.8f;
const float DEFAULT_GEAR_4 = 0.9f;
const float DEFAULT_GEAR_5 = 1.0f;
const bool DEFAULT_STRAFE_ENABLED = true;
public:
/**jsdoc
@ -729,7 +748,17 @@ public:
*/
Q_INVOKABLE void setSnapTurn(bool on) { _useSnapTurn = on; }
/**
* @function MyAvatar.getControlScheme
* @returns {number}
*/
Q_INVOKABLE int getControlScheme() const { return _controlSchemeIndex; }
/**
* @function MyAvatar.setControlScheme
* @param {number} index
*/
Q_INVOKABLE void setControlScheme(int index) { _controlSchemeIndex = (index >= 0 && index <= 2) ? index : 0; }
/**jsdoc
* Sets the avatar's dominant hand.
* @function MyAvatar.setDominantHand
@ -744,7 +773,16 @@ public:
* @returns {string} <code>"left"</code> for the left hand, <code>"right"</code> for the right hand.
*/
Q_INVOKABLE QString getDominantHand() const;
/**jsdoc
* @function MyAvatar.setStrafeEnabled
* @param {bool} enabled
*/
Q_INVOKABLE void setStrafeEnabled(bool enabled);
/**jsdoc
* @function MyAvatar.getStrafeEnabled
* @returns {bool}
*/
Q_INVOKABLE bool getStrafeEnabled() const;
/**jsdoc
* @function MyAvatar.setHmdAvatarAlignmentType
* @param {string} type - <code>"head"</code> to align your head and your avatar's head, <code>"eyes"</code> to align your
@ -1235,6 +1273,7 @@ public:
controller::Pose getControllerPoseInSensorFrame(controller::Action action) const;
controller::Pose getControllerPoseInWorldFrame(controller::Action action) const;
controller::Pose getControllerPoseInAvatarFrame(controller::Action action) const;
glm::quat getOffHandRotation() const;
bool hasDriveInput() const;
@ -1317,6 +1356,106 @@ public:
*/
Q_INVOKABLE bool getFlyingHMDPref();
/**jsdoc
* Set the movement reference mode: the frame of reference used to interpret HMD locomotion input.
* @function MyAvatar.setHandRelativeMovement
* @param {number} enabled - The movement reference mode: <code>0</code> for HMD-relative, <code>1</code> for hand-relative, <code>2</code> for hand-relative (leveled).
*
*/
Q_INVOKABLE void setMovementReference(int enabled);
/**jsdoc
* Get the movement reference mode: the frame of reference used to interpret HMD locomotion input.
* @function MyAvatar.getHandRelativeMovement
* @returns {number} The movement reference mode: <code>0</code> for HMD-relative, <code>1</code> for hand-relative,
* <code>2</code> for hand-relative (leveled).
*/
Q_INVOKABLE int getMovementReference();
/**jsdoc
* Set the first 'shifting point' for acceleration step function.
* @function MyAvatar.setDriveGear1
* @param {number} shiftPoint - Set the first shift point for analog movement acceleration step function, between [0.0, 1.0]. Must be less than or equal to Gear 2.
*/
Q_INVOKABLE void setDriveGear1(float shiftPoint);
/**jsdoc
* Get the first 'shifting point' for acceleration step function.
* @function MyAvatar.getDriveGear1
* @returns {number} Value between [0.0, 1.0].
*/
Q_INVOKABLE float getDriveGear1();
/**jsdoc
* Set the second 'shifting point' for acceleration step function.
* @function MyAvatar.setDriveGear2
* @param {number} shiftPoint - Defines the second shift point for analog movement acceleration step function, between [0, 1]. Must be greater than or equal to Gear 1 and less than or equal to Gear 3.
*/
Q_INVOKABLE void setDriveGear2(float shiftPoint);
/**jsdoc
* Get the second 'shifting point' for acceleration step function.
* @function MyAvatar.getDriveGear2
* @returns {number} Value between [0.0, 1.0].
*/
Q_INVOKABLE float getDriveGear2();
/**jsdoc
* Set the third 'shifting point' for acceleration step function.
* @function MyAvatar.setDriveGear3
* @param {number} shiftPoint - Defines the third shift point for analog movement acceleration step function, between [0, 1]. Must be greater than or equal to Gear 2 and less than or equal to Gear 4.
*/
Q_INVOKABLE void setDriveGear3(float shiftPoint);
/**jsdoc
* Get the third 'shifting point' for acceleration step function.
* @function MyAvatar.getDriveGear3
* @returns {number} Value between [0.0, 1.0].
*/
Q_INVOKABLE float getDriveGear3();
/**jsdoc
* Set the fourth 'shifting point' for acceleration step function.
* @function MyAvatar.setDriveGear4
* @param {number} shiftPoint - Defines the fourth shift point for analog movement acceleration step function, between [0, 1]. Must be greater than Gear 3 and less than Gear 5.
*/
Q_INVOKABLE void setDriveGear4(float shiftPoint);
/**jsdoc
* Get the fourth 'shifting point' for acceleration step function.
* @function MyAvatar.getDriveGear4
* @returns {number} Value between [0.0, 1.0].
*/
Q_INVOKABLE float getDriveGear4();
/**jsdoc
* Set the fifth 'shifting point' for acceleration step function.
* @function MyAvatar.setDriveGear5
* @param {number} shiftPoint - Defines the fifth shift point for analog movement acceleration step function, between [0, 1]. Must be greater than or equal to Gear 4.
*/
Q_INVOKABLE void setDriveGear5(float shiftPoint);
/**jsdoc
* Get the fifth 'shifting point' for acceleration step function.
* @function MyAvatar.getDriveGear5
* @returns {number} Value between [0.0, 1.0].
*/
Q_INVOKABLE float getDriveGear5();
/**jsdoc
* Choose the control scheme.
* @function MyAvatar.setControlSchemeIndex
* @param {number} index - The control scheme to be used.
*/
void setControlSchemeIndex(int index);
/**jsdoc
* Check what control scheme is in use.
* @function MyAvatar.getControlSchemeIndex
* @returns {number} Returns the index associated with a given control scheme.
*/
int getControlSchemeIndex();
/**jsdoc
* Gets the target scale of the avatar. The target scale is the desired scale of the avatar without any restrictions on
* permissible scale values imposed by the domain.
@ -1490,6 +1629,14 @@ public:
float getWalkBackwardSpeed() const;
void setSprintSpeed(float value);
float getSprintSpeed() const;
void setAnalogWalkSpeed(float value);
float getAnalogWalkSpeed() const;
void setAnalogSprintSpeed(float value);
float getAnalogSprintSpeed() const;
void setAnalogPlusWalkSpeed(float value);
float getAnalogPlusWalkSpeed() const;
void setAnalogPlusSprintSpeed(float value);
float getAnalogPlusSprintSpeed() const;
void setSitStandStateChange(bool stateChanged);
float getSitStandStateChange() const;
void updateSitStandState(float newHeightReading, float dt);
@ -2230,6 +2377,13 @@ private:
float _boomLength { ZOOM_DEFAULT };
float _yawSpeed; // degrees/sec
float _pitchSpeed; // degrees/sec
float _driveGear1 { DEFAULT_GEAR_1 };
float _driveGear2 { DEFAULT_GEAR_2 };
float _driveGear3 { DEFAULT_GEAR_3 };
float _driveGear4 { DEFAULT_GEAR_4 };
float _driveGear5 { DEFAULT_GEAR_5 };
int _controlSchemeIndex { CONTROLS_DEFAULT };
int _movementReference{ 0 };
glm::vec3 _thrust { 0.0f }; // impulse accumulator for outside sources
@ -2270,6 +2424,9 @@ private:
// private methods
void updateOrientation(float deltaTime);
glm::vec3 calculateScaledDirection();
float calculateGearedSpeed(const float driveKey);
glm::vec3 scaleMotorSpeed(const glm::vec3 forward, const glm::vec3 right);
void updateActionMotor(float deltaTime);
void updatePosition(float deltaTime);
void updateViewBoom();
@ -2287,6 +2444,7 @@ private:
bool _useSnapTurn { true };
ThreadSafeValueCache<QString> _dominantHand { DOMINANT_RIGHT_HAND };
ThreadSafeValueCache<QString> _hmdAvatarAlignmentType { DEFAULT_HMD_AVATAR_ALIGNMENT_TYPE };
ThreadSafeValueCache<bool> _strafeEnabled{ DEFAULT_STRAFE_ENABLED };
const float ROLL_CONTROL_DEAD_ZONE_DEFAULT = 8.0f; // degrees
const float ROLL_CONTROL_RATE_DEFAULT = 114.0f; // degrees / sec
@ -2438,9 +2596,16 @@ private:
ThreadSafeValueCache<bool> _lockSitStandState { false };
// max unscaled forward movement speed
ThreadSafeValueCache<float> _walkSpeed { DEFAULT_AVATAR_MAX_WALKING_SPEED };
ThreadSafeValueCache<float> _walkBackwardSpeed { DEFAULT_AVATAR_MAX_WALKING_BACKWARD_SPEED };
ThreadSafeValueCache<float> _sprintSpeed { AVATAR_SPRINT_SPEED_SCALAR };
ThreadSafeValueCache<float> _defaultWalkSpeed { DEFAULT_AVATAR_MAX_WALKING_SPEED };
ThreadSafeValueCache<float> _defaultWalkBackwardSpeed { DEFAULT_AVATAR_MAX_WALKING_BACKWARD_SPEED };
ThreadSafeValueCache<float> _defaultSprintSpeed { DEFAULT_AVATAR_MAX_SPRINT_SPEED };
ThreadSafeValueCache<float> _analogWalkSpeed { ANALOG_AVATAR_MAX_WALKING_SPEED };
ThreadSafeValueCache<float> _analogWalkBackwardSpeed { ANALOG_AVATAR_MAX_WALKING_BACKWARD_SPEED };
ThreadSafeValueCache<float> _analogSprintSpeed { ANALOG_AVATAR_MAX_SPRINT_SPEED };
ThreadSafeValueCache<float> _analogPlusWalkSpeed { ANALOG_PLUS_AVATAR_MAX_WALKING_SPEED };
ThreadSafeValueCache<float> _analogPlusWalkBackwardSpeed { ANALOG_PLUS_AVATAR_MAX_WALKING_BACKWARD_SPEED };
ThreadSafeValueCache<float> _analogPlusSprintSpeed { ANALOG_PLUS_AVATAR_MAX_SPRINT_SPEED };
float _walkSpeedScalar { AVATAR_WALK_SPEED_SCALAR };
bool _isInWalkingState { false };
ThreadSafeValueCache<bool> _isInSittingState { false };
@ -2460,6 +2625,7 @@ private:
TimePoint _nextTraitsSendWindow;
Setting::Handle<QString> _dominantHandSetting;
Setting::Handle<bool> _strafeEnabledSetting;
Setting::Handle<QString> _hmdAvatarAlignmentTypeSetting;
Setting::Handle<float> _headPitchSetting;
Setting::Handle<float> _scaleSetting;
@ -2473,8 +2639,17 @@ private:
Setting::Handle<bool> _useSnapTurnSetting;
Setting::Handle<float> _userHeightSetting;
Setting::Handle<bool> _flyingHMDSetting;
Setting::Handle<int> _movementReferenceSetting;
Setting::Handle<int> _avatarEntityCountSetting;
Setting::Handle<bool> _allowTeleportingSetting { "allowTeleporting", true };
Setting::Handle<float> _driveGear1Setting;
Setting::Handle<float> _driveGear2Setting;
Setting::Handle<float> _driveGear3Setting;
Setting::Handle<float> _driveGear4Setting;
Setting::Handle<float> _driveGear5Setting;
Setting::Handle<float> _analogWalkSpeedSetting;
Setting::Handle<float> _analogPlusWalkSpeedSetting;
Setting::Handle<int> _controlSchemeIndexSetting;
std::vector<Setting::Handle<QUuid>> _avatarEntityIDSettings;
std::vector<Setting::Handle<QByteArray>> _avatarEntityDataSettings;
Setting::Handle<QString> _userRecenterModelSetting;

View file

@ -266,6 +266,11 @@ void setupPreferences() {
auto preference = new CheckPreference(VR_MOVEMENT, "Walking", getter, setter);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getStrafeEnabled(); };
auto setter = [myAvatar](bool value) { myAvatar->setStrafeEnabled(value); };
preferences->addPreference(new CheckPreference(VR_MOVEMENT, "Strafing", getter, setter));
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getFlyingHMDPref(); };
auto setter = [myAvatar](bool value) { myAvatar->setFlyingHMDPref(value); };
@ -273,6 +278,22 @@ void setupPreferences() {
preference->setIndented(true);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->int { return myAvatar->getMovementReference(); };
auto setter = [myAvatar](int value) { myAvatar->setMovementReference(value); };
//auto preference = new CheckPreference(VR_MOVEMENT, "Hand-Relative Movement", getter, setter);
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Movement Direction", getter, setter);
QStringList items;
items << "HMD-Relative" << "Hand-Relative" << "Hand-Relative (Leveled)";
preference->setHeading("Movement Direction");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->QString { return myAvatar->getDominantHand(); };
auto setter = [myAvatar](const QString& value) { myAvatar->setDominantHand(value); };
preferences->addPreference(new PrimaryHandPreference(VR_MOVEMENT, "Dominant Hand", getter, setter));
}
{
auto getter = [myAvatar]()->int { return myAvatar->getSnapTurn() ? 0 : 1; };
auto setter = [myAvatar](int value) { myAvatar->setSnapTurn(value == 0); };
@ -283,6 +304,26 @@ void setupPreferences() {
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->int { return myAvatar->getControlScheme(); };
auto setter = [myAvatar](int index) { myAvatar->setControlScheme(index); };
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Control Scheme", getter, setter);
QStringList items;
items << "Default" << "Analog" << "Analog++";
preference->setHeading("Control Scheme Selection");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->float { return myAvatar->getAnalogPlusWalkSpeed(); };
auto setter = [myAvatar](float value) { myAvatar->setAnalogPlusWalkSpeed(value); };
auto preference = new SpinnerSliderPreference(VR_MOVEMENT, "Analog++ Walk Speed", getter, setter);
preference->setMin(6.0f);
preference->setMax(30.0f);
preference->setStep(1);
preference->setDecimals(2);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getShowPlayArea(); };
auto setter = [myAvatar](bool value) { myAvatar->setShowPlayArea(value); };

View file

@ -45,18 +45,6 @@ private:
Q_DECLARE_METATYPE(AnimationPointer)
/**jsdoc
* @class AnimationObject
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {string[]} jointNames
* @property {FBXAnimationFrame[]} frames
*/
/// An animation loaded from the network.
class Animation : public Resource {
Q_OBJECT
@ -72,16 +60,8 @@ public:
virtual bool isLoaded() const override;
/**jsdoc
* @function AnimationObject.getJointNames
* @returns {string[]}
*/
Q_INVOKABLE QStringList getJointNames() const;
/**jsdoc
* @function AnimationObject.getFrames
* @returns {FBXAnimationFrame[]}
*/
Q_INVOKABLE QVector<HFMAnimationFrame> getFrames() const;
const QVector<HFMAnimationFrame>& getFramesReference() const;

View file

@ -25,7 +25,8 @@ class AnimationCacheScriptingInterface : public ScriptableResourceCache, public
// Properties are copied over from ResourceCache (see ResourceCache.h for reason).
/**jsdoc
* API to manage animation cache resources.
* The <code>AnimationCache</code> API manages animation cache resources.
*
* @namespace AnimationCache
*
* @hifi-interface
@ -48,10 +49,10 @@ public:
AnimationCacheScriptingInterface();
/**jsdoc
* Returns animation resource for particular animation.
* Gets information about an animation resource.
* @function AnimationCache.getAnimation
* @param {string} url - URL to load.
* @returns {AnimationObject} animation
* @param {string} url - The URL of the animation.
* @returns {AnimationObject} An animation object.
*/
Q_INVOKABLE AnimationPointer getAnimation(const QString& url);
};

View file

@ -19,6 +19,20 @@
class QScriptEngine;
/**jsdoc
* Information about an animation resource, created by {@link AnimationCache.getAnimation}.
*
* @class AnimationObject
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {string[]} jointNames - The names of the joints that are animated. <em>Read-only.</em>
* @property {AnimationFrameObject[]} frames - The frames in the animation. <em>Read-only.</em>
*/
/// Scriptable wrapper for animation pointers.
class AnimationObject : public QObject, protected QScriptable {
Q_OBJECT
@ -27,11 +41,34 @@ class AnimationObject : public QObject, protected QScriptable {
public:
/**jsdoc
* Gets the names of the joints that are animated.
* @function AnimationObject.getJointNames
* @returns {string[]} The names of the joints that are animated.
*/
Q_INVOKABLE QStringList getJointNames() const;
/**jsdoc
* Gets the frames in the animation.
* @function AnimationObject.getFrames
* @returns {AnimationFrameObject[]} The frames in the animation.
*/
Q_INVOKABLE QVector<HFMAnimationFrame> getFrames() const;
};
/**jsdoc
* Joint rotations in one frame of an animation.
*
* @class AnimationFrameObject
*
* @hifi-interface
* @hifi-client-entity
* @hifi-avatar
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {Quat[]} rotations - Joint rotations. <em>Read-only.</em>
*/
/// Scriptable wrapper for animation frames.
class AnimationFrameObject : public QObject, protected QScriptable {
Q_OBJECT
@ -39,6 +76,11 @@ class AnimationFrameObject : public QObject, protected QScriptable {
public:
/**jsdoc
* Gets the joint rotations in the animation frame.
* @function AnimationFrameObject.getRotations
* @returns {Quat[]} The joint rotations in the animation frame.
*/
Q_INVOKABLE QVector<glm::quat> getRotations() const;
};

View file

@ -499,12 +499,12 @@ void Flow::calculateConstraints(const std::shared_ptr<AnimSkeleton>& skeleton,
bool toFloatSuccess;
QStringRef(&name, (int)(name.size() - j), 1).toString().toFloat(&toFloatSuccess);
if (!toFloatSuccess && (name.size() - j) > (int)simPrefix.size()) {
group = QStringRef(&name, (int)simPrefix.size(), (int)(name.size() - j + 1)).toString();
group = QStringRef(&name, (int)simPrefix.size(), (int)(name.size() - j + 1) - (int)simPrefix.size()).toString();
break;
}
}
if (group.isEmpty()) {
group = QStringRef(&name, (int)simPrefix.size(), name.size() - 1).toString();
group = QStringRef(&name, (int)simPrefix.size(), name.size() - (int)simPrefix.size()).toString();
}
qCDebug(animation) << "Sim joint added to flow: " << name;
} else {

View file

@ -103,6 +103,8 @@ void AudioInjector::finishLocalInjection() {
void AudioInjector::finish() {
withWriteLock([&] {
_state |= AudioInjectorState::LocalInjectionFinished;
_state |= AudioInjectorState::NetworkInjectionFinished;
_state |= AudioInjectorState::Finished;
});
emit finished();
@ -252,7 +254,7 @@ int64_t AudioInjector::injectNextFrame() {
writeStringToStream(noCodecForInjectors, audioPacketStream);
// pack stream identifier (a generated UUID)
audioPacketStream << QUuid::createUuid();
audioPacketStream << _streamID;
// pack the stereo/mono type of the stream
audioPacketStream << options.stereo;
@ -402,4 +404,17 @@ int64_t AudioInjector::injectNextFrame() {
int64_t playNextFrameAt = ++_nextFrame * AudioConstants::NETWORK_FRAME_USECS;
return std::max(INT64_C(0), playNextFrameAt - currentTime);
}
}
void AudioInjector::sendStopInjectorPacket() {
auto nodeList = DependencyManager::get<NodeList>();
if (auto audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer)) {
// Build packet
auto stopInjectorPacket = NLPacket::create(PacketType::StopInjector);
stopInjectorPacket->write(_streamID.toRfc4122());
// Send packet
nodeList->sendUnreliablePacket(*stopInjectorPacket, *audioMixer);
}
}

View file

@ -100,6 +100,7 @@ private:
int64_t injectNextFrame();
bool inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&));
bool injectLocally();
void sendStopInjectorPacket();
static AbstractAudioInterface* _localAudioInterface;
@ -120,6 +121,9 @@ private:
// when the injector is local, we need this
AudioHRTF _localHRTF;
AudioFOA _localFOA;
QUuid _streamID { QUuid::createUuid() };
friend class AudioInjectorManager;
};

View file

@ -105,6 +105,8 @@ void AudioInjectorManager::run() {
if (nextCallDelta >= 0 && !injector->isFinished()) {
// enqueue the injector with the correct timing in our holding queue
heldInjectors.emplace(heldInjectors.end(), usecTimestampNow() + nextCallDelta, injector);
} else {
injector->sendStopInjectorPacket();
}
}
@ -354,4 +356,4 @@ void AudioInjectorManager::stop(const AudioInjectorPointer& injector) {
size_t AudioInjectorManager::getNumInjectors() {
Lock lock(_injectorsMutex);
return _injectors.size();
}
}

View file

@ -124,9 +124,9 @@ typedef QSharedPointer<Sound> SharedSoundPointer;
* An audio resource, created by {@link SoundCache.getSound}, to be played back using {@link Audio.playSound}.
* <p>Supported formats:</p>
* <ul>
* <li>WAV: 16-bit uncompressed WAV at any sample rate, with 1 (mono), 2(stereo), or 4 (ambisonic) channels.</li>
* <li>WAV: 16-bit uncompressed WAV at any sample rate, with 1 (mono), 2 (stereo), or 4 (ambisonic) channels.</li>
* <li>MP3: Mono or stereo, at any sample rate.</li>
* <li>RAW: 48khz 16-bit mono or stereo. Filename must include <code>".stereo"</code> to be interpreted as stereo.</li>
* <li>RAW: 48khz 16-bit mono or stereo. File name must include <code>".stereo"</code> to be interpreted as stereo.</li>
* </ul>
*
* @class SoundObject
@ -138,8 +138,8 @@ typedef QSharedPointer<Sound> SharedSoundPointer;
* @hifi-assignment-client
*
* @property {boolean} downloaded - <code>true</code> if the sound has been downloaded and is ready to be played, otherwise
* <code>false</code>.
* @property {number} duration - The duration of the sound, in seconds.
* <code>false</code>. <em>Read-only.</em>
* @property {number} duration - The duration of the sound, in seconds. <em>Read-only.</em>
*/
class SoundScriptingInterface : public QObject {
Q_OBJECT

View file

@ -25,7 +25,8 @@ class SoundCacheScriptingInterface : public ScriptableResourceCache, public Depe
// Properties are copied over from ResourceCache (see ResourceCache.h for reason).
/**jsdoc
* API to manage sound cache resources.
* The <code>SoundCache</code> API manages sound cache resources.
*
* @namespace SoundCache
*
* @hifi-interface

View file

@ -1525,8 +1525,8 @@ void Avatar::rigReset() {
void Avatar::computeMultiSphereShapes() {
const Rig& rig = getSkeletonModel()->getRig();
glm::vec3 scale = extractScale(rig.getGeometryOffsetPose());
const HFMModel& geometry = getSkeletonModel()->getHFMModel();
glm::vec3 geometryScale = extractScale(rig.getGeometryOffsetPose());
int jointCount = rig.getJointStateCount();
_multiSphereShapes.clear();
_multiSphereShapes.reserve(jointCount);
@ -1535,9 +1535,10 @@ void Avatar::computeMultiSphereShapes() {
std::vector<btVector3> btPoints;
int lineCount = (int)shapeInfo.debugLines.size();
btPoints.reserve(lineCount);
glm::vec3 jointScale = rig.getJointPose(i).scale() / extractScale(rig.getGeometryToRigTransform());
for (int j = 0; j < lineCount; j++) {
const glm::vec3 &point = shapeInfo.debugLines[j];
auto rigPoint = scale * point;
auto rigPoint = jointScale * geometryScale * point;
btVector3 btPoint = glmToBullet(rigPoint);
btPoints.push_back(btPoint);
}

View file

@ -229,8 +229,9 @@ AvatarSharedPointer AvatarHashMap::newOrExistingAvatar(const QUuid& sessionUUID,
AvatarSharedPointer AvatarHashMap::findAvatar(const QUuid& sessionUUID) const {
QReadLocker locker(&_hashLock);
if (_avatarHash.contains(sessionUUID)) {
return _avatarHash.value(sessionUUID);
auto avatarIter = _avatarHash.find(sessionUUID);
if (avatarIter != _avatarHash.end()) {
return avatarIter.value();
}
return nullptr;
}

View file

@ -33,9 +33,8 @@
#include "ModelBakingLoggingCategory.h"
#include "TextureBaker.h"
FBXBaker::FBXBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
ModelBaker(inputModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, hasBeenBaked) {
FBXBaker::FBXBaker(const QUrl& inputModelURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
ModelBaker(inputModelURL, bakedOutputDirectory, originalOutputDirectory, hasBeenBaked) {
if (hasBeenBaked) {
// Look for the original model file one directory higher. Perhaps this is an oven output directory.
QUrl originalRelativePath = QUrl("../original/" + inputModelURL.fileName().replace(BAKED_FBX_EXTENSION, FBX_EXTENSION));
@ -45,15 +44,6 @@ FBXBaker::FBXBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputText
}
void FBXBaker::bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) {
_hfmModel = hfmModel;
if (shouldStop()) {
return;
}
// enumerate the models and textures found in the scene and start a bake for them
rewriteAndBakeSceneTextures();
if (shouldStop()) {
return;
}
@ -114,15 +104,15 @@ void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const
int meshIndex = 0;
for (FBXNode& rootChild : _rootNode.children) {
if (rootChild.name == "Objects") {
for (FBXNode& object : rootChild.children) {
if (object.name == "Geometry") {
if (object.properties.at(2) == "Mesh") {
for (auto object = rootChild.children.begin(); object != rootChild.children.end(); object++) {
if (object->name == "Geometry") {
if (object->properties.at(2) == "Mesh") {
int meshNum = meshIndexToRuntimeOrder[meshIndex];
replaceMeshNodeWithDraco(object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
replaceMeshNodeWithDraco(*object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
meshIndex++;
}
} else if (object.name == "Model") {
for (FBXNode& modelChild : object.children) {
} else if (object->name == "Model") {
for (FBXNode& modelChild : object->children) {
if (modelChild.name == "Properties60" || modelChild.name == "Properties70") {
// This is a properties node
// Remove the geometric transform because that has been applied directly to the vertices in FBXSerializer
@ -142,10 +132,13 @@ void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const
} else if (modelChild.name == "Vertices") {
// This model is also a mesh
int meshNum = meshIndexToRuntimeOrder[meshIndex];
replaceMeshNodeWithDraco(object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
replaceMeshNodeWithDraco(*object, dracoMeshes[meshNum], dracoMaterialLists[meshNum]);
meshIndex++;
}
}
} else if (object->name == "Texture" || object->name == "Video") {
// this is an embedded texture, we need to remove it from the FBX
object = rootChild.children.erase(object);
}
if (hasErrors()) {
@ -154,82 +147,4 @@ void FBXBaker::rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const
}
}
}
}
void FBXBaker::rewriteAndBakeSceneTextures() {
using namespace image::TextureUsage;
QHash<QString, image::TextureUsage::Type> textureTypes;
// enumerate the materials in the extracted geometry so we can determine the texture type for each texture ID
for (const auto& material : _hfmModel->materials) {
if (material.normalTexture.isBumpmap) {
textureTypes[material.normalTexture.id] = BUMP_TEXTURE;
} else {
textureTypes[material.normalTexture.id] = NORMAL_TEXTURE;
}
textureTypes[material.albedoTexture.id] = ALBEDO_TEXTURE;
textureTypes[material.glossTexture.id] = GLOSS_TEXTURE;
textureTypes[material.roughnessTexture.id] = ROUGHNESS_TEXTURE;
textureTypes[material.specularTexture.id] = SPECULAR_TEXTURE;
textureTypes[material.metallicTexture.id] = METALLIC_TEXTURE;
textureTypes[material.emissiveTexture.id] = EMISSIVE_TEXTURE;
textureTypes[material.occlusionTexture.id] = OCCLUSION_TEXTURE;
textureTypes[material.lightmapTexture.id] = LIGHTMAP_TEXTURE;
}
// enumerate the children of the root node
for (FBXNode& rootChild : _rootNode.children) {
if (rootChild.name == "Objects") {
// enumerate the objects
auto object = rootChild.children.begin();
while (object != rootChild.children.end()) {
if (object->name == "Texture") {
// double check that we didn't get an abort while baking the last texture
if (shouldStop()) {
return;
}
// enumerate the texture children
for (FBXNode& textureChild : object->children) {
if (textureChild.name == "RelativeFilename") {
QString hfmTextureFileName { textureChild.properties.at(0).toString() };
// grab the ID for this texture so we can figure out the
// texture type from the loaded materials
auto textureID { object->properties[0].toString() };
auto textureType = textureTypes[textureID];
// Compress the texture information and return the new filename to be added into the FBX scene
auto bakedTextureFile = compressTexture(hfmTextureFileName, textureType);
// If no errors or warnings have occurred during texture compression add the filename to the FBX scene
if (!bakedTextureFile.isNull()) {
textureChild.properties[0] = bakedTextureFile;
} else {
// if bake fails - return, if there were errors and continue, if there were warnings.
if (hasErrors()) {
return;
} else if (hasWarnings()) {
continue;
}
}
}
}
++object;
} else if (object->name == "Video") {
// this is an embedded texture, we need to remove it from the FBX
object = rootChild.children.erase(object);
} else {
++object;
}
}
}
}
}
}

View file

@ -31,20 +31,14 @@ using TextureBakerThreadGetter = std::function<QThread*()>;
class FBXBaker : public ModelBaker {
Q_OBJECT
public:
FBXBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
FBXBaker(const QUrl& inputModelURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
protected:
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) override;
private:
void rewriteAndBakeSceneModels(const QVector<hfm::Mesh>& meshes, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists);
void rewriteAndBakeSceneTextures();
void replaceMeshNodeWithDraco(FBXNode& meshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
hfm::Model::Pointer _hfmModel;
bool _pendingErrorEmission { false };
};
#endif // hifi_FBXBaker_h

View file

@ -27,21 +27,11 @@ std::function<QThread*()> MaterialBaker::_getNextOvenWorkerThreadOperator;
static int materialNum = 0;
namespace std {
template <>
struct hash<graphics::Material::MapChannel> {
size_t operator()(const graphics::Material::MapChannel& a) const {
return std::hash<size_t>()((size_t)a);
}
};
};
MaterialBaker::MaterialBaker(const QString& materialData, bool isURL, const QString& bakedOutputDir, const QUrl& destinationPath) :
MaterialBaker::MaterialBaker(const QString& materialData, bool isURL, const QString& bakedOutputDir) :
_materialData(materialData),
_isURL(isURL),
_bakedOutputDir(bakedOutputDir),
_textureOutputDir(bakedOutputDir + "/materialTextures/" + QString::number(materialNum++)),
_destinationPath(destinationPath)
_textureOutputDir(bakedOutputDir + "/materialTextures/" + QString::number(materialNum++))
{
}
@ -64,6 +54,14 @@ void MaterialBaker::bake() {
}
}
void MaterialBaker::abort() {
Baker::abort();
for (auto& textureBaker : _textureBakers) {
textureBaker->abort();
}
}
void MaterialBaker::loadMaterial() {
if (!_isURL) {
qCDebug(material_baking) << "Loading local material" << _materialData;
@ -104,45 +102,42 @@ void MaterialBaker::processMaterial() {
for (auto networkMaterial : _materialResource->parsedMaterials.networkMaterials) {
if (networkMaterial.second) {
auto textureMaps = networkMaterial.second->getTextureMaps();
for (auto textureMap : textureMaps) {
if (textureMap.second && textureMap.second->getTextureSource()) {
graphics::Material::MapChannel mapChannel = textureMap.first;
auto texture = textureMap.second->getTextureSource();
auto textures = networkMaterial.second->getTextures();
for (auto texturePair : textures) {
auto mapChannel = texturePair.first;
auto textureMap = texturePair.second;
if (textureMap.texture && textureMap.texture->_textureSource) {
auto type = textureMap.texture->getTextureType();
QUrl url = texture->getUrl();
QString cleanURL = url.adjusted(QUrl::RemoveQuery | QUrl::RemoveFragment).toDisplayString();
QByteArray content;
QUrl textureURL;
{
bool foundEmbeddedTexture = false;
auto textureContentMapIter = _textureContentMap.find(networkMaterial.second->getName());
if (textureContentMapIter != _textureContentMap.end()) {
auto textureUsageIter = textureContentMapIter->second.find(type);
if (textureUsageIter != textureContentMapIter->second.end()) {
content = textureUsageIter->second.first;
textureURL = textureUsageIter->second.second;
foundEmbeddedTexture = true;
}
}
if (!foundEmbeddedTexture && textureMap.texture->_textureSource) {
textureURL = textureMap.texture->_textureSource->getUrl().adjusted(QUrl::RemoveQuery | QUrl::RemoveFragment);
}
}
QString cleanURL = textureURL.toDisplayString();
auto idx = cleanURL.lastIndexOf('.');
auto extension = idx >= 0 ? url.toDisplayString().mid(idx + 1).toLower() : "";
QString extension = idx >= 0 ? cleanURL.mid(idx + 1).toLower() : "";
if (QImageReader::supportedImageFormats().contains(extension.toLatin1())) {
QUrl textureURL = url.adjusted(QUrl::RemoveQuery | QUrl::RemoveFragment);
// FIXME: this isn't properly handling bumpMaps or glossMaps
static std::unordered_map<graphics::Material::MapChannel, image::TextureUsage::Type> MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP;
if (MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP.empty()) {
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::EMISSIVE_MAP] = image::TextureUsage::EMISSIVE_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::ALBEDO_MAP] = image::TextureUsage::ALBEDO_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::METALLIC_MAP] = image::TextureUsage::METALLIC_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::ROUGHNESS_MAP] = image::TextureUsage::ROUGHNESS_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::NORMAL_MAP] = image::TextureUsage::NORMAL_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::OCCLUSION_MAP] = image::TextureUsage::OCCLUSION_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::LIGHTMAP_MAP] = image::TextureUsage::LIGHTMAP_TEXTURE;
MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP[graphics::Material::MapChannel::SCATTERING_MAP] = image::TextureUsage::SCATTERING_TEXTURE;
}
auto it = MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP.find(mapChannel);
if (it == MAP_CHANNEL_TO_TEXTURE_USAGE_TYPE_MAP.end()) {
handleError("Unknown map channel");
return;
}
QPair<QUrl, image::TextureUsage::Type> textureKey(textureURL, it->second);
QPair<QUrl, image::TextureUsage::Type> textureKey(textureURL, type);
if (!_textureBakers.contains(textureKey)) {
auto baseTextureFileName = _textureFileNamer.createBaseTextureFileName(textureURL.fileName(), it->second);
auto baseTextureFileName = _textureFileNamer.createBaseTextureFileName(textureURL.fileName(), type);
QSharedPointer<TextureBaker> textureBaker {
new TextureBaker(textureURL, it->second, _textureOutputDir, "", baseTextureFileName),
new TextureBaker(textureURL, type, _textureOutputDir, "", baseTextureFileName, content),
&TextureBaker::deleteLater
};
textureBaker->setMapChannel(mapChannel);
@ -179,7 +174,7 @@ void MaterialBaker::handleFinishedTextureBaker() {
// Replace the old texture URLs
for (auto networkMaterial : _materialsNeedingRewrite.values(textureKey)) {
networkMaterial->getTextureMap(baker->getMapChannel())->getTextureSource()->setUrl(_destinationPath.resolved(relativeURL));
networkMaterial->getTextureMap(baker->getMapChannel())->getTextureSource()->setUrl(relativeURL);
}
} else {
// this texture failed to bake - this doesn't fail the entire bake but we need to add the errors from
@ -245,3 +240,30 @@ void MaterialBaker::outputMaterial() {
// emit signal to indicate the material baking is finished
emit finished();
}
void MaterialBaker::addTexture(const QString& materialName, image::TextureUsage::Type textureUsage, const hfm::Texture& texture) {
auto& textureUsageMap = _textureContentMap[materialName.toStdString()];
if (textureUsageMap.find(textureUsage) == textureUsageMap.end() && !texture.content.isEmpty()) {
textureUsageMap[textureUsage] = { texture.content, texture.filename };
}
};
void MaterialBaker::setMaterials(const QHash<QString, hfm::Material>& materials, const QString& baseURL) {
_materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(), [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
for (auto& material : materials) {
_materialResource->parsedMaterials.names.push_back(material.name.toStdString());
_materialResource->parsedMaterials.networkMaterials[material.name.toStdString()] = std::make_shared<NetworkMaterial>(material, baseURL);
// Store any embedded texture content
addTexture(material.name, image::TextureUsage::NORMAL_TEXTURE, material.normalTexture);
addTexture(material.name, image::TextureUsage::ALBEDO_TEXTURE, material.albedoTexture);
addTexture(material.name, image::TextureUsage::GLOSS_TEXTURE, material.glossTexture);
addTexture(material.name, image::TextureUsage::ROUGHNESS_TEXTURE, material.roughnessTexture);
addTexture(material.name, image::TextureUsage::SPECULAR_TEXTURE, material.specularTexture);
addTexture(material.name, image::TextureUsage::METALLIC_TEXTURE, material.metallicTexture);
addTexture(material.name, image::TextureUsage::EMISSIVE_TEXTURE, material.emissiveTexture);
addTexture(material.name, image::TextureUsage::OCCLUSION_TEXTURE, material.occlusionTexture);
addTexture(material.name, image::TextureUsage::SCATTERING_TEXTURE, material.scatteringTexture);
addTexture(material.name, image::TextureUsage::LIGHTMAP_TEXTURE, material.lightmapTexture);
}
}

View file

@ -24,16 +24,19 @@ static const QString BAKED_MATERIAL_EXTENSION = ".baked.json";
class MaterialBaker : public Baker {
Q_OBJECT
public:
MaterialBaker(const QString& materialData, bool isURL, const QString& bakedOutputDir, const QUrl& destinationPath);
MaterialBaker(const QString& materialData, bool isURL, const QString& bakedOutputDir);
QString getMaterialData() const { return _materialData; }
bool isURL() const { return _isURL; }
QString getBakedMaterialData() const { return _bakedMaterialData; }
void setMaterials(const QHash<QString, hfm::Material>& materials, const QString& baseURL);
static void setNextOvenWorkerThreadOperator(std::function<QThread*()> getNextOvenWorkerThreadOperator) { _getNextOvenWorkerThreadOperator = getNextOvenWorkerThreadOperator; }
public slots:
virtual void bake() override;
virtual void abort() override;
signals:
void originalMaterialLoaded();
@ -57,11 +60,18 @@ private:
QString _bakedOutputDir;
QString _textureOutputDir;
QString _bakedMaterialData;
QUrl _destinationPath;
QScriptEngine _scriptEngine;
static std::function<QThread*()> _getNextOvenWorkerThreadOperator;
TextureFileNamer _textureFileNamer;
void addTexture(const QString& materialName, image::TextureUsage::Type textureUsage, const hfm::Texture& texture);
struct TextureUsageHash {
std::size_t operator()(image::TextureUsage::Type textureUsage) const {
return static_cast<std::size_t>(textureUsage);
}
};
std::unordered_map<std::string, std::unordered_map<image::TextureUsage::Type, std::pair<QByteArray, QString>, TextureUsageHash>> _textureContentMap;
};
#endif // !hifi_MaterialBaker_h

View file

@ -42,12 +42,12 @@
#include "baking/BakerLibrary.h"
ModelBaker::ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
#include <QJsonArray>
ModelBaker::ModelBaker(const QUrl& inputModelURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
_modelURL(inputModelURL),
_bakedOutputDir(bakedOutputDirectory),
_originalOutputDir(originalOutputDirectory),
_textureThreadGetter(inputTextureThreadGetter),
_hasBeenBaked(hasBeenBaked)
{
auto bakedFilename = _modelURL.fileName();
@ -209,7 +209,6 @@ void ModelBaker::bakeSourceCopy() {
}
hifi::ByteArray modelData = modelFile.readAll();
hfm::Model::Pointer bakedModel;
std::vector<hifi::ByteArray> dracoMeshes;
std::vector<std::vector<hifi::ByteArray>> dracoMaterialLists; // Material order for per-mesh material lookup used by dracoMeshes
@ -245,40 +244,76 @@ void ModelBaker::bakeSourceCopy() {
// Begin hfm baking
baker.run();
bakedModel = baker.getHFMModel();
_hfmModel = baker.getHFMModel();
dracoMeshes = baker.getDracoMeshes();
dracoMaterialLists = baker.getDracoMaterialLists();
}
// Populate _textureContentMap with path to content mappings, for quick lookup by URL
for (auto materialIt = bakedModel->materials.cbegin(); materialIt != bakedModel->materials.cend(); materialIt++) {
static const auto addTexture = [](QHash<hifi::ByteArray, hifi::ByteArray>& textureContentMap, const hfm::Texture& texture) {
if (!textureContentMap.contains(texture.filename)) {
// Content may be empty, unless the data is inlined
textureContentMap[texture.filename] = texture.content;
}
};
const hfm::Material& material = *materialIt;
addTexture(_textureContentMap, material.normalTexture);
addTexture(_textureContentMap, material.albedoTexture);
addTexture(_textureContentMap, material.opacityTexture);
addTexture(_textureContentMap, material.glossTexture);
addTexture(_textureContentMap, material.roughnessTexture);
addTexture(_textureContentMap, material.specularTexture);
addTexture(_textureContentMap, material.metallicTexture);
addTexture(_textureContentMap, material.emissiveTexture);
addTexture(_textureContentMap, material.occlusionTexture);
addTexture(_textureContentMap, material.scatteringTexture);
addTexture(_textureContentMap, material.lightmapTexture);
}
// Do format-specific baking
bakeProcessedSource(bakedModel, dracoMeshes, dracoMaterialLists);
bakeProcessedSource(_hfmModel, dracoMeshes, dracoMaterialLists);
if (shouldStop()) {
return;
}
if (_hfmModel->materials.size() > 0) {
_materialBaker = QSharedPointer<MaterialBaker>(
new MaterialBaker(_modelURL.fileName(), true, _bakedOutputDir),
&MaterialBaker::deleteLater
);
_materialBaker->setMaterials(_hfmModel->materials, _modelURL.toString());
connect(_materialBaker.data(), &MaterialBaker::finished, this, &ModelBaker::handleFinishedMaterialBaker);
_materialBaker->bake();
} else {
outputBakedFST();
}
}
// Slot invoked when the MaterialBaker created during the model bake finishes.
// On success, rewrites the FST material mapping so each material points at the
// baked material file; on failure, folds the material baker's errors into our
// warnings (a failed material bake does not fail the whole model bake).
// Either way, finishes by writing the output FST.
void ModelBaker::handleFinishedMaterialBaker() {
    auto baker = qobject_cast<MaterialBaker*>(sender());

    if (baker) {
        if (!baker->hasErrors()) {
            // this MaterialBaker is done and everything went according to plan
            qCDebug(model_baking) << "Adding baked material to FST mapping " << baker->getBakedMaterialData();

            // derive the baked material URL from the model filename, swapping its extension
            QString relativeBakedMaterialURL = _modelURL.fileName();
            auto baseName = relativeBakedMaterialURL.left(relativeBakedMaterialURL.lastIndexOf('.'));
            relativeBakedMaterialURL = baseName + BAKED_MATERIAL_EXTENSION;

            // First we add the materials in the model
            QJsonArray materialMapping;
            // const ref: avoid copying each HFMMaterial out of the container per iteration
            for (const auto& material : _hfmModel->materials) {
                QJsonObject json;
                json["mat::" + material.name] = relativeBakedMaterialURL + "#" + material.name;
                materialMapping.push_back(json);
            }

            // Then we add any existing mappings from the mapping
            if (_mapping.contains(MATERIAL_MAPPING_FIELD)) {
                QByteArray materialMapValue = _mapping[MATERIAL_MAPPING_FIELD].toByteArray();
                QJsonObject oldMaterialMapping = QJsonDocument::fromJson(materialMapValue).object();
                // const ref: keys() already returns a list copy; don't also copy each QString
                for (const auto& key : oldMaterialMapping.keys()) {
                    QJsonObject json;
                    json[key] = oldMaterialMapping[key];
                    materialMapping.push_back(json);
                }
            }

            _mapping[MATERIAL_MAPPING_FIELD] = QJsonDocument(materialMapping).toJson(QJsonDocument::Compact);
        } else {
            // this material failed to bake - this doesn't fail the entire bake but we need to add the errors from
            // the material to our warnings
            _warningList << baker->getWarnings();
        }
    } else {
        handleWarning("Failed to bake the materials for model with URL " + _modelURL.toString());
    }

    outputBakedFST();
}
void ModelBaker::outputBakedFST() {
// Output FST file, copying over input mappings if available
QString outputFSTFilename = !_mappingURL.isEmpty() ? _mappingURL.fileName() : _modelURL.fileName();
auto extensionStart = outputFSTFilename.indexOf(".");
@ -291,8 +326,7 @@ void ModelBaker::bakeSourceCopy() {
auto outputMapping = _mapping;
outputMapping[FST_VERSION_FIELD] = FST_VERSION;
outputMapping[FILENAME_FIELD] = _bakedModelURL.fileName();
// All textures will be found in the same directory as the model
outputMapping[TEXDIR_FIELD] = ".";
outputMapping.remove(TEXDIR_FIELD);
hifi::ByteArray fstOut = FSTReader::writeMapping(outputMapping);
QFile fstOutputFile { outputFSTURL };
@ -307,17 +341,16 @@ void ModelBaker::bakeSourceCopy() {
_outputFiles.push_back(outputFSTURL);
_outputMappingURL = outputFSTURL;
// check if we're already done with textures (in case we had none to re-write)
checkIfTexturesFinished();
exportScene();
qCDebug(model_baking) << "Finished baking, emitting finished" << _modelURL;
emit finished();
}
void ModelBaker::abort() {
Baker::abort();
// tell our underlying TextureBaker instances to abort
// the ModelBaker will wait until all are aborted before emitting its own abort signal
for (auto& textureBaker : _bakingTextures) {
textureBaker->abort();
if (_materialBaker) {
_materialBaker->abort();
}
}
@ -354,247 +387,6 @@ bool ModelBaker::buildDracoMeshNode(FBXNode& dracoMeshNode, const QByteArray& dr
return true;
}
// Bakes the texture referenced by modelTextureFileName (if it is a bakeable
// format) and returns the relative name of the baked meta-texture file that
// the model should be re-mapped to. Returns a null QString on error or when
// the texture format is skipped.
// FIX: the deprecated QString::null (removed in Qt 6) is replaced by the
// equivalent default-constructed QString(); also removed a redundant inner
// re-check of modelTextureFileInfo.filePath().isEmpty() that the enclosing
// `if` had already established to be false.
QString ModelBaker::compressTexture(QString modelTextureFileName, image::TextureUsage::Type textureType) {
    QFileInfo modelTextureFileInfo { modelTextureFileName.replace("\\", "/") };

    if (modelTextureFileInfo.suffix().toLower() == BAKED_TEXTURE_KTX_EXT.mid(1)) {
        // re-baking a model that already references baked textures
        // this is an error - return from here
        handleError("Cannot re-bake a file that already references compressed textures");
        return QString();
    }

    if (!image::getSupportedFormats().contains(modelTextureFileInfo.suffix())) {
        // this is a texture format we don't bake, skip it
        handleWarning(modelTextureFileName + " is not a bakeable texture format");
        return QString();
    }

    // make sure this texture points to something and isn't one we've already re-mapped
    QString textureChild;    // default-constructed == null QString
    if (!modelTextureFileInfo.filePath().isEmpty()) {
        // check if this was an embedded texture that we already have in-memory content for
        QByteArray textureContent = _textureContentMap.value(modelTextureFileName.toLocal8Bit());

        // figure out the URL to this texture, embedded or external
        auto urlToTexture = getTextureURL(modelTextureFileInfo, !textureContent.isNull());

        TextureKey textureKey { urlToTexture, textureType };
        auto bakingTextureIt = _bakingTextures.find(textureKey);
        if (bakingTextureIt == _bakingTextures.cend()) {
            // construct the new baked texture file name and file path
            // ensuring that the baked texture will have a unique name
            // even if there was another texture with the same name at a different path
            QString baseTextureFileName = _textureFileNamer.createBaseTextureFileName(modelTextureFileInfo, textureType);

            QString bakedTextureFilePath {
                _bakedOutputDir + "/" + baseTextureFileName + BAKED_META_TEXTURE_SUFFIX
            };

            textureChild = baseTextureFileName + BAKED_META_TEXTURE_SUFFIX;

            _outputFiles.push_back(bakedTextureFilePath);

            // bake this texture asynchronously
            bakeTexture(textureKey, _bakedOutputDir, baseTextureFileName, textureContent);
        } else {
            // Fetch existing texture meta name
            textureChild = (*bakingTextureIt)->getBaseFilename() + BAKED_META_TEXTURE_SUFFIX;
        }
    }

    qCDebug(model_baking).noquote() << "Re-mapping" << modelTextureFileName
        << "to" << textureChild;

    return textureChild;
}
void ModelBaker::bakeTexture(const TextureKey& textureKey, const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent) {
// start a bake for this texture and add it to our list to keep track of
QSharedPointer<TextureBaker> bakingTexture{
new TextureBaker(textureKey.first, textureKey.second, outputDir, "../", bakedFilename, textureContent),
&TextureBaker::deleteLater
};
// make sure we hear when the baking texture is done or aborted
connect(bakingTexture.data(), &Baker::finished, this, &ModelBaker::handleBakedTexture);
connect(bakingTexture.data(), &TextureBaker::aborted, this, &ModelBaker::handleAbortedTexture);
// keep a shared pointer to the baking texture
_bakingTextures.insert(textureKey, bakingTexture);
// start baking the texture on one of our available worker threads
bakingTexture->moveToThread(_textureThreadGetter());
QMetaObject::invokeMethod(bakingTexture.data(), "bake");
}
// Slot invoked when an individual TextureBaker finishes (successfully or with
// errors). Optionally saves a copy of the original linked texture beside the
// original model, removes the finished baker from our tracking hash, and
// checks whether all textures are done so the model bake can complete.
// FIX: the qDebug line previously dereferenced the result of qobject_cast
// BEFORE the null check - it is now guarded.
void ModelBaker::handleBakedTexture() {
    TextureBaker* bakedTexture = qobject_cast<TextureBaker*>(sender());
    if (!bakedTexture) {
        // sender() was not a TextureBaker (or was already gone); nothing to do
        return;
    }

    qDebug() << "Handling baked texture" << bakedTexture->getTextureURL();

    // make sure we haven't already run into errors, and that this is a valid texture
    if (!shouldStop()) {
        if (!bakedTexture->hasErrors()) {
            if (!_originalOutputDir.isEmpty()) {
                // we've been asked to make copies of the originals, so we need to make copies of this if it is a linked texture
                // use the path to the texture being baked to determine if this was an embedded or a linked texture
                // it is embedded if the texture being baked was inside a folder with the name of the model
                // since that is the fake URL we provide when baking external textures
                if (!_modelURL.isParentOf(bakedTexture->getTextureURL())) {
                    // for linked textures we want to save a copy of original texture beside the original model
                    qCDebug(model_baking) << "Saving original texture for" << bakedTexture->getTextureURL();

                    // check if we have a relative path to use for the texture
                    auto relativeTexturePath = texturePathRelativeToModel(_modelURL, bakedTexture->getTextureURL());

                    QFile originalTextureFile {
                        _originalOutputDir + "/" + relativeTexturePath + bakedTexture->getTextureURL().fileName()
                    };

                    if (relativeTexturePath.length() > 0) {
                        // make the folders needed by the relative path
                        // TODO(review): this block is empty, so the intermediate directories
                        // are never created and the open() below will fail for nested paths.
                        // Presumably QDir(_originalOutputDir).mkpath(relativeTexturePath)
                        // belongs here - confirm against the intended behavior before adding.
                    }

                    if (originalTextureFile.open(QIODevice::WriteOnly) && originalTextureFile.write(bakedTexture->getOriginalTexture()) != -1) {
                        qCDebug(model_baking) << "Saved original texture file" << originalTextureFile.fileName()
                            << "for" << _modelURL;
                    } else {
                        handleError("Could not save original external texture " + originalTextureFile.fileName()
                                    + " for " + _modelURL.toString());
                        return;
                    }
                }
            }

            // now that this texture has been baked and handled, we can remove that TextureBaker from our hash
            _bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
            checkIfTexturesFinished();
        } else {
            // there was an error baking this texture - add it to our list of errors
            _errorList.append(bakedTexture->getErrors());

            // we don't emit finished yet so that the other textures can finish baking first
            _pendingErrorEmission = true;

            // now that this texture has been baked, even though it failed, we can remove that TextureBaker from our list
            _bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });

            // abort any other ongoing texture bakes since we know we'll end up failing
            for (auto& bakingTexture : _bakingTextures) {
                bakingTexture->abort();
            }

            checkIfTexturesFinished();
        }
    } else {
        // we have errors to attend to, so we don't do extra processing for this texture
        // but we do need to remove that TextureBaker from our list
        // and then check if we're done with all textures
        _bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
        checkIfTexturesFinished();
    }
}
// Slot invoked when a TextureBaker aborts. Drops it from our tracking hash,
// marks this model bake as aborted, and aborts any remaining texture bakes.
// FIX: the qDebug line previously dereferenced the result of qobject_cast
// BEFORE the null check - it is now guarded.
void ModelBaker::handleAbortedTexture() {
    // grab the texture bake that was aborted and remove it from our hash since we don't need to track it anymore
    TextureBaker* bakedTexture = qobject_cast<TextureBaker*>(sender());

    if (bakedTexture) {
        qDebug() << "Texture aborted: " << bakedTexture->getTextureURL();
        _bakingTextures.remove({ bakedTexture->getTextureURL(), bakedTexture->getTextureType() });
    }

    // since a texture we were baking aborted, our status is also aborted
    _shouldAbort.store(true);

    // abort any other ongoing texture bakes since we know we'll end up failing
    for (auto& bakingTexture : _bakingTextures) {
        bakingTexture->abort();
    }

    checkIfTexturesFinished();
}
// Computes the URL a texture should be loaded from. Embedded textures get a
// fake URL beneath the model's own URL; external textures resolve to the local
// file when one exists, otherwise to a URL beside the model (matching the
// behaviour of interface).
// FIX: removed an unreachable inner branch that re-tested
// `textureFileInfo.exists() && textureFileInfo.isFile()` inside the else of
// that exact same condition - it could never be true.
QUrl ModelBaker::getTextureURL(const QFileInfo& textureFileInfo, bool isEmbedded) {
    QUrl urlToTexture;

    if (isEmbedded) {
        urlToTexture = _modelURL.toString() + "/" + textureFileInfo.filePath();
    } else {
        if (textureFileInfo.exists() && textureFileInfo.isFile()) {
            // set the texture URL to the local texture that we have confirmed exists
            urlToTexture = QUrl::fromLocalFile(textureFileInfo.absoluteFilePath());
        } else {
            // we didn't find the texture on this machine at the given path,
            // so assume that it is right beside the model to match the behaviour of interface
            urlToTexture = _modelURL.resolved(textureFileInfo.fileName());
        }
    }

    return urlToTexture;
}
// Returns the directory path of textureURL relative to modelURL's directory,
// or an empty string when the texture does not live under the model's directory.
QString ModelBaker::texturePathRelativeToModel(QUrl modelURL, QUrl textureURL) {
    const auto stripOptions = QUrl::RemoveFilename | QUrl::RemoveQuery | QUrl::RemoveFragment;
    auto modelDir = modelURL.toString(stripOptions);
    auto textureDir = textureURL.toString(stripOptions);

    if (!textureDir.startsWith(modelDir)) {
        // the texture path was not a child of the model path, return the empty string
        return "";
    }

    // texture path is a child of the model path; drop the model-path prefix
    return textureDir.mid(modelDir.length());
}
// Called after each texture bake completes or aborts. Once no texture bakes
// remain outstanding, either finishes the model bake normally or, if errors
// were collected along the way, emits the pending failure.
void ModelBaker::checkIfTexturesFinished() {
    // still waiting on outstanding texture bakes - nothing to do yet
    if (!_bakingTextures.isEmpty()) {
        return;
    }

    if (shouldStop()) {
        // if we're checking for completion but we have errors
        // that means one or more of our texture baking operations failed
        if (_pendingErrorEmission) {
            setIsFinished(true);
        }
        return;
    }

    qCDebug(model_baking) << "Finished baking, emitting finished" << _modelURL;
    texturesFinished();
    setIsFinished(true);
}
void ModelBaker::setWasAborted(bool wasAborted) {
if (wasAborted != _wasAborted.load()) {
Baker::setWasAborted(wasAborted);
@ -605,70 +397,6 @@ void ModelBaker::setWasAborted(bool wasAborted) {
}
}
// All texture bakes are complete: embed the baked texture metadata into the
// FBX node tree, then write the final scene to disk.
void ModelBaker::texturesFinished() {
    embedTextureMetaData();
    exportScene();
}
// Embeds the content of each baked meta-texture file directly into the FBX
// node tree: for every "Texture" object whose RelativeFilename ends with the
// baked meta-texture suffix, appends a matching "Video" node (with a fresh id
// above the current maximum) carrying the file's bytes as its "Content" child.
void ModelBaker::embedTextureMetaData() {
    std::vector<FBXNode> embeddedTextureNodes;
    for (FBXNode& rootChild : _rootNode.children) {
        if (rootChild.name == "Objects") {
            // find the largest existing object id so new Video nodes get unique ids
            qlonglong maxId = 0;
            for (auto &child : rootChild.children) {
                if (child.properties.length() == 3) {
                    maxId = std::max(maxId, child.properties[0].toLongLong());
                }
            }
            for (auto& object : rootChild.children) {
                if (object.name == "Texture") {
                    // locate this texture's RelativeFilename child, if any
                    QVariant relativeFilename;
                    for (auto& child : object.children) {
                        if (child.name == "RelativeFilename") {
                            relativeFilename = child.properties[0];
                            break;
                        }
                    }
                    // only baked meta-textures get an embedded Video node
                    if (relativeFilename.isNull()
                        || !relativeFilename.toString().endsWith(BAKED_META_TEXTURE_SUFFIX)) {
                        continue;
                    }
                    if (object.properties.length() < 2) {
                        qWarning() << "Found texture with unexpected number of properties: " << object.name;
                        continue;
                    }
                    // build the Video node: { newId, textureName, "Clip" }
                    FBXNode videoNode;
                    videoNode.name = "Video";
                    videoNode.properties.append(++maxId);
                    videoNode.properties.append(object.properties[1]);
                    videoNode.properties.append("Clip");
                    QString bakedTextureFilePath {
                        _bakedOutputDir + "/" + relativeFilename.toString()
                    };
                    QFile textureFile { bakedTextureFilePath };
                    if (!textureFile.open(QIODevice::ReadOnly)) {
                        qWarning() << "Failed to open: " << bakedTextureFilePath;
                        continue;
                    }
                    // attach the file path and raw bytes, then append alongside the Texture
                    videoNode.children.append({ "RelativeFilename", { relativeFilename }, { } });
                    videoNode.children.append({ "Content", { textureFile.readAll() }, { } });
                    rootChild.children.append(videoNode);
                    textureFile.close();
                }
            }
        }
    }
}
void ModelBaker::exportScene() {
auto fbxData = FBXWriter::encodeFBX(_rootNode);

View file

@ -18,17 +18,13 @@
#include <QtNetwork/QNetworkReply>
#include "Baker.h"
#include "TextureBaker.h"
#include "baking/TextureFileNamer.h"
#include "MaterialBaker.h"
#include "ModelBakingLoggingCategory.h"
#include <gpu/Texture.h>
#include <FBX.h>
#include <hfm/HFM.h>
using TextureBakerThreadGetter = std::function<QThread*()>;
using GetMaterialIDCallback = std::function <int(int)>;
static const QString FST_EXTENSION { ".fst" };
@ -42,10 +38,7 @@ class ModelBaker : public Baker {
Q_OBJECT
public:
using TextureKey = QPair<QUrl, image::TextureUsage::Type>;
ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
ModelBaker(const QUrl& inputModelURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
void setOutputURLSuffix(const QUrl& urlSuffix);
void setMappingURL(const QUrl& mappingURL);
@ -54,7 +47,6 @@ public:
void initializeOutputDirs();
bool buildDracoMeshNode(FBXNode& dracoMeshNode, const QByteArray& dracoMeshBytes, const std::vector<hifi::ByteArray>& dracoMaterialList);
QString compressTexture(QString textureFileName, image::TextureUsage::Type = image::TextureUsage::Type::DEFAULT_TEXTURE);
virtual void setWasAborted(bool wasAborted) override;
QUrl getModelURL() const { return _modelURL; }
@ -71,20 +63,15 @@ public slots:
protected:
void saveSourceModel();
virtual void bakeProcessedSource(const hfm::Model::Pointer& hfmModel, const std::vector<hifi::ByteArray>& dracoMeshes, const std::vector<std::vector<hifi::ByteArray>>& dracoMaterialLists) = 0;
void checkIfTexturesFinished();
void texturesFinished();
void embedTextureMetaData();
void exportScene();
FBXNode _rootNode;
QHash<QByteArray, QByteArray> _textureContentMap;
QUrl _modelURL;
QUrl _outputURLSuffix;
QUrl _mappingURL;
hifi::VariantHash _mapping;
QString _bakedOutputDir;
QString _originalOutputDir;
TextureBakerThreadGetter _textureThreadGetter;
QString _originalOutputModelPath;
QString _outputMappingURL;
QUrl _bakedModelURL;
@ -92,23 +79,15 @@ protected:
protected slots:
void handleModelNetworkReply();
virtual void bakeSourceCopy();
private slots:
void handleBakedTexture();
void handleAbortedTexture();
void handleFinishedMaterialBaker();
private:
QUrl getTextureURL(const QFileInfo& textureFileInfo, bool isEmbedded = false);
void bakeTexture(const TextureKey& textureKey, const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent);
QString texturePathRelativeToModel(QUrl modelURL, QUrl textureURL);
QMultiHash<TextureKey, QSharedPointer<TextureBaker>> _bakingTextures;
QHash<QString, int> _textureNameMatchCount;
bool _pendingErrorEmission { false };
void outputBakedFST();
bool _hasBeenBaked { false };
TextureFileNamer _textureFileNamer;
hfm::Model::Pointer _hfmModel;
QSharedPointer<MaterialBaker> _materialBaker;
};
#endif // hifi_ModelBaker_h

View file

@ -132,55 +132,6 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& h
handleWarning("Baked mesh for OBJ model '" + _modelURL.toString() + "' is empty");
}
// Generating Texture Node
// iterate through mesh parts and process the associated textures
auto size = meshParts.size();
for (int i = 0; i < size; i++) {
QString material = meshParts[i].materialID;
HFMMaterial currentMaterial = hfmModel->materials[material];
if (!currentMaterial.albedoTexture.filename.isEmpty() || !currentMaterial.specularTexture.filename.isEmpty()) {
auto textureID = nextNodeID();
_mapTextureMaterial.emplace_back(textureID, i);
FBXNode textureNode;
{
textureNode.name = TEXTURE_NODE_NAME;
textureNode.properties = { textureID, "texture" + QString::number(textureID) };
}
// Texture node child - TextureName node
FBXNode textureNameNode;
{
textureNameNode.name = TEXTURENAME_NODE_NAME;
QByteArray propertyString = (!currentMaterial.albedoTexture.filename.isEmpty()) ? "Kd" : "Ka";
textureNameNode.properties = { propertyString };
}
// Texture node child - Relative Filename node
FBXNode relativeFilenameNode;
{
relativeFilenameNode.name = RELATIVEFILENAME_NODE_NAME;
}
QByteArray textureFileName = (!currentMaterial.albedoTexture.filename.isEmpty()) ? currentMaterial.albedoTexture.filename : currentMaterial.specularTexture.filename;
auto textureType = (!currentMaterial.albedoTexture.filename.isEmpty()) ? image::TextureUsage::Type::ALBEDO_TEXTURE : image::TextureUsage::Type::SPECULAR_TEXTURE;
// Compress the texture using ModelBaker::compressTexture() and store compressed file's name in the node
auto textureFile = compressTexture(textureFileName, textureType);
if (textureFile.isNull()) {
// Baking failed return
handleError("Failed to compress texture: " + textureFileName);
return;
}
relativeFilenameNode.properties = { textureFile };
textureNode.children = { textureNameNode, relativeFilenameNode };
objectNode.children.append(textureNode);
}
}
// Generating Connections node
connectionsNode.name = CONNECTIONS_NODE_NAME;
@ -199,29 +150,6 @@ void OBJBaker::createFBXNodeTree(FBXNode& rootNode, const hfm::Model::Pointer& h
cNode.properties = { CONNECTIONS_NODE_PROPERTY, materialID, modelID };
connectionsNode.children.append(cNode);
}
// Connect textures to materials
for (const auto& texMat : _mapTextureMaterial) {
FBXNode cAmbientNode;
cAmbientNode.name = C_NODE_NAME;
cAmbientNode.properties = {
CONNECTIONS_NODE_PROPERTY_1,
texMat.first,
_materialIDs[texMat.second],
"AmbientFactor"
};
connectionsNode.children.append(cAmbientNode);
FBXNode cDiffuseNode;
cDiffuseNode.name = C_NODE_NAME;
cDiffuseNode.properties = {
CONNECTIONS_NODE_PROPERTY_1,
texMat.first,
_materialIDs[texMat.second],
"DiffuseColor"
};
connectionsNode.children.append(cDiffuseNode);
}
}
// Set properties for material nodes

View file

@ -35,9 +35,7 @@ private:
void setMaterialNodeProperties(FBXNode& materialNode, QString material, const hfm::Model::Pointer& hfmModel);
NodeID nextNodeID() { return _nodeID++; }
NodeID _nodeID { 0 };
std::vector<NodeID> _materialIDs;
std::vector<std::pair<NodeID, int>> _mapTextureMaterial;
};
#endif // hifi_OBJBaker_h

View file

@ -16,7 +16,7 @@
#include <QtCore/QFile>
#include <QtNetwork/QNetworkReply>
#include <image/Image.h>
#include <image/TextureProcessing.h>
#include <ktx/KTX.h>
#include <NetworkAccessManager.h>
#include <SharedUtil.h>

View file

@ -18,7 +18,7 @@
#include <QDir>
#include <QImageReader>
#include <image/Image.h>
#include <image/TextureProcessing.h>
#include "Baker.h"

View file

@ -44,7 +44,7 @@ bool isModelBaked(const QUrl& bakeableModelURL) {
return beforeModelExtension.endsWith(".baked");
}
std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& contentOutputPath) {
std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, const QString& contentOutputPath) {
auto filename = bakeableModelURL.fileName();
// Output in a sub-folder with the name of the model, potentially suffixed by a number to make it unique
@ -58,20 +58,20 @@ std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, TextureB
QString bakedOutputDirectory = contentOutputPath + subDirName + "/baked";
QString originalOutputDirectory = contentOutputPath + subDirName + "/original";
return getModelBakerWithOutputDirectories(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory);
return getModelBakerWithOutputDirectories(bakeableModelURL, bakedOutputDirectory, originalOutputDirectory);
}
std::unique_ptr<ModelBaker> getModelBakerWithOutputDirectories(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& bakedOutputDirectory, const QString& originalOutputDirectory) {
std::unique_ptr<ModelBaker> getModelBakerWithOutputDirectories(const QUrl& bakeableModelURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory) {
auto filename = bakeableModelURL.fileName();
std::unique_ptr<ModelBaker> baker;
if (filename.endsWith(FST_EXTENSION, Qt::CaseInsensitive)) {
baker = std::make_unique<FSTBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, filename.endsWith(BAKED_FST_EXTENSION, Qt::CaseInsensitive));
baker = std::make_unique<FSTBaker>(bakeableModelURL, bakedOutputDirectory, originalOutputDirectory, filename.endsWith(BAKED_FST_EXTENSION, Qt::CaseInsensitive));
} else if (filename.endsWith(FBX_EXTENSION, Qt::CaseInsensitive)) {
baker = std::make_unique<FBXBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, filename.endsWith(BAKED_FBX_EXTENSION, Qt::CaseInsensitive));
baker = std::make_unique<FBXBaker>(bakeableModelURL, bakedOutputDirectory, originalOutputDirectory, filename.endsWith(BAKED_FBX_EXTENSION, Qt::CaseInsensitive));
} else if (filename.endsWith(OBJ_EXTENSION, Qt::CaseInsensitive)) {
baker = std::make_unique<OBJBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory);
baker = std::make_unique<OBJBaker>(bakeableModelURL, bakedOutputDirectory, originalOutputDirectory);
//} else if (filename.endsWith(GLTF_EXTENSION, Qt::CaseInsensitive)) {
//baker = std::make_unique<GLTFBaker>(bakeableModelURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory);
} else {

View file

@ -23,9 +23,9 @@ bool isModelBaked(const QUrl& bakeableModelURL);
// Assuming the URL is valid, gets the appropriate baker for the given URL, and creates the base directory where the baker's output will later be stored
// Returns an empty pointer if a baker could not be created
std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& contentOutputPath);
std::unique_ptr<ModelBaker> getModelBaker(const QUrl& bakeableModelURL, const QString& contentOutputPath);
// Similar to getModelBaker, but gives control over where the output folders will be
std::unique_ptr<ModelBaker> getModelBakerWithOutputDirectories(const QUrl& bakeableModelURL, TextureBakerThreadGetter inputTextureThreadGetter, const QString& bakedOutputDirectory, const QString& originalOutputDirectory);
std::unique_ptr<ModelBaker> getModelBakerWithOutputDirectories(const QUrl& bakeableModelURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory);
#endif // hifi_BakerLibrary_h

View file

@ -18,9 +18,8 @@
#include <FSTReader.h>
FSTBaker::FSTBaker(const QUrl& inputMappingURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
ModelBaker(inputMappingURL, inputTextureThreadGetter, bakedOutputDirectory, originalOutputDirectory, hasBeenBaked) {
FSTBaker::FSTBaker(const QUrl& inputMappingURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory, bool hasBeenBaked) :
ModelBaker(inputMappingURL, bakedOutputDirectory, originalOutputDirectory, hasBeenBaked) {
if (hasBeenBaked) {
// Look for the original model file one directory higher. Perhaps this is an oven output directory.
QUrl originalRelativePath = QUrl("../original/" + inputMappingURL.fileName().replace(BAKED_FST_EXTENSION, FST_EXTENSION));
@ -70,7 +69,7 @@ void FSTBaker::bakeSourceCopy() {
return;
}
auto baker = getModelBakerWithOutputDirectories(bakeableModelURL, _textureThreadGetter, _bakedOutputDir, _originalOutputDir);
auto baker = getModelBakerWithOutputDirectories(bakeableModelURL, _bakedOutputDir, _originalOutputDir);
_modelBaker = std::unique_ptr<ModelBaker>(dynamic_cast<ModelBaker*>(baker.release()));
if (!_modelBaker) {
handleError("The model url '" + bakeableModelURL.toString() + "' from the FST file '" + _originalOutputModelPath + "' (property: '" + FILENAME_FIELD + "') could not be used to initialize a valid model baker");

View file

@ -18,8 +18,7 @@ class FSTBaker : public ModelBaker {
Q_OBJECT
public:
FSTBaker(const QUrl& inputMappingURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
FSTBaker(const QUrl& inputMappingURL, const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "", bool hasBeenBaked = false);
virtual QUrl getFullOutputMappingURL() const override;

View file

@ -15,7 +15,7 @@
#include <QtCore/QFileInfo>
#include <QHash>
#include <image/Image.h>
#include <image/TextureProcessing.h>
class TextureFileNamer {
public:

View file

@ -109,7 +109,7 @@ bool Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
return Parent::internalActivate();
}
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra(const gpu::FramebufferPointer& compositeFramebuffer) {
void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
#if defined(Q_OS_ANDROID)
auto& virtualPadManager = VirtualPad::Manager::instance();
if(virtualPadManager.getLeftVirtualPad()->isShown()) {
@ -121,7 +121,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra(const gpu::FramebufferPoin
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(compositeFramebuffer);
batch.setFramebuffer(_compositeFramebuffer);
batch.resetViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_cursorPipeline);
@ -140,7 +140,7 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra(const gpu::FramebufferPoin
});
}
#endif
Parent::compositeExtra(compositeFramebuffer);
Parent::compositeExtra();
}
static const uint32_t MIN_THROTTLE_CHECK_FRAMES = 60;

View file

@ -33,7 +33,7 @@ public:
virtual bool isThrottled() const override;
virtual void compositeExtra(const gpu::FramebufferPointer&) override;
virtual void compositeExtra() override;
virtual void pluginUpdate() override {};

View file

@ -379,6 +379,14 @@ void OpenGLDisplayPlugin::customizeContext() {
scissorState->setDepthTest(gpu::State::DepthTest(false));
scissorState->setScissorEnable(true);
{
#ifdef Q_OS_ANDROID
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureGammaLinearToSRGB);
#else
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
#endif
_simplePipeline = gpu::Pipeline::create(program, scissorState);
}
{
#ifdef Q_OS_ANDROID
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureGammaLinearToSRGB);
@ -388,59 +396,29 @@ void OpenGLDisplayPlugin::customizeContext() {
_presentPipeline = gpu::Pipeline::create(program, scissorState);
}
// HUD operator
{
gpu::PipelinePointer hudPipeline;
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
hudPipeline = gpu::Pipeline::create(program, blendState);
}
gpu::PipelinePointer hudMirrorPipeline;
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
hudMirrorPipeline = gpu::Pipeline::create(program, blendState);
}
_hudOperator = [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, const gpu::FramebufferPointer& compositeFramebuffer, bool mirror) {
auto hudStereo = isStereo();
auto hudCompositeFramebufferSize = compositeFramebuffer->getSize();
std::array<glm::ivec4, 2> hudEyeViewports;
for_each_eye([&](Eye eye) {
hudEyeViewports[eye] = eyeViewport(eye);
});
if (hudPipeline && hudTexture) {
batch.enableStereo(false);
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
batch.setResourceTexture(0, hudTexture);
if (hudStereo) {
for_each_eye([&](Eye eye) {
batch.setViewportTransform(hudEyeViewports[eye]);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
}
};
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
_hudPipeline = gpu::Pipeline::create(program, blendState);
}
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
_mirrorHUDPipeline = gpu::Pipeline::create(program, blendState);
}
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTransformedTexture);
_cursorPipeline = gpu::Pipeline::create(program, blendState);
}
}
updateCompositeFramebuffer();
}
void OpenGLDisplayPlugin::uncustomizeContext() {
_presentPipeline.reset();
_cursorPipeline.reset();
_hudOperator = DEFAULT_HUD_OPERATOR;
_hudPipeline.reset();
_mirrorHUDPipeline.reset();
_compositeFramebuffer.reset();
withPresentThreadLock([&] {
_currentFrame.reset();
_lastFrame = nullptr;
@ -532,16 +510,24 @@ void OpenGLDisplayPlugin::captureFrame(const std::string& filename) const {
});
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor) {
renderFromTexture(batch, texture, viewport, scissor, nullptr);
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& destFbo, const gpu::FramebufferPointer& copyFbo /*=gpu::FramebufferPointer()*/) {
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& copyFbo /*=gpu::FramebufferPointer()*/) {
auto fbo = gpu::FramebufferPointer();
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(destFbo);
batch.setFramebuffer(fbo);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
batch.setStateScissorRect(scissor);
batch.setViewportTransform(viewport);
batch.setResourceTexture(0, texture);
#ifndef USE_GLES
batch.setPipeline(_presentPipeline);
#else
batch.setPipeline(_simplePipeline);
#endif
batch.draw(gpu::TRIANGLE_STRIP, 4);
if (copyFbo) {
gpu::Vec4i copyFboRect(0, 0, copyFbo->getWidth(), copyFbo->getHeight());
@ -567,7 +553,7 @@ void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::Textur
batch.setViewportTransform(copyFboRect);
batch.setStateScissorRect(copyFboRect);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, {0.0f, 0.0f, 0.0f, 1.0f});
batch.blit(destFbo, sourceRect, copyFbo, copyRect);
batch.blit(fbo, sourceRect, copyFbo, copyRect);
}
}
@ -595,14 +581,41 @@ void OpenGLDisplayPlugin::updateFrameData() {
});
}
void OpenGLDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compositeFramebuffer) {
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> OpenGLDisplayPlugin::getHUDOperator() {
auto hudPipeline = _hudPipeline;
auto hudMirrorPipeline = _mirrorHUDPipeline;
auto hudStereo = isStereo();
auto hudCompositeFramebufferSize = _compositeFramebuffer->getSize();
std::array<glm::ivec4, 2> hudEyeViewports;
for_each_eye([&](Eye eye) {
hudEyeViewports[eye] = eyeViewport(eye);
});
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.enableStereo(false);
batch.setPipeline(mirror ? hudMirrorPipeline : hudPipeline);
batch.setResourceTexture(0, hudTexture);
if (hudStereo) {
for_each_eye([&](Eye eye) {
batch.setViewportTransform(hudEyeViewports[eye]);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), hudCompositeFramebufferSize));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
}
};
}
void OpenGLDisplayPlugin::compositePointer() {
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
auto cursorTransform = DependencyManager::get<CompositorHelper>()->getReticleTransform(glm::mat4());
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setProjectionTransform(mat4());
batch.setFramebuffer(compositeFramebuffer);
batch.setFramebuffer(_compositeFramebuffer);
batch.setPipeline(_cursorPipeline);
batch.setResourceTexture(0, cursorData.texture);
batch.resetViewTransform();
@ -613,13 +626,34 @@ void OpenGLDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compos
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
} else {
batch.setViewportTransform(ivec4(uvec2(0), compositeFramebuffer->getSize()));
batch.setViewportTransform(ivec4(uvec2(0), _compositeFramebuffer->getSize()));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
});
}
void OpenGLDisplayPlugin::compositeLayers(const gpu::FramebufferPointer& compositeFramebuffer) {
void OpenGLDisplayPlugin::compositeScene() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setViewportTransform(ivec4(uvec2(), _compositeFramebuffer->getSize()));
batch.setStateScissorRect(ivec4(uvec2(), _compositeFramebuffer->getSize()));
batch.resetViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_simplePipeline);
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}
void OpenGLDisplayPlugin::compositeLayers() {
updateCompositeFramebuffer();
{
PROFILE_RANGE_EX(render_detail, "compositeScene", 0xff0077ff, (uint64_t)presentCount())
compositeScene();
}
#ifdef HIFI_ENABLE_NSIGHT_DEBUG
if (false) // do not draw the HUD if running nsight debug
#endif
@ -633,35 +667,23 @@ void OpenGLDisplayPlugin::compositeLayers(const gpu::FramebufferPointer& composi
{
PROFILE_RANGE_EX(render_detail, "compositeExtra", 0xff0077ff, (uint64_t)presentCount())
compositeExtra(compositeFramebuffer);
compositeExtra();
}
// Draw the pointer last so it's on top of everything
auto compositorHelper = DependencyManager::get<CompositorHelper>();
if (compositorHelper->getReticleVisible()) {
PROFILE_RANGE_EX(render_detail, "compositePointer", 0xff0077ff, (uint64_t)presentCount())
compositePointer(compositeFramebuffer);
compositePointer();
}
}
void OpenGLDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
void OpenGLDisplayPlugin::internalPresent() {
render([&](gpu::Batch& batch) {
// Note: _displayTexture must currently be the same size as the display.
uvec2 dims = _displayTexture ? uvec2(_displayTexture->getDimensions()) : getSurfacePixels();
auto viewport = ivec4(uvec2(0), dims);
gpu::TexturePointer finalTexture;
if (_displayTexture) {
finalTexture = _displayTexture;
} else if (compositeFramebuffer) {
finalTexture = compositeFramebuffer->getRenderBuffer(0);
} else {
qCWarning(displayPlugins) << "No valid texture for output";
}
if (finalTexture) {
renderFromTexture(batch, finalTexture, viewport, viewport);
}
renderFromTexture(batch, _displayTexture ? _displayTexture : _compositeFramebuffer->getRenderBuffer(0), viewport, viewport);
});
swapBuffers();
_presentRate.increment();
@ -678,7 +700,7 @@ void OpenGLDisplayPlugin::present() {
}
incrementPresentCount();
if (_currentFrame && _currentFrame->framebuffer) {
if (_currentFrame) {
auto correction = getViewCorrection();
getGLBackend()->setCameraCorrection(correction, _prevRenderView);
_prevRenderView = correction * _currentFrame->view;
@ -698,18 +720,18 @@ void OpenGLDisplayPlugin::present() {
// Write all layers to a local framebuffer
{
PROFILE_RANGE_EX(render, "composite", 0xff00ffff, frameId)
compositeLayers(_currentFrame->framebuffer);
compositeLayers();
}
// Take the composite framebuffer and send it to the output device
{
PROFILE_RANGE_EX(render, "internalPresent", 0xff00ffff, frameId)
internalPresent(_currentFrame->framebuffer);
internalPresent();
}
gpu::Backend::freeGPUMemSize.set(gpu::gl::getFreeDedicatedMemory());
} else if (alwaysPresent()) {
internalPresent(nullptr);
internalPresent();
}
_movingAveragePresent.addSample((float)(usecTimestampNow() - startPresent));
}
@ -766,12 +788,7 @@ bool OpenGLDisplayPlugin::setDisplayTexture(const QString& name) {
}
QImage OpenGLDisplayPlugin::getScreenshot(float aspectRatio) const {
if (!_currentFrame || !_currentFrame->framebuffer) {
return QImage();
}
auto compositeFramebuffer = _currentFrame->framebuffer;
auto size = compositeFramebuffer->getSize();
auto size = _compositeFramebuffer->getSize();
if (isHmd()) {
size.x /= 2;
}
@ -789,7 +806,7 @@ QImage OpenGLDisplayPlugin::getScreenshot(float aspectRatio) const {
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
QImage screenshot(bestSize.x, bestSize.y, QImage::Format_ARGB32);
withOtherThreadContext([&] {
glBackend->downloadFramebuffer(compositeFramebuffer, ivec4(corner, bestSize), screenshot);
glBackend->downloadFramebuffer(_compositeFramebuffer, ivec4(corner, bestSize), screenshot);
});
return screenshot.mirrored(false, true);
}
@ -841,7 +858,7 @@ bool OpenGLDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
}
ivec4 OpenGLDisplayPlugin::eyeViewport(Eye eye) const {
auto vpSize = glm::uvec2(getRecommendedRenderSize());
uvec2 vpSize = _compositeFramebuffer->getSize();
vpSize.x /= 2;
uvec2 vpPos;
if (eye == Eye::Right) {
@ -874,6 +891,14 @@ void OpenGLDisplayPlugin::render(std::function<void(gpu::Batch& batch)> f) {
OpenGLDisplayPlugin::~OpenGLDisplayPlugin() {
}
void OpenGLDisplayPlugin::updateCompositeFramebuffer() {
auto renderSize = glm::uvec2(getRecommendedRenderSize());
if (!_compositeFramebuffer || _compositeFramebuffer->getSize() != renderSize) {
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_RGBA_32, renderSize.x, renderSize.y));
// _compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_SRGBA_32, renderSize.x, renderSize.y));
}
}
void OpenGLDisplayPlugin::copyTextureToQuickFramebuffer(NetworkTexturePointer networkTexture, QOpenGLFramebufferObject* target, GLsync* fenceSync) {
#if !defined(USE_GLES)
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();

View file

@ -94,10 +94,14 @@ protected:
// is not populated
virtual bool alwaysPresent() const { return false; }
void updateCompositeFramebuffer();
virtual QThread::Priority getPresentPriority() { return QThread::HighPriority; }
virtual void compositeLayers(const gpu::FramebufferPointer&);
virtual void compositePointer(const gpu::FramebufferPointer&);
virtual void compositeExtra(const gpu::FramebufferPointer&) {};
virtual void compositeLayers();
virtual void compositeScene();
virtual std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator();
virtual void compositePointer();
virtual void compositeExtra() {};
// These functions must only be called on the presentation thread
virtual void customizeContext();
@ -112,10 +116,10 @@ protected:
virtual void deactivateSession() {}
// Plugin specific functionality to send the composed scene to the output window or device
virtual void internalPresent(const gpu::FramebufferPointer&);
virtual void internalPresent();
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& destFbo = nullptr, const gpu::FramebufferPointer& copyFbo = nullptr);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor, const gpu::FramebufferPointer& fbo);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer& texture, const glm::ivec4& viewport, const glm::ivec4& scissor);
virtual void updateFrameData();
virtual glm::mat4 getViewCorrection() { return glm::mat4(); }
@ -138,8 +142,14 @@ protected:
gpu::FramePointer _currentFrame;
gpu::Frame* _lastFrame { nullptr };
mat4 _prevRenderView;
gpu::FramebufferPointer _compositeFramebuffer;
gpu::PipelinePointer _hudPipeline;
gpu::PipelinePointer _mirrorHUDPipeline;
gpu::ShaderPointer _mirrorHUDPS;
gpu::PipelinePointer _simplePipeline;
gpu::PipelinePointer _presentPipeline;
gpu::PipelinePointer _cursorPipeline;
gpu::TexturePointer _displayTexture;
gpu::TexturePointer _displayTexture{};
float _compositeHUDAlpha { 1.0f };
struct CursorData {
@ -175,9 +185,5 @@ protected:
// be serialized through this mutex
mutable Mutex _presentMutex;
float _hudAlpha{ 1.0f };
private:
gpu::PipelinePointer _presentPipeline;
};

View file

@ -24,7 +24,7 @@ public:
protected:
void updatePresentPose() override;
void hmdPresent(const gpu::FramebufferPointer&) override {}
void hmdPresent() override {}
bool isHmdMounted() const override { return true; }
bool internalActivate() override;
private:

View file

@ -114,23 +114,20 @@ void HmdDisplayPlugin::internalDeactivate() {
void HmdDisplayPlugin::customizeContext() {
Parent::customizeContext();
_hudOperator = _hudRenderer.build();
_hudRenderer.build();
}
void HmdDisplayPlugin::uncustomizeContext() {
// This stops the weirdness where if the preview was disabled, on switching back to 2D,
// the vsync was stuck in the disabled state. No idea why that happens though.
_disablePreview = false;
if (_currentFrame && _currentFrame->framebuffer) {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(_currentFrame->framebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
});
}
_hudRenderer = {};
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(_compositeFramebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
});
_hudRenderer = HUDRenderer();
_previewTexture.reset();
Parent::uncustomizeContext();
}
@ -177,11 +174,11 @@ float HmdDisplayPlugin::getLeftCenterPixel() const {
return leftCenterPixel;
}
void HmdDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
void HmdDisplayPlugin::internalPresent() {
PROFILE_RANGE_EX(render, __FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
// Composite together the scene, hud and mouse cursor
hmdPresent(compositeFramebuffer);
hmdPresent();
if (_displayTexture) {
// Note: _displayTexture must currently be the same size as the display.
@ -263,7 +260,7 @@ void HmdDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeF
viewport.z *= 2;
}
renderFromTexture(batch, compositeFramebuffer->getRenderBuffer(0), viewport, scissor, nullptr, fbo);
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor, fbo);
});
swapBuffers();
@ -348,7 +345,7 @@ glm::mat4 HmdDisplayPlugin::getViewCorrection() {
}
}
DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
void HmdDisplayPlugin::HUDRenderer::build() {
vertices = std::make_shared<gpu::Buffer>();
indices = std::make_shared<gpu::Buffer>();
@ -383,7 +380,7 @@ DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
indexCount = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRANGLE;
// Compute indices order
std::vector<GLushort> indexData;
std::vector<GLushort> indices;
for (int i = 0; i < stacks - 1; i++) {
for (int j = 0; j < slices - 1; j++) {
GLushort bottomLeftIndex = i * slices + j;
@ -391,21 +388,24 @@ DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
GLushort topLeftIndex = bottomLeftIndex + slices;
GLushort topRightIndex = topLeftIndex + 1;
// FIXME make a z-order curve for better vertex cache locality
indexData.push_back(topLeftIndex);
indexData.push_back(bottomLeftIndex);
indexData.push_back(topRightIndex);
indices.push_back(topLeftIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(topRightIndex);
indexData.push_back(topRightIndex);
indexData.push_back(bottomLeftIndex);
indexData.push_back(bottomRightIndex);
indices.push_back(topRightIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(bottomRightIndex);
}
}
indices->append(indexData);
this->indices->append(indices);
format = std::make_shared<gpu::Stream::Format>(); // 1 for everyone
format->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
format->setAttribute(gpu::Stream::TEXCOORD, gpu::Stream::TEXCOORD, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
uniformsBuffer = std::make_shared<gpu::Buffer>(sizeof(Uniforms), nullptr);
updatePipeline();
}
void HmdDisplayPlugin::HUDRenderer::updatePipeline() {
if (!pipeline) {
auto program = gpu::Shader::createProgram(shader::render_utils::program::hmd_ui);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
@ -416,6 +416,10 @@ DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
pipeline = gpu::Pipeline::create(program, state);
}
}
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::HUDRenderer::render(HmdDisplayPlugin& plugin) {
updatePipeline();
auto hudPipeline = pipeline;
auto hudFormat = format;
@ -424,9 +428,9 @@ DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
auto hudUniformBuffer = uniformsBuffer;
auto hudUniforms = uniforms;
auto hudIndexCount = indexCount;
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, const gpu::FramebufferPointer&, const bool mirror) {
if (pipeline && hudTexture) {
batch.setPipeline(pipeline);
return [=](gpu::Batch& batch, const gpu::TexturePointer& hudTexture, bool mirror) {
if (hudPipeline && hudTexture) {
batch.setPipeline(hudPipeline);
batch.setInputFormat(hudFormat);
gpu::BufferView posView(hudVertices, VERTEX_OFFSET, hudVertices->getSize(), VERTEX_STRIDE, hudFormat->getAttributes().at(gpu::Stream::POSITION)._element);
@ -450,7 +454,7 @@ DisplayPlugin::HUDOperator HmdDisplayPlugin::HUDRenderer::build() {
};
}
void HmdDisplayPlugin::compositePointer(const gpu::FramebufferPointer& compositeFramebuffer) {
void HmdDisplayPlugin::compositePointer() {
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
auto compositorHelper = DependencyManager::get<CompositorHelper>();
@ -459,7 +463,7 @@ void HmdDisplayPlugin::compositePointer(const gpu::FramebufferPointer& composite
render([&](gpu::Batch& batch) {
// FIXME use standard gpu stereo rendering for this.
batch.enableStereo(false);
batch.setFramebuffer(compositeFramebuffer);
batch.setFramebuffer(_compositeFramebuffer);
batch.setPipeline(_cursorPipeline);
batch.setResourceTexture(0, cursorData.texture);
batch.resetViewTransform();
@ -474,6 +478,10 @@ void HmdDisplayPlugin::compositePointer(const gpu::FramebufferPointer& composite
});
}
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> HmdDisplayPlugin::getHUDOperator() {
return _hudRenderer.render(*this);
}
HmdDisplayPlugin::~HmdDisplayPlugin() {
}

View file

@ -53,15 +53,16 @@ signals:
void hmdVisibleChanged(bool visible);
protected:
virtual void hmdPresent(const gpu::FramebufferPointer&) = 0;
virtual void hmdPresent() = 0;
virtual bool isHmdMounted() const = 0;
virtual void postPreview() {};
virtual void updatePresentPose();
bool internalActivate() override;
void internalDeactivate() override;
void compositePointer(const gpu::FramebufferPointer&) override;
void internalPresent(const gpu::FramebufferPointer&) override;
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> getHUDOperator() override;
void compositePointer() override;
void internalPresent() override;
void customizeContext() override;
void uncustomizeContext() override;
void updateFrameData() override;
@ -119,6 +120,8 @@ private:
static const size_t TEXTURE_OFFSET { offsetof(Vertex, uv) };
static const int VERTEX_STRIDE { sizeof(Vertex) };
HUDOperator build();
void build();
void updatePipeline();
std::function<void(gpu::Batch&, const gpu::TexturePointer&, bool mirror)> render(HmdDisplayPlugin& plugin);
} _hudRenderer;
};

View file

@ -37,13 +37,13 @@ glm::uvec2 InterleavedStereoDisplayPlugin::getRecommendedRenderSize() const {
return result;
}
void InterleavedStereoDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compositeFramebuffer) {
void InterleavedStereoDisplayPlugin::internalPresent() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(gpu::FramebufferPointer());
batch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
batch.setResourceTexture(0, compositeFramebuffer->getRenderBuffer(0));
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.setPipeline(_interleavedPresentPipeline);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});

View file

@ -21,7 +21,7 @@ protected:
// initialize OpenGL context settings needed by the plugin
void customizeContext() override;
void uncustomizeContext() override;
void internalPresent(const gpu::FramebufferPointer&) override;
void internalPresent() override;
private:
static const QString NAME;

View file

@ -121,7 +121,11 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
QString materialURL = entity->getMaterialURL();
if (materialURL != _materialURL) {
_materialURL = materialURL;
if (_materialURL.contains("?")) {
if (_materialURL.contains("#")) {
auto split = _materialURL.split("#");
newCurrentMaterialName = split.last().toStdString();
} else if (_materialURL.contains("?")) {
qDebug() << "DEPRECATED: Use # instead of ? for material URLS:" << _materialURL;
auto split = _materialURL.split("?");
newCurrentMaterialName = split.last().toStdString();
}
@ -358,7 +362,13 @@ void MaterialEntityRenderer::deleteMaterial(const QUuid& oldParentID, const QStr
return;
}
// if a remove fails, our parent is gone, so we don't need to retry
// if a remove fails, our parent is gone, so we don't need to retry, EXCEPT:
// MyAvatar can change UUIDs when you switch domains, which leads to a timing issue. Let's just make
// sure we weren't attached to MyAvatar by trying this (if we weren't, this will have no effect)
if (EntityTreeRenderer::removeMaterialFromAvatar(AVATAR_SELF_ID, material, oldParentMaterialNameStd)) {
_appliedMaterial = nullptr;
return;
}
}
void MaterialEntityRenderer::applyTextureTransform(std::shared_ptr<NetworkMaterial>& material) {

View file

@ -976,7 +976,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* by setting the <code>entityHostType</code> parameter in {@link Entities.addEntity} to <code>"avatar"</code>.
* Material entities render as non-scalable spheres if they don't have their parent set.
* @typedef {object} Entities.EntityProperties-Material
* @property {string} materialURL="" - URL to a {@link MaterialResource}. If you append <code>?name</code> to the URL, the
* @property {string} materialURL="" - URL to a {@link MaterialResource}. If you append <code>#name</code> to the URL, the
* material with that name in the {@link MaterialResource} will be applied to the entity. <br />
* Alternatively, set the property value to <code>"materialData"</code> to use the <code>materialData</code> property
* for the {@link MaterialResource} values.

View file

@ -1459,7 +1459,7 @@ void EntityTree::startDynamicDomainVerificationOnServer(float minimumAgeToRemove
QNetworkReply* networkReply = networkAccessManager.put(networkRequest, QJsonDocument(request).toJson());
connect(networkReply, &QNetworkReply::finished, this, [this, entityIDs, networkReply, minimumAgeToRemove, &certificateID] {
connect(networkReply, &QNetworkReply::finished, this, [this, entityIDs, networkReply, minimumAgeToRemove, certificateID] {
QJsonObject jsonObject = QJsonDocument::fromJson(networkReply->readAll()).object();
jsonObject = jsonObject["data"].toObject();

View file

@ -97,7 +97,7 @@ QString processID(const QString& id) {
return id.mid(id.lastIndexOf(':') + 1);
}
QString getName(const QVariantList& properties) {
QString getModelName(const QVariantList& properties) {
QString name;
if (properties.size() == 3) {
name = properties.at(1).toString();
@ -108,6 +108,17 @@ QString getName(const QVariantList& properties) {
return name;
}
QString getMaterialName(const QVariantList& properties) {
QString name;
if (properties.size() == 1 || properties.at(1).toString().isEmpty()) {
name = properties.at(0).toString();
name = processID(name.left(name.indexOf(QChar('\0'))));
} else {
name = processID(properties.at(1).toString());
}
return name;
}
QString getID(const QVariantList& properties, int index = 0) {
return processID(properties.at(index).toString());
}
@ -300,8 +311,6 @@ QString getString(const QVariant& value) {
return list.isEmpty() ? value.toString() : list.at(0).toString();
}
typedef std::vector<glm::vec3> ShapeVertices;
class AnimationCurve {
public:
QVector<float> values;
@ -510,7 +519,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
blendshapes.append(extracted);
}
} else if (object.name == "Model") {
QString name = getName(object.properties);
QString name = getModelName(object.properties);
QString id = getID(object.properties);
modelIDsToNames.insert(id, name);
@ -829,7 +838,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
} else if (object.name == "Material") {
HFMMaterial material;
MaterialParam materialParam;
material.name = (object.properties.at(1).toString());
material.name = getMaterialName(object.properties);
foreach (const FBXNode& subobject, object.children) {
bool properties = false;
@ -1352,8 +1361,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
}
// NOTE: shapeVertices are in joint-frame
std::vector<ShapeVertices> shapeVertices;
shapeVertices.resize(std::max(1, hfmModel.joints.size()) );
hfmModel.shapeVertices.resize(std::max(1, hfmModel.joints.size()) );
hfmModel.bindExtents.reset();
hfmModel.meshExtents.reset();
@ -1527,7 +1535,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
HFMJoint& joint = hfmModel.joints[jointIndex];
glm::mat4 meshToJoint = glm::inverse(joint.bindTransform) * modelTransform;
ShapeVertices& points = shapeVertices.at(jointIndex);
ShapeVertices& points = hfmModel.shapeVertices.at(jointIndex);
for (int j = 0; j < cluster.indices.size(); j++) {
int oldIndex = cluster.indices.at(j);
@ -1601,7 +1609,7 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
// transform cluster vertices to joint-frame and save for later
glm::mat4 meshToJoint = glm::inverse(joint.bindTransform) * modelTransform;
ShapeVertices& points = shapeVertices.at(jointIndex);
ShapeVertices& points = hfmModel.shapeVertices.at(jointIndex);
foreach (const glm::vec3& vertex, extracted.mesh.vertices) {
const glm::mat4 vertexTransform = meshToJoint * glm::translate(vertex);
points.push_back(extractTranslation(vertexTransform));
@ -1621,54 +1629,6 @@ HFMModel* FBXSerializer::extractHFMModel(const hifi::VariantHash& mapping, const
meshIDsToMeshIndices.insert(it.key(), meshIndex);
}
const float INV_SQRT_3 = 0.57735026918f;
ShapeVertices cardinalDirections = {
Vectors::UNIT_X,
Vectors::UNIT_Y,
Vectors::UNIT_Z,
glm::vec3(INV_SQRT_3, INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3)
};
// now that all joints have been scanned compute a k-Dop bounding volume of mesh
for (int i = 0; i < hfmModel.joints.size(); ++i) {
HFMJoint& joint = hfmModel.joints[i];
// NOTE: points are in joint-frame
ShapeVertices& points = shapeVertices.at(i);
if (points.size() > 0) {
// compute average point
glm::vec3 avgPoint = glm::vec3(0.0f);
for (uint32_t j = 0; j < points.size(); ++j) {
avgPoint += points[j];
}
avgPoint /= (float)points.size();
joint.shapeInfo.avgPoint = avgPoint;
// compute a k-Dop bounding volume
for (uint32_t j = 0; j < cardinalDirections.size(); ++j) {
float maxDot = -FLT_MAX;
float minDot = FLT_MIN;
for (uint32_t k = 0; k < points.size(); ++k) {
float kDot = glm::dot(cardinalDirections[j], points[k] - avgPoint);
if (kDot > maxDot) {
maxDot = kDot;
}
if (kDot < minDot) {
minDot = kDot;
}
}
joint.shapeInfo.points.push_back(avgPoint + maxDot * cardinalDirections[j]);
joint.shapeInfo.dots.push_back(maxDot);
joint.shapeInfo.points.push_back(avgPoint + minDot * cardinalDirections[j]);
joint.shapeInfo.dots.push_back(-minDot);
}
generateBoundryLinesForDop14(joint.shapeInfo.dots, joint.shapeInfo.avgPoint, joint.shapeInfo.debugLines);
}
}
// attempt to map any meshes to a named model
for (QHash<QString, int>::const_iterator m = meshIDsToMeshIndices.constBegin();
m != meshIDsToMeshIndices.constEnd(); m++) {

View file

@ -32,6 +32,7 @@ static const QString JOINT_FIELD = "joint";
static const QString BLENDSHAPE_FIELD = "bs";
static const QString SCRIPT_FIELD = "script";
static const QString JOINT_NAME_MAPPING_FIELD = "jointMap";
static const QString MATERIAL_MAPPING_FIELD = "materialMap";
class FSTReader {
public:

View file

@ -891,12 +891,14 @@ HFMModel::Pointer OBJSerializer::read(const hifi::ByteArray& data, const hifi::V
if (!objMaterial.used) {
continue;
}
hfmModel.materials[materialID] = HFMMaterial(objMaterial.diffuseColor,
objMaterial.specularColor,
objMaterial.emissiveColor,
objMaterial.shininess,
objMaterial.opacity);
HFMMaterial& hfmMaterial = hfmModel.materials[materialID];
HFMMaterial& hfmMaterial = hfmModel.materials[materialID] = HFMMaterial(objMaterial.diffuseColor,
objMaterial.specularColor,
objMaterial.emissiveColor,
objMaterial.shininess,
objMaterial.opacity);
hfmMaterial.name = materialID;
hfmMaterial.materialID = materialID;
hfmMaterial._material = std::make_shared<graphics::Material>();
graphics::MaterialPointer modelMaterial = hfmMaterial._material;

View file

@ -15,7 +15,7 @@
#include "GraphicsScriptingUtil.h"
#include "ScriptableMesh.h"
#include "graphics/Material.h"
#include "image/Image.h"
#include "image/TextureProcessing.h"
// #define SCRIPTABLE_MESH_DEBUG 1

View file

@ -154,3 +154,57 @@ QString HFMModel::getModelNameOfMesh(int meshIndex) const {
}
return QString();
}
void HFMModel::computeKdops() {
const float INV_SQRT_3 = 0.57735026918f;
ShapeVertices cardinalDirections = {
Vectors::UNIT_X,
Vectors::UNIT_Y,
Vectors::UNIT_Z,
glm::vec3(INV_SQRT_3, INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, INV_SQRT_3),
glm::vec3(INV_SQRT_3, INV_SQRT_3, -INV_SQRT_3),
glm::vec3(INV_SQRT_3, -INV_SQRT_3, -INV_SQRT_3)
};
if (joints.size() != (int)shapeVertices.size()) {
return;
}
// now that all joints have been scanned compute a k-Dop bounding volume of mesh
for (int i = 0; i < joints.size(); ++i) {
HFMJoint& joint = joints[i];
// NOTE: points are in joint-frame
ShapeVertices& points = shapeVertices.at(i);
glm::quat rotOffset = jointRotationOffsets.contains(i) ? glm::inverse(jointRotationOffsets[i]) : quat();
if (points.size() > 0) {
// compute average point
glm::vec3 avgPoint = glm::vec3(0.0f);
for (uint32_t j = 0; j < points.size(); ++j) {
points[j] = rotOffset * points[j];
avgPoint += points[j];
}
avgPoint /= (float)points.size();
joint.shapeInfo.avgPoint = avgPoint;
// compute a k-Dop bounding volume
for (uint32_t j = 0; j < cardinalDirections.size(); ++j) {
float maxDot = -FLT_MAX;
float minDot = FLT_MIN;
for (uint32_t k = 0; k < points.size(); ++k) {
float kDot = glm::dot(cardinalDirections[j], points[k] - avgPoint);
if (kDot > maxDot) {
maxDot = kDot;
}
if (kDot < minDot) {
minDot = kDot;
}
}
joint.shapeInfo.points.push_back(avgPoint + maxDot * cardinalDirections[j]);
joint.shapeInfo.dots.push_back(maxDot);
joint.shapeInfo.points.push_back(avgPoint + minDot * cardinalDirections[j]);
joint.shapeInfo.dots.push_back(-minDot);
}
generateBoundryLinesForDop14(joint.shapeInfo.dots, joint.shapeInfo.avgPoint, joint.shapeInfo.debugLines);
}
}
}

View file

@ -53,6 +53,8 @@ using ColorType = glm::vec3;
const int MAX_NUM_PIXELS_FOR_FBX_TEXTURE = 2048 * 2048;
using ShapeVertices = std::vector<glm::vec3>;
// The version of the Draco mesh binary data itself. See also: FBX_DRACO_MESH_VERSION in FBX.h
static const int DRACO_MESH_VERSION = 2;
@ -251,11 +253,6 @@ public:
bool wasCompressed { false };
};
/**jsdoc
* @typedef {object} FBXAnimationFrame
* @property {Quat[]} rotations
* @property {Vec3[]} translations
*/
/// A single animation frame.
class AnimationFrame {
public:
@ -332,10 +329,12 @@ public:
/// given a meshIndex this will return the name of the model that mesh belongs to if known
QString getModelNameOfMesh(int meshIndex) const;
void computeKdops();
QList<QString> blendshapeChannelNames;
QMap<int, glm::quat> jointRotationOffsets;
std::vector<ShapeVertices> shapeVertices;
FlowData flowData;
};

View file

@ -3,6 +3,7 @@ setup_hifi_library()
link_hifi_libraries(shared gpu)
target_nvtt()
target_etc2comp()
target_openexr()
if (UNIX AND NOT APPLE)
set(THREADS_PREFER_PTHREAD_FLAG ON)

File diff suppressed because it is too large Load diff

View file

@ -1,9 +1,10 @@
#pragma once
//
// Image.h
// image/src/image
// image/src/Image
//
// Created by Clement Brisset on 4/5/2017.
// Copyright 2017 High Fidelity, Inc.
// Created by Olivier Prat on 29/3/2019.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@ -12,80 +13,85 @@
#ifndef hifi_image_Image_h
#define hifi_image_Image_h
#include <QVariant>
#include <gpu/Texture.h>
#include <QImage>
#include "ColorChannel.h"
class QByteArray;
class QImage;
#include <glm/fwd.hpp>
#include <glm/vec2.hpp>
#include <GLMHelpers.h>
namespace image {
namespace TextureUsage {
class Image {
public:
enum Type {
DEFAULT_TEXTURE,
STRICT_TEXTURE,
ALBEDO_TEXTURE,
NORMAL_TEXTURE,
BUMP_TEXTURE,
SPECULAR_TEXTURE,
METALLIC_TEXTURE = SPECULAR_TEXTURE, // for now spec and metallic texture are the same, converted to grey
ROUGHNESS_TEXTURE,
GLOSS_TEXTURE,
EMISSIVE_TEXTURE,
CUBE_TEXTURE,
OCCLUSION_TEXTURE,
SCATTERING_TEXTURE = OCCLUSION_TEXTURE,
LIGHTMAP_TEXTURE,
UNUSED_TEXTURE
};
enum Format {
Format_Invalid = QImage::Format_Invalid,
Format_Mono = QImage::Format_Mono,
Format_MonoLSB = QImage::Format_MonoLSB,
Format_Indexed8 = QImage::Format_Indexed8,
Format_RGB32 = QImage::Format_RGB32,
Format_ARGB32 = QImage::Format_ARGB32,
Format_ARGB32_Premultiplied = QImage::Format_ARGB32_Premultiplied,
Format_RGB16 = QImage::Format_RGB16,
Format_ARGB8565_Premultiplied = QImage::Format_ARGB8565_Premultiplied,
Format_RGB666 = QImage::Format_RGB666,
Format_ARGB6666_Premultiplied = QImage::Format_ARGB6666_Premultiplied,
Format_RGB555 = QImage::Format_RGB555,
Format_ARGB8555_Premultiplied = QImage::Format_ARGB8555_Premultiplied,
Format_RGB888 = QImage::Format_RGB888,
Format_RGB444 = QImage::Format_RGB444,
Format_ARGB4444_Premultiplied = QImage::Format_ARGB4444_Premultiplied,
Format_RGBX8888 = QImage::Format_RGBX8888,
Format_RGBA8888 = QImage::Format_RGBA8888,
Format_RGBA8888_Premultiplied = QImage::Format_RGBA8888_Premultiplied,
Format_Grayscale8 = QImage::Format_Grayscale8,
Format_R11G11B10F = QImage::Format_RGB30,
Format_PACKED_FLOAT = Format_R11G11B10F
};
using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, bool, gpu::BackendTarget, const std::atomic<bool>&)>;
TextureLoader getTextureLoaderForType(Type type, const QVariantMap& options = QVariantMap());
using AspectRatioMode = Qt::AspectRatioMode;
using TransformationMode = Qt::TransformationMode;
gpu::TexturePointer create2DTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createStrict2DTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createAlbedoTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createEmissiveTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createNormalTextureFromNormalImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createNormalTextureFromBumpImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createRoughnessTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createRoughnessTextureFromGlossImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createMetallicTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createCubeTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createCubeTextureFromImageWithoutIrradiance(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createLightmapTextureFromImage(QImage&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool isStrict, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool isBumpMap, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool isInvertedPixels, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool generateIrradiance, const std::atomic<bool>& abortProcessing);
Image() {}
Image(int width, int height, Format format) : _data(width, height, (QImage::Format)format) {}
Image(const QImage& data) : _data(data) {}
void operator=(const QImage& image) {
_data = image;
}
} // namespace TextureUsage
bool isNull() const { return _data.isNull(); }
const QStringList getSupportedFormats();
Format getFormat() const { return (Format)_data.format(); }
bool hasAlphaChannel() const { return _data.hasAlphaChannel(); }
gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& url, ColorChannel sourceChannel,
int maxNumPixels, TextureUsage::Type textureType,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing = false);
glm::uint32 getWidth() const { return (glm::uint32)_data.width(); }
glm::uint32 getHeight() const { return (glm::uint32)_data.height(); }
glm::uvec2 getSize() const { return toGlm(_data.size()); }
size_t getByteCount() const { return _data.byteCount(); }
QRgb getPixel(int x, int y) const { return _data.pixel(x, y); }
void setPixel(int x, int y, QRgb value) {
_data.setPixel(x, y, value);
}
glm::uint8* editScanLine(int y) { return _data.scanLine(y); }
const glm::uint8* getScanLine(int y) const { return _data.scanLine(y); }
const glm::uint8* getBits() const { return _data.constBits(); }
Image getScaled(glm::uvec2 newSize, AspectRatioMode ratioMode, TransformationMode transformationMode = Qt::SmoothTransformation) const;
Image getConvertedToFormat(Format newFormat) const;
Image getSubImage(QRect rect) const;
Image getMirrored(bool horizontal, bool vertical) const;
// Inplace transformations
void invertPixels();
private:
QImage _data;
};
} // namespace image

View file

@ -0,0 +1,99 @@
//
// OpenEXRReader.cpp
// image/src/image
//
// Created by Olivier Prat
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OpenEXRReader.h"
#include "TextureProcessing.h"
#include "ImageLogging.h"
#include <QIODevice>
#include <QDebug>
#if !defined(Q_OS_ANDROID)
#include <OpenEXR/ImfIO.h>
#include <OpenEXR/ImfRgbaFile.h>
#include <OpenEXR/ImfArray.h>
#include <OpenEXR/ImfTestFile.h>
// Adapts a QIODevice to OpenEXR's Imf::IStream interface so EXR files can be
// decoded straight from Qt I/O sources (network replies, resource files, ...).
class QIODeviceImfStream : public Imf::IStream {
public:
    QIODeviceImfStream(QIODevice& device, const std::string& filename) :
        Imf::IStream(filename.c_str()), _device(device) {
    }

    // Reads exactly n bytes into c. OpenEXR requires the full n bytes, and
    // QIODevice::read returns the byte count actually read (-1 on error), so
    // a short read must be reported as a failure too — not only <= 0.
    bool read(char c[/*n*/], int n) override {
        if (_device.read(c, n) != n) {
            qWarning(imagelogging) << "OpenEXR - in file " << fileName() << " : " << _device.errorString();
            return false;
        }
        return true;
    }

    // Current read position, forwarded to the underlying device.
    Imf::Int64 tellg() override {
        return _device.pos();
    }

    // Absolute seek, forwarded to the underlying device.
    void seekg(Imf::Int64 pos) override {
        _device.seek(pos);
    }

    void clear() override {
        // Not much to do: QIODevice has no sticky error flag to reset here.
    }

private:
    QIODevice& _device; // not owned; must outlive this stream
};
#endif
// Decodes an OpenEXR image from `content` into an Image using the packed
// HDR format (Format_PACKED_FLOAT). `filename` is used for error reporting.
// Returns a null Image on failure, or on Android where OpenEXR support is
// compiled out.
image::Image image::readOpenEXR(QIODevice& content, const std::string& filename) {
#if !defined(Q_OS_ANDROID)
    QIODeviceImfStream device(content, filename);

    if (Imf::isOpenExrFile(device)) {
        Imf::RgbaInputFile file(device);
        Imath::Box2i viewport = file.dataWindow();
        Imf::Array2D<Imf::Rgba> pixels;
        // dataWindow bounds are inclusive, hence the +1.
        int width = viewport.max.x - viewport.min.x + 1;
        int height = viewport.max.y - viewport.min.y + 1;

        pixels.resizeErase(height, width);

        // Offset the base pointer so the library writes the data-window pixel
        // (min.x, min.y) into pixels[0][0]; x stride is 1 pixel, y stride is
        // one full row of `width` pixels.
        file.setFrameBuffer(&pixels[0][0] - viewport.min.x - viewport.min.y * width, 1, width);
        file.readPixels(viewport.min.y, viewport.max.y);

        Image image{ width, height, Image::Format_PACKED_FLOAT };
        // Packs an RGB triple into a single 32-bit HDR texel (see
        // getHDRPackingFunction in TextureProcessing).
        auto packHDRPixel = getHDRPackingFunction();

        for (int y = 0; y < height; y++) {
            const auto srcScanline = pixels[y];
            gpu::uint32* dstScanline = (gpu::uint32*) image.editScanLine(y);
            for (int x = 0; x < width; x++) {
                const auto& srcPixel = srcScanline[x];
                auto& dstPixel = dstScanline[x];
                // Alpha is dropped: only RGB is packed into the HDR texel.
                glm::vec3 floatPixel{ srcPixel.r, srcPixel.g, srcPixel.b };
                dstPixel = packHDRPixel(floatPixel);
            }
        }
        return image;
    } else {
        qWarning(imagelogging) << "OpenEXR - File " << filename.c_str() << " doesn't have the proper format";
    }
#endif

    // Implicitly converts to a null image::Image.
    return QImage();
}

View file

@ -0,0 +1,24 @@
//
// OpenEXRReader.h
// image/src/image
//
// Created by Olivier Prat
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_image_OpenEXRReader_h
#define hifi_image_OpenEXRReader_h

#include "Image.h"

namespace image {

    // TODO Move this into a plugin that QImageReader can use
    // Decodes an OpenEXR image from `contents`; `filename` is used for error
    // reporting only. Returns a null Image on failure (and on Android, where
    // OpenEXR support is not built — see OpenEXRReader.cpp).
    Image readOpenEXR(QIODevice& contents, const std::string& filename);

}

#endif // hifi_image_OpenEXRReader_h

View file

@ -16,7 +16,7 @@
#include <QIODevice>
#include <QDebug>
QImage image::readTGA(QIODevice& content) {
image::Image image::readTGA(QIODevice& content) {
enum class TGAImageType : uint8_t {
NoImageData = 0,
UncompressedColorMapped = 1,

View file

@ -12,12 +12,11 @@
#ifndef hifi_image_TGAReader_h
#define hifi_image_TGAReader_h
#include <QImage>
#include "Image.h"
namespace image {
// TODO Move this into a plugin that QImageReader can use
QImage readTGA(QIODevice& contents);
Image readTGA(QIODevice& contents);
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,118 @@
//
// TextureProcessing.h
// image/src/TextureProcessing
//
// Created by Clement Brisset on 4/5/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_image_TextureProcessing_h
#define hifi_image_TextureProcessing_h
#include <QVariant>
#include <gpu/Texture.h>
#include "Image.h"
namespace image {
std::function<gpu::uint32(const glm::vec3&)> getHDRPackingFunction();
std::function<glm::vec3(gpu::uint32)> getHDRUnpackingFunction();
namespace TextureUsage {
/**jsdoc
* <p>Describes the type of texture.</p>
* <p>See also: {@link Material} and
* {@link https://docs.highfidelity.com/create/3d-models/pbr-materials-guide.html|PBR Materials Guide}.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>Default</td><td>Basic color.</td></tr>
* <tr><td><code>1</code></td><td>Strict</td><td>Basic color. Quality never downgraded.</td></tr>
* <tr><td><code>2</code></td><td>Albedo</td><td>Color for PBR.</td></tr>
* <tr><td><code>3</code></td><td>Normal</td><td>Normal map.</td></tr>
* <tr><td><code>4</code></td><td>Bump</td><td>Bump map.</td></tr>
* <tr><td><code>5</code></td><td>Specular or metallic</td><td>Metallic or not.</td></tr>
* <tr><td><code>6</code></td><td>Roughness</td><td>Rough or matte.</td></tr>
* <tr><td><code>7</code></td><td>Gloss</td><td>Gloss or shine.</td></tr>
* <tr><td><code>8</code></td><td>Emissive</td><td>The amount of light reflected.</td></tr>
* <tr><td><code>9</code></td><td>Cube</td><td>Cubic image for sky boxes.</td></tr>
* <tr><td><code>10</code></td><td>Occlusion or scattering</td><td>How objects or human skin interact with light.</td></tr>
* <tr><td><code>11</code></td><td>Lightmap</td><td>Light map.</td></tr>
* <tr><td><code>12</code></td><td>Unused</td><td>Texture is not currently used.</td></tr>
* </tbody>
* </table>
* @typedef {number} TextureCache.TextureType
*/
enum Type {
DEFAULT_TEXTURE,
STRICT_TEXTURE,
ALBEDO_TEXTURE,
NORMAL_TEXTURE,
BUMP_TEXTURE,
SPECULAR_TEXTURE,
METALLIC_TEXTURE = SPECULAR_TEXTURE, // for now spec and metallic texture are the same, converted to grey
ROUGHNESS_TEXTURE,
GLOSS_TEXTURE,
EMISSIVE_TEXTURE,
CUBE_TEXTURE,
OCCLUSION_TEXTURE,
SCATTERING_TEXTURE = OCCLUSION_TEXTURE,
LIGHTMAP_TEXTURE,
UNUSED_TEXTURE
};
using TextureLoader = std::function<gpu::TexturePointer(Image&&, const std::string&, bool, gpu::BackendTarget, const std::atomic<bool>&)>;
TextureLoader getTextureLoaderForType(Type type, const QVariantMap& options = QVariantMap());
gpu::TexturePointer create2DTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createStrict2DTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createAlbedoTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createEmissiveTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createNormalTextureFromNormalImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createNormalTextureFromBumpImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createRoughnessTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createRoughnessTextureFromGlossImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createMetallicTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createCubeTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createCubeTextureFromImageWithoutIrradiance(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createLightmapTextureFromImage(Image&& image, const std::string& srcImageName,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureColorFromImage(Image&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool isStrict, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureNormalMapFromImage(Image&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool isBumpMap, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureGrayscaleFromImage(Image&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool isInvertedPixels, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer processCubeTextureColorFromImage(Image&& srcImage, const std::string& srcImageName, bool compress,
gpu::BackendTarget target, bool generateIrradiance, const std::atomic<bool>& abortProcessing);
} // namespace TextureUsage
const QStringList getSupportedFormats();
gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& url, ColorChannel sourceChannel,
int maxNumPixels, TextureUsage::Type textureType,
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing = false);
} // namespace image
#endif // hifi_image_TextureProcessing_h

View file

@ -559,8 +559,7 @@ void NetworkMaterial::setLightmapMap(const QUrl& url) {
}
NetworkMaterial::NetworkMaterial(const HFMMaterial& material, const QUrl& textureBaseUrl) :
graphics::Material(*material._material),
_textures(MapChannel::NUM_MAP_CHANNELS)
graphics::Material(*material._material)
{
_name = material.name.toStdString();
if (!material.albedoTexture.filename.isEmpty()) {
@ -709,7 +708,7 @@ void NetworkMaterial::setTextures(const QVariantMap& textureMap) {
bool NetworkMaterial::isMissingTexture() {
for (auto& networkTexture : _textures) {
auto& texture = networkTexture.texture;
auto& texture = networkTexture.second.texture;
if (!texture) {
continue;
}

View file

@ -36,15 +36,21 @@ public:
bool isMissingTexture();
void checkResetOpacityMap();
protected:
friend class Geometry;
// A named texture slot on the material, paired with the (possibly
// still-loading) network texture resource bound to it.
class Texture {
public:
    QString name;
    NetworkTexturePointer texture;
};
using Textures = std::vector<Texture>;
// Hash functor so MapChannel enum values can be used as keys of an
// std::unordered_map: the hash is simply the enum's integral value.
struct MapChannelHash {
    std::size_t operator()(MapChannel channel) const {
        return static_cast<std::size_t>(channel);
    }
};
using Textures = std::unordered_map<MapChannel, Texture, MapChannelHash>;
Textures getTextures() { return _textures; }
protected:
friend class Geometry;
Textures _textures;

View file

@ -34,7 +34,7 @@
#include <gl/GLHelpers.h>
#include <gpu/Batch.h>
#include <image/Image.h>
#include <image/TextureProcessing.h>
#include <NumericalConstants.h>
#include <shared/NsightHelpers.h>

View file

@ -23,7 +23,7 @@
#include <ResourceCache.h>
#include <graphics/TextureMap.h>
#include <image/ColorChannel.h>
#include <image/Image.h>
#include <image/TextureProcessing.h>
#include <ktx/KTX.h>
#include <TextureMeta.h>

View file

@ -25,7 +25,8 @@ class TextureCacheScriptingInterface : public ScriptableResourceCache, public De
// Properties are copied over from ResourceCache (see ResourceCache.h for reason).
/**jsdoc
* API to manage texture cache resources.
* The <code>TextureCache</code> API manages texture cache resources.
*
* @namespace TextureCache
*
* @hifi-interface
@ -47,11 +48,14 @@ public:
TextureCacheScriptingInterface();
/**jsdoc
* Prefetches a texture resource of specific type.
* @function TextureCache.prefetch
* @param {string} url
* @param {number} type
* @param {number} [maxNumPixels=67108864]
* @returns {ResourceObject}
* @variation 0
* @param {string} url - The URL of the texture to prefetch.
* @param {TextureCache.TextureType} type - The type of the texture.
* @param {number} [maxNumPixels=67108864] - The maximum number of pixels to use for the image. If the texture has more
* than this number it is downscaled.
* @returns {ResourceObject} A resource object.
*/
Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, int type, int maxNumPixels = ABSOLUTE_MAX_TEXTURE_NUM_PIXELS);
@ -59,6 +63,7 @@ signals:
/**jsdoc
* @function TextureCache.spectatorCameraFramebufferReset
* @returns {Signal}
* @deprecated This signal is deprecated and will be removed.
*/
void spectatorCameraFramebufferReset();
};

View file

@ -113,6 +113,7 @@ namespace baker {
hfmModelOut->jointRotationOffsets = input.get3();
hfmModelOut->jointIndices = input.get4();
hfmModelOut->flowData = input.get5();
hfmModelOut->computeKdops();
output = hfmModelOut;
}
};

View file

@ -10,6 +10,62 @@
#include "ModelBakerLogging.h"
#include <QJsonArray>
// Converts a "materialMap" JSON object into MaterialMapping entries.
// Each key names a target material; its value is either a material JSON
// description (object) or a URL string referencing an external material.
// `url` is the model URL used to resolve relative material references.
void processMaterialMapping(MaterialMapping& materialMapping, const QJsonObject& materialMap, const hifi::URL& url) {
    auto mappingKeys = materialMap.keys();

    for (auto mapping : mappingKeys) {
        auto mappingJSON = materialMap[mapping];
        if (mappingJSON.isObject()) {
            auto mappingValue = mappingJSON.toObject();

            // Old subsurface scattering mapping: an object carrying
            // "scattering" and/or "scatteringMap" keys directly.
            {
                auto scatteringIter = mappingValue.find("scattering");
                auto scatteringMapIter = mappingValue.find("scatteringMap");
                if (scatteringIter != mappingValue.end() || scatteringMapIter != mappingValue.end()) {
                    std::shared_ptr<NetworkMaterial> material = std::make_shared<NetworkMaterial>();
                    if (scatteringIter != mappingValue.end()) {
                        float scattering = (float)scatteringIter.value().toDouble();
                        material->setScattering(scattering);
                    }
                    if (scatteringMapIter != mappingValue.end()) {
                        QString scatteringMap = scatteringMapIter.value().toString();
                        material->setScatteringMap(scatteringMap);
                    }
                    material->setDefaultFallthrough(true);

                    // Custom deleter uses deleteLater so the QObject is
                    // destroyed on its own thread's event loop.
                    NetworkMaterialResourcePointer materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(),
                        [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
                    materialResource->moveToThread(qApp->thread());
                    materialResource->parsedMaterials.names.push_back("scattering");
                    materialResource->parsedMaterials.networkMaterials["scattering"] = material;

                    // Legacy entries are registered under a "mat::" prefix.
                    materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>("mat::" + mapping.toStdString(), materialResource));
                    // Handled as legacy scattering; skip the generic JSON path.
                    continue;
                }
            }

            // Material JSON description: parse the object into one or more
            // named network materials.
            {
                NetworkMaterialResourcePointer materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(),
                    [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
                materialResource->moveToThread(qApp->thread());
                materialResource->parsedMaterials = NetworkMaterialResource::parseJSONMaterials(QJsonDocument(mappingValue), url);
                materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>(mapping.toStdString(), materialResource));
            }
        } else if (mappingJSON.isString()) {
            // URL form: resolve relative to the model URL and fetch the
            // material through the shared MaterialCache.
            auto mappingValue = mappingJSON.toString();
            materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>(mapping.toStdString(),
                MaterialCache::instance().getMaterial(url.resolved(mappingValue))));
        }
    }
}
void ParseMaterialMappingTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& mapping = input.get0();
const auto& url = input.get1();
@ -18,56 +74,18 @@ void ParseMaterialMappingTask::run(const baker::BakeContextPointer& context, con
auto mappingIter = mapping.find("materialMap");
if (mappingIter != mapping.end()) {
QByteArray materialMapValue = mappingIter.value().toByteArray();
QJsonObject materialMap = QJsonDocument::fromJson(materialMapValue).object();
if (materialMap.isEmpty()) {
QJsonDocument materialMapJSON = QJsonDocument::fromJson(materialMapValue);
if (materialMapJSON.isEmpty()) {
qCDebug(model_baker) << "Material Map found but did not produce valid JSON:" << materialMapValue;
} else if (materialMapJSON.isObject()) {
QJsonObject materialMap = materialMapJSON.object();
processMaterialMapping(materialMapping, materialMap, url);
} else {
auto mappingKeys = materialMap.keys();
for (auto mapping : mappingKeys) {
auto mappingJSON = materialMap[mapping];
if (mappingJSON.isObject()) {
auto mappingValue = mappingJSON.toObject();
// Old subsurface scattering mapping
{
auto scatteringIter = mappingValue.find("scattering");
auto scatteringMapIter = mappingValue.find("scatteringMap");
if (scatteringIter != mappingValue.end() || scatteringMapIter != mappingValue.end()) {
std::shared_ptr<NetworkMaterial> material = std::make_shared<NetworkMaterial>();
if (scatteringIter != mappingValue.end()) {
float scattering = (float)scatteringIter.value().toDouble();
material->setScattering(scattering);
}
if (scatteringMapIter != mappingValue.end()) {
QString scatteringMap = scatteringMapIter.value().toString();
material->setScatteringMap(scatteringMap);
}
material->setDefaultFallthrough(true);
NetworkMaterialResourcePointer materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(), [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
materialResource->moveToThread(qApp->thread());
materialResource->parsedMaterials.names.push_back("scattering");
materialResource->parsedMaterials.networkMaterials["scattering"] = material;
materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>("mat::" + mapping.toStdString(), materialResource));
continue;
}
}
// Material JSON description
{
NetworkMaterialResourcePointer materialResource = NetworkMaterialResourcePointer(new NetworkMaterialResource(), [](NetworkMaterialResource* ptr) { ptr->deleteLater(); });
materialResource->moveToThread(qApp->thread());
materialResource->parsedMaterials = NetworkMaterialResource::parseJSONMaterials(QJsonDocument(mappingValue), url);
materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>(mapping.toStdString(), materialResource));
}
} else if (mappingJSON.isString()) {
auto mappingValue = mappingJSON.toString();
materialMapping.push_back(std::pair<std::string, NetworkMaterialResourcePointer>(mapping.toStdString(), MaterialCache::instance().getMaterial(url.resolved(mappingValue))));
QJsonArray materialMapArray = materialMapJSON.array();
for (auto materialMapIter : materialMapArray) {
if (materialMapIter.isObject()) {
QJsonObject materialMap = materialMapIter.toObject();
processMaterialMapping(materialMapping, materialMap, url);
}
}
}

View file

@ -437,8 +437,8 @@ const QVariantMap Geometry::getTextures() const {
QVariantMap textures;
for (const auto& material : _materials) {
for (const auto& texture : material->_textures) {
if (texture.texture) {
textures[texture.name] = texture.texture->getURL();
if (texture.second.texture) {
textures[texture.second.name] = texture.second.texture->getURL();
}
}
}
@ -467,7 +467,7 @@ void Geometry::setTextures(const QVariantMap& textureMap) {
for (auto& material : _materials) {
// Check if any material textures actually changed
if (std::any_of(material->_textures.cbegin(), material->_textures.cend(),
[&textureMap](const NetworkMaterial::Textures::value_type& it) { return it.texture && textureMap.contains(it.name); })) {
[&textureMap](const NetworkMaterial::Textures::value_type& it) { return it.second.texture && textureMap.contains(it.second.name); })) {
// FIXME: The Model currently caches the materials (waste of space!)
// so they must be copied in the Geometry copy-ctor

View file

@ -25,7 +25,8 @@ class ModelCacheScriptingInterface : public ScriptableResourceCache, public Depe
// Properties are copied over from ResourceCache (see ResourceCache.h for reason).
/**jsdoc
* API to manage model cache resources.
* The <code>ModelCache</code> API manages model cache resources.
*
* @namespace ModelCache
*
* @hifi-interface

View file

@ -91,6 +91,9 @@ private:
class ScriptableResource : public QObject {
/**jsdoc
* Information about a cached resource. Created by {@link AnimationCache.prefetch}, {@link ModelCache.prefetch},
* {@link SoundCache.prefetch}, or {@link TextureCache.prefetch}.
*
* @class ResourceObject
*
* @hifi-interface
@ -99,8 +102,8 @@ class ScriptableResource : public QObject {
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {string} url - URL of this resource.
* @property {Resource.State} state - Current loading state.
* @property {string} url - URL of the resource. <em>Read-only.</em>
* @property {Resource.State} state - Current loading state. <em>Read-only.</em>
*/
Q_OBJECT
Q_PROPERTY(QUrl url READ getURL)
@ -109,12 +112,13 @@ class ScriptableResource : public QObject {
public:
/**jsdoc
* The loading state of a resource.
* @typedef {object} Resource.State
* @property {number} QUEUED - The resource is queued up, waiting to be loaded.
* @property {number} LOADING - The resource is downloading.
* @property {number} LOADED - The resource has finished downloaded by is not complete.
* @property {number} LOADED - The resource has finished downloading but is not complete.
* @property {number} FINISHED - The resource has completely finished loading and is ready.
* @property {number} FAILED - Downloading the resource has failed.
* @property {number} FAILED - The resource has failed to download.
*/
enum State {
QUEUED,
@ -129,7 +133,7 @@ public:
virtual ~ScriptableResource() = default;
/**jsdoc
* Release this resource.
* Releases the resource.
* @function ResourceObject#release
*/
Q_INVOKABLE void release();
@ -144,16 +148,16 @@ public:
signals:
/**jsdoc
* Triggered when download progress for this resource has changed.
* Triggered when the resource's download progress changes.
* @function ResourceObject#progressChanged
* @param {number} bytesReceived - Byytes downloaded so far.
* @param {number} bytesReceived - Bytes downloaded so far.
* @param {number} bytesTotal - Total number of bytes in the resource.
* @returns {Signal}
*/
void progressChanged(uint64_t bytesReceived, uint64_t bytesTotal);
/**jsdoc
* Triggered when resource loading state has changed.
* Triggered when the resource's loading state changes.
* @function ResourceObject#stateChanged
* @param {Resource.State} state - New state.
* @returns {Signal}
@ -317,30 +321,63 @@ public:
ScriptableResourceCache(QSharedPointer<ResourceCache> resourceCache);
/**jsdoc
* Get the list of all resource URLs.
* Gets the URLs of all resources in the cache.
* @function ResourceCache.getResourceList
* @returns {string[]}
* @returns {string[]} The URLs of all resources in the cache.
* @example <caption>Report cached resources.</caption>
* // Replace AnimationCache with ModelCache, SoundCache, or TextureCache as appropriate.
*
* var cachedResources = AnimationCache.getResourceList();
* print("Cached resources: " + JSON.stringify(cachedResources));
*/
Q_INVOKABLE QVariantList getResourceList();
/**jsdoc
* @function ResourceCache.updateTotalSize
* @param {number} deltaSize
* @param {number} deltaSize - Delta size.
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE void updateTotalSize(const qint64& deltaSize);
/**jsdoc
* Prefetches a resource.
* @function ResourceCache.prefetch
* @param {string} url - URL of the resource to prefetch.
* @returns {ResourceObject}
* @param {string} url - The URL of the resource to prefetch.
* @returns {ResourceObject} A resource object.
* @example <caption>Prefetch a resource and wait until it has loaded.</caption>
* // Replace AnimationCache with ModelCache, SoundCache, or TextureCache as appropriate.
* // TextureCache has its own version of this function.
*
* var resourceURL = "https://s3-us-west-1.amazonaws.com/hifi-content/clement/production/animations/sitting_idle.fbx";
* var resourceObject = AnimationCache.prefetch(resourceURL);
*
* function checkIfResourceLoaded(state) {
* if (state === Resource.State.FINISHED) {
* print("Resource loaded and ready.");
* } else if (state === Resource.State.FAILED) {
* print("Resource not loaded.");
* }
* }
*
* // Resource may have already been loaded.
* print("Resource state: " + resourceObject.state);
* checkIfResourceLoaded(resourceObject.state);
*
* // Resource may still be loading.
* resourceObject.stateChanged.connect(function (state) {
* print("Resource state changed to: " + state);
* checkIfResourceLoaded(state);
* });
*/
Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url) { return prefetch(url, nullptr, std::numeric_limits<size_t>::max()); }
// FIXME: This function variation shouldn't be in the API.
Q_INVOKABLE ScriptableResource* prefetch(const QUrl& url, void* extra, size_t extraHash);
signals:
/**jsdoc
* Triggered when the cache content has changed.
* @function ResourceCache.dirty
* @returns {Signal}
*/

View file

@ -18,6 +18,8 @@
#include <DependencyManager.h>
/**jsdoc
* The <code>Resources</code> API enables the default location for different resource types to be overridden.
*
* @namespace Resources
*
* @hifi-interface
@ -32,15 +34,17 @@ class ResourceScriptingInterface : public QObject, public Dependency {
public:
/**jsdoc
* Overrides a path prefix with an alternative path.
* @function Resources.overrideUrlPrefix
* @param {string} prefix
* @param {string} replacement
* @param {string} prefix - The path prefix to override, e.g., <code>"atp:/"</code>.
* @param {string} replacement - The replacement path for the prefix.
*/
Q_INVOKABLE void overrideUrlPrefix(const QString& prefix, const QString& replacement);
/**jsdoc
* Restores the default path for a specified prefix.
* @function Resources.restoreUrlPrefix
* @param {string} prefix
* @param {string} prefix - The prefix of the resource to restore the path for.
*/
Q_INVOKABLE void restoreUrlPrefix(const QString& prefix) {
overrideUrlPrefix(prefix, "");

View file

@ -86,7 +86,8 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::MicrophoneAudioNoEcho:
case PacketType::MicrophoneAudioWithEcho:
case PacketType::AudioStreamStats:
return static_cast<PacketVersion>(AudioVersion::HighDynamicRangeVolume);
case PacketType::StopInjector:
return static_cast<PacketVersion>(AudioVersion::StopInjectors);
case PacketType::DomainSettings:
return 18; // replace min_avatar_scale and max_avatar_scale with min_avatar_height and max_avatar_height
case PacketType::Ping:

View file

@ -134,6 +134,7 @@ public:
BulkAvatarTraits,
AudioSoloRequest,
BulkAvatarTraitsAck,
StopInjector,
NUM_PACKET_TYPE
};
@ -369,6 +370,7 @@ enum class AudioVersion : PacketVersion {
SpaceBubbleChanges,
HasPersonalMute,
HighDynamicRangeVolume,
StopInjectors
};
enum class MessageDataVersion : PacketVersion {

View file

@ -245,7 +245,7 @@ void OculusMobileDisplayPlugin::updatePresentPose() {
});
}
void OculusMobileDisplayPlugin::internalPresent(const gpu::FramebufferPointer& compsiteFramebuffer) {
void OculusMobileDisplayPlugin::internalPresent() {
VrHandler::pollTask();
if (!vrActive()) {
@ -253,12 +253,8 @@ void OculusMobileDisplayPlugin::internalPresent(const gpu::FramebufferPointer& c
return;
}
GLuint sourceTexture = 0;
glm::uvec2 sourceSize;
if (compsiteFramebuffer) {
sourceTexture = getGLBackend()->getTextureID(compsiteFramebuffer->getRenderBuffer(0));
sourceSize = { compsiteFramebuffer->getWidth(), compsiteFramebuffer->getHeight() };
}
auto sourceTexture = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0));
glm::uvec2 sourceSize{ _compositeFramebuffer->getWidth(), _compositeFramebuffer->getHeight() };
VrHandler::presentFrame(sourceTexture, sourceSize, presentTracking);
_presentRate.increment();
}

View file

@ -54,8 +54,8 @@ protected:
void uncustomizeContext() override;
void updatePresentPose() override;
void internalPresent(const gpu::FramebufferPointer&) override;
void hmdPresent(const gpu::FramebufferPointer&) override { throw std::runtime_error("Unused"); }
void internalPresent() override;
void hmdPresent() override { throw std::runtime_error("Unused"); }
bool isHmdMounted() const override;
bool alwaysPresent() const override { return true; }

View file

@ -436,7 +436,7 @@ void CharacterController::setLocalBoundingBox(const glm::vec3& minCorner, const
float z = scale.z;
float radius = 0.5f * sqrtf(0.5f * (x * x + z * z));
float halfHeight = 0.5f * scale.y - radius;
float MIN_HALF_HEIGHT = 0.1f;
float MIN_HALF_HEIGHT = 0.0f;
if (halfHeight < MIN_HALF_HEIGHT) {
halfHeight = MIN_HALF_HEIGHT;
}

View file

@ -17,6 +17,14 @@ void SphereRegion::translate(const glm::vec3& translation) {
line.second += translation;
}
}
void SphereRegion::scale(float scale) {
for (auto &line : _lines) {
line.first *= scale;
line.second *= scale;
}
}
void SphereRegion::dump(std::vector<std::pair<glm::vec3, glm::vec3>>& outLines) {
for (auto &line : _lines) {
outLines.push_back(line);
@ -127,7 +135,7 @@ bool MultiSphereShape::computeMultiSphereShape(int jointIndex, const QString& na
_jointIndex = jointIndex;
_name = name;
_mode = getExtractionModeByName(_name);
if (_mode == CollisionShapeExtractionMode::None || kdop.size() < 4 || kdop.size() > 200) {
if (_mode == CollisionShapeExtractionMode::None || kdop.size() < 4) {
return false;
}
std::vector<glm::vec3> points;
@ -151,7 +159,9 @@ bool MultiSphereShape::computeMultiSphereShape(int jointIndex, const QString& na
_midPoint /= (int)points.size();
glm::vec3 dimensions = max - min;
if (glm::length(dimensions) == 0.0f) {
return false;
}
for (size_t i = 0; i < points.size(); i++) {
glm::vec3 relPoint = points[i] - _midPoint;
relPoints.push_back(relPoint);
@ -343,6 +353,7 @@ void MultiSphereShape::connectSpheres(int index1, int index2, bool onlyEdges) {
}
void MultiSphereShape::calculateDebugLines() {
std::vector<float> radiuses;
if (_spheres.size() == 1) {
auto sphere = _spheres[0];
calculateSphereLines(_debugLines, sphere._position, sphere._radius);
@ -351,41 +362,25 @@ void MultiSphereShape::calculateDebugLines() {
} else if (_spheres.size() == 4) {
std::vector<glm::vec3> axes;
axes.resize(8);
const float AXIS_DOT_THRESHOLD = 0.3f;
for (size_t i = 0; i < CORNER_SIGNS.size(); i++) {
for (size_t j = 0; j < 4; j++) {
for (size_t j = 0; j < _spheres.size(); j++) {
auto axis = _spheres[j]._position - _midPoint;
glm::vec3 sign = { axis.x != 0.0f ? glm::abs(axis.x) / axis.x : 0.0f,
axis.x != 0.0f ? glm::abs(axis.y) / axis.y : 0.0f ,
axis.z != 0.0f ? glm::abs(axis.z) / axis.z : 0.0f };
bool add = false;
if (sign.x == 0.0f) {
if (sign.y == CORNER_SIGNS[i].y && sign.z == CORNER_SIGNS[i].z) {
add = true;
}
} else if (sign.y == 0.0f) {
if (sign.x == CORNER_SIGNS[i].x && sign.z == CORNER_SIGNS[i].z) {
add = true;
}
} else if (sign.z == 0.0f) {
if (sign.x == CORNER_SIGNS[i].x && sign.y == CORNER_SIGNS[i].y) {
add = true;
}
} else if (sign == CORNER_SIGNS[i]) {
add = true;
}
if (add) {
if (glm::length(axes[i]) == 0.0f && glm::length(axis) > 0.0f && glm::dot(CORNER_SIGNS[i], glm::normalize(axis)) > AXIS_DOT_THRESHOLD) {
radiuses.push_back(_spheres[j]._radius);
axes[i] = axis;
break;
}
}
}
}
calculateChamferBox(_debugLines, _spheres[0]._radius, axes, _midPoint);
calculateChamferBox(_debugLines, radiuses, axes, _midPoint);
} else if (_spheres.size() == 8) {
std::vector<glm::vec3> axes;
for (size_t i = 0; i < _spheres.size(); i++) {
radiuses.push_back(_spheres[i]._radius);
axes.push_back(_spheres[i]._position - _midPoint);
}
calculateChamferBox(_debugLines, _spheres[0]._radius, axes, _midPoint);
calculateChamferBox(_debugLines, radiuses, axes, _midPoint);
}
}
@ -398,9 +393,9 @@ void MultiSphereShape::connectEdges(std::vector<std::pair<glm::vec3, glm::vec3>>
}
}
void MultiSphereShape::calculateChamferBox(std::vector<std::pair<glm::vec3, glm::vec3>>& outLines, const float& radius, const std::vector<glm::vec3>& axes, const glm::vec3& translation) {
void MultiSphereShape::calculateChamferBox(std::vector<std::pair<glm::vec3, glm::vec3>>& outLines, const std::vector<float>& radiuses, const std::vector<glm::vec3>& axes, const glm::vec3& translation) {
std::vector<std::pair<glm::vec3, glm::vec3>> sphereLines;
calculateSphereLines(sphereLines, glm::vec3(0.0f), radius);
calculateSphereLines(sphereLines, glm::vec3(0.0f), radiuses[0]);
std::vector<SphereRegion> regions = {
SphereRegion({ 1.0f, 1.0f, 1.0f }),
@ -417,6 +412,7 @@ void MultiSphereShape::calculateChamferBox(std::vector<std::pair<glm::vec3, glm:
for (size_t i = 0; i < regions.size(); i++) {
regions[i].extractSphereRegion(sphereLines);
regions[i].scale(radiuses[i]/radiuses[0]);
regions[i].translate(translation + axes[i]);
regions[i].extractEdges(axes[i].y < 0);
regions[i].dump(outLines);

Some files were not shown because too many files have changed in this diff Show more