
Merge remote-tracking branch 'upstream/master' into particleLoad

SamGondelman 2018-10-25 10:21:40 -07:00
commit faff5f11e8
302 changed files with 10297 additions and 6013 deletions
BUILD.md, BUILD_LINUX.md, BUILD_OSX.md, BUILD_WIN.md
android
assignment-client/src
cmake
interface
libraries

View file

@ -9,6 +9,7 @@
- [cmake](https://cmake.org/download/): 3.9
- [Qt](https://www.qt.io/download-open-source): 5.10.1
- [Python](https://www.python.org/downloads/): 3.6 or higher
- [OpenSSL](https://www.openssl.org/): Use the latest available 1.0 version (**NOT** 1.1) of OpenSSL to avoid security vulnerabilities.
- [VHACD](https://github.com/virneo/v-hacd) (clone this repository) (Optional)

View file

@ -40,6 +40,11 @@ Install build tools:
sudo apt-get install cmake
```
Install Python 3:
```bash
sudo apt-get install python3.6
```
### Get code and checkout the tag you need

View file

@ -6,6 +6,10 @@ Please read the [general build guide](BUILD.md) for information on dependencies
brew install cmake openssl qt
### Python 3
Download and install Python 3.6.6 or higher from [here](https://www.python.org/downloads/). Execute the `Update Shell Profile.command` script that is provided with the installer.
### OpenSSL
Assuming you've installed OpenSSL using the homebrew instructions above, you'll need to set OPENSSL_ROOT_DIR so CMake can find your installations.
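For example, a minimal way to do this (assuming a standard Homebrew install; using `brew --prefix` is just one convenient option) is:
```bash
# point CMake at Homebrew's OpenSSL before running cmake
export OPENSSL_ROOT_DIR=$(brew --prefix openssl)
```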
@ -28,7 +32,9 @@ Note that this uses the version from the homebrew formula at the time of this wr
If Xcode is your editor of choice, you can ask CMake to generate Xcode project files instead of Unix Makefiles.
cmake .. -GXcode
cmake .. -G Xcode
If `cmake` complains about Python 3 being missing, you may need to update your CMake binary with the command `brew upgrade cmake`, or by downloading and running the latest CMake installer, depending on how you originally installed CMake.
After running cmake, you will have the make files or Xcode project file necessary to build all of the components. Open the hifi.xcodeproj file, choose ALL_BUILD from the Product > Scheme menu (or target drop down), and click Run.

View file

@ -5,11 +5,17 @@ Note: We are now using Visual Studio 2017 and Qt 5.10.1. If you are upgrading fr
Note: The prerequisites will require about 10 GB of space on your drive. You will also need a system with at least 8GB of main memory.
### Step 1. Visual Studio 2017
### Step 1. Visual Studio 2017 & Python
If you don't have the Community or Professional edition of Visual Studio 2017, download [Visual Studio Community 2017](https://www.visualstudio.com/downloads/).
When selecting components, check "Desktop development with C++." Also on the right on the Summary toolbar, check "Windows 8.1 SDK and UCRT SDK" and "VC++ 2015.3 v140 toolset (x86,x64)".
When selecting components, check "Desktop development with C++". Also on the right on the Summary toolbar, check "Windows 8.1 SDK and UCRT SDK" and "VC++ 2015.3 v140 toolset (x86,x64)". If you do not already have a python development environment installed, also check "Python Development" in this screen.
If you already have Visual Studio installed and need to add python, open the "Add or remove programs" control panel and find the "Microsoft Visual Studio Installer". Select it and click "Modify". In the installer, select "Modify" again, then check "Python Development" and allow the installer to apply the changes.
### Step 1a. Alternate Python
If you do not wish to use the Python installation bundled with Visual Studio, you can download the installer from [here](https://www.python.org/downloads/). Ensure you get version 3.6.6 or higher.
### Step 2. Installing CMake

View file

@ -24,7 +24,6 @@ android {
'-DANDROID_STL=c++_shared',
'-DQT_CMAKE_PREFIX_PATH=' + HIFI_ANDROID_PRECOMPILED + '/qt/lib/cmake',
'-DNATIVE_SCRIBE=' + HIFI_ANDROID_PRECOMPILED + '/scribe' + EXEC_SUFFIX,
'-DNATIVE_SHREFLECT=' + HIFI_ANDROID_PRECOMPILED + '/shreflect' + EXEC_SUFFIX,
'-DHIFI_ANDROID_PRECOMPILED=' + HIFI_ANDROID_PRECOMPILED,
'-DRELEASE_NUMBER=' + RELEASE_NUMBER,
'-DRELEASE_TYPE=' + RELEASE_TYPE,

View file

@ -161,31 +161,19 @@ def packages = [
]
]
def scribeLocalFile='scribe' + EXEC_SUFFIX
def scribeFile='scribe_linux_x86_64'
def scribeChecksum='ca4b904f52f4f993c29175ba96798fa6'
def scribeVersion='u_iTrJDaE95i2abTPXOpPZckGBIim53G'
def shreflectLocalFile='shreflect' + EXEC_SUFFIX
def shreflectFile='shreflect_linux_x86_64'
def shreflectChecksum='d6094a8580066c0b6f4e80b5adfb1d98'
def shreflectVersion='jnrpudh6fptIg6T2.Z6fgKP2ultAdKmE'
def scribeChecksum='4635c28192724281d2367ce9e94380ab'
def scribeVersion='mPAY_N846oZH1tPY1bwChB_hzqkiYyoC'
if (Os.isFamily(Os.FAMILY_MAC)) {
scribeFile = 'scribe_osx_x86_64'
scribeChecksum='72db9d32d4e1e50add755570ac5eb749'
scribeVersion='DAW0DmnjCRib4MD8x93bgc2Z2MpPojZC'
shreflectFile='shreflect_osx_x86_64'
shreflectChecksum='d613ef0703c21371fee93fd2e54b964f'
shreflectVersion='.rYNzjSFq6WtWDnE5KIKRIAGyJtr__ad'
scribeChecksum='1ead61c285d265eba9a5ef91ae3b7c26'
scribeVersion='4TAXWdo9fviw60N2wUA8HNyQ9TabjZa3'
} else if (Os.isFamily(Os.FAMILY_WINDOWS)) {
scribeFile = 'scribe_win32_x86_64.exe'
scribeChecksum='678e43d290c90fda670c6fefe038a06d'
scribeVersion='PuullrA_bPlO9kXZRt8rLe536X1UI.m7'
shreflectFile='shreflect_win32_x86_64.exe'
shreflectChecksum='6f4a77b8cceb3f1bbc655132c3665060'
shreflectVersion='iIyCyza1nelkbI7ihybF59bBlwrfAC3D'
scribeChecksum='9c29a62595daf4844f95f6744d568c15'
scribeVersion='DUoxjufeX8ZAIVRBKRczWTuZwT13enTv'
}
def options = [
@ -461,27 +449,11 @@ task fixScribePermissions(type: Exec, dependsOn: verifyScribe) {
commandLine 'chmod', 'a+x', HIFI_ANDROID_PRECOMPILED + '/' + scribeLocalFile
}
task downloadShreflect(type: Download) {
src baseUrl + shreflectFile + '?versionId=' + shreflectVersion
dest new File(baseFolder, shreflectLocalFile)
onlyIfNewer true
}
task verifyShreflect(type: Verify, dependsOn: downloadShreflect) {
src new File(baseFolder, shreflectLocalFile);
checksum shreflectChecksum
}
task fixShreflectPermissions(type: Exec, dependsOn: verifyShreflect) {
commandLine 'chmod', 'a+x', HIFI_ANDROID_PRECOMPILED + '/' + shreflectLocalFile
}
task setupScribe(dependsOn: [verifyScribe, verifyShreflect]) { }
task setupScribe(dependsOn: [verifyScribe]) { }
// On Windows, we don't need to set the executable bit, but on OSX and Unix we do
if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
setupScribe.dependsOn fixScribePermissions
setupScribe.dependsOn fixShreflectPermissions
}
task extractGvrBinaries(dependsOn: extractDependencies) {

View file

@ -222,7 +222,8 @@ void Agent::requestScript() {
return;
}
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(this, scriptURL);
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
this, scriptURL, true, -1, "Agent::requestScript");
if (!request) {
qWarning() << "Could not create ResourceRequest for Agent script at" << scriptURL.toString();

View file

@ -21,7 +21,6 @@
#include <QtCore/QTimer>
#include <QUuid>
#include <ClientTraitsHandler.h>
#include <EntityEditPacketSender.h>
#include <EntityTree.h>
#include <ScriptEngine.h>

View file

@ -35,6 +35,7 @@
#include "AssignmentClientLogging.h"
#include "AssignmentFactory.h"
#include "ResourceRequestObserver.h"
const QString ASSIGNMENT_CLIENT_TARGET_NAME = "assignment-client";
const long long ASSIGNMENT_REQUEST_INTERVAL_MSECS = 1 * 1000;
@ -49,6 +50,7 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
DependencyManager::set<tracing::Tracer>();
DependencyManager::set<StatTracker>();
DependencyManager::set<AccountManager>();
DependencyManager::set<ResourceRequestObserver>();
auto addressManager = DependencyManager::set<AddressManager>();

View file

@ -6,6 +6,10 @@ if (NOT "${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
message( FATAL_ERROR "Only 64 bit builds supported." )
endif()
if (USE_CCACHE OR "$ENV{USE_CCACHE}")
configure_ccache()
endif()
if (WIN32)
add_definitions(-DNOMINMAX -D_CRT_SECURE_NO_WARNINGS)

cmake/externals/glslang/CMakeLists.txt vendored Normal file
View file

@ -0,0 +1,42 @@
set(EXTERNAL_NAME glslang)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/KhronosGroup/glslang/archive/7.8.2853.zip
URL_MD5 4f93e3818528176c622c137fba05cbf8
CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>-$<CONFIG>
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
# includes
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
set(SUFFIXED_INSTALL_DIR "${INSTALL_DIR}-$<CONFIG>")
list(APPEND INCLUDE_DIRS ${SUFFIXED_INSTALL_DIR}/include)
#list(APPEND INCLUDE_DIRS ${INSTALL_DIR}/include)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INCLUDE_DIRS} CACHE PATH "List of glslang include directories")
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${${EXTERNAL_NAME_UPPER}_INCLUDE_DIR} CACHE PATH "List of glslang include directories")
set(LIB_DIR ${SUFFIXED_INSTALL_DIR}/lib)
list(APPEND LIB_NAMES glslang HLSL OGLCompiler OSDependent SPIRV SPVRemapper)
include(SelectLibraryConfigurations)
foreach(BASE_LIB ${LIB_NAMES})
string(TOUPPER ${BASE_LIB} BASE_LIB_UPPER)
list(APPEND ${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE "${LIB_DIR}/${BASE_LIB}.lib")
list(APPEND ${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "${LIB_DIR}/${BASE_LIB}d.lib")
endforeach()
select_library_configurations(${EXTERNAL_NAME_UPPER})
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${${EXTERNAL_NAME_UPPER}_LIBRARY} CACHE FILEPATH "Location of glslang libraries")
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of glslang libraries")

View file

@ -4,8 +4,8 @@ set(EXTERNAL_NAME serverless-content)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC73.zip
URL_MD5 0c5edfb63cafb042311d3cf25261fbf2
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC75.zip
URL_MD5 b4225d058952e17976ac228330ce8d51
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@ -0,0 +1,34 @@
set(EXTERNAL_NAME spirv_binaries)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
if (CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows")
set(DOWNLOAD_URL https://public.highfidelity.com/dependencies/vulkan/vulkansdk-win32-1.1.82.1.tar.gz)
set(DOWNLOAD_MD5 3a83ef490bce248b1a4d6726a3e5893e)
set(BIN_DIR "Bin")
elseif (CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin")
set(DOWNLOAD_URL https://public.highfidelity.com/dependencies/vulkan/vulkansdk-macos-1.1.82.1.tar.gz)
set(DOWNLOAD_MD5 a57d37275b2c5db023ba8e84a63461ff)
set(BIN_DIR "macOS/bin")
else ()
set(DOWNLOAD_URL https://public.highfidelity.com/dependencies/vulkan/vulkansdk-linux-x86_64-1.1.82.1.tar.gz)
set(DOWNLOAD_MD5 5a7c9eeda8cee6b36724da7f7cbe5ec6)
set(BIN_DIR "x86_64/bin")
endif ()
ExternalProject_Add(
${EXTERNAL_NAME}
URL ${DOWNLOAD_URL}
URL_MD5 ${DOWNLOAD_MD5}
BUILD_COMMAND ""
CONFIGURE_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD ON
)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
set(${EXTERNAL_NAME_UPPER}_DIR "${SOURCE_DIR}/${BIN_DIR}" CACHE FILEPATH "SPIRV binary tools location")

View file

@ -0,0 +1,35 @@
set(EXTERNAL_NAME spirv_cross)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/KhronosGroup/SPIRV-Cross/archive/2018-08-07.zip
URL_MD5 11198e4dc6a815ffbdb7a0a56d2d9261
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>-$<CONFIG> ${EXTRA_CMAKE_FLAGS}
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
set(SUFFIXED_INSTALL_DIR "${INSTALL_DIR}-$<CONFIG>")
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SUFFIXED_INSTALL_DIR}/include CACHE PATH "List of SPIRV-Cross include directories")
if (UNIX)
set(LIB_PREFIX "lib")
set(LIB_EXT "a")
elseif (WIN32)
set(LIB_EXT "lib")
endif ()
foreach(lib glsl msl cpp hlsl reflect util core)
list(APPEND ${EXTERNAL_NAME_UPPER}_LIBRARIES ${SUFFIXED_INSTALL_DIR}/lib/spirv-cross-${lib}.${LIB_EXT})
endforeach()
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Path to SPIRV-Cross libraries")

View file

@ -0,0 +1,18 @@
set(EXTERNAL_NAME spirv_headers)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/KhronosGroup/SPIRV-Headers/archive/2c512180ca03b5d4f56283efc85745775b45fdc4.zip
URL_MD5 83e652221b5f21d5fdb61c45f5b4d9f9
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> ${EXTRA_CMAKE_FLAGS}
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
set(${EXTERNAL_NAME_UPPER}_ROOT ${INSTALL_DIR} CACHE PATH "List of include directories")

View file

@ -0,0 +1,33 @@
set(EXTERNAL_NAME spirv_tools)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/KhronosGroup/SPIRV-Tools/archive/v2018.4.zip
URL_MD5 7a7c69cf6ff0318910b4bfbdf30bcfc9
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DSPIRV-Headers_SOURCE_DIR=${SPIRV_HEADERS_ROOT} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>-$<CONFIG> ${EXTRA_CMAKE_FLAGS}
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
set(SUFFIXED_INSTALL_DIR "${INSTALL_DIR}-$<CONFIG>")
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SUFFIXED_INSTALL_DIR}/include CACHE PATH "List of SPIRV-Tools include directories")
if (UNIX)
set(LIB_PREFIX "lib")
set(LIB_EXT "a")
elseif (WIN32)
set(LIB_EXT "lib")
endif ()
list(APPEND ${EXTERNAL_NAME_UPPER}_LIBRARIES ${SUFFIXED_INSTALL_DIR}/lib/SPIRV-Tools-opt.${LIB_EXT})
list(APPEND ${EXTERNAL_NAME_UPPER}_LIBRARIES ${SUFFIXED_INSTALL_DIR}/lib/SPIRV-Tools-link.${LIB_EXT})
list(APPEND ${EXTERNAL_NAME_UPPER}_LIBRARIES ${SUFFIXED_INSTALL_DIR}/lib/SPIRV-Tools.${LIB_EXT})
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Path to SPIRV-Tool libraries")

View file

@ -10,6 +10,10 @@ if (POLICY CMP0042)
cmake_policy(SET CMP0042 NEW)
endif ()
if (POLICY CMP0074)
cmake_policy(SET CMP0074 OLD)
endif ()
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
set_property(GLOBAL PROPERTY PREDEFINED_TARGETS_FOLDER "CMakeTargets")
# Hide automoc folders (for IDEs)

View file

@ -8,34 +8,132 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
# FIXME use the built tools
macro(AUTOSCRIBE_APPEND_QRC)
string(CONCAT SHADER_QRC "${SHADER_QRC}" "<file alias=\"${ARGV0}\">${ARGV1}</file>\n")
endmacro()
set(VULKAN_DIR $ENV{VULKAN_SDK})
set(GLSLANG_EXEC "${VULKAN_DIR}/Bin/glslangValidator.exe")
set(SPIRV_CROSS_EXEC "${VULKAN_DIR}/Bin/spirv-cross.exe")
set(SPIRV_OPT_EXEC "${VULKAN_DIR}/Bin/spirv-opt.exe")
set(GLSLC_EXEC "${VULKAN_DIR}/Bin/glslc.exe")
set(SCRIBE_EXEC "D:/scribe.exe")
macro(AUTOSCRIBE_PLATFORM_SHADER)
set(AUTOSCRIBE_PLATFORM_PATH "${ARGV0}")
string(REGEX MATCH "([0-9]+(es)?)(/stereo)?" PLATFORM_PATH_REGEX ${AUTOSCRIBE_PLATFORM_PATH})
set(AUTOSCRIBE_DIALECT "${CMAKE_MATCH_1}")
if (CMAKE_MATCH_3)
set(AUTOSCRIBE_VARIANT "stereo")
else()
set(AUTOSCRIBE_VARIANT "mono")
endif()
string(REGEX REPLACE "/" "\\\\" SOURCE_GROUP_PATH ${AUTOSCRIBE_PLATFORM_PATH})
set(SOURCE_GROUP_PATH "${SHADER_LIB}\\${SOURCE_GROUP_PATH}")
set(AUTOSCRIBE_DIALECT_HEADER "${AUTOSCRIBE_HEADER_DIR}/${AUTOSCRIBE_DIALECT}/header.glsl")
set(AUTOSCRIBE_VARIANT_HEADER "${AUTOSCRIBE_HEADER_DIR}/${AUTOSCRIBE_VARIANT}.glsl")
set(AUTOSCRIBE_OUTPUT_FILE "${SHADERS_DIR}/${SHADER_LIB}/${AUTOSCRIBE_PLATFORM_PATH}/${SHADER_NAME}.${SHADER_TYPE}")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/scribe" "${AUTOSCRIBE_OUTPUT_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_OUTPUT_FILE})
set_property(SOURCE ${AUTOSCRIBE_OUTPUT_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SCRIBED_SHADERS ${AUTOSCRIBE_OUTPUT_FILE})
set(AUTOSCRIBE_SPIRV_FILE "${AUTOSCRIBE_OUTPUT_FILE}.spv")
# don't add unoptimized spirv to the QRC
#AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/spirv_unopt" "${AUTOSCRIBE_SPIRV_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SPIRV_SHADERS ${AUTOSCRIBE_SPIRV_FILE})
set(AUTOSCRIBE_SPIRV_OPT_FILE "${AUTOSCRIBE_OUTPUT_FILE}.opt.spv")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/spirv" "${AUTOSCRIBE_SPIRV_OPT_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_OPT_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_OPT_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SPIRV_SHADERS ${AUTOSCRIBE_SPIRV_OPT_FILE})
set(AUTOSCRIBE_SPIRV_GLSL_FILE "${AUTOSCRIBE_OUTPUT_FILE}.glsl")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/glsl" "${AUTOSCRIBE_SPIRV_GLSL_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_GLSL_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_GLSL_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SPIRV_SHADERS ${AUTOSCRIBE_SPIRV_GLSL_FILE})
set(AUTOSCRIBE_SPIRV_JSON_FILE "${AUTOSCRIBE_OUTPUT_FILE}.json")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/json" "${AUTOSCRIBE_SPIRV_JSON_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_JSON_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_JSON_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND REFLECTED_SHADERS ${AUTOSCRIBE_SPIRV_JSON_FILE})
unset(SHADER_GEN_LINE)
list(APPEND SHADER_GEN_LINE ${AUTOSCRIBE_DIALECT})
list(APPEND SHADER_GEN_LINE ${AUTOSCRIBE_VARIANT})
file(RELATIVE_PATH TEMP_PATH ${CMAKE_SOURCE_DIR} ${SHADER_FILE})
list(APPEND SHADER_GEN_LINE ${TEMP_PATH})
file(RELATIVE_PATH TEMP_PATH ${CMAKE_SOURCE_DIR} ${AUTOSCRIBE_OUTPUT_FILE})
list(APPEND SHADER_GEN_LINE ${TEMP_PATH})
list(APPEND SHADER_GEN_LINE ${AUTOSCRIBE_SHADER_SEEN_LIBS})
string(CONCAT AUTOSCRIBE_SHADERGEN_COMMANDS "${AUTOSCRIBE_SHADERGEN_COMMANDS}" "${SHADER_GEN_LINE}\n")
# # FIXME need better mechanism for determining the include files
# add_custom_command(
# OUTPUT ${AUTOSCRIBE_OUTPUT_FILE}
# COMMAND ${SCRIBE_COMMAND} ${SHADER_FILE} ${SCRIBE_ARGS} -o ${AUTOSCRIBE_OUTPUT_FILE} -h ${AUTOSCRIBE_DIALECT_HEADER} -h ${AUTOSCRIBE_VARIANT_HEADER}
# DEPENDS ${SCRIBE_COMMAND} ${SHADER_FILE} ${AUTOSCRIBE_DIALECT_HEADER} ${AUTOSCRIBE_VARIANT_HEADER})
# # Generate the spirv file
# add_custom_command(
# OUTPUT ${AUTOSCRIBE_SPIRV_FILE}
# COMMAND ${GLSLANG_EXEC} -V110 -o ${AUTOSCRIBE_SPIRV_FILE} ${AUTOSCRIBE_OUTPUT_FILE}
# DEPENDS ${AUTOSCRIBE_OUTPUT_FILE} ${GLSLANG_EXEC})
# # Generate the optimized spirv file
# add_custom_command(
# OUTPUT ${AUTOSCRIBE_SPIRV_OPT_FILE}
# COMMAND ${SPIRV_OPT_EXEC} -O ${AUTOSCRIBE_SPIRV_FILE} -o ${AUTOSCRIBE_SPIRV_OPT_FILE}
# DEPENDS ${AUTOSCRIBE_SPIRV_FILE} ${SPIRV_OPT_EXEC})
# # Generate the optimized GLSL file
# add_custom_command(
# OUTPUT ${AUTOSCRIBE_SPIRV_GLSL_FILE}
# COMMAND ${SPIRV_CROSS_EXEC} ${SPIRV_CROSS_ARGS} ${AUTOSCRIBE_SPIRV_OPT_FILE} --output ${AUTOSCRIBE_SPIRV_GLSL_FILE}
# DEPENDS ${AUTOSCRIBE_SPIRV_OPT_FILE} ${SPIRV_CROSS_EXEC})
# # Generate the optimized spirv file
# add_custom_command(
# OUTPUT ${AUTOSCRIBE_SPIRV_JSON_FILE}
# COMMAND ${SPIRV_CROSS_EXEC} --reflect json ${AUTOSCRIBE_SPIRV_OPT_FILE} --output ${AUTOSCRIBE_SPIRV_JSON_FILE}
# DEPENDS ${AUTOSCRIBE_SPIRV_OPT_FILE} ${SPIRV_CROSS_EXEC})
endmacro()
macro(AUTOSCRIBE_SHADER)
#
# Set the include paths
#
# FIXME base the include paths off of output from the scribe tool,
# instead of treating every previously seen shader as a possible header
unset(SHADER_INCLUDE_FILES)
# Grab include files
foreach(includeFile ${ARGN})
list(APPEND SHADER_INCLUDE_FILES ${includeFile})
endforeach()
foreach(SHADER_INCLUDE ${SHADER_INCLUDE_FILES})
get_filename_component(INCLUDE_DIR ${SHADER_INCLUDE} PATH)
list(APPEND SHADER_INCLUDES_PATHS ${INCLUDE_DIR})
endforeach()
list(REMOVE_DUPLICATES SHADER_INCLUDES_PATHS)
#Extract the unique include shader paths
set(INCLUDES ${HIFI_LIBRARIES_SHADER_INCLUDE_FILES})
foreach(EXTRA_SHADER_INCLUDE ${INCLUDES})
list(APPEND SHADER_INCLUDES_PATHS ${EXTRA_SHADER_INCLUDE})
endforeach()
list(REMOVE_DUPLICATES SHADER_INCLUDES_PATHS)
#message(ready for includes ${SHADER_INCLUDES_PATHS})
# make the scribe include arguments
set(SCRIBE_INCLUDES)
unset(SCRIBE_INCLUDES)
foreach(INCLUDE_PATH ${SHADER_INCLUDES_PATHS})
set(SCRIBE_INCLUDES ${SCRIBE_INCLUDES} -I ${INCLUDE_PATH}/)
endforeach()
#
# Figure out the various output names
#
# Define the final name of the generated shader file
get_filename_component(SHADER_NAME ${SHADER_FILE} NAME_WE)
get_filename_component(SHADER_EXT ${SHADER_FILE} EXT)
@ -46,38 +144,36 @@ macro(AUTOSCRIBE_SHADER)
elseif(${SHADER_EXT} STREQUAL .slg)
set(SHADER_TYPE geom)
endif()
file(MAKE_DIRECTORY "${SHADERS_DIR}/${SHADER_LIB}")
set(SHADER_TARGET "${SHADERS_DIR}/${SHADER_LIB}/${SHADER_NAME}.${SHADER_TYPE}")
file(TO_CMAKE_PATH "${SHADER_TARGET}" COMPILED_SHADER)
set(REFLECTED_SHADER "${COMPILED_SHADER}.json")
set(SCRIBE_ARGS -T ${SHADER_TYPE} -D GLPROFILE ${GLPROFILE} ${SCRIBE_INCLUDES} -o ${SHADER_TARGET} ${SHADER_FILE})
set(SCRIBE_ARGS -D GLPROFILE ${GLPROFILE} -T ${SHADER_TYPE} ${SCRIBE_INCLUDES} )
# Generate the frag/vert file
add_custom_command(
OUTPUT ${SHADER_TARGET}
COMMAND ${SCRIBE_COMMAND} ${SCRIBE_ARGS}
DEPENDS ${SHADER_FILE} ${SCRIBE_COMMAND} ${SHADER_INCLUDE_FILES})
# SHADER_SCRIBED -> the output of scribe
set(SHADER_SCRIBED "${SHADERS_DIR}/${SHADER_LIB}/${SHADER_NAME}.${SHADER_TYPE}")
# Generate the json reflection
# FIXME move to spirv-cross for this task after we have spirv compatible shaders
add_custom_command(
OUTPUT ${REFLECTED_SHADER}
COMMAND ${SHREFLECT_COMMAND} ${COMPILED_SHADER}
DEPENDS ${SHREFLECT_DEPENDENCY} ${COMPILED_SHADER})
# SHADER_NAME_FILE -> a file containing the shader name and extension (useful for debugging and for
# determining the type of shader from the filename)
set(SHADER_NAME_FILE "${SHADER_SCRIBED}.name")
file(TO_CMAKE_PATH "${SHADER_SCRIBED}" SHADER_SCRIBED)
file(WRITE "${SHADER_SCRIBED}.name" "${SHADER_NAME}.${SHADER_TYPE}")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/name" "${SHADER_NAME_FILE}")
#output the generated file name
source_group("Compiled/${SHADER_LIB}" FILES ${COMPILED_SHADER})
set_property(SOURCE ${COMPILED_SHADER} PROPERTY SKIP_AUTOMOC ON)
list(APPEND COMPILED_SHADERS ${COMPILED_SHADER})
if (USE_GLES)
set(SPIRV_CROSS_ARGS --version 310es)
AUTOSCRIBE_PLATFORM_SHADER("310es")
AUTOSCRIBE_PLATFORM_SHADER("310es/stereo")
else()
set(SPIRV_CROSS_ARGS --version 410 --no-420pack-extension)
AUTOSCRIBE_PLATFORM_SHADER("410")
AUTOSCRIBE_PLATFORM_SHADER("410/stereo")
if (NOT APPLE)
set(SPIRV_CROSS_ARGS --version 450)
AUTOSCRIBE_PLATFORM_SHADER("450")
AUTOSCRIBE_PLATFORM_SHADER("450/stereo")
endif()
endif()
source_group("Reflected/${SHADER_LIB}" FILES ${REFLECTED_SHADER})
list(APPEND REFLECTED_SHADERS ${REFLECTED_SHADER})
string(CONCAT SHADER_QRC "${SHADER_QRC}" "<file alias=\"${SHADER_COUNT}\">${COMPILED_SHADER}</file>\n")
string(CONCAT SHADER_QRC "${SHADER_QRC}" "<file alias=\"${SHADER_COUNT}_reflection\">${REFLECTED_SHADER}</file>\n")
string(CONCAT SHADER_ENUMS "${SHADER_ENUMS}" "${SHADER_NAME} = ${SHADER_COUNT},\n")
string(CONCAT SHADER_SHADERS_ARRAY "${SHADER_SHADERS_ARRAY}" "${SHADER_COUNT},\n")
MATH(EXPR SHADER_COUNT "${SHADER_COUNT}+1")
endmacro()
@ -86,6 +182,8 @@ macro(AUTOSCRIBE_SHADER_LIB)
message(FATAL_ERROR "AUTOSCRIBE_SHADER_LIB can only be used by the shaders library")
endif()
file(MAKE_DIRECTORY "${SHADERS_DIR}/${SHADER_LIB}")
list(APPEND HIFI_LIBRARIES_SHADER_INCLUDE_FILES "${CMAKE_SOURCE_DIR}/libraries/${SHADER_LIB}/src")
string(REGEX REPLACE "[-]" "_" SHADER_NAMESPACE ${SHADER_LIB})
string(CONCAT SHADER_ENUMS "${SHADER_ENUMS}" "namespace ${SHADER_NAMESPACE} {\n")
@ -165,66 +263,103 @@ macro(AUTOSCRIBE_SHADER_LIB)
# Finish the shader enums
string(CONCAT SHADER_ENUMS "${SHADER_ENUMS}" "} // namespace ${SHADER_NAMESPACE}\n")
#file(RELATIVE_PATH RELATIVE_LIBRARY_DIR_PATH ${CMAKE_CURRENT_SOURCE_DIR} "${HIFI_LIBRARY_DIR}")
#foreach(HIFI_LIBRARY ${ARGN})
#list(APPEND HIFI_LIBRARIES_SHADER_INCLUDE_FILES ${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src)
#endforeach()
#endif()
endmacro()
macro(AUTOSCRIBE_SHADER_LIBS)
set(SCRIBE_COMMAND scribe)
set(SHREFLECT_COMMAND shreflect)
set(SHREFLECT_DEPENDENCY shreflect)
# Target dependant Custom rule on the SHADER_FILE
if (ANDROID)
set(GLPROFILE LINUX_GL)
set(SCRIBE_COMMAND ${NATIVE_SCRIBE})
set(SHREFLECT_COMMAND ${NATIVE_SHREFLECT})
unset(SHREFLECT_DEPENDENCY)
else()
if (APPLE)
set(GLPROFILE MAC_GL)
elseif(UNIX)
set(GLPROFILE LINUX_GL)
else()
set(GLPROFILE PC_GL)
endif()
endif()
message(STATUS "Shader processing start")
set(AUTOSCRIBE_HEADER_DIR ${CMAKE_CURRENT_SOURCE_DIR}/headers)
# Start the shader IDs
set(SHADER_COUNT 1)
set(SHADERS_DIR "${CMAKE_CURRENT_BINARY_DIR}/shaders")
set(SHADER_ENUMS "")
file(MAKE_DIRECTORY ${SHADERS_DIR})
set(SHADER_ENUMS "")
set(SHADER_COUNT 1)
#
# Scribe generation & program definition
#
foreach(SHADER_LIB ${ARGN})
list(APPEND AUTOSCRIBE_SHADER_SEEN_LIBS ${SHADER_LIB})
AUTOSCRIBE_SHADER_LIB(${SHADER_LIB})
endforeach()
# Generate the library files
configure_file(
ShaderEnums.cpp.in
${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.cpp)
${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.cpp)
configure_file(
ShaderEnums.h.in
${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.h)
configure_file(
shaders.qrc.in
${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.h)
set(AUTOSCRIBE_SHADER_LIB_SRC "${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.h;${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.cpp")
set(QT_RESOURCES_FILE ${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
configure_file(shaders.qrc.in ${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
list(APPEND QT_RESOURCES_FILE ${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
list(APPEND AUTOSCRIBE_SHADER_HEADERS ${AUTOSCRIBE_HEADER_DIR}/mono.glsl ${AUTOSCRIBE_HEADER_DIR}/stereo.glsl)
list(APPEND AUTOSCRIBE_SHADER_HEADERS ${AUTOSCRIBE_HEADER_DIR}/450/header.glsl ${AUTOSCRIBE_HEADER_DIR}/410/header.glsl ${AUTOSCRIBE_HEADER_DIR}/310es/header.glsl)
source_group("Shader Headers" FILES ${AUTOSCRIBE_HEADER_DIR}/mono.glsl ${AUTOSCRIBE_HEADER_DIR}/stereo.glsl)
source_group("Shader Headers\\450" FILES ${AUTOSCRIBE_HEADER_DIR}/450/header.glsl)
source_group("Shader Headers\\410" FILES ${AUTOSCRIBE_HEADER_DIR}/410/header.glsl)
source_group("Shader Headers\\310es" FILES ${AUTOSCRIBE_HEADER_DIR}/310es/header.glsl)
list(APPEND AUTOSCRIBE_SHADER_LIB_SRC ${AUTOSCRIBE_SHADER_HEADERS})
list(APPEND AUTOSCRIBE_SHADER_LIB_SRC ${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.h ${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.cpp)
# Write the shadergen command list
set(AUTOSCRIBE_SHADERGEN_COMMANDS_FILE ${CMAKE_CURRENT_BINARY_DIR}/shadergen.txt)
file(WRITE ${AUTOSCRIBE_SHADERGEN_COMMANDS_FILE} "${AUTOSCRIBE_SHADERGEN_COMMANDS}")
# grab the SPIRV binaries we require
# note we don't use the normal ADD_DEPENDENCY_EXTERNAL_PROJECTS macro because only a custom command
# depends on these, not any of our build artifacts, so there's no valid target for the add_dependencies
# call in ADD_DEPENDENCY_EXTERNAL_PROJECTS to use
add_subdirectory(${EXTERNAL_PROJECT_DIR}/spirv_binaries ${EXTERNALS_BINARY_DIR}/spirv_binaries)
target_python()
# A custom python script which will generate the scribed, SPIR-V and reflection shader outputs
if (ANDROID)
add_custom_command(
OUTPUT ${SCRIBED_SHADERS} ${SPIRV_SHADERS} ${REFLECTED_SHADERS}
COMMENT "Generating/updating shaders"
COMMAND ${HIFI_PYTHON_EXEC} ${CMAKE_SOURCE_DIR}/tools/shadergen.py
--commands ${AUTOSCRIBE_SHADERGEN_COMMANDS_FILE}
--spirv-binaries ${SPIRV_BINARIES_DIR}
--scribe ${NATIVE_SCRIBE}
--build-dir ${CMAKE_CURRENT_BINARY_DIR}
--source-dir ${CMAKE_SOURCE_DIR}
DEPENDS ${AUTOSCRIBE_SHADER_HEADERS} spirv_binaries ${CMAKE_SOURCE_DIR}/tools/shadergen.py ${ALL_SCRIBE_SHADERS})
else()
add_custom_command(
OUTPUT ${SCRIBED_SHADERS} ${SPIRV_SHADERS} ${REFLECTED_SHADERS}
COMMENT "Generating/updating shaders"
COMMAND ${HIFI_PYTHON_EXEC} ${CMAKE_SOURCE_DIR}/tools/shadergen.py
--commands ${AUTOSCRIBE_SHADERGEN_COMMANDS_FILE}
--spirv-binaries ${SPIRV_BINARIES_DIR}
--scribe $<TARGET_FILE:scribe>
--build-dir ${CMAKE_CURRENT_BINARY_DIR}
--source-dir ${CMAKE_SOURCE_DIR}
DEPENDS ${AUTOSCRIBE_SHADER_HEADERS} scribe spirv_binaries ${CMAKE_SOURCE_DIR}/tools/shadergen.py ${ALL_SCRIBE_SHADERS})
endif()
add_custom_target(shadergen DEPENDS ${SCRIBED_SHADERS} ${SPIRV_SHADERS} ${REFLECTED_SHADERS})
set_target_properties(shadergen PROPERTIES FOLDER "Shaders")
# Custom targets required to force generation of the shaders via scribe
add_custom_target(scribe_shaders SOURCES ${ALL_SCRIBE_SHADERS})
add_custom_target(compiled_shaders SOURCES ${COMPILED_SHADERS})
add_custom_target(reflected_shaders SOURCES ${REFLECTED_SHADERS})
add_custom_target(scribe_shaders SOURCES ${ALL_SCRIBE_SHADERS} ${AUTOSCRIBE_SHADER_HEADERS})
set_target_properties(scribe_shaders PROPERTIES FOLDER "Shaders")
set_target_properties(compiled_shaders PROPERTIES FOLDER "Shaders")
add_custom_target(scribed_shaders SOURCES ${SCRIBED_SHADERS})
set_target_properties(scribed_shaders PROPERTIES FOLDER "Shaders")
add_dependencies(scribed_shaders shadergen)
add_custom_target(spirv_shaders SOURCES ${SPIRV_SHADERS})
set_target_properties(spirv_shaders PROPERTIES FOLDER "Shaders")
add_dependencies(spirv_shaders shadergen)
add_custom_target(reflected_shaders SOURCES ${REFLECTED_SHADERS})
set_target_properties(reflected_shaders PROPERTIES FOLDER "Shaders")
add_dependencies(reflected_shaders shadergen)
message(STATUS "Shader processing end")
endmacro()
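For reference, the custom command above boils down to a single `shadergen.py` invocation; run by hand it would look roughly like the following sketch (the build/source paths and tool locations are illustrative, not fixed values):
```bash
# regenerate all scribed/SPIR-V/reflection shader outputs listed in shadergen.txt
python3 tools/shadergen.py \
    --commands <build-dir>/libraries/shaders/shadergen.txt \
    --spirv-binaries <spirv-binaries-dir> \
    --scribe <path-to-scribe-binary> \
    --build-dir <build-dir>/libraries/shaders \
    --source-dir <source-dir>
```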

View file

@ -0,0 +1,45 @@
#
# ConfigureCCache.cmake
# cmake/macros
#
# Created by Clement Brisset on 10/10/18.
# Copyright 2018 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(configure_ccache)
find_program(CCACHE_PROGRAM ccache)
if(CCACHE_PROGRAM)
message(STATUS "Configuring ccache")
# Set up wrapper scripts
set(C_LAUNCHER "${CCACHE_PROGRAM}")
set(CXX_LAUNCHER "${CCACHE_PROGRAM}")
set(LAUNCH_C_IN "${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/launch-c.in")
set(LAUNCH_CXX_IN "${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/launch-cxx.in")
set(LAUNCH_C "${CMAKE_BINARY_DIR}/CMakeFiles/launch-c")
set(LAUNCH_CXX "${CMAKE_BINARY_DIR}/CMakeFiles/launch-cxx")
configure_file(${LAUNCH_C_IN} ${LAUNCH_C})
configure_file(${LAUNCH_CXX_IN} ${LAUNCH_CXX})
execute_process(COMMAND chmod a+rx ${LAUNCH_C} ${LAUNCH_CXX})
if(CMAKE_GENERATOR STREQUAL "Xcode")
# Set Xcode project attributes to route compilation and linking
# through our scripts
set(CMAKE_XCODE_ATTRIBUTE_CC ${LAUNCH_C})
set(CMAKE_XCODE_ATTRIBUTE_CXX ${LAUNCH_CXX})
set(CMAKE_XCODE_ATTRIBUTE_LD ${LAUNCH_C})
set(CMAKE_XCODE_ATTRIBUTE_LDPLUSPLUS ${LAUNCH_CXX})
else()
# Support Unix Makefiles and Ninja
set(CMAKE_C_COMPILER_LAUNCHER ${LAUNCH_C})
set(CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCH_CXX})
endif()
else()
message(WARNING "Could not find ccache")
endif()
endmacro()
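Given the `if (USE_CCACHE OR "$ENV{USE_CCACHE}")` check added to the top-level CMakeLists.txt, a sketch of how a build might opt in (either form should work):
```bash
# enable ccache via the CMake option...
cmake .. -DUSE_CCACHE=ON
# ...or via the environment variable the same check reads
USE_CCACHE=1 cmake ..
```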

View file

@ -0,0 +1,22 @@
macro(TARGET_PYTHON)
if (NOT HIFI_PYTHON_EXEC)
# Find the python interpreter
if (CMAKE_VERSION VERSION_LESS 3.12)
# this logic is deprecated in CMake after 3.12
# FIXME eventually we should make 3.12 the minimum cmake version and just use the Python3 find_package path
set(Python_ADDITIONAL_VERSIONS 3)
find_package(PythonInterp)
set(HIFI_PYTHON_VERSION ${PYTHON_VERSION_STRING})
set(HIFI_PYTHON_EXEC ${PYTHON_EXECUTABLE})
else()
# the new hotness
find_package(Python3)
set(HIFI_PYTHON_VERSION ${Python3_VERSION})
set(HIFI_PYTHON_EXEC ${Python3_EXECUTABLE})
endif()
if ((NOT HIFI_PYTHON_EXEC) OR (HIFI_PYTHON_VERSION VERSION_LESS 3.5))
message(FATAL_ERROR "Unable to locate Python interpreter 3.5 or higher")
endif()
endif()
endmacro()
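Because the macro only runs the lookup when `HIFI_PYTHON_EXEC` is unset, a wrong or missing interpreter can be worked around by pre-seeding it at configure time; a sketch (interpreter path illustrative):
```bash
# pre-select the Python interpreter instead of relying on find_package detection
cmake .. -DHIFI_PYTHON_EXEC=/usr/bin/python3
```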

View file

@ -0,0 +1,15 @@
macro(TARGET_SPIRV)
add_dependency_external_projects(spirv_cross)
target_link_libraries(${TARGET_NAME} ${SPIRV_CROSS_LIBRARIES})
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${SPIRV_CROSS_INCLUDE_DIRS})
# spirv-tools requires spirv-headers
add_dependency_external_projects(spirv_headers)
add_dependency_external_projects(spirv_tools)
target_link_libraries(${TARGET_NAME} ${SPIRV_TOOLS_LIBRARIES})
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${SPIRV_TOOLS_INCLUDE_DIRS})
add_dependency_external_projects(glslang)
target_link_libraries(${TARGET_NAME} ${GLSLANG_LIBRARIES})
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${GLSLANG_INCLUDE_DIRS})
endmacro()

View file

@ -0,0 +1,10 @@
#
# Created by Bradley Austin Davis on 2016/02/16
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_SPIRV_BINARIES)
add_dependency_external_projects(spirv_binaries)
endmacro()

View file

@ -0,0 +1,19 @@
#
# Created by Bradley Austin Davis on 2016/02/16
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_VULKAN)
find_package(Vulkan)
if (Vulkan_FOUND)
add_definitions(-DHAVE_VULKAN)
target_include_directories(${TARGET_NAME} PRIVATE ${Vulkan_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${Vulkan_LIBRARIES})
add_dependency_external_projects(glslang)
target_include_directories(${TARGET_NAME} PRIVATE ${GLSLANG_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${GLSLANG_LIBRARIES})
endif()
endmacro()
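`find_package(Vulkan)` normally locates the SDK through the `VULKAN_SDK` environment variable (the same variable AutoScribeShader.cmake reads for its tool paths), so a sketch of making this optional Vulkan path kick in would be (install path illustrative):
```bash
# expose the Vulkan SDK to CMake before configuring
export VULKAN_SDK=$HOME/VulkanSDK/1.1.82.1/x86_64
```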

View file

@ -0,0 +1,12 @@
#!/bin/sh
# Xcode generator doesn't include the compiler as the
# first argument, Ninja and Makefiles do. Handle both cases.
if [[ "$1" = "${CMAKE_C_COMPILER}" ]] ; then
shift
fi
export CCACHE_CPP2=true
export CCACHE_HARDLINK=true
export CCACHE_SLOPPINESS=file_macro,time_macros,include_file_mtime,include_file_ctime,file_stat_matches
exec "${C_LAUNCHER}" "${CMAKE_C_COMPILER}" "$@"

View file

@ -0,0 +1,12 @@
#!/bin/sh
# Xcode generator doesn't include the compiler as the
# first argument, Ninja and Makefiles do. Handle both cases.
if [[ "$1" = "${CMAKE_CXX_COMPILER}" ]] ; then
shift
fi
export CCACHE_CPP2=true
export CCACHE_HARDLINK=true
export CCACHE_SLOPPINESS=file_macro,time_macros,include_file_mtime,include_file_ctime,file_stat_matches
exec "${CXX_LAUNCHER}" "${CMAKE_CXX_COMPILER}" "$@"

View file

@ -0,0 +1,140 @@
{
"version": "1.1",
"root": {
"id": "userAnimStateMachine",
"type": "stateMachine",
"data": {
"currentState": "idleAnim",
"states": [
{
"id": "idleAnim",
"interpTarget": 6,
"interpDuration": 6,
"transitions": [
{ "var": "postTransitAnim", "state": "postTransitAnim" },
{ "var": "preTransitAnim", "state": "preTransitAnim" }
]
},
{
"id": "preTransitAnim",
"interpTarget": 6,
"interpDuration": 6,
"transitions": [
{ "var": "idleAnim", "state": "idleAnim" },
{ "var": "transitAnim", "state": "transitAnim" }
]
},
{
"id": "transitAnim",
"interpTarget": 6,
"interpDuration": 6,
"transitions": [
{ "var": "preTransitAnim", "state": "preTransitAnim" },
{ "var": "postTransitAnim", "state": "postTransitAnim" }
]
},
{
"id": "postTransitAnim",
"interpTarget": 6,
"interpDuration": 6,
"transitions": [
{ "var": "transitAnim", "state": "transitAnim" },
{ "var": "idleAnim", "state": "idleAnim" }
]
},
{
"id": "userAnimA",
"interpTarget": 6,
"interpDuration": 6,
"transitions": [
{ "var": "idleAnim", "state": "idleAnim" },
{ "var": "userAnimB", "state": "userAnimB" }
]
},
{
"id": "userAnimB",
"interpTarget": 6,
"interpDuration": 6,
"transitions": [
{ "var": "idleAnim", "state": "idleAnim" },
{ "var": "userAnimA", "state": "userAnimA" }
]
}
]
},
"children": [
{
"id": "idleAnim",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/idle.fbx",
"startFrame": 0.0,
"endFrame": 90.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
},
{
"id": "preTransitAnim",
"type": "clip",
"data": {
"url": "https://hifi-content.s3.amazonaws.com/luis/test_scripts/transitApp/animations/teleport01_warp.fbx",
"startFrame": 0.0,
"endFrame": 10.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
},
{
"id": "transitAnim",
"type": "clip",
"data": {
"url": "https://hifi-content.s3.amazonaws.com/luis/test_scripts/transitApp/animations/teleport01_warp.fbx",
"startFrame": 11.0,
"endFrame": 11.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
},
{
"id": "postTransitAnim",
"type": "clip",
"data": {
"url": "https://hifi-content.s3.amazonaws.com/luis/test_scripts/transitApp/animations/teleport01_warp.fbx",
"startFrame": 22.0,
"endFrame": 49.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
},
{
"id": "userAnimA",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/idle.fbx",
"startFrame": 0.0,
"endFrame": 90.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
},
{
"id": "userAnimB",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/idle.fbx",
"startFrame": 0.0,
"endFrame": 90.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
}
]
}
}

View file

@ -133,7 +133,7 @@
{ "from": "Keyboard.W", "when": "!Keyboard.Control", "to": "Actions.LONGITUDINAL_FORWARD" },
{ "from": "Keyboard.S", "when": "!Keyboard.Control", "to": "Actions.LONGITUDINAL_BACKWARD" },
{ "from": "Keyboard.Shift", "when": ["!Keyboard.Left", "!Keyboard.Right"], "to": "Actions.SPRINT" },
{ "from": "Keyboard.C", "to": "Actions.VERTICAL_DOWN" },
{ "from": "Keyboard.C", "when": "!Keyboard.Control", "to": "Actions.VERTICAL_DOWN" },
{ "from": "Keyboard.Left", "when": "Keyboard.Shift", "to": "Actions.LATERAL_LEFT" },
{ "from": "Keyboard.Right", "when": "Keyboard.Shift", "to": "Actions.LATERAL_RIGHT" },
{ "from": "Keyboard.Up", "when": "Application.CameraFirstPerson", "to": "Actions.LONGITUDINAL_FORWARD" },

View file

@ -51,32 +51,34 @@
{ "from": "Vive.RSCenter", "to": "Standard.RightPrimaryThumb" },
{ "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand"},
{ "from": "Vive.RightHand", "to": "Standard.RightHand"},
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand" },
{ "from": "Vive.RightHand", "to": "Standard.RightHand" },
{ "from": "Vive.Head", "to" : "Standard.Head" },
{
"from": "Vive.LeftFoot", "to" : "Standard.LeftFoot",
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
},
{
"from": "Vive.RightFoot", "to" : "Standard.RightFoot",
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
},
{
"from": "Vive.Hips", "to" : "Standard.Hips",
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
},
{
"from": "Vive.Spine2", "to" : "Standard.Spine2",
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
},
{
"from": "Vive.RightArm", "to" : "Standard.RightArm",
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
},
{
"from": "Vive.LeftArm", "to" : "Standard.LeftArm",
"filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
},
{ "from": "Vive.Head", "to" : "Standard.Head"},
{ "from": "Vive.RightArm", "to" : "Standard.RightArm" },
{ "from": "Vive.LeftArm", "to" : "Standard.LeftArm" },
{ "from": "Vive.TrackedObject00", "to" : "Standard.TrackedObject00" },
{ "from": "Vive.TrackedObject01", "to" : "Standard.TrackedObject01" },

View file

@ -10,6 +10,7 @@ Item {
property int modality: Qt.NonModal
implicitHeight: row.height
implicitWidth: row.width
visible: false
Component.onCompleted: {
stats.parentChanged.connect(fill);

View file

@ -31,7 +31,7 @@ Rectangle {
scaleSlider.notify = false;
scaleSlider.value = Math.round(avatarScale * 10);
scaleSlider.notify = true;;
scaleSlider.notify = true;
if (settings.dominantHand === 'left') {
leftHandRadioButton.checked = true;

View file

@ -0,0 +1,364 @@
//
// ItemUnderTest
// qml/hifi/commerce/marketplaceItemTester
//
// Load items not in the marketplace for testing purposes
//
// Created by Kerry Ivan Kurian on 2018-10-18
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.10
import QtQuick.Controls 2.3
import Hifi 1.0 as Hifi
import "qrc:////qml//styles-uit" as HifiStylesUit
import "qrc:////qml//controls-uit" as HifiControlsUit
Rectangle {
id: root;
color: hifi.colors.baseGray
width: parent.width - 16
height: childrenRect.height + itemHeaderContainer.anchors.topMargin + detailsContainer.anchors.topMargin
property var detailsExpanded: false
property var actions: {
"forward": function(resource, assetType, resourceObjectId){
switch(assetType) {
case "application":
Commerce.installApp(resource, true);
break;
case "avatar":
MyAvatar.useFullAvatarURL(resource);
break;
case "content set":
urlHandler.handleUrl("hifi://localhost/0,0,0");
Commerce.replaceContentSet(toUrl(resource), "");
break;
case "entity":
case "wearable":
rezEntity(resource, assetType, resourceObjectId);
break;
default:
print("Marketplace item tester unsupported assetType " + assetType);
}
},
"trash": function(resource, assetType){
if ("application" === assetType) {
Commerce.uninstallApp(resource);
}
sendToScript({
method: "tester_deleteResourceObject",
objectId: resourceListModel.get(index).resourceObjectId});
resourceListModel.remove(index);
}
}
Item {
id: itemHeaderContainer
anchors.top: parent.top
anchors.topMargin: 8
anchors.left: parent.left
anchors.leftMargin: 8
width: parent.width - 16
height: childrenRect.height
Item {
id: itemNameContainer
width: parent.width * 0.5
height: childrenRect.height
HifiStylesUit.RalewaySemiBold {
id: resourceName
height: paintedHeight
width: parent.width
text: {
var match = resource.match(/\/([^/]*)$/);
return match ? match[1] : resource;
}
size: 14
color: hifi.colors.white
wrapMode: Text.WrapAnywhere
}
HifiStylesUit.RalewayRegular {
id: resourceUrl
anchors.top: resourceName.bottom;
anchors.topMargin: 4;
height: paintedHeight
width: parent.width
text: resource
size: 12
color: hifi.colors.faintGray;
wrapMode: Text.WrapAnywhere
}
}
HifiControlsUit.ComboBox {
id: comboBox
anchors.left: itemNameContainer.right
anchors.leftMargin: 4
anchors.verticalCenter: itemNameContainer.verticalCenter
height: 30
width: parent.width * 0.3 - anchors.leftMargin
model: [
"application",
"avatar",
"content set",
"entity",
"wearable",
"unknown"
]
currentIndex: (("entity or wearable" === assetType) ?
model.indexOf("unknown") : model.indexOf(assetType))
Component.onCompleted: {
onCurrentIndexChanged.connect(function() {
assetType = model[currentIndex];
sendToScript({
method: "tester_updateResourceObjectAssetType",
objectId: resourceListModel.get(index)["resourceObjectId"],
assetType: assetType });
});
}
}
Button {
id: actionButton
property var glyphs: {
"application": hifi.glyphs.install,
"avatar": hifi.glyphs.avatar,
"content set": hifi.glyphs.globe,
"entity": hifi.glyphs.wand,
"trash": hifi.glyphs.trash,
"unknown": hifi.glyphs.circleSlash,
"wearable": hifi.glyphs.hat
}
property int color: hifi.buttons.blue;
property int colorScheme: hifi.colorSchemes.dark;
anchors.left: comboBox.right
anchors.leftMargin: 4
anchors.verticalCenter: itemNameContainer.verticalCenter
width: parent.width * 0.10 - anchors.leftMargin
height: width
enabled: comboBox.model[comboBox.currentIndex] !== "unknown"
onClicked: {
if (model.currentlyRecordingResources) {
model.currentlyRecordingResources = false;
} else {
model.resourceAccessEventText = "";
model.currentlyRecordingResources = true;
root.actions["forward"](resource, comboBox.currentText, resourceObjectId);
}
sendToScript({
method: "tester_updateResourceRecordingStatus",
objectId: resourceListModel.get(index).resourceObjectId,
status: model.currentlyRecordingResources
});
}
background: Rectangle {
radius: 4;
gradient: Gradient {
GradientStop {
position: 0.2
color: {
if (!actionButton.enabled) {
hifi.buttons.disabledColorStart[actionButton.colorScheme]
} else if (actionButton.pressed) {
hifi.buttons.pressedColor[actionButton.color]
} else if (actionButton.hovered) {
hifi.buttons.hoveredColor[actionButton.color]
} else {
hifi.buttons.colorStart[actionButton.color]
}
}
}
GradientStop {
position: 1.0
color: {
if (!actionButton.enabled) {
hifi.buttons.disabledColorFinish[actionButton.colorScheme]
} else if (actionButton.pressed) {
hifi.buttons.pressedColor[actionButton.color]
} else if (actionButton.hovered) {
hifi.buttons.hoveredColor[actionButton.color]
} else {
hifi.buttons.colorFinish[actionButton.color]
}
}
}
}
}
contentItem: Item {
HifiStylesUit.HiFiGlyphs {
id: rezIcon;
text: model.currentlyRecordingResources ? hifi.glyphs.scriptStop : actionButton.glyphs[comboBox.model[comboBox.currentIndex]];
anchors.fill: parent
size: 30;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
color: enabled ? hifi.buttons.textColor[actionButton.color]
: hifi.buttons.disabledTextColor[actionButton.colorScheme]
}
}
}
Button {
id: trashButton
property int color: hifi.buttons.red;
property int colorScheme: hifi.colorSchemes.dark;
anchors.left: actionButton.right
anchors.verticalCenter: itemNameContainer.verticalCenter
anchors.leftMargin: 4
width: parent.width * 0.10 - anchors.leftMargin
height: width
onClicked: {
root.actions["trash"](resource, comboBox.currentText, resourceObjectId);
}
background: Rectangle {
radius: 4;
gradient: Gradient {
GradientStop {
position: 0.2
color: {
if (!trashButton.enabled) {
hifi.buttons.disabledColorStart[trashButton.colorScheme]
} else if (trashButton.pressed) {
hifi.buttons.pressedColor[trashButton.color]
} else if (trashButton.hovered) {
hifi.buttons.hoveredColor[trashButton.color]
} else {
hifi.buttons.colorStart[trashButton.color]
}
}
}
GradientStop {
position: 1.0
color: {
if (!trashButton.enabled) {
hifi.buttons.disabledColorFinish[trashButton.colorScheme]
} else if (trashButton.pressed) {
hifi.buttons.pressedColor[trashButton.color]
} else if (trashButton.hovered) {
hifi.buttons.hoveredColor[trashButton.color]
} else {
hifi.buttons.colorFinish[trashButton.color]
}
}
}
}
}
contentItem: Item {
HifiStylesUit.HiFiGlyphs {
id: trashIcon;
text: hifi.glyphs.trash
anchors.fill: parent
size: 22;
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
color: enabled ? hifi.buttons.textColor[trashButton.color]
: hifi.buttons.disabledTextColor[trashButton.colorScheme]
}
}
}
}
Item {
id: detailsContainer
width: parent.width - 16
height: root.detailsExpanded ? 300 : 26
anchors.top: itemHeaderContainer.bottom
anchors.topMargin: 12
anchors.left: parent.left
anchors.leftMargin: 8
HifiStylesUit.HiFiGlyphs {
id: detailsToggle
anchors.left: parent.left
anchors.leftMargin: -4
anchors.top: parent.top
anchors.topMargin: -2
width: 22
text: root.detailsExpanded ? hifi.glyphs.minimize : hifi.glyphs.maximize
color: hifi.colors.white
size: 22
MouseArea {
anchors.fill: parent
onClicked: root.detailsExpanded = !root.detailsExpanded
}
}
ScrollView {
id: detailsTextContainer
anchors.top: parent.top
anchors.left: detailsToggle.right
anchors.leftMargin: 4
anchors.right: parent.right
height: detailsContainer.height - (root.detailsExpanded ? (copyToClipboardButton.height + copyToClipboardButton.anchors.topMargin) : 0)
clip: true
TextArea {
id: detailsText
readOnly: true
color: hifi.colors.white
text: {
var numUniqueResources = (model.resourceAccessEventText.split("\n").length - 1);
if (root.detailsExpanded && numUniqueResources > 0) {
return model.resourceAccessEventText
} else {
return numUniqueResources.toString() + " unique source/resource url pair" + (numUniqueResources === 1 ? "" : "s") + " recorded"
}
}
font: Qt.font({ family: "Courier", pointSize: 8, weight: Font.Normal })
wrapMode: TextEdit.NoWrap
background: Rectangle {
anchors.fill: parent;
color: hifi.colors.baseGrayShadow;
border.width: 0;
}
}
MouseArea {
anchors.fill: parent
onClicked: {
if (root.detailsExpanded) {
detailsText.selectAll();
} else {
root.detailsExpanded = true;
}
}
}
}
HifiControlsUit.Button {
id: copyToClipboardButton;
visible: root.detailsExpanded
color: hifi.buttons.noneBorderlessWhite
colorScheme: hifi.colorSchemes.dark
anchors.top: detailsTextContainer.bottom
anchors.topMargin: 8
anchors.right: parent.right
width: 160
height: 30
text: "Copy to Clipboard"
onClicked: {
Window.copyToClipboard(detailsText.text);
}
}
}
}

View file

@ -4,21 +4,19 @@
//
// Load items not in the marketplace for testing purposes
//
// Created by Zach Fox on 2018-09-05
// Created by Kerry Ivan Kurian on 2018-09-05
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Controls.Styles 1.4
import QtQuick.Dialogs 1.0
import QtQuick 2.10
import QtQuick.Layouts 1.1
import QtQuick.Controls 2.3
import Hifi 1.0 as Hifi
import "../../../styles-uit" as HifiStylesUit
import "../../../controls-uit" as HifiControlsUit
import "qrc:////qml//styles-uit" as HifiStylesUit
import "qrc:////qml//controls-uit" as HifiControlsUit
@ -27,33 +25,223 @@ Rectangle {
property string installedApps
property var nextResourceObjectId: 0
signal sendToScript(var message)
HifiStylesUit.HifiConstants { id: hifi }
ListModel { id: resourceListModel }
color: hifi.colors.white
color: hifi.colors.darkGray
AnimatedImage {
id: spinner;
source: "spinner.gif"
width: 74;
height: width;
anchors.verticalCenter: parent.verticalCenter;
anchors.horizontalCenter: parent.horizontalCenter;
//
// TITLE BAR START
//
Item {
id: titleBarContainer
// Size
width: root.width
height: 50
// Anchors
anchors.left: parent.left
anchors.top: parent.top
// Title bar text
HifiStylesUit.RalewaySemiBold {
id: titleBarText
text: "Marketplace Item Tester"
// Text size
size: 24
// Anchors
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: parent.left
anchors.leftMargin: 16
width: paintedWidth
// Style
color: hifi.colors.lightGrayText
// Alignment
horizontalAlignment: Text.AlignLeft
verticalAlignment: Text.AlignVCenter
}
// Separator
HifiControlsUit.Separator {
anchors.left: parent.left
anchors.right: parent.right
anchors.bottom: parent.bottom
anchors.bottomMargin: 1
}
}
//
// TITLE BAR END
//
Rectangle {
id: spinner
z: 999
anchors.top: titleBarContainer.bottom
anchors.left: parent.left
anchors.right: parent.right
anchors.bottom: buttonContainer.top
color: hifi.colors.darkGray
AnimatedImage {
source: "spinner.gif"
width: 74
height: width
anchors.centerIn: parent
}
}
Rectangle {
id: instructionsContainer
z: 998
color: hifi.colors.darkGray
visible: resourceListModel.count === 0 && !spinner.visible
anchors.top: titleBarContainer.bottom
anchors.topMargin: 20
anchors.left: parent.left
anchors.leftMargin: 20
anchors.right: parent.right
anchors.rightMargin: 20
anchors.bottom: buttonContainer.top
anchors.bottomMargin: 20
HifiStylesUit.RalewayRegular {
text: "Use Marketplace Item Tester to test out your items before submitting them to the Marketplace." +
"\n\nUse one of the buttons below to load your item."
// Text size
size: 20
// Anchors
anchors.fill: parent
// Style
color: hifi.colors.lightGrayText
wrapMode: Text.Wrap
// Alignment
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
}
}
ListView {
id: itemList
visible: !instructionsContainer.visible
anchors.top: titleBarContainer.bottom
anchors.topMargin: 20
anchors.left: parent.left
anchors.right: parent.right
anchors.bottom: buttonContainer.top
anchors.bottomMargin: 20
ScrollBar.vertical: ScrollBar {
visible: !instructionsContainer.visible
policy: ScrollBar.AlwaysOn
parent: itemList.parent
anchors.top: itemList.top
anchors.right: itemList.right
anchors.bottom: itemList.bottom
width: 16
}
clip: true
model: resourceListModel
spacing: 8
delegate: ItemUnderTest { }
}
Item {
id: buttonContainer
anchors.left: parent.left
anchors.leftMargin: 12
anchors.right: parent.right
anchors.rightMargin: 12
anchors.bottom: parent.bottom
anchors.bottomMargin: 12
height: 40
property string currentAction
property var actions: {
"Load File": function() {
buttonContainer.currentAction = "load file";
Window.browseChanged.connect(onResourceSelected);
Window.browseAsync("Please select a file (*.app.json *.json *.fst *.json.gz)", "", "Assets (*.app.json *.json *.fst *.json.gz)");
},
"Load URL": function() {
buttonContainer.currentAction = "load url";
Window.promptTextChanged.connect(onResourceSelected);
Window.promptAsync("Please enter a URL", "");
}
}
function onResourceSelected(resource) {
// It is possible that we received the present signal
// from something other than our browserAsync window.
// Alas, there is nothing we can do about that so charge
// ahead as though we are sure the present signal is one
// we expect.
print("!!!! resource selected");
switch(currentAction) {
case "load file":
Window.browseChanged.disconnect(onResourceSelected);
break
case "load url":
Window.promptTextChanged.disconnect(onResourceSelected);
break;
}
if (resource) {
print("!!!! building resource object");
var resourceObj = buildResourceObj(resource);
print("!!!! notifying script of resource object");
sendToScript({
method: 'tester_newResourceObject',
resourceObject: resourceObj
});
}
}
HifiControlsUit.Button {
enabled: !spinner.visible
anchors.right: parent.horizontalCenter
anchors.rightMargin: width/4
anchors.verticalCenter: parent.verticalCenter
color: hifi.buttons.blue
fontSize: 20
text: "Load File"
width: parent.width / 3
height: parent.height
onClicked: buttonContainer.actions[text]()
}
HifiControlsUit.Button {
enabled: !spinner.visible
anchors.left: parent.horizontalCenter
anchors.leftMargin: width/4
anchors.verticalCenter: parent.verticalCenter
color: hifi.buttons.blue
fontSize: 20
text: "Load URL"
width: parent.width / 3
height: parent.height
onClicked: buttonContainer.actions[text]()
}
}
function fromScript(message) {
switch (message.method) {
case "newResourceObjectInTest":
var resourceObject = message.resourceObject;
resourceListModel.clear(); // REMOVE THIS once we support specific referrers
resourceListModel.append(resourceObject);
spinner.visible = false;
break;
case "nextObjectIdInTest":
print("!!!! message from script! " + JSON.stringify(message));
nextResourceObjectId = message.id;
spinner.visible = false;
break;
case "resourceRequestEvent":
// When we support multiple items under test simultaneously,
// we'll have to replace "0" with the correct index.
resourceListModel.setProperty(0, "resourceAccessEventText", message.resourceAccessEventText);
break;
}
}
@ -64,227 +252,26 @@ Rectangle {
resource.match(/\.json\.gz$/) ? "content set" :
resource.match(/\.json$/) ? "entity or wearable" :
"unknown");
return { "id": nextResourceObjectId++,
// Uncomment this once we support more than one item in test at the same time
//nextResourceObjectId++;
return { "resourceObjectId": nextResourceObjectId,
"resource": resource,
"assetType": assetType };
}
function installResourceObj(resourceObj) {
if ("application" === resourceObj.assetType) {
Commerce.installApp(resourceObj.resource);
}
}
function addAllInstalledAppsToList() {
var i, apps = Commerce.getInstalledApps().split(","), len = apps.length;
for(i = 0; i < len - 1; ++i) {
if (i in apps) {
resourceListModel.append(buildResourceObj(apps[i]));
}
}
}
function toUrl(resource) {
var httpPattern = /^http/i;
return httpPattern.test(resource) ? resource : "file:///" + resource;
}
function rezEntity(resource, entityType) {
function rezEntity(resource, entityType, resourceObjectId) {
print("!!!! tester_rezClicked");
sendToScript({
method: 'tester_rezClicked',
itemHref: toUrl(resource),
itemType: entityType});
itemType: entityType,
itemId: resourceObjectId });
}
ListView {
anchors.fill: parent
anchors.leftMargin: 12
anchors.bottomMargin: 40
anchors.rightMargin: 12
model: resourceListModel
spacing: 5
interactive: false
delegate: RowLayout {
anchors.left: parent.left
width: parent.width
spacing: 5
property var actions: {
"forward": function(resource, assetType){
switch(assetType) {
case "application":
Commerce.openApp(resource);
break;
case "avatar":
MyAvatar.useFullAvatarURL(resource);
break;
case "content set":
urlHandler.handleUrl("hifi://localhost/0,0,0");
Commerce.replaceContentSet(toUrl(resource), "");
break;
case "entity":
case "wearable":
rezEntity(resource, assetType);
break;
default:
print("Marketplace item tester unsupported assetType " + assetType);
}
},
"trash": function(resource, assetType){
if ("application" === assetType) {
Commerce.uninstallApp(resource);
}
sendToScript({
method: "tester_deleteResourceObject",
objectId: resourceListModel.get(index).id});
resourceListModel.remove(index);
}
}
Column {
Layout.preferredWidth: root.width * .6
spacing: 5
Text {
text: {
var match = resource.match(/\/([^/]*)$/);
return match ? match[1] : resource;
}
font.pointSize: 12
horizontalAlignment: Text.AlignBottom
}
Text {
text: resource
font.pointSize: 8
width: root.width * .6
horizontalAlignment: Text.AlignBottom
wrapMode: Text.WrapAnywhere
}
}
ComboBox {
id: comboBox
Layout.preferredWidth: root.width * .2
model: [
"application",
"avatar",
"content set",
"entity",
"wearable",
"unknown"
]
currentIndex: (("entity or wearable" === assetType) ?
model.indexOf("unknown") : model.indexOf(assetType))
Component.onCompleted: {
onCurrentIndexChanged.connect(function() {
assetType = model[currentIndex];
sendToScript({
method: "tester_updateResourceObjectAssetType",
objectId: resourceListModel.get(index)["id"],
assetType: assetType });
});
}
}
Repeater {
model: [ "forward", "trash" ]
HifiStylesUit.HiFiGlyphs {
property var glyphs: {
"application": hifi.glyphs.install,
"avatar": hifi.glyphs.avatar,
"content set": hifi.glyphs.globe,
"entity": hifi.glyphs.wand,
"trash": hifi.glyphs.trash,
"unknown": hifi.glyphs.circleSlash,
"wearable": hifi.glyphs.hat,
}
text: (("trash" === modelData) ?
glyphs.trash :
glyphs[comboBox.model[comboBox.currentIndex]])
size: ("trash" === modelData) ? 22 : 30
color: hifi.colors.black
horizontalAlignment: Text.AlignHCenter
MouseArea {
anchors.fill: parent
onClicked: {
actions[modelData](resource, comboBox.currentText);
}
}
}
}
}
headerPositioning: ListView.OverlayHeader
header: HifiStylesUit.RalewayRegular {
id: rootHeader
text: "Marketplace Item Tester"
height: 80
width: paintedWidth
size: 22
color: hifi.colors.black
anchors.left: parent.left
anchors.leftMargin: 12
}
footerPositioning: ListView.OverlayFooter
footer: Row {
id: rootActions
spacing: 20
anchors.horizontalCenter: parent.horizontalCenter
property string currentAction
property var actions: {
"Load File": function(){
rootActions.currentAction = "load file";
Window.browseChanged.connect(onResourceSelected);
Window.browseAsync("Please select a file (*.app.json *.json *.fst *.json.gz)", "", "Assets (*.app.json *.json *.fst *.json.gz)");
},
"Load URL": function(){
rootActions.currentAction = "load url";
Window.promptTextChanged.connect(onResourceSelected);
Window.promptAsync("Please enter a URL", "");
}
}
function onResourceSelected(resource) {
// It is possible that we received the present signal
// from something other than our browserAsync window.
// Alas, there is nothing we can do about that so charge
// ahead as though we are sure the present signal is one
// we expect.
switch(currentAction) {
case "load file":
Window.browseChanged.disconnect(onResourceSelected);
break;
case "load url":
Window.promptTextChanged.disconnect(onResourceSelected);
break;
}
if (resource) {
var resourceObj = buildResourceObj(resource);
installResourceObj(resourceObj);
sendToScript({
method: 'tester_newResourceObject',
resourceObject: resourceObj });
}
}
Repeater {
model: [ "Load File", "Load URL" ]
HifiControlsUit.Button {
color: hifi.buttons.blue
fontSize: 20
text: modelData
width: root.width / 3
height: 40
onClicked: actions[text]()
}
}
}
}
signal sendToScript(var message)
}

Binary image file changed: 45 KiB before → 58 KiB after (image not shown).

View file

@ -175,7 +175,7 @@ TabBar {
method: "newEntityButtonClicked",
params: { buttonName: "newParticleButton" }
});
editTabView.currentIndex = 4
editTabView.currentIndex = 2
}
}
@ -279,21 +279,6 @@ TabBar {
}
}
EditTabButton {
title: "P"
active: true
enabled: true
property string originalUrl: ""
property Component visualItem: Component {
WebView {
id: particleExplorerWebView
url: Paths.defaultScripts + "/system/particle_explorer/particleExplorer.html"
enabled: true
}
}
}
function fromScript(message) {
switch (message.method) {
case 'selectTab':
@ -326,9 +311,6 @@ TabBar {
case 'grid':
editTabView.currentIndex = 3;
break;
case 'particle':
editTabView.currentIndex = 4;
break;
default:
console.warn('Attempt to switch to invalid tab:', id);
}

View file

@ -18,7 +18,6 @@ TabBar {
readonly property int create: 0
readonly property int properties: 1
readonly property int grid: 2
readonly property int particle: 3
}
readonly property HifiConstants hifi: HifiConstants {}
@ -182,7 +181,7 @@ TabBar {
method: "newEntityButtonClicked",
params: { buttonName: "newParticleButton" }
});
editTabView.currentIndex = tabIndex.particle
editTabView.currentIndex = tabIndex.properties
}
}
@ -271,21 +270,6 @@ TabBar {
}
}
EditTabButton {
title: "P"
active: true
enabled: true
property string originalUrl: ""
property Component visualItem: Component {
WebView {
id: particleExplorerWebView
url: Paths.defaultScripts + "/system/particle_explorer/particleExplorer.html"
enabled: true
}
}
}
function fromScript(message) {
switch (message.method) {
case 'selectTab':
@ -299,7 +283,7 @@ TabBar {
// Changes the current tab based on tab index or title as input
function selectTab(id) {
if (typeof id === 'number') {
if (id >= tabIndex.create && id <= tabIndex.particle) {
if (id >= tabIndex.create && id <= tabIndex.grid) {
editTabView.currentIndex = id;
} else {
console.warn('Attempt to switch to invalid tab:', id);
@ -315,9 +299,6 @@ TabBar {
case 'grid':
editTabView.currentIndex = tabIndex.grid;
break;
case 'particle':
editTabView.currentIndex = tabIndex.particle;
break;
default:
console.warn('Attempt to switch to invalid tab:', id);
}

View file

@ -822,11 +822,44 @@ Flickable {
}
}
Row {
id: outOfRangeDataStrategyRow
anchors.top: viveInDesktop.bottom
anchors.topMargin: 5
anchors.left: openVrConfiguration.left
anchors.leftMargin: leftMargin + 10
spacing: 15
RalewayRegular {
id: outOfRangeDataStrategyLabel
size: 12
text: "Out Of Range Data Strategy:"
color: hifi.colors.lightGrayText
topPadding: 5
}
HifiControls.ComboBox {
id: outOfRangeDataStrategyComboBox
height: 25
width: 100
editable: true
colorScheme: hifi.colorSchemes.dark
model: ["None", "Freeze", "Drop"]
label: ""
onCurrentIndexChanged: {
sendConfigurationSettings();
}
}
}
RalewayBold {
id: viveDesktopText
size: 10
size: 12
text: "Use " + stack.selectedPlugin + " devices in desktop mode"
color: hifi.colors.white
color: hifi.colors.lightGrayText
anchors {
left: viveInDesktop.right
@ -946,6 +979,7 @@ Flickable {
viveInDesktop.checked = desktopMode;
hmdInDesktop.checked = hmdDesktopPosition;
outOfRangeDataStrategyComboBox.currentIndex = outOfRangeDataStrategyComboBox.model.indexOf(settings.outOfRangeDataStrategy);
initializeButtonState();
updateCalibrationText();
@ -1107,7 +1141,8 @@ Flickable {
"armCircumference": armCircumference.realValue,
"shoulderWidth": shoulderWidth.realValue,
"desktopMode": viveInDesktop.checked,
"hmdDesktopTracking": hmdInDesktop.checked
"hmdDesktopTracking": hmdInDesktop.checked,
"outOfRangeDataStrategy": outOfRangeDataStrategyComboBox.model[outOfRangeDataStrategyComboBox.currentIndex]
}
return settingsObject;
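The new out-of-range data strategy round-trips through the settings object as a plain string ("None", "Freeze", or "Drop") taken from the combo box model. A minimal sketch of how a consumer might map that string back to an enum; the enum and helper names here are hypothetical, and the actual plugin-side handling may differ:

```cpp
#include <QString>

// Hypothetical enum and helper mirroring the combo box entries above.
enum class OutOfRangeDataStrategy { None, Freeze, Drop };

OutOfRangeDataStrategy strategyFromString(const QString& name) {
    if (name == "Freeze") { return OutOfRangeDataStrategy::Freeze; }
    if (name == "Drop") { return OutOfRangeDataStrategy::Drop; }
    return OutOfRangeDataStrategy::None;  // fall back to the first combo entry
}
```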

View file

@ -226,6 +226,7 @@
#include "commerce/Ledger.h"
#include "commerce/Wallet.h"
#include "commerce/QmlCommerce.h"
#include "ResourceRequestObserver.h"
#include "webbrowser/WebBrowserSuggestionsEngine.h"
#include <DesktopPreviewProvider.h>
@ -947,6 +948,7 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
DependencyManager::set<WalletScriptingInterface>();
DependencyManager::set<FadeEffect>();
DependencyManager::set<ResourceRequestObserver>();
return previousSessionCrashed;
}
@ -3129,6 +3131,7 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
surfaceContext->setContextProperty("ContextOverlay", DependencyManager::get<ContextOverlayInterface>().data());
surfaceContext->setContextProperty("Wallet", DependencyManager::get<WalletScriptingInterface>().data());
surfaceContext->setContextProperty("HiFiAbout", AboutUtil::getInstance());
surfaceContext->setContextProperty("ResourceRequestObserver", DependencyManager::get<ResourceRequestObserver>().data());
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
surfaceContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get()));
@ -5022,12 +5025,12 @@ void Application::saveSettings() const {
PluginManager::getInstance()->saveSettings();
}
bool Application::importEntities(const QString& urlOrFilename) {
bool Application::importEntities(const QString& urlOrFilename, const bool isObservable, const qint64 callerId) {
bool success = false;
_entityClipboard->withWriteLock([&] {
_entityClipboard->eraseAllOctreeElements();
success = _entityClipboard->readFromURL(urlOrFilename);
success = _entityClipboard->readFromURL(urlOrFilename, isObservable, callerId);
if (success) {
_entityClipboard->reaverageOctreeElements();
}
@ -5812,6 +5815,42 @@ void Application::update(float deltaTime) {
controller::Pose pose = userInputMapper->getPoseState(action);
myAvatar->setControllerPoseInSensorFrame(action, pose.transform(avatarToSensorMatrix));
}
static const std::vector<QString> trackedObjectStringLiterals = {
QStringLiteral("_TrackedObject00"), QStringLiteral("_TrackedObject01"), QStringLiteral("_TrackedObject02"), QStringLiteral("_TrackedObject03"),
QStringLiteral("_TrackedObject04"), QStringLiteral("_TrackedObject05"), QStringLiteral("_TrackedObject06"), QStringLiteral("_TrackedObject07"),
QStringLiteral("_TrackedObject08"), QStringLiteral("_TrackedObject09"), QStringLiteral("_TrackedObject10"), QStringLiteral("_TrackedObject11"),
QStringLiteral("_TrackedObject12"), QStringLiteral("_TrackedObject13"), QStringLiteral("_TrackedObject14"), QStringLiteral("_TrackedObject15")
};
// Controlled by the Developer > Avatar > Show Tracked Objects menu.
if (_showTrackedObjects) {
static const std::vector<controller::Action> trackedObjectActions = {
controller::Action::TRACKED_OBJECT_00, controller::Action::TRACKED_OBJECT_01, controller::Action::TRACKED_OBJECT_02, controller::Action::TRACKED_OBJECT_03,
controller::Action::TRACKED_OBJECT_04, controller::Action::TRACKED_OBJECT_05, controller::Action::TRACKED_OBJECT_06, controller::Action::TRACKED_OBJECT_07,
controller::Action::TRACKED_OBJECT_08, controller::Action::TRACKED_OBJECT_09, controller::Action::TRACKED_OBJECT_10, controller::Action::TRACKED_OBJECT_11,
controller::Action::TRACKED_OBJECT_12, controller::Action::TRACKED_OBJECT_13, controller::Action::TRACKED_OBJECT_14, controller::Action::TRACKED_OBJECT_15
};
int i = 0;
glm::vec4 BLUE(0.0f, 0.0f, 1.0f, 1.0f);
for (auto& action : trackedObjectActions) {
controller::Pose pose = userInputMapper->getPoseState(action);
if (pose.valid) {
glm::vec3 pos = transformPoint(myAvatarMatrix, pose.translation);
glm::quat rot = glmExtractRotation(myAvatarMatrix) * pose.rotation;
DebugDraw::getInstance().addMarker(trackedObjectStringLiterals[i], rot, pos, BLUE);
} else {
DebugDraw::getInstance().removeMarker(trackedObjectStringLiterals[i]);
}
i++;
}
} else if (_prevShowTrackedObjects) {
for (auto& key : trackedObjectStringLiterals) {
DebugDraw::getInstance().removeMarker(key);
}
}
_prevShowTrackedObjects = _showTrackedObjects;
}
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
@ -6811,6 +6850,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
scriptEngine->registerGlobalObject("Wallet", DependencyManager::get<WalletScriptingInterface>().data());
scriptEngine->registerGlobalObject("AddressManager", DependencyManager::get<AddressManager>().data());
scriptEngine->registerGlobalObject("HifiAbout", AboutUtil::getInstance());
scriptEngine->registerGlobalObject("ResourceRequestObserver", DependencyManager::get<ResourceRequestObserver>().data());
qScriptRegisterMetaType(scriptEngine.data(), OverlayIDtoScriptValue, OverlayIDfromScriptValue);
@ -7197,7 +7237,8 @@ void Application::addAssetToWorldFromURL(QString url) {
addAssetToWorldInfo(filename, "Downloading model file " + filename + ".");
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(nullptr, QUrl(url));
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
nullptr, QUrl(url), true, -1, "Application::addAssetToWorldFromURL");
connect(request, &ResourceRequest::finished, this, &Application::addAssetToWorldFromURLRequestFinished);
request->send();
}
@ -8306,6 +8347,10 @@ void Application::setShowBulletConstraintLimits(bool value) {
_physicsEngine->setShowBulletConstraintLimits(value);
}
void Application::setShowTrackedObjects(bool value) {
_showTrackedObjects = value;
}
void Application::startHMDStandBySession() {
_autoSwitchDisplayModeSupportedHMDPlugin->startStandBySession();
}
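The tracked-object debug markers above follow a latch pattern: markers are added (or removed for invalid poses) every frame while the menu option is on, and all of them are cleaned up exactly once on the frame the option turns off, tracked by `_prevShowTrackedObjects`. A simplified sketch of that pattern in isolation, with a hypothetical marker interface standing in for `DebugDraw` (the real code also drops markers for individual poses that become invalid while enabled):

```cpp
#include <string>
#include <vector>

// Hypothetical stand-in for the real DebugDraw singleton.
struct MarkerSink {
    void addMarker(const std::string& key) { /* draw or update the marker */ }
    void removeMarker(const std::string& key) { /* erase the marker */ }
};

// Call once per frame; 'enabled' mirrors the "Show Tracked Objects" menu state.
void updateTrackedObjectMarkers(MarkerSink& sink, const std::vector<std::string>& keys,
                                bool enabled, bool& prevEnabled) {
    if (enabled) {
        for (const auto& key : keys) {
            sink.addMarker(key);      // refresh while the option is on
        }
    } else if (prevEnabled) {
        for (const auto& key : keys) {
            sink.removeMarker(key);   // clean up once when it turns off
        }
    }
    prevEnabled = enabled;
}
```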

View file

@ -342,7 +342,7 @@ public slots:
QVector<EntityItemID> pasteEntities(float x, float y, float z);
bool exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs, const glm::vec3* givenOffset = nullptr);
bool exportEntities(const QString& filename, float x, float y, float z, float scale);
bool importEntities(const QString& url);
bool importEntities(const QString& url, const bool isObservable = true, const qint64 callerId = -1);
void updateThreadPoolCount() const;
void updateSystemTabletMode();
void goToErrorDomainURL(QUrl errorDomainURL);
@ -498,6 +498,8 @@ private slots:
void setShowBulletConstraints(bool value);
void setShowBulletConstraintLimits(bool value);
void setShowTrackedObjects(bool value);
private:
void init();
bool handleKeyEventForFocusedEntityOrOverlay(QEvent* event);
@ -779,5 +781,8 @@ private:
std::atomic<bool> _pendingRenderEvent { true };
bool quitWhenFinished { false };
bool _showTrackedObjects { false };
bool _prevShowTrackedObjects { false };
};
#endif // hifi_Application_h

View file

@ -273,15 +273,71 @@ Menu::Menu() {
// Developer menu ----------------------------------
MenuWrapper* developerMenu = addMenu("Developer", "Developer");
// Developer > Scripting >>>
MenuWrapper* scriptingOptionsMenu = developerMenu->addMenu("Scripting");
// Developer > Scripting > Console...
addActionToQMenuAndActionHash(scriptingOptionsMenu, MenuOption::Console, Qt::CTRL | Qt::ALT | Qt::Key_J,
DependencyManager::get<StandAloneJSConsole>().data(),
SLOT(toggleConsole()),
QAction::NoRole,
UNSPECIFIED_POSITION);
// Developer > Scripting > API Debugger
action = addActionToQMenuAndActionHash(scriptingOptionsMenu, "API Debugger");
connect(action, &QAction::triggered, [] {
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/tools/currentAPI.js");
DependencyManager::get<ScriptEngines>()->loadScript(defaultScriptsLoc.toString());
});
// Developer > Scripting > Entity Script Server Log
auto essLogAction = addActionToQMenuAndActionHash(scriptingOptionsMenu, MenuOption::EntityScriptServerLog, 0,
qApp, SLOT(toggleEntityScriptServerLogDialog()));
{
auto nodeList = DependencyManager::get<NodeList>();
QObject::connect(nodeList.data(), &NodeList::canRezChanged, essLogAction, [essLogAction] {
auto nodeList = DependencyManager::get<NodeList>();
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
});
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
}
// Developer > Scripting > Script Log (HMD friendly)...
addActionToQMenuAndActionHash(scriptingOptionsMenu, "Script Log (HMD friendly)...", Qt::NoButton,
qApp, SLOT(showScriptLogs()));
// Developer > Scripting > Verbose Logging
addCheckableActionToQMenuAndActionHash(scriptingOptionsMenu, MenuOption::VerboseLogging, 0, false,
qApp, SLOT(updateVerboseLogging()));
// Developer > Scripting > Enable Speech Control API
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
auto speechRecognizer = DependencyManager::get<SpeechRecognizer>();
QAction* speechRecognizerAction = addCheckableActionToQMenuAndActionHash(scriptingOptionsMenu, MenuOption::ControlWithSpeech,
Qt::CTRL | Qt::SHIFT | Qt::Key_C,
speechRecognizer->getEnabled(),
speechRecognizer.data(),
SLOT(setEnabled(bool)),
UNSPECIFIED_POSITION);
connect(speechRecognizer.data(), SIGNAL(enabledUpdated(bool)), speechRecognizerAction, SLOT(setChecked(bool)));
#endif
// Developer > UI >>>
MenuWrapper* uiOptionsMenu = developerMenu->addMenu("UI");
action = addCheckableActionToQMenuAndActionHash(uiOptionsMenu, MenuOption::DesktopTabletToToolbar, 0,
qApp->getDesktopTabletBecomesToolbarSetting());
// Developer > UI > Show Overlays
addCheckableActionToQMenuAndActionHash(uiOptionsMenu, MenuOption::Overlays, 0, true);
// Developer > UI > Desktop Tablet Becomes Toolbar
connect(action, &QAction::triggered, [action] {
qApp->setDesktopTabletBecomesToolbarSetting(action->isChecked());
});
// Developer > UI > HMD Tablet Becomes Toolbar
action = addCheckableActionToQMenuAndActionHash(uiOptionsMenu, MenuOption::HMDTabletToToolbar, 0,
qApp->getHmdTabletBecomesToolbarSetting());
connect(action, &QAction::triggered, [action] {
@ -570,6 +626,8 @@ Menu::Menu() {
avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
UNSPECIFIED_POSITION, "Developer");
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowTrackedObjects, 0, false, qApp, SLOT(setShowTrackedObjects(bool)));
// Developer > Hands >>>
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false,
@ -684,10 +742,11 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(pickingOptionsMenu, MenuOption::ForceCoarsePicking, 0, false,
DependencyManager::get<PickManager>().data(), SLOT(setForceCoarsePicking(bool)));
// Developer > Display Crash Options
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::DisplayCrashOptions, 0, true);
// Developer > Crash >>>
MenuWrapper* crashMenu = developerMenu->addMenu("Crash");
// Developer > Crash > Display Crash Options
addCheckableActionToQMenuAndActionHash(crashMenu, MenuOption::DisplayCrashOptions, 0, true);
addActionToQMenuAndActionHash(crashMenu, MenuOption::DeadlockInterface, 0, qApp, SLOT(deadlockApplication()));
addActionToQMenuAndActionHash(crashMenu, MenuOption::UnresponsiveInterface, 0, qApp, SLOT(unresponsiveApplication()));
@ -722,59 +781,15 @@ Menu::Menu() {
action = addActionToQMenuAndActionHash(crashMenu, MenuOption::CrashNewFaultThreaded);
connect(action, &QAction::triggered, qApp, []() { std::thread(crash::newFault).join(); });
// Developer > Stats
// Developer > Show Statistics
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::Stats);
// Developer > Show Animation Statistics
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::AnimStats);
// Settings > Enable Speech Control API
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
auto speechRecognizer = DependencyManager::get<SpeechRecognizer>();
QAction* speechRecognizerAction = addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::ControlWithSpeech,
Qt::CTRL | Qt::SHIFT | Qt::Key_C,
speechRecognizer->getEnabled(),
speechRecognizer.data(),
SLOT(setEnabled(bool)),
UNSPECIFIED_POSITION);
connect(speechRecognizer.data(), SIGNAL(enabledUpdated(bool)), speechRecognizerAction, SLOT(setChecked(bool)));
#endif
// console
addActionToQMenuAndActionHash(developerMenu, MenuOption::Console, Qt::CTRL | Qt::ALT | Qt::Key_J,
DependencyManager::get<StandAloneJSConsole>().data(),
SLOT(toggleConsole()),
QAction::NoRole,
UNSPECIFIED_POSITION);
// Developer > API Debugger
action = addActionToQMenuAndActionHash(developerMenu, "API Debugger");
connect(action, &QAction::triggered, [] {
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/tools/currentAPI.js");
DependencyManager::get<ScriptEngines>()->loadScript(defaultScriptsLoc.toString());
});
// Developer > Log...
// Developer > Log
addActionToQMenuAndActionHash(developerMenu, MenuOption::Log, Qt::CTRL | Qt::SHIFT | Qt::Key_L,
qApp, SLOT(toggleLogDialog()));
auto essLogAction = addActionToQMenuAndActionHash(developerMenu, MenuOption::EntityScriptServerLog, 0,
qApp, SLOT(toggleEntityScriptServerLogDialog()));
{
auto nodeList = DependencyManager::get<NodeList>();
QObject::connect(nodeList.data(), &NodeList::canRezChanged, essLogAction, [essLogAction] {
auto nodeList = DependencyManager::get<NodeList>();
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
});
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
}
addActionToQMenuAndActionHash(developerMenu, "Script Log (HMD friendly)...", Qt::NoButton,
qApp, SLOT(showScriptLogs()));
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::VerboseLogging, 0, false,
qApp, SLOT(updateVerboseLogging()));
// Developer > Show Overlays
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::Overlays, 0, true);
#if 0 /// -------------- REMOVED FOR NOW --------------
addDisabledActionAndSeparator(navigateMenu, "History");

View file

@ -183,6 +183,7 @@ namespace MenuOption {
const QString RunClientScriptTests = "Run Client Script Tests";
const QString RunTimingTests = "Run Timing Tests";
const QString ScriptedMotorControl = "Enable Scripted Motor Control";
const QString ShowTrackedObjects = "Show Tracked Objects";
const QString SendWrongDSConnectVersion = "Send wrong DS connect version";
const QString SendWrongProtocolVersion = "Send wrong protocol version";
const QString SetHomeLocation = "Set Home Location";

View file

@ -53,7 +53,8 @@ void ATPAssetMigrator::loadEntityServerFile() {
auto migrateResources = [=](QUrl migrationURL, QJsonValueRef jsonValue, bool isModelURL) {
auto request =
DependencyManager::get<ResourceManager>()->createResourceRequest(this, migrationURL);
DependencyManager::get<ResourceManager>()->createResourceRequest(
this, migrationURL, true, -1, "ATPAssetMigrator::loadEntityServerFile");
if (request) {
qCDebug(asset_migrator) << "Requesting" << migrationURL << "for ATP asset migration";

View file

@ -135,7 +135,7 @@ bool AvatarActionHold::getTarget(float deltaTimeStep, glm::quat& rotation, glm::
glm::vec3 palmPosition;
glm::quat palmRotation;
bool isTransitingWithAvatar = holdingAvatar->getTransit()->isTransiting();
bool isTransitingWithAvatar = holdingAvatar->getTransit()->isActive();
if (isTransitingWithAvatar != _isTransitingWithAvatar) {
_isTransitingWithAvatar = isTransitingWithAvatar;
auto ownerEntity = _ownerEntity.lock();
@ -424,7 +424,7 @@ bool AvatarActionHold::updateArguments(QVariantMap arguments) {
if (ownerEntity) {
ownerEntity->setDynamicDataDirty(true);
ownerEntity->setDynamicDataNeedsTransmit(true);
ownerEntity->setTransitingWithAvatar(myAvatar->getTransit()->isTransiting());
ownerEntity->setTransitingWithAvatar(myAvatar->getTransit()->isActive());
}
});
}

View file

@ -71,14 +71,12 @@ AvatarManager::AvatarManager(QObject* parent) :
}
});
const float AVATAR_TRANSIT_TRIGGER_DISTANCE = 1.0f;
const int AVATAR_TRANSIT_FRAME_COUNT = 11; // Based on testing
const int AVATAR_TRANSIT_FRAMES_PER_METER = 1; // Based on testing
_transitConfig._totalFrames = AVATAR_TRANSIT_FRAME_COUNT;
_transitConfig._triggerDistance = AVATAR_TRANSIT_TRIGGER_DISTANCE;
_transitConfig._minTriggerDistance = AVATAR_TRANSIT_MIN_TRIGGER_DISTANCE;
_transitConfig._maxTriggerDistance = AVATAR_TRANSIT_MAX_TRIGGER_DISTANCE;
_transitConfig._framesPerMeter = AVATAR_TRANSIT_FRAMES_PER_METER;
_transitConfig._isDistanceBased = true;
_transitConfig._isDistanceBased = AVATAR_TRANSIT_DISTANCE_BASED;
_transitConfig._abortDistance = AVATAR_TRANSIT_ABORT_DISTANCE;
}
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
@ -126,13 +124,39 @@ void AvatarManager::setSpace(workload::SpacePointer& space ) {
_space = space;
}
void AvatarManager::handleTransitAnimations(AvatarTransit::Status status) {
switch (status) {
case AvatarTransit::Status::STARTED:
_myAvatar->getSkeletonModel()->getRig().triggerNetworkAnimation("preTransitAnim");
break;
case AvatarTransit::Status::START_TRANSIT:
_myAvatar->getSkeletonModel()->getRig().triggerNetworkAnimation("transitAnim");
break;
case AvatarTransit::Status::END_TRANSIT:
_myAvatar->getSkeletonModel()->getRig().triggerNetworkAnimation("postTransitAnim");
break;
case AvatarTransit::Status::ENDED:
_myAvatar->getSkeletonModel()->getRig().triggerNetworkAnimation("idleAnim");
break;
case AvatarTransit::Status::PRE_TRANSIT:
break;
case AvatarTransit::Status::POST_TRANSIT:
break;
case AvatarTransit::Status::IDLE:
break;
case AvatarTransit::Status::TRANSITING:
break;
case AvatarTransit::Status::ABORT_TRANSIT:
break;
}
}
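handleTransitAnimations maps each transit status to a named network animation on the rig, with the remaining statuses listed as explicit no-op cases. The same mapping could be expressed as a lookup table; a sketch under the assumption that only the four statuses shown trigger animations (illustrative only, not the project's code):

```cpp
#include <map>
#include <string>

// Hypothetical status enum mirroring AvatarTransit::Status; only the values
// that trigger an animation appear in the table.
enum class TransitStatus { STARTED, START_TRANSIT, END_TRANSIT, ENDED };

const std::map<TransitStatus, std::string> kTransitAnims = {
    { TransitStatus::STARTED,       "preTransitAnim"  },
    { TransitStatus::START_TRANSIT, "transitAnim"     },
    { TransitStatus::END_TRANSIT,   "postTransitAnim" },
    { TransitStatus::ENDED,         "idleAnim"        },
};

// Returns the animation to trigger, or an empty string for statuses with none.
std::string animForStatus(TransitStatus status) {
    auto it = kTransitAnims.find(status);
    return it != kTransitAnims.end() ? it->second : std::string();
}
```

A switch keeps every enum value visible to the compiler, which is presumably why the original code enumerates the no-op cases explicitly.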
void AvatarManager::updateMyAvatar(float deltaTime) {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "AvatarManager::updateMyAvatar()");
AvatarTransit::Status status = _myAvatar->updateTransit(deltaTime, _myAvatar->getNextPosition(), _transitConfig);
bool sendFirstTransitPackage = (status == AvatarTransit::Status::START_TRANSIT);
bool blockTransitData = (status == AvatarTransit::Status::TRANSITING);
AvatarTransit::Status status = _myAvatar->updateTransit(deltaTime, _myAvatar->getNextPosition(), _myAvatar->getSensorToWorldScale(), _transitConfig);
handleTransitAnimations(status);
_myAvatar->update(deltaTime);
render::Transaction transaction;
@ -142,18 +166,13 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
quint64 now = usecTimestampNow();
quint64 dt = now - _lastSendAvatarDataTime;
if (sendFirstTransitPackage || (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused && !blockTransitData)) {
if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused) {
// send head/hand data to the avatar mixer and voxel server
PerformanceTimer perfTimer("send");
if (sendFirstTransitPackage) {
_myAvatar->overrideNextPackagePositionData(_myAvatar->getTransit()->getEndPosition());
}
PerformanceTimer perfTimer("send");
_myAvatar->sendAvatarDataPacket();
_lastSendAvatarDataTime = now;
_myAvatarSendRate.increment();
}
}
@ -267,7 +286,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
if (inView && avatar->hasNewJointData()) {
numAvatarsUpdated++;
}
auto transitStatus = avatar->_transit.update(deltaTime, avatar->_globalPosition, _transitConfig);
auto transitStatus = avatar->_transit.update(deltaTime, avatar->_serverPosition, _transitConfig);
if (avatar->getIsNewAvatar() && (transitStatus == AvatarTransit::Status::START_TRANSIT || transitStatus == AvatarTransit::Status::ABORT_TRANSIT)) {
avatar->_transit.reset();
avatar->setIsNewAvatar(false);

View file

@ -221,6 +221,7 @@ private:
// frequently grabs a read lock on the hash to get a given avatar by ID
void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar,
KillAvatarReason removalReason = KillAvatarReason::NoReason) override;
void handleTransitAnimations(AvatarTransit::Status status);
QVector<AvatarSharedPointer> _avatarsToFade;

View file

@ -26,6 +26,7 @@
#include <AccountManager.h>
#include <AddressManager.h>
#include <AudioClient.h>
#include <ClientTraitsHandler.h>
#include <display-plugins/DisplayPlugin.h>
#include <FSTReader.h>
#include <GeometryUtil.h>
@ -463,10 +464,74 @@ void MyAvatar::reset(bool andRecenter, bool andReload, bool andHead) {
}
}
void MyAvatar::updateSitStandState(float newHeightReading, float dt) {
const float STANDING_HEIGHT_MULTIPLE = 1.2f;
const float SITTING_HEIGHT_MULTIPLE = 0.833f;
const float SITTING_TIMEOUT = 4.0f; // 4 seconds
const float STANDING_TIMEOUT = 0.3333f; // 1/3 second
const float SITTING_UPPER_BOUND = 1.52f;
if (!getIsSitStandStateLocked()) {
if (!getIsAway() && qApp->isHMDMode()) {
if (getIsInSittingState()) {
if (newHeightReading > (STANDING_HEIGHT_MULTIPLE * _tippingPoint)) {
// if we recenter upwards then no longer in sitting state
_sitStandStateTimer += dt;
if (_sitStandStateTimer > STANDING_TIMEOUT) {
_averageUserHeightSensorSpace = newHeightReading;
_tippingPoint = newHeightReading;
setIsInSittingState(false);
}
} else if (newHeightReading < (SITTING_HEIGHT_MULTIPLE * _tippingPoint)) {
// if we are mislabeled as sitting but are actually standing in the real world, this will
// make sure that a real sit is still recognized so we won't be stuck in sitting, unable to change state
_sitStandStateTimer += dt;
if (_sitStandStateTimer > SITTING_TIMEOUT) {
_averageUserHeightSensorSpace = newHeightReading;
_tippingPoint = newHeightReading;
// here we stay in sit state but reset the average height
setIsInSittingState(true);
}
} else {
// sanity check: if the average height is greater than 5 ft they are not sitting (or get off your dangerous barstool, please)
if (_averageUserHeightSensorSpace > SITTING_UPPER_BOUND) {
setIsInSittingState(false);
} else {
// tipping point is average height when sitting.
_tippingPoint = _averageUserHeightSensorSpace;
_sitStandStateTimer = 0.0f;
}
}
} else {
// in the standing state
if (newHeightReading < (SITTING_HEIGHT_MULTIPLE * _tippingPoint)) {
_sitStandStateTimer += dt;
if (_sitStandStateTimer > SITTING_TIMEOUT) {
_averageUserHeightSensorSpace = newHeightReading;
_tippingPoint = newHeightReading;
setIsInSittingState(true);
}
} else {
// use the mode height for the tipping point when we are standing.
_tippingPoint = getCurrentStandingHeight();
_sitStandStateTimer = 0.0f;
}
}
} else {
// if you are away, reset the average and set the state to standing.
_averageUserHeightSensorSpace = _userHeight.get();
_tippingPoint = _userHeight.get();
setIsInSittingState(false);
}
}
}
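updateSitStandState above is a hysteresis state machine: the height reading must stay above 1.2× or below 0.833× the tipping point for a timeout (1/3 s to accept standing, 4 s to accept sitting) before the state flips. A stripped-down sketch of that timing core, using the constants shown above; this is simplified, since the real method also handles away mode, state locking, the 1.52 m sanity bound, and the tipping-point updates:

```cpp
// Minimal hysteresis sketch: returns the new sitting state and updates the
// shared timer. 'tippingPoint' is the reference height captured on the last
// state change.
bool updateSitting(bool sitting, float height, float tippingPoint,
                   float dt, float& timer) {
    const float STANDING_HEIGHT_MULTIPLE = 1.2f;
    const float SITTING_HEIGHT_MULTIPLE = 0.833f;
    const float SITTING_TIMEOUT = 4.0f;      // seconds before we accept "sitting"
    const float STANDING_TIMEOUT = 0.3333f;  // seconds before we accept "standing"

    if (sitting && height > STANDING_HEIGHT_MULTIPLE * tippingPoint) {
        timer += dt;
        if (timer > STANDING_TIMEOUT) { timer = 0.0f; return false; }
    } else if (!sitting && height < SITTING_HEIGHT_MULTIPLE * tippingPoint) {
        timer += dt;
        if (timer > SITTING_TIMEOUT) { timer = 0.0f; return true; }
    } else {
        timer = 0.0f;  // reading is back in the dead band; restart the count
    }
    return sitting;
}
```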
void MyAvatar::update(float deltaTime) {
// update moving average of HMD facing in xz plane.
const float HMD_FACING_TIMESCALE = getRotationRecenterFilterLength();
const float PERCENTAGE_WEIGHT_HEAD_VS_SHOULDERS_AZIMUTH = 0.0f; // 100 percent shoulders
const float COSINE_THIRTY_DEGREES = 0.866f;
const float SQUATTY_TIMEOUT = 30.0f; // 30 seconds
const float HEIGHT_FILTER_COEFFICIENT = 0.01f;
float tau = deltaTime / HMD_FACING_TIMESCALE;
setHipToHandController(computeHandAzimuth());
@ -493,11 +558,36 @@ void MyAvatar::update(float deltaTime) {
_smoothOrientationTimer += deltaTime;
}
float newHeightReading = getControllerPoseInAvatarFrame(controller::Action::HEAD).getTranslation().y;
int newHeightReadingInCentimeters = glm::floor(newHeightReading * CENTIMETERS_PER_METER);
_recentModeReadings.insert(newHeightReadingInCentimeters);
setCurrentStandingHeight(computeStandingHeightMode(getControllerPoseInAvatarFrame(controller::Action::HEAD)));
setAverageHeadRotation(computeAverageHeadRotation(getControllerPoseInAvatarFrame(controller::Action::HEAD)));
controller::Pose newHeightReading = getControllerPoseInSensorFrame(controller::Action::HEAD);
if (newHeightReading.isValid()) {
int newHeightReadingInCentimeters = glm::floor(newHeightReading.getTranslation().y * CENTIMETERS_PER_METER);
_averageUserHeightSensorSpace = lerp(_averageUserHeightSensorSpace, newHeightReading.getTranslation().y, HEIGHT_FILTER_COEFFICIENT);
_recentModeReadings.insert(newHeightReadingInCentimeters);
setCurrentStandingHeight(computeStandingHeightMode(newHeightReading));
setAverageHeadRotation(computeAverageHeadRotation(getControllerPoseInAvatarFrame(controller::Action::HEAD)));
}
// if the spine is straight and the head is below the default position by 5 cm then increment squatty count.
const float SQUAT_THRESHOLD = 0.05f;
glm::vec3 headDefaultPositionAvatarSpace = getAbsoluteDefaultJointTranslationInObjectFrame(getJointIndex("Head"));
glm::quat spine2OrientationAvatarSpace = getAbsoluteJointRotationInObjectFrame(getJointIndex("Spine2"));
glm::vec3 upSpine2 = spine2OrientationAvatarSpace * glm::vec3(0.0f, 1.0f, 0.0f);
if (glm::length(upSpine2) > 0.0f) {
upSpine2 = glm::normalize(upSpine2);
}
float angleSpine2 = glm::dot(upSpine2, glm::vec3(0.0f, 1.0f, 0.0f));
if (getControllerPoseInAvatarFrame(controller::Action::HEAD).getTranslation().y < (headDefaultPositionAvatarSpace.y - SQUAT_THRESHOLD) && (angleSpine2 > COSINE_THIRTY_DEGREES)) {
_squatTimer += deltaTime;
if (_squatTimer > SQUATTY_TIMEOUT) {
_squatTimer = 0.0f;
_follow._squatDetected = true;
}
} else {
_squatTimer = 0.0f;
}
// put update sit stand state counts here
updateSitStandState(newHeightReading.getTranslation().y, deltaTime);
if (_drawAverageFacingEnabled) {
auto sensorHeadPose = getControllerPoseInSensorFrame(controller::Action::HEAD);
@ -968,7 +1058,6 @@ void MyAvatar::updateSensorToWorldMatrix() {
updateJointFromController(controller::Action::RIGHT_HAND, _controllerRightHandMatrixCache);
if (hasSensorToWorldScaleChanged) {
setTransitScale(sensorToWorldScale);
emit sensorToWorldScaleChanged(sensorToWorldScale);
}
@ -3557,12 +3646,9 @@ glm::vec3 MyAvatar::computeCounterBalance() {
glm::vec3 counterBalancedCg = (1.0f / DEFAULT_AVATAR_HIPS_MASS) * counterBalancedForHead;
// find the height of the hips
const float UPPER_LEG_FRACTION = 0.3333f;
glm::vec3 xzDiff((cgHeadMass.position.x - counterBalancedCg.x), 0.0f, (cgHeadMass.position.z - counterBalancedCg.z));
float headMinusHipXz = glm::length(xzDiff);
float headHipDefault = glm::length(tposeHead - tposeHips);
float hipFootDefault = tposeHips.y - tposeRightFoot.y;
float sitSquatThreshold = tposeHips.y - (UPPER_LEG_FRACTION * hipFootDefault);
float hipHeight = 0.0f;
if (headHipDefault > headMinusHipXz) {
hipHeight = sqrtf((headHipDefault * headHipDefault) - (headMinusHipXz * headMinusHipXz));
@ -3574,10 +3660,6 @@ glm::vec3 MyAvatar::computeCounterBalance() {
if (counterBalancedCg.y > (tposeHips.y + 0.05f)) {
// if the height is higher than default hips, clamp to default hips
counterBalancedCg.y = tposeHips.y + 0.05f;
} else if (counterBalancedCg.y < sitSquatThreshold) {
//do a height reset
setResetMode(true);
_follow.activate(FollowHelper::Vertical);
}
return counterBalancedCg;
}
@ -3818,6 +3900,18 @@ bool MyAvatar::getIsInWalkingState() const {
return _isInWalkingState;
}
bool MyAvatar::getIsInSittingState() const {
return _isInSittingState.get();
}
MyAvatar::SitStandModelType MyAvatar::getUserRecenterModel() const {
return _userRecenterModel.get();
}
bool MyAvatar::getIsSitStandStateLocked() const {
return _lockSitStandState.get();
}
float MyAvatar::getWalkSpeed() const {
return _walkSpeed.get() * _walkSpeedScalar;
}
@ -3838,6 +3932,61 @@ void MyAvatar::setIsInWalkingState(bool isWalking) {
_isInWalkingState = isWalking;
}
void MyAvatar::setIsInSittingState(bool isSitting) {
_sitStandStateTimer = 0.0f;
_squatTimer = 0.0f;
// on a height reset we need the count to be more than one, in case the user sits and stands up quickly.
_isInSittingState.set(isSitting);
setResetMode(true);
if (isSitting) {
setCenterOfGravityModelEnabled(false);
} else {
setCenterOfGravityModelEnabled(true);
}
setSitStandStateChange(true);
}
void MyAvatar::setUserRecenterModel(MyAvatar::SitStandModelType modelName) {
_userRecenterModel.set(modelName);
switch (modelName) {
case MyAvatar::SitStandModelType::ForceSit:
setHMDLeanRecenterEnabled(true);
setIsInSittingState(true);
setIsSitStandStateLocked(true);
break;
case MyAvatar::SitStandModelType::ForceStand:
setHMDLeanRecenterEnabled(true);
setIsInSittingState(false);
setIsSitStandStateLocked(true);
break;
case MyAvatar::SitStandModelType::Auto:
default:
setHMDLeanRecenterEnabled(true);
setIsInSittingState(false);
setIsSitStandStateLocked(false);
break;
case MyAvatar::SitStandModelType::DisableHMDLean:
setHMDLeanRecenterEnabled(false);
setIsInSittingState(false);
setIsSitStandStateLocked(false);
break;
}
}
void MyAvatar::setIsSitStandStateLocked(bool isLocked) {
_lockSitStandState.set(isLocked);
_sitStandStateTimer = 0.0f;
_squatTimer = 0.0f;
_averageUserHeightSensorSpace = _userHeight.get();
_tippingPoint = _userHeight.get();
if (!isLocked) {
// always start the auto transition mode in standing state.
setIsInSittingState(false);
}
}
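setUserRecenterModel fans a single enum out into three flags (HMD lean recentering, sitting state, and state locking). Summarized as data, per the switch above; the struct and constant names here are hypothetical:

```cpp
// Hypothetical summary of the switch in setUserRecenterModel: each recenter
// model resolves to these three flags.
struct RecenterFlags { bool hmdLeanRecenter; bool sitting; bool locked; };

constexpr RecenterFlags kForceSit       { true,  true,  true  };
constexpr RecenterFlags kForceStand     { true,  false, true  };
constexpr RecenterFlags kAuto           { true,  false, false };  // default
constexpr RecenterFlags kDisableHMDLean { false, false, false };
```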
void MyAvatar::setWalkSpeed(float value) {
_walkSpeed.set(value);
}
@ -3854,6 +4003,14 @@ float MyAvatar::getSprintSpeed() const {
return _sprintSpeed.get();
}
void MyAvatar::setSitStandStateChange(bool stateChanged) {
_sitStandStateChange = stateChanged;
}
float MyAvatar::getSitStandStateChange() const {
return _sitStandStateChange;
}
QVector<QString> MyAvatar::getScriptUrls() {
QVector<QString> scripts = _skeletonModel->isLoaded() ? _skeletonModel->getFBXGeometry().scripts : QVector<QString>();
return scripts;
@ -3997,6 +4154,7 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontal(const MyAvatar& myAvatar,
// x axis of currentBodyMatrix in world space.
glm::vec3 right = glm::normalize(glm::vec3(currentBodyMatrix[0][0], currentBodyMatrix[1][0], currentBodyMatrix[2][0]));
glm::vec3 offset = extractTranslation(desiredBodyMatrix) - extractTranslation(currentBodyMatrix);
controller::Pose currentHeadPose = myAvatar.getControllerPoseInAvatarFrame(controller::Action::HEAD);
float forwardLeanAmount = glm::dot(forward, offset);
float lateralLeanAmount = glm::dot(right, offset);
@ -4005,14 +4163,19 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontal(const MyAvatar& myAvatar,
const float MAX_FORWARD_LEAN = 0.15f;
const float MAX_BACKWARD_LEAN = 0.1f;
if (forwardLeanAmount > 0 && forwardLeanAmount > MAX_FORWARD_LEAN) {
return true;
bool stepDetected = false;
if (myAvatar.getIsInSittingState()) {
if (!withinBaseOfSupport(currentHeadPose)) {
stepDetected = true;
}
} else if (forwardLeanAmount > 0 && forwardLeanAmount > MAX_FORWARD_LEAN) {
stepDetected = true;
} else if (forwardLeanAmount < 0 && forwardLeanAmount < -MAX_BACKWARD_LEAN) {
return true;
stepDetected = true;
} else {
stepDetected = fabs(lateralLeanAmount) > MAX_LATERAL_LEAN;
}
return fabs(lateralLeanAmount) > MAX_LATERAL_LEAN;
return stepDetected;
}
bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) const {
@ -4021,6 +4184,7 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) cons
controller::Pose currentHeadPose = myAvatar.getControllerPoseInAvatarFrame(controller::Action::HEAD);
controller::Pose currentLeftHandPose = myAvatar.getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND);
controller::Pose currentRightHandPose = myAvatar.getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND);
controller::Pose currentHeadSensorPose = myAvatar.getControllerPoseInSensorFrame(controller::Action::HEAD);
bool stepDetected = false;
float myScale = myAvatar.getAvatarScale();
@ -4030,7 +4194,7 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) cons
} else {
if (!withinBaseOfSupport(currentHeadPose) &&
headAngularVelocityBelowThreshold(currentHeadPose) &&
isWithinThresholdHeightMode(currentHeadPose, myAvatar.getCurrentStandingHeight(), myScale) &&
isWithinThresholdHeightMode(currentHeadSensorPose, myAvatar.getCurrentStandingHeight(), myScale) &&
handDirectionMatchesHeadDirection(currentLeftHandPose, currentRightHandPose, currentHeadPose) &&
handAngularVelocityBelowThreshold(currentLeftHandPose, currentRightHandPose) &&
headVelocityGreaterThanThreshold(currentHeadPose) &&
@ -4046,6 +4210,7 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) cons
glm::vec3 currentHeadPosition = currentHeadPose.getTranslation();
float anatomicalHeadToHipsDistance = glm::length(defaultHeadPosition - defaultHipsPosition);
if (!isActive(Horizontal) &&
(!isActive(Vertical)) &&
(glm::length(currentHeadPosition - defaultHipsPosition) > (anatomicalHeadToHipsDistance + (DEFAULT_AVATAR_SPINE_STRETCH_LIMIT * anatomicalHeadToHipsDistance)))) {
myAvatar.setResetMode(true);
stepDetected = true;
@ -4061,10 +4226,32 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) cons
bool MyAvatar::FollowHelper::shouldActivateVertical(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const {
const float CYLINDER_TOP = 0.1f;
const float CYLINDER_BOTTOM = -1.5f;
const float SITTING_BOTTOM = -0.02f;
glm::vec3 offset = extractTranslation(desiredBodyMatrix) - extractTranslation(currentBodyMatrix);
bool returnValue = false;
return (offset.y > CYLINDER_TOP) || (offset.y < CYLINDER_BOTTOM);
if (myAvatar.getSitStandStateChange()) {
returnValue = true;
} else {
if (myAvatar.getIsInSittingState()) {
if (myAvatar.getIsSitStandStateLocked()) {
returnValue = (offset.y > CYLINDER_TOP);
}
if (offset.y < SITTING_BOTTOM) {
// we recenter more easily when in sitting state.
returnValue = true;
}
} else {
// in the standing state
returnValue = (offset.y > CYLINDER_TOP) || (offset.y < CYLINDER_BOTTOM);
// finally check for squats in standing
if (_squatDetected) {
returnValue = true;
}
}
}
return returnValue;
}
void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix,
@ -4085,9 +4272,10 @@ void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat
}
}
} else {
// center of gravity model is not enabled
if (!isActive(Horizontal) && (shouldActivateHorizontal(myAvatar, desiredBodyMatrix, currentBodyMatrix) || hasDriveInput)) {
activate(Horizontal);
if (myAvatar.getEnableStepResetRotation()) {
if (myAvatar.getEnableStepResetRotation() && !myAvatar.getIsInSittingState()) {
activate(Rotation);
myAvatar.setHeadControllerFacingMovingAverage(myAvatar.getHeadControllerFacing());
}
@ -4095,6 +4283,9 @@ void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat
}
if (!isActive(Vertical) && (shouldActivateVertical(myAvatar, desiredBodyMatrix, currentBodyMatrix) || hasDriveInput)) {
activate(Vertical);
if (_squatDetected) {
_squatDetected = false;
}
}
} else {
if (!isActive(Rotation) && getForceActivateRotation()) {
@ -4144,7 +4335,7 @@ void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat
myAvatar.getCharacterController()->setFollowParameters(followWorldPose, getMaxTimeRemaining());
}
glm::mat4 MyAvatar::FollowHelper::postPhysicsUpdate(const MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix) {
glm::mat4 MyAvatar::FollowHelper::postPhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix) {
if (isActive()) {
float dt = myAvatar.getCharacterController()->getFollowTime();
decrementTimeRemaining(dt);
@ -4161,6 +4352,11 @@ glm::mat4 MyAvatar::FollowHelper::postPhysicsUpdate(const MyAvatar& myAvatar, co
glm::mat4 newBodyMat = createMatFromQuatAndPos(sensorAngularDisplacement * glmExtractRotation(currentBodyMatrix),
sensorLinearDisplacement + extractTranslation(currentBodyMatrix));
if (myAvatar.getSitStandStateChange()) {
myAvatar.setSitStandStateChange(false);
deactivate(Vertical);
setTranslation(newBodyMat, extractTranslation(myAvatar.deriveBodyFromHMDSensor()));
}
return newBodyMat;
} else {
return currentBodyMatrix;

View file

@ -21,7 +21,6 @@
#include <AvatarConstants.h>
#include <avatars-renderer/Avatar.h>
#include <avatars-renderer/ScriptAvatar.h>
#include <ClientTraitsHandler.h>
#include <controllers/Pose.h>
#include <controllers/Actions.h>
#include <EntityItem.h>
@ -142,6 +141,8 @@ class MyAvatar : public Avatar {
* @property {number} walkSpeed
* @property {number} walkBackwardSpeed
* @property {number} sprintSpeed
* @property {boolean} isInSittingState
* @property {number} userRecenterModel
*
* @property {Vec3} skeletonOffset - Can be used to apply a translation offset between the avatar's position and the
* registration point of the 3D model.
@ -242,6 +243,9 @@ class MyAvatar : public Avatar {
Q_PROPERTY(float walkSpeed READ getWalkSpeed WRITE setWalkSpeed);
Q_PROPERTY(float walkBackwardSpeed READ getWalkBackwardSpeed WRITE setWalkBackwardSpeed);
Q_PROPERTY(float sprintSpeed READ getSprintSpeed WRITE setSprintSpeed);
Q_PROPERTY(bool isInSittingState READ getIsInSittingState WRITE setIsInSittingState);
Q_PROPERTY(MyAvatar::SitStandModelType userRecenterModel READ getUserRecenterModel WRITE setUserRecenterModel);
Q_PROPERTY(bool isSitStandStateLocked READ getIsSitStandStateLocked WRITE setIsSitStandStateLocked);
const QString DOMINANT_LEFT_HAND = "left";
const QString DOMINANT_RIGHT_HAND = "right";
@ -262,6 +266,15 @@ public:
};
Q_ENUM(DriveKeys)
enum SitStandModelType {
ForceSit = 0,
ForceStand,
Auto,
DisableHMDLean,
NumSitStandTypes
};
Q_ENUM(SitStandModelType)
explicit MyAvatar(QThread* thread);
virtual ~MyAvatar();
@ -1121,12 +1134,21 @@ public:
void setIsInWalkingState(bool isWalking);
bool getIsInWalkingState() const;
void setIsInSittingState(bool isSitting);
bool getIsInSittingState() const;
void setUserRecenterModel(MyAvatar::SitStandModelType modelName);
MyAvatar::SitStandModelType getUserRecenterModel() const;
void setIsSitStandStateLocked(bool isLocked);
bool getIsSitStandStateLocked() const;
void setWalkSpeed(float value);
float getWalkSpeed() const;
void setWalkBackwardSpeed(float value);
float getWalkBackwardSpeed() const;
void setSprintSpeed(float value);
float getSprintSpeed() const;
void setSitStandStateChange(bool stateChanged);
float getSitStandStateChange() const;
void updateSitStandState(float newHeightReading, float dt);
QVector<QString> getScriptUrls();
@ -1532,6 +1554,7 @@ signals:
*/
void disableHandTouchForIDChanged(const QUuid& entityID, bool disable);
private slots:
void leaveDomain();
void updateCollisionCapsuleCache();
@ -1742,7 +1765,7 @@ private:
bool shouldActivateHorizontal(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const;
bool shouldActivateHorizontalCG(MyAvatar& myAvatar) const;
void prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& bodySensorMatrix, const glm::mat4& currentBodyMatrix, bool hasDriveInput);
glm::mat4 postPhysicsUpdate(const MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix);
glm::mat4 postPhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix);
bool getForceActivateRotation() const;
void setForceActivateRotation(bool val);
bool getForceActivateVertical() const;
@ -1751,6 +1774,7 @@ private:
void setForceActivateHorizontal(bool val);
bool getToggleHipsFollowing() const;
void setToggleHipsFollowing(bool followHead);
bool _squatDetected { false };
std::atomic<bool> _forceActivateRotation { false };
std::atomic<bool> _forceActivateVertical { false };
std::atomic<bool> _forceActivateHorizontal { false };
@ -1820,10 +1844,13 @@ private:
std::mutex _pinnedJointsMutex;
std::vector<int> _pinnedJoints;
void updateChildCauterization(SpatiallyNestablePointer object, bool cauterize);
// height of user in sensor space, when standing erect.
ThreadSafeValueCache<float> _userHeight { DEFAULT_AVATAR_HEIGHT };
void updateChildCauterization(SpatiallyNestablePointer object, bool cauterize);
float _averageUserHeightSensorSpace { _userHeight.get() };
bool _sitStandStateChange { false };
ThreadSafeValueCache<bool> _lockSitStandState { false };
// max unscaled forward movement speed
ThreadSafeValueCache<float> _walkSpeed { DEFAULT_AVATAR_MAX_WALKING_SPEED };
@ -1831,6 +1858,11 @@ private:
ThreadSafeValueCache<float> _sprintSpeed { AVATAR_SPRINT_SPEED_SCALAR };
float _walkSpeedScalar { AVATAR_WALK_SPEED_SCALAR };
bool _isInWalkingState { false };
ThreadSafeValueCache<bool> _isInSittingState { false };
ThreadSafeValueCache<MyAvatar::SitStandModelType> _userRecenterModel { MyAvatar::SitStandModelType::Auto };
float _sitStandStateTimer { 0.0f };
float _squatTimer { 0.0f };
float _tippingPoint { _userHeight.get() };
// load avatar scripts once when rig is ready
bool _shouldLoadScripts { false };

View file

@ -36,6 +36,7 @@ Rig::CharacterControllerState convertCharacterControllerState(CharacterControlle
static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
glm::mat4 worldToSensorMat = glm::inverse(myAvatar->getSensorToWorldMatrix());
// check for pinned hips.
auto hipsIndex = myAvatar->getJointIndex("Hips");
if (myAvatar->isJointPinned(hipsIndex)) {
@ -46,7 +47,7 @@ static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
}
glm::mat4 hipsMat;
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying && !(myAvatar->getIsInWalkingState()) && myAvatar->getHMDLeanRecenterEnabled()) {
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying && !(myAvatar->getIsInWalkingState()) && !(myAvatar->getIsInSittingState()) && myAvatar->getHMDLeanRecenterEnabled()) {
// then we use center of gravity model
hipsMat = myAvatar->deriveBodyUsingCgModel();
} else {
@ -199,49 +200,38 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
if (avatarHeadPose.isValid() && !(params.primaryControllerFlags[Rig::PrimaryControllerType_Hips] & (uint8_t)Rig::ControllerFlags::Enabled)) {
bool isFlying = (myAvatar->getCharacterController()->getState() == CharacterController::State::Hover || myAvatar->getCharacterController()->computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS);
if (!_prevHipsValid) {
AnimPose hips = computeHipsInSensorFrame(myAvatar, isFlying);
_prevHips = hips;
}
AnimPose hips = computeHipsInSensorFrame(myAvatar, isFlying);
// timescale in seconds
const float TRANS_HORIZ_TIMESCALE = 0.15f;
const float TRANS_VERT_TIMESCALE = 0.01f; // We want the vertical component of the hips to follow quickly to prevent spine squash/stretch.
const float ROT_TIMESCALE = 0.15f;
const float FLY_IDLE_TRANSITION_TIMESCALE = 0.25f;
float transHorizAlpha, transVertAlpha, rotAlpha;
if (_flyIdleTimer < 0.0f) {
transHorizAlpha = glm::min(deltaTime / TRANS_HORIZ_TIMESCALE, 1.0f);
transVertAlpha = glm::min(deltaTime / TRANS_VERT_TIMESCALE, 1.0f);
rotAlpha = glm::min(deltaTime / ROT_TIMESCALE, 1.0f);
_smoothHipsHelper.setHorizontalTranslationTimescale(TRANS_HORIZ_TIMESCALE);
_smoothHipsHelper.setVerticalTranslationTimescale(TRANS_VERT_TIMESCALE);
_smoothHipsHelper.setRotationTimescale(ROT_TIMESCALE);
} else {
transHorizAlpha = glm::min(deltaTime / FLY_IDLE_TRANSITION_TIMESCALE, 1.0f);
transVertAlpha = glm::min(deltaTime / FLY_IDLE_TRANSITION_TIMESCALE, 1.0f);
rotAlpha = glm::min(deltaTime / FLY_IDLE_TRANSITION_TIMESCALE, 1.0f);
_smoothHipsHelper.setHorizontalTranslationTimescale(FLY_IDLE_TRANSITION_TIMESCALE);
_smoothHipsHelper.setVerticalTranslationTimescale(FLY_IDLE_TRANSITION_TIMESCALE);
_smoothHipsHelper.setRotationTimescale(FLY_IDLE_TRANSITION_TIMESCALE);
}
// smoothly lerp hips, in sensor frame, with different coefficients for horizontal and vertical translation.
float hipsY = hips.trans().y;
hips.trans() = lerp(_prevHips.trans(), hips.trans(), transHorizAlpha);
hips.trans().y = lerp(_prevHips.trans().y, hipsY, transVertAlpha);
hips.rot() = safeLerp(_prevHips.rot(), hips.rot(), rotAlpha);
_prevHips = hips;
_prevHipsValid = true;
AnimPose sensorHips = computeHipsInSensorFrame(myAvatar, isFlying);
if (!_prevIsEstimatingHips) {
_smoothHipsHelper.teleport(sensorHips);
}
sensorHips = _smoothHipsHelper.update(sensorHips, deltaTime);
glm::mat4 invRigMat = glm::inverse(myAvatar->getTransform().getMatrix() * Matrices::Y_180);
AnimPose sensorToRigPose(invRigMat * myAvatar->getSensorToWorldMatrix());
params.primaryControllerPoses[Rig::PrimaryControllerType_Hips] = sensorToRigPose * hips;
params.primaryControllerPoses[Rig::PrimaryControllerType_Hips] = sensorToRigPose * sensorHips;
params.primaryControllerFlags[Rig::PrimaryControllerType_Hips] = (uint8_t)Rig::ControllerFlags::Enabled | (uint8_t)Rig::ControllerFlags::Estimated;
// set spine2 if we have hand controllers
if (myAvatar->getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND).isValid() &&
myAvatar->getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() &&
!(params.primaryControllerFlags[Rig::PrimaryControllerType_Spine2] & (uint8_t)Rig::ControllerFlags::Enabled)) {
myAvatar->getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() &&
!(params.primaryControllerFlags[Rig::PrimaryControllerType_Spine2] & (uint8_t)Rig::ControllerFlags::Enabled)) {
AnimPose currentSpine2Pose;
AnimPose currentHeadPose;
@ -250,6 +240,7 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
bool headExists = _rig.getAbsoluteJointPoseInRigFrame(_rig.indexOfJoint("Head"), currentHeadPose);
bool hipsExists = _rig.getAbsoluteJointPoseInRigFrame(_rig.indexOfJoint("Hips"), currentHipsPose);
if (spine2Exists && headExists && hipsExists) {
AnimPose rigSpaceYaw(myAvatar->getSpine2RotationRigSpace());
glm::vec3 u, v, w;
glm::vec3 fwd = rigSpaceYaw.rot() * glm::vec3(0.0f, 0.0f, 1.0f);
@ -267,8 +258,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
}
}
_prevIsEstimatingHips = true;
} else {
_prevHipsValid = false;
_prevIsEstimatingHips = false;
}
params.isTalking = head->getTimeWithoutTalking() <= 1.5f;
@ -298,7 +290,7 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
auto velocity = myAvatar->getLocalVelocity() / myAvatar->getSensorToWorldScale();
auto position = myAvatar->getLocalPosition();
auto orientation = myAvatar->getLocalOrientation();
_rig.computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
_rig.computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState, myAvatar->getSensorToWorldScale());
// evaluate AnimGraph animation and update jointStates.
Model::updateRig(deltaTime, parentTransform);
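The hip-smoothing change above replaces the per-axis manual lerps with a CriticallyDampedSpringPoseHelper driven by timescales. For reference, a common scalar formulation of critically damped smoothing toward a target; this is a generic sketch, not the project's helper, which presumably applies something similar per translation axis and to rotation:

```cpp
// Classic critically damped spring smoothing (scalar). 'timescale' plays the
// same role as the timescales set on the pose helper above: smaller values
// track the target faster. State is (value, velocity).
void criticallyDampedSmooth(float& value, float& velocity,
                            float target, float timescale, float dt) {
    float omega = 2.0f / timescale;
    float x = omega * dt;
    // Rational approximation of exp(-x), stable for large time steps.
    float decay = 1.0f / (1.0f + x + 0.48f * x * x + 0.235f * x * x * x);
    float change = value - target;
    float temp = (velocity + omega * change) * dt;
    velocity = (velocity - omega * temp) * decay;
    value = target + (change + temp) * decay;
}
```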

View file

@ -10,6 +10,7 @@
#define hifi_MySkeletonModel_h
#include <avatars-renderer/SkeletonModel.h>
#include <AnimUtil.h>
#include "MyAvatar.h"
/// A skeleton loaded from a model.
@ -26,11 +27,12 @@ public:
private:
void updateFingers();
AnimPose _prevHips; // sensor frame
bool _prevHipsValid { false };
CriticallyDampedSpringPoseHelper _smoothHipsHelper; // sensor frame
bool _prevIsFlying { false };
float _flyIdleTimer { 0.0f };
bool _prevIsEstimatingHips { false };
std::map<int, int> _jointRotationFrameOffsetMap;
};

View file

@ -315,7 +315,7 @@ QString QmlCommerce::getInstalledApps(const QString& justInstalledAppID) {
return installedAppsFromMarketplace;
}
bool QmlCommerce::installApp(const QString& itemHref) {
bool QmlCommerce::installApp(const QString& itemHref, const bool& alsoOpenImmediately) {
if (!QDir(_appsPath).exists()) {
if (!QDir().mkdir(_appsPath)) {
qCDebug(commerce) << "Couldn't make _appsPath directory.";
@ -325,7 +325,8 @@ bool QmlCommerce::installApp(const QString& itemHref) {
QUrl appHref(itemHref);
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(this, appHref);
auto request =
DependencyManager::get<ResourceManager>()->createResourceRequest(this, appHref, true, -1, "QmlCommerce::installApp");
if (!request) {
qCDebug(commerce) << "Couldn't create resource request for app.";
@ -357,13 +358,22 @@ bool QmlCommerce::installApp(const QString& itemHref) {
QJsonObject appFileJsonObject = appFileJsonDocument.object();
QString scriptUrl = appFileJsonObject["scriptURL"].toString();
if ((DependencyManager::get<ScriptEngines>()->loadScript(scriptUrl.trimmed())).isNull()) {
qCDebug(commerce) << "Couldn't load script.";
return false;
// Don't try to re-load (install) a script if it's already running
QStringList runningScripts = DependencyManager::get<ScriptEngines>()->getRunningScripts();
if (!runningScripts.contains(scriptUrl)) {
if ((DependencyManager::get<ScriptEngines>()->loadScript(scriptUrl.trimmed())).isNull()) {
qCDebug(commerce) << "Couldn't load script.";
return false;
}
QFileInfo appFileInfo(appFile);
emit appInstalled(appFileInfo.baseName());
}
if (alsoOpenImmediately) {
QmlCommerce::openApp(itemHref);
}
QFileInfo appFileInfo(appFile);
emit appInstalled(appFileInfo.baseName());
return true;
});
request->send();

View file

@ -88,7 +88,7 @@ protected:
Q_INVOKABLE void replaceContentSet(const QString& itemHref, const QString& certificateID);
Q_INVOKABLE QString getInstalledApps(const QString& justInstalledAppID = "");
Q_INVOKABLE bool installApp(const QString& appHref);
Q_INVOKABLE bool installApp(const QString& appHref, const bool& alsoOpenImmediately = false);
Q_INVOKABLE bool uninstallApp(const QString& appHref);
Q_INVOKABLE bool openApp(const QString& appHref);

View file

@ -46,11 +46,17 @@ bool ClipboardScriptingInterface::exportEntities(const QString& filename, float
return retVal;
}
bool ClipboardScriptingInterface::importEntities(const QString& filename) {
bool ClipboardScriptingInterface::importEntities(
const QString& filename,
const bool isObservable,
const qint64 callerId
) {
bool retVal;
BLOCKING_INVOKE_METHOD(qApp, "importEntities",
Q_RETURN_ARG(bool, retVal),
Q_ARG(const QString&, filename));
Q_ARG(const QString&, filename),
Q_ARG(const bool, isObservable),
Q_ARG(const qint64, callerId));
return retVal;
}
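importEntities above marshals the call onto the application thread with BLOCKING_INVOKE_METHOD and waits for the boolean result. Under the hood this is Qt's blocking queued invocation; a minimal sketch of the equivalent raw Qt call, illustrative only, since the project's macro may add assertions or thread checks:

```cpp
#include <QMetaObject>
#include <QObject>
#include <QString>

// 'app' must live on another thread and expose importEntities as a slot or
// Q_INVOKABLE. A BlockingQueuedConnection issued from the target's own thread
// would deadlock, which is why wrappers like BLOCKING_INVOKE_METHOD typically
// guard against that case.
bool importEntitiesBlocking(QObject* app, const QString& filename) {
    bool retVal = false;
    QMetaObject::invokeMethod(app, "importEntities",
                              Qt::BlockingQueuedConnection,
                              Q_RETURN_ARG(bool, retVal),
                              Q_ARG(QString, filename),
                              Q_ARG(bool, true),    // isObservable
                              Q_ARG(qint64, -1));   // callerId
    return retVal;
}
```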

View file

@ -50,9 +50,11 @@ public:
* You can generate a JSON file using {@link Clipboard.exportEntities}.
* @function Clipboard.importEntities
* @param {string} filename Path and name of file to import.
* @param {boolean} [isObservable=true] Whether the ResourceRequestObserver observes this request.
* @param {number} [callerId=-1] Internal ID of the object that caused this import.
* @returns {boolean} <code>true</code> if the import was successful, otherwise <code>false</code>.
*/
Q_INVOKABLE bool importEntities(const QString& filename);
Q_INVOKABLE bool importEntities(const QString& filename, const bool isObservable = true, const qint64 callerId = -1);
/**jsdoc
* Export the entities specified to a JSON file.

View file

@ -259,6 +259,39 @@ void setupPreferences() {
auto preference = new CheckPreference(VR_MOVEMENT, "Show room boundaries while teleporting", getter, setter);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->int {
switch (myAvatar->getUserRecenterModel()) {
case MyAvatar::SitStandModelType::Auto:
default:
return 0;
case MyAvatar::SitStandModelType::ForceSit:
return 1;
case MyAvatar::SitStandModelType::DisableHMDLean:
return 2;
}
};
auto setter = [myAvatar](int value) {
switch (value) {
case 0:
default:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::Auto);
break;
case 1:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::ForceSit);
break;
case 2:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::DisableHMDLean);
break;
}
};
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Auto / Force Sit / Disable Recenter", getter, setter);
QStringList items;
items << "Auto - turns on avatar leaning when standing in real world" << "Seated - disables all avatar leaning while sitting in real world" << "Disabled - allows avatar sitting on the floor [Experimental]";
preference->setHeading("Avatar leaning behavior");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [=]()->float { return myAvatar->getUserHeight(); };
auto setter = [=](float value) { myAvatar->setUserHeight(value); };

View file

@ -59,6 +59,7 @@
#include "raypick/PointerScriptingInterface.h"
#include <display-plugins/CompositorHelper.h>
#include "AboutUtil.h"
#include "ResourceRequestObserver.h"
static int MAX_WINDOW_SIZE = 4096;
static const float METERS_TO_INCHES = 39.3701f;
@ -269,6 +270,7 @@ void Web3DOverlay::setupQmlSurface(bool isTablet) {
_webSurface->getSurfaceContext()->setContextProperty("Window", DependencyManager::get<WindowScriptingInterface>().data());
_webSurface->getSurfaceContext()->setContextProperty("Reticle", qApp->getApplicationCompositor().getReticleInterface());
_webSurface->getSurfaceContext()->setContextProperty("HiFiAbout", AboutUtil::getInstance());
_webSurface->getSurfaceContext()->setContextProperty("ResourceRequestObserver", DependencyManager::get<ResourceRequestObserver>().data());
// Override min fps for tablet UI, for silky smooth scrolling
setMaxFPS(90);

View file

@ -51,6 +51,8 @@ public:
bool getMirrorFlag() const { return _mirrorFlag; }
void setMirrorFlag(bool mirrorFlag) { _mirrorFlag = mirrorFlag; }
float getFrame() const { return _frame; }
void loadURL(const QString& url);
protected:

View file

@ -24,7 +24,7 @@
#include "AnimUtil.h"
static const int MAX_TARGET_MARKERS = 30;
static const float JOINT_CHAIN_INTERP_TIME = 0.25f;
static const float JOINT_CHAIN_INTERP_TIME = 0.5f;
static void lookupJointInfo(const AnimInverseKinematics::JointChainInfo& jointChainInfo,
int indexA, int indexB,
@ -253,11 +253,25 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
if (numLoops == MAX_IK_LOOPS) {
for (size_t i = 0; i < _prevJointChainInfoVec.size(); i++) {
if (_prevJointChainInfoVec[i].timer > 0.0f) {
float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[i].timer) / JOINT_CHAIN_INTERP_TIME;
// ease in expo
alpha = 1.0f - powf(2.0f, -10.0f * alpha);
size_t chainSize = std::min(_prevJointChainInfoVec[i].jointInfoVec.size(), jointChainInfoVec[i].jointInfoVec.size());
for (size_t j = 0; j < chainSize; j++) {
jointChainInfoVec[i].jointInfoVec[j].rot = safeMix(_prevJointChainInfoVec[i].jointInfoVec[j].rot, jointChainInfoVec[i].jointInfoVec[j].rot, alpha);
jointChainInfoVec[i].jointInfoVec[j].trans = lerp(_prevJointChainInfoVec[i].jointInfoVec[j].trans, jointChainInfoVec[i].jointInfoVec[j].trans, alpha);
if (jointChainInfoVec[i].target.getType() != IKTarget::Type::Unknown) {
// if we are interping into an enabled target type, i.e. not off, lerp the rot and the trans.
for (size_t j = 0; j < chainSize; j++) {
jointChainInfoVec[i].jointInfoVec[j].rot = safeMix(_prevJointChainInfoVec[i].jointInfoVec[j].rot, jointChainInfoVec[i].jointInfoVec[j].rot, alpha);
jointChainInfoVec[i].jointInfoVec[j].trans = lerp(_prevJointChainInfoVec[i].jointInfoVec[j].trans, jointChainInfoVec[i].jointInfoVec[j].trans, alpha);
}
} else {
// if we are interping into a disabled target type, keep the rot & trans the same, but lerp the weight down to zero.
jointChainInfoVec[i].target.setType((int)_prevJointChainInfoVec[i].target.getType());
jointChainInfoVec[i].target.setWeight(_prevJointChainInfoVec[i].target.getWeight() * (1.0f - alpha));
jointChainInfoVec[i].jointInfoVec = _prevJointChainInfoVec[i].jointInfoVec;
}
}
}
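The "ease in expo" curve used in this hunk (and again in AnimTwoBoneIK below) maps the linear timer fraction through 1 - 2^(-10 * alpha), so the blend weight rises quickly and then saturates near 1. A minimal standalone sketch of that mapping (the helper name is illustrative, not part of the codebase):

#include <cmath>

float easeInExpoWeight(float alpha) {
    // same expression as the joint-chain and two-bone-IK blends use
    return 1.0f - std::pow(2.0f, -10.0f * alpha);
}
// easeInExpoWeight(0.1f) == 0.5f, easeInExpoWeight(0.5f) ~= 0.969f, easeInExpoWeight(1.0f) ~= 0.999f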

View file

@ -201,14 +201,17 @@ const AnimPoseVec& AnimTwoBoneIK::evaluate(const AnimVariantMap& animVars, const
if (_interpType != InterpType::None) {
_interpAlpha += _interpAlphaVel * dt;
// ease in expo
float easeInAlpha = 1.0f - powf(2.0f, -10.0f * _interpAlpha);
if (_interpAlpha < 1.0f) {
AnimChain interpChain;
if (_interpType == InterpType::SnapshotToUnderPoses) {
interpChain = underChain;
interpChain.blend(_snapshotChain, _interpAlpha);
interpChain.blend(_snapshotChain, easeInAlpha);
} else if (_interpType == InterpType::SnapshotToSolve) {
interpChain = ikChain;
interpChain.blend(_snapshotChain, _interpAlpha);
interpChain.blend(_snapshotChain, easeInAlpha);
}
// copy interpChain into _poses
interpChain.outputRelativePoses(_poses);

View file

@ -38,4 +38,94 @@ AnimPose boneLookAt(const glm::vec3& target, const AnimPose& bone);
// and returns a bodyRot that is also z-forward and y-up
glm::quat computeBodyFacingFromHead(const glm::quat& headRot, const glm::vec3& up);
// Uses an approximation of a critically damped spring to smooth full AnimPoses.
// It provides separate timescales for the horizontal, vertical and rotation components.
// The timescale is roughly how long the spring takes to reach halfway toward its target.
class CriticallyDampedSpringPoseHelper {
public:
CriticallyDampedSpringPoseHelper() : _prevPoseValid(false) {}
void setHorizontalTranslationTimescale(float timescale) {
_horizontalTranslationTimescale = timescale;
}
void setVerticalTranslationTimescale(float timescale) {
_verticalTranslationTimescale = timescale;
}
void setRotationTimescale(float timescale) {
_rotationTimescale = timescale;
}
AnimPose update(const AnimPose& pose, float deltaTime) {
if (!_prevPoseValid) {
_prevPose = pose;
_prevPoseValid = true;
}
const float horizontalTranslationAlpha = glm::min(deltaTime / _horizontalTranslationTimescale, 1.0f);
const float verticalTranslationAlpha = glm::min(deltaTime / _verticalTranslationTimescale, 1.0f);
const float rotationAlpha = glm::min(deltaTime / _rotationTimescale, 1.0f);
const float poseY = pose.trans().y;
AnimPose newPose = _prevPose;
newPose.trans() = lerp(_prevPose.trans(), pose.trans(), horizontalTranslationAlpha);
newPose.trans().y = lerp(_prevPose.trans().y, poseY, verticalTranslationAlpha);
newPose.rot() = safeLerp(_prevPose.rot(), pose.rot(), rotationAlpha);
_prevPose = newPose;
_prevPoseValid = true;
return newPose;
}
void teleport(const AnimPose& pose) {
_prevPoseValid = true;
_prevPose = pose;
}
protected:
AnimPose _prevPose;
float _horizontalTranslationTimescale { 0.15f };
float _verticalTranslationTimescale { 0.15f };
float _rotationTimescale { 0.15f };
bool _prevPoseValid;
};
class SnapshotBlendPoseHelper {
public:
SnapshotBlendPoseHelper() : _snapshotValid(false) {}
void setBlendDuration(float duration) {
_duration = duration;
}
void setSnapshot(const AnimPose& pose) {
_snapshotValid = true;
_snapshotPose = pose;
_timer = _duration;
}
AnimPose update(const AnimPose& targetPose, float deltaTime) {
_timer -= deltaTime;
if (_timer > 0.0f) {
float alpha = (_duration - _timer) / _duration;
// ease in expo
alpha = 1.0f - powf(2.0f, -10.0f * alpha);
AnimPose newPose = targetPose;
newPose.blend(_snapshotPose, alpha);
return newPose;
} else {
return targetPose;
}
}
protected:
AnimPose _snapshotPose;
float _duration { 1.0f };
float _timer { 0.0f };
bool _snapshotValid { false };
};
#endif
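A rough usage sketch for the two helpers above (variable names and the 0.15 s timescale are illustrative; the AnimPose and frame delta time come from the caller):

CriticallyDampedSpringPoseHelper hipsSpring;
hipsSpring.setHorizontalTranslationTimescale(0.15f);
hipsSpring.setVerticalTranslationTimescale(0.15f);
hipsSpring.setRotationTimescale(0.15f);

// each frame: feed the raw sensor-frame pose in, get the smoothed pose out
AnimPose smoothedHips = hipsSpring.update(rawSensorHips, deltaTime);

// on a teleport, snap rather than springing across the discontinuity
hipsSpring.teleport(rawSensorHips);

SnapshotBlendPoseHelper works the other way around: setSnapshot() captures the pose to blend away from, and update() eases from that snapshot toward the current target over the configured duration.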

View file

@ -30,7 +30,9 @@
#include "AnimOverlay.h"
#include "AnimSkeleton.h"
#include "AnimUtil.h"
#include "AvatarConstants.h"
#include "IKTarget.h"
#include "PathUtils.h"
static int nextRigId = 1;
@ -133,6 +135,30 @@ void Rig::overrideAnimation(const QString& url, float fps, bool loop, float firs
_animVars.set("userAnimB", clipNodeEnum == UserAnimState::B);
}
void Rig::triggerNetworkAnimation(const QString& animName) {
_networkVars.set("idleAnim", false);
_networkVars.set("preTransitAnim", false);
_networkVars.set("transitAnim", false);
_networkVars.set("postTransitAnim", false);
_sendNetworkNode = true;
if (animName == "idleAnim") {
_networkVars.set("idleAnim", true);
_networkAnimState.clipNodeEnum = NetworkAnimState::Idle;
_sendNetworkNode = false;
} else if (animName == "preTransitAnim") {
_networkVars.set("preTransitAnim", true);
_networkAnimState.clipNodeEnum = NetworkAnimState::PreTransit;
} else if (animName == "transitAnim") {
_networkVars.set("transitAnim", true);
_networkAnimState.clipNodeEnum = NetworkAnimState::Transit;
} else if (animName == "postTransitAnim") {
_networkVars.set("postTransitAnim", true);
_networkAnimState.clipNodeEnum = NetworkAnimState::PostTransit;
}
}
void Rig::restoreAnimation() {
if (_userAnimState.clipNodeEnum != UserAnimState::None) {
_userAnimState.clipNodeEnum = UserAnimState::None;
@ -144,6 +170,16 @@ void Rig::restoreAnimation() {
}
}
void Rig::restoreNetworkAnimation() {
if (_networkAnimState.clipNodeEnum != NetworkAnimState::Idle) {
_networkAnimState.clipNodeEnum = NetworkAnimState::Idle;
_networkVars.set("idleAnim", true);
_networkVars.set("preTransitAnim", false);
_networkVars.set("transitAnim", false);
_networkVars.set("postTransitAnim", false);
}
}
QStringList Rig::getAnimationRoles() const {
if (_animNode) {
QStringList list;
@ -208,11 +244,17 @@ void Rig::restoreRoleAnimation(const QString& role) {
void Rig::destroyAnimGraph() {
_animSkeleton.reset();
_animLoader.reset();
_networkLoader.reset();
_animNode.reset();
_internalPoseSet._relativePoses.clear();
_internalPoseSet._absolutePoses.clear();
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overrideFlags.clear();
_networkNode.reset();
_networkPoseSet._relativePoses.clear();
_networkPoseSet._absolutePoses.clear();
_networkPoseSet._overridePoses.clear();
_networkPoseSet._overrideFlags.clear();
_numOverrides = 0;
_leftEyeJointChildren.clear();
_rightEyeJointChildren.clear();
@ -229,14 +271,24 @@ void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOff
_internalPoseSet._relativePoses.clear();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
_networkPoseSet._relativePoses.clear();
_networkPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
buildAbsoluteRigPoses(_networkPoseSet._relativePoses, _networkPoseSet._absolutePoses);
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_networkPoseSet._overridePoses.clear();
_networkPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
_networkPoseSet._overrideFlags.clear();
_networkPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_numOverrides = 0;
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
@ -270,6 +322,18 @@ void Rig::reset(const FBXGeometry& geometry) {
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_networkPoseSet._relativePoses.clear();
_networkPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
buildAbsoluteRigPoses(_networkPoseSet._relativePoses, _networkPoseSet._absolutePoses);
_networkPoseSet._overridePoses.clear();
_networkPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
_networkPoseSet._overrideFlags.clear();
_networkPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_numOverrides = 0;
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
@ -629,7 +693,8 @@ bool Rig::getRelativeDefaultJointTranslation(int index, glm::vec3& translationOu
}
}
void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation, CharacterControllerState ccState) {
void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity,
const glm::quat& worldRotation, CharacterControllerState ccState, float sensorToWorldScale) {
glm::vec3 forward = worldRotation * IDENTITY_FORWARD;
glm::vec3 workingVelocity = worldVelocity;
@ -924,9 +989,15 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
}
_animVars.set("isNotInAir", false);
// compute blend based on velocity
const float JUMP_SPEED = 3.5f;
float alpha = glm::clamp(-workingVelocity.y / JUMP_SPEED, -1.0f, 1.0f) + 1.0f;
// We want to preserve the apparent jump height in sensor space.
const float jumpHeight = std::max(sensorToWorldScale * DEFAULT_AVATAR_JUMP_HEIGHT, DEFAULT_AVATAR_MIN_JUMP_HEIGHT);
// convert jump height to an initial jump speed with the given gravity.
const float jumpSpeed = sqrtf(2.0f * -DEFAULT_AVATAR_GRAVITY * jumpHeight);
// compute inAirAlpha blend based on velocity
float alpha = glm::clamp((-workingVelocity.y * sensorToWorldScale) / jumpSpeed, -1.0f, 1.0f) + 1.0f;
_animVars.set("inAirAlpha", alpha);
}
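The conversion above is the standard ballistic relation jumpSpeed = sqrt(2 * g * h). A small standalone check, using an illustrative gravity magnitude and jump height rather than the engine constants:

#include <cmath>
#include <cstdio>

int main() {
    const float gravityMagnitude = 9.8f;   // m/s^2, illustrative
    const float jumpHeight = 0.3f;         // m, illustrative
    float jumpSpeed = std::sqrt(2.0f * gravityMagnitude * jumpHeight);
    std::printf("initial jump speed: %.2f m/s\n", jumpSpeed);  // ~2.42 m/s
    return 0;
}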
@ -1049,26 +1120,56 @@ void Rig::updateAnimations(float deltaTime, const glm::mat4& rootTransform, cons
updateAnimationStateHandlers();
_animVars.setRigToGeometryTransform(_rigToGeometryTransform);
if (_networkNode) {
_networkVars.setRigToGeometryTransform(_rigToGeometryTransform);
}
AnimContext context(_enableDebugDrawIKTargets, _enableDebugDrawIKConstraints, _enableDebugDrawIKChains,
getGeometryToRigTransform(), rigToWorldTransform);
// evaluate the animation
AnimVariantMap triggersOut;
AnimVariantMap networkTriggersOut;
_internalPoseSet._relativePoses = _animNode->evaluate(_animVars, context, deltaTime, triggersOut);
if (_networkNode) {
_networkPoseSet._relativePoses = _networkNode->evaluate(_networkVars, context, deltaTime, networkTriggersOut);
const float NETWORK_ANIMATION_BLEND_FRAMES = 6.0f;
float alpha = 1.0f;
std::shared_ptr<AnimClip> clip;
if (_networkAnimState.clipNodeEnum == NetworkAnimState::PreTransit) {
clip = std::dynamic_pointer_cast<AnimClip>(_networkNode->findByName("preTransitAnim"));
if (clip) {
alpha = (clip->getFrame() - clip->getStartFrame()) / NETWORK_ANIMATION_BLEND_FRAMES;
}
} else if (_networkAnimState.clipNodeEnum == NetworkAnimState::PostTransit) {
clip = std::dynamic_pointer_cast<AnimClip>(_networkNode->findByName("postTransitAnim"));
if (clip) {
alpha = (clip->getEndFrame() - clip->getFrame()) / NETWORK_ANIMATION_BLEND_FRAMES;
}
}
if (_sendNetworkNode) {
for (size_t i = 0; i < _networkPoseSet._relativePoses.size(); i++) {
_networkPoseSet._relativePoses[i].blend(_internalPoseSet._relativePoses[i], (alpha > 1.0f ? 1.0f : alpha));
}
}
}
if ((int)_internalPoseSet._relativePoses.size() != _animSkeleton->getNumJoints()) {
// animations haven't fully loaded yet.
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
}
if ((int)_networkPoseSet._relativePoses.size() != _animSkeleton->getNumJoints()) {
// animations haven't fully loaded yet.
_networkPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
}
_lastAnimVars = _animVars;
_animVars.clearTriggers();
_animVars = triggersOut;
_networkVars.clearTriggers();
_networkVars = networkTriggersOut;
_lastContext = context;
}
applyOverridePoses();
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
buildAbsoluteRigPoses(_networkPoseSet._relativePoses, _networkPoseSet._absolutePoses);
// copy internal poses to external poses
{
QWriteLocker writeLock(&_externalPoseSetLock);
@ -1574,6 +1675,7 @@ glm::vec3 Rig::calculateKneePoleVector(int footJointIndex, int kneeIndex, int up
void Rig::updateFromControllerParameters(const ControllerParameters& params, float dt) {
if (!_animSkeleton || !_animNode) {
_previousControllerParameters = params;
return;
}
@ -1584,7 +1686,9 @@ void Rig::updateFromControllerParameters(const ControllerParameters& params, flo
bool leftHandEnabled = params.primaryControllerFlags[PrimaryControllerType_LeftHand] & (uint8_t)ControllerFlags::Enabled;
bool rightHandEnabled = params.primaryControllerFlags[PrimaryControllerType_RightHand] & (uint8_t)ControllerFlags::Enabled;
bool hipsEnabled = params.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Enabled;
bool prevHipsEnabled = _previousControllerParameters.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Enabled;
bool hipsEstimated = params.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Estimated;
bool prevHipsEstimated = _previousControllerParameters.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Estimated;
bool leftFootEnabled = params.primaryControllerFlags[PrimaryControllerType_LeftFoot] & (uint8_t)ControllerFlags::Enabled;
bool rightFootEnabled = params.primaryControllerFlags[PrimaryControllerType_RightFoot] & (uint8_t)ControllerFlags::Enabled;
bool spine2Enabled = params.primaryControllerFlags[PrimaryControllerType_Spine2] & (uint8_t)ControllerFlags::Enabled;
@ -1623,9 +1727,26 @@ void Rig::updateFromControllerParameters(const ControllerParameters& params, flo
}
if (hipsEnabled) {
// Apply a bit of smoothing when the hips toggle between estimated and non-estimated poses.
// This should help smooth out problems with the vive tracker when the sensor is occluded.
if (prevHipsEnabled && hipsEstimated != prevHipsEstimated) {
// blend from a snapshot of the previous hips.
const float HIPS_BLEND_DURATION = 0.5f;
_hipsBlendHelper.setBlendDuration(HIPS_BLEND_DURATION);
_hipsBlendHelper.setSnapshot(_previousControllerParameters.primaryControllerPoses[PrimaryControllerType_Hips]);
} else if (!prevHipsEnabled) {
// we have no sensible value to blend from.
const float HIPS_BLEND_DURATION = 0.0f;
_hipsBlendHelper.setBlendDuration(HIPS_BLEND_DURATION);
_hipsBlendHelper.setSnapshot(params.primaryControllerPoses[PrimaryControllerType_Hips]);
}
AnimPose hips = _hipsBlendHelper.update(params.primaryControllerPoses[PrimaryControllerType_Hips], dt);
_animVars.set("hipsType", (int)IKTarget::Type::RotationAndPosition);
_animVars.set("hipsPosition", params.primaryControllerPoses[PrimaryControllerType_Hips].trans());
_animVars.set("hipsRotation", params.primaryControllerPoses[PrimaryControllerType_Hips].rot());
_animVars.set("hipsPosition", hips.trans());
_animVars.set("hipsRotation", hips.rot());
} else {
_animVars.set("hipsType", (int)IKTarget::Type::Unknown);
}
@ -1665,6 +1786,8 @@ void Rig::updateFromControllerParameters(const ControllerParameters& params, flo
}
}
}
_previousControllerParameters = params;
}
void Rig::initAnimGraph(const QUrl& url) {
@ -1672,11 +1795,14 @@ void Rig::initAnimGraph(const QUrl& url) {
_animGraphURL = url;
_animNode.reset();
_networkNode.reset();
// load the anim graph
_animLoader.reset(new AnimNodeLoader(url));
auto networkUrl = PathUtils::resourcesUrl("avatar/network-animation.json");
_networkLoader.reset(new AnimNodeLoader(networkUrl));
std::weak_ptr<AnimSkeleton> weakSkeletonPtr = _animSkeleton;
connect(_animLoader.get(), &AnimNodeLoader::success, [this, weakSkeletonPtr](AnimNode::Pointer nodeIn) {
connect(_animLoader.get(), &AnimNodeLoader::success, [this, weakSkeletonPtr, url](AnimNode::Pointer nodeIn) {
_animNode = nodeIn;
// abort load if the previous skeleton was deleted.
@ -1703,7 +1829,33 @@ void Rig::initAnimGraph(const QUrl& url) {
emit onLoadComplete();
});
connect(_animLoader.get(), &AnimNodeLoader::error, [url](int error, QString str) {
qCCritical(animation) << "Error loading" << url.toDisplayString() << "code = " << error << "str =" << str;
qCritical(animation) << "Error loading" << url.toDisplayString() << "code = " << error << "str =" << str;
});
connect(_networkLoader.get(), &AnimNodeLoader::success, [this, weakSkeletonPtr, networkUrl](AnimNode::Pointer nodeIn) {
_networkNode = nodeIn;
// abort load if the previous skeleton was deleted.
auto sharedSkeletonPtr = weakSkeletonPtr.lock();
if (!sharedSkeletonPtr) {
return;
}
_networkNode->setSkeleton(sharedSkeletonPtr);
if (_networkAnimState.clipNodeEnum != NetworkAnimState::Idle) {
// restore the network animation we had before the reset.
NetworkAnimState origState = _networkAnimState;
_networkAnimState = { NetworkAnimState::Idle, "", 30.0f, false, 0.0f, 0.0f };
if (origState.clipNodeEnum == NetworkAnimState::PreTransit) {
triggerNetworkAnimation("preTransitAnim");
} else if (origState.clipNodeEnum == NetworkAnimState::Transit) {
triggerNetworkAnimation("transitAnim");
} else if (origState.clipNodeEnum == NetworkAnimState::PostTransit) {
triggerNetworkAnimation("postTransitAnim");
}
}
});
connect(_networkLoader.get(), &AnimNodeLoader::error, [networkUrl](int error, QString str) {
qCritical(animation) << "Error loading" << networkUrl.toDisplayString() << "code = " << error << "str =" << str;
});
}
}
@ -1782,13 +1934,13 @@ void Rig::copyJointsIntoJointData(QVector<JointData>& jointDataVec) const {
if (isIndexValid(i)) {
// rotations are in absolute rig frame.
glm::quat defaultAbsRot = geometryToRigPose.rot() * _animSkeleton->getAbsoluteDefaultPose(i).rot();
data.rotation = _internalPoseSet._absolutePoses[i].rot();
data.rotation = !_sendNetworkNode ? _internalPoseSet._absolutePoses[i].rot() : _networkPoseSet._absolutePoses[i].rot();
data.rotationIsDefaultPose = isEqual(data.rotation, defaultAbsRot);
// translations are in relative frame but scaled so that they are in meters,
// instead of geometry units.
glm::vec3 defaultRelTrans = _geometryOffset.scale() * _animSkeleton->getRelativeDefaultPose(i).trans();
data.translation = _geometryOffset.scale() * _internalPoseSet._relativePoses[i].trans();
data.translation = _geometryOffset.scale() * (!_sendNetworkNode ? _internalPoseSet._relativePoses[i].trans() : _networkPoseSet._relativePoses[i].trans());
data.translationIsDefaultPose = isEqual(data.translation, defaultRelTrans);
} else {
data.translationIsDefaultPose = true;

View file

@ -24,6 +24,7 @@
#include "AnimNode.h"
#include "AnimNodeLoader.h"
#include "SimpleMovingAverage.h"
#include "AnimUtil.h"
class Rig;
class AnimInverseKinematics;
@ -113,7 +114,10 @@ public:
void destroyAnimGraph();
void overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame);
void triggerNetworkAnimation(const QString& animName);
void restoreAnimation();
void restoreNetworkAnimation();
QStringList getAnimationRoles() const;
void overrideRoleAnimation(const QString& role, const QString& url, float fps, bool loop, float firstFrame, float lastFrame);
void restoreRoleAnimation(const QString& role);
@ -172,7 +176,8 @@ public:
AnimPose getJointPose(int jointIndex) const;
// Start or stop animations as needed.
void computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation, CharacterControllerState ccState);
void computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity,
const glm::quat& worldRotation, CharacterControllerState ccState, float sensorToWorldScale);
// Regardless of who started the animations or how many, update the joints.
void updateAnimations(float deltaTime, const glm::mat4& rootTransform, const glm::mat4& rigToWorldTransform);
@ -269,6 +274,7 @@ protected:
// Only accessed by the main thread
PoseSet _internalPoseSet;
PoseSet _networkPoseSet;
// Copy of the _poseSet for external threads.
PoseSet _externalPoseSet;
@ -300,9 +306,12 @@ protected:
QUrl _animGraphURL;
std::shared_ptr<AnimNode> _animNode;
std::shared_ptr<AnimNode> _networkNode;
std::shared_ptr<AnimSkeleton> _animSkeleton;
std::unique_ptr<AnimNodeLoader> _animLoader;
std::unique_ptr<AnimNodeLoader> _networkLoader;
AnimVariantMap _animVars;
AnimVariantMap _networkVars;
enum class RigRole {
Idle = 0,
@ -315,6 +324,25 @@ protected:
RigRole _state { RigRole::Idle };
RigRole _desiredState { RigRole::Idle };
float _desiredStateAge { 0.0f };
struct NetworkAnimState {
enum ClipNodeEnum {
Idle = 0,
PreTransit,
Transit,
PostTransit
};
NetworkAnimState() : clipNodeEnum(NetworkAnimState::Idle) {}
NetworkAnimState(ClipNodeEnum clipNodeEnumIn, const QString& urlIn, float fpsIn, bool loopIn, float firstFrameIn, float lastFrameIn) :
clipNodeEnum(clipNodeEnumIn), url(urlIn), fps(fpsIn), loop(loopIn), firstFrame(firstFrameIn), lastFrame(lastFrameIn) {}
ClipNodeEnum clipNodeEnum;
QString url;
float fps;
bool loop;
float firstFrame;
float lastFrame;
};
struct UserAnimState {
enum ClipNodeEnum {
@ -349,6 +377,7 @@ protected:
};
UserAnimState _userAnimState;
NetworkAnimState _networkAnimState;
std::map<QString, RoleAnimState> _roleAnimStates;
float _leftHandOverlayAlpha { 0.0f };
@ -382,9 +411,13 @@ protected:
int _rigId;
bool _headEnabled { false };
bool _sendNetworkNode { false };
AnimContext _lastContext;
AnimVariantMap _lastAnimVars;
SnapshotBlendPoseHelper _hipsBlendHelper;
ControllerParameters _previousControllerParameters;
};
#endif /* defined(__hifi__Rig__) */

View file

@ -1,6 +1,6 @@
set(TARGET_NAME avatars-renderer)
setup_hifi_library(Network Script)
link_hifi_libraries(shared gpu graphics animation model-networking script-engine render render-utils image trackers entities-renderer)
link_hifi_libraries(shared shaders gpu graphics animation model-networking script-engine render render-utils image trackers entities-renderer)
include_hifi_library_headers(avatars)
include_hifi_library_headers(networking)
include_hifi_library_headers(fbx)

View file

@ -114,27 +114,29 @@ void Avatar::setShowNamesAboveHeads(bool show) {
}
AvatarTransit::Status AvatarTransit::update(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config) {
glm::vec3 currentPosition = _isTransiting ? _currentPosition : avatarPosition;
float oneFrameDistance = glm::length(currentPosition - _lastPosition);
const float MAX_TRANSIT_DISTANCE = 30.0f;
float scaledMaxTransitDistance = MAX_TRANSIT_DISTANCE * _scale;
if (oneFrameDistance > config._triggerDistance && !_isTransiting) {
if (oneFrameDistance < scaledMaxTransitDistance) {
start(deltaTime, _lastPosition, currentPosition, config);
float oneFrameDistance = _isActive ? glm::length(avatarPosition - _endPosition) : glm::length(avatarPosition - _lastPosition);
if (oneFrameDistance > (config._minTriggerDistance * _scale)) {
if (oneFrameDistance < (config._maxTriggerDistance * _scale)) {
start(deltaTime, _lastPosition, avatarPosition, config);
} else {
_lastPosition = currentPosition;
return Status::ABORT_TRANSIT;
_lastPosition = avatarPosition;
_status = Status::ABORT_TRANSIT;
}
}
_lastPosition = currentPosition;
_lastPosition = avatarPosition;
_status = updatePosition(deltaTime);
if (_isActive && oneFrameDistance > (config._abortDistance * _scale) && _status == Status::POST_TRANSIT) {
reset();
_status = Status::ENDED;
}
return _status;
}
void AvatarTransit::reset() {
_lastPosition = _endPosition;
_currentPosition = _endPosition;
_isTransiting = false;
_isActive = false;
}
void AvatarTransit::start(float deltaTime, const glm::vec3& startPosition, const glm::vec3& endPosition, const AvatarTransit::TransitConfig& config) {
_startPosition = startPosition;
@ -143,12 +145,14 @@ void AvatarTransit::start(float deltaTime, const glm::vec3& startPosition, const
_transitLine = endPosition - startPosition;
_totalDistance = glm::length(_transitLine);
_easeType = config._easeType;
const float REFERENCE_FRAMES_PER_SECOND = 30.0f;
_preTransitTime = AVATAR_PRE_TRANSIT_FRAME_COUNT / AVATAR_TRANSIT_FRAMES_PER_SECOND;
_postTransitTime = AVATAR_POST_TRANSIT_FRAME_COUNT / AVATAR_TRANSIT_FRAMES_PER_SECOND;
int transitFrames = (!config._isDistanceBased) ? config._totalFrames : config._framesPerMeter * _totalDistance;
_totalTime = (float)transitFrames / REFERENCE_FRAMES_PER_SECOND;
_currentTime = 0.0f;
_isTransiting = true;
_transitTime = (float)transitFrames / AVATAR_TRANSIT_FRAMES_PER_SECOND;
_totalTime = _transitTime + _preTransitTime + _postTransitTime;
_currentTime = _isActive ? _preTransitTime : 0.0f;
_isActive = true;
}
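With the transit constants introduced later in this commit (30 transit frames per second, 10 pre-transit frames, 27 post-transit frames, 0.5 frames per metre), an illustrative distance-based 20 m transit works out to transitFrames = 0.5 * 20 = 10, so _transitTime ≈ 0.33 s, _preTransitTime ≈ 0.33 s, _postTransitTime = 0.9 s and _totalTime ≈ 1.57 s.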
float AvatarTransit::getEaseValue(AvatarTransit::EaseType type, float value) {
@ -171,31 +175,37 @@ float AvatarTransit::getEaseValue(AvatarTransit::EaseType type, float value) {
AvatarTransit::Status AvatarTransit::updatePosition(float deltaTime) {
Status status = Status::IDLE;
if (_isTransiting) {
if (_isActive) {
float nextTime = _currentTime + deltaTime;
if (nextTime >= _totalTime) {
_currentPosition = _endPosition;
_isTransiting = false;
status = Status::END_TRANSIT;
} else {
if (nextTime < _preTransitTime) {
_currentPosition = _startPosition;
status = Status::PRE_TRANSIT;
if (_currentTime == 0) {
status = Status::STARTED;
}
} else if (nextTime < _totalTime - _postTransitTime){
status = Status::TRANSITING;
if (_currentTime <= _preTransitTime) {
status = Status::START_TRANSIT;
} else {
status = Status::TRANSITING;
float percentageIntoTransit = (nextTime - _preTransitTime) / _transitTime;
_currentPosition = _startPosition + getEaseValue(_easeType, percentageIntoTransit) * _transitLine;
}
} else {
status = Status::POST_TRANSIT;
_currentPosition = _endPosition;
if (nextTime >= _totalTime) {
_isActive = false;
status = Status::ENDED;
} else if (_currentTime < _totalTime - _postTransitTime) {
status = Status::END_TRANSIT;
}
float percentageIntoTransit = nextTime / _totalTime;
_currentPosition = _startPosition + getEaseValue(_easeType, percentageIntoTransit) * _transitLine;
}
_currentTime = nextTime;
}
return status;
}
bool AvatarTransit::getNextPosition(glm::vec3& nextPosition) {
nextPosition = _currentPosition;
return _isTransiting;
}
Avatar::Avatar(QThread* thread) :
_voiceSphereID(GeometryCache::UNKNOWN_ID)
{
@ -536,16 +546,10 @@ void Avatar::relayJointDataToChildren() {
void Avatar::simulate(float deltaTime, bool inView) {
PROFILE_RANGE(simulation, "simulate");
if (_transit.isTransiting()) {
glm::vec3 nextPosition;
if (_transit.getNextPosition(nextPosition)) {
_globalPosition = nextPosition;
_globalPositionChanged = usecTimestampNow();
if (!hasParent()) {
setLocalPosition(nextPosition);
}
}
_globalPosition = _transit.isActive() ? _transit.getCurrentPosition() : _serverPosition;
if (!hasParent()) {
setLocalPosition(_globalPosition);
}
_simulationRate.increment();
@ -558,7 +562,7 @@ void Avatar::simulate(float deltaTime, bool inView) {
PROFILE_RANGE(simulation, "updateJoints");
if (inView) {
Head* head = getHead();
if (_hasNewJointData || _transit.isTransiting()) {
if (_hasNewJointData || _transit.isActive()) {
_skeletonModel->getRig().copyJointsFromJointData(_jointData);
glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
_skeletonModel->getRig().computeExternalPoses(rootTransform);
@ -703,10 +707,10 @@ static TextRenderer3D* textRenderer(TextRendererType type) {
return displayNameRenderer;
}
void Avatar::metaBlendshapeOperator(int blendshapeNumber, const QVector<BlendshapeOffset>& blendshapeOffsets, const QVector<int>& blendedMeshSizes,
const render::ItemIDs& subItemIDs) {
void Avatar::metaBlendshapeOperator(render::ItemID renderItemID, int blendshapeNumber, const QVector<BlendshapeOffset>& blendshapeOffsets,
const QVector<int>& blendedMeshSizes, const render::ItemIDs& subItemIDs) {
render::Transaction transaction;
transaction.updateItem<AvatarData>(_renderItemID, [blendshapeNumber, blendshapeOffsets, blendedMeshSizes,
transaction.updateItem<AvatarData>(renderItemID, [blendshapeNumber, blendshapeOffsets, blendedMeshSizes,
subItemIDs](AvatarData& avatar) {
auto avatarPtr = dynamic_cast<Avatar*>(&avatar);
if (avatarPtr) {
@ -726,7 +730,7 @@ void Avatar::addToScene(AvatarSharedPointer self, const render::ScenePointer& sc
_renderBound = getBounds();
transaction.resetItem(_renderItemID, avatarPayloadPointer);
using namespace std::placeholders;
_skeletonModel->addToScene(scene, transaction, std::bind(&Avatar::metaBlendshapeOperator, this, _1, _2, _3, _4));
_skeletonModel->addToScene(scene, transaction, std::bind(&Avatar::metaBlendshapeOperator, _renderItemID, _1, _2, _3, _4));
_skeletonModel->setTagMask(render::hifi::TAG_ALL_VIEWS);
_skeletonModel->setGroupCulled(true);
_skeletonModel->setCanCastShadow(true);
@ -950,7 +954,7 @@ void Avatar::fixupModelsInScene(const render::ScenePointer& scene) {
if (_skeletonModel->isRenderable() && _skeletonModel->needsFixupInScene()) {
_skeletonModel->removeFromScene(scene, transaction);
using namespace std::placeholders;
_skeletonModel->addToScene(scene, transaction, std::bind(&Avatar::metaBlendshapeOperator, this, _1, _2, _3, _4));
_skeletonModel->addToScene(scene, transaction, std::bind(&Avatar::metaBlendshapeOperator, _renderItemID, _1, _2, _3, _4));
_skeletonModel->setTagMask(render::hifi::TAG_ALL_VIEWS);
_skeletonModel->setGroupCulled(true);
@ -1731,6 +1735,7 @@ void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
glm::vec3 Avatar::getWorldFeetPosition() {
ShapeInfo shapeInfo;
computeShapeInfo(shapeInfo);
glm::vec3 halfExtents = shapeInfo.getHalfExtents(); // x = radius, y = halfHeight
glm::vec3 localFeet(0.0f, shapeInfo.getOffset().y - halfExtents.y - halfExtents.x, 0.0f);
@ -1994,22 +1999,12 @@ float Avatar::getUnscaledEyeHeightFromSkeleton() const {
}
}
AvatarTransit::Status Avatar::updateTransit(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config) {
AvatarTransit::Status Avatar::updateTransit(float deltaTime, const glm::vec3& avatarPosition, float avatarScale, const AvatarTransit::TransitConfig& config) {
std::lock_guard<std::mutex> lock(_transitLock);
_transit.setScale(avatarScale);
return _transit.update(deltaTime, avatarPosition, config);
}
void Avatar::setTransitScale(float scale) {
std::lock_guard<std::mutex> lock(_transitLock);
return _transit.setScale(scale);
}
void Avatar::overrideNextPackagePositionData(const glm::vec3& position) {
std::lock_guard<std::mutex> lock(_transitLock);
_overrideGlobalPosition = true;
_globalPositionOverride = position;
}
void Avatar::addMaterial(graphics::MaterialLayer material, const std::string& parentMaterialName) {
std::lock_guard<std::mutex> lock(_materialsLock);
_materials[parentMaterialName].push(material);

View file

@ -56,9 +56,13 @@ class AvatarTransit {
public:
enum Status {
IDLE = 0,
STARTED,
PRE_TRANSIT,
START_TRANSIT,
TRANSITING,
END_TRANSIT,
POST_TRANSIT,
ENDED,
ABORT_TRANSIT
};
@ -72,20 +76,20 @@ public:
struct TransitConfig {
TransitConfig() {};
int _totalFrames { 0 };
int _framesPerMeter { 0 };
float _framesPerMeter { 0.0f };
bool _isDistanceBased { false };
float _triggerDistance { 0 };
float _minTriggerDistance { 0.0f };
float _maxTriggerDistance { 0.0f };
float _abortDistance{ 0.0f };
EaseType _easeType { EaseType::EASE_OUT };
};
AvatarTransit() {};
Status update(float deltaTime, const glm::vec3& avatarPosition, const TransitConfig& config);
Status getStatus() { return _status; }
bool isTransiting() { return _isTransiting; }
bool isActive() { return _isActive; }
glm::vec3 getCurrentPosition() { return _currentPosition; }
bool getNextPosition(glm::vec3& nextPosition);
glm::vec3 getEndPosition() { return _endPosition; }
float getTransitTime() { return _totalTime; }
void setScale(float scale) { _scale = scale; }
void reset();
@ -93,7 +97,7 @@ private:
Status updatePosition(float deltaTime);
void start(float deltaTime, const glm::vec3& startPosition, const glm::vec3& endPosition, const TransitConfig& config);
float getEaseValue(AvatarTransit::EaseType type, float value);
bool _isTransiting { false };
bool _isActive { false };
glm::vec3 _startPosition;
glm::vec3 _endPosition;
@ -103,7 +107,10 @@ private:
glm::vec3 _transitLine;
float _totalDistance { 0.0f };
float _preTransitTime { 0.0f };
float _totalTime { 0.0f };
float _transitTime { 0.0f };
float _postTransitTime { 0.0f };
float _currentTime { 0.0f };
EaseType _easeType { EaseType::EASE_OUT };
Status _status { Status::IDLE };
@ -431,11 +438,7 @@ public:
virtual scriptable::ScriptableModelBase getScriptableModel() override;
std::shared_ptr<AvatarTransit> getTransit() { return std::make_shared<AvatarTransit>(_transit); };
AvatarTransit::Status updateTransit(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config);
void setTransitScale(float scale);
void overrideNextPackagePositionData(const glm::vec3& position);
AvatarTransit::Status updateTransit(float deltaTime, const glm::vec3& avatarPosition, float avatarScale, const AvatarTransit::TransitConfig& config);
signals:
void targetScaleChanged(float targetScale);
@ -623,8 +626,8 @@ protected:
LoadingStatus _loadingStatus { LoadingStatus::NoModel };
void metaBlendshapeOperator(int blendshapeNumber, const QVector<BlendshapeOffset>& blendshapeOffsets, const QVector<int>& blendedMeshSizes,
const render::ItemIDs& subItemIDs);
static void metaBlendshapeOperator(render::ItemID renderItemID, int blendshapeNumber, const QVector<BlendshapeOffset>& blendshapeOffsets,
const QVector<int>& blendedMeshSizes, const render::ItemIDs& subItemIDs);
};
#endif // hifi_Avatar_h

View file

@ -44,6 +44,7 @@
#include "AvatarLogging.h"
#include "AvatarTraits.h"
#include "ClientTraitsHandler.h"
#include "ResourceRequestObserver.h"
//#define WANT_DEBUG
@ -374,12 +375,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (hasAvatarGlobalPosition) {
auto startSection = destinationBuffer;
if (_overrideGlobalPosition) {
AVATAR_MEMCPY(_globalPositionOverride);
} else {
AVATAR_MEMCPY(_globalPosition);
}
AVATAR_MEMCPY(_globalPosition);
int numBytes = destinationBuffer - startSection;
@ -886,20 +882,22 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
offset = glm::vec3(row * SPACE_BETWEEN_AVATARS, 0.0f, col * SPACE_BETWEEN_AVATARS);
}
auto newValue = glm::vec3(data->globalPosition[0], data->globalPosition[1], data->globalPosition[2]) + offset;
if (_globalPosition != newValue) {
_globalPosition = newValue;
_serverPosition = glm::vec3(data->globalPosition[0], data->globalPosition[1], data->globalPosition[2]) + offset;
auto oneStepDistance = glm::length(_globalPosition - _serverPosition);
if (oneStepDistance <= AVATAR_TRANSIT_MIN_TRIGGER_DISTANCE || oneStepDistance >= AVATAR_TRANSIT_MAX_TRIGGER_DISTANCE) {
_globalPosition = _serverPosition;
// if we don't have a parent, make sure to also set our local position
if (!hasParent()) {
setLocalPosition(_serverPosition);
}
}
if (_globalPosition != _serverPosition) {
_globalPositionChanged = now;
}
sourceBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
int numBytesRead = sourceBuffer - startSection;
_globalPositionRate.increment(numBytesRead);
_globalPositionUpdateRate.increment();
// if we don't have a parent, make sure to also set our local position
if (!hasParent()) {
setLocalPosition(newValue);
}
}
if (hasAvatarBoundingBox) {
@ -2114,10 +2112,6 @@ void AvatarData::sendAvatarDataPacket(bool sendAll) {
}
}
if (_overrideGlobalPosition) {
_overrideGlobalPosition = false;
}
doneEncoding(cullSmallData);
static AvatarDataSequenceNumber sequenceNumber = 0;
@ -2161,11 +2155,21 @@ void AvatarData::updateJointMappings() {
}
if (_skeletonModelURL.fileName().toLower().endsWith(".fst")) {
////
// TODO: Should we rely upon HTTPResourceRequest for ResourceRequestObserver instead?
// HTTPResourceRequest::doSend() covers all of the following and
// then some. It doesn't cover the connect() call, so we may
// want to add a HTTPResourceRequest::doSend() variant that also
// performs the connect.
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest = QNetworkRequest(_skeletonModelURL);
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
DependencyManager::get<ResourceRequestObserver>()->update(
_skeletonModelURL, -1, "AvatarData::updateJointMappings");
QNetworkReply* networkReply = networkAccessManager.get(networkRequest);
//
////
connect(networkReply, &QNetworkReply::finished, this, &AvatarData::setJointMappingsFromNetworkReply);
}
}

View file

@ -327,6 +327,17 @@ const float AVATAR_DISTANCE_LEVEL_5 = 200.0f; // meters
// This is the start location in the Sandbox (xyz: 6270, 211, 6000).
const glm::vec3 START_LOCATION(6270, 211, 6000);
// Avatar Transit Constants
const float AVATAR_TRANSIT_MIN_TRIGGER_DISTANCE = 1.0f;
const float AVATAR_TRANSIT_MAX_TRIGGER_DISTANCE = 30.0f;
const int AVATAR_TRANSIT_FRAME_COUNT = 11;
const float AVATAR_TRANSIT_FRAMES_PER_METER = 0.5f;
const float AVATAR_TRANSIT_ABORT_DISTANCE = 0.1f;
const bool AVATAR_TRANSIT_DISTANCE_BASED = true;
const float AVATAR_TRANSIT_FRAMES_PER_SECOND = 30.0f;
const float AVATAR_PRE_TRANSIT_FRAME_COUNT = 10.0f;
const float AVATAR_POST_TRANSIT_FRAME_COUNT = 27.0f;
enum KeyState {
NO_KEY_DOWN = 0,
INSERT_KEY_DOWN,
@ -1378,8 +1389,7 @@ protected:
// where Entities are located. This is currently only used by the mixer to decide how often to send
// updates about one avatar to another.
glm::vec3 _globalPosition { 0, 0, 0 };
glm::vec3 _globalPositionOverride { 0, 0, 0 };
bool _overrideGlobalPosition { false };
glm::vec3 _serverPosition { 0, 0, 0 };
quint64 _globalPositionChanged { 0 };
quint64 _avatarBoundingBoxChanged { 0 };

View file

@ -31,7 +31,27 @@ ClientTraitsHandler::ClientTraitsHandler(AvatarData* owningAvatar) :
nodeList->getPacketReceiver().registerListener(PacketType::SetAvatarTraits, this, "processTraitOverride");
}
void ClientTraitsHandler::markTraitUpdated(AvatarTraits::TraitType updatedTrait) {
Lock lock(_traitLock);
_traitStatuses[updatedTrait] = Updated;
_hasChangedTraits = true;
}
void ClientTraitsHandler::markInstancedTraitUpdated(AvatarTraits::TraitType traitType, QUuid updatedInstanceID) {
Lock lock(_traitLock);
_traitStatuses.instanceInsert(traitType, updatedInstanceID, Updated);
_hasChangedTraits = true;
}
void ClientTraitsHandler::markInstancedTraitDeleted(AvatarTraits::TraitType traitType, QUuid deleteInstanceID) {
Lock lock(_traitLock);
_traitStatuses.instanceInsert(traitType, deleteInstanceID, Deleted);
_hasChangedTraits = true;
}
void ClientTraitsHandler::resetForNewMixer() {
Lock lock(_traitLock);
// re-set the current version to 0
_currentTraitVersion = AvatarTraits::DEFAULT_TRAIT_VERSION;
@ -46,6 +66,8 @@ void ClientTraitsHandler::resetForNewMixer() {
}
void ClientTraitsHandler::sendChangedTraitsToMixer() {
Lock lock(_traitLock);
if (hasChangedTraits() || _shouldPerformInitialSend) {
// we have at least one changed trait to send
@ -113,6 +135,7 @@ void ClientTraitsHandler::sendChangedTraitsToMixer() {
void ClientTraitsHandler::processTraitOverride(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
if (sendingNode->getType() == NodeType::AvatarMixer) {
Lock lock(_traitLock);
while (message->getBytesLeftToRead()) {
AvatarTraits::TraitType traitType;
message->readPrimitive(&traitType);

View file

@ -26,14 +26,11 @@ public:
void sendChangedTraitsToMixer();
bool hasChangedTraits() { return _hasChangedTraits; }
bool hasChangedTraits() const { return _hasChangedTraits; }
void markTraitUpdated(AvatarTraits::TraitType updatedTrait)
{ _traitStatuses[updatedTrait] = Updated; _hasChangedTraits = true; }
void markInstancedTraitUpdated(AvatarTraits::TraitType traitType, QUuid updatedInstanceID)
{ _traitStatuses.instanceInsert(traitType, updatedInstanceID, Updated); _hasChangedTraits = true; }
void markInstancedTraitDeleted(AvatarTraits::TraitType traitType, QUuid deleteInstanceID)
{ _traitStatuses.instanceInsert(traitType, deleteInstanceID, Deleted); _hasChangedTraits = true; }
void markTraitUpdated(AvatarTraits::TraitType updatedTrait);
void markInstancedTraitUpdated(AvatarTraits::TraitType traitType, QUuid updatedInstanceID);
void markInstancedTraitDeleted(AvatarTraits::TraitType traitType, QUuid deleteInstanceID);
void resetForNewMixer();
@ -41,17 +38,21 @@ public slots:
void processTraitOverride(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
private:
using Mutex = std::recursive_mutex;
using Lock = std::lock_guard<Mutex>;
enum ClientTraitStatus {
Unchanged,
Updated,
Deleted
};
AvatarData* _owningAvatar;
AvatarData* const _owningAvatar;
Mutex _traitLock;
AvatarTraits::AssociatedTraitValues<ClientTraitStatus, Unchanged> _traitStatuses;
AvatarTraits::TraitVersion _currentTraitVersion { AvatarTraits::DEFAULT_TRAIT_VERSION };
AvatarTraits::TraitVersion _currentTraitVersion { AvatarTraits::DEFAULT_TRAIT_VERSION };
AvatarTraits::TraitVersion _currentSkeletonVersion { AvatarTraits::NULL_TRAIT_VERSION };
bool _shouldPerformInitialSend { false };

View file

@ -31,6 +31,7 @@
#include "filters/RotateFilter.h"
#include "filters/LowVelocityFilter.h"
#include "filters/ExponentialSmoothingFilter.h"
#include "filters/AccelerationLimiterFilter.h"
using namespace controller;
@ -51,6 +52,7 @@ REGISTER_FILTER_CLASS_INSTANCE(PostTransformFilter, "postTransform")
REGISTER_FILTER_CLASS_INSTANCE(RotateFilter, "rotate")
REGISTER_FILTER_CLASS_INSTANCE(LowVelocityFilter, "lowVelocity")
REGISTER_FILTER_CLASS_INSTANCE(ExponentialSmoothingFilter, "exponentialSmoothing")
REGISTER_FILTER_CLASS_INSTANCE(AccelerationLimiterFilter, "accelerationLimiter")
const QString JSON_FILTER_TYPE = QStringLiteral("type");
const QString JSON_FILTER_PARAMS = QStringLiteral("params");

View file

@ -0,0 +1,192 @@
//
// Created by Anthony Thibault 2018/11/09
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AccelerationLimiterFilter.h"
#include <QtCore/QJsonObject>
#include <QtCore/QJsonArray>
#include "../../UserInputMapper.h"
#include "../../Input.h"
#include <DependencyManager.h>
#include <QDebug>
#include <StreamUtils.h>
static const QString JSON_ROTATION_ACCELERATION_LIMIT = QStringLiteral("rotationAccelerationLimit");
static const QString JSON_TRANSLATION_ACCELERATION_LIMIT = QStringLiteral("translationAccelerationLimit");
static const QString JSON_TRANSLATION_SNAP_THRESHOLD = QStringLiteral("translationSnapThreshold");
static const QString JSON_ROTATION_SNAP_THRESHOLD = QStringLiteral("rotationSnapThreshold");
static glm::vec3 angularVelFromDeltaRot(const glm::quat& deltaQ, float dt) {
// Measure the angular velocity of a delta rotation quaternion by using quaternion logarithm.
// The logarithm of a unit quaternion returns the axis of rotation with a length of one half the angle of rotation in the imaginary part.
// The real part will be 0. We then multiply it by 2 / dt, turning it into the angular velocity (except for the extra w = 0 part).
glm::quat omegaQ((2.0f / dt) * glm::log(deltaQ));
return glm::vec3(omegaQ.x, omegaQ.y, omegaQ.z);
}
static glm::quat deltaRotFromAngularVel(const glm::vec3& omega, float dt) {
// Convert angular velocity into a delta quaternion by using quaternion exponent.
// The exponent of a quaternion returns a delta rotation around the axis of the imaginary part, by twice the angle determined by the length of that imaginary part.
// It is the inverse of the logarithm step in angularVelFromDeltaRot.
glm::quat omegaQ(0.0f, omega.x, omega.y, omega.z);
return glm::exp((dt / 2.0f) * omegaQ);
}
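A quick round-trip check of the two helpers above (values are illustrative; assumes the same glm quaternion headers this file already relies on):

static void angularVelRoundTripExample() {
    const float dt = 0.1f;
    glm::quat q0(1.0f, 0.0f, 0.0f, 0.0f);                                   // identity
    glm::quat q1 = glm::angleAxis(1.5707963f, glm::vec3(0.0f, 1.0f, 0.0f)); // 90 degrees about +Y
    glm::vec3 omega = angularVelFromDeltaRot(q1 * glm::inverse(q0), dt);    // ~(0, 15.7, 0) rad/s
    glm::quat q1Again = deltaRotFromAngularVel(omega, dt) * q0;             // recovers q1 (up to quaternion sign)
    (void)q1Again;
}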
static glm::vec3 filterTranslation(const glm::vec3& x0, const glm::vec3& x1, const glm::vec3& x2, const glm::vec3& x3,
float dt, float accLimit, float snapThreshold) {
// measure the linear velocities of this step and the previous step
glm::vec3 v1 = (x3 - x1) / (2.0f * dt);
glm::vec3 v0 = (x2 - x0) / (2.0f * dt);
// compute the acceleration
const glm::vec3 a = (v1 - v0) / dt;
// clamp the acceleration if it is over the limit
float aLen = glm::length(a);
// measure the distance to the target; moves within the snap threshold are passed straight through instead of filtered
float distToTarget = glm::length(x3 - x2);
if (aLen > accLimit && distToTarget > snapThreshold) {
// Solve for a new `v1`, such that `a` does not exceed `accLimit`.
// This combines two steps:
// 1) Computing a limited acceleration in the direction of `a`, but with a magnitude of `accLimit`:
//    `newA = a * (accLimit / aLen)`
// 2) Computing the new `v1`:
//    `v1 = newA * dt + v0`
// We combine the scalars from step 1 and step 2 into a single term to avoid multiple scalar-vec3 multiplies.
v1 = a * ((accLimit * dt) / aLen) + v0;
// apply limited v1 to compute filtered x3
return v1 * dt + x2;
} else {
// did not exceed limit, no filtering necessary
return x3;
}
}
static glm::quat filterRotation(const glm::quat& q0In, const glm::quat& q1In, const glm::quat& q2In, const glm::quat& q3In,
float dt, float accLimit, float snapThreshold) {
// ensure quaternions have the same polarity
glm::quat q0 = q0In;
glm::quat q1 = glm::dot(q0In, q1In) < 0.0f ? -q1In : q1In;
glm::quat q2 = glm::dot(q1In, q2In) < 0.0f ? -q2In : q2In;
glm::quat q3 = glm::dot(q2In, q3In) < 0.0f ? -q3In : q3In;
// measure the angular velocities of this step and the previous step
glm::vec3 w1 = angularVelFromDeltaRot(q3 * glm::inverse(q1), 2.0f * dt);
glm::vec3 w0 = angularVelFromDeltaRot(q2 * glm::inverse(q0), 2.0f * dt);
const glm::vec3 a = (w1 - w0) / dt;
float aLen = glm::length(a);
// clamp the acceleration if it is over the limit
float angleToTarget = glm::angle(q3 * glm::inverse(q2));
if (aLen > accLimit && angleToTarget > snapThreshold) {
// solve for a new w1, such that a does not exceed the accLimit
w1 = a * ((accLimit * dt) / aLen) + w0;
// apply limited w1 to compute filtered q3
return deltaRotFromAngularVel(w1, dt) * q2;
} else {
// did not exceed limit, no filtering necessary
return q3;
}
}
namespace controller {
Pose AccelerationLimiterFilter::apply(Pose value) const {
if (value.isValid()) {
// to perform filtering in sensor space, we need to compute the transformations.
auto userInputMapper = DependencyManager::get<UserInputMapper>();
const InputCalibrationData calibrationData = userInputMapper->getInputCalibrationData();
glm::mat4 sensorToAvatarMat = glm::inverse(calibrationData.avatarMat) * calibrationData.sensorToWorldMat;
glm::mat4 avatarToSensorMat = glm::inverse(calibrationData.sensorToWorldMat) * calibrationData.avatarMat;
// transform pose into sensor space.
Pose sensorValue = value.transform(avatarToSensorMat);
if (_prevValid) {
const float DELTA_TIME = 0.01111111f;
glm::vec3 unfilteredTranslation = sensorValue.translation;
sensorValue.translation = filterTranslation(_prevPos[0], _prevPos[1], _prevPos[2], sensorValue.translation,
DELTA_TIME, _translationAccelerationLimit, _translationSnapThreshold);
glm::quat unfilteredRot = sensorValue.rotation;
sensorValue.rotation = filterRotation(_prevRot[0], _prevRot[1], _prevRot[2], sensorValue.rotation,
DELTA_TIME, _rotationAccelerationLimit, _rotationSnapThreshold);
// remember previous values.
_prevPos[0] = _prevPos[1];
_prevPos[1] = _prevPos[2];
_prevPos[2] = sensorValue.translation;
_prevRot[0] = _prevRot[1];
_prevRot[1] = _prevRot[2];
_prevRot[2] = sensorValue.rotation;
_unfilteredPrevPos[0] = _unfilteredPrevPos[1];
_unfilteredPrevPos[1] = _unfilteredPrevPos[2];
_unfilteredPrevPos[2] = unfilteredTranslation;
_unfilteredPrevRot[0] = _unfilteredPrevRot[1];
_unfilteredPrevRot[1] = _unfilteredPrevRot[2];
_unfilteredPrevRot[2] = unfilteredRot;
// transform back into avatar space
return sensorValue.transform(sensorToAvatarMat);
} else {
// initialize previous values with the current sample.
_prevPos[0] = sensorValue.translation;
_prevPos[1] = sensorValue.translation;
_prevPos[2] = sensorValue.translation;
_prevRot[0] = sensorValue.rotation;
_prevRot[1] = sensorValue.rotation;
_prevRot[2] = sensorValue.rotation;
_unfilteredPrevPos[0] = sensorValue.translation;
_unfilteredPrevPos[1] = sensorValue.translation;
_unfilteredPrevPos[2] = sensorValue.translation;
_unfilteredPrevRot[0] = sensorValue.rotation;
_unfilteredPrevRot[1] = sensorValue.rotation;
_unfilteredPrevRot[2] = sensorValue.rotation;
_prevValid = true;
// no previous value to smooth with, so return value unchanged
return value;
}
} else {
// mark previous poses as invalid.
_prevValid = false;
// return invalid value unchanged
return value;
}
}
bool AccelerationLimiterFilter::parseParameters(const QJsonValue& parameters) {
if (parameters.isObject()) {
auto obj = parameters.toObject();
if (obj.contains(JSON_ROTATION_ACCELERATION_LIMIT) && obj.contains(JSON_TRANSLATION_ACCELERATION_LIMIT) &&
obj.contains(JSON_ROTATION_SNAP_THRESHOLD) && obj.contains(JSON_TRANSLATION_SNAP_THRESHOLD)) {
_rotationAccelerationLimit = (float)obj[JSON_ROTATION_ACCELERATION_LIMIT].toDouble();
_translationAccelerationLimit = (float)obj[JSON_TRANSLATION_ACCELERATION_LIMIT].toDouble();
_rotationSnapThreshold = (float)obj[JSON_ROTATION_SNAP_THRESHOLD].toDouble();
_translationSnapThreshold = (float)obj[JSON_TRANSLATION_SNAP_THRESHOLD].toDouble();
return true;
}
}
return false;
}
}
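Given the keys parsed above and the "accelerationLimiter" name registered earlier in this commit, a filter entry in a controller mapping JSON would look roughly like this (the limit and threshold values are purely illustrative):

{
    "type": "accelerationLimiter",
    "params": {
        "rotationAccelerationLimit": 2000.0,
        "translationAccelerationLimit": 100.0,
        "rotationSnapThreshold": 0.25,
        "translationSnapThreshold": 0.01
    }
}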

View file

@ -0,0 +1,41 @@
//
// Created by Anthony Thibault 2018/11/09
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Controllers_Filters_Acceleration_Limiter_h
#define hifi_Controllers_Filters_Acceleration_Limiter_h
#include "../Filter.h"
namespace controller {
class AccelerationLimiterFilter : public Filter {
REGISTER_FILTER_CLASS(AccelerationLimiterFilter);
public:
AccelerationLimiterFilter() {}
float apply(float value) const override { return value; }
Pose apply(Pose value) const override;
bool parseParameters(const QJsonValue& parameters) override;
private:
float _rotationAccelerationLimit { FLT_MAX };
float _translationAccelerationLimit { FLT_MAX };
float _rotationSnapThreshold { 0.0f };
float _translationSnapThreshold { 0.0f };
mutable glm::vec3 _prevPos[3]; // sensor space
mutable glm::quat _prevRot[3]; // sensor space
mutable glm::vec3 _unfilteredPrevPos[3]; // sensor space
mutable glm::quat _unfilteredPrevRot[3]; // sensor space
mutable bool _prevValid { false };
};
}
#endif

View file

@ -35,7 +35,7 @@ namespace controller {
if (_prevSensorValue.isValid()) {
// exponential smoothing filter
sensorValue.translation = _translationConstant * sensorValue.getTranslation() + (1.0f - _translationConstant) * _prevSensorValue.getTranslation();
sensorValue.rotation = safeMix(sensorValue.getRotation(), _prevSensorValue.getRotation(), _rotationConstant);
sensorValue.rotation = safeMix(sensorValue.getRotation(), _prevSensorValue.getRotation(), (1.0f - _rotationConstant));
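// assuming safeMix(a, b, t) blends from a toward b by t, passing (1.0f - _rotationConstant) keeps the same
// convention as the translation line above: a fraction _rotationConstant of the new sample is kept each step.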
// remember previous sensor space value.
_prevSensorValue = sensorValue;

View file

@ -2,11 +2,11 @@ struct TextureData {
ivec2 textureSize;
};
layout(std140, binding=0) uniform textureDataBuffer {
LAYOUT_STD140(binding=0) uniform textureDataBuffer {
TextureData textureData;
};
layout(binding=0) uniform sampler2D colorMap;
LAYOUT(binding=0) uniform sampler2D colorMap;
layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;

View file

@ -363,56 +363,35 @@ void OpenGLDisplayPlugin::customizeContext() {
}
if (!_presentPipeline) {
gpu::StatePointer blendState = gpu::StatePointer(new gpu::State());
blendState->setDepthTest(gpu::State::DepthTest(false));
blendState->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::StatePointer scissorState = gpu::StatePointer(new gpu::State());
scissorState->setDepthTest(gpu::State::DepthTest(false));
scissorState->setScissorEnable(true);
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::drawTexture);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setScissorEnable(true);
_simplePipeline = gpu::Pipeline::create(program, state);
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTexture);
_simplePipeline = gpu::Pipeline::create(program, scissorState);
_hudPipeline = gpu::Pipeline::create(program, blendState);
}
{
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::display_plugins::program::SrgbToLinear);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setScissorEnable(true);
_presentPipeline = gpu::Pipeline::create(program, state);
_presentPipeline = gpu::Pipeline::create(program, scissorState);
}
{
auto vs = gpu::Shader::createVertex(shader::gpu::vertex::DrawUnitQuadTexcoord);
auto ps = gpu::Shader::createPixel(shader::gpu::fragment::DrawTexture);
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_hudPipeline = gpu::Pipeline::create(program, state);
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTextureMirroredX);
_mirrorHUDPipeline = gpu::Pipeline::create(program, blendState);
}
{
auto vs = gpu::Shader::createVertex(shader::gpu::vertex::DrawUnitQuadTexcoord);
auto ps = gpu::Shader::createPixel(shader::gpu::fragment::DrawTextureMirroredX);
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_mirrorHUDPipeline = gpu::Pipeline::create(program, state);
}
{
auto vs = gpu::Shader::createVertex(shader::gpu::vertex::DrawTransformUnitQuad);
auto ps = gpu::Shader::createPixel(shader::gpu::fragment::DrawTexture);
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_cursorPipeline = gpu::Pipeline::create(program, state);
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::gpu::program::DrawTransformedTexture);
_cursorPipeline = gpu::Pipeline::create(program, blendState);
}
}
updateCompositeFramebuffer();
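The rewritten `customizeContext()` builds the blend and scissor `gpu::State` objects once and hands the same `StatePointer` to every pipeline that needs it, and it creates pipelines from registered program enums instead of assembling vertex and pixel shaders by hand. A small sketch of the state-sharing pattern with generic types (not the engine's gpu classes) is:

```cpp
// Sketch of sharing one configured state object across several pipelines,
// mirroring the blendState/scissorState reuse above with generic stand-in types.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct State { bool depthTest = true; bool blend = false; bool scissor = false; };
using StatePointer = std::shared_ptr<State>;

struct Pipeline {
    std::string program;
    StatePointer state;
};

int main() {
    auto blendState = std::make_shared<State>();
    blendState->depthTest = false;
    blendState->blend = true;

    auto scissorState = std::make_shared<State>();
    scissorState->depthTest = false;
    scissorState->scissor = true;

    // Several pipelines reference the same two state objects.
    std::vector<Pipeline> pipelines {
        { "DrawTexture", scissorState },   // simple
        { "DrawTexture", blendState },     // hud
        { "SrgbToLinear", scissorState },  // present
    };

    for (const auto& p : pipelines) {
        std::cout << p.program << " blend=" << p.state->blend
                  << " scissor=" << p.state->scissor << "\n";
    }
    return 0;
}
```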

View file

@ -1,10 +1,10 @@
// OpenGLDisplayPlugin_present.frag
layout(binding = 0) uniform sampler2D colorMap;
LAYOUT(binding=0) uniform sampler2D colorMap;
layout(location = 0) in vec2 varTexCoord0;
layout(location=0) in vec2 varTexCoord0;
layout(location = 0) out vec4 outFragColor;
layout(location=0) out vec4 outFragColor;
float sRGBFloatToLinear(float value) {
const float SRGB_ELBOW = 0.04045;

View file

@ -30,12 +30,14 @@ using namespace render::entities;
// is a half unit sphere. However, the geometry cache renders a UNIT sphere, so we need to scale down.
static const float SPHERE_ENTITY_SCALE = 0.5f;
static_assert(shader::render_utils::program::simple != 0, "Validate simple program exists");
static_assert(shader::render_utils::program::simple_transparent != 0, "Validate simple transparent program exists");
ShapeEntityRenderer::ShapeEntityRenderer(const EntityItemPointer& entity) : Parent(entity) {
_procedural._vertexSource = gpu::Shader::getVertexShaderSource(shader::render_utils::vertex::simple);
// FIXME: Setup proper uniform slots and use correct pipelines for forward rendering
_procedural._opaquefragmentSource = gpu::Shader::getFragmentShaderSource(shader::render_utils::fragment::simple);
_procedural._transparentfragmentSource = gpu::Shader::getFragmentShaderSource(shader::render_utils::fragment::simple_transparent);
_procedural._opaqueFragmentSource = gpu::Shader::Source::get(shader::render_utils::fragment::simple);
_procedural._transparentFragmentSource = gpu::Shader::Source::get(shader::render_utils::fragment::simple_transparent);
_procedural._opaqueState->setCullMode(gpu::State::CULL_NONE);
_procedural._opaqueState->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMaskDrawShape(*_procedural._opaqueState);

View file

@ -15,7 +15,7 @@
<@include DeferredBufferWrite.slh@>
// the albedo texture
layout(binding=0) uniform sampler2D originalTexture;
LAYOUT(binding=0) uniform sampler2D originalTexture;
// the interpolated normal
layout(location=0) in vec3 interpolatedNormal;

View file

@ -18,7 +18,7 @@
<$declareFadeFragment()$>
// the albedo texture
layout(binding=0) uniform sampler2D originalTexture;
LAYOUT(binding=0) uniform sampler2D originalTexture;
// the interpolated normal
layout(location=0) in vec3 interpolatedNormal;
@ -30,7 +30,7 @@ struct PolyLineUniforms {
vec3 color;
};
layout(binding=0) uniform polyLineBuffer {
LAYOUT(binding=0) uniform polyLineBuffer {
PolyLineUniforms polyline;
};

View file

@ -20,15 +20,15 @@ layout(location=RENDER_UTILS_ATTR_NORMAL_MS) in vec3 _normal;
layout(location=RENDER_UTILS_ATTR_POSITION_MS) in vec4 _position;
layout(location=RENDER_UTILS_ATTR_POSITION_WS) in vec4 _worldPosition;
layout(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
LAYOUT(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
LAYOUT(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
LAYOUT(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
struct PolyvoxParams {
vec4 voxelVolumeSize;
};
layout(binding=0) uniform polyvoxParamsBuffer {
LAYOUT(binding=0) uniform polyvoxParamsBuffer {
PolyvoxParams params;
};

View file

@ -23,15 +23,15 @@ layout(location=RENDER_UTILS_ATTR_POSITION_MS) in vec4 _position;
layout(location=RENDER_UTILS_ATTR_POSITION_WS) in vec4 _worldPosition;
layout(location=RENDER_UTILS_ATTR_POSITION_ES) in vec4 _worldFadePosition;
layout(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
LAYOUT(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
LAYOUT(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
LAYOUT(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
struct PolyvoxParams {
vec4 voxelVolumeSize;
};
layout(binding=0) uniform polyvoxParamsBuffer {
LAYOUT(binding=0) uniform polyvoxParamsBuffer {
PolyvoxParams params;
};

View file

@ -10,7 +10,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
layout(binding=0) uniform sampler2D colorMap;
LAYOUT(binding=0) uniform sampler2D colorMap;
layout(location=0) in vec4 varColor;
layout(location=1) in vec2 varTexcoord;

View file

@ -43,7 +43,7 @@ struct ParticleUniforms {
vec2 spare;
};
layout(std140, binding=0) uniform particleBuffer {
LAYOUT_STD140(binding=0) uniform particleBuffer {
ParticleUniforms particle;
};

View file

@ -5,4 +5,4 @@ include_hifi_library_headers(fbx)
include_hifi_library_headers(gpu)
include_hifi_library_headers(image)
include_hifi_library_headers(ktx)
link_hifi_libraries(shared networking octree avatars graphics model-networking)
link_hifi_libraries(shared shaders networking octree avatars graphics model-networking)

View file

@ -200,7 +200,8 @@ void EntityEditFilters::addFilter(EntityItemID entityID, QString filterURL) {
_filterDataMap.insert(entityID, filterData);
_lock.unlock();
auto scriptRequest = DependencyManager::get<ResourceManager>()->createResourceRequest(this, scriptURL);
auto scriptRequest = DependencyManager::get<ResourceManager>()->createResourceRequest(
this, scriptURL, true, -1, "EntityEditFilters::addFilter");
if (!scriptRequest) {
qWarning() << "Could not create ResourceRequest for Entity Edit filter script at" << scriptURL.toString();
scriptRequestFinished(entityID);

View file

@ -953,7 +953,8 @@ bool GLTFReader::doesResourceExist(const QString& url) {
}
std::tuple<bool, QByteArray> GLTFReader::requestData(QUrl& url) {
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(nullptr, url);
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
nullptr, url, true, -1, "GLTFReader::requestData");
if (!request) {
return std::make_tuple(false, QByteArray());

View file

@ -443,7 +443,8 @@ void OBJReader::parseTextureLine(const QByteArray& textureLine, QByteArray& file
}
std::tuple<bool, QByteArray> requestData(QUrl& url) {
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(nullptr, url);
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
nullptr, url, true, -1, "(OBJReader) requestData");
if (!request) {
return std::make_tuple(false, QByteArray());

View file

@ -1,6 +1,6 @@
set(TARGET_NAME gpu-gl-common)
setup_hifi_library(Concurrent)
link_hifi_libraries(shared gl gpu)
link_hifi_libraries(shared gl gpu shaders)
GroupSources("src")
target_opengl()

View file

@ -643,18 +643,21 @@ protected:
}
} _pipeline;
// Backend dependant compilation of the shader
// Backend dependent compilation of the shader
virtual void postLinkProgram(ShaderObject& programObject, const Shader& program) const;
virtual GLShader* compileBackendProgram(const Shader& program, const Shader::CompilationHandler& handler);
virtual GLShader* compileBackendShader(const Shader& shader, const Shader::CompilationHandler& handler);
virtual std::string getBackendShaderHeader() const = 0;
// For a program, this will return a string containing all the source files (without any
// backend headers or defines). For a vertex, fragment or geometry shader, this will
// return the fully customized shader with all the version and backend specific
// preprocessor directives
// The program string returned can be used as a key for a cache of shader binaries
// The shader strings can be reliably sent to the low level `compileShader` functions
virtual std::string getShaderSource(const Shader& shader, int version) final;
virtual std::string getShaderSource(const Shader& shader, shader::Variant version) final;
shader::Variant getShaderVariant() const { return isStereo() ? shader::Variant::Stereo : shader::Variant::Mono; }
virtual shader::Dialect getShaderDialect() const = 0;
class ElementResource {
public:
gpu::Element _element;

View file

@ -54,7 +54,7 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
// check the program cache
// pick the program version
#ifdef GPU_STEREO_CAMERA_BUFFER
GLuint glprogram = pipelineObject->_program->getProgram((GLShader::Version)isStereo());
GLuint glprogram = pipelineObject->_program->getProgram(getShaderVariant());
#else
GLuint glprogram = pipelineObject->_program->getProgram();
#endif

View file

@ -25,120 +25,53 @@ static const std::array<GLenum, NUM_SHADER_DOMAINS> SHADER_DOMAINS{ {
GL_GEOMETRY_SHADER,
} };
// Domain specific defines
// Must match the order of type specified in gpu::Shader::Type
static const std::array<std::string, NUM_SHADER_DOMAINS> DOMAIN_DEFINES{ {
"#define GPU_VERTEX_SHADER",
"#define GPU_PIXEL_SHADER",
"#define GPU_GEOMETRY_SHADER",
} };
// Stereo specific defines
static const std::string stereoVersion{
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
R"SHADER(
#define GPU_TRANSFORM_IS_STEREO
#define GPU_TRANSFORM_STEREO_CAMERA
#define GPU_TRANSFORM_STEREO_CAMERA_INSTANCED
#define GPU_TRANSFORM_STEREO_SPLIT_SCREEN
)SHADER"
#endif
#ifdef GPU_STEREO_DRAWCALL_DOUBLED
#ifdef GPU_STEREO_CAMERA_BUFFER
R"SHADER(
#define GPU_TRANSFORM_IS_STEREO
#define GPU_TRANSFORM_STEREO_CAMERA
#define GPU_TRANSFORM_STEREO_CAMERA_ATTRIBUTED
)SHADER"
#else
R"SHADER(
#define GPU_TRANSFORM_IS_STEREO
)SHADER"
#endif
#endif
};
// TextureTable specific defines
static const std::string textureTableVersion {
"#extension GL_ARB_bindless_texture : require\n#define GPU_TEXTURE_TABLE_BINDLESS\n"
};
// Versions specific of the shader
static const std::array<std::string, GLShader::NumVersions> VERSION_DEFINES { {
"",
stereoVersion
} };
static std::string getShaderTypeString(Shader::Type type) {
switch (type) {
case Shader::Type::VERTEX:
return "vertex";
case Shader::Type::PIXEL:
return "pixel";
case Shader::Type::GEOMETRY:
return "geometry";
case Shader::Type::PROGRAM:
return "program";
default:
qFatal("Unexpected shader type %d", type);
Q_UNREACHABLE();
}
}
std::string GLBackend::getShaderSource(const Shader& shader, int version) {
std::string GLBackend::getShaderSource(const Shader& shader, shader::Variant variant) {
if (shader.isProgram()) {
std::string result;
result.append("// VERSION " + std::to_string(version));
for (const auto& subShader : shader.getShaders()) {
result.append("//-------- ");
result.append(getShaderTypeString(subShader->getType()));
result.append("\n");
result.append(subShader->getSource().getCode());
if (subShader) {
result += subShader->getSource().getSource(getShaderDialect(), variant);
}
}
return result;
}
std::string shaderDefines = getBackendShaderHeader() + "\n"
+ (supportsBindless() ? textureTableVersion : "\n")
+ DOMAIN_DEFINES[shader.getType()] + "\n"
+ VERSION_DEFINES[version];
return shaderDefines + "\n" + shader.getSource().getCode();
}
return shader.getSource().getSource(getShaderDialect(), variant);
}
GLShader* GLBackend::compileBackendShader(const Shader& shader, const Shader::CompilationHandler& handler) {
// Any GLSLprogram ? normally yes...
GLenum shaderDomain = SHADER_DOMAINS[shader.getType()];
GLShader::ShaderObjects shaderObjects;
Shader::CompilationLogs compilationLogs(GLShader::NumVersions);
const auto& variants = shader::allVariants();
Shader::CompilationLogs compilationLogs(variants.size());
shader.incrementCompilationAttempt();
for (int version = 0; version < GLShader::NumVersions; version++) {
auto& shaderObject = shaderObjects[version];
auto shaderSource = getShaderSource(shader, version);
for (const auto& variant : variants) {
auto index = static_cast<uint32_t>(variant);
auto shaderSource = getShaderSource(shader, variant);
auto& shaderObject = shaderObjects[index];
if (handler) {
bool retest = true;
std::string currentSrc = shaderSource;
// When a Handler is specified, we can try multiple times to build the shader and let the handler change the source if the compilation fails.
// The retest bool is set to false as soon as the compilation succeed to wexit the while loop.
// The retest bool is set to false as soon as the compilation succeed to exit the while loop.
// The handler tells us if we should retry or not while returning a modified version of the source.
while (retest) {
bool result = ::gl::compileShader(shaderDomain, currentSrc, shaderObject.glshader, compilationLogs[version].message);
compilationLogs[version].compiled = result;
bool result = ::gl::compileShader(shaderDomain, currentSrc, shaderObject.glshader, compilationLogs[index].message);
compilationLogs[index].compiled = result;
if (!result) {
std::string newSrc;
retest = handler(shader, currentSrc, compilationLogs[version], newSrc);
retest = handler(shader, currentSrc, compilationLogs[index], newSrc);
currentSrc = newSrc;
} else {
retest = false;
}
}
} else {
compilationLogs[version].compiled = ::gl::compileShader(shaderDomain, shaderSource, shaderObject.glshader, compilationLogs[version].message);
compilationLogs[index].compiled = ::gl::compileShader(shaderDomain, shaderSource, shaderObject.glshader, compilationLogs[index].message);
}
if (!compilationLogs[version].compiled) {
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - Shader didn't compile:\n" << compilationLogs[version].message.c_str();
if (!compilationLogs[index].compiled) {
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - Shader didn't compile:\n" << compilationLogs[index].message.c_str();
shader.setCompilationLogs(compilationLogs);
return nullptr;
}
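The compilation loop now iterates `shader::allVariants()` and uses the `Variant` value, cast to an integer, as the index into the per-variant shader object and compilation-log arrays, replacing the old Mono/Stereo `GLShader::Version` counter. A standalone sketch of that enum-class-indexed-array pattern, with illustrative names, is:

```cpp
// Sketch of indexing a fixed-size array by an enum class, as the variant loop does.
// The enum values and NUM_VARIANTS constant here are illustrative, not the project's.
#include <array>
#include <cstdint>
#include <iostream>
#include <string>

enum class Variant : uint32_t { Mono = 0, Stereo = 1 };
constexpr size_t NUM_VARIANTS = 2;

static const std::array<Variant, NUM_VARIANTS>& allVariants() {
    static const std::array<Variant, NUM_VARIANTS> variants { Variant::Mono, Variant::Stereo };
    return variants;
}

int main() {
    std::array<std::string, NUM_VARIANTS> compiled;
    for (const auto& variant : allVariants()) {
        auto index = static_cast<uint32_t>(variant);   // enum value -> array slot
        compiled[index] = (variant == Variant::Stereo) ? "stereo build" : "mono build";
    }
    for (const auto& entry : compiled) {
        std::cout << entry << "\n";
    }
    return 0;
}
```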
@ -162,11 +95,13 @@ GLShader* GLBackend::compileBackendProgram(const Shader& program, const Shader::
GLShader::ShaderObjects programObjects;
program.incrementCompilationAttempt();
Shader::CompilationLogs compilationLogs(GLShader::NumVersions);
const auto& variants = shader::allVariants();
Shader::CompilationLogs compilationLogs(variants.size());
for (int version = 0; version < GLShader::NumVersions; version++) {
auto& programObject = programObjects[version];
auto programSource = getShaderSource(program, version);
for (const auto& variant : variants) {
auto index = static_cast<uint32_t>(variant);
auto& programObject = programObjects[index];
auto programSource = getShaderSource(program, variant);
auto hash = ::gl::getShaderHash(programSource);
CachedShader cachedBinary;
@ -199,11 +134,11 @@ GLShader* GLBackend::compileBackendProgram(const Shader& program, const Shader::
for (auto subShader : program.getShaders()) {
auto object = GLShader::sync((*this), *subShader, handler);
if (object) {
shaderGLObjects.push_back(object->_shaderObjects[version].glshader);
shaderGLObjects.push_back(object->_shaderObjects[index].glshader);
} else {
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - One of the shaders of the program is not compiled?";
compilationLogs[version].compiled = false;
compilationLogs[version].message = std::string("Failed to compile, one of the shaders of the program is not compiled ?");
compilationLogs[index].compiled = false;
compilationLogs[index].message = std::string("Failed to compile, one of the shaders of the program is not compiled ?");
program.setCompilationLogs(compilationLogs);
return nullptr;
}
@ -211,9 +146,9 @@ GLShader* GLBackend::compileBackendProgram(const Shader& program, const Shader::
glprogram = ::gl::buildProgram(shaderGLObjects);
if (!::gl::linkProgram(glprogram, compilationLogs[version].message)) {
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - Program didn't link:\n" << compilationLogs[version].message.c_str();
compilationLogs[version].compiled = false;
if (!::gl::linkProgram(glprogram, compilationLogs[index].message)) {
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - Program didn't link:\n" << compilationLogs[index].message.c_str();
compilationLogs[index].compiled = false;
glDeleteProgram(glprogram);
glprogram = 0;
return nullptr;
@ -228,12 +163,12 @@ GLShader* GLBackend::compileBackendProgram(const Shader& program, const Shader::
}
if (glprogram == 0) {
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - Program didn't link:\n" << compilationLogs[version].message.c_str();
qCWarning(gpugllogging) << "GLBackend::compileBackendProgram - Program didn't link:\n" << compilationLogs[index].message.c_str();
program.setCompilationLogs(compilationLogs);
return nullptr;
}
compilationLogs[version].compiled = true;
compilationLogs[index].compiled = true;
programObject.glprogram = glprogram;
postLinkProgram(programObject, program);
}
@ -249,7 +184,10 @@ GLShader* GLBackend::compileBackendProgram(const Shader& program, const Shader::
static const GLint INVALID_UNIFORM_INDEX = -1;
GLint GLBackend::getRealUniformLocation(GLint location) const {
auto& shader = _pipeline._programShader->_shaderObjects[(GLShader::Version)isStereo()];
auto variant = isStereo() ? shader::Variant::Stereo : shader::Variant::Mono;
auto index = static_cast<uint32_t>(variant);
auto& shader = _pipeline._programShader->_shaderObjects[index];
auto itr = shader.uniformRemap.find(location);
if (itr == shader.uniformRemap.end()) {
// This shouldn't happen, because we use reflection to determine all the possible
@ -264,20 +202,23 @@ GLint GLBackend::getRealUniformLocation(GLint location) const {
void GLBackend::postLinkProgram(ShaderObject& shaderObject, const Shader& program) const {
const auto& glprogram = shaderObject.glprogram;
const auto& expectedUniforms = program.getUniforms();
const auto expectedLocationsByName = expectedUniforms.getLocationsByName();
const auto uniforms = ::gl::Uniform::load(glprogram, expectedUniforms.getNames());
auto& uniformRemap = shaderObject.uniformRemap;
const auto& expectedUniforms = program.getReflection().uniforms;
// Pre-initialize all the uniforms with an invalid location
for (const auto& entry : expectedLocationsByName) {
auto& uniformRemap = shaderObject.uniformRemap;
// initialize all the uniforms with an invalid location
for (const auto& entry : expectedUniforms) {
uniformRemap[entry.second] = INVALID_UNIFORM_INDEX;
}
// Now load up all the actual found uniform location
// Get the actual uniform locations from the shader
const auto names = Shader::Reflection::getNames(expectedUniforms);
const auto uniforms = ::gl::Uniform::load(glprogram, names);
// Now populate the remapping with the found locations
for (const auto& uniform : uniforms) {
const auto& name = uniform.name;
const auto& expectedLocation = expectedLocationsByName.at(name);
const auto& expectedLocation = expectedUniforms.at(name);
const auto& location = uniform.binding;
uniformRemap[expectedLocation] = location;
}
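`postLinkProgram()` now takes the expected uniform locations from the shader reflection, marks each one invalid, and then overwrites only the entries the driver actually reports, so lookups for optimized-out uniforms resolve to the sentinel rather than a stale location. A sketch of the same remapping idea with hypothetical data (the GL queries replaced by a plain list) is:

```cpp
// Sketch of the uniform-location remapping idea: expected (reflection) locations
// are pre-marked invalid, then overwritten with whatever the driver actually assigned.
// Names and the 'actualUniforms' data are hypothetical stand-ins for the GL queries.
#include <iostream>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

static const int INVALID_UNIFORM_INDEX = -1;

int main() {
    // What the shader reflection says the locations should be.
    std::unordered_map<std::string, int> expectedUniforms {
        { "colorMap", 0 }, { "exposure", 1 }
    };

    // What the driver reports after linking (hypothetical values).
    std::vector<std::pair<std::string, int>> actualUniforms {
        { "colorMap", 3 }   // 'exposure' was optimized out, so it is not reported
    };

    // Pre-initialize every expected location as invalid...
    std::unordered_map<int, int> uniformRemap;
    for (const auto& entry : expectedUniforms) {
        uniformRemap[entry.second] = INVALID_UNIFORM_INDEX;
    }
    // ...then fill in the locations that were actually found.
    for (const auto& uniform : actualUniforms) {
        uniformRemap[expectedUniforms.at(uniform.first)] = uniform.second;
    }

    for (const auto& entry : uniformRemap) {
        std::cout << "expected " << entry.first << " -> actual " << entry.second << "\n";
    }
    return 0;
}
```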
@ -462,3 +403,4 @@ void GLBackend::initShaderBinaryCache() {
void GLBackend::killShaderBinaryCache() {
::gl::saveShaderCache(_shaderBinaryCache._binaries);
}

View file

@ -52,7 +52,7 @@ GLPipeline* GLPipeline::sync(GLBackend& backend, const Pipeline& pipeline) {
// Special case for view correction matrices, any pipeline that declares the correction buffer
// uniform will automatically have it provided without any client code necessary.
// Required for stable lighting in the HMD.
object->_cameraCorrection = shader->getUniformBuffers().isValid(gpu::slot::buffer::CameraCorrection);
object->_cameraCorrection = shader->getReflection().validUniformBuffer(gpu::slot::buffer::CameraCorrection);
object->_program = programObject;
object->_state = stateObject;

View file

@ -14,43 +14,45 @@
namespace gpu { namespace gl {
struct ShaderObject {
GLuint glshader { 0 };
GLuint glprogram { 0 };
enum class BindingType
{
INPUT,
OUTPUT,
TEXTURE,
SAMPLER,
UNIFORM_BUFFER,
RESOURCE_BUFFER,
UNIFORM,
};
using LocationMap = std::unordered_map <GLuint, GLuint>;
LocationMap uniformRemap;
using LocationMap = std::unordered_map<std::string, int32_t>;
using ReflectionMap = std::map<BindingType, LocationMap>;
using UniformMap = std::unordered_map<GLuint, GLuint>;
GLuint glshader{ 0 };
GLuint glprogram{ 0 };
UniformMap uniformRemap;
};
class GLShader : public GPUObject {
public:
static GLShader* sync(GLBackend& backend, const Shader& shader, const Shader::CompilationHandler& handler = nullptr);
enum Version {
Mono = 0,
Stereo,
NumVersions
};
using ShaderObject = gpu::gl::ShaderObject;
using ShaderObjects = std::array< ShaderObject, NumVersions >;
using UniformMapping = std::map<GLint, GLint>;
using UniformMappingVersions = std::vector<UniformMapping>;
using ShaderObjects = std::array<ShaderObject, shader::NUM_VARIANTS>;
GLShader(const std::weak_ptr<GLBackend>& backend);
~GLShader();
ShaderObjects _shaderObjects;
GLuint getProgram(Version version = Mono) const {
return _shaderObjects[version].glprogram;
GLuint getProgram(shader::Variant version = shader::Variant::Mono) const {
return _shaderObjects[static_cast<uint32_t>(version)].glprogram;
}
const std::weak_ptr<GLBackend> _backend;
};
} }
}} // namespace gpu::gl
#endif
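The reworked ShaderObject introduces a BindingType enumeration and a ReflectionMap alias that groups name-to-location maps by binding type. A brief sketch of that data layout, using an illustrative subset of the enum values from the diff and made-up entries, is:

```cpp
// Sketch of the binding-type -> (name -> location) layout described by the new aliases.
// The enum subset and the entries are illustrative, not taken from the engine.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <unordered_map>

enum class BindingType { TEXTURE, UNIFORM_BUFFER, RESOURCE_BUFFER };
using LocationMap = std::unordered_map<std::string, int32_t>;
using ReflectionMap = std::map<BindingType, LocationMap>;

int main() {
    ReflectionMap reflection;
    reflection[BindingType::TEXTURE]["colorMap"] = 0;
    reflection[BindingType::UNIFORM_BUFFER]["textureDataBuffer"] = 0;

    for (const auto& [type, locations] : reflection) {
        for (const auto& [name, binding] : locations) {
            std::cout << name << " -> binding " << binding << "\n";
        }
    }
    return 0;
}
```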

View file

@ -1,6 +1,6 @@
set(TARGET_NAME gpu-gl)
setup_hifi_library(Concurrent)
link_hifi_libraries(shared gl gpu gpu-gl-common)
link_hifi_libraries(shared gl gpu gpu-gl-common shaders)
if (UNIX)
target_link_libraries(${TARGET_NAME} pthread)
endif(UNIX)

View file

@ -170,8 +170,7 @@ protected:
// Output stage
void do_blit(const Batch& batch, size_t paramOffset) override;
std::string getBackendShaderHeader() const override;
shader::Dialect getShaderDialect() const override { return shader::Dialect::glsl410; }
void postLinkProgram(ShaderObject& programObject, const Shader& program) const override;
};

View file

@ -12,22 +12,13 @@ using namespace gpu;
using namespace gpu::gl;
using namespace gpu::gl41;
// GLSL version
std::string GL41Backend::getBackendShaderHeader() const {
static const std::string header(
R"SHADER(#version 410 core
#define GPU_GL410
#define BITFIELD int
)SHADER");
return header;
}
void GL41Backend::postLinkProgram(ShaderObject& programObject, const Shader& program) const {
Parent::postLinkProgram(programObject, program);
const auto& glprogram = programObject.glprogram;
const auto& reflection = program.getReflection();
// For the UBOs, use glUniformBlockBinding to fixup the locations based on the reflection
{
const auto expectedUbos = program.getUniformBuffers().getLocationsByName();
const auto& expectedUbos = reflection.uniformBuffers;
auto ubos = ::gl::UniformBlock::load(glprogram);
for (const auto& ubo : ubos) {
const auto& name = ubo.name;
@ -41,7 +32,7 @@ void GL41Backend::postLinkProgram(ShaderObject& programObject, const Shader& pro
// For the Textures, use glUniform1i to fixup the active texture slots based on the reflection
{
const auto expectedTextures = program.getTextures().getLocationsByName();
const auto& expectedTextures = reflection.textures;
for (const auto& expectedTexture : expectedTextures) {
auto location = glGetUniformLocation(glprogram, expectedTexture.first.c_str());
if (location < 0) {
@ -53,8 +44,9 @@ void GL41Backend::postLinkProgram(ShaderObject& programObject, const Shader& pro
// For the resource buffers, do the same as for the textures, since in GL 4.1 that's how they're implemented
{
const auto expectedResourceBuffers = program.getResourceBuffers().getLocationsByName();
const auto resourceBufferUniforms = ::gl::Uniform::loadByName(glprogram, program.getResourceBuffers().getNames());
const auto& expectedResourceBuffers = reflection.resourceBuffers;
const auto names = Shader::Reflection::getNames(expectedResourceBuffers);
const auto resourceBufferUniforms = ::gl::Uniform::loadByName(glprogram, names);
for (const auto& resourceBuffer : resourceBufferUniforms) {
const auto& targetBinding = expectedResourceBuffers.at(resourceBuffer.name);
glProgramUniform1i(glprogram, resourceBuffer.binding, targetBinding + GL41Backend::RESOURCE_BUFFER_SLOT0_TEX_UNIT);

View file

@ -23,7 +23,7 @@
#define GPU_BINDLESS_TEXTURES 0
namespace gpu { namespace gl45 {
using namespace gpu::gl;
using TextureWeakPointer = std::weak_ptr<Texture>;
@ -56,6 +56,7 @@ public:
using Parent = GLTexture;
friend class GL45Backend;
static GLuint allocate(const Texture& texture);
protected:
GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture& texture);
void generateMips() const override;
@ -88,6 +89,7 @@ public:
virtual const Bindless& getBindless() const;
void releaseBindless() const;
void recreateBindless() const;
private:
mutable Bindless _bindless;
#endif
@ -98,10 +100,11 @@ public:
mutable Sampler _cachedSampler{ getInvalidSampler() };
};
#if GPU_BINDLESS_TEXTURES
class GL45TextureTable : public GLObject<TextureTable> {
static GLuint allocate();
using Parent = GLObject<TextureTable>;
public:
using BindlessArray = std::array<GL45Texture::Bindless, TextureTable::COUNT>;
@ -116,7 +119,6 @@ public:
};
#endif
//
// Textures that have fixed allocation sizes and cannot be managed at runtime
//
@ -134,12 +136,13 @@ public:
void allocateStorage() const;
void syncSampler() const override;
const Size _size { 0 };
const Size _size{ 0 };
};
class GL45AttachmentTexture : public GL45FixedAllocationTexture {
using Parent = GL45FixedAllocationTexture;
friend class GL45Backend;
protected:
GL45AttachmentTexture(const std::weak_ptr<GLBackend>& backend, const Texture& texture);
~GL45AttachmentTexture();
@ -148,6 +151,7 @@ public:
class GL45StrictResourceTexture : public GL45FixedAllocationTexture {
using Parent = GL45FixedAllocationTexture;
friend class GL45Backend;
protected:
GL45StrictResourceTexture(const std::weak_ptr<GLBackend>& backend, const Texture& texture);
~GL45StrictResourceTexture();
@ -179,6 +183,7 @@ public:
class GL45ResourceTexture : public GL45VariableAllocationTexture {
using Parent = GL45VariableAllocationTexture;
friend class GL45Backend;
protected:
GL45ResourceTexture(const std::weak_ptr<GLBackend>& backend, const Texture& texture);
@ -186,7 +191,6 @@ public:
size_t promote() override;
size_t demote() override;
void populateTransferQueue(TransferQueue& pendingTransfers) override;
void allocateStorage(uint16 mip);
Size copyMipsFromTexture();
@ -226,7 +230,6 @@ public:
};
#endif
protected:
void draw(GLenum mode, uint32 numVertices, uint32 startVertex) override;
@ -244,7 +247,6 @@ protected:
GLuint getQueryID(const QueryPointer& query) override;
GLQuery* syncGPUObject(const Query& query) override;
// Draw Stage
void do_draw(const Batch& batch, size_t paramOffset) override;
void do_drawIndexed(const Batch& batch, size_t paramOffset) override;
@ -270,7 +272,7 @@ protected:
void do_blit(const Batch& batch, size_t paramOffset) override;
// Shader Stage
std::string getBackendShaderHeader() const override;
shader::Dialect getShaderDialect() const override;
// Texture Management Stage
void initTextureManagementStage() override;
@ -282,9 +284,8 @@ protected:
#endif
};
} }
}} // namespace gpu::gl45
Q_DECLARE_LOGGING_CATEGORY(gpugl45logging)
#endif

View file

@ -7,22 +7,16 @@
//
#include "GL45Backend.h"
#include <gpu/gl/GLShader.h>
//#include <gl/GLShaders.h>
using namespace gpu;
using namespace gpu::gl;
using namespace gpu::gl45;
// GLSL version
std::string GL45Backend::getBackendShaderHeader() const {
static const std::string header(
R"SHADER(#version 450 core
#define GPU_GL450
#define BITFIELD int
)SHADER"
#ifdef GPU_SSBO_TRANSFORM_OBJECT
R"SHADER(#define GPU_SSBO_TRANSFORM_OBJECT)SHADER"
shader::Dialect GL45Backend::getShaderDialect() const {
#if defined(Q_OS_MAC)
// We build, but don't actually use GL 4.5 on OSX
throw std::runtime_error("GL 4.5 unavailable on OSX");
#else
return shader::Dialect::glsl450;
#endif
);
return header;
}

View file

@ -1,5 +1,5 @@
set(TARGET_NAME gpu-gles)
setup_hifi_library(Gui Concurrent)
link_hifi_libraries(shared gl gpu gpu-gl-common)
link_hifi_libraries(shared shaders gl gpu gpu-gl-common)
GroupSources("src")
target_opengl()

View file

@ -27,11 +27,7 @@ class GLESBackend : public GLBackend {
friend class Context;
public:
static const GLint TRANSFORM_OBJECT_SLOT { 31 };
static const GLint RESOURCE_TRANSFER_TEX_UNIT { 32 };
static const GLint RESOURCE_TRANSFER_EXTRA_TEX_UNIT { 33 };
static const GLint RESOURCE_BUFFER_TEXBUF_TEX_UNIT { 34 };
static const GLint RESOURCE_BUFFER_SLOT0_TEX_UNIT { 35 };
explicit GLESBackend(bool syncCache) : Parent(syncCache) {}
GLESBackend() : Parent() {}
virtual ~GLESBackend() {
@ -166,7 +162,7 @@ protected:
// Output stage
void do_blit(const Batch& batch, size_t paramOffset) override;
std::string getBackendShaderHeader() const override;
shader::Dialect getShaderDialect() const override { return shader::Dialect::glsl310es; }
};
} }

View file

@ -12,15 +12,3 @@ using namespace gpu;
using namespace gpu::gl;
using namespace gpu::gles;
// GLSL version
std::string GLESBackend::getBackendShaderHeader() const {
static const std::string header(
R"SHADER(#version 310 es
#extension GL_EXT_texture_buffer : enable
precision highp float;
precision highp samplerBuffer;
precision highp sampler2DShadow;
#define BITFIELD highp int
)SHADER");
return header;
}

View file

@ -60,12 +60,11 @@ void GLESBackend::transferTransformState(const Batch& batch) const {
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
glActiveTexture(GL_TEXTURE0 + GLESBackend::TRANSFORM_OBJECT_SLOT);
glActiveTexture(GL_TEXTURE0 + slot::texture::ObjectTransforms);
glBindTexture(GL_TEXTURE_BUFFER, _transform._objectBufferTexture);
if (!batch._objects.empty()) {
glTexBuffer(GL_TEXTURE_BUFFER, GL_RGBA32F, _transform._objectBuffer);
}
CHECK_GL_ERROR();
// Make sure the current Camera offset is unknown before render Draw

Some files were not shown because too many files have changed in this diff Show more