Mirror of https://github.com/overte-org/overte.git (synced 2025-04-23 06:13:33 +02:00)

Commit 0166d7748a: Merging with master around rc76 cut time

1957 changed files with 66886 additions and 40267 deletions
Changed files shown below (partial tree):

.eslintrc.js, BUILD.md, BUILD_LINUX.md, BUILD_OSX.md, BUILD_WIN.md, CMakeLists.txt, LICENSE, VCPKG.md
android/
    app/
        CMakeLists.txt, build.gradle
        src/main/
            AndroidManifest.xml
            cpp/
            java/io/highfidelity/hifiinterface/
                HifiUtils.java, InterfaceActivity.java, LoginMenuActivity.java, MainActivity.java, SplashActivity.java, WebViewActivity.java
                fragment/
                    FriendsFragment.java, HomeFragment.java, LoginFragment.java, OnBackPressedListener.java, SettingsFragment.java, SignupFragment.java, StartMenuFragment.java, WebViewFragment.java
                receiver/
                view/
            res/
    build.gradle, gradle.properties
assignment-client/
    CMakeLists.txt
    src/
        Agent.cpp, Agent.h, AssignmentClient.cpp
        audio/
            AudioMixer.cpp, AudioMixer.h, AudioMixerClientData.cpp, AudioMixerClientData.h, AudioMixerSlave.cpp, AudioMixerSlave.h, AudioMixerSlavePool.cpp, AudioMixerSlavePool.h, AudioMixerStats.cpp, AudioMixerStats.h, AvatarAudioStream.cpp, AvatarAudioStream.h
        avatars/
            AvatarMixer.cpp, AvatarMixerClientData.cpp, AvatarMixerClientData.h, AvatarMixerSlave.cpp, AvatarMixerSlave.h, ScriptableAvatar.cpp, ScriptableAvatar.h
        entities/
        scripts/
cmake/
    compiler.cmake, init.cmake
    externals/
        boostconfig, bullet, draco, etc2comp, gli, glm, json, nvtt, openvr, quazip, sdl2, serverless-content, zlib
    macros/
.eslintrc.js

@@ -36,6 +36,7 @@ module.exports = {
"GlobalServices": false,
"GooglePoly": false,
"Graphics": false,
"HifiAbout": false,
"HMD": false,
"LaserPointers": false,
"location": true,
BUILD.md (27 changed lines)

@@ -9,14 +9,12 @@
- [cmake](https://cmake.org/download/): 3.9
- [Qt](https://www.qt.io/download-open-source): 5.10.1
- [OpenSSL](https://www.openssl.org/): Use the latest available 1.0 version (**NOT** 1.1) of OpenSSL to avoid security vulnerabilities.
- [VHACD](https://github.com/virneo/v-hacd)(clone this repository)(Optional)
- [Python](https://www.python.org/downloads/): 3.6 or higher

### CMake External Project Dependencies

These dependencies need not be installed manually. They are automatically downloaded on the platforms where they are required.
- [Bullet Physics Engine](https://github.com/bulletphysics/bullet3/releases): 2.83
- [GLEW](http://glew.sourceforge.net/): 1.13
- [glm](https://glm.g-truc.net/0.9.8/index.html): 0.9.8
- [Oculus SDK](https://developer.oculus.com/downloads/): 1.11 (Win32) / 0.5 (Mac)
- [OpenVR](https://github.com/ValveSoftware/openvr): 1.0.6 (Win32 only)

@@ -24,16 +22,15 @@ These dependencies need not be installed manually. They are automatically downlo
- [QuaZip](https://sourceforge.net/projects/quazip/files/quazip/): 0.7.3
- [SDL2](https://www.libsdl.org/download-2.0.php): 2.0.3
- [Intel Threading Building Blocks](https://www.threadingbuildingblocks.org/): 4.3
- [Sixense](http://sixense.com/): 071615
- [vcpkg](https://github.com/highfidelity/vcpkg):
- [VHACD](https://github.com/virneo/v-hacd)
- [zlib](http://www.zlib.net/): 1.28 (Win32 only)
- nVidia Texture Tools: 2.1
- [nvtt](https://github.com/highfidelity/nvidia-texture-tools): 2.1.1 (customized)

The above dependencies will be downloaded, built, linked and included automatically by CMake where we require them. The CMakeLists files that handle grabbing each of the following external dependencies can be found in the [cmake/externals folder](cmake/externals). The resulting downloads, source files and binaries will be placed in the `build/ext` folder in each of the subfolders for each external project.

These are not placed in your normal build tree when doing an out of source build so that they do not need to be re-downloaded and re-compiled every time the CMake build folder is cleared. Should you want to force a re-download and re-compile of a specific external, you can simply remove that directory from the appropriate subfolder in `build/ext`. Should you want to force a re-download and re-compile of all externals, just remove the `build/ext` folder.

If you would like to use a specific install of a dependency instead of the version that would be grabbed as a CMake ExternalProject, you can pass -DUSE\_LOCAL\_$NAME=0 (where $NAME is the name of the subfolder in [cmake/externals](cmake/externals)) when you run CMake to tell it not to get that dependency as an external project.
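For illustration only (not part of the diff), assuming OpenVR is the external you want to supply yourself, the invocation could look roughly like this:

```bash
# Sketch: tell CMake not to fetch the "openvr" external (folder name from cmake/externals);
# substitute the external you care about and your own Qt path.
cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/Qt5.10.1/5.10.1/gcc_64/lib/cmake -DUSE_LOCAL_OPENVR=0
```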
#### CMake

Hifi uses CMake to generate build files and project files for your platform.

@@ -46,6 +43,7 @@ This can either be entered directly into your shell session before you build or
The path it needs to be set to will depend on where and how Qt5 was installed. e.g.

export QT_CMAKE_PREFIX_PATH=/usr/local/Qt5.10.1/5.10.1/gcc_64/lib/cmake
export QT_CMAKE_PREFIX_PATH=/usr/local/qt/5.10.1/clang_64/lib/cmake/
export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.10.1/lib/cmake
export QT_CMAKE_PREFIX_PATH=/usr/local/opt/qt5/lib/cmake

@@ -80,9 +78,22 @@ In the examples below the variable $NAME would be replaced by the name of the de
* $NAME_ROOT_DIR - set this variable in your ENV
* HIFI_LIB_DIR - set this variable in your ENV to your High Fidelity lib folder, should contain a folder '$name'

### Optional Components

#### Build Options

The following build options can be used when running CMake; a sample invocation follows the lists below.

* BUILD_CLIENT
* BUILD_SERVER
* BUILD_TESTS
* BUILD_TOOLS

#### Developer Build Options

* USE_GLES
* DISABLE_UI
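As a sketch only (the chosen values are an example, not taken from the diff), these switches are passed on the CMake command line:

```bash
# Example: configure a server-only build with tests disabled and tools enabled.
cmake .. -DBUILD_CLIENT=OFF -DBUILD_SERVER=ON -DBUILD_TESTS=OFF -DBUILD_TOOLS=ON
```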
#### Devices

You can support external input/output devices such as Leap Motion, MIDI, and more by adding each individual SDK in the visible building path. Refer to the readme file available in each device folder in [interface/external/](interface/external) for the detailed explanation of the requirements to use the device.
BUILD_LINUX.md

@@ -6,13 +6,20 @@ Please read the [general build guide](BUILD.md) for information on dependencies
Should you choose not to install Qt5 via a package manager that handles dependencies for you, you may be missing some Qt5 dependencies. On Ubuntu, for example, the following additional packages are required:

libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev
libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev zlib1g-dev

## Ubuntu 16.04 specific build guide
## Ubuntu 16.04/18.04 specific build guide

### Ubuntu 18.04 only
Add the universe repository:
_(This is not enabled by default on the server edition)_
```bash
sudo add-apt-repository universe
sudo apt-get update
```

### Prepare environment
hifiqt5.10.1
Install qt:
Install Qt 5.10.1:
```bash
wget http://debian.highfidelity.com/pool/h/hi/hifiqt5.10.1_5.10.1_amd64.deb
sudo dpkg -i hifiqt5.10.1_5.10.1_amd64.deb

@@ -20,19 +27,25 @@ sudo dpkg -i hifiqt5.10.1_5.10.1_amd64.deb
Install build dependencies:
```bash
sudo apt-get install libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev
sudo apt-get install libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev zlib1g-dev
```

To compile interface in a server you must install:
```bash
sudo apt -y install libpulse0 libnss3 libnspr4 libfontconfig1 libxcursor1 libxcomposite1 libxtst6 libxslt1.1
sudo apt-get -y install libpulse0 libnss3 libnspr4 libfontconfig1 libxcursor1 libxcomposite1 libxtst6 libxslt1.1
```

Install build tools:
```bash
sudo apt install cmake
sudo apt-get install cmake
```

Install Python 3:
```bash
sudo apt-get install python3.6
```

### Get code and checkout the tag you need

Clone this repository:

@@ -48,12 +61,7 @@ git tags
Then checkout last tag with:
```bash
git checkout tags/RELEASE-6819
```

Or go to the highfidelity download page (https://highfidelity.com/download) to get the release version. For example, if there is a BETA 6731 type:
```bash
git checkout tags/RELEASE-6731
git checkout tags/v0.71.0
```

### Compiling

@@ -66,15 +74,20 @@ cd hifi/build
Prepare makefiles:
```bash
cmake -DQT_CMAKE_PREFIX_PATH=/usr/local/Qt5.10.1/5.10/gcc_64/lib/cmake ..
cmake -DQT_CMAKE_PREFIX_PATH=/usr/local/Qt5.10.1/5.10.1/gcc_64/lib/cmake ..
```

Start compilation and get a cup of coffee:
Start compilation of the server and get a cup of coffee:
```bash
make domain-server assignment-client interface
make domain-server assignment-client
```

In a server does not make sense to compile interface
To compile interface:
```bash
make interface
```

In a server, it does not make sense to compile interface

### Running the software

@@ -93,4 +106,4 @@ Running interface:
./interface/interface
```

Go to localhost in running interface.
Go to localhost in the running interface.
BUILD_OSX.md

@@ -6,6 +6,10 @@ Please read the [general build guide](BUILD.md) for information on dependencies
brew install cmake openssl qt

### Python 3

Download and install Python 3.6.6 or higher from [here](https://www.python.org/downloads/). Execute the `Update Shell Profile.command` script that is provided with the installer.

### OpenSSL

Assuming you've installed OpenSSL using the homebrew instructions above, you'll need to set OPENSSL_ROOT_DIR so CMake can find your installations.
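For illustration only (not part of the diff), with a Homebrew install the variable can be set like this; the exact prefix is whatever `brew --prefix openssl` reports on your machine:

```bash
# Point CMake at Homebrew's OpenSSL; run this before invoking cmake.
export OPENSSL_ROOT_DIR=$(brew --prefix openssl)
```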
@@ -28,7 +32,9 @@ Note that this uses the version from the homebrew formula at the time of this wr
If Xcode is your editor of choice, you can ask CMake to generate Xcode project files instead of Unix Makefiles.

cmake .. -GXcode
cmake .. -G Xcode

If `cmake` complains about Python 3 being missing, you may need to update your CMake binary with the command `brew upgrade cmake`, or by downloading and running the latest CMake installer, depending on how you originally installed CMake.

After running cmake, you will have the make files or Xcode project file necessary to build all of the components. Open the hifi.xcodeproj file, choose ALL_BUILD from the Product > Scheme menu (or target drop down), and click Run.
BUILD_WIN.md (10 changed lines)

@@ -5,11 +5,17 @@ Note: We are now using Visual Studio 2017 and Qt 5.10.1. If you are upgrading fr
Note: The prerequisites will require about 10 GB of space on your drive. You will also need a system with at least 8GB of main memory.

### Step 1. Visual Studio 2017
### Step 1. Visual Studio 2017 & Python

If you don’t have Community or Professional edition of Visual Studio 2017, download [Visual Studio Community 2017](https://www.visualstudio.com/downloads/).

When selecting components, check "Desktop development with C++." Also on the right on the Summary toolbar, check "Windows 8.1 SDK and UCRT SDK" and "VC++ 2015.3 v140 toolset (x86,x64)".
When selecting components, check "Desktop development with C++". Also on the right on the Summary toolbar, check "Windows 8.1 SDK and UCRT SDK" and "VC++ 2015.3 v140 toolset (x86,x64)". If you do not already have a python development environment installed, also check "Python Development" in this screen.

If you already have Visual Studio installed and need to add python, open the "Add or remove programs" control panel and find the "Microsoft Visual Studio Installer". Select it and click "Modify". In the installer, select "Modify" again, then check "Python Development" and allow the installer to apply the changes.

### Step 1a. Alternate Python

If you do not wish to use the Python installation bundled with Visual Studio, you can download the installer from [here](https://www.python.org/downloads/). Ensure you get version 3.6.6 or higher.

### Step 2. Installing CMake
CMakeLists.txt

@@ -7,17 +7,36 @@ else()
cmake_minimum_required(VERSION 3.2)
endif()

include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/macros/TargetPython.cmake")
target_python()

if (HIFI_ANDROID )
execute_process(
COMMAND ${HIFI_PYTHON_EXEC} ${CMAKE_CURRENT_SOURCE_DIR}/prebuild.py --android --build-root ${CMAKE_BINARY_DIR}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
else()
execute_process(
COMMAND ${HIFI_PYTHON_EXEC} ${CMAKE_CURRENT_SOURCE_DIR}/prebuild.py --build-root ${CMAKE_BINARY_DIR}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
# squelch the Policy CMP0074 warning without requiring an update to cmake 3.12.
if (CMAKE_VERSION VERSION_GREATER_EQUAL 3.12)
cmake_policy(SET CMP0074 NEW)
endif()
endif()

if(NOT EXISTS "${CMAKE_BINARY_DIR}/vcpkg.cmake")
message(FATAL_ERROR "vcpkg configuration missing.")
endif()

include("${CMAKE_BINARY_DIR}/vcpkg.cmake")
project(hifi)

include("cmake/init.cmake")

include("cmake/compiler.cmake")

if (BUILD_SCRIBE_ONLY)
add_subdirectory(tools/scribe)
add_subdirectory(tools/shader_reflect)
return()
endif()
add_paths_to_fixup_libs(${VCPKG_INSTALL_ROOT}/bin)
add_paths_to_fixup_libs(${VCPKG_INSTALL_ROOT}/debug/bin)

if (NOT DEFINED CLIENT_ONLY)
set(CLIENT_ONLY 0)

@@ -35,7 +54,8 @@ endif()
set(BUILD_CLIENT_OPTION ON)
set(BUILD_SERVER_OPTION ON)
set(BUILD_TESTS_OPTION ON)
set(BUILD_TESTS_OPTION OFF)
set(BUILD_MANUAL_TESTS_OPTION ${BUILD_TESTS_OPTION})
set(BUILD_TOOLS_OPTION ON)
set(BUILD_INSTALLER_OPTION ON)
set(GLES_OPTION OFF)

@@ -61,16 +81,18 @@ if (ANDROID)
set(GLES_OPTION ON)
set(PLATFORM_QT_COMPONENTS AndroidExtras WebView)
else ()
set(PLATFORM_QT_COMPONENTS WebEngine WebEngineWidgets)
set(PLATFORM_QT_COMPONENTS WebEngine)
endif ()

if (USE_GLES AND (NOT ANDROID))
set(DISABLE_QML_OPTION ON)
endif()

option(BUILD_CLIENT "Build client components" ${BUILD_CLIENT_OPTION})
option(BUILD_SERVER "Build server components" ${BUILD_SERVER_OPTION})
option(BUILD_TESTS "Build tests" ${BUILD_TESTS_OPTION})
option(BUILD_MANUAL_TESTS "Build manual tests" ${BUILD_MANUAL_TESTS_OPTION})
option(BUILD_TOOLS "Build tools" ${BUILD_TOOLS_OPTION})
option(BUILD_INSTALLER "Build installer" ${BUILD_INSTALLER_OPTION})
option(USE_GLES "Use OpenGL ES" ${GLES_OPTION})

@@ -138,6 +160,8 @@ list(APPEND CMAKE_PREFIX_PATH "${QT_CMAKE_PREFIX_PATH}")
find_package( Threads )

add_definitions(-DGLM_FORCE_RADIANS)
add_definitions(-DGLM_ENABLE_EXPERIMENTAL)
add_definitions(-DGLM_FORCE_CTOR_INIT)
set(HIFI_LIBRARY_DIR "${CMAKE_CURRENT_SOURCE_DIR}/libraries")

set(EXTERNAL_PROJECT_PREFIX "project")

@@ -162,7 +186,6 @@ if (BUILD_SERVER)
set_target_properties(domain-server PROPERTIES FOLDER "Apps")
add_subdirectory(ice-server)
set_target_properties(ice-server PROPERTIES FOLDER "Apps")
add_subdirectory(server-console)
endif()

if (BUILD_CLIENT)

@@ -174,6 +197,7 @@ endif()
if (BUILD_CLIENT OR BUILD_SERVER)
add_subdirectory(plugins)
add_subdirectory(server-console)
endif()

# BUILD_TOOLS option will be handled inside the tools's CMakeLists.txt because 'scribe' tool is required for build anyway

@@ -185,7 +209,9 @@ if (BUILD_TESTS)
include(CTest)
enable_testing()
add_subdirectory(tests)
add_subdirectory(tests-manual)
if (BUILD_MANUAL_TESTS)
add_subdirectory(tests-manual)
endif()
endif()

if (BUILD_INSTALLER)
LICENSE (2 changed lines)

@@ -1,4 +1,4 @@
Copyright (c) 2013-2016, High Fidelity, Inc.
Copyright (c) 2013-2018, High Fidelity, Inc.
All rights reserved.
licensing@highfidelity.io
VCPKG.md (new file, 75 lines)

@@ -0,0 +1,75 @@
[VCPKG](https://github.com/Microsoft/vcpkg) is an open source package management system created by Microsoft, initially just for Windows-based systems, but eventually extended to cover Linux and OSX as well, and in theory extensible enough to cover additional operating systems.

VCPKG is now our primary mechanism for managing the external libraries and tools on which we rely to build our applications.

Conventional usage of VCPKG involves cloning the repository, running the bootstrapping script to build the vcpkg binary, and then calling the binary to install a set of libraries. The libraries themselves are specified by a set of port files inside the [repository](https://github.com/Microsoft/vcpkg/tree/master/ports).
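As a minimal sketch of that conventional flow (upstream vcpkg, with zlib only as an example port):

```bash
# Conventional upstream vcpkg usage, for comparison with the customized flow described below.
git clone https://github.com/Microsoft/vcpkg.git
cd vcpkg
./bootstrap-vcpkg.sh      # bootstrap-vcpkg.bat on Windows
./vcpkg install zlib      # build and install a port defined under ports/
```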
Because the main VCPKG repository does not contain all the ports we want, and because we want to be able to manage the precise versions of our dependencies, rather than allow it to be outside of our control, instead of using the main vcpkg repository, we use a combination of a [fork](https://github.com/highfidelity/vcpkg) of the repository (which allows us to customize the vcpkg binary, currently necessary to deal with some out of date tools on our build hosts) and a set of [custom port files](./cmake/ports) stored in our own repository.

## Adding new packages to vcpkg

Note... Android vcpkg usage is still experimental. Contact Austin for more detailed information if you need to add a new package for use by Android.

### Setup development environment

In order to add new packages, you will need to set up an environment for testing, as sketched below. This assumes you already have the tools for normal Hifi development (git, cmake, a working C++ compiler, etc).

* Clone our vcpkg [fork](https://github.com/highfidelity/vcpkg)
* Remove the ports directory from the checkout and symlink to our own [custom port files](./cmake/ports)
* Bootstrap the vcpkg binary with the `bootstrap-vcpkg.sh` or `bootstrap-vcpkg.bat` script
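A rough command-line version of those steps on a Unix-like host (the side-by-side checkout layout is an assumption; adjust the paths to wherever your clones live):

```bash
# Sketch: make a vcpkg working copy that uses the port files from this repository.
git clone https://github.com/highfidelity/vcpkg.git
git clone https://github.com/highfidelity/hifi.git   # assumed location of ./cmake/ports
cd vcpkg
rm -rf ports                                         # drop the stock ports directory
ln -s ../hifi/cmake/ports ports                      # symlink to our custom port files
./bootstrap-vcpkg.sh                                 # bootstrap-vcpkg.bat on Windows
```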
### Add a new port skeleton

Your new package will require, at minimum, a `CONTROL` file and a `portfile.cmake` file, located in a subdirectory of the ports folder. Assuming you're creating a new dependency named `foo` it should be located in `ports/foo` under the vcpkg directory. The `CONTROL` file will contain a small number of fields, such as the name, version, description and any other vcpkg ports on which you depend. The `portfile.cmake` is a CMake script that will instruct vcpkg how to build the packages. We'll cover that in more depth in a moment. For now, just create one and leave it blank.
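A bare skeleton for the hypothetical `foo` port could be created like this (all field values are placeholders, not taken from the diff):

```bash
# Create the minimal files vcpkg expects for a new port named "foo".
mkdir -p ports/foo
cat > ports/foo/CONTROL <<'EOF'
Source: foo
Version: 1.0.0
Description: Placeholder description for the foo dependency
EOF
touch ports/foo/portfile.cmake   # intentionally left blank for now
```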
### Add a reference to your package to one or more of the hifi meta-packages

We have three meta-packages used to do our building. When you modify one of these packages, make sure to bump the version number in the `CONTROL` file for the package.

#### hifi-deps

This metapackage contains anything required for building the server or shared components. For instance, the `glm`, `tbb` and `zlib` packages are declared here because they're used everywhere, not just in our client application code.

#### hifi-client-deps

This metapackage contains anything required for building the client. For example, `sdl2` is listed here because it's required for our joystick input, but not for the server or shared components. Note that `hifi-client-deps` depends on `hifi-deps`, so you don't have to declare something twice if it's used in both the server and client. Just declare it in `hifi-deps` and it will still be included transitively.

#### hifi-host-tools

This metapackage contains anything we use to create executables that will then be used in the build process. The `hifi-deps` and `hifi-client-deps` packages are built for the target architecture, which may be different than the host architecture (for instance, when building for Android). The `hifi-host-tools` packages are always built for the host architecture, because they're tools that are intended to be run as part of the build process. Scribe for example is used at build time to generate shaders. Building an arm64 version of Scribe is useless because we need to run it on the host machine.

Note that packages can appear in both the `hifi-host-tools` and one of the other metapackages, indicating that the package both contains a library which we will use at runtime, and a tool which we will use at build time. The `spirv-tools` package is an example.

### Implement the portfile.cmake

How the portfile is written depends on what kind of package you're working with. It's basically still a CMake script, but there are a number of [functions](https://vcpkg.readthedocs.io/en/latest/maintainers/portfile-functions/) available to make fetching and building packages easier.

Typically there are three areas you need to deal with:

* Getting the source
* Building the source
* Installing the artifacts

#### Getting sources

Getting sources from github, gitlab or bitbucket is easy. There are special functions specifically for those. See the [etc2comp portfile](./cmake/ports/etc2comp/portfile.cmake) for an example of fetching source via github.

If the project isn't available that way, you can still use the [vcpkg_download_distfile](https://vcpkg.readthedocs.io/en/latest/maintainers/vcpkg_download_distfile/) function to explicitly download an archive and then use [vcpkg_extract_source_archive](https://vcpkg.readthedocs.io/en/latest/maintainers/vcpkg_extract_source_archive/) to unpack it. See the [zlib portfile](./cmake/ports/zlib/portfile.cmake) for an example there.

#### Building

If your package uses CMake, you'll be able to use the [vcpkg_configure_cmake](https://vcpkg.readthedocs.io/en/latest/maintainers/vcpkg_configure_cmake/) and [vcpkg_build_cmake](https://vcpkg.readthedocs.io/en/latest/maintainers/vcpkg_build_cmake/) commands to configure and build the package. If you're going to be relying on the CMake installation functionality, you can just call [vcpkg_install_cmake](https://vcpkg.readthedocs.io/en/latest/maintainers/vcpkg_install_cmake/), since it will implicitly run the build before the install.

If your package is not binary, but doesn't use CMake, you're just going to have to figure it out.

If your package is binary, then you can just skip this step.

#### Installing

Once you've built the package, you need to install the artifacts in the target directory. Ideally, your package's CMake INSTALL commands will do the right thing. However, there are usually some things you have to do manually. Since VCPKG will build both the release and debug versions for all packages, you need to make sure, if your package installed headers, that you remove the _debug_ versions of these headers. This is typically done with `file(REMOVE_RECURSE ${CURRENT_PACKAGES_DIR}/debug/include)`. Additionally, if your package creates any standalone executables, you need to make sure they're installed in the destination `tools` directory, not the `bin` or `lib` directories, which are specifically for shared library binaries (like .so or .dll files) and link library files (like .a or .lib files) respectively.

If you're dealing with a binary package, then you'll need to explicitly perform all the required copies from the location where you extracted the archive to the installation directory. An example of this is available in the [openssl-android portfile](./cmake/ports/openssl-android/portfile.cmake).

### Commit and test

Once you've tested building your new package locally, you'll need to commit and push the changes and additions to the portfiles you've made and then monitor the build hosts to verify that the new package successfully built on all the target environments.
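A sketch of one way to exercise the new port locally before pushing (the `foo` name is hypothetical; run from the root of the vcpkg checkout):

```bash
# Rebuild the port from the local port files and inspect what it installed.
./vcpkg remove foo --recurse   # clear any earlier attempt at the port
./vcpkg install foo            # build it again from ports/foo
ls installed                   # headers, libs and tools land under installed/<triplet>/
```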
android/app/CMakeLists.txt

@@ -1,12 +1,14 @@
set(TARGET_NAME native-lib)
setup_hifi_library()
link_hifi_libraries(shared task networking gl gpu qml image fbx render-utils physics entities octree ${PLATFORM_GL_BACKEND})
link_hifi_libraries(shared task networking gl gpu qml image fbx hfm render-utils physics entities octree ${PLATFORM_GL_BACKEND})
target_opengl()
target_bullet()

set(INTERFACE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../interface")
add_subdirectory("${INTERFACE_DIR}" "libraries/interface")
include_directories("${INTERFACE_DIR}/src")
set(HIFI_CODEC_PLUGIN_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../plugins/hifiCodec")
add_subdirectory("${HIFI_CODEC_PLUGIN_DIR}" "libraries/hifiCodecPlugin")

target_link_libraries(native-lib android log m interface)
android/app/build.gradle

@@ -23,8 +23,6 @@ android {
'-DANDROID_TOOLCHAIN=clang',
'-DANDROID_STL=c++_shared',
'-DQT_CMAKE_PREFIX_PATH=' + HIFI_ANDROID_PRECOMPILED + '/qt/lib/cmake',
'-DNATIVE_SCRIBE=' + HIFI_ANDROID_PRECOMPILED + '/scribe' + EXEC_SUFFIX,
'-DNATIVE_SHREFLECT=' + HIFI_ANDROID_PRECOMPILED + '/shreflect' + EXEC_SUFFIX,
'-DHIFI_ANDROID_PRECOMPILED=' + HIFI_ANDROID_PRECOMPILED,
'-DRELEASE_NUMBER=' + RELEASE_NUMBER,
'-DRELEASE_TYPE=' + RELEASE_TYPE,

@@ -53,6 +51,9 @@ android {
debug {
buildConfigField "String", "BACKTRACE_URL", "\"" + (System.getenv("CMAKE_BACKTRACE_URL") ? System.getenv("CMAKE_BACKTRACE_URL") : '') + "\""
buildConfigField "String", "BACKTRACE_TOKEN", "\"" + (System.getenv("CMAKE_BACKTRACE_TOKEN") ? System.getenv("CMAKE_BACKTRACE_TOKEN") : '') + "\""
buildConfigField "String", "OAUTH_CLIENT_ID", "\"" + (System.getenv("OAUTH_CLIENT_ID") ? System.getenv("OAUTH_CLIENT_ID") : '') + "\""
buildConfigField "String", "OAUTH_CLIENT_SECRET", "\"" + (System.getenv("OAUTH_CLIENT_SECRET") ? System.getenv("OAUTH_CLIENT_SECRET") : '') + "\""
buildConfigField "String", "OAUTH_REDIRECT_URI", "\"" + (System.getenv("OAUTH_REDIRECT_URI") ? System.getenv("OAUTH_REDIRECT_URI") : '') + "\""
}
release {
minifyEnabled false

@@ -63,6 +64,9 @@ android {
project.hasProperty("HIFI_ANDROID_KEY_PASSWORD")? signingConfigs.release : null
buildConfigField "String", "BACKTRACE_URL", "\"" + (System.getenv("CMAKE_BACKTRACE_URL") ? System.getenv("CMAKE_BACKTRACE_URL") : '') + "\""
buildConfigField "String", "BACKTRACE_TOKEN", "\"" + (System.getenv("CMAKE_BACKTRACE_TOKEN") ? System.getenv("CMAKE_BACKTRACE_TOKEN") : '') + "\""
buildConfigField "String", "OAUTH_CLIENT_ID", "\"" + (System.getenv("OAUTH_CLIENT_ID") ? System.getenv("OAUTH_CLIENT_ID") : '') + "\""
buildConfigField "String", "OAUTH_CLIENT_SECRET", "\"" + (System.getenv("OAUTH_CLIENT_SECRET") ? System.getenv("OAUTH_CLIENT_SECRET") : '') + "\""
buildConfigField "String", "OAUTH_REDIRECT_URI", "\"" + (System.getenv("OAUTH_REDIRECT_URI") ? System.getenv("OAUTH_REDIRECT_URI") : '') + "\""
}
}

@@ -80,8 +84,10 @@ android {
if (Os.isFamily(Os.FAMILY_UNIX)) {
def uploadDumpSymsTask = rootProject.getTasksByName("uploadBreakpadDumpSyms${variant.name.capitalize()}", false).first()
def runDumpSymsTask = rootProject.getTasksByName("runBreakpadDumpSyms${variant.name.capitalize()}", false).first()
def renameHifiACTask = rootProject.getTasksByName("renameHifiACTask${variant.name.capitalize()}", false).first()
runDumpSymsTask.dependsOn(task)
variant.assemble.dependsOn(uploadDumpSymsTask)
variant.mergeResources.dependsOn(renameHifiACTask)
}
}

@@ -133,6 +139,10 @@ dependencies {
implementation 'com.android.support.constraint:constraint-layout:1.0.2'
implementation 'com.android.support:design:26.1.0'
compile 'com.android.support:support-v4:26.1.0'
compile 'com.android.support:appcompat-v7:26.1.0'
compile 'com.android.support:support-vector-drawable:26.1.0'

implementation 'com.android.support:appcompat-v7:26.1.0'
compile 'com.android.support:recyclerview-v7:26.1.0'
compile 'com.android.support:cardview-v7:26.1.0'
android/app/src/main/AndroidManifest.xml

@@ -9,6 +9,7 @@
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-feature android:name="android.hardware.sensor.accelerometer" android:required="true"/>
<uses-feature android:name="android.hardware.sensor.gyroscope" android:required="true"/>

@@ -70,11 +71,24 @@
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.Translucent.NoActionBar" />

<activity android:name=".LoginMenuActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.Translucent.NoActionBar" />

<service
android:name=".BreakpadUploaderService"
android:enabled="true"
android:exported="false"
android:process=":breakpad_uploader"/>

<receiver
android:name=".receiver.HeadsetStateReceiver"
android:enabled="true"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.HEADSET_PLUG" />
</intent-filter>
</receiver>
</application>

<uses-feature android:name="android.software.vr.mode" android:required="true"/>
android/app/src/main/cpp/native-lib.cpp

@@ -24,8 +24,12 @@
#include <udt/PacketHeaders.h>
#include <SettingHandle.h>

#define AUTO_LOGOUT_SETTING_NAME "wallet/autoLogout"
#define WALLET_USERNAME_SETTING_NAME "wallet/savedUsername"

QAndroidJniObject __interfaceActivity;
QAndroidJniObject __loginCompletedListener;
QAndroidJniObject __signupCompletedListener;
QAndroidJniObject __loadCompleteListener;
QAndroidJniObject __usernameChangedListener;
void tempMessageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {

@@ -156,7 +160,7 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnCrea
JavaVM* jvm;
env->GetJavaVM(&jvm);

QObject::connect(&AndroidHelper::instance(), &AndroidHelper::androidActivityRequested, [jvm](const QString& a, const bool backToScene, QList<QString> args) {
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::androidActivityRequested, [jvm](const QString& a, const bool backToScene, QMap<QString, QString> args) {
JNIEnv* myNewEnv;
JavaVMAttachArgs jvmArgs;
jvmArgs.version = JNI_VERSION_1_6; // choose your JNI version

@@ -182,9 +186,11 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnCrea
jmethodID mapClassConstructor = myNewEnv->GetMethodID(hashMapClass, "<init>", "()V");
jobject hashmap = myNewEnv->NewObject(hashMapClass, mapClassConstructor);
jmethodID mapClassPut = myNewEnv->GetMethodID(hashMapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
for (const QString& arg: args) {
QAndroidJniObject jArg = QAndroidJniObject::fromString(arg);
myNewEnv->CallObjectMethod(hashmap, mapClassPut, QAndroidJniObject::fromString("url").object<jstring>(), jArg.object<jstring>());
QMap<QString, QString>::iterator i;
for (i = args.begin(); i != args.end(); ++i) {
QAndroidJniObject jKey = QAndroidJniObject::fromString(i.key());
QAndroidJniObject jValue = QAndroidJniObject::fromString(i.value());
myNewEnv->CallObjectMethod(hashmap, mapClassPut, jKey.object<jstring>(), jValue.object<jstring>());
}
__interfaceActivity.callMethod<void>("openAndroidActivity", "(Ljava/lang/String;ZLjava/util/HashMap;)V", string.object<jstring>(), jBackToScene, hashmap);
if (attachedHere) {

@@ -207,11 +213,13 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnDest
JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeGotoUrl(JNIEnv* env, jobject obj, jstring url) {
QAndroidJniObject jniUrl("java/lang/String", "(Ljava/lang/String;)V", url);
DependencyManager::get<AddressManager>()->loadSettings(jniUrl.toString());
AndroidHelper::instance().muteMic();
}

JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeGoToUser(JNIEnv* env, jobject obj, jstring username) {
QAndroidJniObject jniUsername("java/lang/String", "(Ljava/lang/String;)V", username);
DependencyManager::get<AddressManager>()->goToUser(jniUsername.toString(), false);
AndroidHelper::instance().muteMic();
}

JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnPause(JNIEnv* env, jobject obj) {

@@ -256,9 +264,75 @@ JNIEXPORT jstring JNICALL Java_io_highfidelity_hifiinterface_fragment_HomeFragme
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *env, jobject instance,
Java_io_highfidelity_hifiinterface_HifiUtils_updateHifiSetting(JNIEnv *env, jobject instance,
jstring group_, jstring key_,
jboolean value_) {
const char *c_group = env->GetStringUTFChars(group_, 0);
const char *c_key = env->GetStringUTFChars(key_, 0);

const QString group = QString::fromUtf8(c_group);
const QString key = QString::fromUtf8(c_key);

env->ReleaseStringUTFChars(group_, c_group);
env->ReleaseStringUTFChars(key_, c_key);

bool value = value_;

Setting::Handle<bool> setting { QStringList() << group << key , !value };
setting.set(value);
}

JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_HifiUtils_getHifiSettingBoolean(JNIEnv *env,
jobject instance,
jstring group_,
jstring key_,
jboolean defaultValue) {
const char *c_group = env->GetStringUTFChars(group_, 0);
const char *c_key = env->GetStringUTFChars(key_, 0);

const QString group = QString::fromUtf8(c_group);
const QString key = QString::fromUtf8(c_key);

env->ReleaseStringUTFChars(group_, c_group);
env->ReleaseStringUTFChars(key_, c_key);

Setting::Handle<bool> setting { QStringList() << group << key , defaultValue};
return setting.get();
}

JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_HifiUtils_isUserLoggedIn(JNIEnv *env, jobject instance) {
return DependencyManager::get<AccountManager>()->isLoggedIn();
}

JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_HifiUtils_isKeepingLoggedIn(JNIEnv *env, jobject instance) {
Setting::Handle<bool> setting(AUTO_LOGOUT_SETTING_NAME, true);
return !setting.get();
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_cancelLogin(JNIEnv *env, jobject instance) {

auto accountManager = DependencyManager::get<AccountManager>();

QObject::disconnect(accountManager.data(), &AccountManager::loginComplete, nullptr, nullptr);
QObject::disconnect(accountManager.data(), &AccountManager::loginFailed, nullptr, nullptr);

}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_cancelLogin(JNIEnv *env,
jobject instance) {

Java_io_highfidelity_hifiinterface_fragment_LoginFragment_cancelLogin(env, instance);
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_login(JNIEnv *env, jobject instance,
jstring username_, jstring password_,
jobject usernameChangedListener) {
jboolean keepLoggedIn) {
const char *c_username = env->GetStringUTFChars(username_, 0);
const char *c_password = env->GetStringUTFChars(password_, 0);
QString username = QString(c_username);

@@ -269,31 +343,131 @@ Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *en
auto accountManager = DependencyManager::get<AccountManager>();

__loginCompletedListener = QAndroidJniObject(instance);
__usernameChangedListener = QAndroidJniObject(usernameChangedListener);

QObject::connect(accountManager.data(), &AccountManager::loginComplete, [](const QUrl& authURL) {
QObject::connect(accountManager.data(), &AccountManager::loginComplete, [username, keepLoggedIn](const QUrl& authURL) {
jboolean jSuccess = (jboolean) true;
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
Setting::Handle<QVariant>(AUTO_LOGOUT_SETTING_NAME).set(!keepLoggedIn);
QString usernameToSave = keepLoggedIn ? username : "";
Setting::Handle<QVariant>(WALLET_USERNAME_SETTING_NAME).set(usernameToSave);
});

QObject::connect(accountManager.data(), &AccountManager::loginFailed, []() {
jboolean jSuccess = (jboolean) false;
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
});

QObject::connect(accountManager.data(), &AccountManager::usernameChanged, [](const QString& username) {
QAndroidJniObject string = QAndroidJniObject::fromString(username);
__usernameChangedListener.callMethod<void>("handleUsernameChanged", "(Ljava/lang/String;)V", string.object<jstring>());
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});

QMetaObject::invokeMethod(accountManager.data(), "requestAccessToken",
Q_ARG(const QString&, username), Q_ARG(const QString&, password));
}

JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_fragment_FriendsFragment_nativeIsLoggedIn(JNIEnv *env, jobject instance) {
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_retrieveAccessToken(JNIEnv *env,
jobject instance,
jstring authCode_,
jstring clientId_,
jstring clientSecret_,
jstring redirectUri_) {
const char *c_authCode = env->GetStringUTFChars(authCode_, 0);
const char *c_clientId = env->GetStringUTFChars(clientId_, 0);
const char *c_clientSecret = env->GetStringUTFChars(clientSecret_, 0);
const char *c_redirectUri = env->GetStringUTFChars(redirectUri_, 0);

QString authCode = QString(c_authCode);
QString clientId = QString(c_clientId);
QString clientSecret = QString(c_clientSecret);
QString redirectUri = QString(c_redirectUri);

env->ReleaseStringUTFChars(authCode_, c_authCode);
env->ReleaseStringUTFChars(clientId_, c_clientId);
env->ReleaseStringUTFChars(clientSecret_, c_clientSecret);
env->ReleaseStringUTFChars(redirectUri_, c_redirectUri);

auto accountManager = DependencyManager::get<AccountManager>();
return accountManager->isLoggedIn();

__loginCompletedListener = QAndroidJniObject(instance); // TODO: use a different listener?

QObject::connect(accountManager.data(), &AccountManager::loginComplete, [](const QUrl& authURL) {
jboolean jSuccess = (jboolean) true;
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});

QObject::connect(accountManager.data(), &AccountManager::loginFailed, []() {
jboolean jSuccess = (jboolean) false;
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});

QMetaObject::invokeMethod(accountManager.data(), "requestAccessTokenWithAuthCode",
Q_ARG(const QString&, authCode), Q_ARG(const QString&, clientId),
Q_ARG(const QString&, clientSecret), Q_ARG(const QString&, redirectUri));

}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_login(JNIEnv *env,
jobject instance,
jstring username_,
jstring password_,
jboolean keepLoggedIn) {
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_login(env, instance, username_, password_, keepLoggedIn);
}

JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeInitAfterAppLoaded(JNIEnv* env, jobject obj) {
AndroidHelper::instance().moveToThread(qApp->thread());
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_signup(JNIEnv *env, jobject instance,
jstring email_, jstring username_,
jstring password_) {

const char *c_email = env->GetStringUTFChars(email_, 0);
const char *c_username = env->GetStringUTFChars(username_, 0);
const char *c_password = env->GetStringUTFChars(password_, 0);
QString email = QString(c_email);
QString username = QString(c_username);
QString password = QString(c_password);
env->ReleaseStringUTFChars(email_, c_email);
env->ReleaseStringUTFChars(username_, c_username);
env->ReleaseStringUTFChars(password_, c_password);

__signupCompletedListener = QAndroidJniObject(instance);

// disconnect any previous callback
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);

QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, []() {
jboolean jSuccess = (jboolean) true;
if (__signupCompletedListener.isValid()) {
__signupCompletedListener.callMethod<void>("handleSignupCompleted", "()V", jSuccess);
}
});

QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, [](QString errorString) {
if (__signupCompletedListener.isValid()) {
QAndroidJniObject string = QAndroidJniObject::fromString(errorString);
__signupCompletedListener.callMethod<void>("handleSignupFailed", "(Ljava/lang/String;)V", string.object<jstring>());
}
});

AndroidHelper::instance().signup(email, username, password);
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_cancelSignup(JNIEnv *env, jobject instance) {
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);

__signupCompletedListener = nullptr;
}

JNIEXPORT jstring JNICALL

@@ -318,23 +492,40 @@ Java_io_highfidelity_hifiinterface_SplashActivity_registerLoadCompleteListener(J
});

}
JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_MainActivity_nativeIsLoggedIn(JNIEnv *env, jobject instance) {
return DependencyManager::get<AccountManager>()->isLoggedIn();
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_MainActivity_nativeLogout(JNIEnv *env, jobject instance) {
Java_io_highfidelity_hifiinterface_MainActivity_logout(JNIEnv *env, jobject instance) {
DependencyManager::get<AccountManager>()->logout();
}

JNIEXPORT jstring JNICALL
Java_io_highfidelity_hifiinterface_MainActivity_nativeGetDisplayName(JNIEnv *env,
Java_io_highfidelity_hifiinterface_MainActivity_getUsername(JNIEnv *env,
jobject instance) {
QString username = DependencyManager::get<AccountManager>()->getAccountInfo().getUsername();
return env->NewStringUTF(username.toLatin1().data());
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_MainActivity_setUsernameChangedListener(JNIEnv *env,
jobject instance,
jobject usernameChangedListener) {
__usernameChangedListener = QAndroidJniObject(usernameChangedListener);

if (!__usernameChangedListener.isValid()) {
return;
}

auto accountManager = DependencyManager::get<AccountManager>();

QObject::connect(accountManager.data(), &AccountManager::usernameChanged, [](const QString& username) {
QAndroidJniObject string = QAndroidJniObject::fromString(username);
if (__usernameChangedListener.isValid()) {
__usernameChangedListener.callMethod<void>("handleUsernameChanged", "(Ljava/lang/String;)V", string.object<jstring>());
}
});

}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeBeforeEnterBackground(JNIEnv *env, jobject obj) {
AndroidHelper::instance().notifyBeforeEnterBackground();

@@ -355,5 +546,11 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_WebViewActivity_nativeProcessU
AndroidHelper::instance().processURL(QString::fromUtf8(nativeString));
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_receiver_HeadsetStateReceiver_notifyHeadsetOn(JNIEnv *env,
jobject instance,
jboolean pluggedIn) {
AndroidHelper::instance().notifyHeadsetOn(pluggedIn);
}

}
android/app/src/main/java/io/highfidelity/hifiinterface/HifiUtils.java

@@ -64,4 +64,10 @@ public class HifiUtils {
public native String protocolVersionSignature();

public native boolean isUserLoggedIn();

public native void updateHifiSetting(String group, String key, boolean value);
public native boolean getHifiSettingBoolean(String group, String key, boolean defaultValue);

public native boolean isKeepingLoggedIn();
}
@ -13,6 +13,7 @@ package io.highfidelity.hifiinterface;
|
|||
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.content.IntentFilter;
|
||||
import android.content.pm.ActivityInfo;
|
||||
import android.content.pm.PackageInfo;
|
||||
import android.content.pm.PackageManager;
|
||||
|
@ -38,8 +39,10 @@ import java.lang.reflect.Field;
|
|||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import io.highfidelity.hifiinterface.fragment.WebViewFragment;
|
||||
import io.highfidelity.hifiinterface.receiver.HeadsetStateReceiver;
|
||||
|
||||
/*import com.google.vr.cardboard.DisplaySynchronizer;
|
||||
import com.google.vr.cardboard.DisplayUtils;
|
||||
|
@ -55,6 +58,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
private static final int NORMAL_DPI = 160;
|
||||
|
||||
private Vibrator mVibrator;
|
||||
private HeadsetStateReceiver headsetStateReceiver;
|
||||
|
||||
//public static native void handleHifiURL(String hifiURLString);
|
||||
private native long nativeOnCreate(InterfaceActivity instance, AssetManager assetManager);
|
||||
|
@ -65,13 +69,14 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
private native void nativeEnterBackground();
|
||||
private native void nativeEnterForeground();
|
||||
private native long nativeOnExitVr();
|
||||
private native void nativeInitAfterAppLoaded();
|
||||
|
||||
private AssetManager assetManager;
|
||||
|
||||
private static boolean inVrMode;
|
||||
|
||||
private boolean nativeEnterBackgroundCallEnqueued = false;
|
||||
private SlidingDrawer webSlidingDrawer;
|
||||
private SlidingDrawer mWebSlidingDrawer;
|
||||
// private GvrApi gvrApi;
|
||||
// Opaque native pointer to the Application C++ object.
|
||||
// This object is owned by the InterfaceActivity instance and passed to the native methods.
|
||||
|
@ -111,17 +116,6 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
//nativeGvrApi =
|
||||
nativeOnCreate(this, assetManager /*, gvrApi.getNativeGvrContext()*/);
|
||||
|
||||
Point size = new Point();
|
||||
getWindowManager().getDefaultDisplay().getRealSize(size);
|
||||
|
||||
try {
|
||||
PackageInfo pInfo = this.getPackageManager().getPackageInfo(getPackageName(), 0);
|
||||
String version = pInfo.versionName;
|
||||
// setAppVersion(version);
|
||||
} catch (PackageManager.NameNotFoundException e) {
|
||||
Log.e("GVR", "Error getting application version", e);
|
||||
}
|
||||
|
||||
final View rootView = getWindow().getDecorView().findViewById(android.R.id.content);
|
||||
|
||||
// This is a workaround to hide the menu bar when the virtual keyboard is shown from Qt
|
||||
|
@ -132,25 +126,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
});
|
||||
startActivity(new Intent(this, SplashActivity.class));
|
||||
mVibrator = (Vibrator) this.getSystemService(VIBRATOR_SERVICE);
|
||||
|
||||
FrameLayout mainLayout = findViewById(android.R.id.content);
|
||||
LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
|
||||
webSlidingDrawer = (SlidingDrawer) inflater.inflate(R.layout.web_drawer, mainLayout, false);
|
||||
QtLayout qtLayout = (QtLayout) mainLayout.getChildAt(0);
|
||||
QtLayout.LayoutParams layoutParams = new QtLayout.LayoutParams(webSlidingDrawer.getLayoutParams());
|
||||
webSlidingDrawer.setOnDrawerCloseListener(() -> {
|
||||
WebViewFragment webViewFragment = (WebViewFragment) getFragmentManager().findFragmentByTag("webViewFragment");
|
||||
webViewFragment.close();
|
||||
});
|
||||
int widthPx = Math.max(size.x, size.y);
|
||||
int heightPx = Math.min(size.x, size.y);
|
||||
|
||||
layoutParams.x = (int) (widthPx - WEB_DRAWER_RIGHT_MARGIN * getResources().getDisplayMetrics().xdpi / NORMAL_DPI);
|
||||
layoutParams.y = (int) (heightPx - WEB_DRAWER_BOTTOM_MARGIN * getResources().getDisplayMetrics().ydpi / NORMAL_DPI);
|
||||
|
||||
layoutParams.resolveLayoutDirection(View.LAYOUT_DIRECTION_RTL);
|
||||
qtLayout.addView(webSlidingDrawer, layoutParams);
|
||||
webSlidingDrawer.setVisibility(View.GONE);
|
||||
headsetStateReceiver = new HeadsetStateReceiver();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -161,6 +137,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
} else {
|
||||
nativeEnterBackground();
|
||||
}
|
||||
unregisterReceiver(headsetStateReceiver);
|
||||
//gvrApi.pauseTracking();
|
||||
}
|
||||
|
||||
|
@ -183,6 +160,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
nativeEnterForeground();
|
||||
surfacesWorkaround();
|
||||
keepInterfaceRunning = false;
|
||||
registerReceiver(headsetStateReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
|
||||
//gvrApi.resumeTracking();
|
||||
}
|
||||
|
||||
|
@ -280,14 +258,47 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
protected void onNewIntent(Intent intent) {
|
||||
super.onNewIntent(intent);
|
||||
if (intent.hasExtra(DOMAIN_URL)) {
|
||||
webSlidingDrawer.setVisibility(View.GONE);
|
||||
hideWebDrawer();
|
||||
nativeGotoUrl(intent.getStringExtra(DOMAIN_URL));
|
||||
} else if (intent.hasExtra(EXTRA_GOTO_USERNAME)) {
|
||||
webSlidingDrawer.setVisibility(View.GONE);
|
||||
hideWebDrawer();
|
||||
nativeGoToUser(intent.getStringExtra(EXTRA_GOTO_USERNAME));
|
||||
}
|
||||
}
|
||||
|
||||
private void hideWebDrawer() {
|
||||
if (mWebSlidingDrawer != null) {
|
||||
mWebSlidingDrawer.setVisibility(View.GONE);
|
||||
}
|
||||
}
|
||||
|
||||
public void showWebDrawer() {
|
||||
if (mWebSlidingDrawer == null) {
|
||||
FrameLayout mainLayout = findViewById(android.R.id.content);
|
||||
LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
|
||||
QtLayout qtLayout = (QtLayout) mainLayout.getChildAt(0);
|
||||
mWebSlidingDrawer = (SlidingDrawer) inflater.inflate(R.layout.web_drawer, mainLayout, false);
|
||||
|
||||
QtLayout.LayoutParams layoutParams = new QtLayout.LayoutParams(mWebSlidingDrawer.getLayoutParams());
|
||||
mWebSlidingDrawer.setOnDrawerCloseListener(() -> {
|
||||
WebViewFragment webViewFragment = (WebViewFragment) getFragmentManager().findFragmentByTag("webViewFragment");
|
||||
webViewFragment.close();
|
||||
});
|
||||
|
||||
Point size = new Point();
|
||||
getWindowManager().getDefaultDisplay().getRealSize(size);
|
||||
int widthPx = Math.max(size.x, size.y);
|
||||
int heightPx = Math.min(size.x, size.y);
|
||||
|
||||
layoutParams.x = (int) (widthPx - WEB_DRAWER_RIGHT_MARGIN * getResources().getDisplayMetrics().xdpi / NORMAL_DPI);
|
||||
layoutParams.y = (int) (heightPx - WEB_DRAWER_BOTTOM_MARGIN * getResources().getDisplayMetrics().ydpi / NORMAL_DPI);
|
||||
|
||||
layoutParams.resolveLayoutDirection(View.LAYOUT_DIRECTION_RTL);
|
||||
qtLayout.addView(mWebSlidingDrawer, layoutParams);
|
||||
}
|
||||
mWebSlidingDrawer.setVisibility(View.VISIBLE);
|
||||
}
|
||||
|
||||
public void openAndroidActivity(String activityName, boolean backToScene) {
|
||||
openAndroidActivity(activityName, backToScene, null);
|
||||
}
|
||||
|
@ -296,29 +307,37 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
switch (activityName) {
|
||||
case "Home":
|
||||
case "Privacy Policy":
|
||||
case "Login": {
|
||||
nativeBeforeEnterBackground();
|
||||
Intent intent = new Intent(this, MainActivity.class);
|
||||
intent.putExtra(MainActivity.EXTRA_FRAGMENT, activityName);
|
||||
intent.putExtra(MainActivity.EXTRA_BACK_TO_SCENE, backToScene);
|
||||
startActivity(intent);
|
||||
break;
|
||||
}
|
||||
case "Login":
|
||||
nativeBeforeEnterBackground();
|
||||
Intent loginIntent = new Intent(this, LoginMenuActivity.class);
|
||||
loginIntent.putExtra(LoginMenuActivity.EXTRA_BACK_TO_SCENE, backToScene);
|
||||
loginIntent.putExtra(LoginMenuActivity.EXTRA_BACK_ON_SKIP, true);
|
||||
if (args != null && args.containsKey(DOMAIN_URL)) {
|
||||
loginIntent.putExtra(LoginMenuActivity.EXTRA_DOMAIN_URL, (String) args.get(DOMAIN_URL));
|
||||
}
|
||||
startActivity(loginIntent);
|
||||
break;
|
||||
case "WebView":
|
||||
runOnUiThread(() -> {
|
||||
webSlidingDrawer.setVisibility(View.VISIBLE);
|
||||
if (!webSlidingDrawer.isOpened()) {
|
||||
webSlidingDrawer.animateOpen();
|
||||
showWebDrawer();
|
||||
if (!mWebSlidingDrawer.isOpened()) {
|
||||
mWebSlidingDrawer.animateOpen();
|
||||
}
|
||||
if (args != null && args.containsKey(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_URL)) {
|
||||
WebViewFragment webViewFragment = (WebViewFragment) getFragmentManager().findFragmentByTag("webViewFragment");
|
||||
webViewFragment.loadUrl((String) args.get(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_URL), true);
|
||||
webViewFragment.setToolbarVisible(true);
|
||||
webViewFragment.setCloseAction(() -> {
|
||||
if (webSlidingDrawer.isOpened()) {
|
||||
webSlidingDrawer.animateClose();
|
||||
if (mWebSlidingDrawer.isOpened()) {
|
||||
mWebSlidingDrawer.animateClose();
|
||||
}
|
||||
webSlidingDrawer.setVisibility(View.GONE);
|
||||
hideWebDrawer();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
@@ -335,6 +354,9 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
        if (nativeEnterBackgroundCallEnqueued) {
            nativeEnterBackground();
        }
        runOnUiThread(() -> {
            nativeInitAfterAppLoaded();
        });
    }

    public void performHapticFeedback(int duration) {
@@ -361,4 +383,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
    public void onExpand() {
        keepInterfaceRunning = true;
    }

    @Override
    public void onOAuthAuthorizeCallback(Uri uri) { }
}

@ -0,0 +1,210 @@
|
|||
package io.highfidelity.hifiinterface;
|
||||
|
||||
|
||||
import android.app.Fragment;
|
||||
import android.app.FragmentManager;
|
||||
import android.app.FragmentTransaction;
|
||||
import android.content.Intent;
|
||||
import android.os.Bundle;
|
||||
import android.support.v7.app.AppCompatActivity;
|
||||
import android.view.View;
|
||||
import io.highfidelity.hifiinterface.fragment.LoginFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.OnBackPressedListener;
|
||||
import io.highfidelity.hifiinterface.fragment.SignupFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.StartMenuFragment;
|
||||
|
||||
public class LoginMenuActivity extends AppCompatActivity
|
||||
implements StartMenuFragment.StartMenuInteractionListener,
|
||||
LoginFragment.OnLoginInteractionListener,
|
||||
SignupFragment.OnSignupInteractionListener {
|
||||
|
||||
/**
|
||||
* Set EXTRA_FINISH_ON_BACK to finish the app when back button is pressed
|
||||
*/
|
||||
public static final String EXTRA_FINISH_ON_BACK = "finishOnBack";
|
||||
|
||||
/**
|
||||
* Set EXTRA_BACK_TO_SCENE to back to the scene
|
||||
*/
|
||||
public static final String EXTRA_BACK_TO_SCENE = "backToScene";
|
||||
|
||||
/**
|
||||
* Set EXTRA_BACK_ON_SKIP to finish this activity when skip button is pressed
|
||||
*/
|
||||
public static final String EXTRA_BACK_ON_SKIP = "backOnSkip";
|
||||
|
||||
public static final String EXTRA_DOMAIN_URL = "url";
|
||||
|
||||
private boolean finishOnBack;
|
||||
private boolean backToScene;
|
||||
private boolean backOnSkip;
|
||||
private String domainUrlToBack;
|
||||
|
||||
|
||||
@Override
|
||||
protected void onCreate(Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
setContentView(R.layout.activity_encourage_login);
|
||||
|
||||
finishOnBack = getIntent().getBooleanExtra(EXTRA_FINISH_ON_BACK, false);
|
||||
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
|
||||
domainUrlToBack = getIntent().getStringExtra(EXTRA_DOMAIN_URL);
|
||||
backOnSkip = getIntent().getBooleanExtra(EXTRA_BACK_ON_SKIP, false);
|
||||
|
||||
if (savedInstanceState != null) {
|
||||
finishOnBack = savedInstanceState.getBoolean(EXTRA_FINISH_ON_BACK, false);
|
||||
backToScene = savedInstanceState.getBoolean(EXTRA_BACK_TO_SCENE, false);
|
||||
backOnSkip = savedInstanceState.getBoolean(EXTRA_BACK_ON_SKIP, false);
|
||||
domainUrlToBack = savedInstanceState.getString(EXTRA_DOMAIN_URL);
|
||||
}
|
||||
|
||||
loadMenuFragment();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onSaveInstanceState(Bundle outState) {
|
||||
super.onSaveInstanceState(outState);
|
||||
outState.putBoolean(EXTRA_FINISH_ON_BACK, finishOnBack);
|
||||
outState.putBoolean(EXTRA_BACK_TO_SCENE, backToScene);
|
||||
outState.putString(EXTRA_DOMAIN_URL, domainUrlToBack);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onRestoreInstanceState(Bundle savedInstanceState) {
|
||||
super.onRestoreInstanceState(savedInstanceState);
|
||||
finishOnBack = savedInstanceState.getBoolean(EXTRA_FINISH_ON_BACK, false);
|
||||
backToScene = savedInstanceState.getBoolean(EXTRA_BACK_TO_SCENE, false);
|
||||
backOnSkip = savedInstanceState.getBoolean(EXTRA_BACK_ON_SKIP, false);
|
||||
domainUrlToBack = savedInstanceState.getString(EXTRA_DOMAIN_URL);
|
||||
}
|
||||
|
||||
private void loadMenuFragment() {
|
||||
FragmentManager fragmentManager = getFragmentManager();
|
||||
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
|
||||
Fragment fragment = StartMenuFragment.newInstance();
|
||||
fragmentTransaction.replace(R.id.content_frame, fragment);
|
||||
fragmentTransaction.addToBackStack(fragment.toString());
|
||||
fragmentTransaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
|
||||
fragmentTransaction.commit();
|
||||
hideStatusBar();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onResume() {
|
||||
super.onResume();
|
||||
hideStatusBar();
|
||||
}
|
||||
|
||||
private void hideStatusBar() {
|
||||
View decorView = getWindow().getDecorView();
|
||||
// Hide the status bar.
|
||||
int uiOptions = View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN;
|
||||
decorView.setSystemUiVisibility(uiOptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSignupButtonClicked() {
|
||||
loadSignupFragment();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLoginButtonClicked() {
|
||||
loadLoginFragment(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSkipLoginClicked() {
|
||||
if (backOnSkip) {
|
||||
onBackPressed();
|
||||
} else {
|
||||
loadMainActivity();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSteamLoginButtonClicked() {
|
||||
loadLoginFragment(true);
|
||||
}
|
||||
|
||||
private void loadSignupFragment() {
|
||||
FragmentManager fragmentManager = getFragmentManager();
|
||||
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
|
||||
Fragment fragment = SignupFragment.newInstance();
|
||||
String tag = getString(R.string.tagFragmentSignup);
|
||||
fragmentTransaction.replace(R.id.content_frame, fragment, tag);
|
||||
fragmentTransaction.addToBackStack(tag);
|
||||
fragmentTransaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
|
||||
fragmentTransaction.commit();
|
||||
hideStatusBar();
|
||||
}
|
||||
|
||||
private void loadLoginFragment(boolean useOauth) {
|
||||
FragmentManager fragmentManager = getFragmentManager();
|
||||
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
|
||||
Fragment fragment = LoginFragment.newInstance(useOauth);
|
||||
String tag = getString(R.string.tagFragmentLogin);
|
||||
fragmentTransaction.replace(R.id.content_frame, fragment, tag);
|
||||
fragmentTransaction.addToBackStack(tag);
|
||||
fragmentTransaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
|
||||
fragmentTransaction.commit();
|
||||
hideStatusBar();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLoginCompleted() {
|
||||
loadMainActivity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCancelLogin() {
|
||||
getFragmentManager().popBackStack();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCancelSignup() {
|
||||
getFragmentManager().popBackStack();
|
||||
}
|
||||
|
||||
private void loadMainActivity() {
|
||||
finish();
|
||||
if (backToScene) {
|
||||
backToScene = false;
|
||||
goToDomain(domainUrlToBack != null? domainUrlToBack : "");
|
||||
} else {
|
||||
startActivity(new Intent(this, MainActivity.class));
|
||||
}
|
||||
}
|
||||
|
||||
private void goToDomain(String domainUrl) {
|
||||
Intent intent = new Intent(this, InterfaceActivity.class);
|
||||
intent.putExtra(InterfaceActivity.DOMAIN_URL, domainUrl);
|
||||
finish();
|
||||
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
|
||||
startActivity(intent);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void onSignupCompleted() {
|
||||
loadMainActivity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBackPressed() {
|
||||
FragmentManager fm = getFragmentManager();
|
||||
int index = fm.getBackStackEntryCount() - 1;
|
||||
if (index > 0) {
|
||||
FragmentManager.BackStackEntry backEntry = fm.getBackStackEntryAt(index);
|
||||
String tag = backEntry.getName();
|
||||
Fragment topFragment = getFragmentManager().findFragmentByTag(tag);
|
||||
if (!(topFragment instanceof OnBackPressedListener) ||
|
||||
!((OnBackPressedListener) topFragment).doBack()) {
|
||||
super.onBackPressed();
|
||||
}
|
||||
} else if (finishOnBack){
|
||||
finishAffinity();
|
||||
} else {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,5 +1,6 @@
|
|||
package io.highfidelity.hifiinterface;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.app.Fragment;
|
||||
import android.app.FragmentManager;
|
||||
import android.app.FragmentTransaction;
|
||||
|
@ -31,12 +32,12 @@ import com.squareup.picasso.Picasso;
|
|||
|
||||
import io.highfidelity.hifiinterface.fragment.FriendsFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.HomeFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.LoginFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.SettingsFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.SignupFragment;
|
||||
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
|
||||
|
||||
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
|
||||
LoginFragment.OnLoginInteractionListener,
|
||||
HomeFragment.OnHomeInteractionListener,
|
||||
FriendsFragment.OnHomeInteractionListener {
|
||||
|
||||
|
@ -44,12 +45,13 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
public static final String DEFAULT_FRAGMENT = "Home";
|
||||
public static final String EXTRA_FRAGMENT = "fragment";
|
||||
public static final String EXTRA_BACK_TO_SCENE = "backToScene";
|
||||
public static final String EXTRA_BACK_TO_URL = "url";
|
||||
|
||||
private String TAG = "HighFidelity";
|
||||
|
||||
public native boolean nativeIsLoggedIn();
|
||||
public native void nativeLogout();
|
||||
public native String nativeGetDisplayName();
|
||||
public native void logout();
|
||||
public native void setUsernameChangedListener(Activity usernameChangedListener);
|
||||
public native String getUsername();
|
||||
|
||||
private DrawerLayout mDrawerLayout;
|
||||
private NavigationView mNavigationView;
|
||||
|
@ -61,6 +63,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
private MenuItem mPeopleMenuItem;
|
||||
|
||||
private boolean backToScene;
|
||||
private String backToUrl;
|
||||
|
||||
@Override
|
||||
protected void onCreate(Bundle savedInstanceState) {
|
||||
|
@ -80,6 +83,8 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
|
||||
mPeopleMenuItem = mNavigationView.getMenu().findItem(R.id.action_people);
|
||||
|
||||
updateDebugMenu(mNavigationView.getMenu());
|
||||
|
||||
Toolbar toolbar = findViewById(R.id.toolbar);
|
||||
toolbar.setTitleTextAppearance(this, R.style.HomeActionBarTitleStyle);
|
||||
setSupportActionBar(toolbar);
|
||||
|
@ -102,17 +107,23 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
loadFragment(DEFAULT_FRAGMENT);
|
||||
}
|
||||
|
||||
if (getIntent().hasExtra(EXTRA_BACK_TO_SCENE)) {
|
||||
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
|
||||
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
|
||||
backToUrl = getIntent().getStringExtra(EXTRA_BACK_TO_URL);
|
||||
}
|
||||
}
|
||||
|
||||
private void updateDebugMenu(Menu menu) {
|
||||
if (BuildConfig.DEBUG) {
|
||||
for (int i=0; i < menu.size(); i++) {
|
||||
if (menu.getItem(i).getItemId() == R.id.action_debug_settings) {
|
||||
menu.getItem(i).setVisible(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void loadFragment(String fragment) {
|
||||
switch (fragment) {
|
||||
case "Login":
|
||||
loadLoginFragment();
|
||||
break;
|
||||
case "Home":
|
||||
loadHomeFragment(true);
|
||||
break;
|
||||
|
@ -130,28 +141,35 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
|
||||
private void loadHomeFragment(boolean addToBackStack) {
|
||||
Fragment fragment = HomeFragment.newInstance();
|
||||
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack);
|
||||
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack, true);
|
||||
}
|
||||
|
||||
private void loadLoginFragment() {
|
||||
Fragment fragment = LoginFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true);
|
||||
private void startLoginMenuActivity() {
|
||||
Intent intent = new Intent(this, LoginMenuActivity.class);
|
||||
intent.putExtra(LoginMenuActivity.EXTRA_BACK_ON_SKIP, true);
|
||||
startActivity(intent);
|
||||
}
|
||||
|
||||
private void loadPrivacyPolicyFragment() {
|
||||
Fragment fragment = PolicyFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true);
|
||||
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true, true);
|
||||
}
|
||||
|
||||
private void loadPeopleFragment() {
|
||||
Fragment fragment = FriendsFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
|
||||
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true, true);
|
||||
}
|
||||
|
||||
private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
|
||||
private void loadSettingsFragment() {
|
||||
SettingsFragment fragment = SettingsFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true, true);
|
||||
}
|
||||
|
||||
|
||||
private void loadFragment(Fragment newFragment, String title, String tag, boolean addToBackStack, boolean goBackUntilHome) {
|
||||
FragmentManager fragmentManager = getFragmentManager();
|
||||
|
||||
// check if it's the same fragment
|
||||
|
@ -163,17 +181,19 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
return; // cancel as we are already in that fragment
|
||||
}
|
||||
|
||||
// go back until first transaction
|
||||
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
|
||||
for (int i = 0; i < backStackEntryCount - 1; i++) {
|
||||
fragmentManager.popBackStackImmediate();
|
||||
if (goBackUntilHome) {
|
||||
// go back until first transaction
|
||||
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
|
||||
for (int i = 0; i < backStackEntryCount - 1; i++) {
|
||||
fragmentManager.popBackStackImmediate();
|
||||
}
|
||||
}
|
||||
|
||||
// this case is when we wanted to go home.. rollback already did that!
|
||||
// But asking for a new Home fragment makes it easier to have an updated list so we let it to continue
|
||||
|
||||
FragmentTransaction ft = fragmentManager.beginTransaction();
|
||||
ft.replace(R.id.content_frame, fragment, tag);
|
||||
ft.replace(R.id.content_frame, newFragment, tag);
|
||||
|
||||
if (addToBackStack) {
|
||||
ft.addToBackStack(title);
|
||||
|
@ -185,7 +205,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
|
||||
|
||||
private void updateLoginMenu() {
|
||||
if (nativeIsLoggedIn()) {
|
||||
if (HifiUtils.getInstance().isUserLoggedIn()) {
|
||||
mLoginPanel.setVisibility(View.GONE);
|
||||
mProfilePanel.setVisibility(View.VISIBLE);
|
||||
mLogoutOption.setVisibility(View.VISIBLE);
|
||||
|
@ -201,7 +221,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
}
|
||||
|
||||
private void updateProfileHeader() {
|
||||
updateProfileHeader(nativeGetDisplayName());
|
||||
updateProfileHeader(getUsername());
|
||||
}
|
||||
private void updateProfileHeader(String username) {
|
||||
if (!username.isEmpty()) {
|
||||
|
@ -241,6 +261,9 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
case R.id.action_people:
|
||||
loadPeopleFragment();
|
||||
return true;
|
||||
case R.id.action_debug_settings:
|
||||
loadSettingsFragment();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
@ -248,15 +271,22 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
@Override
|
||||
protected void onStart() {
|
||||
super.onStart();
|
||||
setUsernameChangedListener(this);
|
||||
updateLoginMenu();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onStop() {
|
||||
super.onStop();
|
||||
setUsernameChangedListener(null);
|
||||
}
|
||||
|
||||
public void onLoginClicked(View view) {
|
||||
loadLoginFragment();
|
||||
startLoginMenuActivity();
|
||||
}
|
||||
|
||||
public void onLogoutClicked(View view) {
|
||||
nativeLogout();
|
||||
logout();
|
||||
updateLoginMenu();
|
||||
exitLoggedInFragment();
|
||||
|
||||
|
@ -278,7 +308,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
}
|
||||
|
||||
private void goToLastLocation() {
|
||||
goToDomain("");
|
||||
goToDomain(backToUrl != null? backToUrl : "");
|
||||
}
|
||||
|
||||
private void goToDomain(String domainUrl) {
|
||||
|
@ -297,16 +327,6 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
startActivity(intent);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLoginCompleted() {
|
||||
loadHomeFragment(false);
|
||||
updateLoginMenu();
|
||||
if (backToScene) {
|
||||
backToScene = false;
|
||||
goToLastLocation();
|
||||
}
|
||||
}
|
||||
|
||||
public void handleUsernameChanged(String username) {
|
||||
runOnUiThread(() -> updateProfileHeader(username));
|
||||
}
|
||||
|
@ -351,7 +371,6 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
public void onBackPressed() {
|
||||
// if a fragment needs to internally manage back presses..
|
||||
FragmentManager fm = getFragmentManager();
|
||||
Log.d("[BACK]", "getBackStackEntryCount " + fm.getBackStackEntryCount());
|
||||
Fragment friendsFragment = fm.findFragmentByTag(getString(R.string.tagFragmentPeople));
|
||||
if (friendsFragment != null && friendsFragment instanceof FriendsFragment) {
|
||||
if (((FriendsFragment) friendsFragment).onBackPressed()) {
|
||||
|
|
|
@@ -3,7 +3,6 @@ package io.highfidelity.hifiinterface;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.view.View;

public class SplashActivity extends Activity {
@@ -37,7 +36,13 @@ public class SplashActivity extends Activity {
    }

    public void onAppLoadedComplete() {
        startActivity(new Intent(this, MainActivity.class));
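        // Route users without a session to the login menu first; logged-in users go straight to MainActivity.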
        if (HifiUtils.getInstance().isUserLoggedIn()) {
            startActivity(new Intent(this, MainActivity.class));
        } else {
            Intent menuIntent = new Intent(this, LoginMenuActivity.class);
            menuIntent.putExtra(LoginMenuActivity.EXTRA_FINISH_ON_BACK, true);
            startActivity(menuIntent);
        }
        SplashActivity.this.finish();
    }
}

@@ -28,11 +28,18 @@ import java.net.MalformedURLException;
import java.net.URL;

import io.highfidelity.hifiinterface.fragment.WebViewFragment;
import io.highfidelity.hifiinterface.fragment.WebViewFragment.OnWebViewInteractionListener;

public class WebViewActivity extends Activity implements WebViewFragment.OnWebViewInteractionListener {
public class WebViewActivity extends Activity implements OnWebViewInteractionListener {

    public static final String WEB_VIEW_ACTIVITY_EXTRA_URL = "url";
    public static final String WEB_VIEW_ACTIVITY_EXTRA_CLEAR_COOKIES = "clear_cookies";
    public static final String RESULT_OAUTH_CODE = "code";
    public static final String RESULT_OAUTH_STATE = "state";

    private static final String FRAGMENT_TAG = "WebViewActivity_WebFragment";
    private static final String OAUTH_CODE = "code";
    private static final String OAUTH_STATE = "state";

    private native void nativeProcessURL(String url);

@@ -47,14 +54,15 @@ public class WebViewActivity extends Activity implements WebViewFragment.OnWebVi
        mActionBar = getActionBar();
        mActionBar.setDisplayHomeAsUpEnabled(true);

        loadWebViewFragment(getIntent().getStringExtra(WEB_VIEW_ACTIVITY_EXTRA_URL));
        loadWebViewFragment(getIntent().getStringExtra(WEB_VIEW_ACTIVITY_EXTRA_URL), getIntent().getBooleanExtra(WEB_VIEW_ACTIVITY_EXTRA_CLEAR_COOKIES, false));
    }

    private void loadWebViewFragment(String url) {
    private void loadWebViewFragment(String url, boolean clearCookies) {
        WebViewFragment fragment = WebViewFragment.newInstance();
        Bundle bundle = new Bundle();
        bundle.putString(WebViewFragment.URL, url);
        bundle.putBoolean(WebViewFragment.TOOLBAR_VISIBLE, false);
        bundle.putBoolean(WebViewFragment.CLEAR_COOKIES, clearCookies);
        fragment.setArguments(bundle);
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction ft = fragmentManager.beginTransaction();
@@ -131,4 +139,13 @@ public class WebViewActivity extends Activity implements WebViewFragment.OnWebVi
    @Override
    public void onExpand() { }

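    // Hand the OAuth "code" and "state" query parameters back to the caller (e.g. LoginFragment) as the activity result.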
    @Override
    public void onOAuthAuthorizeCallback(Uri uri) {
        Intent result = new Intent();
        result.putExtra(RESULT_OAUTH_CODE, uri.getQueryParameter(OAUTH_CODE));
        result.putExtra(RESULT_OAUTH_STATE, uri.getQueryParameter(OAUTH_STATE));
        setResult(Activity.RESULT_OK, result);
        finish();
    }

}

@@ -23,8 +23,6 @@ import io.highfidelity.hifiinterface.view.UserListAdapter;

public class FriendsFragment extends Fragment {

    public native boolean nativeIsLoggedIn();

    public native String nativeGetAccessToken();

    private RecyclerView mUsersView;
@@ -98,13 +96,17 @@ public class FriendsFragment extends Fragment {

        mUsersAdapter.setListener(new UserListAdapter.AdapterListener() {
            @Override
            public void onEmptyAdapter() {
                mSwipeRefreshLayout.setRefreshing(false);
            public void onEmptyAdapter(boolean shouldStopRefreshing) {
                if (shouldStopRefreshing) {
                    mSwipeRefreshLayout.setRefreshing(false);
                }
            }

            @Override
            public void onNonEmptyAdapter() {
                mSwipeRefreshLayout.setRefreshing(false);
            public void onNonEmptyAdapter(boolean shouldStopRefreshing) {
                if (shouldStopRefreshing) {
                    mSwipeRefreshLayout.setRefreshing(false);
                }
            }

            @Override
@@ -115,6 +117,8 @@ public class FriendsFragment extends Fragment {

        mUsersView.setAdapter(mUsersAdapter);

        mUsersAdapter.startLoad();

        mSlidingUpPanelLayout.setFadeOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {

@@ -76,18 +76,22 @@ public class HomeFragment extends Fragment {
        });
        mDomainAdapter.setListener(new DomainAdapter.AdapterListener() {
            @Override
            public void onEmptyAdapter() {
            public void onEmptyAdapter(boolean shouldStopRefreshing) {
                searchNoResultsView.setText(R.string.search_no_results);
                searchNoResultsView.setVisibility(View.VISIBLE);
                mDomainsView.setVisibility(View.GONE);
                mSwipeRefreshLayout.setRefreshing(false);
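                // Cached results are delivered with shouldStopRefreshing == false so the pull-to-refresh spinner keeps running until the network load finishes.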
                if (shouldStopRefreshing) {
                    mSwipeRefreshLayout.setRefreshing(false);
                }
            }

            @Override
            public void onNonEmptyAdapter() {
            public void onNonEmptyAdapter(boolean shouldStopRefreshing) {
                searchNoResultsView.setVisibility(View.GONE);
                mDomainsView.setVisibility(View.VISIBLE);
                mSwipeRefreshLayout.setRefreshing(false);
                if (shouldStopRefreshing) {
                    mSwipeRefreshLayout.setRefreshing(false);
                }
            }

            @Override
@@ -96,11 +100,20 @@
            }
        });
        mDomainsView.setAdapter(mDomainAdapter);
        mDomainAdapter.startLoad();

        mSearchView = rootView.findViewById(R.id.searchView);
        mSearchIconView = rootView.findViewById(R.id.search_mag_icon);
        mClearSearch = rootView.findViewById(R.id.search_clear);

        getActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);

        return rootView;
    }

    @Override
    public void onStart() {
        super.onStart();
        mSearchView.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {}
@@ -142,10 +155,6 @@
                mDomainAdapter.loadDomains(mSearchView.getText().toString(), true);
            }
        });

        getActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);

        return rootView;
    }

    @Override

@ -2,36 +2,65 @@ package io.highfidelity.hifiinterface.fragment;
|
|||
|
||||
import android.app.Activity;
|
||||
import android.app.Fragment;
|
||||
import android.app.ProgressDialog;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.net.Uri;
|
||||
import android.os.Bundle;
|
||||
import android.text.Editable;
|
||||
import android.text.TextWatcher;
|
||||
import android.support.annotation.Nullable;
|
||||
import android.util.Log;
|
||||
import android.view.KeyEvent;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
import android.view.inputmethod.EditorInfo;
|
||||
import android.view.inputmethod.InputMethodManager;
|
||||
import android.widget.Button;
|
||||
import android.widget.CheckBox;
|
||||
import android.widget.EditText;
|
||||
import android.widget.TextView;
|
||||
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
import org.qtproject.qt5.android.QtNative;
|
||||
|
||||
public class LoginFragment extends Fragment {
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.Random;
|
||||
|
||||
import io.highfidelity.hifiinterface.BuildConfig;
|
||||
import io.highfidelity.hifiinterface.HifiUtils;
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
import io.highfidelity.hifiinterface.WebViewActivity;
|
||||
|
||||
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
|
||||
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
|
||||
|
||||
public class LoginFragment extends Fragment
|
||||
implements OnBackPressedListener {
|
||||
|
||||
private static final String ARG_USE_OAUTH = "use_oauth";
|
||||
private static final String TAG = "Interface";
|
||||
|
||||
private final String OAUTH_CLIENT_ID = BuildConfig.OAUTH_CLIENT_ID;
|
||||
private final String OAUTH_REDIRECT_URI = BuildConfig.OAUTH_REDIRECT_URI;
|
||||
private final String OAUTH_AUTHORIZE_BASE_URL = "https://highfidelity.com/oauth/authorize";
|
||||
private static final int OAUTH_AUTHORIZE_REQUEST = 1;
|
||||
|
||||
private EditText mUsername;
|
||||
private EditText mPassword;
|
||||
private TextView mError;
|
||||
private TextView mForgotPassword;
|
||||
private Button mLoginButton;
|
||||
private CheckBox mKeepMeLoggedInCheckbox;
|
||||
private ViewGroup mLoginForm;
|
||||
private ViewGroup mLoggingInFrame;
|
||||
private ViewGroup mLoggedInFrame;
|
||||
private boolean mLoginInProgress;
|
||||
private boolean mLoginSuccess;
|
||||
private boolean mUseOauth;
|
||||
private String mOauthState;
|
||||
|
||||
private ProgressDialog mDialog;
|
||||
public native void login(String username, String password, boolean keepLoggedIn);
|
||||
private native void retrieveAccessToken(String authCode, String clientId, String clientSecret, String redirectUri);
|
||||
|
||||
public native void nativeLogin(String username, String password, Activity usernameChangedListener);
|
||||
public native void cancelLogin();
|
||||
|
||||
private LoginFragment.OnLoginInteractionListener mListener;
|
||||
|
||||
|
@ -39,11 +68,22 @@ public class LoginFragment extends Fragment {
|
|||
// Required empty public constructor
|
||||
}
|
||||
|
||||
public static LoginFragment newInstance() {
|
||||
public static LoginFragment newInstance(boolean useOauth) {
|
||||
LoginFragment fragment = new LoginFragment();
|
||||
Bundle args = new Bundle();
|
||||
args.putBoolean(ARG_USE_OAUTH, useOauth);
|
||||
fragment.setArguments(args);
|
||||
return fragment;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCreate(@Nullable Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
if (getArguments() != null) {
|
||||
mUseOauth = getArguments().getBoolean(ARG_USE_OAUTH, false);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public View onCreateView(LayoutInflater inflater, ViewGroup container,
|
||||
Bundle savedInstanceState) {
|
||||
|
@ -53,58 +93,29 @@ public class LoginFragment extends Fragment {
|
|||
mPassword = rootView.findViewById(R.id.password);
|
||||
mError = rootView.findViewById(R.id.error);
|
||||
mLoginButton = rootView.findViewById(R.id.loginButton);
|
||||
mForgotPassword = rootView.findViewById(R.id.forgotPassword);
|
||||
mLoginForm = rootView.findViewById(R.id.loginForm);
|
||||
mLoggingInFrame = rootView.findViewById(R.id.loggingInFrame);
|
||||
mLoggedInFrame = rootView.findViewById(R.id.loggedInFrame);
|
||||
mKeepMeLoggedInCheckbox = rootView.findViewById(R.id.keepMeLoggedIn);
|
||||
|
||||
mUsername.addTextChangedListener(new TextWatcher() {
|
||||
boolean ignoreNextChange = false;
|
||||
boolean hadBlankSpace = false;
|
||||
@Override
|
||||
public void beforeTextChanged(CharSequence charSequence, int start, int count, int after) {
|
||||
hadBlankSpace = charSequence.length() > 0 && charSequence.charAt(charSequence.length()-1) == ' ';
|
||||
}
|
||||
rootView.findViewById(R.id.forgotPassword).setOnClickListener(view -> onForgotPasswordClicked());
|
||||
|
||||
@Override
|
||||
public void onTextChanged(CharSequence charSequence, int start, int count, int after) {
|
||||
rootView.findViewById(R.id.cancel).setOnClickListener(view -> onCancelLogin());
|
||||
|
||||
}
|
||||
rootView.findViewById(R.id.getStarted).setOnClickListener(view -> onGetStartedClicked());
|
||||
|
||||
@Override
|
||||
public void afterTextChanged(Editable editable) {
|
||||
if (!ignoreNextChange) {
|
||||
ignoreNextChange = true;
|
||||
boolean spaceFound = false;
|
||||
for (int i = 0; i < editable.length(); i++) {
|
||||
if (editable.charAt(i) == ' ') {
|
||||
spaceFound=true;
|
||||
editable.delete(i, i + 1);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
mLoginButton.setOnClickListener(view -> onLoginButtonClicked());
|
||||
|
||||
if (hadBlankSpace && !spaceFound && editable.length() > 0) {
|
||||
editable.delete(editable.length()-1, editable.length());
|
||||
}
|
||||
rootView.findViewById(R.id.takeMeInWorld).setOnClickListener(view -> skipLogin());
|
||||
mPassword.setOnEditorActionListener((textView, actionId, keyEvent) -> onPasswordEditorAction(textView, actionId, keyEvent));
|
||||
|
||||
editable.append(' ');
|
||||
ignoreNextChange = false;
|
||||
}
|
||||
mKeepMeLoggedInCheckbox.setChecked(HifiUtils.getInstance().isKeepingLoggedIn());
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
mLoginButton.setOnClickListener(view -> login());
|
||||
|
||||
mForgotPassword.setOnClickListener(view -> forgotPassword());
|
||||
|
||||
mPassword.setOnEditorActionListener(
|
||||
(textView, actionId, keyEvent) -> {
|
||||
if (actionId == EditorInfo.IME_ACTION_DONE) {
|
||||
mLoginButton.performClick();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
if (mUseOauth) {
|
||||
openWebForAuthorization();
|
||||
} else {
|
||||
showLoginForm();
|
||||
}
|
||||
return rootView;
|
||||
}
|
||||
|
||||
|
@ -125,14 +136,67 @@ public class LoginFragment extends Fragment {
|
|||
mListener = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onResume() {
|
||||
super.onResume();
|
||||
// This hack intends to keep Qt threads running even after the app comes from background
|
||||
QtNative.setApplicationState(ApplicationActive);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStop() {
|
||||
super.onStop();
|
||||
cancelActivityIndicator();
|
||||
// Leave the Qt app paused
|
||||
QtNative.setApplicationState(ApplicationInactive);
|
||||
hideKeyboard();
|
||||
}
|
||||
|
||||
public void login() {
|
||||
@Override
|
||||
public void onActivityResult(int requestCode, int resultCode, Intent data) {
|
||||
if (requestCode == OAUTH_AUTHORIZE_REQUEST) {
|
||||
if (resultCode == Activity.RESULT_OK) {
|
||||
String authCode = data.getStringExtra(WebViewActivity.RESULT_OAUTH_CODE);
|
||||
String state = data.getStringExtra(WebViewActivity.RESULT_OAUTH_STATE);
|
||||
if (state != null && state.equals(mOauthState) && mListener != null) {
|
||||
mOauthState = null;
|
||||
showActivityIndicator();
|
||||
mLoginInProgress = true;
|
||||
retrieveAccessToken(authCode, BuildConfig.OAUTH_CLIENT_ID, BuildConfig.OAUTH_CLIENT_SECRET, BuildConfig.OAUTH_REDIRECT_URI);
|
||||
}
|
||||
} else {
|
||||
onCancelLogin();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void onCancelLogin() {
|
||||
if (mListener != null) {
|
||||
mListener.onCancelLogin();
|
||||
}
|
||||
}
|
||||
|
||||
private boolean onPasswordEditorAction(TextView textView, int actionId, KeyEvent keyEvent) {
|
||||
if (actionId == EditorInfo.IME_ACTION_DONE) {
|
||||
mLoginButton.performClick();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void skipLogin() {
|
||||
if (mListener != null) {
|
||||
mListener.onSkipLoginClicked();
|
||||
}
|
||||
}
|
||||
|
||||
private void onGetStartedClicked() {
|
||||
if (mListener != null) {
|
||||
mListener.onLoginCompleted();
|
||||
}
|
||||
}
|
||||
|
||||
public void onLoginButtonClicked() {
|
||||
String username = mUsername.getText().toString().trim();
|
||||
String password = mPassword.getText().toString();
|
||||
hideKeyboard();
|
||||
|
@ -142,7 +206,10 @@ public class LoginFragment extends Fragment {
|
|||
mLoginButton.setEnabled(false);
|
||||
hideError();
|
||||
showActivityIndicator();
|
||||
nativeLogin(username, password, getActivity());
|
||||
mLoginInProgress = true;
|
||||
mLoginSuccess = false;
|
||||
boolean keepUserLoggedIn = mKeepMeLoggedInCheckbox.isChecked();
|
||||
login(username, password, keepUserLoggedIn);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -154,25 +221,32 @@ public class LoginFragment extends Fragment {
|
|||
}
|
||||
}
|
||||
|
||||
private void forgotPassword() {
|
||||
private void onForgotPasswordClicked() {
|
||||
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://highfidelity.com/users/password/new"));
|
||||
startActivity(intent);
|
||||
}
|
||||
|
||||
private void showActivityIndicator() {
|
||||
if (mDialog == null) {
|
||||
mDialog = new ProgressDialog(getContext());
|
||||
}
|
||||
mDialog.setMessage(getString(R.string.logging_in));
|
||||
mDialog.setCancelable(false);
|
||||
mDialog.show();
|
||||
mLoginForm.setVisibility(View.GONE);
|
||||
mLoggedInFrame.setVisibility(View.GONE);
|
||||
mLoggingInFrame.setVisibility(View.VISIBLE);
|
||||
mLoggingInFrame.bringToFront();
|
||||
}
|
||||
|
||||
private void cancelActivityIndicator() {
|
||||
if (mDialog != null) {
|
||||
mDialog.cancel();
|
||||
}
|
||||
private void showLoginForm() {
|
||||
mLoggingInFrame.setVisibility(View.GONE);
|
||||
mLoggedInFrame.setVisibility(View.GONE);
|
||||
mLoginForm.setVisibility(View.VISIBLE);
|
||||
mLoginForm.bringToFront();
|
||||
}
|
||||
|
||||
private void showLoggedInMessage() {
|
||||
mLoginForm.setVisibility(View.GONE);
|
||||
mLoggingInFrame.setVisibility(View.GONE);
|
||||
mLoggedInFrame.setVisibility(View.VISIBLE);
|
||||
mLoggedInFrame.bringToFront();
|
||||
}
|
||||
|
||||
private void showError(String error) {
|
||||
mError.setText(error);
|
||||
mError.setVisibility(View.VISIBLE);
|
||||
|
@ -184,22 +258,71 @@ public class LoginFragment extends Fragment {
|
|||
}
|
||||
|
||||
public void handleLoginCompleted(boolean success) {
|
||||
Log.d("[LOGIN]", "handleLoginCompleted " + success);
|
||||
mLoginInProgress = false;
|
||||
getActivity().runOnUiThread(() -> {
|
||||
mLoginButton.setEnabled(true);
|
||||
cancelActivityIndicator();
|
||||
if (success) {
|
||||
if (mListener != null) {
|
||||
mListener.onLoginCompleted();
|
||||
}
|
||||
mLoginSuccess = true;
|
||||
showLoggedInMessage();
|
||||
} else {
|
||||
showError(getString(R.string.login_username_or_password_incorrect));
|
||||
if (!mUseOauth) {
|
||||
showLoginForm();
|
||||
showError(getString(R.string.login_username_or_password_incorrect));
|
||||
} else {
|
||||
openWebForAuthorization();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doBack() {
|
||||
if (mLoginInProgress) {
|
||||
cancelLogin();
|
||||
showLoginForm();
|
||||
mLoginInProgress = false;
|
||||
mLoginButton.setEnabled(true);
|
||||
return true;
|
||||
} else if (mLoginSuccess) {
|
||||
onGetStartedClicked();
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private void updateOauthState() {
|
||||
// as we only use oauth for steam that's ok for now
|
||||
mOauthState = "steam-" + Long.toString(new Random().nextLong());
|
||||
}
|
||||
|
||||
private String buildAuthorizeUrl() {
|
||||
StringBuilder sb = new StringBuilder(OAUTH_AUTHORIZE_BASE_URL);
|
||||
sb.append("?client_id=").append(OAUTH_CLIENT_ID);
|
||||
try {
|
||||
String redirectUri = URLEncoder.encode(OAUTH_REDIRECT_URI, "utf-8");
|
||||
sb.append("&redirect_uri=").append(redirectUri);
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
Log.e(TAG, "Cannot build oauth autorization url", e);
|
||||
}
|
||||
sb.append("&response_type=code&scope=owner");
|
||||
sb.append("&state=").append(mOauthState);
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private void openWebForAuthorization() {
|
||||
Intent openUrlIntent = new Intent(getActivity(), WebViewActivity.class);
|
||||
updateOauthState();
|
||||
openUrlIntent.putExtra(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_URL, buildAuthorizeUrl());
|
||||
openUrlIntent.putExtra(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_CLEAR_COOKIES, true);
|
||||
startActivityForResult(openUrlIntent, OAUTH_AUTHORIZE_REQUEST);
|
||||
}
|
||||
|
||||
|
||||
public interface OnLoginInteractionListener {
|
||||
void onLoginCompleted();
|
||||
void onCancelLogin();
|
||||
void onSkipLoginClicked();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,11 @@
package io.highfidelity.hifiinterface.fragment;

public interface OnBackPressedListener {

    /**
     * Processes the back-press event.
     * @return true if the event was handled by this Fragment, false otherwise
     */
    boolean doBack();

}

@@ -0,0 +1,64 @@
package io.highfidelity.hifiinterface.fragment;

import android.content.SharedPreferences;
import android.media.audiofx.AcousticEchoCanceler;
import android.os.Bundle;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.support.annotation.Nullable;

import io.highfidelity.hifiinterface.HifiUtils;
import io.highfidelity.hifiinterface.R;

public class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener {

    private final String HIFI_SETTINGS_ANDROID_GROUP = "Android";
    private final String HIFI_SETTINGS_AEC_KEY = "aec";
    private final String PREFERENCE_KEY_AEC = "aec";

    private final boolean DEFAULT_AEC_ENABLED = true;

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.settings);
        boolean aecAvailable = AcousticEchoCanceler.isAvailable();
        PreferenceManager.setDefaultValues(getContext(), R.xml.settings, false);

        if (!aecAvailable) {
            findPreference(PREFERENCE_KEY_AEC).setEnabled(false);
            HifiUtils.getInstance().updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, false);
        }

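        // Seed the Android preference from the engine's stored AEC setting, forcing it off when the device has no echo canceller.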
        getPreferenceScreen().getSharedPreferences().edit().putBoolean(PREFERENCE_KEY_AEC,
                aecAvailable && HifiUtils.getInstance().getHifiSettingBoolean(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, DEFAULT_AEC_ENABLED)).commit();
    }

    public static SettingsFragment newInstance() {
        SettingsFragment fragment = new SettingsFragment();
        return fragment;
    }

    @Override
    public void onResume() {
        super.onResume();
        getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
    }

    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        switch (key) {
            case "aec":
                HifiUtils.getInstance().updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, sharedPreferences.getBoolean(key, false));
                break;
            default:
                break;
        }
    }
}

@ -0,0 +1,260 @@
|
|||
package io.highfidelity.hifiinterface.fragment;
|
||||
|
||||
import android.app.Fragment;
|
||||
import android.content.Context;
|
||||
import android.os.Bundle;
|
||||
import android.view.KeyEvent;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
import android.view.inputmethod.EditorInfo;
|
||||
import android.view.inputmethod.InputMethodManager;
|
||||
import android.widget.Button;
|
||||
import android.widget.CheckBox;
|
||||
import android.widget.EditText;
|
||||
import android.widget.TextView;
|
||||
|
||||
import org.qtproject.qt5.android.QtNative;
|
||||
|
||||
import io.highfidelity.hifiinterface.HifiUtils;
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
|
||||
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
|
||||
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
|
||||
|
||||
public class SignupFragment extends Fragment
|
||||
implements OnBackPressedListener {
|
||||
|
||||
private EditText mEmail;
|
||||
private EditText mUsername;
|
||||
private EditText mPassword;
|
||||
private TextView mError;
|
||||
private TextView mActivityText;
|
||||
private Button mSignupButton;
|
||||
private CheckBox mKeepMeLoggedInCheckbox;
|
||||
|
||||
private ViewGroup mSignupForm;
|
||||
private ViewGroup mLoggingInFrame;
|
||||
private ViewGroup mLoggedInFrame;
|
||||
|
||||
private boolean mLoginInProgress;
|
||||
private boolean mSignupInProgress;
|
||||
private boolean mSignupSuccess;
|
||||
|
||||
public native void signup(String email, String username, String password); // move to SignupFragment
|
||||
public native void cancelSignup();
|
||||
public native void login(String username, String password, boolean keepLoggedIn);
|
||||
public native void cancelLogin();
|
||||
|
||||
private SignupFragment.OnSignupInteractionListener mListener;
|
||||
|
||||
public SignupFragment() {
|
||||
// Required empty public constructor
|
||||
}
|
||||
|
||||
public static SignupFragment newInstance() {
|
||||
SignupFragment fragment = new SignupFragment();
|
||||
return fragment;
|
||||
}
|
||||
|
||||
@Override
|
||||
public View onCreateView(LayoutInflater inflater, ViewGroup container,
|
||||
Bundle savedInstanceState) {
|
||||
View rootView = inflater.inflate(R.layout.fragment_signup, container, false);
|
||||
|
||||
mEmail = rootView.findViewById(R.id.email);
|
||||
mUsername = rootView.findViewById(R.id.username);
|
||||
mPassword = rootView.findViewById(R.id.password);
|
||||
mError = rootView.findViewById(R.id.error);
|
||||
mSignupButton = rootView.findViewById(R.id.signupButton);
|
||||
mActivityText = rootView.findViewById(R.id.activityText);
|
||||
mKeepMeLoggedInCheckbox = rootView.findViewById(R.id.keepMeLoggedIn);
|
||||
|
||||
mSignupForm = rootView.findViewById(R.id.signupForm);
|
||||
mLoggedInFrame = rootView.findViewById(R.id.loggedInFrame);
|
||||
mLoggingInFrame = rootView.findViewById(R.id.loggingInFrame);
|
||||
|
||||
rootView.findViewById(R.id.cancel).setOnClickListener(view -> onCancelSignup());
|
||||
|
||||
mSignupButton.setOnClickListener(view -> signup());
|
||||
|
||||
rootView.findViewById(R.id.getStarted).setOnClickListener(view -> onGetStartedClicked());
|
||||
|
||||
mPassword.setOnEditorActionListener((textView, actionId, keyEvent) -> onPasswordEditorAction(textView, actionId, keyEvent));
|
||||
|
||||
mKeepMeLoggedInCheckbox.setChecked(HifiUtils.getInstance().isKeepingLoggedIn());
|
||||
|
||||
return rootView;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAttach(Context context) {
|
||||
super.onAttach(context);
|
||||
if (context instanceof OnSignupInteractionListener) {
|
||||
mListener = (OnSignupInteractionListener) context;
|
||||
} else {
|
||||
throw new RuntimeException(context.toString()
|
||||
+ " must implement OnSignupInteractionListener");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDetach() {
|
||||
super.onDetach();
|
||||
mListener = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onResume() {
|
||||
super.onResume();
|
||||
// This hack intends to keep Qt threads running even after the app comes from background
|
||||
QtNative.setApplicationState(ApplicationActive);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStop() {
|
||||
super.onStop();
|
||||
// Leave the Qt app paused
|
||||
QtNative.setApplicationState(ApplicationInactive);
|
||||
hideKeyboard();
|
||||
}
|
||||
|
||||
private boolean onPasswordEditorAction(TextView textView, int actionId, KeyEvent keyEvent) {
|
||||
if (actionId == EditorInfo.IME_ACTION_DONE) {
|
||||
mSignupButton.performClick();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void onCancelSignup() {
|
||||
if (mListener != null) {
|
||||
mListener.onCancelSignup();
|
||||
}
|
||||
}
|
||||
|
||||
public void signup() {
|
||||
String email = mEmail.getText().toString().trim();
|
||||
String username = mUsername.getText().toString().trim();
|
||||
String password = mPassword.getText().toString();
|
||||
hideKeyboard();
|
||||
if (email.isEmpty() || username.isEmpty() || password.isEmpty()) {
|
||||
showError(getString(R.string.signup_email_username_or_password_incorrect));
|
||||
} else {
|
||||
mSignupButton.setEnabled(false);
|
||||
hideError();
|
||||
mActivityText.setText(R.string.creating_account);
|
||||
showActivityIndicator();
|
||||
mSignupInProgress = true;
|
||||
mSignupSuccess = false;
|
||||
signup(email, username, password);
|
||||
}
|
||||
}
|
||||
|
||||
private void hideKeyboard() {
|
||||
View view = getActivity().getCurrentFocus();
|
||||
if (view != null) {
|
||||
InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
|
||||
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
|
||||
}
|
||||
}
|
||||
|
||||
private void showActivityIndicator() {
|
||||
mSignupForm.setVisibility(View.GONE);
|
||||
mLoggedInFrame.setVisibility(View.GONE);
|
||||
mLoggingInFrame.setVisibility(View.VISIBLE);
|
||||
}
|
||||
|
||||
private void showLoggedInMessage() {
|
||||
mSignupForm.setVisibility(View.GONE);
|
||||
mLoggingInFrame.setVisibility(View.GONE);
|
||||
mLoggedInFrame.setVisibility(View.VISIBLE);
|
||||
}
|
||||
|
||||
private void showSignupForm() {
|
||||
mLoggingInFrame.setVisibility(View.GONE);
|
||||
mLoggedInFrame.setVisibility(View.GONE);
|
||||
mSignupForm.setVisibility(View.VISIBLE);
|
||||
}
|
||||
private void showError(String error) {
|
||||
mError.setText(error);
|
||||
mError.setVisibility(View.VISIBLE);
|
||||
}
|
||||
|
||||
private void hideError() {
|
||||
mError.setText("");
|
||||
mError.setVisibility(View.INVISIBLE);
|
||||
}
|
||||
|
||||
public interface OnSignupInteractionListener {
|
||||
void onSignupCompleted();
|
||||
void onCancelSignup();
|
||||
}
|
||||
|
||||
private void onGetStartedClicked() {
|
||||
if (mListener != null) {
|
||||
mListener.onSignupCompleted();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void handleSignupCompleted() {
|
||||
mSignupInProgress = false;
|
||||
String username = mUsername.getText().toString().trim();
|
||||
String password = mPassword.getText().toString();
|
||||
getActivity().runOnUiThread(() -> {
|
||||
mActivityText.setText(R.string.logging_in);
|
||||
});
|
||||
mLoginInProgress = true;
|
||||
boolean keepUserLoggedIn = mKeepMeLoggedInCheckbox.isChecked();
|
||||
login(username, password, keepUserLoggedIn);
|
||||
}
|
||||
|
||||
public void handleSignupFailed(String error) {
|
||||
mSignupInProgress = false;
|
||||
getActivity().runOnUiThread(() -> {
|
||||
mSignupButton.setEnabled(true);
|
||||
showSignupForm();
|
||||
mError.setText(error);
|
||||
mError.setVisibility(View.VISIBLE);
|
||||
});
|
||||
}
|
||||
|
||||
public void handleLoginCompleted(boolean success) {
|
||||
mLoginInProgress = false;
|
||||
getActivity().runOnUiThread(() -> {
|
||||
mSignupButton.setEnabled(true);
|
||||
if (success) {
|
||||
mSignupSuccess = true;
|
||||
showLoggedInMessage();
|
||||
} else {
|
||||
// Registration was successful but login failed.
|
||||
// Let the user to login manually
|
||||
mListener.onCancelSignup();
|
||||
showSignupForm();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doBack() {
|
||||
if (mSignupInProgress) {
|
||||
cancelSignup();
|
||||
} else if (mLoginInProgress) {
|
||||
cancelLogin();
|
||||
}
|
||||
|
||||
if (mSignupInProgress || mLoginInProgress) {
|
||||
showSignupForm();
|
||||
mLoginInProgress = false;
|
||||
mSignupInProgress = false;
|
||||
mSignupButton.setEnabled(true);
|
||||
return true;
|
||||
} else if (mSignupSuccess) {
|
||||
onGetStartedClicked();
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,93 @@
|
|||
package io.highfidelity.hifiinterface.fragment;
|
||||
|
||||
import android.app.Fragment;
|
||||
import android.content.Context;
|
||||
import android.os.Bundle;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
|
||||
public class StartMenuFragment extends Fragment {
|
||||
|
||||
private String TAG = "HighFidelity";
|
||||
private StartMenuInteractionListener mListener;
|
||||
|
||||
public StartMenuFragment() {
|
||||
// Required empty public constructor
|
||||
}
|
||||
|
||||
public static StartMenuFragment newInstance() {
|
||||
StartMenuFragment fragment = new StartMenuFragment();
|
||||
return fragment;
|
||||
}
|
||||
|
||||
@Override
|
||||
public View onCreateView(LayoutInflater inflater, ViewGroup container,
|
||||
Bundle savedInstanceState) {
|
||||
// Inflate the layout for this fragment
|
||||
View rootView = inflater.inflate(R.layout.fragment_login_menu, container, false);
|
||||
rootView.findViewById(R.id.signupButton).setOnClickListener(view -> {
|
||||
if (mListener != null) {
|
||||
mListener.onSignupButtonClicked();
|
||||
}
|
||||
});
|
||||
|
||||
rootView.findViewById(R.id.loginButton).setOnClickListener(view -> {
|
||||
if (mListener != null) {
|
||||
mListener.onLoginButtonClicked();
|
||||
}
|
||||
});
|
||||
|
||||
rootView.findViewById(R.id.steamLoginButton).setOnClickListener(view -> {
|
||||
if (mListener != null) {
|
||||
mListener.onSteamLoginButtonClicked();
|
||||
}
|
||||
});
|
||||
|
||||
rootView.findViewById(R.id.takeMeInWorld).setOnClickListener(view -> {
|
||||
if (mListener != null) {
|
||||
mListener.onSkipLoginClicked();
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
|
||||
return rootView;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAttach(Context context) {
|
||||
super.onAttach(context);
|
||||
if (context instanceof StartMenuInteractionListener) {
|
||||
mListener = (StartMenuInteractionListener) context;
|
||||
} else {
|
||||
throw new RuntimeException(context.toString()
|
||||
+ " must implement StartMenuInteractionListener");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDetach() {
|
||||
super.onDetach();
|
||||
mListener = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* This interface must be implemented by activities that contain this
|
||||
* fragment to allow an interaction in this fragment to be communicated
|
||||
* to the activity and potentially other fragments contained in that
|
||||
* activity.
|
||||
* <p>
|
||||
* See the Android Training lesson <a href=
|
||||
* "http://developer.android.com/training/basics/fragments/communicating.html"
|
||||
* >Communicating with Other Fragments</a> for more information.
|
||||
*/
|
||||
public interface StartMenuInteractionListener {
|
||||
void onSignupButtonClicked();
|
||||
void onLoginButtonClicked();
|
||||
void onSkipLoginClicked();
|
||||
void onSteamLoginButtonClicked();
|
||||
}
|
||||
}
|
|
@ -4,9 +4,11 @@ import android.app.Fragment;
|
|||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.graphics.Bitmap;
|
||||
import android.net.Uri;
|
||||
import android.net.http.SslError;
|
||||
import android.os.Bundle;
|
||||
import android.os.Handler;
|
||||
import android.text.TextUtils;
|
||||
import android.view.GestureDetector;
|
||||
import android.view.KeyEvent;
|
||||
import android.view.LayoutInflater;
|
||||
|
@ -14,6 +16,7 @@ import android.view.MotionEvent;
|
|||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
import android.view.animation.AlphaAnimation;
|
||||
import android.webkit.CookieManager;
|
||||
import android.webkit.SslErrorHandler;
|
||||
import android.webkit.WebChromeClient;
|
||||
import android.webkit.WebResourceError;
|
||||
|
@ -25,6 +28,7 @@ import android.webkit.WebViewClient;
|
|||
import android.widget.ProgressBar;
|
||||
import android.widget.Toast;
|
||||
|
||||
import io.highfidelity.hifiinterface.BuildConfig;
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
import io.highfidelity.hifiinterface.WebViewActivity;
|
||||
|
||||
|
@ -32,6 +36,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
|
|||
|
||||
public static final String URL = "url";
|
||||
public static final String TOOLBAR_VISIBLE = "toolbar_visible";
|
||||
public static final String CLEAR_COOKIES = "clear_cookies";
|
||||
private static final long DELAY_HIDE_TOOLBAR_MILLIS = 3000;
|
||||
private static final long FADE_OUT_DURATION = 2000;
|
||||
|
||||
|
@ -41,6 +46,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
|
|||
private ProgressBar mProgressBar;
|
||||
private String mUrl;
|
||||
private boolean mToolbarVisible;
|
||||
private boolean mClearCookies;
|
||||
|
||||
private OnWebViewInteractionListener mListener;
|
||||
private Runnable mCloseAction;
|
||||
|
@ -170,6 +176,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
|
|||
if (getArguments() != null) {
|
||||
mUrl = getArguments().getString(URL);
|
||||
mToolbarVisible = getArguments().getBoolean(TOOLBAR_VISIBLE);
|
||||
mClearCookies = getArguments().getBoolean(CLEAR_COOKIES);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -179,6 +186,10 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
|
|||
View rootView = inflater.inflate(R.layout.fragment_web_view, container, false);
|
||||
mProgressBar = rootView.findViewById(R.id.toolbarProgressBar);
|
||||
myWebView = rootView.findViewById(R.id.web_view);
|
||||
if (mClearCookies) {
|
||||
CookieManager.getInstance().removeAllCookies(null);
|
||||
}
|
||||
|
||||
mHandler = new Handler();
|
||||
gestureDetector = new GestureDetector(this);
|
||||
gestureDetector.setOnDoubleTapListener(new GestureDetector.OnDoubleTapListener() {
|
||||
|
@@ -251,6 +262,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
        void onWebLoaded(String url, SafenessLevel safenessLevel);
        void onTitleReceived(String title);
        void onExpand();
        void onOAuthAuthorizeCallback(Uri uri);
    }
@@ -320,6 +332,18 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
                super.onLoadResource(view, url);
            }
        }

        @Override
        public boolean shouldOverrideUrlLoading(WebView view, WebResourceRequest request) {
            if (!TextUtils.isEmpty(BuildConfig.OAUTH_REDIRECT_URI) &&
                    request.getUrl().toString().startsWith(BuildConfig.OAUTH_REDIRECT_URI)) {
                if (mListener != null) {
                    mListener.onOAuthAuthorizeCallback(request.getUrl());
                }
                return true;
            }
            return super.shouldOverrideUrlLoading(view, request);
        }
    }
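The fragment only forwards the OAuth redirect to its listener; what happens with the redirect Uri is up to the host. A minimal sketch of a consumer, assuming the redirect carries an authorization code in a "code" query parameter and that the host finishes the exchange itself (both assumptions, not taken from this diff):

    // Hypothetical listener implementation in the hosting activity.
    @Override
    public void onOAuthAuthorizeCallback(Uri uri) {
        // e.g. hifiapp://oauth?code=... (scheme and parameter name assumed)
        String authorizationCode = uri.getQueryParameter("code");
        if (authorizationCode != null) {
            // Hand the code to whatever login layer completes the token exchange.
            exchangeAuthorizationCode(authorizationCode); // hypothetical helper
        }
    }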

    class HiFiWebChromeClient extends WebChromeClient {
@@ -0,0 +1,18 @@
package io.highfidelity.hifiinterface.receiver;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.util.Log;

public class HeadsetStateReceiver extends BroadcastReceiver {

    private native void notifyHeadsetOn(boolean pluggedIn);

    @Override
    public void onReceive(Context context, Intent intent) {
        AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        notifyHeadsetOn(audioManager.isWiredHeadsetOn());
    }
}
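The receiver only fires once something registers it for the right broadcast. A minimal sketch of that registration, assuming it happens dynamically from the interface activity around resume/pause (the placement and intent filter are assumptions, not taken from this diff):

    // Hypothetical dynamic registration of HeadsetStateReceiver in an activity.
    private HeadsetStateReceiver mHeadsetStateReceiver;

    @Override
    protected void onResume() {
        super.onResume();
        mHeadsetStateReceiver = new HeadsetStateReceiver();
        // ACTION_HEADSET_PLUG is a sticky broadcast, so the current wired-headset
        // state is delivered immediately on registration.
        registerReceiver(mHeadsetStateReceiver, new IntentFilter(AudioManager.ACTION_HEADSET_PLUG));
    }

    @Override
    protected void onPause() {
        super.onPause();
        unregisterReceiver(mHeadsetStateReceiver);
    }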
@@ -12,6 +12,7 @@ import android.widget.TextView;

import com.squareup.picasso.Picasso;

import java.util.Arrays;
import java.util.List;

import io.highfidelity.hifiinterface.R;
@@ -36,19 +37,41 @@ public class DomainAdapter extends RecyclerView.Adapter<DomainAdapter.ViewHolder
    // references to our domains
    private Domain[] mDomains = {};

    private static Domain[] DOMAINS_TMP_CACHE = {};

    public DomainAdapter(Context c, String protocol, String lastLocation) {
        mContext = c;
        this.mInflater = LayoutInflater.from(mContext);
        mProtocol = protocol;
        mLastLocation = lastLocation;
        domainProvider = new UserStoryDomainProvider(mProtocol);
        loadDomains("", true);
    }

    public void setListener(AdapterListener adapterListener) {
        mAdapterListener = adapterListener;
    }

    public void startLoad() {
        useTmpCachedDomains();
        loadDomains("", true);
    }

    private void useTmpCachedDomains() {
        synchronized (this) {
            if (DOMAINS_TMP_CACHE != null && DOMAINS_TMP_CACHE.length > 0) {
                mDomains = Arrays.copyOf(DOMAINS_TMP_CACHE, DOMAINS_TMP_CACHE.length);
                notifyDataSetChanged();
                if (mAdapterListener != null) {
                    if (mDomains.length == 0) {
                        mAdapterListener.onEmptyAdapter(false);
                    } else {
                        mAdapterListener.onNonEmptyAdapter(false);
                    }
                }
            }
        }
    }

    public void loadDomains(String filterText, boolean forceRefresh) {
        domainProvider.retrieve(filterText, new DomainProvider.DomainCallback() {
            @Override
@@ -60,13 +83,18 @@ public class DomainAdapter extends RecyclerView.Adapter<DomainAdapter.ViewHolder
                overrideDefaultThumbnails(domain);

                mDomains = new Domain[domain.size()];
                mDomains = domain.toArray(mDomains);
                notifyDataSetChanged();
                if (mAdapterListener != null) {
                    if (mDomains.length == 0) {
                        mAdapterListener.onEmptyAdapter();
                    } else {
                        mAdapterListener.onNonEmptyAdapter();
                synchronized (this) {
                    domain.toArray(mDomains);
                    if (filterText.isEmpty()) {
                        DOMAINS_TMP_CACHE = Arrays.copyOf(mDomains, mDomains.length);
                    }
                    notifyDataSetChanged();
                    if (mAdapterListener != null) {
                        if (mDomains.length == 0) {
                            mAdapterListener.onEmptyAdapter(true);
                        } else {
                            mAdapterListener.onNonEmptyAdapter(true);
                        }
                    }
                }
            }
@@ -112,8 +140,6 @@ public class DomainAdapter extends RecyclerView.Adapter<DomainAdapter.ViewHolder

    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
        // TODO
        //holder.thumbnail.setImageResource(mDomains[position].thumbnail);
        Domain domain = mDomains[position];
        holder.mDomainName.setText(domain.name);
        Uri uri = Uri.parse(domain.thumbnail);
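The parsed thumbnail Uri is presumably what the Picasso import above is for. A minimal sketch of that binding, assuming a `mThumbnail` ImageView on the view holder and the classic `Picasso.with(...)` entry point (both assumptions; newer Picasso releases use `Picasso.get()` instead):

    // Hypothetical continuation of onBindViewHolder: load the domain thumbnail.
    Picasso.with(mContext)
            .load(uri)
            .into(holder.mThumbnail); // ImageView field name assumed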
@@ -164,8 +190,8 @@ public class DomainAdapter extends RecyclerView.Adapter<DomainAdapter.ViewHolder
    }

    public interface AdapterListener {
        void onEmptyAdapter();
        void onNonEmptyAdapter();
        void onEmptyAdapter(boolean shouldStopRefreshing);
        void onNonEmptyAdapter(boolean shouldStopRefreshing);
        void onError(Exception e, String message);
    }
}
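The new boolean appears to distinguish a fresh provider result (true) from a replay of the static cache (false), so the caller can decide when to stop a refresh indicator. A minimal sketch of a consumer, assuming the hosting fragment drives a SwipeRefreshLayout and an empty-state view (all field names here are assumptions):

    // Hypothetical AdapterListener wired up by the hosting fragment.
    mDomainAdapter.setListener(new DomainAdapter.AdapterListener() {
        @Override
        public void onEmptyAdapter(boolean shouldStopRefreshing) {
            mNoResultsView.setVisibility(View.VISIBLE);
            if (shouldStopRefreshing) {
                mSwipeRefreshLayout.setRefreshing(false); // only stop once real data arrived
            }
        }

        @Override
        public void onNonEmptyAdapter(boolean shouldStopRefreshing) {
            mNoResultsView.setVisibility(View.GONE);
            if (shouldStopRefreshing) {
                mSwipeRefreshLayout.setRefreshing(false);
            }
        }

        @Override
        public void onError(Exception e, String message) {
            mSwipeRefreshLayout.setRefreshing(false);
        }
    });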
@@ -37,28 +37,57 @@ public class UserListAdapter extends RecyclerView.Adapter<UserListAdapter.ViewHo
    private ItemClickListener mClickListener;
    private AdapterListener mAdapterListener;

    private static List<User> USERS_TMP_CACHE;

    public UserListAdapter(Context c, UsersProvider usersProvider) {
        mContext = c;
        mInflater = LayoutInflater.from(mContext);
        mProvider = usersProvider;
        loadUsers();
    }

    public void setListener(AdapterListener adapterListener) {
        mAdapterListener = adapterListener;
    }

    public void startLoad() {
        useTmpCachedUsers();
        loadUsers();
    }

    private void useTmpCachedUsers() {
        synchronized (this) {
            if (USERS_TMP_CACHE != null && USERS_TMP_CACHE.size() > 0) {
                mUsers = new ArrayList<>(USERS_TMP_CACHE.size());
                mUsers.addAll(USERS_TMP_CACHE);
                notifyDataSetChanged();
                if (mAdapterListener != null) {
                    if (mUsers.isEmpty()) {
                        mAdapterListener.onEmptyAdapter(false);
                    } else {
                        mAdapterListener.onNonEmptyAdapter(false);
                    }
                }
            }
        }
    }

    public void loadUsers() {
        mProvider.retrieve(new UsersProvider.UsersCallback() {
            @Override
            public void retrieveOk(List<User> users) {
                mUsers = new ArrayList<>(users);
                notifyDataSetChanged();
                if (mAdapterListener != null) {
                    if (mUsers.isEmpty()) {
                        mAdapterListener.onEmptyAdapter();
                    } else {
                        mAdapterListener.onNonEmptyAdapter();

                synchronized (this) {
                    USERS_TMP_CACHE = new ArrayList<>(mUsers.size());
                    USERS_TMP_CACHE.addAll(mUsers);

                    if (mAdapterListener != null) {
                        if (mUsers.isEmpty()) {
                            mAdapterListener.onEmptyAdapter(true);
                        } else {
                            mAdapterListener.onNonEmptyAdapter(true);
                        }
                    }
                }
            }
@@ -240,8 +269,9 @@ public class UserListAdapter extends RecyclerView.Adapter<UserListAdapter.ViewHo
    }

    public interface AdapterListener {
        void onEmptyAdapter();
        void onNonEmptyAdapter();
        void onEmptyAdapter(boolean shouldStopRefreshing);
        void onNonEmptyAdapter(boolean shouldStopRefreshing);
        void onError(Exception e, String message);
    }

}
android/app/src/main/res/drawable/encourage_login_background.jpg (new binary file, 48 KiB; image not shown)
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
    android:width="350dp"
    android:width="340dp"
    android:height="100dp"
    android:viewportWidth="350"
    android:viewportHeight="100">
android/app/src/main/res/drawable/ic_eye_noshow.xml (new file, 27 lines)
@ -0,0 +1,27 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="36dp"
|
||||
android:height="22dp"
|
||||
android:viewportWidth="36"
|
||||
android:viewportHeight="22">
|
||||
|
||||
<path
|
||||
android:fillColor="#3D3D3D"
|
||||
android:fillType="evenOdd"
|
||||
android:pathData="M3.59534,11.0156 C6.16042,13.4128,9.65987,15.5898,13.6042,16.1774 C17.686,16.7856,22.4164,15.7196,27.3057,11.0659 C22.0721,6.07309,17.0642,5.14115,12.9153,5.90073 C8.99427,6.61859,5.69298,8.87688,3.59534,11.0156 Z M12.455,3.27591 C17.7727,2.30235,23.9836,3.74895,30.1053,10.1333 L31,11.0664 L30.1053,11.9994 C24.3636,17.9875,18.4774,19.5983,13.2276,18.8161 C8.06048,18.0463,3.70384,14.9892,0.837069,11.9994 L0,11.1265 L0.778477,10.1986 C3.05338,7.48717,7.2318,4.23217,12.455,3.27591 Z" />
|
||||
<path
|
||||
android:fillColor="#3D3D3D"
|
||||
android:pathData="M15.6539,7.11119 C17.6719,7.11119,19.3078,8.81726,19.3078,10.9218 C19.3078,13.0263,17.6719,14.7324,15.6539,14.7324 C13.6359,14.7324,12,13.0263,12,10.9218 C12,8.81726,13.6359,7.11119,15.6539,7.11119 Z" />
|
||||
<!--path
|
||||
android:fillColor="#000000"
|
||||
android:strokeColor="#ffffff"
|
||||
android:strokeWidth="2.7"
|
||||
android:strokeLineCap="round"
|
||||
android:pathData="M27,2.90919 L8.90919,21" /-->
|
||||
<path
|
||||
android:fillColor="#000000"
|
||||
android:strokeColor="#3D3D3D"
|
||||
android:strokeWidth="3"
|
||||
android:strokeLineCap="round"
|
||||
android:pathData="M25,2.12132 L7.12132,20" />
|
||||
</vector>
|
android/app/src/main/res/drawable/ic_eye_show.xml (new file, 15 lines)
@ -0,0 +1,15 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="36dp"
|
||||
android:height="16dp"
|
||||
android:viewportWidth="36"
|
||||
android:viewportHeight="16">
|
||||
|
||||
<path
|
||||
android:fillColor="#3D3D3D"
|
||||
android:fillType="evenOdd"
|
||||
android:pathData="M3.59534,8.01564 C6.16042,10.4128,9.65987,12.5898,13.6042,13.1774 C17.686,13.7856,22.4164,12.7196,27.3057,8.06585 C22.0721,3.07309,17.0642,2.14115,12.9153,2.90073 C8.99427,3.61859,5.69298,5.87688,3.59534,8.01564 Z M12.455,0.275915 C17.7727,-0.697651,23.9836,0.748949,30.1053,7.13329 L31,8.06636 L30.1053,8.99944 C24.3636,14.9875,18.4774,16.5983,13.2276,15.8161 C8.06048,15.0463,3.70384,11.9892,0.837069,8.99944 L0,8.12646 L0.778477,7.1986 C3.05338,4.48717,7.2318,1.23217,12.455,0.275915 Z" />
|
||||
<path
|
||||
android:fillColor="#3D3D3D"
|
||||
android:pathData="M15.6441,4.11118 C17.6621,4.11118,19.298,5.81725,19.298,7.92179 C19.298,10.0263,17.6621,11.7324,15.6441,11.7324 C13.6261,11.7324,11.9902,10.0263,11.9902,7.92179 C11.9902,5.81725,13.6261,4.11118,15.6441,4.11118 Z" />
|
||||
</vector>
|
android/app/src/main/res/drawable/ic_right_arrow.xml (new file, 11 lines)
@ -0,0 +1,11 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="11dp"
|
||||
android:height="14dp"
|
||||
android:viewportWidth="11"
|
||||
android:viewportHeight="14">
|
||||
|
||||
<path
|
||||
android:fillColor="#ffffff"
|
||||
android:pathData="M2.98427,0.868092 C2.35913,0.324495,1.41169,0.390596,0.868092,1.01573 C0.324495,1.64087,0.390596,2.58831,1.01573,3.13191 L2.98427,0.868092 Z M8,7.21739 L8.93497,8.39035 L10.3436,7.26752 L8.98427,6.08548 L8,7.21739 Z M1.06503,10.827 C0.417224,11.3434,0.310672,12.2872,0.82704,12.935 C1.34341,13.5828,2.28716,13.6893,2.93497,13.173 L1.06503,10.827 Z M1.01573,3.13191 L7.01573,8.3493 L8.98427,6.08548 L2.98427,0.868092 L1.01573,3.13191 Z M7.06503,6.04443 L1.06503,10.827 L2.93497,13.173 L8.93497,8.39035 L7.06503,6.04443 Z" />
|
||||
</vector>
|
android/app/src/main/res/drawable/ic_steam.xml (new file, 11 lines)
@ -0,0 +1,11 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="21dp"
|
||||
android:height="21dp"
|
||||
android:viewportWidth="21"
|
||||
android:viewportHeight="21">
|
||||
|
||||
<path
|
||||
android:fillColor="#ffffff"
|
||||
android:pathData="M10.4866,0 C4.92045,0,0.367395,4.32104,0,9.78734 L5.45262,11.9841 C5.93184,11.651,6.51333,11.4545,7.14145,11.4545 C7.27641,11.4545,7.4083,11.4666,7.53829,11.4841 L10.1047,7.72495 C10.1083,5.56672,11.861,3.81818,14.0229,3.81818 C16.1872,3.81818,17.9416,5.57035,17.9416,7.73182 C17.9416,9.89329,16.1872,11.6455,14.0229,11.6455 C14.021,11.6455,14.0189,11.6453,14.017,11.6453 L10.0936,14.2008 C10.0986,14.2712,10.1043,14.3419,10.1043,14.4136 C10.1043,16.048,8.77791,17.3727,7.14145,17.3727 C5.69539,17.3727,4.49304,16.3376,4.2325,14.969 L0.378099,13.3841 C1.6334,17.7801,5.6822,21,10.4866,21 C16.2931,21,21,16.2991,21,10.5 C21,4.70114,16.2931,0,10.4866,0 Z M7.14145,16.0364 C6.96655,16.0364,6.79833,16.0081,6.64044,15.9569 L6.63968,15.9589 L6.59151,15.939 C6.54506,15.9224,6.49975,15.9037,6.45541,15.8831 L5.15462,15.3483 C5.50614,16.0927,6.26253,16.6091,7.14145,16.6091 C8.35546,16.6091,9.33971,15.6263,9.33971,14.4136 C9.33971,13.201,8.35546,12.2182,7.14145,12.2182 C6.87269,12.2182,6.61636,12.2688,6.37818,12.357 L7.75448,12.9114 C7.76404,12.9154,7.77359,12.9188,7.78296,12.923 L7.89001,12.9662 L7.88714,12.9732 C8.40898,13.243,8.76625,13.7861,8.76625,14.4136 C8.76625,15.3098,8.03872,16.0364,7.14145,16.0364 Z M16.7946,7.73182 C16.7946,6.20302,15.5537,4.96364,14.0229,4.96364 C12.4922,4.96364,11.2512,6.20302,11.2512,7.73182 C11.2512,9.26062,12.4922,10.5,14.0229,10.5 C15.5537,10.5,16.7946,9.26062,16.7946,7.73182 Z M12.0158,7.73182 C12.0158,6.62474,12.9144,5.72727,14.0229,5.72727 C15.1314,5.72727,16.03,6.62474,16.03,7.73182 C16.03,8.8389,15.1314,9.73636,14.0229,9.73636 C12.9144,9.73636,12.0158,8.8389,12.0158,7.73182 Z" />
|
||||
</vector>
|
android/app/src/main/res/drawable/rounded_button_color3.xml (new file, 24 lines)
@ -0,0 +1,24 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
|
||||
<item android:state_pressed="true" >
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton2" />
|
||||
<solid android:color="@color/colorButton2"/>
|
||||
</shape>
|
||||
</item>
|
||||
<item android:state_focused="true">
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton2" />
|
||||
<solid android:color="@color/colorButton2"/>
|
||||
</shape>
|
||||
</item>
|
||||
<item>
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton2" />
|
||||
<solid android:color="@color/colorButton2"/>
|
||||
</shape>
|
||||
</item>
|
||||
</selector>
|
android/app/src/main/res/drawable/rounded_button_color4.xml (new file, 24 lines)
@ -0,0 +1,24 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
|
||||
<item android:state_pressed="true" >
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton4" />
|
||||
<solid android:color="@color/colorButton4"/>
|
||||
</shape>
|
||||
</item>
|
||||
<item android:state_focused="true">
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton4" />
|
||||
<solid android:color="@color/colorButton4"/>
|
||||
</shape>
|
||||
</item>
|
||||
<item>
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton4" />
|
||||
<solid android:color="@color/colorButton4"/>
|
||||
</shape>
|
||||
</item>
|
||||
</selector>
|
|
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
    android:shape="rectangle" android:padding="9dp">
    <corners android:radius="4dip" />
    <stroke android:width="1dip" android:color="@android:color/black" />
    <solid android:color="@color/backgroundEditText"/>
</shape>
|
@ -0,0 +1,24 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
|
||||
<item android:state_pressed="true" >
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton2" />
|
||||
<solid android:color="@color/colorButton2"/>
|
||||
</shape>
|
||||
</item>
|
||||
<item android:state_focused="true">
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton2" />
|
||||
<solid android:color="@color/colorButton2"/>
|
||||
</shape>
|
||||
</item>
|
||||
<item>
|
||||
<shape android:shape="rectangle" >
|
||||
<corners android:radius="4dip" />
|
||||
<stroke android:width="1dip" android:color="@color/colorButton2" />
|
||||
<solid android:color="@color/colorButton2"/>
|
||||
</shape>
|
||||
</item>
|
||||
</selector>
|
|
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
    <item android:state_checked="true" android:drawable="@drawable/ic_eye_noshow"/>
    <item android:drawable="@drawable/ic_eye_show" />
</selector>
android/app/src/main/res/layout/activity_encourage_login.xml (new file, 14 lines)
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".LoginMenuActivity">

    <FrameLayout
        android:id="@+id/content_frame"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</android.support.constraint.ConstraintLayout>
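The single content_frame suggests the login, signup, and start-menu screens are swapped in and out as fragments by LoginMenuActivity; a minimal sketch of that wiring (the helper name and transaction details are assumptions, not taken from this diff):

    // Hypothetical helper inside LoginMenuActivity: swap a screen into content_frame.
    private void showFragment(android.app.Fragment fragment, String tag) {
        getFragmentManager().beginTransaction()
                .replace(R.id.content_frame, fragment, tag)
                .addToBackStack(tag)
                .commit();
    }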
@ -6,6 +6,17 @@
|
|||
android:layout_height="match_parent"
|
||||
android:background="@color/backgroundLight">
|
||||
|
||||
<ImageView
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:src="@drawable/encourage_login_background"
|
||||
android:scaleType="fitXY" />
|
||||
|
||||
<FrameLayout
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:background="#B2000000" />
|
||||
|
||||
<ImageView
|
||||
android:id="@+id/header"
|
||||
android:layout_width="@dimen/header_hifi_width"
|
||||
|
@ -17,95 +28,245 @@
|
|||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:srcCompat="@drawable/hifi_header" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/error"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginBottom="16dp"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textColor="@color/colorLoginError"
|
||||
android:textSize="12sp"
|
||||
app:layout_constraintBottom_toTopOf="@id/username"
|
||||
app:layout_constraintLeft_toLeftOf="@id/username"
|
||||
android:visibility="invisible"/>
|
||||
|
||||
<EditText
|
||||
android:id="@+id/username"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="35dp"
|
||||
android:layout_marginLeft="46dp"
|
||||
android:layout_marginRight="46dp"
|
||||
android:background="@drawable/rounded_edit"
|
||||
android:padding="7dp"
|
||||
android:paddingRight="12dp"
|
||||
android:paddingTop="14dp"
|
||||
android:ems="10"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textSize="14sp"
|
||||
android:inputType="textEmailAddress"
|
||||
android:textStyle="italic"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="right|center_vertical"
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/loggingInFrame"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
android:layout_marginTop="@dimen/login_form_margin_top"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
android:layout_marginTop="70dp"
|
||||
android:hint="@string/username_or_email" />
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:visibility="gone">
|
||||
<TextView
|
||||
android:id="@+id/loggingInText"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="@string/logging_in"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="24sp"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:layout_marginTop="83dp"
|
||||
/>
|
||||
<ProgressBar
|
||||
android:layout_width="101dp"
|
||||
android:layout_height="101dp"
|
||||
android:layout_marginTop="20dp"
|
||||
app:layout_constraintTop_toBottomOf="@id/loggingInText"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:indeterminateTint="#00B4EF"
|
||||
/>
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
<EditText
|
||||
android:id="@+id/password"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="35dp"
|
||||
android:layout_marginLeft="46dp"
|
||||
android:layout_marginRight="46dp"
|
||||
android:background="@drawable/rounded_edit"
|
||||
android:padding="7dp"
|
||||
android:paddingRight="12dp"
|
||||
android:paddingTop="14dp"
|
||||
android:ems="10"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textSize="14sp"
|
||||
android:inputType="textPassword"
|
||||
android:textStyle="italic"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="right|center_vertical"
|
||||
app:layout_constraintTop_toBottomOf="@id/username"
|
||||
android:hint="@string/password"
|
||||
android:layout_marginTop="13dp"
|
||||
android:imeOptions="actionDone"/>
|
||||
|
||||
<Button
|
||||
android:id="@+id/loginButton"
|
||||
android:layout_width="154dp"
|
||||
android:layout_height="38dp"
|
||||
android:layout_marginTop="16dp"
|
||||
android:background="@drawable/rounded_button"
|
||||
android:fontFamily="@font/raleway_semibold"
|
||||
android:paddingBottom="0dp"
|
||||
android:paddingLeft="55dp"
|
||||
android:paddingRight="55dp"
|
||||
android:paddingTop="0dp"
|
||||
android:text="@string/login"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="15sp"
|
||||
app:layout_constraintRight_toRightOf="@id/username"
|
||||
app:layout_constraintTop_toBottomOf="@id/password"
|
||||
app:layout_goneMarginTop="4dp"/>
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/loggedInFrame"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
android:layout_marginTop="@dimen/login_form_margin_top"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:visibility="gone">
|
||||
<TextView
|
||||
android:id="@+id/loggedInText"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="@string/logged_in_welcome"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="24sp"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:layout_marginTop="115dp"
|
||||
/>
|
||||
<Button
|
||||
android:id="@+id/getStarted"
|
||||
android:layout_width="@dimen/button_medium_width"
|
||||
android:layout_height="@dimen/button_medium_height"
|
||||
android:layout_marginTop="22dp"
|
||||
android:background="@drawable/rounded_button_color1"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:text="@string/get_started"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_medium_text_size"
|
||||
app:layout_constraintTop_toBottomOf="@id/loggedInText"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_goneMarginTop="4dp"/>
|
||||
|
||||
<TextView
|
||||
android:id="@+id/forgotPassword"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway_semibold"
|
||||
android:textSize="14dp"
|
||||
android:text="@string/forgot_password"
|
||||
android:textStyle="italic"
|
||||
android:paddingRight="10dp"
|
||||
app:layout_constraintLeft_toLeftOf="@id/password"
|
||||
app:layout_constraintTop_toTopOf="@id/loginButton"
|
||||
app:layout_constraintRight_toLeftOf="@id/loginButton"
|
||||
android:textColor="@color/colorButton1"/>
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/loginForm"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
android:layout_marginTop="@dimen/login_form_margin_top"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:visibility="gone">
|
||||
<TextView
|
||||
android:id="@+id/error"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginBottom="25dp"
|
||||
android:layout_marginLeft="9dp"
|
||||
android:layout_marginRight="9dp"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textColor="@color/colorLoginError"
|
||||
android:textSize="14sp"
|
||||
app:layout_constraintBottom_toTopOf="@id/username"
|
||||
app:layout_constraintLeft_toLeftOf="@id/username"
|
||||
android:visibility="invisible"/>
|
||||
|
||||
<EditText
|
||||
android:id="@+id/username"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="27dp"
|
||||
android:layout_marginLeft="@dimen/login_margin"
|
||||
android:layout_marginRight="@dimen/login_margin"
|
||||
android:background="@color/white_opaque"
|
||||
android:paddingLeft="@dimen/edit_text_padding"
|
||||
android:ems="10"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textSize="@dimen/login_edit_text_size"
|
||||
android:inputType="textEmailAddress"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="left|center_vertical"
|
||||
android:hint="@string/username_or_email"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
android:layout_marginTop="83dp"/>
|
||||
|
||||
<android.support.design.widget.TextInputLayout
|
||||
android:id="@+id/passwordLayout"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginLeft="@dimen/login_margin"
|
||||
android:layout_marginRight="@dimen/login_margin"
|
||||
android:background="@color/white_opaque"
|
||||
app:passwordToggleTint="@color/showPasswordColor"
|
||||
app:passwordToggleEnabled="true"
|
||||
app:hintAnimationEnabled="false"
|
||||
app:passwordToggleDrawable="@drawable/selector_show_password"
|
||||
app:hintEnabled="false"
|
||||
app:layout_constraintTop_toBottomOf="@id/username"
|
||||
android:layout_marginTop="15dp"
|
||||
>
|
||||
<android.support.design.widget.TextInputEditText
|
||||
android:id="@+id/password"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="@dimen/login_edit_text_height"
|
||||
android:background="@color/white_opaque"
|
||||
android:paddingLeft="@dimen/edit_text_padding"
|
||||
android:drawablePadding="45dp"
|
||||
android:drawableEnd="@drawable/ic_eye_noshow"
|
||||
android:ems="10"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textSize="@dimen/login_edit_text_size"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="left|center_vertical"
|
||||
android:imeOptions="actionDone"
|
||||
android:hint="@string/password"
|
||||
android:inputType="textPassword" />
|
||||
</android.support.design.widget.TextInputLayout>
|
||||
|
||||
<CheckBox
|
||||
android:id="@+id/keepMeLoggedIn"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginTop="13dp"
|
||||
android:layout_marginRight="66dp"
|
||||
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:padding="0dp" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/keepMeLoggedInLabel"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="12sp"
|
||||
android:text="@string/keep_me_logged_in"
|
||||
app:layout_constraintRight_toLeftOf="@id/keepMeLoggedIn"
|
||||
app:layout_constraintTop_toTopOf="@id/keepMeLoggedIn"
|
||||
app:layout_constraintBottom_toBottomOf="@id/keepMeLoggedIn"
|
||||
android:textColor="@color/white_opaque"/>
|
||||
|
||||
<Button
|
||||
android:id="@+id/loginButton"
|
||||
android:layout_width="@dimen/button_medium_width"
|
||||
android:layout_height="@dimen/button_medium_height"
|
||||
android:background="@drawable/rounded_button_color3"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:layout_marginTop="@dimen/button_medium_margin"
|
||||
android:text="@string/log_in"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_medium_text_size"
|
||||
app:layout_constraintRight_toRightOf="@id/username"
|
||||
app:layout_constraintTop_toBottomOf="@id/keepMeLoggedIn" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/cancel"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginRight="8dp"
|
||||
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
|
||||
app:layout_constraintTop_toTopOf="@id/loginButton"
|
||||
app:layout_constraintBottom_toBottomOf="@id/loginButton"
|
||||
app:layout_constraintRight_toLeftOf="@id/loginButton"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="@dimen/button_medium_text_size"
|
||||
android:text="@string/cancel_uppercase" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/forgotPassword"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textSize="14sp"
|
||||
android:layout_marginTop="18dp"
|
||||
android:text="@string/cant_access_your_account"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/loginButton"
|
||||
android:textColor="@color/colorButton1"/>
|
||||
|
||||
<TextView
|
||||
android:id="@+id/takeMeInWorld"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="14sp"
|
||||
android:layout_marginBottom="11dp"
|
||||
android:padding="5dp"
|
||||
android:text="@string/take_me_in_world"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:textColor="@color/white_opaque"/>
|
||||
|
||||
<ImageView
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:src="@drawable/ic_right_arrow"
|
||||
android:layout_marginLeft="4dp"
|
||||
app:layout_constraintLeft_toRightOf="@id/takeMeInWorld"
|
||||
app:layout_constraintTop_toTopOf="@id/takeMeInWorld"
|
||||
app:layout_constraintBottom_toBottomOf="@id/takeMeInWorld"
|
||||
/>
|
||||
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
|
||||
|
||||
|
|
android/app/src/main/res/layout/fragment_login_menu.xml (new file, 122 lines)
@ -0,0 +1,122 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
xmlns:tools="http://schemas.android.com/tools"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
tools:context=".fragment.StartMenuFragment">
|
||||
|
||||
<ImageView
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:src="@drawable/encourage_login_background"
|
||||
android:scaleType="fitXY"
|
||||
/>
|
||||
|
||||
<ImageView
|
||||
android:id="@+id/header"
|
||||
android:layout_width="@dimen/header_hifi_width"
|
||||
android:layout_height="@dimen/header_hifi_height"
|
||||
android:layout_marginTop="@dimen/header_hifi_margin_top"
|
||||
android:contentDescription="HighFidelity"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:srcCompat="@drawable/hifi_header" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/text"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="@dimen/login_menu_text_size"
|
||||
android:layout_marginTop="37dp"
|
||||
android:text="@string/be_anywere"
|
||||
android:gravity="center"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
/>
|
||||
|
||||
<Button
|
||||
android:id="@+id/signupButton"
|
||||
android:layout_width="@dimen/button_large_width"
|
||||
android:layout_height="@dimen/button_large_height"
|
||||
android:layout_marginTop="48dp"
|
||||
android:background="@drawable/rounded_button_color1"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:text="@string/signup_uppercase"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_large_text_size"
|
||||
app:layout_constraintTop_toBottomOf="@id/text"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent" />
|
||||
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="@dimen/login_menu_translucent_rectangle_height"
|
||||
app:layout_constraintBottom_toBottomOf="parent" android:background="#B2000000">
|
||||
|
||||
|
||||
<Button
|
||||
android:id="@+id/loginButton"
|
||||
android:layout_width="@dimen/button_large_width"
|
||||
android:layout_height="@dimen/button_large_height"
|
||||
android:background="@drawable/rounded_button_color3"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:layout_marginTop="@dimen/login_menu_button_margin_top"
|
||||
android:text="@string/log_in"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_large_text_size"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"/>
|
||||
|
||||
<Button
|
||||
android:id="@+id/steamLoginButton"
|
||||
android:layout_width="@dimen/button_large_width"
|
||||
android:layout_height="@dimen/button_large_height"
|
||||
android:background="@drawable/rounded_button_color4"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:layout_marginTop="10dp"
|
||||
android:text="@string/steam_log_in"
|
||||
android:textAlignment="center"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_large_text_size"
|
||||
android:drawableLeft="@drawable/ic_steam"
|
||||
android:paddingLeft="38dp"
|
||||
android:paddingRight="38dp"
|
||||
app:layout_constraintTop_toBottomOf="@id/loginButton"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"/>
|
||||
|
||||
<TextView
|
||||
android:id="@+id/takeMeInWorld"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="14sp"
|
||||
android:layout_marginBottom="11dp"
|
||||
android:padding="5dp"
|
||||
android:text="@string/take_me_in_world"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:textColor="@color/white_opaque"/>
|
||||
|
||||
<ImageView
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:src="@drawable/ic_right_arrow"
|
||||
android:layout_marginLeft="4dp"
|
||||
app:layout_constraintLeft_toRightOf="@id/takeMeInWorld"
|
||||
app:layout_constraintTop_toTopOf="@id/takeMeInWorld"
|
||||
app:layout_constraintBottom_toBottomOf="@id/takeMeInWorld"
|
||||
/>
|
||||
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
</android.support.constraint.ConstraintLayout>
|
android/app/src/main/res/layout/fragment_signup.xml (new file, 252 lines)
@ -0,0 +1,252 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
xmlns:tools="http://schemas.android.com/tools"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:background="@color/backgroundLight">
|
||||
|
||||
|
||||
<ImageView
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:src="@drawable/encourage_login_background"
|
||||
android:scaleType="fitXY" />
|
||||
|
||||
<FrameLayout
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:background="#B2000000" />
|
||||
|
||||
<ImageView
|
||||
android:id="@+id/header"
|
||||
android:layout_width="@dimen/header_hifi_width"
|
||||
android:layout_height="@dimen/header_hifi_height"
|
||||
android:layout_marginTop="@dimen/header_hifi_margin_top"
|
||||
android:contentDescription="HighFidelity"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:srcCompat="@drawable/hifi_header" />
|
||||
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/loggingInFrame"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
android:layout_marginTop="100dp"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:visibility="gone">
|
||||
<TextView
|
||||
android:id="@+id/activityText"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="@string/logging_in"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="24sp"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:layout_marginTop="83dp"
|
||||
/>
|
||||
<ProgressBar
|
||||
android:layout_width="101dp"
|
||||
android:layout_height="101dp"
|
||||
android:layout_marginTop="20dp"
|
||||
app:layout_constraintTop_toBottomOf="@id/activityText"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:indeterminateTint="#00B4EF"
|
||||
/>
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/loggedInFrame"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
android:layout_marginTop="100dp"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:visibility="gone">
|
||||
<TextView
|
||||
android:id="@+id/loggedInText"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="@string/logged_in_welcome"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="24sp"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:layout_marginTop="115dp"
|
||||
/>
|
||||
<Button
|
||||
android:id="@+id/getStarted"
|
||||
android:layout_width="@dimen/button_medium_width"
|
||||
android:layout_height="@dimen/button_medium_height"
|
||||
android:layout_marginTop="22dp"
|
||||
android:background="@drawable/rounded_button_color1"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:text="@string/get_started"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_medium_text_size"
|
||||
app:layout_constraintTop_toBottomOf="@id/loggedInText"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent" />
|
||||
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/signupForm"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
android:layout_marginTop="100dp"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toBottomOf="@id/header"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:visibility="visible">
|
||||
|
||||
<TextView
|
||||
android:id="@+id/error"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginBottom="16dp"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textColor="@color/colorLoginError"
|
||||
android:textSize="14sp"
|
||||
app:layout_constraintBottom_toTopOf="@id/username"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="@id/username"
|
||||
app:layout_constraintRight_toRightOf="@id/username"
|
||||
android:visibility="invisible"/>
|
||||
|
||||
<EditText
|
||||
android:id="@+id/username"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="@dimen/login_edit_text_height"
|
||||
android:layout_marginLeft="@dimen/signup_margin"
|
||||
android:layout_marginRight="@dimen/signup_margin"
|
||||
android:background="@color/white_opaque"
|
||||
android:paddingLeft="@dimen/edit_text_padding"
|
||||
android:ems="10"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textSize="@dimen/login_edit_text_size"
|
||||
android:inputType="text"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="left|center_vertical"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
android:layout_marginTop="70dp"
|
||||
android:hint="@string/username" />
|
||||
|
||||
<EditText
|
||||
android:id="@+id/email"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="@dimen/login_edit_text_height"
|
||||
android:layout_marginLeft="@dimen/signup_margin"
|
||||
android:layout_marginRight="@dimen/signup_margin"
|
||||
android:background="@color/white_opaque"
|
||||
android:paddingLeft="@dimen/edit_text_padding"
|
||||
android:ems="10"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textSize="@dimen/login_edit_text_size"
|
||||
android:inputType="textEmailAddress"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="left|center_vertical"
|
||||
app:layout_constraintTop_toBottomOf="@id/username"
|
||||
android:layout_marginTop="14dp"
|
||||
android:hint="@string/email" />
|
||||
|
||||
|
||||
<android.support.design.widget.TextInputLayout
|
||||
android:id="@+id/passwordLayout"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginLeft="@dimen/signup_margin"
|
||||
android:layout_marginRight="@dimen/signup_margin"
|
||||
app:passwordToggleTint="@color/showPasswordColor"
|
||||
app:passwordToggleEnabled="true"
|
||||
app:hintAnimationEnabled="false"
|
||||
app:passwordToggleDrawable="@drawable/selector_show_password"
|
||||
app:hintEnabled="false"
|
||||
app:layout_constraintTop_toBottomOf="@id/email"
|
||||
android:layout_marginTop="15dp"
|
||||
>
|
||||
<android.support.design.widget.TextInputEditText
|
||||
android:id="@+id/password"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="27dp"
|
||||
android:background="@color/white_opaque"
|
||||
android:paddingLeft="@dimen/edit_text_padding"
|
||||
android:drawablePadding="55dp"
|
||||
android:drawableEnd="@drawable/ic_eye_noshow"
|
||||
android:ems="10"
|
||||
android:fontFamily="sans-serif"
|
||||
android:textSize="@dimen/login_edit_text_size"
|
||||
android:textColor="@color/editTextColor"
|
||||
android:textColorHint="@color/editTextColor"
|
||||
android:gravity="left|center_vertical"
|
||||
android:imeOptions="actionDone"
|
||||
android:hint="@string/password"
|
||||
android:inputType="textPassword" />
|
||||
</android.support.design.widget.TextInputLayout>
|
||||
|
||||
<CheckBox
|
||||
android:id="@+id/keepMeLoggedIn"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginTop="13dp"
|
||||
android:layout_marginRight="66dp"
|
||||
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
android:padding="0dp" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/keepMeLoggedInLabel"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="12sp"
|
||||
android:text="@string/keep_me_logged_in"
|
||||
app:layout_constraintRight_toLeftOf="@id/keepMeLoggedIn"
|
||||
app:layout_constraintTop_toTopOf="@id/keepMeLoggedIn"
|
||||
app:layout_constraintBottom_toBottomOf="@id/keepMeLoggedIn"
|
||||
android:textColor="@color/white_opaque"/>
|
||||
|
||||
<Button
|
||||
android:id="@+id/signupButton"
|
||||
android:layout_width="@dimen/button_medium_width"
|
||||
android:layout_height="@dimen/button_medium_height"
|
||||
android:layout_marginTop="10dp"
|
||||
android:background="@drawable/rounded_button_color1"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:text="@string/signup_uppercase"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:textAllCaps="false"
|
||||
android:textSize="@dimen/button_medium_text_size"
|
||||
app:layout_constraintRight_toRightOf="@id/username"
|
||||
app:layout_constraintTop_toBottomOf="@id/keepMeLoggedIn" />
|
||||
|
||||
<TextView
|
||||
android:id="@+id/cancel"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
|
||||
app:layout_constraintTop_toTopOf="@id/signupButton"
|
||||
app:layout_constraintBottom_toBottomOf="@id/signupButton"
|
||||
app:layout_constraintRight_toLeftOf="@id/signupButton"
|
||||
android:textColor="@color/white_opaque"
|
||||
android:fontFamily="@font/raleway_bold"
|
||||
android:textSize="@dimen/button_medium_text_size"
|
||||
android:text="@string/cancel_uppercase" />
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
|
||||
</android.support.constraint.ConstraintLayout>
|
|
@@ -9,4 +9,9 @@
        android:id="@+id/action_people"
        android:title="@string/people"
        />
    <item
        android:id="@+id/action_debug_settings"
        android:title="@string/settings"
        android:visible="false"
        />
</menu>
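The new settings entry ships hidden (android:visible="false"), so something has to reveal it at runtime; a minimal sketch, assuming it is toggled from the menu-preparation callback of whichever activity inflates this menu (the visibility condition is an assumption, not taken from this diff):

    // Hypothetical runtime toggle for the hidden debug settings item.
    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        MenuItem debugSettings = menu.findItem(R.id.action_debug_settings);
        if (debugSettings != null) {
            debugSettings.setVisible(BuildConfig.DEBUG); // e.g. only surface it in debug builds
        }
        return super.onPrepareOptionsMenu(menu);
    }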
android/app/src/main/res/values-w385dp/dimens.xml (new file, 22 lines)
@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <dimen name="header_hifi_margin_top">35dp</dimen>
    <dimen name="header_hifi_height">84dp</dimen>
    <dimen name="header_hifi_width">340dp</dimen>
    <dimen name="button_medium_width">171dp</dimen>
    <dimen name="button_medium_height">42dp</dimen>
    <dimen name="button_medium_margin">10dp</dimen>
    <dimen name="login_edit_text_size">14sp</dimen>
    <dimen name="button_large_text_size">18sp</dimen>
    <dimen name="button_medium_text_size">18sp</dimen>
    <dimen name="login_margin">72dp</dimen>
    <dimen name="signup_margin">76dp</dimen>
    <dimen name="login_form_margin_top">100dp</dimen>
    <dimen name="login_edit_text_height">27dp</dimen>
    <dimen name="button_large_width">238dp</dimen>
    <dimen name="button_large_height">42dp</dimen>
    <dimen name="login_menu_translucent_rectangle_height">270dp</dimen>
    <dimen name="login_menu_button_margin_top">86dp</dimen>
    <dimen name="login_menu_text_size">22sp</dimen>
    <dimen name="login_edit_text_padding">16dp</dimen>
</resources>
@@ -6,8 +6,12 @@
    <color name="colorAccent">#54D7FD</color>
    <color name="backgroundEditText">#E3E3E3</color>
    <color name="editTextColor">#575757</color>
    <color name="showPasswordColor">#3D3D3D</color>
    <color name="tabs">#1EB5EC</color>
    <color name="colorButton1">#00B4EF</color>
    <color name="colorButton2">#828282</color>
    <color name="colorButton3">#8F8F8F</color>
    <color name="colorButton4">#434343</color>
    <color name="backgroundDark">#333333</color>
    <color name="backgroundLight">#4F4F4F</color>
    <color name="backgroundSearch">#33999999</color>

@@ -22,4 +26,6 @@
    <color name="starSelectedTint">#FBD92A</color>
    <color name="starUnselectedTint">#8A8A8A</color>
    <color name="slidingUpPanelFadeColor">#40000000</color>
    <color name="clearText">#F2F2F2</color>

</resources>
@@ -33,10 +33,26 @@
    <dimen name="domainMarginBottom">6dp</dimen>
    <dimen name="domainNameHeight">64dp</dimen>

    <dimen name="header_hifi_margin_top">56dp</dimen>
    <dimen name="header_hifi_height">101dp</dimen>
    <dimen name="header_hifi_width">425dp</dimen>
    <dimen name="header_hifi_margin_top">32dp</dimen>
    <dimen name="header_hifi_height">76dp</dimen>
    <dimen name="header_hifi_width">306dp</dimen>

    <dimen name="list_vertical_padding">8dp</dimen>
    <dimen name="button_medium_width">150dp</dimen>
    <dimen name="button_medium_height">38dp</dimen>
    <dimen name="login_margin">65dp</dimen>
    <dimen name="signup_margin">68dp</dimen>
    <dimen name="login_form_margin_top">90dp</dimen>
    <dimen name="button_medium_margin">9dp</dimen>
    <dimen name="button_medium_text_size">16sp</dimen>
    <dimen name="button_large_text_size">16sp</dimen>
    <dimen name="login_edit_text_size">13sp</dimen>
    <dimen name="login_edit_text_height">24dp</dimen>
    <dimen name="button_large_width">214dp</dimen>
    <dimen name="button_large_height">38dp</dimen>
    <dimen name="login_menu_translucent_rectangle_height">300dp</dimen>
    <dimen name="login_menu_button_margin_top">77dp</dimen>
    <dimen name="login_menu_text_size">20sp</dimen>
    <dimen name="login_edit_text_padding">14dp</dimen>

</resources>
@@ -10,23 +10,48 @@
    <string name="popular">POPULAR</string>
    <string name="bookmarks">BOOKMARKS</string>
    <string name="goto_url_hint">Type a domain url</string>
    <string name="username_or_email">Username or email\u00A0</string>
    <string name="password">Password\u00A0</string>
    <string name="email">Email</string>
    <string name="username">Username</string>
    <string name="username_or_email">Username or email</string>
    <string name="password">Password</string>
    <string name="login">Login</string>
    <string name="logout">Logout</string>
    <string name="forgot_password">Forgot password?\u00A0</string>
    <string name="cant_access_your_account"><u>Can\u0027t access your account?</u></string>
    <string name="login_username_or_password_incorrect">Username or password incorrect.</string>
    <string name="logging_in">Logging into High Fidelity</string>
    <string name="logging_in">Logging in</string>
    <string name="search_hint"><i>Search for a place by name</i>\u00A0</string>
    <string name="search_loading">Loading places…</string>
    <string name="search_no_results">No places exist with that name</string>
    <string name="privacyPolicy">Privacy Policy</string>
    <string name="your_last_location">Your Last Location</string>
    <string name="online">Online</string>
    <string name="signup">Sign Up</string>
    <string name="signup_uppercase">SIGN UP</string>
    <string name="creating_account">Creating your High Fidelity account</string>
    <string name="signup_email_username_or_password_incorrect">Email, username or password incorrect.</string>
    <string name="signedin_welcome">You are now signed into High Fidelity</string>
    <string name="logged_in_welcome">You are now logged in!</string>
    <string name="welcome">Welcome</string>
    <string name="cancel">Cancel</string>
    <string name="cancel_uppercase">CANCEL</string>
    <string name="get_started">GET STARTED</string>

    <!-- tags -->
    <string name="tagFragmentHome">tagFragmentHome</string>
    <string name="tagFragmentLogin">tagFragmentLogin</string>
    <string name="tagFragmentLoggingIn">tagFragmentLogginIn</string>
    <string name="tagFragmentSignup">tagFragmentSignup</string>
    <string name="tagFragmentPolicy">tagFragmentPolicy</string>
    <string name="tagFragmentPeople">tagFragmentPeople</string>
    <string name="tagSettings">tagSettings</string>
    <string name="tagFragmentSignedIn">tagFragmentSignedIn</string>
    <string name="settings">Settings</string>
    <string name="AEC">AEC</string>
    <string name="acoustic_echo_cancellation">Acoustic Echo Cancellation</string>
    <string name="settings_developer">Developer</string>
    <string name="log_in">LOG IN</string>
    <string name="keep_me_logged_in">Keep Me Logged In</string>
    <string name="take_me_in_world">No thanks, take me in-world!</string>
    <string name="be_anywere">BE ANYWHERE, WITH ANYONE \nRIGHT NOW</string>
    <string name="steam_log_in">STEAM LOG IN</string>
</resources>
android/app/src/main/res/xml/settings.xml (new file, 12 lines)
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
    <PreferenceCategory
        android:title="@string/settings_developer"
        android:key="pref_key_developer">
        <SwitchPreference
            android:key="aec"
            android:title="@string/AEC"
            android:summary="@string/acoustic_echo_cancellation"
            android:defaultValue="true" />
    </PreferenceCategory>
</PreferenceScreen>
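This preference screen still needs something to inflate it; a minimal sketch, assuming a plain PreferenceFragment backs the in-app settings view (the fragment class below is illustrative, not taken from this diff):

    import android.os.Bundle;
    import android.preference.PreferenceFragment;

    // Hypothetical settings fragment that inflates res/xml/settings.xml.
    public class ExampleSettingsFragment extends PreferenceFragment {
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // Loads the PreferenceScreen above, including the "aec" SwitchPreference
            // (default true) under the Developer category.
            addPreferencesFromResource(R.xml.settings);
        }
    }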
@@ -72,17 +72,17 @@ def jniFolder = new File(appDir, 'src/main/jniLibs/arm64-v8a')
def baseUrl = 'https://hifi-public.s3.amazonaws.com/dependencies/android/'
def breakpadDumpSymsDir = new File("${appDir}/build/tmp/breakpadDumpSyms")

def qtFile='qt-5.11.1_linux_armv8-libcpp_openssl.tgz'
def qtChecksum='f312c47cd8b8dbca824c32af4eec5e66'
def qtVersionId='nyCGcb91S4QbYeJhUkawO5x1lrLdSNB_'
def qtFile='qt-5.11.1_linux_armv8-libcpp_openssl_patched.tgz'
def qtChecksum='aa449d4bfa963f3bc9a9dfe558ba29df'
def qtVersionId='3S97HBM5G5Xw9EfE52sikmgdN3t6C2MN'
if (Os.isFamily(Os.FAMILY_MAC)) {
    qtFile = 'qt-5.11.1_osx_armv8-libcpp_openssl.tgz'
    qtChecksum='a0c8b394aec5b0fcd46714ca3a53278a'
    qtVersionId='QNa.lwNJaPc0eGuIL.xZ8ebeTuLL7rh8'
    qtFile = 'qt-5.11.1_osx_armv8-libcpp_openssl_patched.tgz'
    qtChecksum='c83cc477c08a892e00c71764dca051a0'
    qtVersionId='OxBD7iKINv1HbyOXmAmDrBb8AF3N.Kup'
} else if (Os.isFamily(Os.FAMILY_WINDOWS)) {
    qtFile = 'qt-5.11.1_win_armv8-libcpp_openssl.tgz'
    qtChecksum='d80aed4233ce9e222aae8376e7a94bf9'
    qtVersionId='iDVXu0i3WEXRFIxQCtzcJ2XuKrE8RIqB'
    qtFile = 'qt-5.11.1_win_armv8-libcpp_openssl_patched.tgz'
    qtChecksum='0582191cc55431aa4f660848a542883e'
    qtVersionId='JfWM0P_Mz5Qp0LwpzhrsRwN3fqlLSFeT'
}
def packages = [
|
||||
|
@ -106,11 +106,6 @@ def packages = [
|
|||
versionId: 'r5Zran.JSCtvrrB6Q4KaqfIoALPw3lYY',
|
||||
checksum: 'a8ee8584cf1ccd34766c7ddd9d5e5449',
|
||||
],
|
||||
glm: [
|
||||
file: 'glm-0.9.8.5-patched.tgz',
|
||||
versionId: 'cskfMoJrFlAeqI3WPxemyO_Cxt7rT9EJ',
|
||||
checksum: '067b5fe16b220b5b1a1039ba51b062ae',
|
||||
],
|
||||
gvr: [
|
||||
file: 'gvrsdk_v1.101.0.tgz',
|
||||
versionId: 'nqBV_j81Uc31rC7bKIrlya_Hah4v3y5r',
|
||||
|
@ -143,11 +138,9 @@ def packages = [
|
|||
includeLibs: ['libtbb.so', 'libtbbmalloc.so'],
|
||||
],
|
||||
hifiAC: [
|
||||
file: 'libplugins_libhifiCodec.zip',
|
||||
versionId: 'i31pW.qNbvFOXRxbyiJUxg3sphaFNmZU',
|
||||
checksum: '9412a8e12c88a4096c1fc843bb9fe52d',
|
||||
sharedLibFolder: '',
|
||||
includeLibs: ['libplugins_libhifiCodec.so']
|
||||
baseUrl: 'http://s3.amazonaws.com/hifi-public/dependencies/',
|
||||
file: 'codecSDK-android_armv8-2.0.zip',
|
||||
checksum: '1cbef929675818fc64c4101b72f84a6a'
|
||||
],
|
||||
etc2comp: [
|
||||
file: 'etc2comp-patched-armv8-libcpp.tgz',
|
||||
|
@ -163,33 +156,6 @@ def packages = [
|
|||
]
|
||||
]
|
||||
|
||||
|
||||
def scribeLocalFile='scribe' + EXEC_SUFFIX
|
||||
def scribeFile='scribe_linux_x86_64'
|
||||
def scribeChecksum='ca4b904f52f4f993c29175ba96798fa6'
|
||||
def scribeVersion='u_iTrJDaE95i2abTPXOpPZckGBIim53G'
|
||||
|
||||
def shreflectLocalFile='shreflect' + EXEC_SUFFIX
|
||||
def shreflectFile='shreflect_linux_x86_64'
|
||||
def shreflectChecksum='d6094a8580066c0b6f4e80b5adfb1d98'
|
||||
def shreflectVersion='jnrpudh6fptIg6T2.Z6fgKP2ultAdKmE'
|
||||
|
||||
if (Os.isFamily(Os.FAMILY_MAC)) {
|
||||
scribeFile = 'scribe_osx_x86_64'
|
||||
scribeChecksum='72db9d32d4e1e50add755570ac5eb749'
|
||||
scribeVersion='DAW0DmnjCRib4MD8x93bgc2Z2MpPojZC'
|
||||
shreflectFile='shreflect_osx_x86_64'
|
||||
shreflectChecksum='d613ef0703c21371fee93fd2e54b964f'
|
||||
shreflectVersion='.rYNzjSFq6WtWDnE5KIKRIAGyJtr__ad'
|
||||
} else if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
scribeFile = 'scribe_win32_x86_64.exe'
|
||||
scribeChecksum='678e43d290c90fda670c6fefe038a06d'
|
||||
scribeVersion='PuullrA_bPlO9kXZRt8rLe536X1UI.m7'
|
||||
shreflectFile='shreflect_win32_x86_64.exe'
|
||||
shreflectChecksum='6f4a77b8cceb3f1bbc655132c3665060'
|
||||
shreflectVersion='iIyCyza1nelkbI7ihybF59bBlwrfAC3D'
|
||||
}
|
||||
|
||||
def options = [
|
||||
files: new TreeSet<File>(),
|
||||
features: new HashSet<String>(),
|
||||
|
@ -367,7 +333,8 @@ task downloadDependencies {
    doLast {
        packages.each { entry ->
            def filename = entry.value['file'];
            def url = baseUrl + filename;
            def dependencyBaseUrl = entry.value['baseUrl']
            def url = (dependencyBaseUrl?.trim() ? dependencyBaseUrl : baseUrl) + filename;
            if (entry.value.containsKey('versionId')) {
                url = url + '?versionId=' + entry.value['versionId']
            }

@ -447,44 +414,6 @@ task copyDependencies(dependsOn: [ extractDependencies ]) {
    }
}

task downloadScribe(type: Download) {
    src baseUrl + scribeFile + '?versionId=' + scribeVersion
    dest new File(baseFolder, scribeLocalFile)
    onlyIfNewer true
}

task verifyScribe (type: Verify, dependsOn: downloadScribe) {
    src new File(baseFolder, scribeLocalFile);
    checksum scribeChecksum
}

task fixScribePermissions(type: Exec, dependsOn: verifyScribe) {
    commandLine 'chmod', 'a+x', HIFI_ANDROID_PRECOMPILED + '/' + scribeLocalFile
}

task downloadShreflect(type: Download) {
    src baseUrl + shreflectFile + '?versionId=' + shreflectVersion
    dest new File(baseFolder, shreflectLocalFile)
    onlyIfNewer true
}

task verifyShreflect(type: Verify, dependsOn: downloadShreflect) {
    src new File(baseFolder, shreflectLocalFile);
    checksum shreflectChecksum
}

task fixShreflectPermissions(type: Exec, dependsOn: verifyShreflect) {
    commandLine 'chmod', 'a+x', HIFI_ANDROID_PRECOMPILED + '/' + shreflectLocalFile
}

task setupScribe(dependsOn: [verifyScribe, verifyShreflect]) { }

// On Windows, we don't need to set the executable bit, but on OSX and Unix we do
if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
    setupScribe.dependsOn fixScribePermissions
    setupScribe.dependsOn fixShreflectPermissions
}

task extractGvrBinaries(dependsOn: extractDependencies) {
    doLast {
        def gvrLibFolder = new File(HIFI_ANDROID_PRECOMPILED, 'gvr/gvr-android-sdk-1.101.0/libraries');

@ -571,7 +500,7 @@ task qtBundle {
    }
}

task setupDependencies(dependsOn: [setupScribe, copyDependencies, extractGvrBinaries, qtBundle]) { }
task setupDependencies(dependsOn: [copyDependencies, extractGvrBinaries, qtBundle]) { }

task cleanDependencies(type: Delete) {
    delete HIFI_ANDROID_PRECOMPILED

@ -668,6 +597,21 @@ task uploadBreakpadDumpSymsRelease(type:io.github.httpbuilderng.http.HttpTask, d
    }
}

task renameHifiACTaskDebug() {
    doLast {
        def sourceFile = new File("${appDir}/build/intermediates/cmake/debug/obj/arm64-v8a/","libhifiCodec.so")
        def destinationFile = new File("${appDir}/src/main/jniLibs/arm64-v8a", "libplugins_libhifiCodec.so")
        copy { from sourceFile; into destinationFile.parent; rename(sourceFile.name, destinationFile.name) }
    }
}
task renameHifiACTaskRelease(type: Copy) {
    doLast {
        def sourceFile = new File("${appDir}/build/intermediates/cmake/release/obj/arm64-v8a/","libhifiCodec.so")
        def destinationFile = new File("${appDir}/src/main/jniLibs/arm64-v8a", "libplugins_libhifiCodec.so")
        copy { from sourceFile; into destinationFile.parent; rename(sourceFile.name, destinationFile.name) }
    }
}

// FIXME this code is prototyping the desired functionality for doing build time binary dependency resolution.
// See the comment on the qtBundle task above
/*

1  android/gradle.properties  Normal file
@ -0,0 +1 @@
org.gradle.jvmargs=-Xms2g -Xmx4g
@ -11,7 +11,7 @@ setup_memory_debugger()

# link in the shared libraries
link_hifi_libraries(
    audio avatars octree gpu graphics fbx entities
    audio avatars octree gpu graphics fbx hfm entities
    networking animation recording shared script-engine embedded-webserver
    controllers physics plugins midi image
)

@ -53,6 +53,7 @@
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h

#include "entities/AssignmentParentFinder.h"
#include "AssignmentDynamicFactory.h"
#include "RecordingScriptingInterface.h"
#include "AbstractAudioInterface.h"
#include "AgentScriptingInterface.h"

@ -67,6 +68,9 @@ Agent::Agent(ReceivedMessage& message) :
{
DependencyManager::set<ScriptableAvatar>();

DependencyManager::registerInheritance<EntityDynamicFactoryInterface, AssignmentDynamicFactory>();
DependencyManager::set<AssignmentDynamicFactory>();

DependencyManager::set<AnimationCache>();
DependencyManager::set<AnimationCacheScriptingInterface>();
DependencyManager::set<EntityScriptingInterface>(false);

@ -92,7 +96,6 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<recording::Recorder>();
DependencyManager::set<recording::ClipCache>();

DependencyManager::set<ScriptCache>();
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();

@ -173,6 +176,8 @@ void Agent::run() {
// Create ScriptEngines on threaded-assignment thread then move to main thread.
DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT)->moveToThread(qApp->thread());

DependencyManager::set<ScriptCache>();

// make sure we request our script once the agent connects to the domain
auto nodeList = DependencyManager::get<NodeList>();

@ -211,13 +216,14 @@ void Agent::requestScript() {
}

// make sure this is not a script request for the file scheme
if (scriptURL.scheme() == URL_SCHEME_FILE) {
if (scriptURL.scheme() == HIFI_URL_SCHEME_FILE) {
qWarning() << "Cannot load script for Agent from local filesystem.";
scriptRequestFinished();
return;
}

auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(this, scriptURL);
auto request = DependencyManager::get<ResourceManager>()->createResourceRequest(
    this, scriptURL, true, -1, "Agent::requestScript");

if (!request) {
qWarning() << "Could not create ResourceRequest for Agent script at" << scriptURL.toString();
@ -356,154 +362,173 @@ void Agent::scriptRequestFinished() {
}

void Agent::executeScript() {
_scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);
// the following block is scoped so that any shared pointers we take here
// are cleared before we call setFinished at the end of the function
{
_scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);

// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
scriptedAvatar->setID(getSessionUUID());
scriptedAvatar->setForceFaceTrackerConnected(true);

scriptedAvatar->setID(getSessionUUID());
// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar->setSkeletonModelURL(QUrl());

connect(_scriptEngine.data(), SIGNAL(update(float)),
    scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
scriptedAvatar->setForceFaceTrackerConnected(true);
// force lazy initialization of the head data for the scripted avatar
// since it is referenced below by computeLoudness and getAudioLoudness
scriptedAvatar->getHeadOrientation();

// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar->setSkeletonModelURL(QUrl());
// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());

// force lazy initialization of the head data for the scripted avatar
// since it is referenced below by computeLoudness and getAudioLoudness
scriptedAvatar->getHeadOrientation();
// give scripts access to the Users object
_scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());

// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [&player, &scriptedAvatar] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
}

// give scripts access to the Users object
_scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());
// these procedural movements are included in the recordings
scriptedAvatar->setHasProceduralEyeFaceMovement(false);
scriptedAvatar->setHasProceduralBlinkFaceMovement(false);
scriptedAvatar->setHasAudioEnabledFaceMovement(false);
} else {
scriptedAvatar->clearRecordingBasis();

auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
// restore procedural blendshape movement
scriptedAvatar->setHasProceduralEyeFaceMovement(true);
scriptedAvatar->setHasProceduralBlinkFaceMovement(true);
scriptedAvatar->setHasAudioEnabledFaceMovement(true);
}
} else {
scriptedAvatar->clearRecordingBasis();
}
});
});
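The notable change in the `playbackStateChanged` connection above is the lambda capture: the old code captured `player` and `scriptedAvatar` by reference, while the new code captures by value, so the shared pointers stay alive for as long as the connection does. A minimal standalone sketch of why value capture is the safer choice for a long-lived handler (not engine code; the `Deck` type and names below are invented for illustration):

```cpp
// Minimal sketch (hypothetical types): a long-lived callback should capture
// shared_ptr handles by value rather than by reference.
#include <functional>
#include <iostream>
#include <memory>

struct Deck { bool playing = false; };

std::function<void()> makePlaybackHandler(std::shared_ptr<Deck> player) {
    // Capturing 'player' by value copies the shared_ptr, so the Deck stays
    // alive for as long as the handler itself does.
    return [player]() {
        std::cout << (player->playing ? "playing" : "stopped") << '\n';
    };
    // A [&player] capture would instead reference this function's local
    // parameter, which is gone by the time the handler runs later.
}

int main() {
    std::function<void()> handler;
    {
        auto deck = std::make_shared<Deck>();
        handler = makePlaybackHandler(deck);
        deck->playing = true;
    } // 'deck' goes out of scope; the value capture keeps the Deck alive.
    handler(); // prints "playing"
}
```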
using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {
using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {

auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();

// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
}

AvatarData::fromFrame(frame->data, *scriptedAvatar);
});

using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
static quint16 audioSequenceNumber{ 0 };

QByteArray audio(frame->data);

if (_isNoiseGateEnabled) {
int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
_audioGate.render(samples, samples, numSamples);
}

computeLoudness(&audio, scriptedAvatar);

// state machine to detect gate opening and closing
bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
Q_UNUSED(openedInLastBlock);

// the codec must be flushed to silence before sending silent packets,
// so delay the transition to silent packets by one packet after becoming silent.
auto packetType = PacketType::MicrophoneAudioNoEcho;
if (!audioGateOpen && !closedInLastBlock) {
packetType = PacketType::SilentAudioFrame;
}

Transform audioTransform;
auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
audioTransform.setRotation(headOrientation);

QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
} else {
encodedBuffer = audio;
}

AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
    audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
    packetType, _selectedCodecName);
});

auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());

// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));

_scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());

QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();

_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);

_scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
    LocationScriptingInterface::locationSetter);

auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());

// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
entityScriptingInterface->init();

_entityViewer.init();

entityScriptingInterface->setEntityTree(_entityViewer.getTree());

DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());

// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
QTimer* avatarDataTimer = new QTimer(this);
connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
avatarDataTimer->setSingleShot(false);
avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
avatarDataTimer->setTimerType(Qt::PreciseTimer);
avatarDataTimer->start();

_scriptEngine->run();

Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

if (recordingInterface->isPlaying()) {
recordingInterface->stopPlaying();
}

AvatarData::fromFrame(frame->data, *scriptedAvatar);
});
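The audio frame handler shown above carries two small pieces of control logic worth calling out: a gate state machine driven by loudness, and a deliberate one-packet delay before switching to `SilentAudioFrame` so the encoder can be flushed to silence first. A self-contained sketch of that transition rule (hypothetical names, not the engine's packet API):

```cpp
// Minimal sketch: keep sending one more regular audio packet after the gate
// closes, so the codec can be flushed to silence before silent frames begin.
#include <cstdio>

enum class PacketKind { MicrophoneAudio, SilentFrame };

struct GateTracker {
    bool gateOpen = false;

    PacketKind nextPacketKind(float loudness) {
        bool open = (loudness != 0.0f);
        bool closedThisBlock = gateOpen && !open;  // the gate just closed
        gateOpen = open;
        // While open, or for exactly one block after closing, send real audio;
        // only after that fall back to silent frames.
        return (!open && !closedThisBlock) ? PacketKind::SilentFrame
                                           : PacketKind::MicrophoneAudio;
    }
};

int main() {
    GateTracker tracker;
    const float loudness[] = { 0.8f, 0.5f, 0.0f, 0.0f, 0.0f };
    for (float l : loudness) {
        std::printf("%s\n", tracker.nextPacketKind(l) == PacketKind::SilentFrame
                                ? "SilentAudioFrame" : "MicrophoneAudioNoEcho");
    }
    // Prints three audio packets (the third is the flush block), then silent frames.
}
```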
using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
static quint16 audioSequenceNumber{ 0 };

QByteArray audio(frame->data);

if (_isNoiseGateEnabled) {
int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
_audioGate.render(samples, samples, numSamples);
if (recordingInterface->isRecording()) {
recordingInterface->stopRecording();
}

computeLoudness(&audio, scriptedAvatar);
avatarDataTimer->stop();

// state machine to detect gate opening and closing
bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
Q_UNUSED(openedInLastBlock);
setIsAvatar(false); // will stop timers for sending identity packets
}

// the codec must be flushed to silence before sending silent packets,
// so delay the transition to silent packets by one packet after becoming silent.
auto packetType = PacketType::MicrophoneAudioNoEcho;
if (!audioGateOpen && !closedInLastBlock) {
packetType = PacketType::SilentAudioFrame;
}

Transform audioTransform;
auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
audioTransform.setRotation(headOrientation);

QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
} else {
encodedBuffer = audio;
}

AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
    audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
    packetType, _selectedCodecName);
});

auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());

// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));

_scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());

QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();

_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);

_scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
    LocationScriptingInterface::locationSetter);

auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());

entityScriptingInterface->init();

_entityViewer.init();

entityScriptingInterface->setEntityTree(_entityViewer.getTree());

DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());

QMetaObject::invokeMethod(&_avatarAudioTimer, "start");

// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
QTimer* avatarDataTimer = new QTimer(this);
connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
avatarDataTimer->setSingleShot(false);
avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
avatarDataTimer->setTimerType(Qt::PreciseTimer);
avatarDataTimer->start();

_scriptEngine->run();

Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

DependencyManager::destroy<RecordingScriptingInterface>();
setFinished(true);
}

@ -553,28 +578,33 @@ void Agent::setIsAvatar(bool isAvatar) {
}
_isAvatar = isAvatar;

if (_isAvatar && !_avatarIdentityTimer) {
// set up the avatar timers
_avatarIdentityTimer = new QTimer(this);
_avatarQueryTimer = new QTimer(this);
auto scriptableAvatar = DependencyManager::get<ScriptableAvatar>();
if (_isAvatar) {
if (!_avatarIdentityTimer) {
// set up the avatar timers
_avatarIdentityTimer = new QTimer(this);
_avatarQueryTimer = new QTimer(this);

// connect our slot
connect(_avatarIdentityTimer, &QTimer::timeout, this, &Agent::sendAvatarIdentityPacket);
connect(_avatarQueryTimer, &QTimer::timeout, this, &Agent::queryAvatars);
// connect our slot
connect(_avatarIdentityTimer, &QTimer::timeout, this, &Agent::sendAvatarIdentityPacket);
connect(_avatarQueryTimer, &QTimer::timeout, this, &Agent::queryAvatars);

static const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
static const int AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS = 1000;
static const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
static const int AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS = 1000;

// start the timers
_avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
_avatarQueryTimer->start(AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS);
// start the timers
_avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
_avatarQueryTimer->start(AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS);

// tell the avatarAudioTimer to start ticking
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
}
connect(_scriptEngine.data(), &ScriptEngine::update,
    scriptableAvatar.data(), &ScriptableAvatar::update, Qt::QueuedConnection);

if (!_isAvatar) {
// tell the avatarAudioTimer to start ticking
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
}

_entityEditSender.setMyAvatar(scriptableAvatar.data());
} else {
if (_avatarIdentityTimer) {
_avatarIdentityTimer->stop();
delete _avatarIdentityTimer;

@ -601,14 +631,14 @@ void Agent::setIsAvatar(bool isAvatar) {
packet->writePrimitive(KillAvatarReason::NoReason);
nodeList->sendPacket(std::move(packet), *node);
});

disconnect(_scriptEngine.data(), &ScriptEngine::update,
    scriptableAvatar.data(), &ScriptableAvatar::update);

QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
}

QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");

_entityEditSender.setMyAvatar(nullptr);
} else {
auto scriptableAvatar = DependencyManager::get<ScriptableAvatar>();
_entityEditSender.setMyAvatar(scriptableAvatar.data());
}
}
@ -724,13 +754,13 @@ void Agent::processAgentAvatarAudio() {
const int16_t* nextSoundOutput = NULL;

if (_avatarSound) {
const QByteArray& soundByteArray = _avatarSound->getByteArray();
nextSoundOutput = reinterpret_cast<const int16_t*>(soundByteArray.data()
auto audioData = _avatarSound->getAudioData();
nextSoundOutput = reinterpret_cast<const int16_t*>(audioData->rawData()
    + _numAvatarSoundSentBytes);

int numAvailableBytes = (soundByteArray.size() - _numAvatarSoundSentBytes) > AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
int numAvailableBytes = (audioData->getNumBytes() - _numAvatarSoundSentBytes) > AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
    ? AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
    : soundByteArray.size() - _numAvatarSoundSentBytes;
    : audioData->getNumBytes() - _numAvatarSoundSentBytes;
numAvailableSamples = (int16_t)numAvailableBytes / sizeof(int16_t);

@ -743,7 +773,7 @@ void Agent::processAgentAvatarAudio() {
}

_numAvatarSoundSentBytes += numAvailableBytes;
if (_numAvatarSoundSentBytes == soundByteArray.size()) {
if (_numAvatarSoundSentBytes == (int)audioData->getNumBytes()) {
// we're done with this sound object - so set our pointer back to NULL
// and our sent bytes back to zero
_avatarSound.clear();

@ -759,7 +789,7 @@ void Agent::processAgentAvatarAudio() {
// seek past the sequence number, will be packed when destination node is known
audioPacket->seek(sizeof(quint16));

if (silentFrame) {
if (silentFrame && !_flushEncoder) {

if (!_isListeningToAudioStream) {
// if we have a silent frame and we're not listening then just send nothing and break out of here

@ -781,7 +811,7 @@ void Agent::processAgentAvatarAudio() {

// no matter what, the loudness should be set to 0
computeLoudness(nullptr, scriptedAvatar);
} else if (nextSoundOutput) {
} else if (nextSoundOutput || _flushEncoder) {

// write the codec
audioPacket->writeString(_selectedCodecName);

@ -835,8 +865,6 @@ void Agent::processAgentAvatarAudio() {
}

void Agent::aboutToFinish() {
setIsAvatar(false);// will stop timers for sending identity packets

// our entity tree is going to go away so tell that to the EntityScriptingInterface
DependencyManager::get<EntityScriptingInterface>()->setEntityTree(nullptr);

@ -855,15 +883,25 @@ void Agent::aboutToFinish() {
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AudioScriptingInterface>();

DependencyManager::destroy<RecordingScriptingInterface>();
DependencyManager::destroy<recording::Deck>();
DependencyManager::destroy<recording::Recorder>();
DependencyManager::destroy<recording::ClipCache>();
DependencyManager::destroy<ScriptEngine>();

// drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
// this ensures that the ScriptEngine goes down before ScriptEngines
_scriptEngine.clear();

{
DependencyManager::get<ScriptEngines>()->shutdownScripting();
}

DependencyManager::destroy<ScriptEngines>();

DependencyManager::destroy<AssignmentDynamicFactory>();

DependencyManager::destroy<ScriptableAvatar>();

QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");

// cleanup codec & encoder
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
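`processAgentAvatarAudio()` above now reads the preloaded sound through `getAudioData()`/`rawData()`/`getNumBytes()` instead of a `QByteArray`, but the chunking scheme is unchanged: send at most one network frame's worth of bytes per tick, clamp the final partial chunk, and reset once everything has been sent. A rough sketch of that loop (the frame size below is an assumption for the sketch, not the engine's constant):

```cpp
// Minimal sketch: walk a preloaded sound buffer one network frame at a time,
// clamping the final partial chunk.
#include <algorithm>
#include <cstdio>
#include <vector>

constexpr size_t NETWORK_FRAME_BYTES_PER_CHANNEL = 480; // assumed value

int main() {
    std::vector<char> soundBytes(1100);   // stand-in for the decoded sound data
    size_t sentBytes = 0;

    while (sentBytes < soundBytes.size()) {
        size_t available = std::min(NETWORK_FRAME_BYTES_PER_CHANNEL,
                                    soundBytes.size() - sentBytes);
        const char* chunk = soundBytes.data() + sentBytes;
        (void)chunk; // a real sender would encode and emit this chunk here
        sentBytes += available;
        std::printf("sent %zu bytes (total %zu)\n", available, sentBytes);
    }
    // Once every byte has been sent, the real code clears its sound pointer and
    // resets the byte counter, ready for the next Sound object.
}
```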
@ -21,7 +21,6 @@
#include <QtCore/QTimer>
#include <QUuid>

#include <ClientTraitsHandler.h>
#include <EntityEditPacketSender.h>
#include <EntityTree.h>
#include <ScriptEngine.h>

@ -35,6 +35,7 @@

#include "AssignmentClientLogging.h"
#include "AssignmentFactory.h"
#include "ResourceRequestObserver.h"

const QString ASSIGNMENT_CLIENT_TARGET_NAME = "assignment-client";
const long long ASSIGNMENT_REQUEST_INTERVAL_MSECS = 1 * 1000;

@ -49,6 +50,7 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
DependencyManager::set<tracing::Tracer>();
DependencyManager::set<StatTracker>();
DependencyManager::set<AccountManager>();
DependencyManager::set<ResourceRequestObserver>();

auto addressManager = DependencyManager::set<AddressManager>();

@ -38,6 +38,8 @@
#include "AvatarAudioStream.h"
#include "InjectedAudioStream.h"

using namespace std;

static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
static const int DISABLE_STATIC_JITTER_FRAMES = -1;
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;

@ -49,11 +51,11 @@ static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
int AudioMixer::_numStaticJitterFrames{ DISABLE_STATIC_JITTER_FRAMES };
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
std::map<QString, std::shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
map<QString, shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
QStringList AudioMixer::_codecPreferenceOrder{};
QHash<QString, AABox> AudioMixer::_audioZones;
QVector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
QVector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
vector<AudioMixer::ZoneDescription> AudioMixer::_audioZones;
vector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
vector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;

AudioMixer::AudioMixer(ReceivedMessage& message) :
    ThreadedAssignment(message)

@ -67,7 +69,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
_availableCodecs.clear(); // Make sure struct is clean
auto pluginManager = DependencyManager::set<PluginManager>();
auto codecPlugins = pluginManager->getCodecPlugins();
std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
for_each(codecPlugins.cbegin(), codecPlugins.cend(),
    [&](const CodecPluginPointer& codec) {
        _availableCodecs[codec->getName()] = codec;
    });

@ -87,7 +89,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
    PacketType::NodeIgnoreRequest,
    PacketType::RadiusIgnoreRequest,
    PacketType::RequestsDomainListData,
    PacketType::PerAvatarGainSet },
    PacketType::PerAvatarGainSet,
    PacketType::AudioSoloRequest },
    this, "queueAudioPacket");

// packets whose consequences are global should be processed on the main thread

@ -122,7 +125,7 @@ void AudioMixer::queueAudioPacket(QSharedPointer<ReceivedMessage> message, Share
void AudioMixer::queueReplicatedAudioPacket(QSharedPointer<ReceivedMessage> message) {
// make sure we have a replicated node for the original sender of the packet
auto nodeList = DependencyManager::get<NodeList>();

// Node ID is now part of user data, since replicated audio packets are non-sourced.
QUuid nodeID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
@ -173,12 +176,12 @@ void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> mes
}
}

const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vector<QString> codecs) {
const pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(vector<QString> codecs) {
QString selectedCodecName;
CodecPluginPointer selectedCodec;

// read the codecs requested (by the client)
int minPreference = std::numeric_limits<int>::max();
int minPreference = numeric_limits<int>::max();
for (auto& codec : codecs) {
if (_availableCodecs.count(codec) > 0) {
int preference = _codecPreferenceOrder.indexOf(codec);

@ -191,20 +194,9 @@ const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vec
}
}

return std::make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
return make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
}

void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
// enumerate the connected listeners to remove HRTF objects for the disconnected node
auto nodeList = DependencyManager::get<NodeList>();

nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
clientData->removeNode(killedNode->getUUID());
}
});
}

void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get<NodeList>();
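`negotiateCodec()` above walks the codecs offered by the client and keeps the one that is both available on the mixer and ranked earliest in the server's preference order. A standalone sketch of that selection rule using plain standard containers (the codec names and preference list below are made up):

```cpp
// Minimal sketch: pick the client-offered codec with the best (lowest) index
// in the server's preference order, skipping codecs that are not available.
#include <iostream>
#include <limits>
#include <set>
#include <string>
#include <vector>

int main() {
    std::set<std::string> available = { "opus", "pcm", "zlib" };
    std::vector<std::string> preferenceOrder = { "opus", "zlib", "pcm" }; // server side
    std::vector<std::string> requested = { "pcm", "zlib" };               // client side

    std::string selected;
    size_t best = std::numeric_limits<size_t>::max();
    for (const auto& codec : requested) {
        if (!available.count(codec)) {
            continue; // the mixer has no plugin for this codec
        }
        for (size_t i = 0; i < preferenceOrder.size(); ++i) {
            if (preferenceOrder[i] == codec && i < best) {
                best = i;
                selected = codec;
            }
        }
    }
    std::cout << "negotiated codec: " << selected << '\n'; // prints "zlib"
}
```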
@ -223,32 +215,31 @@ void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> pac
}
}

void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(killedNode->getLinkedData());
if (clientData) {
// stage the removal of all streams from this node, workers handle when preparing mixes for listeners
_workerSharedData.removedNodes.emplace_back(killedNode->getLocalID());
}
}

void AudioMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
if (clientData) {
clientData->removeAgentAvatarAudioStream();
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([sendingNode](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(sendingNode->getUUID());
}
});

// stage a removal of the avatar audio stream from this Agent, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(sendingNode->getUUID(), sendingNode->getLocalID(), QUuid());
}
}

void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
auto injectorClientData = qobject_cast<AudioMixerClientData*>(sender());
if (injectorClientData) {
// enumerate the connected listeners to remove HRTF objects for the disconnected injector
auto nodeList = DependencyManager::get<NodeList>();

nodeList->eachNode([injectorClientData, &streamID](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(injectorClientData->getNodeID(), streamID);
}
});
if (injectorClientData) {
// stage the removal of this stream, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(), injectorClientData->getNodeLocalID(),
    streamID);
}
}

@ -285,7 +276,7 @@ void AudioMixer::sendStatsPacket() {
// timing stats
QJsonObject timingStats;

auto addTiming = [&](Timer& timer, std::string name) {
auto addTiming = [&](Timer& timer, string name) {
uint64_t timing, trailing;
timer.get(timing, trailing);
timingStats[("us_per_" + name).c_str()] = (qint64)(timing / _numStatFrames);

@ -293,12 +284,12 @@ void AudioMixer::sendStatsPacket() {
};

addTiming(_ticTiming, "tic");
addTiming(_checkTimeTiming, "check_time");
addTiming(_sleepTiming, "sleep");
addTiming(_frameTiming, "frame");
addTiming(_prepareTiming, "prepare");
addTiming(_packetsTiming, "packets");
addTiming(_mixTiming, "mix");
addTiming(_eventsTiming, "events");
addTiming(_packetsTiming, "packets");

#ifdef HIFI_AUDIO_MIXER_DEBUG
timingStats["ns_per_mix"] = (_stats.totalMixes > 0) ? (float)(_stats.mixTime / _stats.totalMixes) : 0;

@ -311,11 +302,24 @@ void AudioMixer::sendStatsPacket() {
QJsonObject mixStats;

mixStats["%_hrtf_mixes"] = percentageForMixStats(_stats.hrtfRenders);
mixStats["%_hrtf_silent_mixes"] = percentageForMixStats(_stats.hrtfSilentRenders);
mixStats["%_hrtf_throttle_mixes"] = percentageForMixStats(_stats.hrtfThrottleRenders);
mixStats["%_manual_stereo_mixes"] = percentageForMixStats(_stats.manualStereoMixes);
mixStats["%_manual_echo_mixes"] = percentageForMixStats(_stats.manualEchoMixes);

mixStats["1_hrtf_renders"] = (int)(_stats.hrtfRenders / (float)_numStatFrames);
mixStats["1_hrtf_resets"] = (int)(_stats.hrtfResets / (float)_numStatFrames);
mixStats["1_hrtf_updates"] = (int)(_stats.hrtfUpdates / (float)_numStatFrames);

mixStats["2_skipped_streams"] = (int)(_stats.skipped / (float)_numStatFrames);
mixStats["2_inactive_streams"] = (int)(_stats.inactive / (float)_numStatFrames);
mixStats["2_active_streams"] = (int)(_stats.active / (float)_numStatFrames);

mixStats["3_skippped_to_active"] = (int)(_stats.skippedToActive / (float)_numStatFrames);
mixStats["3_skippped_to_inactive"] = (int)(_stats.skippedToInactive / (float)_numStatFrames);
mixStats["3_inactive_to_skippped"] = (int)(_stats.inactiveToSkipped / (float)_numStatFrames);
mixStats["3_inactive_to_active"] = (int)(_stats.inactiveToActive / (float)_numStatFrames);
mixStats["3_active_to_skippped"] = (int)(_stats.activeToSkipped / (float)_numStatFrames);
mixStats["3_active_to_inactive"] = (int)(_stats.activeToInactive / (float)_numStatFrames);

mixStats["total_mixes"] = _stats.totalMixes;
mixStats["avg_mixes_per_block"] = _stats.totalMixes / _numStatFrames;
@ -366,7 +370,7 @@ AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());

if (!clientData) {
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
node->setLinkedData(unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}

@ -393,33 +397,49 @@ void AudioMixer::start() {

// mix state
unsigned int frame = 1;
auto frameTimestamp = p_high_resolution_clock::now();

while (!_isFinished) {
auto ticTimer = _ticTiming.timer();

{
auto timer = _sleepTiming.timer();
auto frameDuration = timeFrame(frameTimestamp);
if (_startFrameTimestamp.time_since_epoch().count() == 0) {
_startFrameTimestamp = _idealFrameTimestamp = p_high_resolution_clock::now();
} else {
auto timer = _checkTimeTiming.timer();
auto frameDuration = timeFrame();
throttle(frameDuration, frame);
}

auto frameTimer = _frameTiming.timer();

nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// prepare frames; pop off any new audio from their streams
{
auto prepareTimer = _prepareTiming.timer();
std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
_stats.sumStreams += prepareFrame(node, frame);
});
}
// process (node-isolated) audio packets across slave threads
{
auto packetsTimer = _packetsTiming.timer();

// first clear the concurrent vector of added streams that the slaves will add to when they process packets
_workerSharedData.addedStreams.clear();

nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
_slavePool.processPackets(cbegin, cend);
});
}

// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();

// clear removed nodes and removed streams before we process events that will setup the new set
_workerSharedData.removedNodes.clear();
_workerSharedData.removedStreams.clear();

// since we're a while loop we need to yield to qt's event processing
QCoreApplication::processEvents();
}

int numToRetain = nodeList->size() * (1 - _throttlingRatio);
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// mix across slave threads
{
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, _throttlingRatio);
}
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, numToRetain);
});

// gather stats

@ -431,21 +451,6 @@ void AudioMixer::start() {
++frame;
++_numStatFrames;

// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();

// since we're a while loop we need to yield to qt's event processing
QCoreApplication::processEvents();

// process (node-isolated) audio packets across slave threads
{
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
auto packetsTimer = _packetsTiming.timer();
_slavePool.processPackets(cbegin, cend);
});
}
}

if (_isFinished) {
// alert qt eventing that this is finished
@ -455,26 +460,26 @@ void AudioMixer::start() {
}
}

std::chrono::microseconds AudioMixer::timeFrame(p_high_resolution_clock::time_point& timestamp) {
chrono::microseconds AudioMixer::timeFrame() {
// advance the next frame
auto nextTimestamp = timestamp + std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
auto now = p_high_resolution_clock::now();

// compute how long the last frame took
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(now - timestamp);
auto duration = chrono::duration_cast<chrono::microseconds>(now - _startFrameTimestamp);

// set the new frame timestamp
timestamp = std::max(now, nextTimestamp);
_idealFrameTimestamp += chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);

// sleep until the next frame should start
// WIN32 sleep_until is broken until VS2015 Update 2
// instead, std::max (above) guarantees that timestamp >= now, so we can sleep_for
std::this_thread::sleep_for(timestamp - now);
{
auto timer = _sleepTiming.timer();
this_thread::sleep_until(_idealFrameTimestamp);
}

_startFrameTimestamp = p_high_resolution_clock::now();

return duration;
}

void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
void AudioMixer::throttle(chrono::microseconds duration, int frame) {
// throttle using a modified proportional-integral controller
const float FRAME_TIME = 10000.0f;
float mixRatio = duration.count() / FRAME_TIME;
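The reworked `timeFrame()` above paces the mixer against an ideal schedule: it advances `_idealFrameTimestamp` by a fixed frame period and sleeps until that point, so per-frame scheduling error does not accumulate the way it can when each frame is timed relative to "now". A minimal sketch of that pacing pattern (the 10 ms period is assumed for illustration):

```cpp
// Minimal sketch: advance an ideal timestamp by a fixed period and sleep_until
// it, instead of sleeping a fixed duration from the current time.
#include <chrono>
#include <cstdio>
#include <thread>

int main() {
    using clock = std::chrono::steady_clock;
    constexpr auto FRAME_PERIOD = std::chrono::microseconds(10000); // assumed

    auto idealFrameTimestamp = clock::now();
    auto startFrameTimestamp = clock::now();

    for (int frame = 0; frame < 5; ++frame) {
        // ... a real mixer would prepare, process packets and mix here ...

        // how long the work for the previous frame took
        auto duration = std::chrono::duration_cast<std::chrono::microseconds>(
            clock::now() - startFrameTimestamp);

        // pace against the ideal schedule, not against "now"
        idealFrameTimestamp += FRAME_PERIOD;
        std::this_thread::sleep_until(idealFrameTimestamp);
        startFrameTimestamp = clock::now();

        std::printf("frame %d took %lld us\n", frame,
                    static_cast<long long>(duration.count()));
    }
}
```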
@ -508,28 +513,19 @@ void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
if (_trailingMixRatio > TARGET) {
int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
_throttlingRatio += THROTTLE_RATE * proportionalTerm;
_throttlingRatio = std::min(_throttlingRatio, 1.0f);
_throttlingRatio = min(_throttlingRatio, 1.0f);
qCDebug(audio) << "audio-mixer is struggling (" << _trailingMixRatio << "mix/sleep) - throttling"
    << _throttlingRatio << "of streams";
} else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
_throttlingRatio -= BACKOFF_RATE * proportionalTerm;
_throttlingRatio = std::max(_throttlingRatio, 0.0f);
_throttlingRatio = max(_throttlingRatio, 0.0f);
qCDebug(audio) << "audio-mixer is recovering (" << _trailingMixRatio << "mix/sleep) - throttling"
    << _throttlingRatio << "of streams";
}
}
}

int AudioMixer::prepareFrame(const SharedNodePointer& node, unsigned int frame) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data == nullptr) {
return 0;
}

return data->checkBuffersBeforeFrameSend();
}

void AudioMixer::clearDomainSettings() {
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
_attenuationPerDoublingInDistance = DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE;
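`throttle()` above adjusts `_throttlingRatio` with a proportional term: push the ratio up quickly while the trailing mix/sleep ratio sits above the target, and bleed it back down once the mixer has recovered. A self-contained sketch of that controller (the target and rate constants below are placeholders, since their definitions sit outside this hunk):

```cpp
// Minimal sketch: proportional throttling of the fraction of streams mixed.
#include <algorithm>
#include <cstdio>

int main() {
    const float TARGET = 0.9f;          // placeholder constants
    const float BACKOFF_TARGET = 0.44f;
    const float THROTTLE_RATE = 0.05f;
    const float BACKOFF_RATE = 0.02f;

    float throttlingRatio = 0.0f;
    const float trailingMixRatios[] = { 0.95f, 1.1f, 0.8f, 0.3f, 0.2f };

    for (float trailingMixRatio : trailingMixRatios) {
        if (trailingMixRatio > TARGET) {
            // overloaded: raise the throttled fraction, capped at 1
            int proportionalTerm = 1 + (trailingMixRatio - TARGET) / 0.1f;
            throttlingRatio = std::min(throttlingRatio + THROTTLE_RATE * proportionalTerm, 1.0f);
        } else if (throttlingRatio > 0.0f && trailingMixRatio <= BACKOFF_TARGET) {
            // recovered: back the throttled fraction off, floored at 0
            int proportionalTerm = 1 + (TARGET - trailingMixRatio) / 0.2f;
            throttlingRatio = std::max(throttlingRatio - BACKOFF_RATE * proportionalTerm, 0.0f);
        }
        std::printf("mix ratio %.2f -> throttling %.2f of streams\n",
                    trailingMixRatio, throttlingRatio);
    }
}
```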
@ -661,8 +657,11 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
const QString Y_MAX = "y_max";
const QString Z_MIN = "z_min";
const QString Z_MAX = "z_max";
foreach (const QString& zone, zones.keys()) {
QJsonObject zoneObject = zones[zone].toObject();

auto zoneNames = zones.keys();
_audioZones.reserve(zoneNames.length());
foreach (const QString& zoneName, zoneNames) {
QJsonObject zoneObject = zones[zoneName].toObject();

if (zoneObject.contains(X_MIN) && zoneObject.contains(X_MAX) && zoneObject.contains(Y_MIN) &&
    zoneObject.contains(Y_MAX) && zoneObject.contains(Z_MIN) && zoneObject.contains(Z_MAX)) {

@ -686,8 +685,8 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
glm::vec3 corner(xMin, yMin, zMin);
glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
AABox zoneAABox(corner, dimensions);
_audioZones.insert(zone, zoneAABox);
qCDebug(audio) << "Added zone:" << zone << "(corner:" << corner << ", dimensions:" << dimensions << ")";
_audioZones.push_back({ zoneName, zoneAABox });
qCDebug(audio) << "Added zone:" << zoneName << "(corner:" << corner << ", dimensions:" << dimensions << ")";
}
}
}
@ -707,18 +706,28 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
    coefficientObject.contains(LISTENER) &&
    coefficientObject.contains(COEFFICIENT)) {

ZoneSettings settings;
auto itSource = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
    return description.name == coefficientObject.value(SOURCE).toString();
});
auto itListener = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
    return description.name == coefficientObject.value(LISTENER).toString();
});

bool ok;
settings.source = coefficientObject.value(SOURCE).toString();
settings.listener = coefficientObject.value(LISTENER).toString();
settings.coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
float coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);

if (ok && settings.coefficient >= 0.0f && settings.coefficient <= 1.0f &&
    _audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {

if (ok && coefficient >= 0.0f && coefficient <= 1.0f &&
    itSource != end(_audioZones) &&
    itListener != end(_audioZones)) {

ZoneSettings settings;
settings.source = itSource - begin(_audioZones);
settings.listener = itListener - begin(_audioZones);
settings.coefficient = coefficient;

_zoneSettings.push_back(settings);
qCDebug(audio) << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
qCDebug(audio) << "Added Coefficient:" << itSource->name << itListener->name << settings.coefficient;
}
}
}
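The settings parsing above replaces string zone keys with indices into the ordered `_audioZones` vector: each name is resolved once with `find_if`, and the stored `ZoneSettings` then refer to zones by position. A simplified sketch of the same idea (types reduced to the bare minimum, zone names invented):

```cpp
// Minimal sketch: resolve zone names to vector indices once at parse time,
// so per-frame lookups become plain array reads.
#include <algorithm>
#include <cstdio>
#include <string>
#include <vector>

struct ZoneDescription { std::string name; };
struct ZoneSettings { int source; int listener; float coefficient; };

int main() {
    std::vector<ZoneDescription> audioZones = { {"lobby"}, {"stage"}, {"backstage"} };
    std::vector<ZoneSettings> zoneSettings;

    auto findZone = [&](const std::string& name) {
        return std::find_if(audioZones.begin(), audioZones.end(),
                            [&](const ZoneDescription& d) { return d.name == name; });
    };

    auto itSource = findZone("stage");
    auto itListener = findZone("lobby");
    if (itSource != audioZones.end() && itListener != audioZones.end()) {
        zoneSettings.push_back({ static_cast<int>(itSource - audioZones.begin()),
                                 static_cast<int>(itListener - audioZones.begin()),
                                 0.5f });
    }

    for (const auto& s : zoneSettings) {
        std::printf("coefficient %.2f from %s to %s\n", s.coefficient,
                    audioZones[s.source].name.c_str(),
                    audioZones[s.listener].name.c_str());
    }
}
```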
@ -739,19 +748,21 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
    reverbObject.contains(WET_LEVEL)) {

bool okReverbTime, okWetLevel;
QString zone = reverbObject.value(ZONE).toString();
auto itZone = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
    return description.name == reverbObject.value(ZONE).toString();
});
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);

if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
if (okReverbTime && okWetLevel && itZone != end(_audioZones)) {
ReverbSettings settings;
settings.zone = zone;
settings.zone = itZone - begin(_audioZones);
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;

_zoneReverbSettings.push_back(settings);

qCDebug(audio) << "Added Reverb:" << zone << reverbTime << wetLevel;
qCDebug(audio) << "Added Reverb:" << itZone->name << reverbTime << wetLevel;
}
}
}

@ -764,7 +775,7 @@ AudioMixer::Timer::Timing::Timing(uint64_t& sum) : _sum(sum) {
}

AudioMixer::Timer::Timing::~Timing() {
_sum += std::chrono::duration_cast<std::chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
_sum += chrono::duration_cast<chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
}

void AudioMixer::Timer::get(uint64_t& timing, uint64_t& trailing) {
@ -34,13 +34,18 @@ class AudioMixer : public ThreadedAssignment {
public:
    AudioMixer(ReceivedMessage& message);


    struct ZoneDescription {
        QString name;
        AABox area;
    };
    struct ZoneSettings {
        QString source;
        QString listener;
        int source;
        int listener;
        float coefficient;
    };
    struct ReverbSettings {
        QString zone;
        int zone;
        float reverbTime;
        float wetLevel;
    };

@ -48,9 +53,9 @@ public:
    static int getStaticJitterFrames() { return _numStaticJitterFrames; }
    static bool shouldMute(float quietestFrame) { return quietestFrame > _noiseMutingThreshold; }
    static float getAttenuationPerDoublingInDistance() { return _attenuationPerDoublingInDistance; }
    static const QHash<QString, AABox>& getAudioZones() { return _audioZones; }
    static const QVector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
    static const QVector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
    static const std::vector<ZoneDescription>& getAudioZones() { return _audioZones; }
    static const std::vector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
    static const std::vector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
    static const std::pair<QString, CodecPluginPointer> negotiateCodec(std::vector<QString> codecs);

    static bool shouldReplicateTo(const Node& from, const Node& to) {

@ -79,11 +84,8 @@ private slots:

private:
    // mixing helpers
    std::chrono::microseconds timeFrame(p_high_resolution_clock::time_point& timestamp);
    std::chrono::microseconds timeFrame();
    void throttle(std::chrono::microseconds frameDuration, int frame);
    // pop a frame from any streams on the node
    // returns the number of available streams
    int prepareFrame(const SharedNodePointer& node, unsigned int frame);

    AudioMixerClientData* getOrCreateClientData(Node* node);

@ -92,6 +94,9 @@ private:
    void parseSettingsObject(const QJsonObject& settingsObject);
    void clearDomainSettings();

    p_high_resolution_clock::time_point _idealFrameTimestamp;
    p_high_resolution_clock::time_point _startFrameTimestamp;

    float _trailingMixRatio { 0.0f };
    float _throttlingRatio { 0.0f };

@ -100,7 +105,7 @@ private:
    int _numStatFrames { 0 };
    AudioMixerStats _stats;

    AudioMixerSlavePool _slavePool;
    AudioMixerSlavePool _slavePool { _workerSharedData };

    class Timer {
    public:

@ -123,7 +128,9 @@ private:
        uint64_t _history[TIMER_TRAILING_SECONDS] {};
        int _index { 0 };
    };

    Timer _ticTiming;
    Timer _checkTimeTiming;
    Timer _sleepTiming;
    Timer _frameTiming;
    Timer _prepareTiming;

@ -136,10 +143,13 @@ private:
    static float _attenuationPerDoublingInDistance;
    static std::map<QString, CodecPluginPointer> _availableCodecs;
    static QStringList _codecPreferenceOrder;
    static QHash<QString, AABox> _audioZones;
    static QVector<ZoneSettings> _zoneSettings;
    static QVector<ReverbSettings> _zoneReverbSettings;


    static std::vector<ZoneDescription> _audioZones;
    static std::vector<ZoneSettings> _zoneSettings;
    static std::vector<ReverbSettings> _zoneReverbSettings;

    AudioMixerSlave::SharedData _workerSharedData;
};

#endif // hifi_AudioMixer_h
@ -13,6 +13,8 @@
|
|||
|
||||
#include <random>
|
||||
|
||||
#include <glm/common.hpp>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QJsonArray>
|
||||
|
||||
|
@ -28,7 +30,6 @@
|
|||
AudioMixerClientData::AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID) :
|
||||
NodeData(nodeID, nodeLocalID),
|
||||
audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
|
||||
_ignoreZone(*this),
|
||||
_outgoingMixedAudioSequenceNumber(0),
|
||||
_downstreamAudioStreamStats()
|
||||
{
|
||||
|
@ -56,7 +57,7 @@ void AudioMixerClientData::queuePacket(QSharedPointer<ReceivedMessage> message,
|
|||
_packetQueue.push(message);
|
||||
}
|
||||
|
||||
void AudioMixerClientData::processPackets() {
|
||||
int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
|
||||
SharedNodePointer node = _packetQueue.node;
|
||||
assert(_packetQueue.empty() || node);
|
||||
_packetQueue.node.clear();
|
||||
|
@ -69,22 +70,17 @@ void AudioMixerClientData::processPackets() {
|
|||
case PacketType::MicrophoneAudioWithEcho:
|
||||
case PacketType::InjectAudio:
|
||||
case PacketType::SilentAudioFrame: {
|
||||
|
||||
if (node->isUpstream()) {
|
||||
setupCodecForReplicatedAgent(packet);
|
||||
}
|
||||
|
||||
QMutexLocker lock(&getMutex());
|
||||
parseData(*packet);
|
||||
processStreamPacket(*packet, addedStreams);
|
||||
|
||||
optionallyReplicatePacket(*packet, *node);
|
||||
|
||||
break;
|
||||
}
|
||||
case PacketType::AudioStreamStats: {
|
||||
QMutexLocker lock(&getMutex());
|
||||
parseData(*packet);
|
||||
|
||||
break;
|
||||
}
|
||||
case PacketType::NegotiateAudioFormat:
|
||||
|
@ -102,6 +98,9 @@ void AudioMixerClientData::processPackets() {
|
|||
case PacketType::RadiusIgnoreRequest:
|
||||
parseRadiusIgnoreRequest(packet, node);
|
||||
break;
|
||||
case PacketType::AudioSoloRequest:
|
||||
parseSoloRequest(packet, node);
|
||||
break;
|
||||
default:
|
||||
Q_UNREACHABLE();
|
||||
}
|
||||
|
@ -109,6 +108,10 @@ void AudioMixerClientData::processPackets() {
|
|||
_packetQueue.pop();
|
||||
}
|
||||
assert(_packetQueue.empty());
|
||||
|
||||
// now that we have processed all packets for this frame
|
||||
// we can prepare the sources from this client to be ready for mixing
|
||||
return checkBuffersBeforeFrameSend();
|
||||
}
|
||||
|
||||
bool isReplicatedPacket(PacketType packetType) {
|
||||
|
@ -186,63 +189,155 @@ void AudioMixerClientData::parseRequestsDomainListData(ReceivedMessage& message)
|
|||
void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
|
||||
QUuid uuid = node->getUUID();
|
||||
// parse the UUID from the packet
|
||||
QUuid avatarUuid = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
QUuid avatarUUID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
uint8_t packedGain;
|
||||
message.readPrimitive(&packedGain);
|
||||
float gain = unpackFloatGainFromByte(packedGain);
|
||||
|
||||
if (avatarUuid.isNull()) {
|
||||
if (avatarUUID.isNull()) {
|
||||
// set the MASTER avatar gain
|
||||
setMasterAvatarGain(gain);
|
||||
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
|
||||
} else {
|
||||
// set the per-source avatar gain
|
||||
hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
|
||||
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
|
||||
setGainForAvatar(avatarUUID, gain);
|
||||
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::setGainForAvatar(QUuid nodeID, float gain) {
|
||||
auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
|
||||
return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();
|
||||
});
|
||||
|
||||
if (it != _streams.active.cend()) {
|
||||
it->hrtf->setGainAdjustment(gain);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
|
||||
node->parseIgnoreRequestMessage(message);
|
||||
auto ignoredNodesPair = node->parseIgnoreRequestMessage(message);
|
||||
|
||||
// we have a vector of ignored or unignored node UUIDs - update our internal data structures so that
|
||||
// streams can be included or excluded next time a mix is being created
|
||||
if (ignoredNodesPair.second) {
|
||||
// we have newly ignored nodes, add them to our vector
|
||||
_newIgnoredNodeIDs.insert(std::end(_newIgnoredNodeIDs),
|
||||
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
|
||||
} else {
|
||||
// we have newly unignored nodes, add them to our vector
|
||||
_newUnignoredNodeIDs.insert(std::end(_newUnignoredNodeIDs),
|
||||
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
for (auto& nodeID : ignoredNodesPair.first) {
|
||||
auto otherNode = nodeList->nodeWithUUID(nodeID);
|
||||
if (otherNode) {
|
||||
auto otherNodeMixerClientData = static_cast<AudioMixerClientData*>(otherNode->getLinkedData());
|
||||
if (otherNodeMixerClientData) {
|
||||
if (ignoredNodesPair.second) {
|
||||
otherNodeMixerClientData->ignoredByNode(getNodeID());
|
||||
} else {
|
||||
otherNodeMixerClientData->unignoredByNode(getNodeID());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::ignoredByNode(QUuid nodeID) {
    // first add this ID to the concurrent vector for newly ignoring nodes
    _newIgnoringNodeIDs.push_back(nodeID);

    // now take a lock on the consistent vector of ignoring nodes and make sure this node is in it
    std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
    if (std::find(_ignoringNodeIDs.begin(), _ignoringNodeIDs.end(), nodeID) == _ignoringNodeIDs.end()) {
        _ignoringNodeIDs.push_back(nodeID);
    }
}

void AudioMixerClientData::unignoredByNode(QUuid nodeID) {
    // first add this ID to the concurrent vector for newly unignoring nodes
    _newUnignoringNodeIDs.push_back(nodeID);

    // now take a lock on the consistent vector of ignoring nodes and make sure this node isn't in it
    std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
    auto it = _ignoringNodeIDs.begin();
    while (it != _ignoringNodeIDs.end()) {
        if (*it == nodeID) {
            it = _ignoringNodeIDs.erase(it);
        } else {
            ++it;
        }
    }
}

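These two methods show the staging pattern used throughout this change: worker threads append to lock-free `tbb::concurrent_vector`s holding this frame's deltas, while a mutex-guarded `std::vector` keeps the consolidated state. A reduced sketch of the pattern, independent of the mixer types — class and member names below are invented for illustration:

```cpp
#include <algorithm>
#include <mutex>
#include <vector>

#include <tbb/concurrent_vector.h>
#include <QtCore/QUuid>

// A set of IDs with two views: a concurrent "newly added this frame" list
// and a locked, deduplicated vector of the current state.
class StagedIDSet {
public:
    // safe to call concurrently from any worker thread
    void add(const QUuid& id) {
        _newlyAdded.push_back(id);   // lock-free append; never invalidates existing elements

        std::lock_guard<std::mutex> lock(_mutex);
        if (std::find(_consistent.begin(), _consistent.end(), id) == _consistent.end()) {
            _consistent.push_back(id);
        }
    }

    // called by the single owner once the per-frame deltas have been consumed
    void clearStaged() { _newlyAdded.clear(); }

    const tbb::concurrent_vector<QUuid>& newlyAdded() const { return _newlyAdded; }

private:
    tbb::concurrent_vector<QUuid> _newlyAdded;
    std::mutex _mutex;
    std::vector<QUuid> _consistent;
};
```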
void AudioMixerClientData::clearStagedIgnoreChanges() {
|
||||
_newIgnoredNodeIDs.clear();
|
||||
_newUnignoredNodeIDs.clear();
|
||||
_newIgnoringNodeIDs.clear();
|
||||
_newUnignoringNodeIDs.clear();
|
||||
}
|
||||
|
||||
void AudioMixerClientData::parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
|
||||
node->parseIgnoreRadiusRequestMessage(message);
|
||||
bool enabled;
|
||||
message->readPrimitive(&enabled);
|
||||
|
||||
_isIgnoreRadiusEnabled = enabled;
|
||||
|
||||
auto avatarAudioStream = getAvatarAudioStream();
|
||||
|
||||
// if we have an avatar audio stream, tell it whether its ignore box should be enabled or disabled
|
||||
if (avatarAudioStream) {
|
||||
if (_isIgnoreRadiusEnabled) {
|
||||
avatarAudioStream->enableIgnoreBox();
|
||||
} else {
|
||||
avatarAudioStream->disableIgnoreBox();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void AudioMixerClientData::parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
|
||||
|
||||
uint8_t addToSolo;
|
||||
message->readPrimitive(&addToSolo);
|
||||
|
||||
while (message->getBytesLeftToRead()) {
|
||||
// parse out the UUID being soloed from the packet
|
||||
QUuid soloedUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
if (addToSolo) {
|
||||
_soloedNodes.push_back(soloedUUID);
|
||||
} else {
|
||||
auto it = std::remove(std::begin(_soloedNodes), std::end(_soloedNodes), soloedUUID);
|
||||
_soloedNodes.erase(it, std::end(_soloedNodes));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
|
||||
QReadLocker readLocker { &_streamsLock };
|
||||
auto it = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
|
||||
return stream->getStreamIdentifier().isNull();
|
||||
});
|
||||
|
||||
auto it = _audioStreams.find(QUuid());
|
||||
if (it != _audioStreams.end()) {
|
||||
return dynamic_cast<AvatarAudioStream*>(it->second.get());
|
||||
return dynamic_cast<AvatarAudioStream*>(it->get());
|
||||
}
|
||||
|
||||
// no mic stream found - return NULL
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID) {
|
||||
auto it = _nodeSourcesHRTFMap.find(nodeID);
|
||||
if (it != _nodeSourcesHRTFMap.end()) {
|
||||
// erase the stream with the given ID from the given node
|
||||
it->second.erase(streamID);
|
||||
|
||||
// is the map for this node now empty?
|
||||
// if so we can remove it
|
||||
if (it->second.size() == 0) {
|
||||
_nodeSourcesHRTFMap.erase(it);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::removeAgentAvatarAudioStream() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
auto it = _audioStreams.find(QUuid());
|
||||
auto it = std::remove_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
|
||||
return stream->getStreamIdentifier().isNull();
|
||||
});
|
||||
|
||||
if (it != _audioStreams.end()) {
|
||||
_audioStreams.erase(it);
|
||||
}
|
||||
writeLocker.unlock();
|
||||
}
|
||||
|
||||
int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
||||
|
@ -252,128 +347,186 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
|||
// skip over header, appendFlag, and num stats packed
|
||||
message.seek(sizeof(quint8) + sizeof(quint16));
|
||||
|
||||
if (message.getBytesLeftToRead() != sizeof(AudioStreamStats)) {
|
||||
qWarning() << "Received AudioStreamStats of wrong size" << message.getBytesLeftToRead()
|
||||
<< "instead of" << sizeof(AudioStreamStats) << "from"
|
||||
<< message.getSourceID() << "at" << message.getSenderSockAddr();
|
||||
|
||||
return message.getPosition();
|
||||
}
|
||||
|
||||
// read the downstream audio stream stats
|
||||
message.readPrimitive(&_downstreamAudioStreamStats);
|
||||
|
||||
return message.getPosition();
|
||||
|
||||
} else {
|
||||
SharedStreamPointer matchingStream;
|
||||
|
||||
bool isMicStream = false;
|
||||
|
||||
if (packetType == PacketType::MicrophoneAudioWithEcho
|
||||
|| packetType == PacketType::ReplicatedMicrophoneAudioWithEcho
|
||||
|| packetType == PacketType::MicrophoneAudioNoEcho
|
||||
|| packetType == PacketType::ReplicatedMicrophoneAudioNoEcho
|
||||
|| packetType == PacketType::SilentAudioFrame
|
||||
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
|
||||
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
|
||||
auto micStreamIt = _audioStreams.find(QUuid());
|
||||
if (micStreamIt == _audioStreams.end()) {
|
||||
// we don't have a mic stream yet, so add it
|
||||
|
||||
// hop past the sequence number that leads the packet
|
||||
message.seek(sizeof(quint16));
|
||||
|
||||
// pull the codec string from the packet
|
||||
auto codecString = message.readString();
|
||||
|
||||
// determine if the stream is stereo or not
|
||||
bool isStereo;
|
||||
if (packetType == PacketType::SilentAudioFrame
|
||||
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
|
||||
quint16 numSilentSamples;
|
||||
message.readPrimitive(&numSilentSamples);
|
||||
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
|
||||
} else {
|
||||
quint8 channelFlag;
|
||||
message.readPrimitive(&channelFlag);
|
||||
isStereo = channelFlag == 1;
|
||||
}
|
||||
|
||||
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
|
||||
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
|
||||
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
|
||||
this, &AudioMixerClientData::handleMismatchAudioFormat);
|
||||
|
||||
auto emplaced = _audioStreams.emplace(
|
||||
QUuid(),
|
||||
std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
|
||||
);
|
||||
|
||||
micStreamIt = emplaced.first;
|
||||
}
|
||||
|
||||
matchingStream = micStreamIt->second;
|
||||
|
||||
writeLocker.unlock();
|
||||
|
||||
isMicStream = true;
|
||||
} else if (packetType == PacketType::InjectAudio
|
||||
|| packetType == PacketType::ReplicatedInjectAudio) {
|
||||
// this is injected audio
|
||||
// grab the stream identifier for this injected audio
|
||||
message.seek(sizeof(quint16));
|
||||
|
||||
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
bool isStereo;
|
||||
message.readPrimitive(&isStereo);
|
||||
|
||||
QWriteLocker writeLock { &_streamsLock };
|
||||
|
||||
auto streamIt = _audioStreams.find(streamIdentifier);
|
||||
|
||||
if (streamIt == _audioStreams.end()) {
|
||||
// we don't have this injected stream yet, so add it
|
||||
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
|
||||
|
||||
#if INJECTORS_SUPPORT_CODECS
|
||||
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
#endif
|
||||
|
||||
auto emplaced = _audioStreams.emplace(
|
||||
streamIdentifier,
|
||||
std::unique_ptr<InjectedAudioStream> { injectorStream }
|
||||
);
|
||||
|
||||
streamIt = emplaced.first;
|
||||
}
|
||||
|
||||
matchingStream = streamIt->second;
|
||||
|
||||
writeLock.unlock();
|
||||
}
|
||||
|
||||
// seek to the beginning of the packet so that the next reader is in the right spot
|
||||
message.seek(0);
|
||||
|
||||
// check the overflow count before we parse data
|
||||
auto overflowBefore = matchingStream->getOverflowCount();
|
||||
auto parseResult = matchingStream->parseData(message);
|
||||
|
||||
if (matchingStream->getOverflowCount() > overflowBefore) {
|
||||
qCDebug(audio) << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
|
||||
qCDebug(audio) << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
|
||||
}
|
||||
|
||||
return parseResult;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
bool AudioMixerClientData::containsValidPosition(ReceivedMessage& message) const {
|
||||
static const int SEQUENCE_NUMBER_BYTES = sizeof(quint16);
|
||||
|
||||
auto posBefore = message.getPosition();
|
||||
|
||||
message.seek(SEQUENCE_NUMBER_BYTES);
|
||||
|
||||
// skip over the codec string
|
||||
message.readString();
|
||||
|
||||
switch (message.getType()) {
|
||||
case PacketType::MicrophoneAudioNoEcho:
|
||||
case PacketType::MicrophoneAudioWithEcho: {
|
||||
// skip over the stereo flag
|
||||
message.seek(message.getPosition() + sizeof(ChannelFlag));
|
||||
break;
|
||||
}
|
||||
case PacketType::SilentAudioFrame: {
|
||||
// skip the number of silent samples
|
||||
message.seek(message.getPosition() + sizeof(SilentSamplesBytes));
|
||||
break;
|
||||
}
|
||||
case PacketType::InjectAudio: {
|
||||
// skip the stream ID, stereo flag, and loopback flag
|
||||
message.seek(message.getPosition() + NUM_STREAM_ID_BYTES + sizeof(ChannelFlag) + sizeof(LoopbackFlag));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
Q_UNREACHABLE();
|
||||
break;
|
||||
}
|
||||
|
||||
glm::vec3 peekPosition;
|
||||
message.readPrimitive(&peekPosition);
|
||||
|
||||
// reset the position the message was at before we were called
|
||||
message.seek(posBefore);
|
||||
|
||||
if (glm::any(glm::isnan(peekPosition))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
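The validity check in `containsValidPosition()` above reduces to a component-wise NaN test on the position peeked out of the packet; restated on its own with glm (the helper name is illustrative):

```cpp
#include <glm/glm.hpp>

// A stream position is only usable if none of its components are NaN.
bool isUsablePosition(const glm::vec3& position) {
    return !glm::any(glm::isnan(position));
}
```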
|
||||
void AudioMixerClientData::processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams &addedStreams) {
|
||||
|
||||
if (!containsValidPosition(message)) {
|
||||
qDebug() << "Refusing to process audio stream from" << message.getSourceID() << "with invalid position";
|
||||
return;
|
||||
}
|
||||
|
||||
SharedStreamPointer matchingStream;
|
||||
|
||||
auto packetType = message.getType();
|
||||
bool newStream = false;
|
||||
|
||||
if (packetType == PacketType::MicrophoneAudioWithEcho
|
||||
|| packetType == PacketType::MicrophoneAudioNoEcho
|
||||
|| packetType == PacketType::SilentAudioFrame) {
|
||||
|
||||
auto micStreamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
|
||||
return stream->getStreamIdentifier().isNull();
|
||||
});
|
||||
|
||||
if (micStreamIt == _audioStreams.end()) {
|
||||
// we don't have a mic stream yet, so add it
|
||||
|
||||
// hop past the sequence number that leads the packet
|
||||
message.seek(sizeof(StreamSequenceNumber));
|
||||
|
||||
// pull the codec string from the packet
|
||||
auto codecString = message.readString();
|
||||
|
||||
// determine if the stream is stereo or not
|
||||
bool isStereo;
|
||||
if (packetType == PacketType::SilentAudioFrame || packetType == PacketType::ReplicatedSilentAudioFrame) {
|
||||
SilentSamplesBytes numSilentSamples;
|
||||
message.readPrimitive(&numSilentSamples);
|
||||
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
|
||||
} else {
|
||||
ChannelFlag channelFlag;
|
||||
message.readPrimitive(&channelFlag);
|
||||
isStereo = channelFlag == 1;
|
||||
}
|
||||
|
||||
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
|
||||
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
|
||||
if (_isIgnoreRadiusEnabled) {
|
||||
avatarAudioStream->enableIgnoreBox();
|
||||
} else {
|
||||
avatarAudioStream->disableIgnoreBox();
|
||||
}
|
||||
|
||||
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
|
||||
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
|
||||
this, &AudioMixerClientData::handleMismatchAudioFormat);
|
||||
|
||||
matchingStream = SharedStreamPointer(avatarAudioStream);
|
||||
_audioStreams.push_back(matchingStream);
|
||||
|
||||
newStream = true;
|
||||
} else {
|
||||
matchingStream = *micStreamIt;
|
||||
}
|
||||
} else if (packetType == PacketType::InjectAudio) {
|
||||
|
||||
// this is injected audio
|
||||
// skip the sequence number and codec string and grab the stream identifier for this injected audio
|
||||
message.seek(sizeof(StreamSequenceNumber));
|
||||
message.readString();
|
||||
|
||||
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
auto streamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [&streamIdentifier](const SharedStreamPointer& stream) {
|
||||
return stream->getStreamIdentifier() == streamIdentifier;
|
||||
});
|
||||
|
||||
if (streamIt == _audioStreams.end()) {
|
||||
bool isStereo;
|
||||
message.readPrimitive(&isStereo);
|
||||
|
||||
// we don't have this injected stream yet, so add it
|
||||
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
|
||||
|
||||
#if INJECTORS_SUPPORT_CODECS
|
||||
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
#endif
|
||||
|
||||
matchingStream = SharedStreamPointer(injectorStream);
|
||||
_audioStreams.push_back(matchingStream);
|
||||
|
||||
newStream = true;
|
||||
} else {
|
||||
matchingStream = *streamIt;
|
||||
}
|
||||
}
|
||||
|
||||
// seek to the beginning of the packet so that the next reader is in the right spot
|
||||
message.seek(0);
|
||||
|
||||
// check the overflow count before we parse data
|
||||
auto overflowBefore = matchingStream->getOverflowCount();
|
||||
matchingStream->parseData(message);
|
||||
|
||||
if (matchingStream->getOverflowCount() > overflowBefore) {
|
||||
qCDebug(audio) << "Just overflowed on stream" << matchingStream->getStreamIdentifier()
|
||||
<< "from" << message.getSourceID();
|
||||
}
|
||||
|
||||
if (newStream) {
|
||||
// whenever a stream is added, push it to the concurrent vector of streams added this frame
|
||||
addedStreams.push_back(AddedStream(getNodeID(), getNodeLocalID(), matchingStream->getStreamIdentifier(), matchingStream.get()));
|
||||
}
|
||||
}
|
||||
|
||||
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
auto it = _audioStreams.begin();
|
||||
while (it != _audioStreams.end()) {
|
||||
SharedStreamPointer stream = it->second;
|
||||
SharedStreamPointer stream = *it;
|
||||
|
||||
if (stream->popFrames(1, true) > 0) {
|
||||
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
|
||||
|
@ -388,7 +541,7 @@ int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
|||
// this is an inactive injector, pull it from our streams
|
||||
|
||||
// first emit that it is finished so that the HRTF objects for this source can be cleaned up
|
||||
emit injectorStreamFinished(it->second->getStreamIdentifier());
|
||||
emit injectorStreamFinished(stream->getStreamIdentifier());
|
||||
|
||||
// erase the stream to drop our ref to the shared pointer and remove it
|
||||
it = _audioStreams.erase(it);
|
||||
|
@ -441,7 +594,7 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
|
|||
|
||||
// pack the calculated number of stream stats
|
||||
for (int i = 0; i < numStreamStatsToPack; i++) {
|
||||
PositionalAudioStream* stream = it->second.get();
|
||||
PositionalAudioStream* stream = it->get();
|
||||
|
||||
stream->perSecondCallbackForUpdatingStats();
|
||||
|
||||
|
@ -513,12 +666,12 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
|
|||
QJsonArray injectorArray;
|
||||
auto streamsCopy = getAudioStreams();
|
||||
for (auto& injectorPair : streamsCopy) {
|
||||
if (injectorPair.second->getType() == PositionalAudioStream::Injector) {
|
||||
if (injectorPair->getType() == PositionalAudioStream::Injector) {
|
||||
QJsonObject upstreamStats;
|
||||
|
||||
AudioStreamStats streamStats = injectorPair.second->getAudioStreamStats();
|
||||
AudioStreamStats streamStats = injectorPair->getAudioStreamStats();
|
||||
upstreamStats["inj.desired"] = streamStats._desiredJitterBufferFrames;
|
||||
upstreamStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
|
||||
upstreamStats["desired_calc"] = injectorPair->getCalculatedJitterBufferFrames();
|
||||
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
|
||||
upstreamStats["available"] = (double) streamStats._framesAvailable;
|
||||
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
|
||||
|
@ -609,99 +762,6 @@ void AudioMixerClientData::cleanupCodec() {
|
|||
}
|
||||
}
|
||||
|
||||
AudioMixerClientData::IgnoreZone& AudioMixerClientData::IgnoreZoneMemo::get(unsigned int frame) {
|
||||
// check for a memoized zone
|
||||
if (frame != _frame.load(std::memory_order_acquire)) {
|
||||
AvatarAudioStream* stream = _data.getAvatarAudioStream();
|
||||
|
||||
// get the initial dimensions from the stream
|
||||
glm::vec3 corner = stream ? stream->getAvatarBoundingBoxCorner() : glm::vec3(0);
|
||||
glm::vec3 scale = stream ? stream->getAvatarBoundingBoxScale() : glm::vec3(0);
|
||||
|
||||
// enforce a minimum scale
|
||||
static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
|
||||
if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
|
||||
scale = MIN_IGNORE_BOX_SCALE;
|
||||
}
|
||||
|
||||
// (this is arbitrary number determined empirically for comfort)
|
||||
const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
|
||||
scale *= IGNORE_BOX_SCALE_FACTOR;
|
||||
|
||||
// create the box (we use a box for the zone for convenience)
|
||||
AABox box(corner, scale);
|
||||
|
||||
// update the memoized zone
|
||||
// This may be called by multiple threads concurrently,
|
||||
// so take a lock and only update the memo if this call is first.
|
||||
// This prevents concurrent updates from invalidating the returned reference
|
||||
// (contingent on the preconditions listed in the header).
|
||||
std::lock_guard<std::mutex> lock(_mutex);
|
||||
if (frame != _frame.load(std::memory_order_acquire)) {
|
||||
_zone = box;
|
||||
unsigned int oldFrame = _frame.exchange(frame, std::memory_order_release);
|
||||
Q_UNUSED(oldFrame);
|
||||
}
|
||||
}
|
||||
|
||||
return _zone;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::IgnoreNodeCache::cache(bool shouldIgnore) {
|
||||
if (!_isCached) {
|
||||
_shouldIgnore = shouldIgnore;
|
||||
_isCached = true;
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::IgnoreNodeCache::isCached() {
|
||||
return _isCached;
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::IgnoreNodeCache::shouldIgnore() {
|
||||
bool ignore = _shouldIgnore;
|
||||
_isCached = false;
|
||||
return ignore;
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::shouldIgnore(const SharedNodePointer self, const SharedNodePointer node, unsigned int frame) {
|
||||
// this is symmetric over self / node; if computed, it is cached in the other
|
||||
|
||||
// check the cache to avoid computation
|
||||
auto& cache = _nodeSourcesIgnoreMap[node->getUUID()];
|
||||
if (cache.isCached()) {
|
||||
return cache.shouldIgnore();
|
||||
}
|
||||
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (!nodeData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// compute shouldIgnore
|
||||
bool shouldIgnore = true;
|
||||
if ( // the nodes are not ignoring each other explicitly (or are but get data regardless)
|
||||
(!self->isIgnoringNodeWithID(node->getUUID()) ||
|
||||
(nodeData->getRequestsDomainListData() && node->getCanKick())) &&
|
||||
(!node->isIgnoringNodeWithID(self->getUUID()) ||
|
||||
(getRequestsDomainListData() && self->getCanKick()))) {
|
||||
|
||||
// if either node is enabling an ignore radius, check their proximity
|
||||
if ((self->isIgnoreRadiusEnabled() || node->isIgnoreRadiusEnabled())) {
|
||||
auto& zone = _ignoreZone.get(frame);
|
||||
auto& nodeZone = nodeData->_ignoreZone.get(frame);
|
||||
shouldIgnore = zone.touches(nodeZone);
|
||||
} else {
|
||||
shouldIgnore = false;
|
||||
}
|
||||
}
|
||||
|
||||
// cache in node
|
||||
nodeData->_nodeSourcesIgnoreMap[self->getUUID()].cache(shouldIgnore);
|
||||
|
||||
return shouldIgnore;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message) {
|
||||
// hop past the sequence number that leads the packet
|
||||
message->seek(sizeof(quint16));
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
|
||||
#include <queue>
|
||||
|
||||
#include <tbb/concurrent_vector.h>
|
||||
|
||||
#include <QtCore/QJsonObject>
|
||||
|
||||
#include <AABox.h>
|
||||
|
@ -30,44 +32,40 @@
|
|||
class AudioMixerClientData : public NodeData {
|
||||
Q_OBJECT
|
||||
public:
|
||||
struct AddedStream {
|
||||
NodeIDStreamID nodeIDStreamID;
|
||||
PositionalAudioStream* positionalStream;
|
||||
|
||||
AddedStream(QUuid nodeID, Node::LocalID localNodeID,
|
||||
StreamID streamID, PositionalAudioStream* positionalStream) :
|
||||
nodeIDStreamID(nodeID, localNodeID, streamID), positionalStream(positionalStream) {};
|
||||
};
|
||||
|
||||
using ConcurrentAddedStreams = tbb::concurrent_vector<AddedStream>;
|
||||
|
||||
AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID);
|
||||
~AudioMixerClientData();
|
||||
|
||||
using SharedStreamPointer = std::shared_ptr<PositionalAudioStream>;
|
||||
using AudioStreamMap = std::unordered_map<QUuid, SharedStreamPointer>;
|
||||
using AudioStreamVector = std::vector<SharedStreamPointer>;
|
||||
|
||||
void queuePacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer node);
|
||||
void processPackets();
|
||||
int processPackets(ConcurrentAddedStreams& addedStreams); // returns the number of available streams this frame
|
||||
|
||||
// locks the mutex to make a copy
|
||||
AudioStreamMap getAudioStreams() { QReadLocker readLock { &_streamsLock }; return _audioStreams; }
|
||||
AudioStreamVector& getAudioStreams() { return _audioStreams; }
|
||||
AvatarAudioStream* getAvatarAudioStream();
|
||||
|
||||
// returns whether self (this data's node) should ignore node, memoized by frame
|
||||
// precondition: frame is increasing after first call (including overflow wrap)
|
||||
bool shouldIgnore(SharedNodePointer self, SharedNodePointer node, unsigned int frame);
|
||||
|
||||
// the following methods should be called from the AudioMixer assignment thread ONLY
|
||||
// they are not thread-safe
|
||||
|
||||
// returns a new or existing HRTF object for the given stream from the given node
|
||||
AudioHRTF& hrtfForStream(const QUuid& nodeID, const QUuid& streamID = QUuid()) { return _nodeSourcesHRTFMap[nodeID][streamID]; }
|
||||
|
||||
// removes an AudioHRTF object for a given stream
|
||||
void removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID = QUuid());
|
||||
|
||||
// remove all sources and data from this node
|
||||
void removeNode(const QUuid& nodeID) { _nodeSourcesIgnoreMap.unsafe_erase(nodeID); _nodeSourcesHRTFMap.erase(nodeID); }
|
||||
|
||||
void removeAgentAvatarAudioStream();
|
||||
|
||||
// packet parsers
|
||||
int parseData(ReceivedMessage& message) override;
|
||||
void processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams& addedStreams);
|
||||
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
|
||||
void parseRequestsDomainListData(ReceivedMessage& message);
|
||||
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
|
||||
void parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
|
||||
void parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
|
||||
void parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
|
||||
|
||||
// attempt to pop a frame from each audio stream, and return the number of streams from this client
|
||||
int checkBuffersBeforeFrameSend();
|
||||
|
@ -108,11 +106,59 @@ public:
|
|||
bool shouldMuteClient() { return _shouldMuteClient; }
|
||||
void setShouldMuteClient(bool shouldMuteClient) { _shouldMuteClient = shouldMuteClient; }
|
||||
glm::vec3 getPosition() { return getAvatarAudioStream() ? getAvatarAudioStream()->getPosition() : glm::vec3(0); }
|
||||
bool getRequestsDomainListData() { return _requestsDomainListData; }
|
||||
bool getRequestsDomainListData() const { return _requestsDomainListData; }
|
||||
void setRequestsDomainListData(bool requesting) { _requestsDomainListData = requesting; }
|
||||
|
||||
void setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message);
|
||||
|
||||
struct MixableStream {
|
||||
float approximateVolume { 0.0f };
|
||||
NodeIDStreamID nodeStreamID;
|
||||
std::unique_ptr<AudioHRTF> hrtf;
|
||||
PositionalAudioStream* positionalStream;
|
||||
bool ignoredByListener { false };
|
||||
bool ignoringListener { false };
|
||||
|
||||
MixableStream(NodeIDStreamID nodeIDStreamID, PositionalAudioStream* positionalStream) :
|
||||
nodeStreamID(nodeIDStreamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
|
||||
MixableStream(QUuid nodeID, Node::LocalID localNodeID, StreamID streamID, PositionalAudioStream* positionalStream) :
|
||||
nodeStreamID(nodeID, localNodeID, streamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
|
||||
};
|
||||
|
||||
using MixableStreamsVector = std::vector<MixableStream>;
|
||||
struct Streams {
|
||||
MixableStreamsVector active;
|
||||
MixableStreamsVector inactive;
|
||||
MixableStreamsVector skipped;
|
||||
};
|
||||
|
||||
Streams& getStreams() { return _streams; }
|
||||
|
||||
// thread-safe, called from AudioMixerSlave(s) while processing ignore packets for other nodes
|
||||
void ignoredByNode(QUuid nodeID);
|
||||
void unignoredByNode(QUuid nodeID);
|
||||
|
||||
// start of methods called non-concurrently from single AudioMixerSlave mixing for the owning node
|
||||
|
||||
const Node::IgnoredNodeIDs& getNewIgnoredNodeIDs() const { return _newIgnoredNodeIDs; }
|
||||
const Node::IgnoredNodeIDs& getNewUnignoredNodeIDs() const { return _newUnignoredNodeIDs; }
|
||||
|
||||
using ConcurrentIgnoreNodeIDs = tbb::concurrent_vector<QUuid>;
|
||||
const ConcurrentIgnoreNodeIDs& getNewIgnoringNodeIDs() const { return _newIgnoringNodeIDs; }
|
||||
const ConcurrentIgnoreNodeIDs& getNewUnignoringNodeIDs() const { return _newUnignoringNodeIDs; }
|
||||
|
||||
void clearStagedIgnoreChanges();
|
||||
|
||||
const Node::IgnoredNodeIDs& getIgnoringNodeIDs() const { return _ignoringNodeIDs; }
|
||||
|
||||
|
||||
const std::vector<QUuid>& getSoloedNodes() const { return _soloedNodes; }
|
||||
|
||||
bool getHasReceivedFirstMix() const { return _hasReceivedFirstMix; }
|
||||
void setHasReceivedFirstMix(bool hasReceivedFirstMix) { _hasReceivedFirstMix = hasReceivedFirstMix; }
|
||||
|
||||
// end of methods called non-concurrently from single AudioMixerSlave
|
||||
|
||||
signals:
|
||||
void injectorStreamFinished(const QUuid& streamIdentifier);
|
||||
|
||||
|
@ -126,52 +172,15 @@ private:
|
|||
};
|
||||
PacketQueue _packetQueue;
|
||||
|
||||
QReadWriteLock _streamsLock;
|
||||
AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID
|
||||
AudioStreamVector _audioStreams; // microphone stream from avatar has a null stream ID
|
||||
|
||||
void optionallyReplicatePacket(ReceivedMessage& packet, const Node& node);
|
||||
|
||||
using IgnoreZone = AABox;
|
||||
class IgnoreZoneMemo {
|
||||
public:
|
||||
IgnoreZoneMemo(AudioMixerClientData& data) : _data(data) {}
|
||||
void setGainForAvatar(QUuid nodeID, float gain);
|
||||
|
||||
// returns an ignore zone, memoized by frame (lockless if the zone is already memoized)
|
||||
// preconditions:
|
||||
// - frame is increasing after first call (including overflow wrap)
|
||||
// - there are no references left from calls to getIgnoreZone(frame - 1)
|
||||
IgnoreZone& get(unsigned int frame);
|
||||
bool containsValidPosition(ReceivedMessage& message) const;
|
||||
|
||||
private:
|
||||
AudioMixerClientData& _data;
|
||||
IgnoreZone _zone;
|
||||
std::atomic<unsigned int> _frame { 0 };
|
||||
std::mutex _mutex;
|
||||
};
|
||||
IgnoreZoneMemo _ignoreZone;
|
||||
|
||||
class IgnoreNodeCache {
|
||||
public:
|
||||
// std::atomic is not copyable - always initialize uncached
|
||||
IgnoreNodeCache() {}
|
||||
IgnoreNodeCache(const IgnoreNodeCache& other) {}
|
||||
|
||||
void cache(bool shouldIgnore);
|
||||
bool isCached();
|
||||
bool shouldIgnore();
|
||||
|
||||
private:
|
||||
std::atomic<bool> _isCached { false };
|
||||
bool _shouldIgnore { false };
|
||||
};
|
||||
struct IgnoreNodeCacheHasher { std::size_t operator()(const QUuid& key) const { return qHash(key); } };
|
||||
|
||||
using NodeSourcesIgnoreMap = tbb::concurrent_unordered_map<QUuid, IgnoreNodeCache, IgnoreNodeCacheHasher>;
|
||||
NodeSourcesIgnoreMap _nodeSourcesIgnoreMap;
|
||||
|
||||
using HRTFMap = std::unordered_map<QUuid, AudioHRTF>;
|
||||
using NodeSourcesHRTFMap = std::unordered_map<QUuid, HRTFMap>;
|
||||
NodeSourcesHRTFMap _nodeSourcesHRTFMap;
|
||||
Streams _streams;
|
||||
|
||||
quint16 _outgoingMixedAudioSequenceNumber;
|
||||
|
||||
|
@ -190,6 +199,23 @@ private:
|
|||
|
||||
bool _shouldMuteClient { false };
|
||||
bool _requestsDomainListData { false };
|
||||
|
||||
std::vector<AddedStream> _newAddedStreams;
|
||||
|
||||
Node::IgnoredNodeIDs _newIgnoredNodeIDs;
|
||||
Node::IgnoredNodeIDs _newUnignoredNodeIDs;
|
||||
|
||||
tbb::concurrent_vector<QUuid> _newIgnoringNodeIDs;
|
||||
tbb::concurrent_vector<QUuid> _newUnignoringNodeIDs;
|
||||
|
||||
std::mutex _ignoringNodeIDsMutex;
|
||||
Node::IgnoredNodeIDs _ignoringNodeIDs;
|
||||
|
||||
std::atomic_bool _isIgnoreRadiusEnabled { false };
|
||||
|
||||
std::vector<QUuid> _soloedNodes;
|
||||
|
||||
bool _hasReceivedFirstMix { false };
|
||||
};
|
||||
|
||||
#endif // hifi_AudioMixerClientData_h
|
||||
|
|
|
@ -36,7 +36,10 @@
|
|||
#include "InjectedAudioStream.h"
|
||||
#include "AudioHelpers.h"
|
||||
|
||||
using AudioStreamMap = AudioMixerClientData::AudioStreamMap;
|
||||
using namespace std;
|
||||
using AudioStreamVector = AudioMixerClientData::AudioStreamVector;
|
||||
using MixableStream = AudioMixerClientData::MixableStream;
|
||||
using MixableStreamsVector = AudioMixerClientData::MixableStreamsVector;
|
||||
|
||||
// packet helpers
|
||||
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec);
|
||||
|
@ -46,9 +49,8 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
|
|||
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
|
||||
|
||||
// mix helpers
|
||||
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
const glm::vec3& relativePosition);
|
||||
inline float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
|
||||
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
|
||||
inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
|
||||
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
|
||||
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
const glm::vec3& relativePosition);
|
||||
|
@ -56,15 +58,16 @@ inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const
|
|||
void AudioMixerSlave::processPackets(const SharedNodePointer& node) {
|
||||
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
|
||||
if (data) {
|
||||
data->processPackets();
|
||||
// process packets and collect the number of streams available for this frame
|
||||
stats.sumStreams += data->processPackets(_sharedData.addedStreams);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
|
||||
void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
|
||||
_begin = begin;
|
||||
_end = end;
|
||||
_frame = frame;
|
||||
_throttlingRatio = throttlingRatio;
|
||||
_numToRetain = numToRetain;
|
||||
}
|
||||
|
||||
void AudioMixerSlave::mix(const SharedNodePointer& node) {
|
||||
|
@ -125,105 +128,345 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
template <class Container, class Predicate>
void erase_if(Container& cont, Predicate&& pred) {
    auto it = remove_if(begin(cont), end(cont), std::forward<Predicate>(pred));
    cont.erase(it, end(cont));
}

template <class Container>
bool contains(const Container& cont, typename Container::value_type value) {
    return std::any_of(begin(cont), end(cont), [&value](const auto& element) {
        return value == element;
    });
}

// This class lets you do an erase_if in several segments,
// each segment using a different predicate
template <class Container>
class SegmentedEraseIf {
public:
    using iterator = typename Container::iterator;

    SegmentedEraseIf(Container& cont) : _cont(cont) {
        _first = begin(_cont);
        _it = _first;
    }
    ~SegmentedEraseIf() {
        assert(_it == end(_cont));
        _cont.erase(_first, _it);
    }

    template <class Predicate>
    void iterateTo(iterator last, Predicate pred) {
        while (_it != last) {
            if (!pred(*_it)) {
                if (_first != _it) {
                    *_first = move(*_it);
                }
                ++_first;
            }
            ++_it;
        }
    }

private:
    iterator _first;
    iterator _it;
    Container& _cont;
};

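A short usage sketch of `SegmentedEraseIf` may make the mechanics clearer: kept elements are compacted toward the front as the iterator advances, each `iterateTo` segment applies its own predicate, and the single erase happens in the destructor. The container and predicates below are invented for illustration:

```cpp
#include <cassert>
#include <vector>

// Drop negatives from the first half and odd values from the second half,
// with one erase at the end (relies on SegmentedEraseIf defined above).
void segmentedEraseExample() {
    std::vector<int> values { -1, 2, -3, 4, 5, 6, 7, 8 };
    auto midpoint = values.begin() + values.size() / 2;

    {
        SegmentedEraseIf<std::vector<int>> segmentedErase(values);
        segmentedErase.iterateTo(midpoint, [](int v) { return v < 0; });          // first segment
        segmentedErase.iterateTo(values.end(), [](int v) { return v % 2 != 0; }); // second segment
    }   // destructor erases everything past the compacted prefix

    assert((values == std::vector<int>{ 2, 4, 6, 8 }));
}
```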
void AudioMixerSlave::addStreams(Node& listener, AudioMixerClientData& listenerData) {
|
||||
auto& ignoredNodeIDs = listener.getIgnoredNodeIDs();
|
||||
auto& ignoringNodeIDs = listenerData.getIgnoringNodeIDs();
|
||||
|
||||
auto& streams = listenerData.getStreams();
|
||||
|
||||
// add data for newly created streams to our vector
|
||||
if (!listenerData.getHasReceivedFirstMix()) {
|
||||
// when this listener is new, we need to fill its added streams object with all available streams
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (nodeData) {
|
||||
for (auto& stream : nodeData->getAudioStreams()) {
|
||||
bool ignoredByListener = contains(ignoredNodeIDs, node->getUUID());
|
||||
bool ignoringListener = contains(ignoringNodeIDs, node->getUUID());
|
||||
|
||||
if (ignoredByListener || ignoringListener) {
|
||||
streams.skipped.emplace_back(node->getUUID(), node->getLocalID(),
|
||||
stream->getStreamIdentifier(), stream.get());
|
||||
|
||||
// pre-populate ignored and ignoring flags for this stream
|
||||
streams.skipped.back().ignoredByListener = ignoredByListener;
|
||||
streams.skipped.back().ignoringListener = ignoringListener;
|
||||
} else {
|
||||
streams.active.emplace_back(node->getUUID(), node->getLocalID(),
|
||||
stream->getStreamIdentifier(), stream.get());
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// flag this listener as having received their first mix so we know we don't need to enumerate all nodes again
|
||||
listenerData.setHasReceivedFirstMix(true);
|
||||
} else {
|
||||
for (const auto& newStream : _sharedData.addedStreams) {
|
||||
bool ignoredByListener = contains(ignoredNodeIDs, newStream.nodeIDStreamID.nodeID);
|
||||
bool ignoringListener = contains(ignoringNodeIDs, newStream.nodeIDStreamID.nodeID);
|
||||
|
||||
if (ignoredByListener || ignoringListener) {
|
||||
streams.skipped.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
|
||||
|
||||
// pre-populate ignored and ignoring flags for this stream
|
||||
streams.skipped.back().ignoredByListener = ignoredByListener;
|
||||
streams.skipped.back().ignoringListener = ignoringListener;
|
||||
} else {
|
||||
streams.active.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool shouldBeRemoved(const MixableStream& stream, const AudioMixerSlave::SharedData& sharedData) {
|
||||
return (contains(sharedData.removedNodes, stream.nodeStreamID.nodeLocalID) ||
|
||||
contains(sharedData.removedStreams, stream.nodeStreamID));
|
||||
};
|
||||
|
||||
bool shouldBeInactive(MixableStream& stream) {
|
||||
return (!stream.positionalStream->lastPopSucceeded() ||
|
||||
stream.positionalStream->getLastPopOutputLoudness() == 0.0f);
|
||||
};
|
||||
|
||||
bool shouldBeSkipped(MixableStream& stream, const Node& listener,
|
||||
const AvatarAudioStream& listenerAudioStream,
|
||||
const AudioMixerClientData& listenerData) {
|
||||
|
||||
if (stream.nodeStreamID.nodeLocalID == listener.getLocalID()) {
|
||||
return !stream.positionalStream->shouldLoopbackForNode();
|
||||
}
|
||||
|
||||
// grab the unprocessed ignores and unignores from and for this listener
|
||||
const auto& nodesIgnoredByListener = listenerData.getNewIgnoredNodeIDs();
|
||||
const auto& nodesUnignoredByListener = listenerData.getNewUnignoredNodeIDs();
|
||||
const auto& nodesIgnoringListener = listenerData.getNewIgnoringNodeIDs();
|
||||
const auto& nodesUnignoringListener = listenerData.getNewUnignoringNodeIDs();
|
||||
|
||||
// this stream was previously not ignored by the listener and we have some newly ignored streams
|
||||
// check now if it is one of the ignored streams and flag it as such
|
||||
if (stream.ignoredByListener) {
|
||||
stream.ignoredByListener = !contains(nodesUnignoredByListener, stream.nodeStreamID.nodeID);
|
||||
} else {
|
||||
stream.ignoredByListener = contains(nodesIgnoredByListener, stream.nodeStreamID.nodeID);
|
||||
}
|
||||
|
||||
if (stream.ignoringListener) {
|
||||
stream.ignoringListener = !contains(nodesUnignoringListener, stream.nodeStreamID.nodeID);
|
||||
} else {
|
||||
stream.ignoringListener = contains(nodesIgnoringListener, stream.nodeStreamID.nodeID);
|
||||
}
|
||||
|
||||
bool listenerIsAdmin = listenerData.getRequestsDomainListData() && listener.getCanKick();
|
||||
if (stream.ignoredByListener || (stream.ignoringListener && !listenerIsAdmin)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!listenerData.getSoloedNodes().empty()) {
|
||||
return !contains(listenerData.getSoloedNodes(), stream.nodeStreamID.nodeID);
|
||||
}
|
||||
|
||||
bool shouldCheckIgnoreBox = (listenerAudioStream.isIgnoreBoxEnabled() ||
|
||||
stream.positionalStream->isIgnoreBoxEnabled());
|
||||
if (shouldCheckIgnoreBox &&
|
||||
listenerAudioStream.getIgnoreBox().touches(stream.positionalStream->getIgnoreBox())) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
float approximateVolume(const MixableStream& stream, const AvatarAudioStream* listenerAudioStream) {
|
||||
if (stream.positionalStream->getLastPopOutputTrailingLoudness() == 0.0f) {
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
if (stream.positionalStream == listenerAudioStream) {
|
||||
return 1.0f;
|
||||
}
|
||||
|
||||
// approximate the gain
|
||||
float gain = approximateGain(*listenerAudioStream, *(stream.positionalStream));
|
||||
|
||||
// for avatar streams, modify by the set gain adjustment
|
||||
if (stream.nodeStreamID.streamID.isNull()) {
|
||||
gain *= stream.hrtf->getGainAdjustment();
|
||||
}
|
||||
|
||||
return stream.positionalStream->getLastPopOutputTrailingLoudness() * gain;
|
||||
};
|
||||
|
||||
bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
|
||||
AvatarAudioStream* listenerAudioStream = static_cast<AudioMixerClientData*>(listener->getLinkedData())->getAvatarAudioStream();
|
||||
AudioMixerClientData* listenerData = static_cast<AudioMixerClientData*>(listener->getLinkedData());
|
||||
|
||||
// if we received an invalid position from this listener, then refuse to make them a mix
|
||||
// because we don't know how to do it properly
|
||||
if (!listenerAudioStream->hasValidPosition()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// zero out the mix for this listener
|
||||
memset(_mixSamples, 0, sizeof(_mixSamples));
|
||||
|
||||
bool isThrottling = _throttlingRatio > 0.0f;
|
||||
std::vector<std::pair<float, SharedNodePointer>> throttledNodes;
|
||||
bool isThrottling = _numToRetain != -1;
|
||||
bool isSoloing = !listenerData->getSoloedNodes().empty();
|
||||
|
||||
typedef void (AudioMixerSlave::*MixFunctor)(
|
||||
AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
|
||||
auto forAllStreams = [&](const SharedNodePointer& node, AudioMixerClientData* nodeData, MixFunctor mixFunctor) {
|
||||
auto nodeID = node->getUUID();
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
(this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream);
|
||||
}
|
||||
};
|
||||
auto& streams = listenerData->getStreams();
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
auto mixStart = p_high_resolution_clock::now();
|
||||
#endif
|
||||
addStreams(*listener, *listenerData);
|
||||
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (!nodeData) {
|
||||
return;
|
||||
// Process skipped streams
|
||||
erase_if(streams.skipped, [&](MixableStream& stream) {
|
||||
if (shouldBeRemoved(stream, _sharedData)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (*node == *listener) {
|
||||
// only mix the echo, if requested
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
if (nodeStream->shouldLoopbackForNode()) {
|
||||
mixStream(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
|
||||
}
|
||||
}
|
||||
} else if (!listenerData->shouldIgnore(listener, node, _frame)) {
|
||||
if (!isThrottling) {
|
||||
forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
|
||||
if (!shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
if (shouldBeInactive(stream)) {
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.skippedToInactive;
|
||||
} else {
|
||||
auto nodeID = node->getUUID();
|
||||
streams.active.push_back(move(stream));
|
||||
++stats.skippedToActive;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// compute the node's max relative volume
|
||||
float nodeVolume = 0.0f;
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
if (!isThrottling) {
|
||||
updateHRTFParameters(stream, *listenerAudioStream,
|
||||
listenerData->getMasterAvatarGain());
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
// approximate the gain
|
||||
glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition();
|
||||
float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition);
|
||||
// Process inactive streams
|
||||
erase_if(streams.inactive, [&](MixableStream& stream) {
|
||||
if (shouldBeRemoved(stream, _sharedData)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// modify by hrtf gain adjustment
|
||||
auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier());
|
||||
gain *= hrtf.getGainAdjustment();
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.inactiveToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain;
|
||||
nodeVolume = std::max(streamVolume, nodeVolume);
|
||||
}
|
||||
if (!shouldBeInactive(stream)) {
|
||||
streams.active.push_back(move(stream));
|
||||
++stats.inactiveToActive;
|
||||
return true;
|
||||
}
|
||||
|
||||
// max-heapify the nodes by relative volume
|
||||
throttledNodes.push_back({ nodeVolume, node });
|
||||
std::push_heap(throttledNodes.begin(), throttledNodes.end());
|
||||
if (!isThrottling) {
|
||||
updateHRTFParameters(stream, *listenerAudioStream,
|
||||
listenerData->getMasterAvatarGain());
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
// Process active streams
|
||||
erase_if(streams.active, [&](MixableStream& stream) {
|
||||
if (shouldBeRemoved(stream, _sharedData)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (isThrottling) {
|
||||
// we're throttling, so we need to update the approximate volume for any un-skipped streams
|
||||
// unless this is simply for an echo (in which case the approx volume is 1.0)
|
||||
stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
|
||||
} else {
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
addStream(stream, *listenerAudioStream, 0.0f, isSoloing);
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.activeToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
|
||||
isSoloing);
|
||||
|
||||
if (shouldBeInactive(stream)) {
|
||||
// To reduce artifacts we still call render to flush the HRTF for every silent
// source on the first frame where the source becomes silent;
// this ensures the correct tail from the last mixed block
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.activeToInactive;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
if (isThrottling) {
|
||||
// pop the loudest nodes off the heap and mix their streams
|
||||
int numToRetain = (int)(std::distance(_begin, _end) * (1 - _throttlingRatio));
|
||||
for (int i = 0; i < numToRetain; i++) {
|
||||
if (throttledNodes.empty()) {
|
||||
break;
|
||||
// since we're throttling, we need to partition the mixable into throttled and unthrottled streams
|
||||
int numToRetain = min(_numToRetain, (int)streams.active.size()); // Make sure we don't overflow
|
||||
auto throttlePoint = begin(streams.active) + numToRetain;
|
||||
|
||||
std::nth_element(streams.active.begin(), throttlePoint, streams.active.end(),
|
||||
[](const auto& a, const auto& b)
|
||||
{
|
||||
return a.approximateVolume > b.approximateVolume;
|
||||
});
|
||||
|
||||
SegmentedEraseIf<MixableStreamsVector> erase(streams.active);
|
||||
erase.iterateTo(throttlePoint, [&](MixableStream& stream) {
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
resetHRTFState(stream);
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.activeToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
std::pop_heap(throttledNodes.begin(), throttledNodes.end());
|
||||
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
|
||||
isSoloing);
|
||||
|
||||
auto& node = throttledNodes.back().second;
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
|
||||
if (shouldBeInactive(stream)) {
|
||||
// To reduce artifacts we still call render to flush the HRTF for every silent
// source on the first frame where the source becomes silent;
// this ensures the correct tail from the last mixed block
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.activeToInactive;
|
||||
return true;
|
||||
}
|
||||
|
||||
throttledNodes.pop_back();
|
||||
}
|
||||
return false;
|
||||
});
|
||||
erase.iterateTo(end(streams.active), [&](MixableStream& stream) {
|
||||
// To reduce artifacts we reset the HRTF state for every throttled
// source on the first frame where the source becomes throttled;
// this ensures we at least remove the tail from the last mixed block,
// preventing excessive artifacts on the next first block
|
||||
resetHRTFState(stream);
|
||||
|
||||
// throttle the remaining nodes' streams
|
||||
for (const std::pair<float, SharedNodePointer>& nodePair : throttledNodes) {
|
||||
auto& node = nodePair.second;
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
forAllStreams(node, nodeData, &AudioMixerSlave::throttleStream);
|
||||
}
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.activeToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (shouldBeInactive(stream)) {
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.activeToInactive;
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
}
|
||||
|
||||
stats.skipped += (int)streams.skipped.size();
|
||||
stats.inactive += (int)streams.inactive.size();
|
||||
stats.active += (int)streams.active.size();
|
||||
|
||||
// clear the newly ignored, un-ignored, ignoring, and un-ignoring streams now that we've processed them
|
||||
listenerData->clearStagedIgnoreChanges();
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
auto mixEnd = p_high_resolution_clock::now();
|
||||
auto mixTime = std::chrono::duration_cast<std::chrono::nanoseconds>(mixEnd - mixStart);
|
||||
|
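
When the mixer is over budget, prepareMix() keeps only the numToRetain loudest active streams: std::nth_element partially sorts the active list by approximateVolume so entries before the partition point are mixed at full quality and the rest are throttled. Below is a minimal sketch of that partition step, using a simplified stand-in Stream type and placeholder mix/throttle bodies; the real code operates on MixableStream entries inside the lambdas shown above.

// Illustrative sketch only: a simplified stand-in for the mixer's partition step.
#include <algorithm>
#include <vector>

struct Stream {
    float approximateVolume;
};

void partitionAndMix(std::vector<Stream>& active, int numToRetain) {
    numToRetain = std::min<int>(numToRetain, (int)active.size()); // don't overflow
    auto throttlePoint = active.begin() + numToRetain;

    // Partial sort: the numToRetain loudest streams end up before throttlePoint
    // (in no particular order); everything after it is at most as loud.
    std::nth_element(active.begin(), throttlePoint, active.end(),
                     [](const Stream& a, const Stream& b) {
                         return a.approximateVolume > b.approximateVolume;
                     });

    for (auto it = active.begin(); it != throttlePoint; ++it) {
        // mix fully (hypothetical placeholder)
    }
    for (auto it = throttlePoint; it != active.end(); ++it) {
        // throttle: keep HRTF history but skip the expensive render (placeholder)
    }
}
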
@@ -246,51 +489,39 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
return hasAudio;
}

void AudioMixerSlave::throttleStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
// only throttle this stream to the mix if it has a valid position, we won't know how to mix it otherwise
if (streamToAdd.hasValidPosition()) {
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, true);
}
}

void AudioMixerSlave::mixStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
// only add the stream to the mix if it has a valid position, we won't know how to mix it otherwise
if (streamToAdd.hasValidPosition()) {
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, false);
}
}

void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
bool throttle) {
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain, bool isSoloing) {
++stats.totalMixes;

// to reduce artifacts we call the HRTF functor for every source, even if throttled or silent
// this ensures the correct tail from last mixed block and the correct spatialization of next first block
auto streamToAdd = mixableStream.positionalStream;

// check if this is a server echo of a source back to itself
bool isEcho = (&streamToAdd == &listeningNodeStream);
bool isEcho = (streamToAdd == &listeningNodeStream);

glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();

float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(listenerNodeData, listeningNodeStream, streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);

float gain = 1.0f;
if (!isSoloing) {
gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
}

const int HRTF_DATASET_INDEX = 1;

if (!streamToAdd.lastPopSucceeded()) {
if (!streamToAdd->lastPopSucceeded()) {
bool forceSilentBlock = true;

if (!streamToAdd.getLastPopOutput().isNull()) {
bool isInjector = dynamic_cast<const InjectedAudioStream*>(&streamToAdd);
if (!streamToAdd->getLastPopOutput().isNull()) {
bool isInjector = dynamic_cast<const InjectedAudioStream*>(streamToAdd);

// in an injector, just go silent - the injector has likely ended
// in other inputs (microphone, &c.), repeat with fade to avoid the harsh jump to silence
if (!isInjector) {
// calculate its fade factor, which depends on how many times it's already been repeated.
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd.getConsecutiveNotMixedCount() - 1);
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
if (fadeFactor > 0.0f) {
// apply the fadeFactor to the gain
gain *= fadeFactor;
@@ -302,15 +533,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
if (forceSilentBlock) {
// call renderSilent with a forced silent block to reduce artifacts
// (this is not done for stereo streams since they do not go through the HRTF)
if (!streamToAdd.isStereo() && !isEcho) {
// get the existing listener-source HRTF object, or create a new one
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());

if (!streamToAdd->isStereo() && !isEcho) {
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
hrtf.renderSilent(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->render(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

++stats.hrtfSilentRenders;
++stats.hrtfRenders;
}

return;
@@ -318,16 +546,15 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}

// grab the stream from the ring buffer
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd.getLastPopOutput();
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();

// stereo sources are not passed through HRTF
if (streamToAdd.isStereo()) {
if (streamToAdd->isStereo()) {

// apply the avatar gain adjustment
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
gain *= hrtf.getGainAdjustment();
gain *= mixableStream.hrtf->getGainAdjustment();

const float scale = 1/32768.0f; // int16_t to float
const float scale = 1 / 32768.0f; // int16_t to float

for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
_mixSamples[2*i+0] += (float)streamPopOutput[2*i+0] * gain * scale;
@@ -335,11 +562,8 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}

++stats.manualStereoMixes;
return;
}

// echo sources are not passed through HRTF
if (isEcho) {
} else if (isEcho) {
// echo sources are not passed through HRTF

const float scale = 1/32768.0f; // int16_t to float
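
Stereo and echo sources bypass the HRTF and are mixed manually: each int16_t sample is scaled to float with 1/32768 and accumulated into the stereo mix buffer with the computed gain. The following self-contained sketch shows only that accumulate step; the frame-size constant and buffer names are stand-ins for AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL and the slave's _mixSamples.

// Illustrative sketch only: the manual (non-HRTF) accumulate step.
#include <cstdint>

constexpr int NETWORK_FRAME_SAMPLES_PER_CHANNEL = 240;   // assumed frame size

void accumulateStereo(const int16_t* streamSamples, float* mixSamples, float gain) {
    const float scale = 1 / 32768.0f;   // int16_t to float
    for (int i = 0; i < NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
        mixSamples[2 * i + 0] += (float)streamSamples[2 * i + 0] * gain * scale;
        mixSamples[2 * i + 1] += (float)streamSamples[2 * i + 1] * gain * scale;
    }
}
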
@@ -350,41 +574,38 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}

++stats.manualEchoMixes;
return;
} else {
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

mixableStream.hrtf->render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

++stats.hrtfRenders;
}
}

// get the existing listener-source HRTF object, or create a new one
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
auto streamToAdd = mixableStream.positionalStream;

streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// check if this is a server echo of a source back to itself
bool isEcho = (streamToAdd == &listeningNodeStream);

if (streamToAdd.getLastPopOutputLoudness() == 0.0f) {
// call renderSilent to reduce artifacts
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();

++stats.hrtfSilentRenders;
return;
}
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);

if (throttle) {
// call renderSilent with actual frame data and a gain of 0.0f to reduce artifacts
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);

++stats.hrtfThrottleRenders;
return;
}
++stats.hrtfUpdates;
}

if (streamToAdd.getType() == PositionalAudioStream::Injector) {
// apply per-avatar gain to positional audio injectors, which wouldn't otherwise be affected by PAL sliders
hrtf.setGainAdjustment(listenerNodeData.hrtfForStream(sourceNodeID, QUuid()).getGainAdjustment());
}

hrtf.render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

++stats.hrtfRenders;
void AudioMixerSlave::resetHRTFState(AudioMixerClientData::MixableStream& mixableStream) {
mixableStream.hrtf->reset();
++stats.hrtfResets;
}

std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec) {
@@ -443,12 +664,12 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
glm::vec3 streamPosition = stream->getPosition();

// find reverb properties
for (int i = 0; i < reverbSettings.size(); ++i) {
AABox box = audioZones[reverbSettings[i].zone];
for (const auto& settings : reverbSettings) {
AABox box = audioZones[settings.zone].area;
if (box.contains(streamPosition)) {
hasReverb = true;
reverbTime = reverbSettings[i].reverbTime;
wetLevel = reverbSettings[i].wetLevel;
reverbTime = settings.reverbTime;
wetLevel = settings.wetLevel;
break;
}
}
@@ -493,8 +714,7 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
}
}

float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition) {
float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
float gain = 1.0f;

// injector: apply attenuation
@@ -505,13 +725,14 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
// avatar: skip attenuation - it is too costly to approximate

// distance attenuation: approximate, ignore zone-specific attenuations
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
float distance = glm::length(relativePosition);
return gain / distance;

// avatar: skip master gain - it is constant for all streams
}

float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
float gain = 1.0f;

@@ -534,7 +755,7 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
gain *= offAxisCoefficient;

// apply master gain, only to avatars
gain *= listenerNodeData.getMasterAvatarGain();
gain *= masterListenerGain;
}

auto& audioZones = AudioMixer::getAudioZones();
@@ -542,10 +763,10 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi

// find distance attenuation coefficient
float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
for (int i = 0; i < zoneSettings.length(); ++i) {
if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) &&
audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) {
attenuationPerDoublingInDistance = zoneSettings[i].coefficient;
for (const auto& settings : zoneSettings) {
if (audioZones[settings.source].area.contains(streamToAdd.getPosition()) &&
audioZones[settings.listener].area.contains(listeningNodeStream.getPosition())) {
attenuationPerDoublingInDistance = settings.coefficient;
break;
}
}
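
computeGain() picks an attenuationPerDoublingInDistance coefficient, zone-specific when both the source and the listener fall inside configured zones, otherwise the mixer-wide default. The distance curve itself lies outside the hunks shown above; the sketch below is one common reading of "attenuation per doubling of distance", with an assumed 1 m reference distance, included only to make the coefficient concrete.

// Illustrative sketch only: not the exact curve, reference distance, or clamping
// used by computeGain().
#include <algorithm>
#include <cmath>

float distanceAttenuation(float distance, float attenuationPerDoubling) {
    const float REFERENCE_DISTANCE = 1.0f;        // assumed reference distance
    float d = std::max(distance, REFERENCE_DISTANCE);
    // each doubling of distance multiplies the gain by (1 - attenuationPerDoubling)
    float doublings = std::log2(d / REFERENCE_DISTANCE);
    return std::pow(1.0f - attenuationPerDoubling, doublings);
}
// e.g. with attenuationPerDoubling = 0.5f: gain is 1.0 at 1 m, 0.5 at 2 m, 0.25 at 4 m.
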
@@ -12,29 +12,39 @@
#ifndef hifi_AudioMixerSlave_h
#define hifi_AudioMixerSlave_h

#include <tbb/concurrent_vector.h>

#include <AABox.h>
#include <AudioHRTF.h>
#include <AudioRingBuffer.h>
#include <ThreadedAssignment.h>
#include <UUIDHasher.h>
#include <NodeList.h>
#include <PositionalAudioStream.h>

#include "AudioMixerClientData.h"
#include "AudioMixerStats.h"

class PositionalAudioStream;
class AvatarAudioStream;
class AudioHRTF;
class AudioMixerClientData;

class AudioMixerSlave {
public:
using ConstIter = NodeList::const_iterator;

struct SharedData {
AudioMixerClientData::ConcurrentAddedStreams addedStreams;
std::vector<Node::LocalID> removedNodes;
std::vector<NodeIDStreamID> removedStreams;
};

AudioMixerSlave(SharedData& sharedData) : _sharedData(sharedData) {};

// process packets for a given node (requires no configuration)
void processPackets(const SharedNodePointer& node);

// configure a round of mixing
void configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
void configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);

// mix and broadcast non-ignored streams to the node (requires configuration using configureMix, above)
// returns true if a mixed packet was sent to the node
@@ -45,13 +55,15 @@ public:
private:
// create mix, returns true if mix has audio
bool prepareMix(const SharedNodePointer& listener);
void throttleStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
void mixStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
void addStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer,
bool throttle);
void addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain, bool isSoloing);
void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);

void addStreams(Node& listener, AudioMixerClientData& listenerData);

// mixing buffers
float _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
@@ -61,7 +73,9 @@ private:
ConstIter _begin;
ConstIter _end;
unsigned int _frame { 0 };
float _throttlingRatio { 0.0f };
int _numToRetain { -1 };

SharedData& _sharedData;
};

#endif // hifi_AudioMixerSlave_h
@@ -74,13 +74,11 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
run(begin, end);
}

void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_function = &AudioMixerSlave::mix;
_configure = [=](AudioMixerSlave& slave) {
slave.configureMix(_begin, _end, _frame, _throttlingRatio);
slave.configureMix(_begin, _end, frame, numToRetain);
};
_frame = frame;
_throttlingRatio = throttlingRatio;

run(begin, end);
}
@@ -167,7 +165,7 @@ void AudioMixerSlavePool::resize(int numThreads) {
if (numThreads > _numThreads) {
// start new slaves
for (int i = 0; i < numThreads - _numThreads; ++i) {
auto slave = new AudioMixerSlaveThread(*this);
auto slave = new AudioMixerSlaveThread(*this, _workerSharedData);
slave->start();
_slaves.emplace_back(slave);
}
@@ -31,7 +31,8 @@ class AudioMixerSlaveThread : public QThread, public AudioMixerSlave {
using Lock = std::unique_lock<Mutex>;

public:
AudioMixerSlaveThread(AudioMixerSlavePool& pool) : _pool(pool) {}
AudioMixerSlaveThread(AudioMixerSlavePool& pool, AudioMixerSlave::SharedData& sharedData)
: AudioMixerSlave(sharedData), _pool(pool) {}

void run() override final;

@@ -58,14 +59,15 @@ class AudioMixerSlavePool {
public:
using ConstIter = NodeList::const_iterator;

AudioMixerSlavePool(int numThreads = QThread::idealThreadCount()) { setNumThreads(numThreads); }
AudioMixerSlavePool(AudioMixerSlave::SharedData& sharedData, int numThreads = QThread::idealThreadCount())
: _workerSharedData(sharedData) { setNumThreads(numThreads); }
~AudioMixerSlavePool() { resize(0); }

// process packets on slave threads
void processPackets(ConstIter begin, ConstIter end);

// mix on slave threads
void mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
void mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);

// iterate over all slaves
void each(std::function<void(AudioMixerSlave& slave)> functor);
@@ -96,10 +98,10 @@ private:

// frame state
Queue _queue;
unsigned int _frame { 0 };
float _throttlingRatio { 0.0f };
ConstIter _begin;
ConstIter _end;

AudioMixerSlave::SharedData& _workerSharedData;
};

#endif // hifi_AudioMixerSlavePool_h
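
The pool changes above thread a single SharedData reference from AudioMixerSlavePool through each AudioMixerSlaveThread into its AudioMixerSlave base, and replace the throttlingRatio mix parameter with an explicit numToRetain. The following minimal, self-contained stand-in shows that wiring with hypothetical Worker/WorkerPool names; only the shape mirrors the headers above.

// Minimal sketch of the pattern: workers are constructed with a reference to
// shared state and re-configured each frame.
#include <memory>
#include <vector>

struct SharedData { /* queues of added/removed streams, etc. */ };

class Worker {
public:
    explicit Worker(SharedData& shared) : _shared(shared) {}
    void configure(unsigned int frame, int numToRetain) {
        _frame = frame;
        _numToRetain = numToRetain;
    }
private:
    SharedData& _shared;
    unsigned int _frame { 0 };
    int _numToRetain { -1 };   // -1 means "no throttling"
};

class WorkerPool {
public:
    WorkerPool(SharedData& shared, int numWorkers) : _shared(shared) {
        for (int i = 0; i < numWorkers; ++i) {
            _workers.emplace_back(std::make_unique<Worker>(_shared));
        }
    }
    void mix(unsigned int frame, int numToRetain) {
        for (auto& worker : _workers) {
            worker->configure(frame, numToRetain);
            // ...run the worker's mix job here (omitted)...
        }
    }
private:
    SharedData& _shared;
    std::vector<std::unique_ptr<Worker>> _workers;
};
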
@ -15,12 +15,27 @@ void AudioMixerStats::reset() {
|
|||
sumStreams = 0;
|
||||
sumListeners = 0;
|
||||
sumListenersSilent = 0;
|
||||
|
||||
totalMixes = 0;
|
||||
|
||||
hrtfRenders = 0;
|
||||
hrtfSilentRenders = 0;
|
||||
hrtfThrottleRenders = 0;
|
||||
hrtfResets = 0;
|
||||
hrtfUpdates = 0;
|
||||
|
||||
manualStereoMixes = 0;
|
||||
manualEchoMixes = 0;
|
||||
|
||||
skippedToActive = 0;
|
||||
skippedToInactive = 0;
|
||||
inactiveToSkipped = 0;
|
||||
inactiveToActive = 0;
|
||||
activeToSkipped = 0;
|
||||
activeToInactive = 0;
|
||||
|
||||
skipped = 0;
|
||||
inactive = 0;
|
||||
active = 0;
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
mixTime = 0;
|
||||
#endif
|
||||
|
@ -30,12 +45,27 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
|
|||
sumStreams += otherStats.sumStreams;
|
||||
sumListeners += otherStats.sumListeners;
|
||||
sumListenersSilent += otherStats.sumListenersSilent;
|
||||
|
||||
totalMixes += otherStats.totalMixes;
|
||||
|
||||
hrtfRenders += otherStats.hrtfRenders;
|
||||
hrtfSilentRenders += otherStats.hrtfSilentRenders;
|
||||
hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
|
||||
hrtfResets += otherStats.hrtfResets;
|
||||
hrtfUpdates += otherStats.hrtfUpdates;
|
||||
|
||||
manualStereoMixes += otherStats.manualStereoMixes;
|
||||
manualEchoMixes += otherStats.manualEchoMixes;
|
||||
|
||||
skippedToActive += otherStats.skippedToActive;
|
||||
skippedToInactive += otherStats.skippedToInactive;
|
||||
inactiveToSkipped += otherStats.inactiveToSkipped;
|
||||
inactiveToActive += otherStats.inactiveToActive;
|
||||
activeToSkipped += otherStats.activeToSkipped;
|
||||
activeToInactive += otherStats.activeToInactive;
|
||||
|
||||
skipped += otherStats.skipped;
|
||||
inactive += otherStats.inactive;
|
||||
active += otherStats.active;
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
mixTime += otherStats.mixTime;
|
||||
#endif
|
||||
|
|
|
@ -24,12 +24,23 @@ struct AudioMixerStats {
|
|||
int totalMixes { 0 };
|
||||
|
||||
int hrtfRenders { 0 };
|
||||
int hrtfSilentRenders { 0 };
|
||||
int hrtfThrottleRenders { 0 };
|
||||
int hrtfResets { 0 };
|
||||
int hrtfUpdates { 0 };
|
||||
|
||||
int manualStereoMixes { 0 };
|
||||
int manualEchoMixes { 0 };
|
||||
|
||||
int skippedToActive { 0 };
|
||||
int skippedToInactive { 0 };
|
||||
int inactiveToSkipped { 0 };
|
||||
int inactiveToActive { 0 };
|
||||
int activeToSkipped { 0 };
|
||||
int activeToInactive { 0 };
|
||||
|
||||
int skipped { 0 };
|
||||
int inactive { 0 };
|
||||
int active { 0 };
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
uint64_t mixTime { 0 };
|
||||
#endif
|
||||
|
|
|
@ -23,9 +23,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
|
|||
|
||||
if (type == PacketType::SilentAudioFrame) {
|
||||
const char* dataAt = packetAfterSeqNum.constData();
|
||||
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
|
||||
readBytes += sizeof(quint16);
|
||||
numAudioSamples = (int)numSilentSamples;
|
||||
SilentSamplesBytes numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
|
||||
readBytes += sizeof(SilentSamplesBytes);
|
||||
numAudioSamples = (int) numSilentSamples;
|
||||
|
||||
// read the positional data
|
||||
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
|
||||
|
@ -34,9 +34,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
|
|||
_shouldLoopbackForNode = (type == PacketType::MicrophoneAudioWithEcho);
|
||||
|
||||
// read the channel flag
|
||||
quint8 channelFlag = packetAfterSeqNum.at(readBytes);
|
||||
ChannelFlag channelFlag = packetAfterSeqNum.at(readBytes);
|
||||
bool isStereo = channelFlag == 1;
|
||||
readBytes += sizeof(quint8);
|
||||
readBytes += sizeof(ChannelFlag);
|
||||
|
||||
// if isStereo value has changed, restart the ring buffer with new frame size
|
||||
if (isStereo != _isStereo) {
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
|
||||
#include "PositionalAudioStream.h"
|
||||
|
||||
using SilentSamplesBytes = quint16;
|
||||
|
||||
class AvatarAudioStream : public PositionalAudioStream {
|
||||
public:
|
||||
AvatarAudioStream(bool isStereo, int numStaticJitterFrames = -1);
|
||||
|
|
|
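
The AvatarAudioStream hunks above swap raw quint16/quint8 reads for the SilentSamplesBytes and ChannelFlag aliases when parsing a SilentAudioFrame header. Below is a small sketch of the same read pattern against a plain byte buffer with standard integer types; the real parser works on a QByteArray and goes on to call parsePositionalData().

// Illustrative sketch only.
#include <cstdint>
#include <cstring>

int parseSilentFrame(const char* data, int& numAudioSamples) {
    int readBytes = 0;
    uint16_t numSilentSamples = 0;   // corresponds to SilentSamplesBytes (quint16)
    std::memcpy(&numSilentSamples, data, sizeof(numSilentSamples)); // avoid unaligned reads
    readBytes += (int)sizeof(numSilentSamples);
    numAudioSamples = (int)numSilentSamples;
    // ...positional data would be parsed next...
    return readBytes;
}
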
@ -541,7 +541,8 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointer<ReceivedMess
|
|||
// ...For those nodes, reset the lastBroadcastTime to 0
|
||||
// so that the AvatarMixer will send Identity data to us
|
||||
[&](const SharedNodePointer& node) {
|
||||
nodeData->setLastBroadcastTime(node->getUUID(), 0);
|
||||
nodeData->setLastBroadcastTime(node->getLocalID(), 0);
|
||||
nodeData->resetSentTraitData(node->getLocalID());
|
||||
}
|
||||
);
|
||||
}
|
||||
|
@ -564,7 +565,8 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
|
|||
// parse the identity packet and update the change timestamp if appropriate
|
||||
bool identityChanged = false;
|
||||
bool displayNameChanged = false;
|
||||
avatar.processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
|
||||
QDataStream avatarIdentityStream(message->getMessage());
|
||||
avatar.processAvatarIdentity(avatarIdentityStream, identityChanged, displayNameChanged);
|
||||
|
||||
if (identityChanged) {
|
||||
QMutexLocker nodeDataLocker(&nodeData->getMutex());
|
||||
|
@ -588,10 +590,10 @@ void AvatarMixer::handleAvatarIdentityRequestPacket(QSharedPointer<ReceivedMessa
|
|||
QUuid avatarID(QUuid::fromRfc4122(message->getMessage()) );
|
||||
if (!avatarID.isNull()) {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto node = nodeList->nodeWithUUID(avatarID);
|
||||
if (node) {
|
||||
QMutexLocker lock(&node->getMutex());
|
||||
AvatarMixerClientData* avatarClientData = dynamic_cast<AvatarMixerClientData*>(node->getLinkedData());
|
||||
auto requestedNode = nodeList->nodeWithUUID(avatarID);
|
||||
|
||||
if (requestedNode) {
|
||||
AvatarMixerClientData* avatarClientData = static_cast<AvatarMixerClientData*>(requestedNode->getLinkedData());
|
||||
if (avatarClientData) {
|
||||
const AvatarData& avatarData = avatarClientData->getAvatar();
|
||||
QByteArray serializedAvatar = avatarData.identityByteArray();
|
||||
|
@ -600,6 +602,11 @@ void AvatarMixer::handleAvatarIdentityRequestPacket(QSharedPointer<ReceivedMessa
|
|||
nodeList->sendPacketList(std::move(identityPackets), *senderNode);
|
||||
++_sumIdentityPackets;
|
||||
}
|
||||
|
||||
AvatarMixerClientData* senderData = static_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
|
||||
if (senderData) {
|
||||
senderData->resetSentTraitData(requestedNode->getLocalID());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -625,28 +632,38 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
|
|||
while (message->getBytesLeftToRead()) {
|
||||
// parse out the UUID being ignored from the packet
|
||||
QUuid ignoredUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
if (nodeList->nodeWithUUID(ignoredUUID)) {
|
||||
auto ignoredNode = nodeList->nodeWithUUID(ignoredUUID);
|
||||
if (ignoredNode) {
|
||||
if (nodeData) {
|
||||
// Reset the lastBroadcastTime for the ignored avatar to 0
|
||||
// so the AvatarMixer knows it'll have to send identity data about the ignored avatar
|
||||
// to the ignorer if the ignorer unignores.
|
||||
nodeData->setLastBroadcastTime(ignoredUUID, 0);
|
||||
nodeData->setLastBroadcastTime(ignoredNode->getLocalID(), 0);
|
||||
nodeData->resetSentTraitData(ignoredNode->getLocalID());
|
||||
}
|
||||
|
||||
|
||||
// Reset the lastBroadcastTime for the ignorer (FROM THE PERSPECTIVE OF THE IGNORED) to 0
|
||||
// so the AvatarMixer knows it'll have to send identity data about the ignorer
|
||||
// to the ignored if the ignorer unignores.
|
||||
auto ignoredNode = nodeList->nodeWithUUID(ignoredUUID);
|
||||
AvatarMixerClientData* ignoredNodeData = reinterpret_cast<AvatarMixerClientData*>(ignoredNode->getLinkedData());
|
||||
if (ignoredNodeData) {
|
||||
ignoredNodeData->setLastBroadcastTime(senderNode->getUUID(), 0);
|
||||
ignoredNodeData->setLastBroadcastTime(senderNode->getLocalID(), 0);
|
||||
ignoredNodeData->resetSentTraitData(senderNode->getLocalID());
|
||||
}
|
||||
}
|
||||
|
||||
if (addToIgnore) {
|
||||
senderNode->addIgnoredNode(ignoredUUID);
|
||||
|
||||
if (ignoredNode) {
|
||||
// send a reliable kill packet to remove the sending avatar for the ignored avatar
|
||||
auto killPacket = NLPacket::create(PacketType::KillAvatar,
|
||||
NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
|
||||
killPacket->write(senderNode->getUUID().toRfc4122());
|
||||
killPacket->writePrimitive(KillAvatarReason::AvatarDisconnected);
|
||||
nodeList->sendPacket(std::move(killPacket), *ignoredNode);
|
||||
}
|
||||
} else {
|
||||
senderNode->removeIgnoredNode(ignoredUUID);
|
||||
}
|
||||
|
@ -657,7 +674,13 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
|
|||
|
||||
void AvatarMixer::handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
|
||||
auto start = usecTimestampNow();
|
||||
sendingNode->parseIgnoreRadiusRequestMessage(packet);
|
||||
|
||||
bool enabled;
|
||||
packet->readPrimitive(&enabled);
|
||||
|
||||
auto avatarData = getOrCreateClientData(sendingNode);
|
||||
avatarData->setIsIgnoreRadiusEnabled(enabled);
|
||||
|
||||
auto end = usecTimestampNow();
|
||||
_handleRadiusIgnoreRequestPacketElapsedTime += (end - start);
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
|
||||
#include "AvatarMixerClientData.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <udt/PacketHeaders.h>
|
||||
|
||||
#include <DependencyManager.h>
|
||||
|
@ -25,20 +26,20 @@ AvatarMixerClientData::AvatarMixerClientData(const QUuid& nodeID, Node::LocalID
|
|||
_avatar->setID(nodeID);
|
||||
}
|
||||
|
||||
uint64_t AvatarMixerClientData::getLastOtherAvatarEncodeTime(QUuid otherAvatar) const {
|
||||
std::unordered_map<QUuid, uint64_t>::const_iterator itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
|
||||
uint64_t AvatarMixerClientData::getLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar) const {
|
||||
const auto itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
|
||||
if (itr != _lastOtherAvatarEncodeTime.end()) {
|
||||
return itr->second;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::setLastOtherAvatarEncodeTime(const QUuid& otherAvatar, uint64_t time) {
|
||||
std::unordered_map<QUuid, uint64_t>::iterator itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
|
||||
void AvatarMixerClientData::setLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar, uint64_t time) {
|
||||
auto itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
|
||||
if (itr != _lastOtherAvatarEncodeTime.end()) {
|
||||
itr->second = time;
|
||||
} else {
|
||||
_lastOtherAvatarEncodeTime.emplace(std::pair<QUuid, uint64_t>(otherAvatar, time));
|
||||
_lastOtherAvatarEncodeTime.emplace(std::pair<NLPacket::LocalID, uint64_t>(otherAvatar, time));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -111,6 +112,11 @@ void AvatarMixerClientData::processSetTraitsMessage(ReceivedMessage& message,
|
|||
AvatarTraits::TraitWireSize traitSize;
|
||||
message.readPrimitive(&traitSize);
|
||||
|
||||
if (traitSize < -1 || traitSize > message.getBytesLeftToRead()) {
|
||||
qWarning() << "Refusing to process simple trait of size" << traitSize << "from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
|
||||
if (packetTraitVersion > _lastReceivedTraitVersions[traitType]) {
|
||||
_avatar->processTrait(traitType, message.read(traitSize));
|
||||
_lastReceivedTraitVersions[traitType] = packetTraitVersion;
|
||||
|
@ -127,26 +133,41 @@ void AvatarMixerClientData::processSetTraitsMessage(ReceivedMessage& message,
|
|||
} else {
|
||||
AvatarTraits::TraitInstanceID instanceID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
if (message.getBytesLeftToRead() == 0) {
|
||||
qWarning () << "Received an instanced trait with no size from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
|
||||
AvatarTraits::TraitWireSize traitSize;
|
||||
message.readPrimitive(&traitSize);
|
||||
|
||||
auto& instanceVersionRef = _lastReceivedTraitVersions.getInstanceValueRef(traitType, instanceID);
|
||||
if (traitSize < -1 || traitSize > message.getBytesLeftToRead()) {
|
||||
qWarning() << "Refusing to process instanced trait of size" << traitSize << "from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
|
||||
if (packetTraitVersion > instanceVersionRef) {
|
||||
if (traitSize == AvatarTraits::DELETED_TRAIT_SIZE) {
|
||||
_avatar->processDeletedTraitInstance(traitType, instanceID);
|
||||
if (traitType == AvatarTraits::AvatarEntity) {
|
||||
auto& instanceVersionRef = _lastReceivedTraitVersions.getInstanceValueRef(traitType, instanceID);
|
||||
|
||||
// to track a deleted instance but keep version information
|
||||
// the avatar mixer uses the negative value of the sent version
|
||||
instanceVersionRef = -packetTraitVersion;
|
||||
if (packetTraitVersion > instanceVersionRef) {
|
||||
if (traitSize == AvatarTraits::DELETED_TRAIT_SIZE) {
|
||||
_avatar->processDeletedTraitInstance(traitType, instanceID);
|
||||
|
||||
// to track a deleted instance but keep version information
|
||||
// the avatar mixer uses the negative value of the sent version
|
||||
instanceVersionRef = -packetTraitVersion;
|
||||
} else {
|
||||
_avatar->processTraitInstance(traitType, instanceID, message.read(traitSize));
|
||||
instanceVersionRef = packetTraitVersion;
|
||||
}
|
||||
|
||||
anyTraitsChanged = true;
|
||||
} else {
|
||||
_avatar->processTraitInstance(traitType, instanceID, message.read(traitSize));
|
||||
instanceVersionRef = packetTraitVersion;
|
||||
message.seek(message.getPosition() + traitSize);
|
||||
}
|
||||
|
||||
anyTraitsChanged = true;
|
||||
} else {
|
||||
message.seek(message.getPosition() + traitSize);
|
||||
qWarning() << "Refusing to process traits packet with instanced trait of unprocessable type from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
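
The new checks in processSetTraitsMessage() refuse to read a trait whose declared TraitWireSize is negative (other than the deleted-trait sentinel) or larger than the bytes left in the message. Here is a sketch of that guard against a generic message reader; Message is a hypothetical stand-in for ReceivedMessage, and DELETED_TRAIT_SIZE mirrors AvatarTraits::DELETED_TRAIT_SIZE, assumed here to be -1.

// Illustrative sketch only: bounds-check a length field before reading a payload.
#include <cstdint>

constexpr int32_t DELETED_TRAIT_SIZE = -1;   // assumed sentinel value

template <typename Message>
bool readTraitPayload(Message& message) {
    int32_t traitSize = 0;
    message.readPrimitive(&traitSize);

    // reject sizes that are neither the delete sentinel nor representable
    // within the bytes actually left in the packet
    if (traitSize < DELETED_TRAIT_SIZE || traitSize > message.getBytesLeftToRead()) {
        return false;   // caller logs a warning and stops processing the packet
    }
    if (traitSize == DELETED_TRAIT_SIZE) {
        // handle deletion without reading a payload
        return true;
    }
    message.read(traitSize);   // safe: bounded by the remaining bytes
    return true;
}
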
@ -199,7 +220,7 @@ void AvatarMixerClientData::checkSkeletonURLAgainstWhitelist(const SlaveSharedDa
|
|||
}
|
||||
}
|
||||
|
||||
uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
|
||||
uint64_t AvatarMixerClientData::getLastBroadcastTime(NLPacket::LocalID nodeUUID) const {
|
||||
// return the matching PacketSequenceNumber, or the default if we don't have it
|
||||
auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);
|
||||
if (nodeMatch != _lastBroadcastTimes.end()) {
|
||||
|
@ -208,9 +229,9 @@ uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) cons
|
|||
return 0;
|
||||
}
|
||||
|
||||
uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const {
|
||||
uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) const {
|
||||
// return the matching PacketSequenceNumber, or the default if we don't have it
|
||||
auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeUUID);
|
||||
auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeID);
|
||||
if (nodeMatch != _lastBroadcastSequenceNumbers.end()) {
|
||||
return nodeMatch->second;
|
||||
}
|
||||
|
@ -218,26 +239,49 @@ uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& node
|
|||
}
|
||||
|
||||
void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointer other) {
|
||||
ignoreOther(self.data(), other.data());
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
|
||||
if (!isRadiusIgnoring(other->getUUID())) {
|
||||
addToRadiusIgnoringSet(other->getUUID());
|
||||
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
|
||||
killPacket->write(other->getUUID().toRfc4122());
|
||||
if (self->isIgnoreRadiusEnabled()) {
|
||||
if (_isIgnoreRadiusEnabled) {
|
||||
killPacket->writePrimitive(KillAvatarReason::TheirAvatarEnteredYourBubble);
|
||||
} else {
|
||||
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
|
||||
}
|
||||
setLastBroadcastTime(other->getUUID(), 0);
|
||||
setLastBroadcastTime(other->getLocalID(), 0);
|
||||
|
||||
resetSentTraitData(other->getLocalID());
|
||||
|
||||
DependencyManager::get<NodeList>()->sendPacket(std::move(killPacket), *self);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other) {
|
||||
if (isRadiusIgnoring(other)) {
|
||||
_radiusIgnoredOthers.erase(other);
|
||||
bool AvatarMixerClientData::isRadiusIgnoring(const QUuid& other) const {
|
||||
return std::find(_radiusIgnoredOthers.cbegin(), _radiusIgnoredOthers.cend(), other) != _radiusIgnoredOthers.cend();
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::addToRadiusIgnoringSet(const QUuid& other) {
|
||||
if (!isRadiusIgnoring(other)) {
|
||||
_radiusIgnoredOthers.push_back(other);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::removeFromRadiusIgnoringSet(const QUuid& other) {
|
||||
auto ignoredOtherIter = std::find(_radiusIgnoredOthers.cbegin(), _radiusIgnoredOthers.cend(), other);
|
||||
if (ignoredOtherIter != _radiusIgnoredOthers.cend()) {
|
||||
_radiusIgnoredOthers.erase(ignoredOtherIter);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::resetSentTraitData(Node::LocalID nodeLocalID) {
|
||||
_lastSentTraitsTimestamps[nodeLocalID] = TraitsCheckTimestamp();
|
||||
_sentTraitVersions[nodeLocalID].reset();
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::readViewFrustumPacket(const QByteArray& message) {
|
||||
_currentViewFrustums.clear();
|
||||
|
||||
|
@ -287,9 +331,9 @@ AvatarMixerClientData::TraitsCheckTimestamp AvatarMixerClientData::getLastOtherA
|
|||
}
|
||||
}
|
||||
|
||||
void AvatarMixerClientData::cleanupKilledNode(const QUuid& nodeUUID, Node::LocalID nodeLocalID) {
|
||||
removeLastBroadcastSequenceNumber(nodeUUID);
|
||||
removeLastBroadcastTime(nodeUUID);
|
||||
void AvatarMixerClientData::cleanupKilledNode(const QUuid&, Node::LocalID nodeLocalID) {
|
||||
removeLastBroadcastSequenceNumber(nodeLocalID);
|
||||
removeLastBroadcastTime(nodeLocalID);
|
||||
_lastSentTraitsTimestamps.erase(nodeLocalID);
|
||||
_sentTraitVersions.erase(nodeLocalID);
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
#include <algorithm>
|
||||
#include <cfloat>
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
#include <vector>
|
||||
#include <queue>
|
||||
|
||||
#include <QtCore/QJsonObject>
|
||||
|
@ -45,17 +45,20 @@ public:
|
|||
|
||||
int parseData(ReceivedMessage& message) override;
|
||||
AvatarData& getAvatar() { return *_avatar; }
|
||||
const AvatarData& getAvatar() const { return *_avatar; }
|
||||
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
|
||||
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
|
||||
|
||||
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
|
||||
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
|
||||
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
|
||||
Q_INVOKABLE void removeLastBroadcastSequenceNumber(const QUuid& nodeUUID) { _lastBroadcastSequenceNumbers.erase(nodeUUID); }
|
||||
uint16_t getLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) const;
|
||||
void setLastBroadcastSequenceNumber(NLPacket::LocalID nodeID, uint16_t sequenceNumber)
|
||||
{ _lastBroadcastSequenceNumbers[nodeID] = sequenceNumber; }
|
||||
Q_INVOKABLE void removeLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) { _lastBroadcastSequenceNumbers.erase(nodeID); }
|
||||
bool isIgnoreRadiusEnabled() const { return _isIgnoreRadiusEnabled; }
|
||||
void setIsIgnoreRadiusEnabled(bool enabled) { _isIgnoreRadiusEnabled = enabled; }
|
||||
|
||||
uint64_t getLastBroadcastTime(const QUuid& nodeUUID) const;
|
||||
void setLastBroadcastTime(const QUuid& nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
|
||||
Q_INVOKABLE void removeLastBroadcastTime(const QUuid& nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }
|
||||
uint64_t getLastBroadcastTime(NLPacket::LocalID nodeUUID) const;
|
||||
void setLastBroadcastTime(NLPacket::LocalID nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
|
||||
Q_INVOKABLE void removeLastBroadcastTime(NLPacket::LocalID nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }
|
||||
|
||||
Q_INVOKABLE void cleanupKilledNode(const QUuid& nodeUUID, Node::LocalID nodeLocalID);
|
||||
|
||||
|
@ -89,12 +92,12 @@ public:
|
|||
|
||||
void loadJSONStats(QJsonObject& jsonObject) const;
|
||||
|
||||
glm::vec3 getPosition() const { return _avatar ? _avatar->getWorldPosition() : glm::vec3(0); }
|
||||
glm::vec3 getGlobalBoundingBoxCorner() const { return _avatar ? _avatar->getGlobalBoundingBoxCorner() : glm::vec3(0); }
|
||||
bool isRadiusIgnoring(const QUuid& other) const { return _radiusIgnoredOthers.find(other) != _radiusIgnoredOthers.end(); }
|
||||
void addToRadiusIgnoringSet(const QUuid& other) { _radiusIgnoredOthers.insert(other); }
|
||||
void removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other);
|
||||
glm::vec3 getPosition() const { return _avatar ? _avatar->getClientGlobalPosition() : glm::vec3(0); }
|
||||
bool isRadiusIgnoring(const QUuid& other) const;
|
||||
void addToRadiusIgnoringSet(const QUuid& other);
|
||||
void removeFromRadiusIgnoringSet(const QUuid& other);
|
||||
void ignoreOther(SharedNodePointer self, SharedNodePointer other);
|
||||
void ignoreOther(const Node* self, const Node* other);
|
||||
|
||||
void readViewFrustumPacket(const QByteArray& message);
|
||||
|
||||
|
@ -110,10 +113,10 @@ public:
|
|||
|
||||
const ConicalViewFrustums& getViewFrustums() const { return _currentViewFrustums; }
|
||||
|
||||
uint64_t getLastOtherAvatarEncodeTime(QUuid otherAvatar) const;
|
||||
void setLastOtherAvatarEncodeTime(const QUuid& otherAvatar, uint64_t time);
|
||||
uint64_t getLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar) const;
|
||||
void setLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar, uint64_t time);
|
||||
|
||||
QVector<JointData>& getLastOtherAvatarSentJoints(QUuid otherAvatar) { return _lastOtherAvatarSentJoints[otherAvatar]; }
|
||||
QVector<JointData>& getLastOtherAvatarSentJoints(NLPacket::LocalID otherAvatar) { return _lastOtherAvatarSentJoints[otherAvatar]; }
|
||||
|
||||
void queuePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node);
|
||||
int processPackets(const SlaveSharedData& slaveSharedData); // returns number of packets processed
|
||||
|
@ -135,6 +138,8 @@ public:
|
|||
|
||||
AvatarTraits::TraitVersions& getLastSentTraitVersions(Node::LocalID otherAvatar) { return _sentTraitVersions[otherAvatar]; }
|
||||
|
||||
void resetSentTraitData(Node::LocalID nodeID);
|
||||
|
||||
private:
|
||||
struct PacketQueue : public std::queue<QSharedPointer<ReceivedMessage>> {
|
||||
QWeakPointer<Node> node;
|
||||
|
@ -144,13 +149,13 @@ private:
|
|||
AvatarSharedPointer _avatar { new AvatarData() };
|
||||
|
||||
uint16_t _lastReceivedSequenceNumber { 0 };
|
||||
std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
|
||||
std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
|
||||
std::unordered_map<NLPacket::LocalID, uint16_t> _lastBroadcastSequenceNumbers;
|
||||
std::unordered_map<NLPacket::LocalID, uint64_t> _lastBroadcastTimes;
|
||||
|
||||
// this is a map of the last time we encoded an "other" avatar for
|
||||
// sending to "this" node
|
||||
std::unordered_map<QUuid, uint64_t> _lastOtherAvatarEncodeTime;
|
||||
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
|
||||
std::unordered_map<NLPacket::LocalID, uint64_t> _lastOtherAvatarEncodeTime;
|
||||
std::unordered_map<NLPacket::LocalID, QVector<JointData>> _lastOtherAvatarSentJoints;
|
||||
|
||||
uint64_t _identityChangeTimestamp;
|
||||
bool _avatarSessionDisplayNameMustChange{ true };
|
||||
|
@ -164,7 +169,7 @@ private:
|
|||
int _numOutOfOrderSends = 0;
|
||||
|
||||
SimpleMovingAverage _avgOtherAvatarDataRate;
|
||||
std::unordered_set<QUuid> _radiusIgnoredOthers;
|
||||
std::vector<QUuid> _radiusIgnoredOthers;
|
||||
ConicalViewFrustums _currentViewFrustums;
|
||||
|
||||
int _recentOtherAvatarsInView { 0 };
|
||||
|
@ -177,6 +182,8 @@ private:
|
|||
|
||||
std::unordered_map<Node::LocalID, TraitsCheckTimestamp> _lastSentTraitsTimestamps;
|
||||
std::unordered_map<Node::LocalID, AvatarTraits::TraitVersions> _sentTraitVersions;
|
||||
|
||||
std::atomic_bool _isIgnoreRadiusEnabled { false };
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarMixerClientData_h
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
|
||||
#include <algorithm>
|
||||
#include <random>
|
||||
#include <chrono>
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtx/norm.hpp>
|
||||
|
@ -33,6 +34,8 @@
|
|||
#include "AvatarMixer.h"
|
||||
#include "AvatarMixerClientData.h"
|
||||
|
||||
namespace chrono = std::chrono;
|
||||
|
||||
void AvatarMixerSlave::configure(ConstIter begin, ConstIter end) {
|
||||
_begin = begin;
|
||||
_end = end;
|
||||
|
@ -65,13 +68,11 @@ void AvatarMixerSlave::processIncomingPackets(const SharedNodePointer& node) {
|
|||
_stats.processIncomingPacketsElapsedTime += (end - start);
|
||||
}
|
||||
|
||||
int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode) {
|
||||
if (destinationNode->getType() == NodeType::Agent && !destinationNode->isUpstream()) {
|
||||
int AvatarMixerSlave::sendIdentityPacket(NLPacketList& packetList, const AvatarMixerClientData* nodeData, const Node& destinationNode) {
|
||||
if (destinationNode.getType() == NodeType::Agent && !destinationNode.isUpstream()) {
|
||||
QByteArray individualData = nodeData->getConstAvatarData()->identityByteArray();
|
||||
individualData.replace(0, NUM_BYTES_RFC4122_UUID, nodeData->getNodeID().toRfc4122()); // FIXME, this looks suspicious
|
||||
auto identityPackets = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
|
||||
identityPackets->write(individualData);
|
||||
DependencyManager::get<NodeList>()->sendPacketList(std::move(identityPackets), *destinationNode);
|
||||
packetList.write(individualData);
|
||||
_stats.numIdentityPackets++;
|
||||
return individualData.size();
|
||||
} else {
|
||||
|
@ -149,6 +150,7 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
|
|||
});
|
||||
|
||||
if (!isDeleted && (sentInstanceIt == sentIDValuePairs.end() || receivedVersion > sentInstanceIt->value)) {
|
||||
|
||||
// this instance version exists and has never been sent or is newer so we need to send it
|
||||
bytesWritten += sendingAvatar->packTraitInstance(traitType, instanceID, traitsPacketList, receivedVersion);
|
||||
|
||||
|
@ -158,6 +160,7 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
|
|||
sentIDValuePairs.emplace_back(instanceID, receivedVersion);
|
||||
}
|
||||
} else if (isDeleted && sentInstanceIt != sentIDValuePairs.end() && absoluteReceivedVersion > sentInstanceIt->value) {
|
||||
|
||||
// this instance version was deleted and we haven't sent the delete to this client yet
|
||||
bytesWritten += AvatarTraits::packInstancedTraitDelete(traitType, instanceID, traitsPacketList, absoluteReceivedVersion);
|
||||
|
||||
|
@ -177,6 +180,7 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
|
|||
listeningNodeData->setLastOtherAvatarTraitsSendPoint(otherNodeLocalID, timeOfLastTraitsChange);
|
||||
}
|
||||
|
||||
|
||||
return bytesWritten;
|
||||
}
|
||||
|
||||
|
@ -209,7 +213,18 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
|
|||
_stats.jobElapsedTime += (end - start);
|
||||
}
|
||||
|
||||
AABox computeBubbleBox(const AvatarData& avatar, float bubbleExpansionFactor) {
|
||||
AABox box = avatar.getGlobalBoundingBox();
|
||||
glm::vec3 scale = box.getScale();
|
||||
scale *= bubbleExpansionFactor;
|
||||
const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3);
|
||||
scale = glm::max(scale, MIN_BUBBLE_SCALE);
|
||||
box.setScaleStayCentered(glm::max(scale, MIN_BUBBLE_SCALE));
|
||||
return box;
|
||||
}
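
computeBubbleBox() grows the avatar's global bounding box by the expansion factor and clamps the result to a minimum bubble size before the overlap test further down. A short sketch of just the scale-and-clamp step on bare glm vectors follows; the real function also recenters the box with setScaleStayCentered().

// Illustrative sketch only.
#include <glm/glm.hpp>

glm::vec3 expandedBubbleScale(const glm::vec3& boundingBoxScale, float expansionFactor) {
    const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3f);   // same floor as the code above
    glm::vec3 scale = boundingBoxScale * expansionFactor;
    return glm::max(scale, MIN_BUBBLE_SCALE);              // component-wise clamp
}
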
|
||||
|
||||
void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node) {
|
||||
const Node* destinationNode = node.data();
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
|
@ -220,7 +235,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
|
||||
_stats.nodesBroadcastedTo++;
|
||||
|
||||
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(node->getLinkedData());
|
||||
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(destinationNode->getLinkedData());
|
||||
|
||||
nodeData->resetInViewStats();
|
||||
|
||||
|
@ -230,24 +245,19 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
// reset the internal state for correct random number distribution
|
||||
distribution.reset();
|
||||
|
||||
// Estimate number to sort on number sent last frame (with min. of 20).
|
||||
const int numToSendEst = std::max(int(nodeData->getNumAvatarsSentLastFrame() * 2.5f), 20);
|
||||
|
||||
// reset the number of sent avatars
|
||||
nodeData->resetNumAvatarsSentLastFrame();
|
||||
|
||||
// keep a counter of the number of considered avatars
|
||||
int numOtherAvatars = 0;
|
||||
|
||||
// keep track of outbound data rate specifically for avatar data
|
||||
int numAvatarDataBytes = 0;
|
||||
int identityBytesSent = 0;
|
||||
int traitBytesSent = 0;
|
||||
|
||||
// max number of avatarBytes per frame
|
||||
auto maxAvatarBytesPerFrame = (_maxKbpsPerNode * BYTES_PER_KILOBIT) / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;
|
||||
|
||||
// FIXME - find a way to not send the sessionID for every avatar
|
||||
int minimumBytesPerAvatar = AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID;
|
||||
|
||||
int overBudgetAvatars = 0;
|
||||
int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);
|
||||
|
||||
// keep track of the number of other avatars held back in this frame
|
||||
int numAvatarsHeldBack = 0;
|
||||
|
@ -260,66 +270,34 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
bool PALIsOpen = nodeData->getRequestsDomainListData();
|
||||
|
||||
// When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
|
||||
bool getsAnyIgnored = PALIsOpen && node->getCanKick();
|
||||
bool getsAnyIgnored = PALIsOpen && destinationNode->getCanKick();
|
||||
|
||||
if (PALIsOpen) {
|
||||
// Increase minimumBytesPerAvatar if the PAL is open
|
||||
minimumBytesPerAvatar += sizeof(AvatarDataPacket::AvatarGlobalPosition) +
|
||||
sizeof(AvatarDataPacket::AudioLoudness);
|
||||
}
|
||||
// Bandwidth allowance for data that must be sent.
|
||||
int minimumBytesPerAvatar = PALIsOpen ? AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID +
|
||||
sizeof(AvatarDataPacket::AvatarGlobalPosition) + sizeof(AvatarDataPacket::AudioLoudness) : 0;
|
||||
|
||||
// setup a PacketList for the avatarPackets
|
||||
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
|
||||
|
||||
// Define the minimum bubble size
|
||||
static const glm::vec3 minBubbleSize = avatar.getSensorToWorldScale() * glm::vec3(0.3f, 1.3f, 0.3f);
|
||||
// Define the scale of the box for the current node
|
||||
glm::vec3 nodeBoxScale = (nodeData->getPosition() - nodeData->getGlobalBoundingBoxCorner()) * 2.0f * avatar.getSensorToWorldScale();
|
||||
// Set up the bounding box for the current node
|
||||
AABox nodeBox(nodeData->getGlobalBoundingBoxCorner(), nodeBoxScale);
|
||||
// Clamp the size of the bounding box to a minimum scale
|
||||
if (glm::any(glm::lessThan(nodeBoxScale, minBubbleSize))) {
|
||||
nodeBox.setScaleStayCentered(minBubbleSize);
|
||||
}
|
||||
// Quadruple the scale of both bounding boxes
|
||||
nodeBox.embiggen(4.0f);
|
||||
|
||||
|
||||
// setup list of AvatarData as well as maps to map between the AvatarData and the original nodes
|
||||
std::vector<AvatarSharedPointer> avatarsToSort;
|
||||
std::unordered_map<AvatarSharedPointer, SharedNodePointer> avatarDataToNodes;
|
||||
std::unordered_map<QUuid, uint64_t> avatarEncodeTimes;
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& otherNode) {
|
||||
// make sure this is an agent that we have avatar data for before considering it for inclusion
|
||||
if (otherNode->getType() == NodeType::Agent
|
||||
&& otherNode->getLinkedData()) {
|
||||
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
|
||||
|
||||
AvatarSharedPointer otherAvatar = otherNodeData->getAvatarSharedPointer();
|
||||
avatarsToSort.push_back(otherAvatar);
|
||||
avatarDataToNodes[otherAvatar] = otherNode;
|
||||
QUuid id = otherAvatar->getSessionUUID();
|
||||
avatarEncodeTimes[id] = nodeData->getLastOtherAvatarEncodeTime(id);
|
||||
}
|
||||
});
|
||||
// compute node bounding box
|
||||
const float MY_AVATAR_BUBBLE_EXPANSION_FACTOR = 4.0f; // magic number determined empirically
|
||||
AABox nodeBox = computeBubbleBox(avatar, MY_AVATAR_BUBBLE_EXPANSION_FACTOR);
|
||||
|
||||
class SortableAvatar: public PrioritySortUtil::Sortable {
|
||||
public:
|
||||
SortableAvatar() = delete;
|
||||
SortableAvatar(const AvatarSharedPointer& avatar, uint64_t lastEncodeTime)
|
||||
: _avatar(avatar), _lastEncodeTime(lastEncodeTime) {}
|
||||
glm::vec3 getPosition() const override { return _avatar->getWorldPosition(); }
|
||||
SortableAvatar(const AvatarData* avatar, const Node* avatarNode, uint64_t lastEncodeTime)
|
||||
: _avatar(avatar), _node(avatarNode), _lastEncodeTime(lastEncodeTime) {}
|
||||
glm::vec3 getPosition() const override { return _avatar->getClientGlobalPosition(); }
|
||||
float getRadius() const override {
|
||||
glm::vec3 nodeBoxHalfScale = (_avatar->getWorldPosition() - _avatar->getGlobalBoundingBoxCorner() * _avatar->getSensorToWorldScale());
|
||||
return glm::max(nodeBoxHalfScale.x, glm::max(nodeBoxHalfScale.y, nodeBoxHalfScale.z));
|
||||
glm::vec3 nodeBoxScale = _avatar->getGlobalBoundingBox().getScale();
|
||||
return 0.5f * glm::max(nodeBoxScale.x, glm::max(nodeBoxScale.y, nodeBoxScale.z));
|
||||
}
|
||||
uint64_t getTimestamp() const override {
|
||||
return _lastEncodeTime;
|
||||
}
|
||||
AvatarSharedPointer getAvatar() const { return _avatar; }
|
||||
const Node* getNode() const { return _node; }
|
||||
|
||||
private:
|
||||
AvatarSharedPointer _avatar;
|
||||
const AvatarData* _avatar;
|
||||
const Node* _node;
|
||||
uint64_t _lastEncodeTime;
|
||||
};
|
||||
|
||||
|
@ -329,15 +307,18 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
AvatarData::_avatarSortCoefficientSize,
|
||||
AvatarData::_avatarSortCoefficientCenter,
|
||||
AvatarData::_avatarSortCoefficientAge);
|
||||
sortedAvatars.reserve(_end - _begin);
|
||||
|
||||
// ignore or sort
|
||||
const AvatarSharedPointer& thisAvatar = nodeData->getAvatarSharedPointer();
|
||||
for (const auto& avatar : avatarsToSort) {
|
||||
if (avatar == thisAvatar) {
|
||||
// don't echo updates to self
|
||||
for (auto listedNode = _begin; listedNode != _end; ++listedNode) {
|
||||
Node* otherNodeRaw = (*listedNode).data();
|
||||
if (otherNodeRaw->getType() != NodeType::Agent
|
||||
|| !otherNodeRaw->getLinkedData()
|
||||
|| otherNodeRaw == destinationNode) {
|
||||
continue;
|
||||
}
|
||||
|
||||
auto avatarNode = otherNodeRaw;
|
||||
|
||||
bool shouldIgnore = false;
|
||||
// We ignore other nodes for a couple of reasons:
|
||||
// 1) ignore bubbles and ignore specific node
|
||||
|
@ -345,53 +326,38 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
// happen if for example the avatar is connected on a desktop and sending
|
||||
// updates at ~30hz. So every 3 frames we skip a frame.
|
||||
|
||||
auto avatarNode = avatarDataToNodes[avatar];
|
||||
assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map
|
||||
|
||||
const AvatarMixerClientData* avatarNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
|
||||
assert(avatarNodeData); // we can't have gotten here without avatarNode having valid data
|
||||
const AvatarMixerClientData* avatarClientNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
|
||||
assert(avatarClientNodeData); // we can't have gotten here without avatarNode having valid data
|
||||
quint64 startIgnoreCalculation = usecTimestampNow();
|
||||
|
||||
// make sure we have data for this avatar, that it isn't the same node,
|
||||
// and isn't an avatar that the viewing node has ignored
|
||||
// or that has ignored the viewing node
|
||||
if (!avatarNode->getLinkedData()
|
||||
|| avatarNode->getUUID() == node->getUUID()
|
||||
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|
||||
|| (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
|
||||
if ((destinationNode->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|
||||
|| (avatarNode->isIgnoringNodeWithID(destinationNode->getUUID()) && !getsAnyIgnored)) {
|
||||
shouldIgnore = true;
|
||||
} else {
|
||||
// Check to see if the space bubble is enabled
|
||||
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
|
||||
if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
|
||||
float sensorToWorldScale = avatarNodeData->getAvatarSharedPointer()->getSensorToWorldScale();
|
||||
// Define the scale of the box for the current other node
|
||||
glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f * sensorToWorldScale;
|
||||
// Set up the bounding box for the current other node
|
||||
AABox otherNodeBox(avatarNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
|
||||
// Clamp the size of the bounding box to a minimum scale
|
||||
if (glm::any(glm::lessThan(otherNodeBoxScale, minBubbleSize))) {
|
||||
otherNodeBox.setScaleStayCentered(minBubbleSize);
|
||||
}
|
||||
// Change the scale of both bounding boxes
|
||||
// (This is an arbitrary number determined empirically)
|
||||
otherNodeBox.embiggen(2.4f);
|
||||
|
||||
if (nodeData->isIgnoreRadiusEnabled() || (avatarClientNodeData->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
|
||||
// Perform the collision check between the two bounding boxes
|
||||
AABox otherNodeBox = avatarClientNodeData->getAvatar().getDefaultBubbleBox();
|
||||
if (nodeBox.touches(otherNodeBox)) {
|
||||
nodeData->ignoreOther(node, avatarNode);
|
||||
nodeData->ignoreOther(destinationNode, avatarNode);
|
||||
shouldIgnore = !getsAnyIgnored;
|
||||
}
|
||||
}
|
||||
// Not close enough to ignore
|
||||
if (!shouldIgnore) {
|
||||
nodeData->removeFromRadiusIgnoringSet(node, avatarNode->getUUID());
|
||||
nodeData->removeFromRadiusIgnoringSet(avatarNode->getUUID());
|
||||
}
|
||||
}
|
||||
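The space-bubble check above boils down to an axis-aligned bounding-box overlap test: each avatar's box is built from a corner and a scale, clamped to a minimum size, expanded by a fudge factor, and two avatars ignore each other while their boxes touch. The sketch below restates that with plain glm types; the Box struct is an assumed stand-in for the engine's AABox, not its real interface.

// Illustrative AABB "bubble" test, assuming corner = minimum corner and
// scale = edge lengths; expansion keeps the box centered while growing it.
#include <glm/glm.hpp>

struct Box {
    glm::vec3 corner; // minimum corner
    glm::vec3 scale;  // edge lengths

    void clampToMinimumScale(const glm::vec3& minScale) {
        glm::vec3 grow = glm::max(minScale - scale, glm::vec3(0.0f));
        corner -= 0.5f * grow; // grow symmetrically so the center stays put
        scale += grow;
    }

    void expand(float factor) {
        glm::vec3 grow = scale * (factor - 1.0f);
        corner -= 0.5f * grow;
        scale += grow;
    }

    bool touches(const Box& other) const {
        glm::vec3 aMax = corner + scale;
        glm::vec3 bMax = other.corner + other.scale;
        return glm::all(glm::lessThanEqual(corner, bMax)) &&
               glm::all(glm::lessThanEqual(other.corner, aMax));
    }
};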
|
||||
if (!shouldIgnore) {
|
||||
AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
|
||||
AvatarDataSequenceNumber lastSeqFromSender = avatarNodeData->getLastReceivedSequenceNumber();
|
||||
AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getLocalID());
|
||||
AvatarDataSequenceNumber lastSeqFromSender = avatarClientNodeData->getLastReceivedSequenceNumber();
|
||||
|
||||
// FIXME - This code does appear to be working. But it seems brittle.
|
||||
// It supports determining if the frame of data for this "other"
|
||||
|
@ -406,6 +372,11 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
|
||||
++numAvatarsHeldBack;
|
||||
shouldIgnore = true;
|
||||
} else if (lastSeqFromSender == 0) {
|
||||
// We have not yet received any data about this avatar. Ignore it for now
|
||||
// This is important for Agent scripts that are not avatars
|
||||
// so that they don't appear to be an avatar at the origin
|
||||
shouldIgnore = true;
|
||||
} else if (lastSeqFromSender - lastSeqToReceiver > 1) {
|
||||
// this is a skip - we still send the packet but capture the presence of the skip so we see it happening
|
||||
++numAvatarsWithSkippedFrames;
|
||||
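The sequence-number comparison in this hunk decides whether an avatar is held back for a frame: nothing new has arrived from the sender, nothing has ever arrived, or the sender has jumped ahead by more than one frame (a skip that is still sent but counted). A hedged, self-contained restatement of that decision, with illustrative names rather than the mixer's actual types:

// Sketch of the hold-back decision on 16-bit sequence numbers (wraparound-safe).
#include <cstdint>

struct HoldBackResult { bool ignore; bool skippedFrame; };

HoldBackResult evaluateSequence(uint16_t lastSeqToReceiver, uint16_t lastSeqFromSender) {
    if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
        return { true, false };  // nothing new from this sender; hold it back this frame
    }
    if (lastSeqFromSender == 0) {
        return { true, false };  // no data received yet (e.g. non-avatar Agent scripts)
    }
    if (uint16_t(lastSeqFromSender - lastSeqToReceiver) > 1) {
        return { false, true };  // still send, but record the skipped frame
    }
    return { false, false };
}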
|
@ -416,12 +387,10 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
|
||||
if (!shouldIgnore) {
|
||||
// sort this one for later
|
||||
uint64_t lastEncodeTime = 0;
|
||||
std::unordered_map<QUuid, uint64_t>::const_iterator itr = avatarEncodeTimes.find(avatar->getSessionUUID());
|
||||
if (itr != avatarEncodeTimes.end()) {
|
||||
lastEncodeTime = itr->second;
|
||||
}
|
||||
sortedAvatars.push(SortableAvatar(avatar, lastEncodeTime));
|
||||
const AvatarData* avatarNodeData = avatarClientNodeData->getConstAvatarData();
|
||||
auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNode->getLocalID());
|
||||
|
||||
sortedAvatars.push(SortableAvatar(avatarNodeData, avatarNode, lastEncodeTime));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -429,144 +398,125 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
|
||||
int remainingAvatars = (int)sortedAvatars.size();
|
||||
auto traitsPacketList = NLPacketList::create(PacketType::BulkAvatarTraits, QByteArray(), true, true);
|
||||
while (!sortedAvatars.empty()) {
|
||||
const auto avatarData = sortedAvatars.top().getAvatar();
|
||||
sortedAvatars.pop();
|
||||
remainingAvatars--;
|
||||
auto avatarPacket = NLPacket::create(PacketType::BulkAvatarData);
|
||||
const int avatarPacketCapacity = avatarPacket->getPayloadCapacity();
|
||||
int avatarSpaceAvailable = avatarPacketCapacity;
|
||||
int numPacketsSent = 0;
|
||||
auto identityPacketList = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
|
||||
|
||||
const auto& sortedAvatarVector = sortedAvatars.getSortedVector(numToSendEst);
|
||||
for (const auto& sortedAvatar : sortedAvatarVector) {
|
||||
const Node* otherNode = sortedAvatar.getNode();
|
||||
auto lastEncodeForOther = sortedAvatar.getTimestamp();
|
||||
|
||||
auto otherNode = avatarDataToNodes[avatarData];
|
||||
assert(otherNode); // we can't have gotten here without the avatarData being a valid key in the map
|
||||
|
||||
// NOTE: Here's where we determine if we are over budget and drop to bare minimum data
|
||||
AvatarData::AvatarDataDetail detail = AvatarData::NoData;
|
||||
|
||||
// NOTE: Here's where we determine if we are over budget and drop remaining avatars,
|
||||
// or send minimal avatar data in the uncommon case of PALIsOpen.
|
||||
int minimRemainingAvatarBytes = minimumBytesPerAvatar * remainingAvatars;
|
||||
bool overBudget = (identityBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes) > maxAvatarBytesPerFrame;
|
||||
if (overBudget) {
|
||||
if (PALIsOpen) {
|
||||
_stats.overBudgetAvatars++;
|
||||
detail = AvatarData::PALMinimum;
|
||||
} else {
|
||||
_stats.overBudgetAvatars += remainingAvatars;
|
||||
break;
|
||||
}
|
||||
}
|
||||
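The over-budget branch above is a simple projection: if even minimum-size payloads for the remaining avatars would exceed the per-frame byte budget, the mixer drops to PAL-minimum detail (when the People panel is open) or stops sending further avatars this frame. A rough sketch of that arithmetic, with illustrative parameter names:

#include <cstdint>

// Returns true when the projected cost of the rest of the frame exceeds the budget.
bool wouldExceedBudget(int identityBytesSent, int avatarDataBytesSent, int minimumBytesPerAvatar,
                       int remainingAvatars, int maxAvatarBytesPerFrame) {
    int64_t projected = int64_t(identityBytesSent) + avatarDataBytesSent
                      + int64_t(minimumBytesPerAvatar) * remainingAvatars;
    return projected > maxAvatarBytesPerFrame;
}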
|
||||
quint64 startAvatarDataPacking = usecTimestampNow();
|
||||
|
||||
++numOtherAvatars;
|
||||
auto startAvatarDataPacking = chrono::high_resolution_clock::now();
|
||||
|
||||
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
|
||||
const AvatarData* otherAvatar = otherNodeData->getConstAvatarData();
|
||||
|
||||
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
|
||||
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
|
||||
if (otherAvatar->hasProcessedFirstIdentity()
|
||||
&& nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
|
||||
identityBytesSent += sendIdentityPacket(otherNodeData, node);
|
||||
// Typically all out-of-view avatars but such avatars' priorities will rise with time:
|
||||
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;
|
||||
|
||||
// remember the last time we sent identity details about this other node to the receiver
|
||||
nodeData->setLastBroadcastTime(otherNode->getUUID(), usecTimestampNow());
|
||||
}
|
||||
|
||||
// determine if avatar is in view which determines how much data to send
|
||||
glm::vec3 otherPosition = otherAvatar->getClientGlobalPosition();
|
||||
glm::vec3 otherNodeBoxScale = (otherPosition - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f * otherAvatar->getSensorToWorldScale();
|
||||
AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
|
||||
bool isInView = nodeData->otherAvatarInView(otherNodeBox);
|
||||
|
||||
// start a new segment in the PacketList for this avatar
|
||||
avatarPacketList->startSegment();
|
||||
|
||||
AvatarData::AvatarDataDetail detail;
|
||||
|
||||
if (overBudget) {
|
||||
overBudgetAvatars++;
|
||||
_stats.overBudgetAvatars++;
|
||||
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
|
||||
} else if (!isInView) {
|
||||
if (isLowerPriority) {
|
||||
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
|
||||
nodeData->incrementAvatarOutOfView();
|
||||
} else {
|
||||
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
|
||||
? AvatarData::SendAllData : AvatarData::CullSmallData;
|
||||
} else if (!overBudget) {
|
||||
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
|
||||
nodeData->incrementAvatarInView();
|
||||
}
|
||||
|
||||
bool includeThisAvatar = true;
|
||||
auto lastEncodeForOther = nodeData->getLastOtherAvatarEncodeTime(otherNode->getUUID());
|
||||
QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());
|
||||
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
|
||||
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
|
||||
if (otherAvatar->hasProcessedFirstIdentity()
|
||||
&& nodeData->getLastBroadcastTime(otherNode->getLocalID()) <= otherNodeData->getIdentityChangeTimestamp()) {
|
||||
identityBytesSent += sendIdentityPacket(*identityPacketList, otherNodeData, *destinationNode);
|
||||
|
||||
lastSentJointsForOther.resize(otherAvatar->getJointCount());
|
||||
|
||||
bool distanceAdjust = true;
|
||||
glm::vec3 viewerPosition = myPosition;
|
||||
AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
|
||||
bool dropFaceTracking = false;
|
||||
|
||||
quint64 start = usecTimestampNow();
|
||||
QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
|
||||
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition,
|
||||
&lastSentJointsForOther);
|
||||
quint64 end = usecTimestampNow();
|
||||
_stats.toByteArrayElapsedTime += (end - start);
|
||||
|
||||
static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;
|
||||
if (bytes.size() > maxAvatarDataBytes) {
|
||||
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
|
||||
<< "resulted in very large buffer of" << bytes.size() << "bytes - dropping facial data";
|
||||
|
||||
dropFaceTracking = true; // first try dropping the facial data
|
||||
bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
|
||||
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
|
||||
|
||||
if (bytes.size() > maxAvatarDataBytes) {
|
||||
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
|
||||
<< "without facial data resulted in very large buffer of" << bytes.size()
|
||||
<< "bytes - reducing to MinimumData";
|
||||
bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
|
||||
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
|
||||
|
||||
if (bytes.size() > maxAvatarDataBytes) {
|
||||
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
|
||||
<< "MinimumData resulted in very large buffer of" << bytes.size()
|
||||
<< "bytes - refusing to send avatar";
|
||||
includeThisAvatar = false;
|
||||
}
|
||||
// remember the last time we sent identity details about this other node to the receiver
|
||||
nodeData->setLastBroadcastTime(otherNode->getLocalID(), usecTimestampNow());
|
||||
}
|
||||
}
|
||||
|
||||
if (includeThisAvatar) {
|
||||
numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
|
||||
numAvatarDataBytes += avatarPacketList->write(bytes);
|
||||
QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getLocalID());
|
||||
|
||||
if (detail != AvatarData::NoData) {
|
||||
_stats.numOthersIncluded++;
|
||||
const bool distanceAdjust = true;
|
||||
const bool dropFaceTracking = false;
|
||||
AvatarDataPacket::SendStatus sendStatus;
|
||||
sendStatus.sendUUID = true;
|
||||
|
||||
// increment the number of avatars sent to this receiver
|
||||
nodeData->incrementNumAvatarsSentLastFrame();
|
||||
do {
|
||||
auto startSerialize = chrono::high_resolution_clock::now();
|
||||
QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
|
||||
sendStatus, dropFaceTracking, distanceAdjust, myPosition,
|
||||
&lastSentJointsForOther, avatarSpaceAvailable);
|
||||
auto endSerialize = chrono::high_resolution_clock::now();
|
||||
_stats.toByteArrayElapsedTime +=
|
||||
(quint64)chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();
|
||||
|
||||
// set the last sent sequence number for this sender on the receiver
|
||||
nodeData->setLastBroadcastSequenceNumber(otherNode->getUUID(),
|
||||
otherNodeData->getLastReceivedSequenceNumber());
|
||||
nodeData->setLastOtherAvatarEncodeTime(otherNode->getUUID(), usecTimestampNow());
|
||||
avatarPacket->write(bytes);
|
||||
avatarSpaceAvailable -= bytes.size();
|
||||
numAvatarDataBytes += bytes.size();
|
||||
if (!sendStatus || avatarSpaceAvailable < (int)AvatarDataPacket::MIN_BULK_PACKET_SIZE) {
|
||||
// Weren't able to fit everything.
|
||||
nodeList->sendPacket(std::move(avatarPacket), *destinationNode);
|
||||
++numPacketsSent;
|
||||
avatarPacket = NLPacket::create(PacketType::BulkAvatarData);
|
||||
avatarSpaceAvailable = avatarPacketCapacity;
|
||||
}
|
||||
} else {
|
||||
// TODO? this avatar is not included now, and will probably not be included next frame.
|
||||
// It would be nice if we could tweak its future sort priority to put it at the back of the list.
|
||||
} while (!sendStatus);
|
||||
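The do/while above keeps serializing one avatar into the current BulkAvatarData packet until everything fits, flushing and starting a fresh packet whenever the remaining room drops below a minimum useful size. The loop below sketches the same packing pattern with hypothetical helper types (not the NLPacket API):

// Simplified packet-filling loop: append blobs to a bounded packet, flush when
// there is no longer enough room for another useful segment.
#include <cstddef>
#include <functional>
#include <vector>

using Blob = std::vector<char>;

void packAndSend(const std::vector<Blob>& blobs, size_t packetCapacity, size_t minUsefulSize,
                 const std::function<void(const Blob&)>& sendPacket) {
    Blob packet;
    packet.reserve(packetCapacity);
    for (const Blob& blob : blobs) {
        packet.insert(packet.end(), blob.begin(), blob.end());
        if (packet.size() + minUsefulSize > packetCapacity) {
            sendPacket(packet); // not enough room left for another useful segment - flush
            packet.clear();
        }
    }
    if (!packet.empty()) {
        sendPacket(packet);     // close out the final partial packet
    }
}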
|
||||
if (detail != AvatarData::NoData) {
|
||||
_stats.numOthersIncluded++;
|
||||
|
||||
// increment the number of avatars sent to this receiver
|
||||
nodeData->incrementNumAvatarsSentLastFrame();
|
||||
|
||||
// set the last sent sequence number for this sender on the receiver
|
||||
nodeData->setLastBroadcastSequenceNumber(otherNode->getLocalID(),
|
||||
otherNodeData->getLastReceivedSequenceNumber());
|
||||
nodeData->setLastOtherAvatarEncodeTime(otherNode->getLocalID(), usecTimestampNow());
|
||||
}
|
||||
|
||||
avatarPacketList->endSegment();
|
||||
|
||||
quint64 endAvatarDataPacking = usecTimestampNow();
|
||||
_stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
|
||||
auto endAvatarDataPacking = chrono::high_resolution_clock::now();
|
||||
_stats.avatarDataPackingElapsedTime +=
|
||||
(quint64) chrono::duration_cast<chrono::microseconds>(endAvatarDataPacking - startAvatarDataPacking).count();
|
||||
|
||||
// use helper to add any changed traits to our packet list
|
||||
traitBytesSent += addChangedTraitsToBulkPacket(nodeData, otherNodeData, *traitsPacketList);
|
||||
remainingAvatars--;
|
||||
}
|
||||
|
||||
traitsPacketList->getDataSize();
|
||||
if (nodeData->getNumAvatarsSentLastFrame() > numToSendEst) {
|
||||
qCWarning(avatars) << "More avatars sent than upper estimate" << nodeData->getNumAvatarsSentLastFrame()
|
||||
<< " / " << numToSendEst;
|
||||
}
|
||||
|
||||
quint64 startPacketSending = usecTimestampNow();
|
||||
|
||||
// close the current packet so that we're always sending something
|
||||
avatarPacketList->closeCurrentPacket(true);
|
||||
if (avatarPacket->getPayloadSize() != 0) {
|
||||
nodeList->sendPacket(std::move(avatarPacket), *destinationNode);
|
||||
++numPacketsSent;
|
||||
}
|
||||
|
||||
_stats.numPacketsSent += (int)avatarPacketList->getNumPackets();
|
||||
_stats.numPacketsSent += numPacketsSent;
|
||||
_stats.numBytesSent += numAvatarDataBytes;
|
||||
|
||||
// send the avatar data PacketList
|
||||
nodeList->sendPacketList(std::move(avatarPacketList), *node);
|
||||
|
||||
// record the bytes sent for other avatar data in the AvatarMixerClientData
|
||||
nodeData->recordSentAvatarData(numAvatarDataBytes);
|
||||
|
||||
|
@ -575,7 +525,13 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
|
||||
if (traitsPacketList->getNumPackets() >= 1) {
|
||||
// send the traits packet list
|
||||
nodeList->sendPacketList(std::move(traitsPacketList), *node);
|
||||
nodeList->sendPacketList(std::move(traitsPacketList), *destinationNode);
|
||||
}
|
||||
|
||||
// Send any AvatarIdentity packets:
|
||||
identityPacketList->closeCurrentPacket();
|
||||
if (identityBytesSent > 0) {
|
||||
nodeList->sendPacketList(std::move(identityPacketList), *destinationNode);
|
||||
}
|
||||
|
||||
// record the number of avatars held back this frame
|
||||
|
@ -623,20 +579,20 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
|
|||
// so we always send a full update for this avatar
|
||||
|
||||
quint64 start = usecTimestampNow();
|
||||
AvatarDataPacket::HasFlags flagsOut;
|
||||
AvatarDataPacket::SendStatus sendStatus;
|
||||
|
||||
QVector<JointData> emptyLastJointSendData { otherAvatar->getJointCount() };
|
||||
|
||||
QByteArray avatarByteArray = otherAvatar->toByteArray(AvatarData::SendAllData, 0, emptyLastJointSendData,
|
||||
flagsOut, false, false, glm::vec3(0), nullptr);
|
||||
sendStatus, false, false, glm::vec3(0), nullptr, 0);
|
||||
quint64 end = usecTimestampNow();
|
||||
_stats.toByteArrayElapsedTime += (end - start);
|
||||
|
||||
auto lastBroadcastTime = nodeData->getLastBroadcastTime(agentNode->getUUID());
|
||||
auto lastBroadcastTime = nodeData->getLastBroadcastTime(agentNode->getLocalID());
|
||||
if (lastBroadcastTime <= agentNodeData->getIdentityChangeTimestamp()
|
||||
|| (start - lastBroadcastTime) >= REBROADCAST_IDENTITY_TO_DOWNSTREAM_EVERY_US) {
|
||||
sendReplicatedIdentityPacket(*agentNode, agentNodeData, *node);
|
||||
nodeData->setLastBroadcastTime(agentNode->getUUID(), start);
|
||||
nodeData->setLastBroadcastTime(agentNode->getLocalID(), start);
|
||||
}
|
||||
|
||||
// figure out how large our avatar byte array can be to fit in the packet list
|
||||
|
@ -654,14 +610,14 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
|
|||
<< "-" << avatarByteArray.size() << "bytes";
|
||||
|
||||
avatarByteArray = otherAvatar->toByteArray(AvatarData::SendAllData, 0, emptyLastJointSendData,
|
||||
flagsOut, true, false, glm::vec3(0), nullptr);
|
||||
sendStatus, true, false, glm::vec3(0), nullptr, 0);
|
||||
|
||||
if (avatarByteArray.size() > maxAvatarByteArraySize) {
|
||||
qCWarning(avatars) << "Replicated avatar data without facial data still too large for"
|
||||
<< otherAvatar->getSessionUUID() << "-" << avatarByteArray.size() << "bytes";
|
||||
|
||||
avatarByteArray = otherAvatar->toByteArray(AvatarData::MinimumData, 0, emptyLastJointSendData,
|
||||
flagsOut, true, false, glm::vec3(0), nullptr);
|
||||
sendStatus, true, false, glm::vec3(0), nullptr, 0);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -670,7 +626,7 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
|
|||
nodeData->incrementNumAvatarsSentLastFrame();
|
||||
|
||||
// set the last sent sequence number for this sender on the receiver
|
||||
nodeData->setLastBroadcastSequenceNumber(agentNode->getUUID(),
|
||||
nodeData->setLastBroadcastSequenceNumber(agentNode->getLocalID(),
|
||||
agentNodeData->getLastReceivedSequenceNumber());
|
||||
|
||||
// increment the number of avatars sent to this receiver
|
||||
|
|
|
@ -101,7 +101,7 @@ public:
|
|||
void harvestStats(AvatarMixerSlaveStats& stats);
|
||||
|
||||
private:
|
||||
int sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode);
|
||||
int sendIdentityPacket(NLPacketList& packet, const AvatarMixerClientData* nodeData, const Node& destinationNode);
|
||||
int sendReplicatedIdentityPacket(const Node& agentNode, const AvatarMixerClientData* nodeData, const Node& destinationNode);
|
||||
|
||||
qint64 addChangedTraitsToBulkPacket(AvatarMixerClientData* listeningNodeData,
|
||||
|
|
|
@ -69,10 +69,10 @@ void ScriptableAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
|
|||
AvatarData::setSkeletonModelURL(skeletonModelURL);
|
||||
}
|
||||
|
||||
static AnimPose composeAnimPose(const FBXJoint& fbxJoint, const glm::quat rotation, const glm::vec3 translation) {
|
||||
static AnimPose composeAnimPose(const HFMJoint& joint, const glm::quat rotation, const glm::vec3 translation) {
|
||||
glm::mat4 translationMat = glm::translate(translation);
|
||||
glm::mat4 rotationMat = glm::mat4_cast(fbxJoint.preRotation * rotation * fbxJoint.postRotation);
|
||||
glm::mat4 finalMat = translationMat * fbxJoint.preTransform * rotationMat * fbxJoint.postTransform;
|
||||
glm::mat4 rotationMat = glm::mat4_cast(joint.preRotation * rotation * joint.postRotation);
|
||||
glm::mat4 finalMat = translationMat * joint.preTransform * rotationMat * joint.postTransform;
|
||||
return AnimPose(finalMat);
|
||||
}
|
||||
|
||||
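composeAnimPose builds each joint's final transform by sandwiching the animated rotation between the joint's pre/post rotations and pre/post transforms. The sketch below shows the same composition with plain glm types; the JointFrames fields are assumed to mirror the HFMJoint members used above and are not the actual class.

// final = T(translation) * preTransform * R(pre * anim * post) * postTransform
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>

struct JointFrames {
    glm::quat preRotation, postRotation;
    glm::mat4 preTransform, postTransform;
};

glm::mat4 composeJointMatrix(const JointFrames& joint, const glm::quat& animRotation,
                             const glm::vec3& animTranslation) {
    glm::mat4 translationMat = glm::translate(glm::mat4(1.0f), animTranslation);
    glm::mat4 rotationMat = glm::mat4_cast(joint.preRotation * animRotation * joint.postRotation);
    return translationMat * joint.preTransform * rotationMat * joint.postTransform;
}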
|
@ -84,7 +84,7 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
// Run animation
|
||||
if (_animation && _animation->isLoaded() && _animation->getFrames().size() > 0 && !_bind.isNull() && _bind->isLoaded()) {
|
||||
if (!_animSkeleton) {
|
||||
_animSkeleton = std::make_shared<AnimSkeleton>(_bind->getGeometry());
|
||||
_animSkeleton = std::make_shared<AnimSkeleton>(_bind->getHFMModel());
|
||||
}
|
||||
float currentFrame = _animationDetails.currentFrame + deltatime * _animationDetails.fps;
|
||||
if (_animationDetails.loop || currentFrame < _animationDetails.lastFrame) {
|
||||
|
@ -93,7 +93,7 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
}
|
||||
_animationDetails.currentFrame = currentFrame;
|
||||
|
||||
const QVector<FBXJoint>& modelJoints = _bind->getGeometry().joints;
|
||||
const QVector<HFMJoint>& modelJoints = _bind->getHFMModel().joints;
|
||||
QStringList animationJointNames = _animation->getJointNames();
|
||||
|
||||
const int nJoints = modelJoints.size();
|
||||
|
@ -102,8 +102,8 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
}
|
||||
|
||||
const int frameCount = _animation->getFrames().size();
|
||||
const FBXAnimationFrame& floorFrame = _animation->getFrames().at((int)glm::floor(currentFrame) % frameCount);
|
||||
const FBXAnimationFrame& ceilFrame = _animation->getFrames().at((int)glm::ceil(currentFrame) % frameCount);
|
||||
const HFMAnimationFrame& floorFrame = _animation->getFrames().at((int)glm::floor(currentFrame) % frameCount);
|
||||
const HFMAnimationFrame& ceilFrame = _animation->getFrames().at((int)glm::ceil(currentFrame) % frameCount);
|
||||
const float frameFraction = glm::fract(currentFrame);
|
||||
std::vector<AnimPose> poses = _animSkeleton->getRelativeDefaultPoses();
|
||||
|
||||
|
@ -113,7 +113,7 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
const QString& name = animationJointNames[i];
|
||||
// As long as we need the model preRotations anyway, let's get the jointIndex from the bind skeleton rather than
|
||||
// trusting the .fst (which is sometimes not updated to match changes to .fbx).
|
||||
int mapping = _bind->getGeometry().getJointIndex(name);
|
||||
int mapping = _bind->getHFMModel().getJointIndex(name);
|
||||
if (mapping != -1 && !_maskedJoints.contains(name)) {
|
||||
|
||||
AnimPose floorPose = composeAnimPose(modelJoints[mapping], floorFrame.rotations[i], floorFrame.translations[i] * UNIT_SCALE);
|
||||
|
@ -145,3 +145,15 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
|
||||
_clientTraitsHandler->sendChangedTraitsToMixer();
|
||||
}
|
||||
|
||||
void ScriptableAvatar::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
|
||||
_headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
|
||||
}
|
||||
|
||||
void ScriptableAvatar::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
|
||||
_headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
|
||||
}
|
||||
|
||||
void ScriptableAvatar::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
|
||||
_headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
|
||||
}
|
||||
|
|
|
@ -157,9 +157,16 @@ public:
|
|||
|
||||
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
|
||||
|
||||
private slots:
|
||||
void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
|
||||
bool getHasProceduralBlinkFaceMovement() const override { return _headData->getHasProceduralBlinkFaceMovement(); }
|
||||
void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
|
||||
bool getHasProceduralEyeFaceMovement() const override { return _headData->getHasProceduralEyeFaceMovement(); }
|
||||
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
|
||||
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
|
||||
|
||||
public slots:
|
||||
void update(float deltatime);
|
||||
|
||||
|
||||
private:
|
||||
AnimationPointer _animation;
|
||||
AnimationDetails _animationDetails;
|
||||
|
|
|
@ -164,7 +164,7 @@ bool EntityTreeSendThread::traverseTreeAndSendContents(SharedNodePointer node, O
|
|||
// Send EntityQueryInitialResultsComplete reliable packet ...
|
||||
auto initialCompletion = NLPacket::create(PacketType::EntityQueryInitialResultsComplete,
|
||||
sizeof(OCTREE_PACKET_SEQUENCE), true);
|
||||
initialCompletion->writePrimitive(OCTREE_PACKET_SEQUENCE(nodeData->getSequenceNumber() - 1U));
|
||||
initialCompletion->writePrimitive(OCTREE_PACKET_SEQUENCE(nodeData->getSequenceNumber()));
|
||||
DependencyManager::get<NodeList>()->sendPacket(std::move(initialCompletion), *node);
|
||||
}
|
||||
|
||||
|
|
|
@ -33,6 +33,7 @@
|
|||
|
||||
#include <EntityScriptClient.h> // for EntityScriptServerServices
|
||||
|
||||
#include "../AssignmentDynamicFactory.h"
|
||||
#include "EntityScriptServerLogging.h"
|
||||
#include "../entities/AssignmentParentFinder.h"
|
||||
|
||||
|
@ -56,6 +57,9 @@ int EntityScriptServer::_entitiesScriptEngineCount = 0;
|
|||
EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssignment(message) {
|
||||
qInstallMessageHandler(messageHandler);
|
||||
|
||||
DependencyManager::registerInheritance<EntityDynamicFactoryInterface, AssignmentDynamicFactory>();
|
||||
DependencyManager::set<AssignmentDynamicFactory>();
|
||||
|
||||
DependencyManager::set<EntityScriptingInterface>(false)->setPacketSender(&_entityEditSender);
|
||||
DependencyManager::set<ResourceScriptingInterface>();
|
||||
|
||||
|
@ -579,6 +583,7 @@ void EntityScriptServer::handleOctreePacket(QSharedPointer<ReceivedMessage> mess
|
|||
void EntityScriptServer::aboutToFinish() {
|
||||
shutdownScriptEngine();
|
||||
|
||||
DependencyManager::destroy<AssignmentDynamicFactory>();
|
||||
DependencyManager::destroy<AssignmentParentFinder>();
|
||||
|
||||
DependencyManager::get<ResourceManager>()->cleanup();
|
||||
|
|
|
@ -6,6 +6,10 @@ if (NOT "${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
|
|||
message( FATAL_ERROR "Only 64 bit builds supported." )
|
||||
endif()
|
||||
|
||||
if (USE_CCACHE OR "$ENV{USE_CCACHE}")
|
||||
configure_ccache()
|
||||
endif()
|
||||
|
||||
if (WIN32)
|
||||
add_definitions(-DNOMINMAX -D_CRT_SECURE_NO_WARNINGS)
|
||||
|
||||
|
@ -88,7 +92,7 @@ if (APPLE)
|
|||
set(OSX_SDK "${OSX_VERSION}" CACHE String "OS X SDK version to look for inside Xcode bundle or at OSX_SDK_PATH")
|
||||
|
||||
# set our OS X deployment target
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.8)
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.9)
|
||||
|
||||
# find the SDK path for the desired SDK
|
||||
find_path(
|
||||
|
|
20
cmake/externals/boostconfig/CMakeLists.txt
vendored
|
@ -1,20 +0,0 @@
|
|||
set(EXTERNAL_NAME boostconfig)
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
#URL https://github.com/boostorg/config/archive/boost-1.58.0.zip
|
||||
URL https://public.highfidelity.com/dependencies/config-boost-1.58.0.zip
|
||||
URL_MD5 42fa673bae2b7645a22736445e80eb8d
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)
|
||||
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
85
cmake/externals/bullet/CMakeLists.txt
vendored
|
@ -1,85 +0,0 @@
|
|||
set(EXTERNAL_NAME bullet)
|
||||
|
||||
if (WIN32)
|
||||
set(PLATFORM_CMAKE_ARGS "-DUSE_MSVC_RUNTIME_LIBRARY_DLL=1")
|
||||
else ()
|
||||
set(PLATFORM_CMAKE_ARGS "-DBUILD_SHARED_LIBS=1")
|
||||
|
||||
if (ANDROID)
|
||||
list(APPEND PLATFORM_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" "-DANDROID_NATIVE_API_LEVEL=19")
|
||||
elseif (APPLE)
|
||||
list(APPEND PLATFORM_CMAKE_ARGS "-DCMAKE_INSTALL_NAME_DIR=<INSTALL_DIR>/lib")
|
||||
endif()
|
||||
endif ()
|
||||
|
||||
include(ExternalProject)
|
||||
|
||||
if (WIN32)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/bullet-2.88.tgz
|
||||
URL_MD5 0a6876607ebe83e227427215f15946fd
|
||||
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_BULLET3=0 -DBUILD_OPENGL3_DEMOS=0 -DBUILD_BULLET2_DEMOS=0 -DBUILD_UNIT_TESTS=0 -DUSE_GLUT=0 -DUSE_DX11=0
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
)
|
||||
else ()
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/bullet-2.88.tgz
|
||||
URL_MD5 0a6876607ebe83e227427215f15946fd
|
||||
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_BULLET3=0 -DBUILD_OPENGL3_DEMOS=0 -DBUILD_BULLET2_DEMOS=0 -DBUILD_UNIT_TESTS=0 -DUSE_GLUT=0
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
)
|
||||
endif ()
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
set(BULLET_LIB_DIR "${INSTALL_DIR}/lib")
|
||||
|
||||
if (APPLE OR UNIX OR ANDROID)
|
||||
if (APPLE)
|
||||
set(BULLET_LIB_EXT "dylib")
|
||||
else ()
|
||||
set(BULLET_LIB_EXT "so")
|
||||
endif ()
|
||||
|
||||
set(LIB_PREFIX "lib")
|
||||
elseif (WIN32)
|
||||
set(BULLET_LIB_EXT "lib")
|
||||
endif ()
|
||||
|
||||
if (DEFINED BULLET_LIB_EXT)
|
||||
set(_BULLET_LIB_PAIRS "DYNAMICS_LIBRARY\;BulletDynamics" "COLLISION_LIBRARY\;BulletCollision" "MATH_LIBRARY\;LinearMath" "SOFTBODY_LIBRARY\;BulletSoftBody")
|
||||
|
||||
foreach(_LIB_PAIR ${_BULLET_LIB_PAIRS})
|
||||
list(GET _LIB_PAIR 0 _LIB_VAR_NAME)
|
||||
list(GET _LIB_PAIR 1 _LIB_NAME)
|
||||
|
||||
|
||||
|
||||
if (WIN32)
|
||||
# on windows, we might end up with a library that ends with RelWithDebInfo if Visual Studio is building for that configuration
|
||||
set(${EXTERNAL_NAME_UPPER}_${_LIB_VAR_NAME}_RELEASE "${BULLET_LIB_DIR}/${LIB_PREFIX}${_LIB_NAME}$<$<CONFIG:RelWithDebInfo>:_RelWithDebugInfo>$<$<CONFIG:MinSizeRel>:_MinsizeRel>.${BULLET_LIB_EXT}" CACHE FILEPATH "${_LIB_NAME} release library location")
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_${_LIB_VAR_NAME}_DEBUG ${BULLET_LIB_DIR}/${LIB_PREFIX}${_LIB_NAME}_Debug.${BULLET_LIB_EXT} CACHE FILEPATH "${_LIB_NAME} debug library location")
|
||||
else ()
|
||||
set(${EXTERNAL_NAME_UPPER}_${_LIB_VAR_NAME}_RELEASE ${BULLET_LIB_DIR}/${LIB_PREFIX}${_LIB_NAME}.${BULLET_LIB_EXT} CACHE FILEPATH "${_LIB_NAME} release library location")
|
||||
set(${EXTERNAL_NAME_UPPER}_${_LIB_VAR_NAME}_DEBUG "" CACHE FILEPATH "${_LIB_NAME} debug library location")
|
||||
endif ()
|
||||
endforeach()
|
||||
endif ()
|
||||
|
||||
if (DEFINED ${EXTERNAL_NAME_UPPER}_DYNAMICS_LIBRARY_RELEASE)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include/bullet CACHE PATH "Path to bullet include directory")
|
||||
endif ()
|
41
cmake/externals/draco/CMakeLists.txt
vendored
|
@ -1,41 +0,0 @@
|
|||
set(EXTERNAL_NAME draco)
|
||||
|
||||
if (ANDROID)
|
||||
set(ANDROID_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" "-DANDROID_NATIVE_API_LEVEL=19")
|
||||
endif ()
|
||||
|
||||
if (APPLE)
|
||||
set(EXTRA_CMAKE_FLAGS -DCMAKE_CXX_FLAGS=-stdlib=libc++ -DCMAKE_EXE_LINKER_FLAGS=-stdlib=libc++)
|
||||
endif ()
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/draco-1.1.0.zip
|
||||
URL_MD5 208f8b04c91d5f1c73d731a3ea37c5bb
|
||||
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>-$<CONFIG> ${EXTRA_CMAKE_FLAGS}
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
set(SUFFIXED_INSTALL_DIR "${INSTALL_DIR}-$<CONFIG>")
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SUFFIXED_INSTALL_DIR}/include CACHE PATH "List of Draco include directories")
|
||||
|
||||
if (UNIX)
|
||||
set(LIB_PREFIX "lib")
|
||||
set(LIB_EXT "a")
|
||||
elseif (WIN32)
|
||||
set(LIB_EXT "lib")
|
||||
endif ()
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${SUFFIXED_INSTALL_DIR}/lib/${LIB_PREFIX}draco.${LIB_EXT} CACHE FILEPATH "Path to Draco release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_ENCODER_LIBRARY ${SUFFIXED_INSTALL_DIR}/lib/${LIB_PREFIX}dracoenc.${LIB_EXT} CACHE FILEPATH "Path to Draco encoder release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_DECODER_LIBRARY ${SUFFIXED_INSTALL_DIR}/lib/${LIB_PREFIX}dracodec.${LIB_EXT} CACHE FILEPATH "Path to Draco decoder release library")
|
58
cmake/externals/etc2comp/CMakeLists.txt
vendored
|
@ -1,58 +0,0 @@
|
|||
set(EXTERNAL_NAME etc2comp)
|
||||
|
||||
if (ANDROID)
|
||||
set(ANDROID_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" "-DANDROID_NATIVE_API_LEVEL=19")
|
||||
endif ()
|
||||
|
||||
if (APPLE)
|
||||
set(EXTRA_CMAKE_FLAGS -DCMAKE_CXX_FLAGS=-stdlib=libc++ -DCMAKE_EXE_LINKER_FLAGS=-stdlib=libc++)
|
||||
endif ()
|
||||
|
||||
include(ExternalProject)
|
||||
# We use a patched version of etc2comp that properly generates all the necessary mips
|
||||
# See https://github.com/google/etc2comp/pull/29
|
||||
# We also use part of https://github.com/google/etc2comp/pull/1, which fixes a bug
|
||||
# that would override CMAKE_CXX_FLAGS
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/etc2comp-patched.zip
|
||||
URL_MD5 4c96153eb179acbe619e3d99d3330595
|
||||
CMAKE_ARGS ${ANDROID_CMAKE_ARGS} ${EXTRA_CMAKE_FLAGS}
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
if (WIN32 OR APPLE)
|
||||
if (WIN32)
|
||||
set(_LIB_FILE "EtcLib.lib")
|
||||
else ()
|
||||
set(_LIB_FILE "libEtcLib.a")
|
||||
endif ()
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/build/EtcLib/Debug/${_LIB_FILE} CACHE FILEPATH "Path to Etc2Comp debug library")
|
||||
|
||||
# use generator expression to ensure the correct library is found when building different configurations in VS
|
||||
set(_LIB_FOLDER "$<$<CONFIG:RelWithDebInfo>:build/EtcLib/RelWithDebInfo>")
|
||||
set(_LIB_FOLDER "${_LIB_FOLDER}$<$<CONFIG:MinSizeRel>:build/EtcLib/MinSizeRel>")
|
||||
set(_LIB_FOLDER "${_LIB_FOLDER}$<$<OR:$<CONFIG:Release>,$<CONFIG:Debug>>:build/EtcLib/Release>")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/${_LIB_FOLDER}/${_LIB_FILE} CACHE FILEPATH "Path to Etc2Comp release library")
|
||||
else ()
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE FILEPATH "Path to EtcLib debug library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/build/EtcLib/libEtcLib.a CACHE FILEPATH "Path to EtcLib release library")
|
||||
|
||||
endif ()
|
||||
|
||||
set(ETC_INCLUDE_DIR ${SOURCE_DIR}/EtcLib/Etc CACHE FILEPATH "Path to Etc2Comp/Etc include directory")
|
||||
set(ETCCODEC_INCLUDE_DIR ${SOURCE_DIR}/EtcLib/EtcCodec CACHE FILEPATH "Path to Etc2Comp/EtcCodec include directory")
|
||||
# ETC2COMP_INCLUDE_DIRS will be set later by FindEtc2Comp
|
21
cmake/externals/gli/CMakeLists.txt
vendored
|
@ -1,21 +0,0 @@
|
|||
set(EXTERNAL_NAME gli)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/gli-0.8.1.0.zip
|
||||
URL_MD5 00c990f59c12bbf367956ef399d6f798
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR} CACHE PATH "List of gli include directories")
|
21
cmake/externals/glm/CMakeLists.txt
vendored
|
@ -1,21 +0,0 @@
|
|||
set(EXTERNAL_NAME glm)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/glm-0.9.8.5-patched.zip
|
||||
URL_MD5 7d39ecc1cea275427534c3cfd6dd63f0
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> ${EXTERNAL_ARGS}
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE PATH "List of glm include directories")
|
22
cmake/externals/json/CMakeLists.txt
vendored
|
@ -1,22 +0,0 @@
|
|||
set(EXTERNAL_NAME json)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://hifi-public.s3.amazonaws.com/dependencies/json_3.1.2.zip
|
||||
URL_MD5 94dbf6ea25a7569ddc0ab6e20862cf16
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> ${EXTERNAL_ARGS}
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR} CACHE PATH "List of json include directories")
|
87
cmake/externals/nvtt/CMakeLists.txt
vendored
|
@ -1,87 +0,0 @@
|
|||
include(ExternalProject)
|
||||
include(SelectLibraryConfigurations)
|
||||
|
||||
set(EXTERNAL_NAME nvtt)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
if (WIN32)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/nvtt-win-2.1.0.hifi.zip
|
||||
URL_MD5 10da01cf601f88f6dc12a6bc13c89136
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE PATH "Location of NVTT include directory")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${SOURCE_DIR}/Release/x64/nvtt.lib CACHE FILEPATH "Path to NVTT release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_DLL_PATH "${SOURCE_DIR}/Release>/x64" CACHE PATH "Location of NVTT release DLL")
|
||||
else ()
|
||||
|
||||
if (ANDROID)
|
||||
set(ANDROID_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" "-DANDROID_NATIVE_API_LEVEL=19")
|
||||
endif ()
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/nvidia-texture-tools-2.1.0.hifi-83462e4.zip
|
||||
URL_MD5 602776e08515b54bfa1b8dc455003f0f
|
||||
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DNVTT_SHARED=1 -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE PATH "Location of NVTT include directory")
|
||||
|
||||
if (APPLE)
|
||||
set(_LIB_EXT "dylib")
|
||||
else ()
|
||||
set(_LIB_EXT "so")
|
||||
endif ()
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libnvtt.${_LIB_EXT} CACHE FILEPATH "Path to NVTT library")
|
||||
|
||||
if (APPLE)
|
||||
# on OS X we have to use install_name_tool to fix the paths found in the NVTT shared libraries
|
||||
# so that they can be found and linked during the linking phase
|
||||
set(_NVTT_LIB_DIR "${INSTALL_DIR}/lib")
|
||||
|
||||
# first fix the install names of all present libraries
|
||||
ExternalProject_Add_Step(
|
||||
${EXTERNAL_NAME}
|
||||
change-install-name
|
||||
COMMENT "Calling install_name_tool on NVTT libraries to fix install name for dylib linking"
|
||||
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${_NVTT_LIB_DIR} -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
|
||||
DEPENDEES install
|
||||
WORKING_DIRECTORY <INSTALL_DIR>
|
||||
LOG 1
|
||||
)
|
||||
|
||||
# then, for the main library (libnvtt) fix the paths to the dependency libraries (core, image, math)
|
||||
ExternalProject_Add_Step(
|
||||
${EXTERNAL_NAME}
|
||||
change-dependency-paths
|
||||
COMMENT "Calling install_name_tool on NVTT libraries to fix paths for dependency libraries"
|
||||
COMMAND install_name_tool -change libnvimage.dylib ${INSTALL_DIR}/lib/libnvimage.dylib libnvtt.dylib
|
||||
COMMAND install_name_tool -change libnvcore.dylib ${INSTALL_DIR}/lib/libnvcore.dylib libnvtt.dylib
|
||||
COMMAND install_name_tool -change libnvmath.dylib ${INSTALL_DIR}/lib/libnvmath.dylib libnvtt.dylib
|
||||
COMMAND install_name_tool -change libnvcore.dylib ${INSTALL_DIR}/lib/libnvcore.dylib libnvimage.dylib
|
||||
COMMAND install_name_tool -change libnvmath.dylib ${INSTALL_DIR}/lib/libnvmath.dylib libnvimage.dylib
|
||||
COMMAND install_name_tool -change libnvcore.dylib ${INSTALL_DIR}/lib/libnvcore.dylib libnvmath.dylib
|
||||
DEPENDEES install
|
||||
WORKING_DIRECTORY <INSTALL_DIR>/lib
|
||||
LOG 1
|
||||
)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
# Hide this external target (for IDE users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
49
cmake/externals/openvr/CMakeLists.txt
vendored
|
@ -1,49 +0,0 @@
|
|||
include(ExternalProject)
|
||||
include(SelectLibraryConfigurations)
|
||||
|
||||
set(EXTERNAL_NAME OpenVR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/openvr-1.0.6.zip
|
||||
URL_MD5 f6892cd3a3078f505d03b4297f5a1951
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/headers CACHE TYPE INTERNAL)
|
||||
|
||||
if (WIN32)
|
||||
|
||||
# FIXME need to account for different architectures
|
||||
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/win64/openvr_api.lib CACHE TYPE INTERNAL)
|
||||
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win64)
|
||||
else()
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/win32/openvr_api.lib CACHE TYPE INTERNAL)
|
||||
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)
|
||||
endif()
|
||||
|
||||
elseif(APPLE)
|
||||
|
||||
# FIXME need to account for different architectures
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
|
||||
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)
|
||||
|
||||
elseif(NOT ANDROID)
|
||||
|
||||
# FIXME need to account for different architectures
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux64/libopenvr_api.so CACHE TYPE INTERNAL)
|
||||
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux64)
|
||||
|
||||
endif()
|
||||
|
21
cmake/externals/quazip/CMakeLists.txt
vendored
|
@ -1,14 +1,19 @@
|
|||
set(EXTERNAL_NAME quazip)
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
cmake_policy(SET CMP0046 OLD)
|
||||
|
||||
include(ExternalProject)
|
||||
|
||||
set(QUAZIP_CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_PREFIX_PATH=${QT_CMAKE_PREFIX_PATH} -DCMAKE_INSTALL_NAME_DIR:PATH=<INSTALL_DIR>/lib -DZLIB_ROOT=${ZLIB_ROOT} -DCMAKE_POSITION_INDEPENDENT_CODE=ON)
|
||||
set(QUAZIP_CMAKE_ARGS
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
|
||||
-DCMAKE_PREFIX_PATH=${QT_CMAKE_PREFIX_PATH}
|
||||
-DCMAKE_INSTALL_NAME_DIR:PATH=<INSTALL_DIR>/lib
|
||||
-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}
|
||||
-DZLIB_ROOT=${VCPKG_INSTALL_ROOT}
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON)
|
||||
|
||||
if (APPLE)
|
||||
else ()
|
||||
set(QUAZIP_CMAKE_ARGS ${QUAZIP_CMAKE_ARGS} -DCMAKE_CXX_STANDARD=11)
|
||||
if (NOT APPLE)
|
||||
set(QUAZIP_CMAKE_ARGS ${QUAZIP_CMAKE_ARGS} -DCMAKE_CXX_STANDARD=11)
|
||||
endif ()
|
||||
|
||||
ExternalProject_Add(
|
||||
|
@ -22,10 +27,8 @@ ExternalProject_Add(
|
|||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
add_dependencies(quazip zlib)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES
|
||||
FOLDER "hidden/externals"
|
||||
INSTALL_NAME_DIR ${INSTALL_DIR}/lib
|
||||
BUILD_WITH_INSTALL_RPATH True)
|
||||
|
@ -54,4 +57,4 @@ select_library_configurations(${EXTERNAL_NAME_UPPER})
|
|||
|
||||
# Force selected libraries into the cache
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${${EXTERNAL_NAME_UPPER}_LIBRARY} CACHE FILEPATH "Location of QuaZip libraries")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of QuaZip libraries")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of QuaZip libraries")
|
||||
|
|
90
cmake/externals/sdl2/CMakeLists.txt
vendored
|
@ -1,90 +0,0 @@
|
|||
set(EXTERNAL_NAME sdl2)
|
||||
|
||||
include(ExternalProject)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
if (WIN32)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/SDL2-devel-2.0.3-VC.zip
|
||||
URL_MD5 30a333bcbe94bc5016e8799c73e86233
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
elseif (APPLE)
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/SDL2-2.0.3.zip
|
||||
URL_MD5 55f1eae5142d20db11c844d8d4d6deed
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DVIDEO_OPENGL=OFF
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include/SDL2 CACHE PATH "Location of SDL2 include directory")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY "${INSTALL_DIR}/lib/libSDL2-2.0.dylib" CACHE STRING "Path to SDL2 library")
|
||||
|
||||
set(_SDL2_LIB_DIR "${INSTALL_DIR}/lib")
|
||||
|
||||
ExternalProject_Add_Step(
|
||||
${EXTERNAL_NAME}
|
||||
change-install-name
|
||||
COMMENT "Calling install_name_tool on SDL2 libraries to fix install name for dylib linking"
|
||||
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${_SDL2_LIB_DIR} -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
|
||||
DEPENDEES install
|
||||
WORKING_DIRECTORY <INSTALL_DIR>
|
||||
LOG 1
|
||||
)
|
||||
|
||||
else ()
|
||||
if (ANDROID)
|
||||
set(ANDROID_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" "-DANDROID_NATIVE_API_LEVEL=19")
|
||||
endif ()
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/SDL2-2.0.3.tar.gz
|
||||
URL_MD5 fe6c61d2e9df9ef570e7e80c6e822537
|
||||
CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
endif ()
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
if (APPLE)
|
||||
|
||||
# NOOP
|
||||
|
||||
elseif (WIN32)
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${SOURCE_DIR}/include CACHE PATH "Location of SDL2 include directory")
|
||||
|
||||
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_TEMP ${SOURCE_DIR}/lib/x64/SDL2.lib CACHE FILEPATH "Path to SDL2 library")
|
||||
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/lib/x64 CACHE PATH "Location of SDL2 DLL")
|
||||
else()
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_TEMP ${SOURCE_DIR}/lib/x86/SDL2.lib CACHE FILEPATH "Path to SDL2 library")
|
||||
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/lib/x86 CACHE PATH "Location of SDL2 DLL")
|
||||
endif()
|
||||
|
||||
add_paths_to_fixup_libs(${${EXTERNAL_NAME_UPPER}_DLL_PATH})
|
||||
|
||||
else ()
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include/SDL2 CACHE PATH "Location of SDL2 include directory")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_TEMP ${INSTALL_DIR}/lib/libSDL2.so CACHE FILEPATH "Path to SDL2 library")
|
||||
|
||||
endif ()
|
|
@ -4,8 +4,8 @@ set(EXTERNAL_NAME serverless-content)
|
|||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC72.zip
|
||||
URL_MD5 b1d8faf9266bfbff88274a484911eb99
|
||||
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC75.zip
|
||||
URL_MD5 b4225d058952e17976ac228330ce8d51
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
|
|
33
cmake/externals/zlib/CMakeLists.txt
vendored
|
@ -1,33 +0,0 @@
|
|||
set(EXTERNAL_NAME zlib)
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
include(ExternalProject)
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://public.highfidelity.com/dependencies/zlib128.zip
|
||||
URL_MD5 126f8676442ffbd97884eb4d6f32afb4
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
set(${EXTERNAL_NAME_UPPER}_ROOT ${INSTALL_DIR} CACHE PATH "Path for Zlib install root")
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include CACHE PATH "List of zlib include directories")
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${${EXTERNAL_NAME_UPPER}_INCLUDE_DIR} CACHE PATH "List of zlib include directories")
|
||||
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/bin CACHE FILEPATH "Location of ZLib DLL")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/zlib.lib CACHE FILEPATH "Location of zlib release library")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/zlibd.lib CACHE FILEPATH "Location of zlib debug library")
|
||||
|
||||
include(SelectLibraryConfigurations)
|
||||
select_library_configurations(${EXTERNAL_NAME_UPPER})
|
||||
|
||||
# Force selected libraries into the cache
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${${EXTERNAL_NAME_UPPER}_LIBRARY} CACHE FILEPATH "Location of zlib libraries")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of zlib libraries")
|
|
@ -3,11 +3,15 @@ if (WIN32)
|
|||
endif (WIN32)
|
||||
|
||||
if (POLICY CMP0043)
|
||||
cmake_policy(SET CMP0043 OLD)
|
||||
cmake_policy(SET CMP0043 NEW)
|
||||
endif ()
|
||||
|
||||
if (POLICY CMP0042)
|
||||
cmake_policy(SET CMP0042 OLD)
|
||||
cmake_policy(SET CMP0042 NEW)
|
||||
endif ()
|
||||
|
||||
if (POLICY CMP0074)
|
||||
cmake_policy(SET CMP0074 OLD)
|
||||
endif ()
set_property(GLOBAL PROPERTY USE_FOLDERS ON)

@@ -34,7 +38,7 @@ file(GLOB HIFI_CUSTOM_MACROS "cmake/macros/*.cmake")
foreach(CUSTOM_MACRO ${HIFI_CUSTOM_MACROS})
include(${CUSTOM_MACRO})
endforeach()
unset(HIFI_CUSTOM_MACROS)
unset(HIFI_CUSTOM_MACROS)

if (ANDROID)
set(BUILD_SHARED_LIBS ON)

@@ -8,35 +8,95 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

# FIXME use the built tools

macro(AUTOSCRIBE_APPEND_QRC)
string(CONCAT SHADER_QRC "${SHADER_QRC}" "<file alias=\"${ARGV0}\">${ARGV1}</file>\n")
endmacro()

macro(AUTOSCRIBE_PLATFORM_SHADER)
set(AUTOSCRIBE_PLATFORM_PATH "${ARGV0}")
string(REGEX MATCH "([0-9]+(es)?)(/stereo)?" PLATFORM_PATH_REGEX ${AUTOSCRIBE_PLATFORM_PATH})
set(AUTOSCRIBE_DIALECT "${CMAKE_MATCH_1}")
if (CMAKE_MATCH_3)
set(AUTOSCRIBE_VARIANT "stereo")
else()
set(AUTOSCRIBE_VARIANT "mono")
endif()
string(REGEX REPLACE "/" "\\\\" SOURCE_GROUP_PATH ${AUTOSCRIBE_PLATFORM_PATH})
set(SOURCE_GROUP_PATH "${SHADER_LIB}\\${SOURCE_GROUP_PATH}")
set(AUTOSCRIBE_DIALECT_HEADER "${AUTOSCRIBE_HEADER_DIR}/${AUTOSCRIBE_DIALECT}/header.glsl")
set(AUTOSCRIBE_VARIANT_HEADER "${AUTOSCRIBE_HEADER_DIR}/${AUTOSCRIBE_VARIANT}.glsl")

set(AUTOSCRIBE_OUTPUT_FILE "${SHADERS_DIR}/${SHADER_LIB}/${AUTOSCRIBE_PLATFORM_PATH}/${SHADER_NAME}.${SHADER_TYPE}")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/scribe" "${AUTOSCRIBE_OUTPUT_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_OUTPUT_FILE})
set_property(SOURCE ${AUTOSCRIBE_OUTPUT_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SCRIBED_SHADERS ${AUTOSCRIBE_OUTPUT_FILE})

set(AUTOSCRIBE_SPIRV_FILE "${AUTOSCRIBE_OUTPUT_FILE}.spv")
# don't add unoptimized spirv to the QRC
#AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/spirv_unopt" "${AUTOSCRIBE_SPIRV_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SPIRV_SHADERS ${AUTOSCRIBE_SPIRV_FILE})

set(AUTOSCRIBE_SPIRV_OPT_FILE "${AUTOSCRIBE_OUTPUT_FILE}.opt.spv")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/spirv" "${AUTOSCRIBE_SPIRV_OPT_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_OPT_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_OPT_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SPIRV_SHADERS ${AUTOSCRIBE_SPIRV_OPT_FILE})

set(AUTOSCRIBE_SPIRV_GLSL_FILE "${AUTOSCRIBE_OUTPUT_FILE}.glsl")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/glsl" "${AUTOSCRIBE_SPIRV_GLSL_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_GLSL_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_GLSL_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND SPIRV_SHADERS ${AUTOSCRIBE_SPIRV_GLSL_FILE})

set(AUTOSCRIBE_SPIRV_JSON_FILE "${AUTOSCRIBE_OUTPUT_FILE}.json")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/${AUTOSCRIBE_PLATFORM_PATH}/json" "${AUTOSCRIBE_SPIRV_JSON_FILE}")
source_group(${SOURCE_GROUP_PATH} FILES ${AUTOSCRIBE_SPIRV_JSON_FILE})
set_property(SOURCE ${AUTOSCRIBE_SPIRV_JSON_FILE} PROPERTY SKIP_AUTOMOC ON)
list(APPEND REFLECTED_SHADERS ${AUTOSCRIBE_SPIRV_JSON_FILE})

unset(SHADER_GEN_LINE)
list(APPEND SHADER_GEN_LINE ${AUTOSCRIBE_DIALECT})
list(APPEND SHADER_GEN_LINE ${AUTOSCRIBE_VARIANT})
file(RELATIVE_PATH TEMP_PATH ${CMAKE_SOURCE_DIR} ${SHADER_FILE})
list(APPEND SHADER_GEN_LINE ${TEMP_PATH})
file(RELATIVE_PATH TEMP_PATH ${CMAKE_SOURCE_DIR} ${AUTOSCRIBE_OUTPUT_FILE})
list(APPEND SHADER_GEN_LINE ${TEMP_PATH})
list(APPEND SHADER_GEN_LINE ${AUTOSCRIBE_SHADER_SEEN_LIBS})
string(CONCAT AUTOSCRIBE_SHADERGEN_COMMANDS "${AUTOSCRIBE_SHADERGEN_COMMANDS}" "${SHADER_GEN_LINE}\n")
endmacro()
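Each call to AUTOSCRIBE_PLATFORM_SHADER appends one record to AUTOSCRIBE_SHADERGEN_COMMANDS, which AUTOSCRIBE_SHADER_LIBS later writes out as shadergen.txt. Because SHADER_GEN_LINE is a CMake list, the fields come out semicolon-separated: dialect, variant, the source and output paths made relative to the source dir, then the libraries seen so far. A purely illustrative record (every path and library name below is invented, not read from this diff):

# 410;mono;libraries/render-utils/src/example.slv;build/shaders/render-utils/410/example.vert;gpu;graphics;render-utils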
macro(AUTOSCRIBE_SHADER)
message(STATUS "Processing shader ${SHADER_FILE}")
#
# Set the include paths
#
# FIXME base the include paths off of output from the scribe tool,
# instead of treating every previously seen shader as a possible header
unset(SHADER_INCLUDE_FILES)
# Grab include files
foreach(includeFile ${ARGN})
list(APPEND SHADER_INCLUDE_FILES ${includeFile})
endforeach()

foreach(SHADER_INCLUDE ${SHADER_INCLUDE_FILES})
get_filename_component(INCLUDE_DIR ${SHADER_INCLUDE} PATH)
list(APPEND SHADER_INCLUDES_PATHS ${INCLUDE_DIR})
endforeach()

list(REMOVE_DUPLICATES SHADER_INCLUDES_PATHS)
#Extract the unique include shader paths
set(INCLUDES ${HIFI_LIBRARIES_SHADER_INCLUDE_FILES})
foreach(EXTRA_SHADER_INCLUDE ${INCLUDES})
list(APPEND SHADER_INCLUDES_PATHS ${EXTRA_SHADER_INCLUDE})
endforeach()

list(REMOVE_DUPLICATES SHADER_INCLUDES_PATHS)
#message(ready for includes ${SHADER_INCLUDES_PATHS})

# make the scribe include arguments
set(SCRIBE_INCLUDES)
unset(SCRIBE_INCLUDES)
foreach(INCLUDE_PATH ${SHADER_INCLUDES_PATHS})
set(SCRIBE_INCLUDES ${SCRIBE_INCLUDES} -I ${INCLUDE_PATH}/)
endforeach()

#
# Figure out the various output names
#
# Define the final name of the generated shader file
get_filename_component(SHADER_NAME ${SHADER_FILE} NAME_WE)
get_filename_component(SHADER_EXT ${SHADER_FILE} EXT)

@@ -47,38 +107,36 @@ macro(AUTOSCRIBE_SHADER)
elseif(${SHADER_EXT} STREQUAL .slg)
set(SHADER_TYPE geom)
endif()
file(MAKE_DIRECTORY "${SHADERS_DIR}/${SHADER_LIB}")
set(SHADER_TARGET "${SHADERS_DIR}/${SHADER_LIB}/${SHADER_NAME}.${SHADER_TYPE}")
file(TO_CMAKE_PATH "${SHADER_TARGET}" COMPILED_SHADER)
set(REFLECTED_SHADER "${COMPILED_SHADER}.json")

set(SCRIBE_ARGS -T ${SHADER_TYPE} -D GLPROFILE ${GLPROFILE} ${SCRIBE_INCLUDES} -o ${SHADER_TARGET} ${SHADER_FILE})
set(SCRIBE_ARGS -D GLPROFILE ${GLPROFILE} -T ${SHADER_TYPE} ${SCRIBE_INCLUDES} )

# Generate the frag/vert file
add_custom_command(
OUTPUT ${SHADER_TARGET}
COMMAND ${SCRIBE_COMMAND} ${SCRIBE_ARGS}
DEPENDS ${SHADER_FILE} ${SCRIBE_COMMAND} ${SHADER_INCLUDE_FILES})
# SHADER_SCRIBED -> the output of scribe
set(SHADER_SCRIBED "${SHADERS_DIR}/${SHADER_LIB}/${SHADER_NAME}.${SHADER_TYPE}")

# Generate the json reflection
# FIXME move to spirv-cross for this task after we have spirv compatible shaders
add_custom_command(
OUTPUT ${REFLECTED_SHADER}
COMMAND ${SHREFLECT_COMMAND} ${COMPILED_SHADER}
DEPENDS ${SHREFLECT_DEPENDENCY} ${COMPILED_SHADER})
# SHADER_NAME_FILE -> a file containing the shader name and extension (useful for debugging and for
# determining the type of shader from the filename)
set(SHADER_NAME_FILE "${SHADER_SCRIBED}.name")
file(TO_CMAKE_PATH "${SHADER_SCRIBED}" SHADER_SCRIBED)
file(WRITE "${SHADER_SCRIBED}.name" "${SHADER_NAME}.${SHADER_TYPE}")
AUTOSCRIBE_APPEND_QRC("${SHADER_COUNT}/name" "${SHADER_NAME_FILE}")

#output the generated file name
source_group("Compiled/${SHADER_LIB}" FILES ${COMPILED_SHADER})
set_property(SOURCE ${COMPILED_SHADER} PROPERTY SKIP_AUTOMOC ON)
list(APPEND COMPILED_SHADERS ${COMPILED_SHADER})
if (USE_GLES)
set(SPIRV_CROSS_ARGS --version 310es)
AUTOSCRIBE_PLATFORM_SHADER("310es")
AUTOSCRIBE_PLATFORM_SHADER("310es/stereo")
else()
set(SPIRV_CROSS_ARGS --version 410 --no-420pack-extension)
AUTOSCRIBE_PLATFORM_SHADER("410")
AUTOSCRIBE_PLATFORM_SHADER("410/stereo")
if (NOT APPLE)
set(SPIRV_CROSS_ARGS --version 450)
AUTOSCRIBE_PLATFORM_SHADER("450")
AUTOSCRIBE_PLATFORM_SHADER("450/stereo")
endif()
endif()

source_group("Reflected/${SHADER_LIB}" FILES ${REFLECTED_SHADER})
list(APPEND REFLECTED_SHADERS ${REFLECTED_SHADER})

string(CONCAT SHADER_QRC "${SHADER_QRC}" "<file alias=\"${SHADER_COUNT}\">${COMPILED_SHADER}</file>\n")
string(CONCAT SHADER_QRC "${SHADER_QRC}" "<file alias=\"${SHADER_COUNT}_reflection\">${REFLECTED_SHADER}</file>\n")
string(CONCAT SHADER_ENUMS "${SHADER_ENUMS}" "${SHADER_NAME} = ${SHADER_COUNT},\n")

string(CONCAT SHADER_SHADERS_ARRAY "${SHADER_SHADERS_ARRAY}" "${SHADER_COUNT},\n")
MATH(EXPR SHADER_COUNT "${SHADER_COUNT}+1")
endmacro()

@@ -87,6 +145,8 @@ macro(AUTOSCRIBE_SHADER_LIB)
message(FATAL_ERROR "AUTOSCRIBE_SHADER_LIB can only be used by the shaders library")
endif()

file(MAKE_DIRECTORY "${SHADERS_DIR}/${SHADER_LIB}")

list(APPEND HIFI_LIBRARIES_SHADER_INCLUDE_FILES "${CMAKE_SOURCE_DIR}/libraries/${SHADER_LIB}/src")
string(REGEX REPLACE "[-]" "_" SHADER_NAMESPACE ${SHADER_LIB})
string(CONCAT SHADER_ENUMS "${SHADER_ENUMS}" "namespace ${SHADER_NAMESPACE} {\n")

@@ -166,66 +226,81 @@ macro(AUTOSCRIBE_SHADER_LIB)

# Finish the shader enums
string(CONCAT SHADER_ENUMS "${SHADER_ENUMS}" "} // namespace ${SHADER_NAMESPACE}\n")
#file(RELATIVE_PATH RELATIVE_LIBRARY_DIR_PATH ${CMAKE_CURRENT_SOURCE_DIR} "${HIFI_LIBRARY_DIR}")
#foreach(HIFI_LIBRARY ${ARGN})
#list(APPEND HIFI_LIBRARIES_SHADER_INCLUDE_FILES ${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src)
#endforeach()
#endif()
endmacro()

macro(AUTOSCRIBE_SHADER_LIBS)
set(SCRIBE_COMMAND scribe)
set(SHREFLECT_COMMAND shreflect)
set(SHREFLECT_DEPENDENCY shreflect)

# Target dependant Custom rule on the SHADER_FILE
if (ANDROID)
set(GLPROFILE LINUX_GL)
set(SCRIBE_COMMAND ${NATIVE_SCRIBE})
set(SHREFLECT_COMMAND ${NATIVE_SHREFLECT})
unset(SHREFLECT_DEPENDENCY)
else()
if (APPLE)
set(GLPROFILE MAC_GL)
elseif(UNIX)
set(GLPROFILE LINUX_GL)
else()
set(GLPROFILE PC_GL)
endif()
endif()

message(STATUS "Shader processing start")
set(AUTOSCRIBE_HEADER_DIR ${CMAKE_CURRENT_SOURCE_DIR}/headers)
# Start the shader IDs
set(SHADER_COUNT 1)
set(SHADERS_DIR "${CMAKE_CURRENT_BINARY_DIR}/shaders")
set(SHADER_ENUMS "")
file(MAKE_DIRECTORY ${SHADERS_DIR})
set(SHADER_ENUMS "")
set(SHADER_COUNT 1)

#
# Scribe generation & program definition
#
foreach(SHADER_LIB ${ARGN})
list(APPEND AUTOSCRIBE_SHADER_SEEN_LIBS ${SHADER_LIB})
AUTOSCRIBE_SHADER_LIB(${SHADER_LIB})
endforeach()

# Generate the library files
configure_file(
ShaderEnums.cpp.in
${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.cpp)
${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.cpp)
configure_file(
ShaderEnums.h.in
${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.h)
configure_file(
shaders.qrc.in
${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.h)

set(AUTOSCRIBE_SHADER_LIB_SRC "${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.h;${CMAKE_CURRENT_BINARY_DIR}/shaders/ShaderEnums.cpp")
set(QT_RESOURCES_FILE ${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
configure_file(shaders.qrc.in ${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)
list(APPEND QT_RESOURCES_FILE ${CMAKE_CURRENT_BINARY_DIR}/shaders.qrc)

list(APPEND AUTOSCRIBE_SHADER_HEADERS ${AUTOSCRIBE_HEADER_DIR}/mono.glsl ${AUTOSCRIBE_HEADER_DIR}/stereo.glsl)
list(APPEND AUTOSCRIBE_SHADER_HEADERS ${AUTOSCRIBE_HEADER_DIR}/450/header.glsl ${AUTOSCRIBE_HEADER_DIR}/410/header.glsl ${AUTOSCRIBE_HEADER_DIR}/310es/header.glsl)
source_group("Shader Headers" FILES ${AUTOSCRIBE_HEADER_DIR}/mono.glsl ${AUTOSCRIBE_HEADER_DIR}/stereo.glsl)
source_group("Shader Headers\\450" FILES ${AUTOSCRIBE_HEADER_DIR}/450/header.glsl)
source_group("Shader Headers\\410" FILES ${AUTOSCRIBE_HEADER_DIR}/410/header.glsl)
source_group("Shader Headers\\310es" FILES ${AUTOSCRIBE_HEADER_DIR}/310es/header.glsl)

list(APPEND AUTOSCRIBE_SHADER_LIB_SRC ${AUTOSCRIBE_SHADER_HEADERS})
list(APPEND AUTOSCRIBE_SHADER_LIB_SRC ${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.h ${CMAKE_CURRENT_BINARY_DIR}/ShaderEnums.cpp)

# Write the shadergen command list
set(AUTOSCRIBE_SHADERGEN_COMMANDS_FILE ${CMAKE_CURRENT_BINARY_DIR}/shadergen.txt)
file(WRITE ${AUTOSCRIBE_SHADERGEN_COMMANDS_FILE} "${AUTOSCRIBE_SHADERGEN_COMMANDS}")

# A custom python script which will generate all our shader artifacts
add_custom_command(
OUTPUT ${SCRIBED_SHADERS} ${SPIRV_SHADERS} ${REFLECTED_SHADERS}
COMMENT "Generating/updating shaders"
COMMAND ${HIFI_PYTHON_EXEC} ${CMAKE_SOURCE_DIR}/tools/shadergen.py
--commands ${AUTOSCRIBE_SHADERGEN_COMMANDS_FILE}
--tools-dir ${VCPKG_TOOLS_DIR}
--build-dir ${CMAKE_CURRENT_BINARY_DIR}
--source-dir ${CMAKE_SOURCE_DIR}
DEPENDS ${AUTOSCRIBE_SHADER_HEADERS} ${CMAKE_SOURCE_DIR}/tools/shadergen.py ${ALL_SCRIBE_SHADERS})

add_custom_target(shadergen DEPENDS ${SCRIBED_SHADERS} ${SPIRV_SHADERS} ${REFLECTED_SHADERS})
set_target_properties(shadergen PROPERTIES FOLDER "Shaders")

# Custom targets required to force generation of the shaders via scribe
add_custom_target(scribe_shaders SOURCES ${ALL_SCRIBE_SHADERS})
add_custom_target(compiled_shaders SOURCES ${COMPILED_SHADERS})
add_custom_target(reflected_shaders SOURCES ${REFLECTED_SHADERS})
add_custom_target(scribe_shaders SOURCES ${ALL_SCRIBE_SHADERS} ${AUTOSCRIBE_SHADER_HEADERS})
set_target_properties(scribe_shaders PROPERTIES FOLDER "Shaders")
set_target_properties(compiled_shaders PROPERTIES FOLDER "Shaders")

add_custom_target(scribed_shaders SOURCES ${SCRIBED_SHADERS})
set_target_properties(scribed_shaders PROPERTIES FOLDER "Shaders")
add_dependencies(scribed_shaders shadergen)

add_custom_target(spirv_shaders SOURCES ${SPIRV_SHADERS})
set_target_properties(spirv_shaders PROPERTIES FOLDER "Shaders")
add_dependencies(spirv_shaders shadergen)

add_custom_target(reflected_shaders SOURCES ${REFLECTED_SHADERS})
set_target_properties(reflected_shaders PROPERTIES FOLDER "Shaders")
add_dependencies(reflected_shaders shadergen)

message(STATUS "Shader processing end")
endmacro()
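A minimal sketch of how the shaders library's own CMakeLists.txt might invoke this macro; the argument list below is illustrative rather than copied from this diff, and each name would be a library whose src/ directory holds scribe sources:

# Illustrative call; library names are placeholders.
AUTOSCRIBE_SHADER_LIBS(gpu graphics render-utils)
# Afterwards AUTOSCRIBE_SHADER_LIB_SRC and the generated shaders.qrc can be
# added to the shaders library target, while the shadergen target drives the
# actual SPIR-V / GLSL generation.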
45 cmake/macros/ConfigureCCache.cmake Normal file

@@ -0,0 +1,45 @@
#
# ConfigureCCache.cmake
# cmake/macros
#
# Created by Clement Brisset on 10/10/18.
# Copyright 2018 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

macro(configure_ccache)
find_program(CCACHE_PROGRAM ccache)
if(CCACHE_PROGRAM)
message(STATUS "Configuring ccache")

# Set up wrapper scripts
set(C_LAUNCHER "${CCACHE_PROGRAM}")
set(CXX_LAUNCHER "${CCACHE_PROGRAM}")

set(LAUNCH_C_IN "${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/launch-c.in")
set(LAUNCH_CXX_IN "${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/launch-cxx.in")
set(LAUNCH_C "${CMAKE_BINARY_DIR}/CMakeFiles/launch-c")
set(LAUNCH_CXX "${CMAKE_BINARY_DIR}/CMakeFiles/launch-cxx")

configure_file(${LAUNCH_C_IN} ${LAUNCH_C})
configure_file(${LAUNCH_CXX_IN} ${LAUNCH_CXX})
execute_process(COMMAND chmod a+rx ${LAUNCH_C} ${LAUNCH_CXX})

if(CMAKE_GENERATOR STREQUAL "Xcode")
# Set Xcode project attributes to route compilation and linking
# through our scripts
set(CMAKE_XCODE_ATTRIBUTE_CC ${LAUNCH_C})
set(CMAKE_XCODE_ATTRIBUTE_CXX ${LAUNCH_CXX})
set(CMAKE_XCODE_ATTRIBUTE_LD ${LAUNCH_C})
set(CMAKE_XCODE_ATTRIBUTE_LDPLUSPLUS ${LAUNCH_CXX})
else()
# Support Unix Makefiles and Ninja
set(CMAKE_C_COMPILER_LAUNCHER ${LAUNCH_C})
set(CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCH_CXX})
endif()
else()
message(WARNING "Could not find ccache")
endif()
endmacro()
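A minimal sketch of wiring this macro up from the top-level CMakeLists.txt; the USE_CCACHE option name is an assumption for illustration, and the call has to happen from the source root since the macro resolves cmake/templates against CMAKE_CURRENT_SOURCE_DIR:

# Sketch only -- the option name is hypothetical, not from this diff.
option(USE_CCACHE "Wrap compiler invocations with ccache when available" OFF)
if (USE_CCACHE)
  configure_ccache()
endif()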
@@ -66,8 +66,8 @@ macro(install_beside_console)
install(CODE "
set(MACOSX_BUNDLE_EXECUTABLE_NAME domain-server)
set(MACOSX_BUNDLE_GUI_IDENTIFIER com.highfidelity.server-components)
set(MACOSX_BUNDLE_BUNDLE_NAME Sandbox\\ Components)
configure_file(${HF_CMAKE_DIR}/templates/MacOSXBundleSandboxComponentsInfo.plist.in ${ESCAPED_BUNDLE_NAME}/Contents/Info.plist)
set(MACOSX_BUNDLE_BUNDLE_NAME Console\\ Components)
configure_file(${HF_CMAKE_DIR}/templates/MacOSXBundleConsoleComponentsInfo.plist.in ${ESCAPED_BUNDLE_NAME}/Contents/Info.plist)
execute_process(COMMAND ${MACDEPLOYQT_COMMAND} ${ESCAPED_BUNDLE_NAME} -verbose=2 -executable=${ESCAPED_EXECUTABLE_NAME})"
COMPONENT ${SERVER_COMPONENT}
)

@@ -19,13 +19,13 @@ macro(manually_install_openssl_for_qt)
find_package(OpenSSL REQUIRED)

install(
FILES "${OPENSSL_DLL_PATH}/ssleay32.dll"
FILES "${VCPKG_INSTALL_ROOT}/bin/ssleay32.dll"
DESTINATION ${TARGET_INSTALL_DIR}
COMPONENT ${TARGET_INSTALL_COMPONENT}
)

install(
FILES "${OPENSSL_DLL_PATH}/libeay32.dll"
FILES "${VCPKG_INSTALL_ROOT}/bin/libeay32.dll"
DESTINATION ${TARGET_INSTALL_DIR}
COMPONENT ${TARGET_INSTALL_COMPONENT}
)
@@ -18,7 +18,7 @@ macro(SET_PACKAGING_PARAMETERS)
set(BUILD_GLOBAL_SERVICES "DEVELOPMENT")
set(USE_STABLE_GLOBAL_SERVICES 0)
set(BUILD_NUMBER 0)
set(APP_USER_MODEL_ID "com.highfidelity.sandbox-dev")
set(APP_USER_MODEL_ID "com.highfidelity.console-dev")

set_from_env(RELEASE_TYPE RELEASE_TYPE "DEV")
set_from_env(RELEASE_NUMBER RELEASE_NUMBER "")

@@ -37,6 +37,7 @@ macro(SET_PACKAGING_PARAMETERS)
set(BUILD_VERSION ${RELEASE_NUMBER})
set(BUILD_ORGANIZATION "High Fidelity")
set(HIGH_FIDELITY_PROTOCOL "hifi")
set(HIGH_FIDELITY_APP_PROTOCOL "hifiapp")
set(INTERFACE_BUNDLE_NAME "Interface")
set(INTERFACE_ICON_PREFIX "interface")

@@ -142,7 +143,12 @@ macro(SET_PACKAGING_PARAMETERS)
set(CONSOLE_INSTALL_DIR ${DMG_SUBFOLDER_NAME})
set(INTERFACE_INSTALL_DIR ${DMG_SUBFOLDER_NAME})

set(CONSOLE_EXEC_NAME "Sandbox.app")
if (CLIENT_ONLY)
set(CONSOLE_EXEC_NAME "Console.app")
else ()
set(CONSOLE_EXEC_NAME "Sandbox.app")
endif()

set(CONSOLE_INSTALL_APP_PATH "${CONSOLE_INSTALL_DIR}/${CONSOLE_EXEC_NAME}")

set(CONSOLE_APP_CONTENTS "${CONSOLE_INSTALL_APP_PATH}/Contents")

@@ -176,16 +182,19 @@ macro(SET_PACKAGING_PARAMETERS)
# shortcut names
if (PRODUCTION_BUILD)
set(INTERFACE_SHORTCUT_NAME "High Fidelity Interface")
set(CONSOLE_SHORTCUT_NAME "Sandbox")
set(APP_USER_MODEL_ID "com.highfidelity.sandbox")
set(CONSOLE_SHORTCUT_NAME "Console")
set(SANDBOX_SHORTCUT_NAME "Sandbox")
set(APP_USER_MODEL_ID "com.highfidelity.console")
else ()
set(INTERFACE_SHORTCUT_NAME "High Fidelity Interface - ${BUILD_VERSION_NO_SHA}")
set(CONSOLE_SHORTCUT_NAME "Sandbox - ${BUILD_VERSION_NO_SHA}")
set(CONSOLE_SHORTCUT_NAME "Console - ${BUILD_VERSION_NO_SHA}")
set(SANDBOX_SHORTCUT_NAME "Sandbox - ${BUILD_VERSION_NO_SHA}")
endif ()

set(INTERFACE_HF_SHORTCUT_NAME "${INTERFACE_SHORTCUT_NAME}")
set(CONSOLE_HF_SHORTCUT_NAME "High Fidelity ${CONSOLE_SHORTCUT_NAME}")

set(SANDBOX_HF_SHORTCUT_NAME "High Fidelity ${SANDBOX_SHORTCUT_NAME}")

set(PRE_SANDBOX_INTERFACE_SHORTCUT_NAME "High Fidelity")
set(PRE_SANDBOX_CONSOLE_SHORTCUT_NAME "Server Console")
@@ -16,7 +16,6 @@ macro(TARGET_BULLET)
list(APPEND BULLET_LIBRARIES ${LIB_DIR}/libLinearMath.a)
list(APPEND BULLET_LIBRARIES ${LIB_DIR}/libBulletSoftBody.a)
else()
add_dependency_external_projects(bullet)
find_package(Bullet REQUIRED)
endif()
# perform the system include hack for OS X to ignore warnings

@@ -1,18 +1,24 @@
macro(TARGET_DRACO)
set(LIBS draco dracodec dracoenc)
find_library(LIBPATH ${LIB} PATHS )
if (ANDROID)
set(INSTALL_DIR ${HIFI_ANDROID_PRECOMPILED}/draco)
set(DRACO_INCLUDE_DIRS "${INSTALL_DIR}/include" CACHE TYPE INTERNAL)

set(LIB_DIR ${INSTALL_DIR}/lib)
list(APPEND DRACO_LIBRARIES ${LIB_DIR}/libdraco.a)
list(APPEND DRACO_LIBRARIES ${LIB_DIR}/libdracodec.a)
list(APPEND DRACO_LIBRARIES ${LIB_DIR}/libdracoenc.a)
target_link_libraries(${TARGET_NAME} ${DRACO_LIBRARIES})
else()
add_dependency_external_projects(draco)
find_package(Draco REQUIRED)
list(APPEND DRACO_LIBRARIES ${DRACO_LIBRARY})
list(APPEND DRACO_LIBRARIES ${DRACO_ENCODER_LIBRARY})
set(LIB_SEARCH_PATH_RELEASE ${VCPKG_INSTALL_ROOT}/lib/)
set(LIB_SEARCH_PATH_DEBUG ${VCPKG_INSTALL_ROOT}/debug/lib/)
foreach(LIB ${LIBS})
find_library(${LIB}_LIBPATH ${LIB} PATHS ${LIB_SEARCH_PATH_RELEASE} NO_DEFAULT_PATH)
list(APPEND DRACO_LIBRARY_RELEASE ${${LIB}_LIBPATH})
find_library(${LIB}D_LIBPATH ${LIB} PATHS ${LIB_SEARCH_PATH_DEBUG} NO_DEFAULT_PATH)
list(APPEND DRACO_LIBRARY_DEBUG ${${LIB}D_LIBPATH})
endforeach()
select_library_configurations(DRACO)
target_link_libraries(${TARGET_NAME} ${DRACO_LIBRARY})
endif()
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${DRACO_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${DRACO_LIBRARIES})
endmacro()
endmacro()
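For reference, select_library_configurations(DRACO) collapses the per-configuration lists gathered above into a single linkable value. A hedged sketch of that behaviour with placeholder paths (not taken from this diff):

# Placeholder paths, for illustration only.
include(SelectLibraryConfigurations)
set(DRACO_LIBRARY_RELEASE "${VCPKG_INSTALL_ROOT}/lib/draco.lib")
set(DRACO_LIBRARY_DEBUG "${VCPKG_INSTALL_ROOT}/debug/lib/draco.lib")
select_library_configurations(DRACO)
# DRACO_LIBRARY now reads "optimized;<release>;debug;<debug>", a form that
# target_link_libraries() resolves per build configuration.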
Some files were not shown because too many files have changed in this diff.