Mirror of https://github.com/overte-org/overte.git (synced 2025-08-10 01:00:44 +02:00)

Merge pull request #4189 from birarda/android

add the gvr-interface target for GearVR

Commit 390d2be48f: 115 changed files with 4399 additions and 686 deletions
.gitignore (vendored): 9 changes

@@ -39,4 +39,11 @@ interface/resources/visage/*
 # Ignore interfaceCache for Linux users
 interface/interfaceCache/
 
+# ignore audio-client externals
+libraries/audio-client/external/*/*
+!libraries/audio-client/external/*/readme.txt
+
+gvr-interface/assets/oculussig*
+gvr-interface/libs/*
+
 TAGS
BUILD.md: 6 changes

@@ -1,18 +1,20 @@
 ###Dependencies
 
 * [cmake](http://www.cmake.org/cmake/resources/software.html) ~> 2.8.12.2
-* [Qt](http://qt-project.org/downloads) ~> 5.3.0
+* [Qt](http://qt-project.org/downloads) ~> 5.3.2
 * [glm](http://glm.g-truc.net/0.9.5/index.html) ~> 0.9.5.4
 * [OpenSSL](https://www.openssl.org/related/binaries.html) ~> 1.0.1g
   * IMPORTANT: OpenSSL 1.0.1g is critical to avoid a security vulnerability.
 * [Intel Threading Building Blocks](https://www.threadingbuildingblocks.org/) ~> 4.3
+* [Soxr](http://sourceforge.net/projects/soxr/) ~> 0.1.1
 * [Bullet Physics Engine](https://code.google.com/p/bullet/downloads/list) ~> 2.82
 * [Gverb](https://github.com/highfidelity/gverb/archive/master.zip) (direct download to latest version)
 
 ### OS Specific Build Guides
-* [BUILD_WIN.md](BUILD_WIN.md) - additional instructions for Windows.
 * [BUILD_OSX.md](BUILD_OSX.md) - additional instructions for OS X.
 * [BUILD_LINUX.md](BUILD_LINUX.md) - additional instructions for Linux.
+* [BUILD_WIN.md](BUILD_WIN.md) - additional instructions for Windows.
+* [BUILD_ANDROID.md](BUILD_ANDROID.md) - additional instructions for Android
 
 ###CMake
 Hifi uses CMake to generate build files and project files for your platform.
BUILD_ANDROID.md: new file, 148 lines

Please read the [general build guide](BUILD.md) for information on dependencies required for all platforms. Only Android specific instructions are found in this file.

###Android Dependencies

You will need the following tools to build our Android targets.

* [cmake](http://www.cmake.org/download/) ~> 3.1.0
  * Note that this is a newer version than the minimum required for hifi desktop targets.
* [Qt](http://www.qt.io/download-open-source/#) ~> 5.4.0
  * Note that this is a newer version than the minimum required for hifi desktop targets.
* [ant](http://ant.apache.org/bindownload.cgi) ~> 1.9.4
* [Android NDK](https://developer.android.com/tools/sdk/ndk/index.html) = r10c
* [Android SDK](http://developer.android.com/sdk/installing/index.html) ~> 24.0.2
  * Install the latest Platform-tools
  * Install the latest Build-tools
  * Install the SDK Platform for API Level 19
  * Install Sources for Android SDK for API Level 19
  * Install the ARM EABI v7a System Image if you want to run an emulator.

You will also need to cross-compile the dependencies required for all platforms for Android, and help CMake find these compiled libraries on your machine.

####Optional Components

* [Oculus Mobile SDK](https://developer.oculus.com/downloads/#sdk=mobile) ~> 0.4.2

####ANDROID_LIB_DIR

Since you won't be installing Android dependencies to system paths on your development machine, CMake will need a little help tracking down your Android dependencies.

This is most easily accomplished by installing all Android dependencies in the same folder. You can place this folder wherever you like on your machine. In this build guide and across our CMakeLists files this folder is referred to as `ANDROID_LIB_DIR`. You can set `ANDROID_LIB_DIR` in your environment or pass it when you run CMake, as in the example below.
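For example, assuming all Android dependencies were installed to a folder called `android-libs` in your home directory (the path is only a placeholder), either of the following works:

```
export ANDROID_LIB_DIR=$HOME/android-libs

# or pass it directly on the cmake command line
cmake .. -DANDROID_LIB_DIR=$HOME/android-libs
```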
####Qt

Install Qt 5.4 for Android for your host environment from the [Qt downloads page](http://www.qt.io/download/). Install Qt to `$ANDROID_LIB_DIR/Qt`. This is required so that our root CMakeLists file can help CMake find your Android Qt installation.

The component required for the Android build is the `Android armv7` component.

If you would like to install Qt to a different location, or attempt to build with a different Qt version, you can pass `ANDROID_QT_CMAKE_PREFIX_PATH` to CMake. Point it to the `cmake` folder inside `$VERSION_NUMBER/android_armv7/lib`. Otherwise, our root CMakeLists will set it to `$ANDROID_LIB_DIR/Qt/5.4/android_armv7/lib/cmake`.
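For instance, assuming Qt for Android was installed somewhere outside `ANDROID_LIB_DIR` (the install path below is a placeholder), the variable can be passed on the command line:

```
cmake .. -DANDROID_QT_CMAKE_PREFIX_PATH=$HOME/Qt/5.4/android_armv7/lib/cmake
```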
####OpenSSL

Cross-compilation of OpenSSL has been tested from an OS X machine running 10.10 compiling OpenSSL 1.0.2. It is likely that the steps below will work for OpenSSL versions other than 1.0.2.

The original instructions to compile OpenSSL for Android from your host environment can be found [here](http://wiki.openssl.org/index.php/Android). We required some tweaks to get OpenSSL to successfully compile; those tweaks are explained below.

Download the [OpenSSL source](https://www.openssl.org/source/) and extract the tarball inside your `ANDROID_LIB_DIR`. Rename the extracted folder to `openssl`.

You will need the [setenv-android.sh script](http://wiki.openssl.org/index.php/File:Setenv-android.sh) from the OpenSSL wiki.

You must change three values at the top of the `setenv-android.sh` script - `_ANDROID_NDK`, `_ANDROID_EABI` and `_ANDROID_API`.
`_ANDROID_NDK` should be `android-ndk-r10`, `_ANDROID_EABI` should be `arm-linux-androideabi-4.9` and `_ANDROID_API` should be `19`.

First, make sure `ANDROID_NDK_ROOT` is set in your env. This should be the path to the root of your Android NDK install. `setenv-android.sh` needs `ANDROID_NDK_ROOT` to set the environment variables required for building OpenSSL.

Source the `setenv-android.sh` script so it can set environment variables that OpenSSL will use while compiling. If you use zsh as your shell you may need to modify `setenv-android.sh` for it to set the correct variables in your env.

```
export ANDROID_NDK_ROOT=YOUR_NDK_ROOT
source setenv-android.sh
```

Then, from the OpenSSL directory, run the following commands.

```
perl -pi -e 's/install: all install_docs install_sw/install: install_docs install_sw/g' Makefile.org
./config shared -no-ssl2 -no-ssl3 -no-comp -no-hw -no-engine --openssldir=/usr/local/ssl/$ANDROID_API
make depend
make all
```

This should generate libcrypto and libssl in the root of the OpenSSL directory. YOU MUST remove the `libssl.so` and `libcrypto.so` files that are generated. They are symlinks to `libssl.so.VER` and `libcrypto.so.VER`, which Android does not know how to handle. By removing `libssl.so` and `libcrypto.so` the FindOpenSSL module will find the static libs and use those instead.
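A minimal way to do that, from the root of the OpenSSL directory:

```
rm libssl.so libcrypto.so
```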
If you have been building other components it is possible that the OpenSSL compile will fail based on the values other cross-compilations (tbb, bullet) have set. Ensure that you are in a new terminal window to avoid compilation errors from previously set environment variables.
####Intel Threading Building Blocks

Download the [Intel Threading Building Blocks source](https://www.threadingbuildingblocks.org/download) and extract the tarball inside your `ANDROID_LIB_DIR`. Rename the extracted folder to `tbb`.

NOTE: BEFORE YOU ATTEMPT TO CROSS-COMPILE TBB, DISCONNECT ANY DEVICES ADB WOULD DETECT. The tbb build process asks adb for a couple of strings, and if a device is plugged in extra characters get added that will cause ndk-build to fail with an error.
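One quick way to confirm nothing is attached before you start the build:

```
adb devices
```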
From the tbb directory, execute the following commands. First, we build TBB using `ndk-build`. Then, the compiled libs are copied to a lib folder in the root of the tbb directory.

```
cd jni
ndk-build target=android tbb tbbmalloc arch=arm
cd ../
mkdir lib
cp `find . -name "*.so"` lib/
```

####Soxr

Download the [Soxr source](http://sourceforge.net/projects/soxr/) and extract the tarball inside your `ANDROID_LIB_DIR`. Rename the extracted folder to `soxr`.

From the soxr directory, use cmake, along with the `android.toolchain.cmake` file (included in this repository under cmake/android), to cross-compile soxr for Android. Note that you will need ANDROID_NDK set in your environment before using the toolchain file.

The full set of commands to build soxr for Android is shown below. It is a long command; make sure you copy the entire command (up to `-DBUILD_TESTS=0`).

```
cmake -DCMAKE_TOOLCHAIN_FILE=$FULL_PATH_TO_TOOLCHAIN -DCMAKE_INSTALL_PREFIX=. -DHAVE_WORDS_BIGENDIAN_EXITCODE=1 -DBUILD_TESTS=0
make
make install
```

This will create the `lib` and `include` folders inside `ANDROID_LIB_DIR/soxr` that FindSoxr will look for.

####Oculus Mobile SDK

The Oculus Mobile SDK is optional, for Gear VR support. It is not required to compile gvr-interface.

Download the [Oculus Mobile SDK](https://developer.oculus.com/downloads/#sdk=mobile) and extract the archive inside your `ANDROID_LIB_DIR` folder. Rename the extracted folder to `libovr`.

From the VRLib directory, use ndk-build to build VrLib.

```
cd VRLib
ndk-build
```

This will create the liboculus.a archive that our FindLibOVR module will look for when cmake is run.

#####Hybrid testing

Currently the 'vr_dual' mode that would allow us to run a hybrid app has limited support in the Oculus Mobile SDK. The best way to have an application we can launch without having to connect to the GearVR is to put the Gear VR Service into developer mode. This stops Oculus Home from taking over the device when it is plugged into the Gear VR headset, and allows the application to be launched from the Applications page.

To put the Gear VR Service into developer mode you need an application with an Oculus Signature File on your device. Generate an Oculus Signature File for your device on the [Oculus osig tool page](https://developer.oculus.com/tools/osig/). Place this file in the gvr-interface/assets directory. CMake will automatically copy it into your apk in the right place when you execute `make gvr-interface-apk`.

Once the application is on your device, go to `Settings->Application Manager->Gear VR Service->Manage Storage`. Tap on `VR Service Version` six times. It will scan your device to verify that you have an osig file in an application on your device, and then it will let you enable Developer mode.

####GLM

GLM is a header only library and technically the same GLM used for desktop builds of hifi could be used for the Android build. However, to avoid conflicts with system installations of Android dependencies, CMake will only look for Android headers and libraries in `ANDROID_LIB_DIR` or in your android-ndk install.

Download the [glm headers](http://sourceforge.net/projects/ogl-math/files/) from their sourceforge page. The version you download should match the requirement shown in the [general build guide](BUILD.md). Extract the archive into your `ANDROID_LIB_DIR` and rename the extracted folder to `glm`.

###CMake

We use CMake to generate the makefiles that compile and deploy the Android APKs to your device. In order to create Makefiles for the Android targets, CMake requires that some environment variables are set, and that other variables are passed to it when it is run.

The following must be set in your environment:

* ANDROID_NDK - the root of your Android NDK install
* ANDROID_HOME - the root of your Android SDK install
* ANDROID_LIB_DIR - the directory containing cross-compiled versions of dependencies

The following must be passed to CMake when it is run:

* USE_ANDROID_TOOLCHAIN - set to true to build for Android
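Putting it together, a sketch of a full Android configure-and-build pass under these assumptions (all paths and the build directory name are placeholders):

```
export ANDROID_NDK=$HOME/android-ndk-r10c
export ANDROID_HOME=$HOME/android-sdk
export ANDROID_LIB_DIR=$HOME/android-libs

mkdir android-build && cd android-build
cmake .. -DUSE_ANDROID_TOOLCHAIN=1
make gvr-interface-apk
```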
@@ -4,7 +4,7 @@ Please read the [general build guide](BUILD.md) for information on dependencies
 [Homebrew](http://brew.sh/) is an excellent package manager for OS X. It makes install of all hifi dependencies very simple.
 
 brew tap highfidelity/homebrew-formulas
-brew install cmake glm openssl tbb
+brew install cmake glm openssl tbb libsoxr
 brew install highfidelity/formulas/qt5
 brew link qt5 --force
BUILD_WIN.md: 14 changes

@@ -178,6 +178,20 @@ You now have Bullet libraries compiled, now you need to put them in the right place
 
 _Note that the INSTALL target should handle the copying of files into an install directory automatically, however, without modifications to CMake, the install target didn't work right for me, please update these instructions if you get that working right - Leo <leo@highfidelity.io>_
 
+###Soxr
+
+Download the zip from the [soxr sourceforge page](http://sourceforge.net/projects/soxr/).
+
+We recommend you install it to %HIFI_LIB_DIR%\soxr. This will help our FindSoxr cmake module find what it needs. You can place it wherever you like on your machine if you specify SOXR_ROOT_DIR as an environment variable or a variable passed when cmake is run (see the example below).
+Extract the soxr archive wherever you like. Then, inside the extracted folder, create a directory called `build`. From that build directory, the following commands will build and then install soxr to `%HIFI_LIB_DIR%`.
+
+```
+cmake .. -G "NMake Makefiles" -DCMAKE_INSTALL_PREFIX=%HIFI_LIB_DIR%/soxr
+nmake
+nmake install
+```
+
 ###Build High Fidelity using Visual Studio
 Follow the same build steps from the CMake section of [BUILD.md](BUILD.md), but pass a different generator to CMake.
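If soxr was put somewhere other than %HIFI_LIB_DIR%\soxr, a sketch of the SOXR_ROOT_DIR option mentioned above (the path is a placeholder) is to pass it when running cmake for hifi:

```
cmake .. -DSOXR_ROOT_DIR=C:\libs\soxr
```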
@@ -1,5 +1,10 @@
 cmake_minimum_required(VERSION 2.8.12.2)
 
+if (USE_ANDROID_TOOLCHAIN)
+  set(CMAKE_TOOLCHAIN_FILE "${CMAKE_CURRENT_SOURCE_DIR}/cmake/android/android.toolchain.cmake")
+  set(ANDROID_NATIVE_API_LEVEL 19)
+endif ()
+
 if (WIN32)
   cmake_policy(SET CMP0020 NEW)
 endif (WIN32)

@@ -58,8 +63,27 @@ if (APPLE)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --stdlib=libc++")
 endif ()
 
-if (NOT QT_CMAKE_PREFIX_PATH)
-  set(QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
+if (NOT ANDROID_LIB_DIR)
+  set(ANDROID_LIB_DIR $ENV{ANDROID_LIB_DIR})
+endif ()
+
+if (ANDROID)
+  if (NOT ANDROID_QT_CMAKE_PREFIX_PATH)
+    set(QT_CMAKE_PREFIX_PATH ${ANDROID_LIB_DIR}/Qt/5.4/android_armv7/lib/cmake)
+  else ()
+    set(QT_CMAKE_PREFIX_PATH ${ANDROID_QT_CMAKE_PREFIX_PATH})
+  endif ()
+
+  set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+  set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
+
+  if (ANDROID_LIB_DIR)
+    list(APPEND CMAKE_FIND_ROOT_PATH ${ANDROID_LIB_DIR})
+  endif ()
+else ()
+  if (NOT QT_CMAKE_PREFIX_PATH)
+    set(QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
+  endif ()
 endif ()
 
 set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${QT_CMAKE_PREFIX_PATH})

@@ -84,10 +108,21 @@ foreach(CUSTOM_MACRO ${HIFI_CUSTOM_MACROS})
   include(${CUSTOM_MACRO})
 endforeach()
 
-# targets on all platforms
-add_subdirectory(assignment-client)
-add_subdirectory(domain-server)
-add_subdirectory(ice-server)
-add_subdirectory(interface)
-add_subdirectory(tests)
-add_subdirectory(tools)
+if (ANDROID)
+  file(GLOB ANDROID_CUSTOM_MACROS "cmake/android/*.cmake")
+  foreach(CUSTOM_MACRO ${ANDROID_CUSTOM_MACROS})
+    include(${CUSTOM_MACRO})
+  endforeach()
+endif ()
+
+# add subdirectories for all targets
+if (NOT ANDROID)
+  add_subdirectory(assignment-client)
+  add_subdirectory(domain-server)
+  add_subdirectory(ice-server)
+  add_subdirectory(interface)
+  add_subdirectory(tests)
+  add_subdirectory(tools)
+else ()
+  add_subdirectory(gvr-interface)
+endif ()
@@ -17,7 +17,7 @@
 #include <QtNetwork/QNetworkRequest>
 #include <QtNetwork/QNetworkReply>
 
-#include <AvatarData.h>
+#include <AvatarHashMap.h>
 #include <NetworkAccessManager.h>
 #include <NodeList.h>
 #include <PacketHeaders.h>

@@ -39,8 +39,7 @@ Agent::Agent(const QByteArray& packet) :
     _receivedAudioStream(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES,
         InboundAudioStream::Settings(0, false, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, false,
         DEFAULT_WINDOW_STARVE_THRESHOLD, DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES,
-        DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION, false)),
-    _avatarHashMap()
+        DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION, false))
 {
     // be the parent of the script engine so it gets moved when we do
     _scriptEngine.setParent(this);

@@ -135,7 +134,7 @@ void Agent::readPendingDatagrams() {
         || datagramPacketType == PacketTypeAvatarBillboard
         || datagramPacketType == PacketTypeKillAvatar) {
         // let the avatar hash map process it
-        _avatarHashMap.processAvatarMixerDatagram(receivedPacket, nodeList->sendingNodeForPacket(receivedPacket));
+        DependencyManager::get<AvatarHashMap>()->processAvatarMixerDatagram(receivedPacket, nodeList->sendingNodeForPacket(receivedPacket));
 
         // let this continue through to the NodeList so it updates last heard timestamp
         // for the sending avatar-mixer

@@ -202,7 +201,7 @@ void Agent::run() {
 
     // give this AvatarData object to the script engine
     _scriptEngine.setAvatarData(&scriptedAvatar, "Avatar");
-    _scriptEngine.setAvatarHashMap(&_avatarHashMap, "AvatarList");
+    _scriptEngine.setAvatarHashMap(DependencyManager::get<AvatarHashMap>().data(), "AvatarList");
 
     // register ourselves to the script engine
     _scriptEngine.registerGlobalObject("Agent", this);
@@ -18,7 +18,6 @@
 #include <QtCore/QObject>
 #include <QtCore/QUrl>
 
-#include <AvatarHashMap.h>
 #include <EntityEditPacketSender.h>
 #include <EntityTree.h>
 #include <EntityTreeHeadlessViewer.h>

@@ -63,8 +62,6 @@ private:
 
     MixedAudioStream _receivedAudioStream;
     float _lastReceivedAudioLoudness;
 
-    AvatarHashMap _avatarHashMap;
-
 };
 
 #endif // hifi_Agent_h
@@ -18,6 +18,7 @@
 #include <AccountManager.h>
 #include <AddressManager.h>
 #include <Assignment.h>
+#include <AvatarHashMap.h>
 #include <HifiConfigVariantMap.h>
 #include <LogHandler.h>
 #include <LogUtils.h>

@@ -55,6 +56,7 @@ AssignmentClient::AssignmentClient(int &argc, char **argv) :
     DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
     auto addressManager = DependencyManager::set<AddressManager>();
     auto nodeList = DependencyManager::set<NodeList>(NodeType::Unassigned);
+    auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
 
     // setup a shutdown event listener to handle SIGTERM or WM_CLOSE for us
 #ifdef _WIN32
cmake/android/AndroidManifest.xml.in: new file (executable), 82 lines

<?xml version="1.0" encoding="utf-8"?>

<!-- IMPORTANT: Do not manually manipulate this automatically generated file, changes will be gone after the next build! -->

<manifest package="${ANDROID_APK_PACKAGE}" xmlns:android="http://schemas.android.com/apk/res/android" android:versionName="${ANDROID_APK_VERSION_NAME}" android:versionCode="${ANDROID_APK_VERSION_CODE}" android:installLocation="auto">
    <application
        android:hardwareAccelerated="true"
        android:name="org.qtproject.qt5.android.bindings.QtApplication"
        android:label="@string/AppDisplayName"
        android:icon="@drawable/icon"
        android:debuggable="${ANDROID_APK_DEBUGGABLE}">

        <!-- VR MODE -->
        <meta-data android:name="com.samsung.android.vr.application.mode" android:value="vr_only"/>

        <activity
            android:configChanges="orientation|uiMode|screenLayout|screenSize|smallestScreenSize|locale|fontScale|keyboard|keyboardHidden|navigation"
            android:name="${ANDROID_ACTIVITY_NAME}"
            android:label="@string/AppDisplayName"
            android:screenOrientation="landscape"
            android:launchMode="singleTop"
            ${ANDROID_APK_THEME}>

            <intent-filter>
                <action android:name="android.intent.action.MAIN"/>
                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>

            <meta-data android:name="android.app.lib_name" android:value="-- %%INSERT_APP_LIB_NAME%% --"/>
            <meta-data android:name="android.app.qt_sources_resource_id" android:resource="@array/qt_sources"/>
            <meta-data android:name="android.app.repository" android:value="default"/>
            <meta-data android:name="android.app.qt_libs_resource_id" android:resource="@array/qt_libs"/>
            <meta-data android:name="android.app.bundled_libs_resource_id" android:resource="@array/bundled_libs"/>
            <!-- Deploy Qt libs as part of package -->
            <meta-data android:name="android.app.bundle_local_qt_libs" android:value="-- %%BUNDLE_LOCAL_QT_LIBS%% --"/>
            <meta-data android:name="android.app.bundled_in_lib_resource_id" android:resource="@array/bundled_in_lib"/>
            <meta-data android:name="android.app.bundled_in_assets_resource_id" android:resource="@array/bundled_in_assets"/>
            <!-- Run with local libs -->
            <meta-data android:name="android.app.use_local_qt_libs" android:value="-- %%USE_LOCAL_QT_LIBS%% --"/>
            <meta-data android:name="android.app.libs_prefix" android:value="/data/local/tmp/qt/"/>
            <meta-data android:name="android.app.load_local_libs" android:value="-- %%INSERT_LOCAL_LIBS%% --"/>
            <meta-data android:name="android.app.load_local_jars" android:value="-- %%INSERT_LOCAL_JARS%% --"/>
            <meta-data android:name="android.app.static_init_classes" android:value="-- %%INSERT_INIT_CLASSES%% --"/>
            <!-- Messages maps -->
            <meta-data android:value="@string/ministro_not_found_msg" android:name="android.app.ministro_not_found_msg"/>
            <meta-data android:value="@string/ministro_needed_msg" android:name="android.app.ministro_needed_msg"/>
            <meta-data android:value="@string/fatal_error_msg" android:name="android.app.fatal_error_msg"/>
            <!-- Messages maps -->

            <!-- Splash screen -->
            <!-- <meta-data android:name="android.app.splash_screen_drawable" android:resource="@drawable/logo"/> -->

            ${ANDROID_EXTRA_ACTIVITY_XML}
        </activity>

        <activity
            android:name="com.oculusvr.vrlib.PlatformActivity"
            android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen"
            android:launchMode="singleTask"
            android:screenOrientation="landscape"
            android:configChanges="screenSize|orientation|keyboardHidden|keyboard">
        </activity>

        ${ANDROID_EXTRA_APPLICATION_XML}
    </application>
    <uses-sdk android:minSdkVersion="${ANDROID_API_LEVEL}" android:targetSdkVersion="${ANDROID_API_LEVEL}"/>

    <!-- The following comment will be replaced upon deployment with default permissions based on the dependencies of the application.
         Remove the comment if you do not require these default permissions. -->
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />

    <!-- camera permission required for GEAR VR passthrough camera -->
    <uses-permission android:name="android.permission.CAMERA" />

    <!-- The following comment will be replaced upon deployment with default features based on the dependencies of the application.
         Remove the comment if you do not require these default features. -->

    <!-- Tell the system this app requires OpenGL ES 3.0. -->
    <uses-feature android:glEsVersion="0x00030000" android:required="true" />
</manifest>
cmake/android/QtCreateAPK.cmake: new file, 162 lines

#
#  QtCreateAPK.cmake
#
#  Created by Stephen Birarda on 11/18/14.
#  Copyright 2013 High Fidelity, Inc.
#
#  Distributed under the Apache License, Version 2.0.
#  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

#
# OPTIONS
# These options will modify how QtCreateAPK behaves. May be useful if somebody wants to fork.
# For High Fidelity purposes these should not need to be changed.
#
set(ANDROID_THIS_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}) # Directory this CMake file is in

if (POLICY CMP0026)
  cmake_policy(SET CMP0026 OLD)
endif ()

macro(qt_create_apk)
  if(ANDROID_APK_FULLSCREEN)
    set(ANDROID_APK_THEME "android:theme=\"@android:style/Theme.NoTitleBar.Fullscreen\"")
  else()
    set(ANDROID_APK_THEME "")
  endif()

  if (CMAKE_BUILD_TYPE MATCHES RELEASE)
    set(ANDROID_APK_DEBUGGABLE "false")
    set(ANDROID_APK_RELEASE_LOCAL ${ANDROID_APK_RELEASE})
  else ()
    set(ANDROID_APK_DEBUGGABLE "true")
    set(ANDROID_APK_RELEASE_LOCAL "0")
  endif ()

  # Create "AndroidManifest.xml"
  configure_file("${ANDROID_THIS_DIRECTORY}/AndroidManifest.xml.in" "${ANDROID_APK_BUILD_DIR}/AndroidManifest.xml")

  # create "strings.xml"
  configure_file("${ANDROID_THIS_DIRECTORY}/strings.xml.in" "${ANDROID_APK_BUILD_DIR}/res/values/strings.xml")

  # figure out where the qt dir is
  get_filename_component(QT_DIR "${QT_CMAKE_PREFIX_PATH}/../../" ABSOLUTE)

  # find androiddeployqt
  find_program(ANDROID_DEPLOY_QT androiddeployqt HINTS "${QT_DIR}/bin")

  # set the path to our app shared library
  set(EXECUTABLE_DESTINATION_PATH "${ANDROID_APK_OUTPUT_DIR}/libs/${ANDROID_ABI}/lib${TARGET_NAME}.so")

  # add our dependencies to the deployment file
  get_property(_DEPENDENCIES TARGET ${TARGET_NAME} PROPERTY INTERFACE_LINK_LIBRARIES)

  foreach(_IGNORE_COPY IN LISTS IGNORE_COPY_LIBS)
    list(REMOVE_ITEM _DEPENDENCIES ${_IGNORE_COPY})
  endforeach()

  foreach(_DEP IN LISTS _DEPENDENCIES)
    if (NOT TARGET ${_DEP})
      list(APPEND _DEPS_LIST ${_DEP})
    else ()
      if(NOT _DEP MATCHES "Qt5::.*")
        get_property(_DEP_LOCATION TARGET ${_DEP} PROPERTY "LOCATION_${CMAKE_BUILD_TYPE}")

        # recursively add libraries which are dependencies of this target
        get_property(_DEP_DEPENDENCIES TARGET ${_DEP} PROPERTY INTERFACE_LINK_LIBRARIES)

        foreach(_SUB_DEP IN LISTS _DEP_DEPENDENCIES)
          if (NOT TARGET ${_SUB_DEP} AND NOT _SUB_DEP MATCHES "Qt5::.*")
            list(APPEND _DEPS_LIST ${_SUB_DEP})
          endif()
        endforeach()

        list(APPEND _DEPS_LIST ${_DEP_LOCATION})
      endif()
    endif ()
  endforeach()

  list(REMOVE_DUPLICATES _DEPS_LIST)

  # just copy static libs to apk libs folder - don't add to deps list
  foreach(_LOCATED_DEP IN LISTS _DEPS_LIST)
    if (_LOCATED_DEP MATCHES "\\.a$")
      add_custom_command(
        TARGET ${TARGET_NAME}
        POST_BUILD
        COMMAND ${CMAKE_COMMAND} -E copy ${_LOCATED_DEP} "${ANDROID_APK_OUTPUT_DIR}/libs/${ANDROID_ABI}"
      )
      list(REMOVE_ITEM _DEPS_LIST ${_LOCATED_DEP})
    endif ()
  endforeach()

  string(REPLACE ";" "," _DEPS "${_DEPS_LIST}")

  configure_file("${ANDROID_THIS_DIRECTORY}/deployment-file.json.in" "${TARGET_NAME}-deployment.json")

  # copy the res folder from the target to the apk build dir
  add_custom_target(
    ${TARGET_NAME}-copy-res
    COMMAND ${CMAKE_COMMAND} -E copy_directory "${CMAKE_CURRENT_SOURCE_DIR}/res" "${ANDROID_APK_BUILD_DIR}/res"
  )

  # copy the assets folder from the target to the apk build dir
  add_custom_target(
    ${TARGET_NAME}-copy-assets
    COMMAND ${CMAKE_COMMAND} -E copy_directory "${CMAKE_CURRENT_SOURCE_DIR}/assets" "${ANDROID_APK_BUILD_DIR}/assets"
  )

  # copy the java folder from src to the apk build dir
  add_custom_target(
    ${TARGET_NAME}-copy-java
    COMMAND ${CMAKE_COMMAND} -E copy_directory "${CMAKE_CURRENT_SOURCE_DIR}/src/java" "${ANDROID_APK_BUILD_DIR}/src"
  )

  # copy the libs folder from src to the apk build dir
  add_custom_target(
    ${TARGET_NAME}-copy-libs
    COMMAND ${CMAKE_COMMAND} -E copy_directory "${CMAKE_CURRENT_SOURCE_DIR}/libs" "${ANDROID_APK_BUILD_DIR}/libs"
  )

  # handle setup for ndk-gdb
  add_custom_target(${TARGET_NAME}-gdb DEPENDS ${TARGET_NAME})

  if (ANDROID_APK_DEBUGGABLE)
    get_property(TARGET_LOCATION TARGET ${TARGET_NAME} PROPERTY LOCATION)

    set(GDB_SOLIB_PATH ${ANDROID_APK_BUILD_DIR}/obj/local/${ANDROID_NDK_ABI_NAME}/)

    # generate essential Android Makefiles
    file(WRITE ${ANDROID_APK_BUILD_DIR}/jni/Android.mk "APP_ABI := ${ANDROID_NDK_ABI_NAME}\n")
    file(WRITE ${ANDROID_APK_BUILD_DIR}/jni/Application.mk "APP_ABI := ${ANDROID_NDK_ABI_NAME}\n")

    # create gdb.setup
    get_directory_property(PROJECT_INCLUDES DIRECTORY ${PROJECT_SOURCE_DIR} INCLUDE_DIRECTORIES)
    string(REGEX REPLACE ";" " " PROJECT_INCLUDES "${PROJECT_INCLUDES}")
    file(WRITE ${ANDROID_APK_BUILD_DIR}/libs/${ANDROID_NDK_ABI_NAME}/gdb.setup "set solib-search-path ${GDB_SOLIB_PATH}\n")
    file(APPEND ${ANDROID_APK_BUILD_DIR}/libs/${ANDROID_NDK_ABI_NAME}/gdb.setup "directory ${PROJECT_INCLUDES}\n")

    # copy lib to obj
    add_custom_command(TARGET ${TARGET_NAME}-gdb PRE_BUILD COMMAND ${CMAKE_COMMAND} -E make_directory ${GDB_SOLIB_PATH})
    add_custom_command(TARGET ${TARGET_NAME}-gdb PRE_BUILD COMMAND cp ${TARGET_LOCATION} ${GDB_SOLIB_PATH})

    # strip symbols
    add_custom_command(TARGET ${TARGET_NAME}-gdb PRE_BUILD COMMAND ${CMAKE_STRIP} ${TARGET_LOCATION})
  endif ()

  # use androiddeployqt to create the apk
  add_custom_target(${TARGET_NAME}-apk
    COMMAND ${ANDROID_DEPLOY_QT} --input "${TARGET_NAME}-deployment.json" --output "${ANDROID_APK_OUTPUT_DIR}" --android-platform android-${ANDROID_API_LEVEL} ${ANDROID_DEPLOY_QT_INSTALL} --verbose --deployment bundled "\\$(ARGS)"
    DEPENDS ${TARGET_NAME} ${TARGET_NAME}-copy-res ${TARGET_NAME}-copy-assets ${TARGET_NAME}-copy-java ${TARGET_NAME}-copy-libs ${TARGET_NAME}-gdb
  )

  # rename the APK if the caller asked us to
  if (ANDROID_APK_CUSTOM_NAME)
    add_custom_command(
      TARGET ${TARGET_NAME}-apk
      POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E rename "${ANDROID_APK_OUTPUT_DIR}/bin/QtApp-debug.apk" "${ANDROID_APK_OUTPUT_DIR}/bin/${ANDROID_APK_CUSTOM_NAME}"
    )
  endif ()
endmacro()
cmake/android/android.toolchain.cmake: new file (executable), 1753 lines (diff suppressed because it is too large)
cmake/android/deployment-file.json.in: new file, 13 lines

{
    "qt": "@QT_DIR@",
    "sdk": "@ANDROID_SDK_ROOT@",
    "ndk": "@ANDROID_NDK@",
    "toolchain-prefix": "@ANDROID_TOOLCHAIN_MACHINE_NAME@",
    "tool-prefix": "@ANDROID_TOOLCHAIN_MACHINE_NAME@",
    "toolchain-version": "@ANDROID_COMPILER_VERSION@",
    "ndk-host": "@ANDROID_NDK_HOST_SYSTEM_NAME@",
    "target-architecture": "@ANDROID_ABI@",
    "application-binary": "@EXECUTABLE_DESTINATION_PATH@",
    "android-extra-libs": "@_DEPS@",
    "android-package-source-directory": "@ANDROID_APK_BUILD_DIR@"
}
cmake/android/strings.xml.in: new file, 11 lines

<?xml version='1.0' encoding='utf-8'?>

<!-- IMPORTANT: Do not manually manipulate this automatically generated file, changes will be gone after the next build! -->

<resources>
    <string name="AppDisplayName">${ANDROID_APP_DISPLAY_NAME}</string>

    <string name="ministro_not_found_msg">Can\'t find Ministro service.\nThe application can\'t start.</string>
    <string name="ministro_needed_msg">This application requires Ministro service. Would you like to install it?</string>
    <string name="fatal_error_msg">Your application encountered a fatal error and cannot continue.</string>
</resources>
@@ -13,5 +13,11 @@ macro(AUTO_MTC)
 
   file(GLOB INCLUDE_FILES src/*.h)
 
-  add_custom_command(OUTPUT ${AUTOMTC_SRC} COMMAND mtc -o ${AUTOMTC_SRC} ${INCLUDE_FILES} DEPENDS mtc ${INCLUDE_FILES})
+  if (NOT ANDROID)
+    set(MTC_EXECUTABLE mtc)
+  else ()
+    set(MTC_EXECUTABLE $ENV{MTC_PATH}/mtc)
+  endif ()
+
+  add_custom_command(OUTPUT ${AUTOMTC_SRC} COMMAND ${MTC_EXECUTABLE} -o ${AUTOMTC_SRC} ${INCLUDE_FILES} DEPENDS ${MTC_EXECUTABLE} ${INCLUDE_FILES})
 endmacro()
@@ -16,6 +16,10 @@ macro(HIFI_LIBRARY_SEARCH_HINTS LIBRARY_FOLDER)
     set(${LIBRARY_PREFIX}_SEARCH_DIRS "${${LIBRARY_PREFIX}_ROOT_DIR}")
   endif ()
 
+  if (ANDROID)
+    set(${LIBRARY_PREFIX}_SEARCH_DIRS "${${LIBRARY_PREFIX}_SEARCH_DIRS}" "/${LIBRARY_FOLDER}")
+  endif ()
+
   if (DEFINED ENV{${LIBRARY_PREFIX}_ROOT_DIR})
     set(${LIBRARY_PREFIX}_SEARCH_DIRS "${${LIBRARY_PREFIX}_SEARCH_DIRS}" "$ENV{${LIBRARY_PREFIX}_ROOT_DIR}")
   endif ()
@@ -8,12 +8,6 @@
 #
 
 macro(INCLUDE_GLM)
 
   find_package(GLM REQUIRED)
-  include_directories("${GLM_INCLUDE_DIRS}")
-
-  if (APPLE OR UNIX)
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${GLM_INCLUDE_DIRS}")
-  endif ()
-
+  include_directories(SYSTEM "${GLM_INCLUDE_DIRS}")
 endmacro(INCLUDE_GLM)
@@ -32,5 +32,4 @@ macro(LINK_HIFI_LIBRARIES)
       list(APPEND ${TARGET_NAME}_DEPENDENCY_INCLUDES ${LINKED_TARGET_DEPENDENCY_INCLUDES})
     endif()
   endforeach()
-
 endmacro(LINK_HIFI_LIBRARIES)
@@ -9,14 +9,18 @@
 
 macro(SETUP_HIFI_LIBRARY)
 
   project(${TARGET_NAME})
 
   # grab the implementation and header files
   file(GLOB_RECURSE LIB_SRCS "src/*.h" "src/*.cpp")
-  set(LIB_SRCS ${LIB_SRCS})
+  list(APPEND ${TARGET_NAME}_SRCS ${LIB_SRCS})
 
   # create a library and set the property so it can be referenced later
-  add_library(${TARGET_NAME} ${LIB_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
+  if (${${TARGET_NAME}_SHARED})
+    add_library(${TARGET_NAME} SHARED ${LIB_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
+  else ()
+    add_library(${TARGET_NAME} ${LIB_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
+  endif ()
 
   set(${TARGET_NAME}_DEPENDENCY_QT_MODULES ${ARGN})
   list(APPEND ${TARGET_NAME}_DEPENDENCY_QT_MODULES Core)
@@ -19,6 +19,7 @@ hifi_library_search_hints("glm")
 
 # locate header
 find_path(GLM_INCLUDE_DIR "glm/glm.hpp" HINTS ${GLM_SEARCH_DIRS})
 
 set(GLM_INCLUDE_DIRS "${GLM_INCLUDE_DIR}")
 
 include(FindPackageHandleStandardArgs)
@@ -23,8 +23,8 @@ else (GVERB_INCLUDE_DIRS)
   include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
   hifi_library_search_hints("gverb")
 
-  find_path(GVERB_INCLUDE_DIRS gverb.h PATH_SUFFIXES include HINTS ${GVERB_SEARCH_DIRS})
-  find_path(GVERB_SRC_DIRS gverb.c PATH_SUFFIXES src HINTS ${GVERB_SEARCH_DIRS})
+  find_path(GVERB_INCLUDE_DIRS gverb.h PATH_SUFFIXES include HINTS ${GVERB_SEARCH_DIRS} NO_CMAKE_FIND_ROOT_PATH)
+  find_path(GVERB_SRC_DIRS gverb.c PATH_SUFFIXES src HINTS ${GVERB_SEARCH_DIRS} NO_CMAKE_FIND_ROOT_PATH)
 
   if (GVERB_INCLUDE_DIRS)
     set(GVERB_FOUND TRUE)

@@ -33,7 +33,7 @@ else (GVERB_INCLUDE_DIRS)
   if (GVERB_FOUND)
     message(STATUS "Found Gverb: ${GVERB_INCLUDE_DIRS}")
   else (GVERB_FOUND)
-    message(FATAL_ERROR "Could NOT find Gverb. Read ./interface/externals/gverb/readme.txt")
+    message(FATAL_ERROR "Could NOT find Gverb. Read ./libraries/audio-client/externals/gverb/readme.txt")
   endif (GVERB_FOUND)
 
 endif(GVERB_INCLUDE_DIRS)
@@ -21,42 +21,60 @@
 include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
 hifi_library_search_hints("libovr")
 
-find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})
-find_path(LIBOVR_SRC_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${LIBOVR_SEARCH_DIRS})
-
 include(SelectLibraryConfigurations)
 
-if (APPLE)
-  find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Mac/Debug HINTS ${LIBOVR_SEARCH_DIRS})
-  find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Mac/Release HINTS ${LIBOVR_SEARCH_DIRS})
-  find_library(ApplicationServices ApplicationServices)
-  find_library(IOKit IOKit)
-elseif (UNIX)
-  find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/)
-  find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/)
-
-  if (CMAKE_CL_64)
-    set(LINUX_ARCH_DIR "i386")
-  else()
-    set(LINUX_ARCH_DIR "x86_64")
-  endif()
-
-  find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Linux/Debug/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
-  find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Linux/Release/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
-
-  select_library_configurations(UDEV)
-  select_library_configurations(XINERAMA)
-
-elseif (WIN32)
-  if (MSVC10)
-    find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
-    find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
-  elseif (MSVC12)
-    find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
-    find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
-  endif ()
-  find_package(ATL)
-endif ()
+if (NOT ANDROID)
+  find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})
+  find_path(LIBOVR_SRC_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${LIBOVR_SEARCH_DIRS})
+
+  if (APPLE)
+    find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Mac/Debug HINTS ${LIBOVR_SEARCH_DIRS})
+    find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Mac/Release HINTS ${LIBOVR_SEARCH_DIRS})
+    find_library(ApplicationServices ApplicationServices)
+    find_library(IOKit IOKit)
+  elseif (UNIX)
+    find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/)
+    find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/)
+
+    if (CMAKE_CL_64)
+      set(LINUX_ARCH_DIR "i386")
+    else()
+      set(LINUX_ARCH_DIR "x86_64")
+    endif()
+
+    find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Linux/Debug/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
+    find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Linux/Release/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
+
+    select_library_configurations(UDEV)
+    select_library_configurations(XINERAMA)
+
+  elseif (WIN32)
+    if (MSVC10)
+      find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
+      find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
+    elseif (MSVC12)
+      find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
+      find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
+    endif ()
+    find_package(ATL)
+  endif ()
+else (NOT ANDROID)
+  set(_VRLIB_JNI_DIR "VRLib/jni")
+  set(_VRLIB_LIBS_DIR "VRLib/obj/local/armeabi-v7a")
+
+  find_path(LIBOVR_VRLIB_DIR VRLib.vcxproj PATH_SUFFIXES VRLib HINTS ${LIBOVR_SEARCH_DIRS})
+
+  find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES ${_VRLIB_JNI_DIR}/LibOVR/Include HINTS ${LIBOVR_SEARCH_DIRS})
+  find_path(LIBOVR_SRC_DIR OVR_CAPI.h PATH_SUFFIXES ${_VRLIB_JNI_DIR}/LibOVR/Src HINTS ${LIBOVR_SEARCH_DIRS})
+
+  find_path(MINIZIP_DIR minizip.c PATH_SUFFIXES ${_VRLIB_JNI_DIR}/3rdParty/minizip HINTS ${LIBOVR_SEARCH_DIRS})
+  find_path(JNI_DIR VrCommon.h PATH_SUFFIXES ${_VRLIB_JNI_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
+
+  find_library(LIBOVR_LIBRARY_RELEASE NAMES oculus PATH_SUFFIXES ${_VRLIB_LIBS_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
+  find_library(TURBOJPEG_LIBRARY NAMES jpeg PATH_SUFFIXES 3rdParty/turbojpeg HINTS ${LIBOVR_SEARCH_DIRS})
+endif (NOT ANDROID)
 
 select_library_configurations(LIBOVR)
 set(LIBOVR_LIBRARIES ${LIBOVR_LIBRARY})

@@ -66,6 +84,10 @@ list(APPEND LIBOVR_ARGS_LIST LIBOVR_INCLUDE_DIRS LIBOVR_SRC_DIR LIBOVR_LIBRARY)
 if (APPLE)
   list(APPEND LIBOVR_LIBRARIES ${IOKit} ${ApplicationServices})
   list(APPEND LIBOVR_ARGS_LIST IOKit ApplicationServices)
+elseif (ANDROID)
+
+  list(APPEND LIBOVR_ANDROID_LIBRARIES "-lGLESv3" "-lEGL" "-landroid" "-lOpenMAXAL" "-llog" "-lz" "-lOpenSLES")
+  list(APPEND LIBOVR_ARGS_LIST LIBOVR_ANDROID_LIBRARIES LIBOVR_VRLIB_DIR MINIZIP_DIR JNI_DIR TURBOJPEG_LIBRARY)
 elseif (UNIX)
   list(APPEND LIBOVR_LIBRARIES "${UDEV_LIBRARY}" "${XINERAMA_LIBRARY}")
   list(APPEND LIBOVR_ARGS_LIST UDEV_LIBRARY XINERAMA_LIBRARY)

@@ -75,7 +97,10 @@ elseif (WIN32)
 endif ()
 
 include(FindPackageHandleStandardArgs)
 find_package_handle_standard_args(LibOVR DEFAULT_MSG ${LIBOVR_ARGS_LIST})
 
+if (ANDROID)
+  list(APPEND LIBOVR_INCLUDE_DIRS ${LIBOVR_SRC_DIR} ${MINIZIP_DIR} ${JNI_DIR})
+endif ()
 
 mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES LIBOVR_SEARCH_DIRS)
@@ -52,7 +52,9 @@ else ()
   set(_OPENSSL_ROOT_HINTS_AND_PATHS ${OPENSSL_SEARCH_DIRS})
 endif ()
 
-find_path(OPENSSL_INCLUDE_DIR NAMES openssl/ssl.h HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_INCLUDEDIR} PATH_SUFFIXES include)
+find_path(OPENSSL_INCLUDE_DIR NAMES openssl/ssl.h HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_INCLUDEDIR}
+  PATH_SUFFIXES include
+)
 
 if (WIN32 AND NOT CYGWIN)
   if (MSVC)

@@ -133,7 +135,9 @@ else()
   PATH_SUFFIXES lib
 )
 
-find_library(OPENSSL_CRYPTO_LIBRARY NAMES crypto HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_LIBDIR} PATH_SUFFIXES lib)
+find_library(OPENSSL_CRYPTO_LIBRARY NAMES crypto HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_LIBDIR}
+  PATH_SUFFIXES lib
+)
 
 mark_as_advanced(OPENSSL_CRYPTO_LIBRARY OPENSSL_SSL_LIBRARY)

@@ -179,8 +183,8 @@ endfunction()
 if (OPENSSL_INCLUDE_DIR)
   if(OPENSSL_INCLUDE_DIR AND EXISTS "${OPENSSL_INCLUDE_DIR}/openssl/opensslv.h")
     file(STRINGS "${OPENSSL_INCLUDE_DIR}/openssl/opensslv.h" openssl_version_str
-      REGEX "^#define[\t ]+OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])+.*")
+      REGEX "^#[ ]?define[\t ]+OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])+.*")
 
     # The version number is encoded as 0xMNNFFPPS: major minor fix patch status
     # The status gives if this is a developer or prerelease and is ignored here.
     # Major, minor, and fix directly translate into the version numbers shown in
@@ -1,5 +1,5 @@
 #
-# FindRtMidd.cmake
+# FindRtMidi.cmake
 #
 # Try to find the RtMidi library
 #
cmake/modules/FindSoxr.cmake: new file, 30 lines

#
#  FindSoxr.cmake
#
#  Try to find the libsoxr resampling library
#
#  You can provide a LIBSOXR_ROOT_DIR which contains lib and include directories
#
#  Once done this will define
#
#  SOXR_FOUND - system found libsoxr
#  SOXR_INCLUDE_DIRS - the libsoxr include directory
#  SOXR_LIBRARIES - link to this to use libsoxr
#
#  Created on 1/22/2015 by Stephen Birarda
#  Copyright 2015 High Fidelity, Inc.
#
#  Distributed under the Apache License, Version 2.0.
#  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("soxr")

find_path(SOXR_INCLUDE_DIRS soxr.h PATH_SUFFIXES include HINTS ${SOXR_SEARCH_DIRS})
find_library(SOXR_LIBRARIES NAMES soxr PATH_SUFFIXES lib HINTS ${SOXR_SEARCH_DIRS})

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(SOXR DEFAULT_MSG SOXR_INCLUDE_DIRS SOXR_LIBRARIES)

mark_as_advanced(SOXR_INCLUDE_DIRS SOXR_LIBRARIES SOXR_SEARCH_DIRS)
@@ -28,7 +28,7 @@ set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
 
 if (APPLE)
   set(_TBB_LIB_DIR "lib/libc++")
-elseif (UNIX)
+elseif (UNIX AND NOT ANDROID)
   if(CMAKE_SIZEOF_VOID_P EQUAL 8)
     set(_TBB_ARCH_DIR "intel64")
   else()

@@ -56,6 +56,13 @@ elseif (WIN32)
   endif()
 
   set(_TBB_LIB_DIR "lib/${_TBB_ARCH_DIR}/vc12")
+elseif (ANDROID)
+  set(_TBB_DEFAULT_INSTALL_DIR "/tbb")
+  set(_TBB_LIB_NAME "tbb")
+  set(_TBB_LIB_DIR "lib")
+  set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
+  set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
+  set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
 endif ()
 
 find_library(TBB_LIBRARY_DEBUG NAMES ${_TBB_LIB_NAME}_debug PATH_SUFFIXES ${_TBB_LIB_DIR} HINTS ${TBB_SEARCH_DIRS})
@ -917,10 +917,10 @@ void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const Hif
}
});
}

// always write the last broadcastPacket
nodeList->writeDatagram(broadcastPacket, node, senderSockAddr);
}

// always write the last broadcastPacket
nodeList->writeDatagram(broadcastPacket, node, senderSockAddr);
}

void DomainServer::readAvailableDatagrams() {
41
examples/entityScripts/portal.js
Normal file
@ -0,0 +1,41 @@
(function(){
var teleport;
var portalDestination;

function playSound() {
Audio.playSound(teleport, { volume: 0.40, localOnly: true });
};

this.preload = function(entityID) {
teleport = SoundCache.getSound("http://s3.amazonaws.com/hifi-public/birarda/teleport.raw");

var properties = Entities.getEntityProperties(entityID);
portalDestination = properties.userData;

print("The portal destination is " + portalDestination);
}

this.enterEntity = function(entityID) {
if (portalDestination.length > 0) {
print("Teleporting to hifi://" + portalDestination);
Window.location = "hifi://" + portalDestination;
}

};

this.leaveEntity = function(entityID) {
Entities.editEntity(entityID, {
animationURL: "http://hifi-public.s3.amazonaws.com/models/content/phonebooth.fbx",
animationSettings: '{ "frameIndex": 1, "running": false }'
});

playSound();
};

this.hoverEnterEntity = function(entityID) {
Entities.editEntity(entityID, {
animationURL: "http://hifi-public.s3.amazonaws.com/models/content/phonebooth.fbx",
animationSettings: '{ "fps": 24, "firstFrame": 1, "lastFrame": 25, "frameIndex": 1, "running": true, "hold": true }'
});
};
})
88
gvr-interface/CMakeLists.txt
Normal file
@ -0,0 +1,88 @@
set(TARGET_NAME gvr-interface)

if (ANDROID)
set(ANDROID_APK_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/apk-build")
set(ANDROID_APK_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/apk")

set(ANDROID_SDK_ROOT $ENV{ANDROID_HOME})
set(ANDROID_APP_DISPLAY_NAME Interface)
set(ANDROID_API_LEVEL 19)
set(ANDROID_APK_PACKAGE io.highfidelity.gvrinterface)
set(ANDROID_ACTIVITY_NAME io.highfidelity.gvrinterface.InterfaceActivity)
set(ANDROID_APK_VERSION_NAME "0.1")
set(ANDROID_APK_VERSION_CODE 1)
set(ANDROID_APK_FULLSCREEN TRUE)
set(ANDROID_DEPLOY_QT_INSTALL "--install")

set(BUILD_SHARED_LIBS ON)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${ANDROID_APK_OUTPUT_DIR}/libs/${ANDROID_ABI}")

setup_hifi_library(Gui Widgets AndroidExtras)
else ()
setup_hifi_project(Gui Widgets)
endif ()

include_directories(${Qt5Gui_PRIVATE_INCLUDE_DIRS})

include_glm()

link_hifi_libraries(shared networking audio-client avatars)
include_dependency_includes()

if (ANDROID)
find_package(LibOVR)

if (LIBOVR_FOUND)
add_definitions(-DHAVE_LIBOVR)
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES} ${LIBOVR_ANDROID_LIBRARIES} ${TURBOJPEG_LIBRARY})
include_directories(SYSTEM ${LIBOVR_INCLUDE_DIRS})

# we need VRLib, so add a project.properties to our apk build folder that says that
file(RELATIVE_PATH RELATIVE_VRLIB_PATH ${ANDROID_APK_OUTPUT_DIR} "${LIBOVR_VRLIB_DIR}")
file(WRITE "${ANDROID_APK_BUILD_DIR}/project.properties" "android.library.reference.1=${RELATIVE_VRLIB_PATH}")

list(APPEND IGNORE_COPY_LIBS ${LIBOVR_ANDROID_LIBRARIES})
endif ()

endif ()

# the presence of a HOCKEY_APP_ID means we are making a beta build
if (ANDROID AND HOCKEY_APP_ID)
set(HOCKEY_APP_ENABLED true)
set(HOCKEY_APP_ACTIVITY "<activity android:name='net.hockeyapp.android.UpdateActivity' />\n")
set(ANDROID_ACTIVITY_NAME io.highfidelity.gvrinterface.InterfaceBetaActivity)
set(ANDROID_DEPLOY_QT_INSTALL "")
set(ANDROID_APK_CUSTOM_NAME "Interface-beta.apk")

# set the ANDROID_APK_VERSION_CODE to the number of git commits
execute_process(
COMMAND git rev-list --first-parent --count HEAD
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE GIT_COMMIT_COUNT
OUTPUT_STRIP_TRAILING_WHITESPACE
)

set(ANDROID_APK_VERSION_CODE ${GIT_COMMIT_COUNT})

configure_file("${CMAKE_CURRENT_SOURCE_DIR}/templates/InterfaceBetaActivity.java.in" "${ANDROID_APK_BUILD_DIR}/src/io/highfidelity/gvrinterface/InterfaceBetaActivity.java")
elseif (ANDROID)
set(HOCKEY_APP_ENABLED false)
endif ()

if (ANDROID)

set(HIFI_URL_INTENT "<intent-filter>\
\n <action android:name='android.intent.action.VIEW' />\
\n <category android:name='android.intent.category.DEFAULT' />\
\n <category android:name='android.intent.category.BROWSABLE' />\
\n <data android:scheme='hifi' />\
\n </intent-filter>"
)

set(ANDROID_EXTRA_APPLICATION_XML "${HOCKEY_APP_ACTIVITY}")
set(ANDROID_EXTRA_ACTIVITY_XML "${HIFI_URL_INTENT}")

configure_file("${CMAKE_CURRENT_SOURCE_DIR}/templates/hockeyapp.xml.in" "${ANDROID_APK_BUILD_DIR}/res/values/hockeyapp.xml")
qt_create_apk()

endif (ANDROID)
BIN
gvr-interface/res/drawable/icon.png
Normal file
Binary file not shown. (new image, 9.7 KiB)
75
gvr-interface/src/Client.cpp
Normal file
@ -0,0 +1,75 @@
//
// Client.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QtCore/QThread>

#include <AccountManager.h>
#include <AddressManager.h>
#include <HifiSockAddr.h>
#include <NodeList.h>
#include <PacketHeaders.h>

#include "Client.h"

Client::Client(QObject* parent) :
QObject(parent)
{
// we need to make sure that required dependencies are created
DependencyManager::set<AddressManager>();

setupNetworking();
}

void Client::setupNetworking() {
// once Application order of instantiation is fixed this should be done from AccountManager
AccountManager::getInstance().setAuthURL(DEFAULT_NODE_AUTH_URL);

// setup the NodeList for this client
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
auto nodeList = DependencyManager::set<NodeList>(NodeType::Agent, 0);

// while datagram processing remains simple for targets using Client, we'll handle datagrams
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead, this, &Client::processDatagrams);

// every second, ask the NodeList to check in with the domain server
QTimer* domainCheckInTimer = new QTimer(this);
domainCheckInTimer->setInterval(DOMAIN_SERVER_CHECK_IN_MSECS);
connect(domainCheckInTimer, &QTimer::timeout, nodeList.data(), &NodeList::sendDomainServerCheckIn);

// TODO: once the Client knows its Address on start-up we should be able to immediately send a check in here
domainCheckInTimer->start();

// handle the case where the domain stops talking to us
// TODO: can we just have the nodelist do this when it sets up? Is there a user of the NodeList that wouldn't want this?
connect(nodeList.data(), &NodeList::limitOfSilentDomainCheckInsReached, nodeList.data(), &NodeList::reset);
}

void Client::processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket) {
DependencyManager::get<NodeList>()->processNodeData(senderSockAddr, incomingPacket);
}

void Client::processDatagrams() {
HifiSockAddr senderSockAddr;

static QByteArray incomingPacket;

auto nodeList = DependencyManager::get<NodeList>();

while (DependencyManager::get<NodeList>()->getNodeSocket().hasPendingDatagrams()) {
incomingPacket.resize(nodeList->getNodeSocket().pendingDatagramSize());
nodeList->getNodeSocket().readDatagram(incomingPacket.data(), incomingPacket.size(),
senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());

if (nodeList->packetVersionAndHashMatch(incomingPacket)) {
processVerifiedPacket(senderSockAddr, incomingPacket);
}
}
}
32
gvr-interface/src/Client.h
Normal file
@ -0,0 +1,32 @@
//
// Client.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_Client_h
#define hifi_Client_h

#include <QtCore/QObject>

#include <HifiSockAddr.h>

class QThread;

class Client : public QObject {
Q_OBJECT
public:
Client(QObject* parent = 0);
protected:
void setupNetworking();
virtual void processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket);
private slots:
void processDatagrams();
};

#endif // hifi_Client_h
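Editor's note: Client is the networking base class that the GVR targets below build on. As a minimal sketch (not part of this commit; the subclass name is hypothetical), a target could specialize packet handling by overriding the protected virtual declared above and deferring to the base implementation for anything it does not handle:

#include "Client.h"

// Hypothetical example subclass -- illustrative only.
class LoggingClient : public Client {
public:
    LoggingClient(QObject* parent = 0) : Client(parent) {}
protected:
    virtual void processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket) {
        // inspect or log the packet here, then fall back to the base behavior
        Client::processVerifiedPacket(senderSockAddr, incomingPacket);
    }
};

RenderingClient, added later in this commit, follows exactly this pattern for audio and avatar packets.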
184
gvr-interface/src/GVRInterface.cpp
Normal file
@ -0,0 +1,184 @@
//
// GVRInterface.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 11/18/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifdef ANDROID

#include <jni.h>

#include <qpa/qplatformnativeinterface.h>
#include <QtAndroidExtras/QAndroidJniEnvironment>
#include <QtAndroidExtras/QAndroidJniObject>

#ifdef HAVE_LIBOVR

#include <KeyState.h>
#include <VrApi/VrApi.h>

#endif
#endif

#include <QtCore/QTimer>
#include <QtGui/QKeyEvent>
#include <QtWidgets/QMenuBar>

#include "GVRMainWindow.h"
#include "RenderingClient.h"

#include "GVRInterface.h"

static QString launchURLString = QString();

#ifdef ANDROID

extern "C" {

JNIEXPORT void Java_io_highfidelity_gvrinterface_InterfaceActivity_handleHifiURL(JNIEnv *jni, jclass clazz, jstring hifiURLString) {
launchURLString = QAndroidJniObject(hifiURLString).toString();
}

}

#endif

GVRInterface::GVRInterface(int argc, char* argv[]) :
QApplication(argc, argv),
_mainWindow(NULL),
_inVRMode(false)
{
setApplicationName("gvr-interface");
setOrganizationName("highfidelity");
setOrganizationDomain("io");

if (!launchURLString.isEmpty()) {
// did we get launched with a lookup URL? If so it is time to give that to the AddressManager
qDebug() << "We were opened via a hifi URL -" << launchURLString;
}

_client = new RenderingClient(this, launchURLString);

launchURLString = QString();

connect(this, &QGuiApplication::applicationStateChanged, this, &GVRInterface::handleApplicationStateChange);

#if defined(ANDROID) && defined(HAVE_LIBOVR)
QAndroidJniEnvironment jniEnv;

QPlatformNativeInterface* interface = QApplication::platformNativeInterface();
jobject activity = (jobject) interface->nativeResourceForIntegration("QtActivity");

ovr_RegisterHmtReceivers(&*jniEnv, activity);

// PLATFORMACTIVITY_REMOVAL: Temp workaround for PlatformActivity being
// stripped from UnityPlugin. Alternate is to use LOCAL_WHOLE_STATIC_LIBRARIES
// but that increases the size of the plugin by ~1MiB
OVR::linkerPlatformActivity++;
#endif

// call our idle function whenever we can
QTimer* idleTimer = new QTimer(this);
connect(idleTimer, &QTimer::timeout, this, &GVRInterface::idle);
idleTimer->start(0);
}

void GVRInterface::idle() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
if (!_inVRMode && ovr_IsHeadsetDocked()) {
qDebug() << "The headset just got docked - enter VR mode.";
enterVRMode();
} else if (_inVRMode) {

if (ovr_IsHeadsetDocked()) {
static int counter = 0;

// Get the latest head tracking state, predicted ahead to the midpoint of the time
// it will be displayed. It will always be corrected to the real values by
// time warp, but the closer we get, the less black will be pulled in at the edges.
const double now = ovr_GetTimeInSeconds();
static double prev;
const double rawDelta = now - prev;
prev = now;
const double clampedPrediction = std::min( 0.1, rawDelta * 2);
ovrSensorState sensor = ovrHmd_GetSensorState(OvrHmd, now + clampedPrediction, true );

auto ovrOrientation = sensor.Predicted.Pose.Orientation;
glm::quat newOrientation(ovrOrientation.w, ovrOrientation.x, ovrOrientation.y, ovrOrientation.z);
_client->setOrientation(newOrientation);

if (counter++ % 100000 == 0) {
qDebug() << "GetSensorState in frame" << counter << "-"
<< ovrOrientation.x << ovrOrientation.y << ovrOrientation.z << ovrOrientation.w;
}
} else {
qDebug() << "The headset was undocked - leaving VR mode.";

leaveVRMode();
}
}

OVR::KeyState& backKeyState = _mainWindow->getBackKeyState();
auto backEvent = backKeyState.Update(ovr_GetTimeInSeconds());

if (backEvent == OVR::KeyState::KEY_EVENT_LONG_PRESS) {
qDebug() << "Attemping to start the Platform UI Activity.";
ovr_StartPackageActivity(_ovr, PUI_CLASS_NAME, PUI_GLOBAL_MENU);
} else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP || backEvent == OVR::KeyState::KEY_EVENT_SHORT_PRESS) {
qDebug() << "Got an event we should cancel for!";
} else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP) {
qDebug() << "The button is down!";
}
#endif
}

void GVRInterface::handleApplicationStateChange(Qt::ApplicationState state) {
switch(state) {
case Qt::ApplicationActive:
qDebug() << "The application is active.";
break;
case Qt::ApplicationSuspended:
qDebug() << "The application is being suspended.";
break;
default:
break;
}
}

void GVRInterface::enterVRMode() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
// Default vrModeParms
ovrModeParms vrModeParms;
vrModeParms.AsynchronousTimeWarp = true;
vrModeParms.AllowPowerSave = true;
vrModeParms.DistortionFileName = NULL;
vrModeParms.EnableImageServer = false;
vrModeParms.CpuLevel = 2;
vrModeParms.GpuLevel = 2;
vrModeParms.GameThreadTid = 0;

QAndroidJniEnvironment jniEnv;

QPlatformNativeInterface* interface = QApplication::platformNativeInterface();
jobject activity = (jobject) interface->nativeResourceForIntegration("QtActivity");

vrModeParms.ActivityObject = activity;

ovrHmdInfo hmdInfo;
_ovr = ovr_EnterVrMode(vrModeParms, &hmdInfo);

_inVRMode = true;
#endif
}

void GVRInterface::leaveVRMode() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
ovr_LeaveVrMode(_ovr);
_inVRMode = false;
#endif
}
71
gvr-interface/src/GVRInterface.h
Normal file
@ -0,0 +1,71 @@
//
// GVRInterface.h
// gvr-interface/src
//
// Created by Stephen Birarda on 11/18/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_GVRInterface_h
#define hifi_GVRInterface_h

#include <QtWidgets/QApplication>

#if defined(ANDROID) && defined(HAVE_LIBOVR)
class ovrMobile;
class ovrHmdInfo;

// This is set by JNI_OnLoad() when the .so is initially loaded.
// Must use to attach each thread that will use JNI:
namespace OVR {
// PLATFORMACTIVITY_REMOVAL: Temp workaround for PlatformActivity being
// stripped from UnityPlugin. Alternate is to use LOCAL_WHOLE_STATIC_LIBRARIES
// but that increases the size of the plugin by ~1MiB
extern int linkerPlatformActivity;
}

#endif

class GVRMainWindow;
class RenderingClient;
class QKeyEvent;

#if defined(qApp)
#undef qApp
#endif
#define qApp (static_cast<GVRInterface*>(QApplication::instance()))

class GVRInterface : public QApplication {
Q_OBJECT
public:
GVRInterface(int argc, char* argv[]);
RenderingClient* getClient() { return _client; }

void setMainWindow(GVRMainWindow* mainWindow) { _mainWindow = mainWindow; }

protected:
void keyPressEvent(QKeyEvent* event);

private slots:
void handleApplicationStateChange(Qt::ApplicationState state);
void idle();
private:

void enterVRMode();
void leaveVRMode();

#if defined(ANDROID) && defined(HAVE_LIBOVR)
ovrMobile* _ovr;
ovrHmdInfo* _hmdInfo;
#endif

GVRMainWindow* _mainWindow;

RenderingClient* _client;
bool _inVRMode;
};

#endif // hifi_GVRInterface_h
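Editor's note: the redefined qApp macro above gives the rest of gvr-interface typed access to the running application and its RenderingClient. A minimal usage sketch (illustrative only, not part of this commit; the helper name is hypothetical):

#include <glm/gtc/quaternion.hpp>

#include "GVRInterface.h"
#include "RenderingClient.h"

// Hypothetical helper -- shows how other gvr-interface code can reach the
// client through the qApp macro defined in GVRInterface.h.
static void pushOrientation(const glm::quat& orientation) {
    qApp->getClient()->setOrientation(orientation);
}

GVRInterface.cpp uses this same pattern directly via _client when it feeds head-tracking orientation from the Oculus sensor state.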
176
gvr-interface/src/GVRMainWindow.cpp
Normal file
@ -0,0 +1,176 @@
//
// GVRMainWindow.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QtGui/QKeyEvent>
#include <QtWidgets/QApplication>
#include <QtWidgets/QInputDialog>
#include <QtWidgets/QLabel>
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QMenuBar>
#include <QtWidgets/QMessageBox>
#include <QtWidgets/QVBoxLayout>

#ifndef ANDROID

#include <QtWidgets/QDesktopWidget>

#elif defined(HAVE_LIBOVR)

#include <OVR_CAPI.h>

const float LIBOVR_DOUBLE_TAP_DURATION = 0.25f;
const float LIBOVR_LONG_PRESS_DURATION = 0.75f;

#endif

#include <AddressManager.h>

#include "InterfaceView.h"
#include "LoginDialog.h"
#include "RenderingClient.h"

#include "GVRMainWindow.h"


GVRMainWindow::GVRMainWindow(QWidget* parent) :
QMainWindow(parent),
#if defined(ANDROID) && defined(HAVE_LIBOVR)
_backKeyState(LIBOVR_DOUBLE_TAP_DURATION, LIBOVR_LONG_PRESS_DURATION),
_wasBackKeyDown(false),
#endif
_mainLayout(NULL),
_menuBar(NULL),
_loginAction(NULL)
{

#ifndef ANDROID
const int NOTE_4_WIDTH = 2560;
const int NOTE_4_HEIGHT = 1440;
setFixedSize(NOTE_4_WIDTH / 2, NOTE_4_HEIGHT / 2);
#endif

setupMenuBar();

QWidget* baseWidget = new QWidget(this);

// setup a layout so we can vertically align to top
_mainLayout = new QVBoxLayout(baseWidget);
_mainLayout->setAlignment(Qt::AlignTop);

// set the layout on the base widget
baseWidget->setLayout(_mainLayout);

setCentralWidget(baseWidget);

// add the interface view
new InterfaceView(baseWidget);
}

GVRMainWindow::~GVRMainWindow() {
delete _menuBar;
}

void GVRMainWindow::keyPressEvent(QKeyEvent* event) {
#ifdef ANDROID
if (event->key() == Qt::Key_Back) {
// got the Android back key, hand off to OVR KeyState
_backKeyState.HandleEvent(ovr_GetTimeInSeconds(), true, (_wasBackKeyDown ? 1 : 0));
_wasBackKeyDown = true;
return;
}
#endif
QWidget::keyPressEvent(event);
}

void GVRMainWindow::keyReleaseEvent(QKeyEvent* event) {
#ifdef ANDROID
if (event->key() == Qt::Key_Back) {
// release on the Android back key, hand off to OVR KeyState
_backKeyState.HandleEvent(ovr_GetTimeInSeconds(), false, 0);
_wasBackKeyDown = false;
}
#endif
QWidget::keyReleaseEvent(event);
}

void GVRMainWindow::setupMenuBar() {
QMenu* fileMenu = new QMenu("File");
QMenu* helpMenu = new QMenu("Help");

_menuBar = new QMenuBar(0);

_menuBar->addMenu(fileMenu);
_menuBar->addMenu(helpMenu);

QAction* goToAddress = new QAction("Go to Address", fileMenu);
connect(goToAddress, &QAction::triggered, this, &GVRMainWindow::showAddressBar);
fileMenu->addAction(goToAddress);

_loginAction = new QAction("Login", fileMenu);
fileMenu->addAction(_loginAction);

// change the login action depending on our logged in/out state
AccountManager& accountManager = AccountManager::getInstance();
connect(&accountManager, &AccountManager::loginComplete, this, &GVRMainWindow::refreshLoginAction);
connect(&accountManager, &AccountManager::logoutComplete, this, &GVRMainWindow::refreshLoginAction);

// refresh the state now
refreshLoginAction();

QAction* aboutQt = new QAction("About Qt", helpMenu);
connect(aboutQt, &QAction::triggered, qApp, &QApplication::aboutQt);
helpMenu->addAction(aboutQt);

setMenuBar(_menuBar);
}

void GVRMainWindow::showAddressBar() {
// setup the address QInputDialog
QInputDialog* addressDialog = new QInputDialog(this);
addressDialog->setLabelText("Address");

// add the address dialog to the main layout
_mainLayout->addWidget(addressDialog);

connect(addressDialog, &QInputDialog::textValueSelected,
DependencyManager::get<AddressManager>().data(), &AddressManager::handleLookupString);
}

void GVRMainWindow::showLoginDialog() {
LoginDialog* loginDialog = new LoginDialog(this);

// have the acccount manager handle credentials from LoginDialog
AccountManager& accountManager = AccountManager::getInstance();
connect(loginDialog, &LoginDialog::credentialsEntered, &accountManager, &AccountManager::requestAccessToken);
connect(&accountManager, &AccountManager::loginFailed, this, &GVRMainWindow::showLoginFailure);

_mainLayout->addWidget(loginDialog);
}

void GVRMainWindow::showLoginFailure() {
QMessageBox::warning(this, "Login Failed",
"Could not log in with that username and password. Please try again!");
}

void GVRMainWindow::refreshLoginAction() {
AccountManager& accountManager = AccountManager::getInstance();
disconnect(_loginAction, &QAction::triggered, &accountManager, 0);

if (accountManager.isLoggedIn()) {
_loginAction->setText("Logout");
connect(_loginAction, &QAction::triggered, &accountManager, &AccountManager::logout);
} else {
_loginAction->setText("Login");
connect(_loginAction, &QAction::triggered, this, &GVRMainWindow::showLoginDialog);
}

}
58
gvr-interface/src/GVRMainWindow.h
Normal file
@ -0,0 +1,58 @@
//
// GVRMainWindow.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_GVRMainWindow_h
#define hifi_GVRMainWindow_h

#include <QtWidgets/QMainWindow>

#if defined(ANDROID) && defined(HAVE_LIBOVR)
#include <KeyState.h>
#endif

class QKeyEvent;
class QMenuBar;
class QVBoxLayout;

class GVRMainWindow : public QMainWindow {
Q_OBJECT
public:
GVRMainWindow(QWidget* parent = 0);
~GVRMainWindow();
public slots:
void showAddressBar();
void showLoginDialog();

void showLoginFailure();

#if defined(ANDROID) && defined(HAVE_LIBOVR)
OVR::KeyState& getBackKeyState() { return _backKeyState; }
#endif

protected:
void keyPressEvent(QKeyEvent* event);
void keyReleaseEvent(QKeyEvent* event);
private slots:
void refreshLoginAction();
private:
void setupMenuBar();

#if defined(ANDROID) && defined(HAVE_LIBOVR)
OVR::KeyState _backKeyState;
bool _wasBackKeyDown;
#endif

QVBoxLayout* _mainLayout;
QMenuBar* _menuBar;
QAction* _loginAction;
};

#endif // hifi_GVRMainWindow_h
18
gvr-interface/src/InterfaceView.cpp
Normal file
@ -0,0 +1,18 @@
//
// InterfaceView.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/28/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "InterfaceView.h"

InterfaceView::InterfaceView(QWidget* parent, Qt::WindowFlags flags) :
QOpenGLWidget(parent, flags)
{

}
23
gvr-interface/src/InterfaceView.h
Normal file
@ -0,0 +1,23 @@
//
// InterfaceView.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/28/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_InterfaceView_h
#define hifi_InterfaceView_h

#include <QtWidgets/QOpenGLWidget>

class InterfaceView : public QOpenGLWidget {
Q_OBJECT
public:
InterfaceView(QWidget* parent = 0, Qt::WindowFlags flags = 0);
};

#endif // hifi_InterfaceView_h
69
gvr-interface/src/LoginDialog.cpp
Normal file
@ -0,0 +1,69 @@
//
// LoginDialog.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 2015-02-03.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QtWidgets/QDialogButtonBox>
#include <QtWidgets/QGridLayout>
#include <QtWidgets/QLabel>
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QPushButton>

#include "LoginDialog.h"

LoginDialog::LoginDialog(QWidget* parent) :
QDialog(parent)
{
setupGUI();
setWindowTitle("Login");
setModal(true);
}

void LoginDialog::setupGUI() {
// setup a grid layout
QGridLayout* formGridLayout = new QGridLayout(this);

_usernameLineEdit = new QLineEdit(this);

QLabel* usernameLabel = new QLabel(this);
usernameLabel->setText("Username");
usernameLabel->setBuddy(_usernameLineEdit);

formGridLayout->addWidget(usernameLabel, 0, 0);
formGridLayout->addWidget(_usernameLineEdit, 1, 0);

_passwordLineEdit = new QLineEdit(this);
_passwordLineEdit->setEchoMode(QLineEdit::Password);

QLabel* passwordLabel = new QLabel(this);
passwordLabel->setText("Password");
passwordLabel->setBuddy(_passwordLineEdit);

formGridLayout->addWidget(passwordLabel, 2, 0);
formGridLayout->addWidget(_passwordLineEdit, 3, 0);

QDialogButtonBox* buttons = new QDialogButtonBox(this);

QPushButton* okButton = buttons->addButton(QDialogButtonBox::Ok);
QPushButton* cancelButton = buttons->addButton(QDialogButtonBox::Cancel);

okButton->setText("Login");

connect(cancelButton, &QPushButton::clicked, this, &QDialog::close);
connect(okButton, &QPushButton::clicked, this, &LoginDialog::loginButtonClicked);

formGridLayout->addWidget(buttons, 4, 0, 1, 2);

setLayout(formGridLayout);
}

void LoginDialog::loginButtonClicked() {
emit credentialsEntered(_usernameLineEdit->text(), _passwordLineEdit->text());
close();
}
34
gvr-interface/src/LoginDialog.h
Normal file
@ -0,0 +1,34 @@
//
// LoginDialog.h
// gvr-interface/src
//
// Created by Stephen Birarda on 2015-02-03.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_LoginDialog_h
#define hifi_LoginDialog_h

#include <QtWidgets/QDialog>

class QLineEdit;

class LoginDialog : public QDialog {
Q_OBJECT
public:
LoginDialog(QWidget* parent = 0);
signals:
void credentialsEntered(const QString& username, const QString& password);
private slots:
void loginButtonClicked();
private:
void setupGUI();

QLineEdit* _usernameLineEdit;
QLineEdit* _passwordLineEdit;
};

#endif // hifi_LoginDialog_h
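Editor's note: the dialog only emits credentialsEntered; callers decide what to do with the credentials. GVRMainWindow::showLoginDialog (above) connects it to AccountManager::requestAccessToken. A minimal sketch of that wiring (illustrative only; the function name and parentWidget parameter are assumptions, not part of this commit):

#include <AccountManager.h>
#include "LoginDialog.h"

// Sketch of the signal/slot wiring used by GVRMainWindow::showLoginDialog.
// 'parentWidget' is an assumed caller-owned QWidget.
void showLogin(QWidget* parentWidget) {
    LoginDialog* loginDialog = new LoginDialog(parentWidget);
    QObject::connect(loginDialog, &LoginDialog::credentialsEntered,
                     &AccountManager::getInstance(), &AccountManager::requestAccessToken);
    loginDialog->show();
}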
167
gvr-interface/src/RenderingClient.cpp
Normal file
@ -0,0 +1,167 @@
//
// RenderingClient.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QtCore/QThread>
#include <QtWidgets/QInputDialog>

#include <AddressManager.h>
#include <AudioClient.h>
#include <AvatarHashMap.h>
#include <NodeList.h>

#include "RenderingClient.h"

RenderingClient* RenderingClient::_instance = NULL;

RenderingClient::RenderingClient(QObject *parent, const QString& launchURLString) :
Client(parent)
{
_instance = this;

// connect to AddressManager and pass it the launch URL, if we have one
auto addressManager = DependencyManager::get<AddressManager>();
connect(addressManager.data(), &AddressManager::locationChangeRequired, this, &RenderingClient::goToLocation);
addressManager->loadSettings(launchURLString);

// tell the NodeList which node types all rendering clients will want to know about
DependencyManager::get<NodeList>()->addSetOfNodeTypesToNodeInterestSet(NodeSet() << NodeType::AudioMixer << NodeType::AvatarMixer);

DependencyManager::set<AvatarHashMap>();

// get our audio client setup on its own thread
QThread* audioThread = new QThread(this);
auto audioClient = DependencyManager::set<AudioClient>();

audioClient->setPositionGetter(getPositionForAudio);
audioClient->setOrientationGetter(getOrientationForAudio);

audioClient->moveToThread(audioThread);
connect(audioThread, &QThread::started, audioClient.data(), &AudioClient::start);

audioThread->start();


connect(&_avatarTimer, &QTimer::timeout, this, &RenderingClient::sendAvatarPacket);
_avatarTimer.setInterval(16); // 60 FPS
_avatarTimer.start();
_fakeAvatar.setDisplayName("GearVR");
_fakeAvatar.setFaceModelURL(QUrl(DEFAULT_HEAD_MODEL_URL));
_fakeAvatar.setSkeletonModelURL(QUrl(DEFAULT_BODY_MODEL_URL));
_fakeAvatar.toByteArray(); // Creates HeadData
}

void RenderingClient::sendAvatarPacket() {
_fakeAvatar.setPosition(_position);
_fakeAvatar.setHeadOrientation(_orientation);

QByteArray packet = byteArrayWithPopulatedHeader(PacketTypeAvatarData);
packet.append(_fakeAvatar.toByteArray());
DependencyManager::get<NodeList>()->broadcastToNodes(packet, NodeSet() << NodeType::AvatarMixer);
_fakeAvatar.sendIdentityPacket();
}

RenderingClient::~RenderingClient() {
auto audioClient = DependencyManager::get<AudioClient>();

// stop the audio client
QMetaObject::invokeMethod(audioClient.data(), "stop", Qt::BlockingQueuedConnection);

// ask the audio thread to quit and wait until it is done
audioClient->thread()->quit();
audioClient->thread()->wait();
}

void RenderingClient::processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket) {
auto nodeList = DependencyManager::get<NodeList>();
PacketType incomingType = packetTypeForPacket(incomingPacket);

switch (incomingType) {
case PacketTypeAudioEnvironment:
case PacketTypeAudioStreamStats:
case PacketTypeMixedAudio:
case PacketTypeSilentAudioFrame: {

if (incomingType == PacketTypeAudioStreamStats) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "parseAudioStreamStatsPacket",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
} else if (incomingType == PacketTypeAudioEnvironment) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "parseAudioEnvironmentData",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
} else {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "addReceivedAudioToStream",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
}

// update having heard from the audio-mixer and record the bytes received
SharedNodePointer audioMixer = nodeList->sendingNodeForPacket(incomingPacket);

if (audioMixer) {
audioMixer->setLastHeardMicrostamp(usecTimestampNow());
}

break;
}
case PacketTypeBulkAvatarData:
case PacketTypeKillAvatar:
case PacketTypeAvatarIdentity:
case PacketTypeAvatarBillboard: {
// update having heard from the avatar-mixer and record the bytes received
SharedNodePointer avatarMixer = nodeList->sendingNodeForPacket(incomingPacket);

if (avatarMixer) {
avatarMixer->setLastHeardMicrostamp(usecTimestampNow());

QMetaObject::invokeMethod(DependencyManager::get<AvatarHashMap>().data(),
"processAvatarMixerDatagram",
Q_ARG(const QByteArray&, incomingPacket),
Q_ARG(const QWeakPointer<Node>&, avatarMixer));
}
break;
}
default:
Client::processVerifiedPacket(senderSockAddr, incomingPacket);
break;
}
}

void RenderingClient::goToLocation(const glm::vec3& newPosition,
bool hasOrientationChange, const glm::quat& newOrientation,
bool shouldFaceLocation) {
qDebug().nospace() << "RenderingClient goToLocation - moving to " << newPosition.x << ", "
<< newPosition.y << ", " << newPosition.z;

glm::vec3 shiftedPosition = newPosition;

if (hasOrientationChange) {
qDebug().nospace() << "RenderingClient goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;

// orient the user to face the target
glm::quat quatOrientation = newOrientation;

if (shouldFaceLocation) {

quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));

// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
shiftedPosition = newPosition - quatOrientation * glm::vec3( 0.0f, 0.0f,-1.0f) * DISTANCE_TO_USER;
}

_orientation = quatOrientation;
}

_position = shiftedPosition;

}
56
gvr-interface/src/RenderingClient.h
Normal file
@ -0,0 +1,56 @@
//
// RenderingClient.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


#ifndef hifi_RenderingClient_h
#define hifi_RenderingClient_h

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

#include <QTimer>

#include <AvatarData.h>

#include "Client.h"

class RenderingClient : public Client {
Q_OBJECT
public:
RenderingClient(QObject* parent = 0, const QString& launchURLString = QString());
~RenderingClient();

const glm::vec3& getPosition() const { return _position; }
const glm::quat& getOrientation() const { return _orientation; }
void setOrientation(const glm::quat& orientation) { _orientation = orientation; }

static glm::vec3 getPositionForAudio() { return _instance->getPosition(); }
static glm::quat getOrientationForAudio() { return _instance->getOrientation(); }

private slots:
void goToLocation(const glm::vec3& newPosition,
bool hasOrientationChange, const glm::quat& newOrientation,
bool shouldFaceLocation);
void sendAvatarPacket();

private:
virtual void processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket);

static RenderingClient* _instance;

glm::vec3 _position;
glm::quat _orientation;

QTimer _avatarTimer;
AvatarData _fakeAvatar;
};

#endif // hifi_RenderingClient_h
@ -0,0 +1,41 @@
//
// InterfaceActivity.java
// gvr-interface/java
//
// Created by Stephen Birarda on 1/26/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

package io.highfidelity.gvrinterface;

import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.WindowManager;
import android.util.Log;
import org.qtproject.qt5.android.bindings.QtActivity;

public class InterfaceActivity extends QtActivity {

public static native void handleHifiURL(String hifiURLString);

@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

// Get the intent that started this activity in case we have a hifi:// URL to parse
Intent intent = getIntent();
if (intent.getAction() == Intent.ACTION_VIEW) {
Uri data = intent.getData();

if (data.getScheme().equals("hifi")) {
handleHifiURL(data.toString());
}
}

}
}
28
gvr-interface/src/main.cpp
Normal file
@ -0,0 +1,28 @@
//
// main.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 11/17/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "GVRMainWindow.h"
#include "GVRInterface.h"

int main(int argc, char* argv[]) {
GVRInterface app(argc, argv);

GVRMainWindow mainWindow;
#ifdef ANDROID
mainWindow.showFullScreen();
#else
mainWindow.showMaximized();
#endif

app.setMainWindow(&mainWindow);

return app.exec();
}
51
gvr-interface/templates/InterfaceBetaActivity.java.in
Normal file
@ -0,0 +1,51 @@
//
// InterfaceBetaActivity.java
// gvr-interface/java
//
// Created by Stephen Birarda on 1/27/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

package io.highfidelity.gvrinterface;

import android.os.Bundle;
import net.hockeyapp.android.CrashManager;
import net.hockeyapp.android.UpdateManager;

public class InterfaceBetaActivity extends InterfaceActivity {

public String _hockeyAppID;

@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

_hockeyAppID = getString(R.string.HockeyAppID);

checkForUpdates();
}

@Override
protected void onPause() {
super.onPause();
UpdateManager.unregister();
}

@Override
protected void onResume() {
super.onResume();
checkForCrashes();
}

private void checkForCrashes() {
CrashManager.register(this, _hockeyAppID);
}

private void checkForUpdates() {
// Remove this for store / production builds!
UpdateManager.register(this, _hockeyAppID);
}
}
5
gvr-interface/templates/hockeyapp.xml.in
Normal file
@ -0,0 +1,5 @@
<?xml version='1.0' encoding='utf-8'?>
<resources>
<string name="HockeyAppID">${HOCKEY_APP_ID}</string>
<bool name="HockeyAppEnabled">${HOCKEY_APP_ENABLED}</bool>
</resources>
@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})

# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "LeapMotion" "RtMidi" "Qxmpp" "SDL2" "Gverb" "RSSDK")
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "LeapMotion" "RtMidi" "Qxmpp" "SDL2" "RSSDK")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)

@ -14,10 +14,6 @@ endforeach()
find_package(Qt5LinguistTools REQUIRED)
find_package(Qt5LinguistToolsMacros)

# As Gverb is currently the only reverb library, it's required.
find_package(Gverb REQUIRED)

if (DEFINED ENV{JOB_ID})
set(BUILD_SEQ $ENV{JOB_ID})
else ()

@ -114,7 +110,8 @@ endif()
add_executable(${TARGET_NAME} MACOSX_BUNDLE ${INTERFACE_SRCS} ${QM})

# link required hifi libraries
link_hifi_libraries(shared octree environment gpu model fbx metavoxels networking entities avatars audio animation script-engine physics
link_hifi_libraries(shared octree environment gpu model fbx metavoxels networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer)

# find any optional and required libraries

@ -179,13 +176,6 @@ if (QXMPP_FOUND AND NOT DISABLE_QXMPP AND WIN32)
add_definitions(-DQXMPP_STATIC)
endif ()

if (GVERB_FOUND)
file(GLOB GVERB_SRCS ${GVERB_SRC_DIRS}/*.c)
include_directories(${GVERB_INCLUDE_DIRS})
add_library(gverb STATIC ${GVERB_SRCS})
target_link_libraries(${TARGET_NAME} gverb)
endif (GVERB_FOUND)

# include headers for interface and InterfaceConfig.
include_directories("${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}/includes")

@ -199,12 +189,10 @@ add_definitions(-DQT_NO_BEARERMANAGEMENT)

if (APPLE)
# link in required OS X frameworks and include the right GL headers
find_library(CoreAudio CoreAudio)
find_library(CoreFoundation CoreFoundation)
find_library(OpenGL OpenGL)
find_library(AppKit AppKit)

target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation} ${OpenGL} ${AppKit})
target_link_libraries(${TARGET_NAME} ${OpenGL} ${AppKit})

# install command for OS X bundle
INSTALL(TARGETS ${TARGET_NAME}
@ -81,13 +81,15 @@
#include <UUID.h>

#include "Application.h"
#include "Audio.h"
#include "AudioClient.h"
#include "InterfaceVersion.h"
#include "LODManager.h"
#include "Menu.h"
#include "ModelUploader.h"
#include "Util.h"

#include "avatar/AvatarManager.h"

#include "audio/AudioToolBox.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"

@ -127,8 +129,6 @@
#include "ui/StandAloneJSConsole.h"
#include "ui/Stats.h"

using namespace std;

// Starfield information

@ -163,6 +163,7 @@ bool setupEssentials(int& argc, char** argv) {
QCoreApplication::setApplicationVersion(BUILD_VERSION);

DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
DependencyManager::registerInheritance<AvatarHashMap, AvatarManager>();

// Set dependencies
auto glCanvas = DependencyManager::set<GLCanvas>();

@ -171,7 +172,7 @@ bool setupEssentials(int& argc, char** argv) {
auto geometryCache = DependencyManager::set<GeometryCache>();
auto glowEffect = DependencyManager::set<GlowEffect>();
auto faceshift = DependencyManager::set<Faceshift>();
auto audio = DependencyManager::set<Audio>();
auto audio = DependencyManager::set<AudioClient>();
auto audioScope = DependencyManager::set<AudioScope>();
auto audioIOStatsRenderer = DependencyManager::set<AudioIOStatsRenderer>();
auto deferredLightingEffect = DependencyManager::set<DeferredLightingEffect>();

@ -182,6 +183,7 @@ bool setupEssentials(int& argc, char** argv) {
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
auto modelBlender = DependencyManager::set<ModelBlender>();
auto audioToolBox = DependencyManager::set<AudioToolBox>();
auto avatarManager = DependencyManager::set<AvatarManager>();
auto lodManager = DependencyManager::set<LODManager>();
auto jsConsole = DependencyManager::set<StandAloneJSConsole>();
auto dialogsManager = DependencyManager::set<DialogsManager>();

@ -244,7 +246,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
{
_logger = new FileLogger(this); // After setting organization name in order to get correct directory
qInstallMessageHandler(messageHandler);

QFontDatabase::addApplicationFont(PathUtils::resourcesPath() + "styles/Inconsolata.otf");
_window->setWindowTitle("Interface");

@ -253,7 +255,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
auto glCanvas = DependencyManager::get<GLCanvas>();
auto nodeList = DependencyManager::get<NodeList>();

_myAvatar = _avatarManager.getMyAvatar();
_myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

_applicationStartupTime = startup_time;

@ -284,9 +286,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
QThread* audioThread = new QThread(this);
audioThread->setObjectName("Audio Thread");

auto audioIO = DependencyManager::get<Audio>();
auto audioIO = DependencyManager::get<AudioClient>();

audioIO->setPositionGetter(getPositionForAudio);
audioIO->setOrientationGetter(getOrientationForAudio);

audioIO->moveToThread(audioThread);
connect(audioThread, &QThread::started, audioIO.data(), &Audio::start);
connect(audioThread, &QThread::started, audioIO.data(), &AudioClient::start);
connect(audioIO.data(), SIGNAL(muteToggled()), this, SLOT(audioMuteToggled()));

audioThread->start();

@ -330,9 +336,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
auto dialogsManager = DependencyManager::get<DialogsManager>();
connect(&accountManager, &AccountManager::authRequired, dialogsManager.data(), &DialogsManager::showLoginDialog);
connect(&accountManager, &AccountManager::usernameChanged, this, &Application::updateWindowTitle);

// once we have a profile in account manager make sure we generate a new keypair
connect(&accountManager, &AccountManager::profileChanged, &accountManager, &AccountManager::generateNewKeypair);

// set the account manager's root URL and trigger a login request if we don't have the access token
|
// set the account manager's root URL and trigger a login request if we don't have the access token
|
||||||
accountManager.setAuthURL(DEFAULT_NODE_AUTH_URL);
|
accountManager.setAuthURL(DEFAULT_NODE_AUTH_URL);
|
||||||
|
@ -497,7 +500,7 @@ Application::~Application() {
|
||||||
// kill any audio injectors that are still around
|
// kill any audio injectors that are still around
|
||||||
AudioScriptingInterface::getInstance().stopAllInjectors();
|
AudioScriptingInterface::getInstance().stopAllInjectors();
|
||||||
|
|
||||||
auto audioIO = DependencyManager::get<Audio>();
|
auto audioIO = DependencyManager::get<AudioClient>();
|
||||||
|
|
||||||
// stop the audio process
|
// stop the audio process
|
||||||
QMetaObject::invokeMethod(audioIO.data(), "stop", Qt::BlockingQueuedConnection);
|
QMetaObject::invokeMethod(audioIO.data(), "stop", Qt::BlockingQueuedConnection);
|
||||||
|
@ -706,7 +709,7 @@ void Application::runTests() {
|
||||||
void Application::audioMuteToggled() {
|
void Application::audioMuteToggled() {
|
||||||
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteAudio);
|
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteAudio);
|
||||||
Q_CHECK_PTR(muteAction);
|
Q_CHECK_PTR(muteAction);
|
||||||
muteAction->setChecked(DependencyManager::get<Audio>()->isMuted());
|
muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted());
|
||||||
}
|
}
|
||||||
|
|
||||||
void Application::aboutApp() {
|
void Application::aboutApp() {
|
||||||
|
@ -1566,13 +1569,16 @@ bool Application::exportEntities(const QString& filename, float x, float y, floa
|
||||||
}
|
}
|
||||||
|
|
||||||
void Application::loadSettings() {
|
void Application::loadSettings() {
|
||||||
DependencyManager::get<Audio>()->loadSettings();
|
|
||||||
|
DependencyManager::get<AudioClient>()->loadSettings();
|
||||||
|
|
||||||
Menu::getInstance()->loadSettings();
|
Menu::getInstance()->loadSettings();
|
||||||
_myAvatar->loadData();
|
_myAvatar->loadData();
|
||||||
}
|
}
|
||||||
|
|
||||||
void Application::saveSettings() {
|
void Application::saveSettings() {
|
||||||
DependencyManager::get<Audio>()->saveSettings();
|
DependencyManager::get<AudioClient>()->saveSettings();
|
||||||
|
|
||||||
Menu::getInstance()->saveSettings();
|
Menu::getInstance()->saveSettings();
|
||||||
_myAvatar->saveData();
|
_myAvatar->saveData();
|
||||||
}
|
}
|
||||||
|
@ -1606,7 +1612,7 @@ void Application::init() {
|
||||||
DependencyManager::get<AmbientOcclusionEffect>()->init(this);
|
DependencyManager::get<AmbientOcclusionEffect>()->init(this);
|
||||||
|
|
||||||
// TODO: move _myAvatar out of Application. Move relevant code to MyAvataar or AvatarManager
|
// TODO: move _myAvatar out of Application. Move relevant code to MyAvataar or AvatarManager
|
||||||
_avatarManager.init();
|
DependencyManager::get<AvatarManager>()->init();
|
||||||
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
|
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
|
||||||
|
|
||||||
_mirrorCamera.setMode(CAMERA_MODE_MIRROR);
|
_mirrorCamera.setMode(CAMERA_MODE_MIRROR);
|
||||||
|
@ -1985,7 +1991,7 @@ void Application::update(float deltaTime) {
|
||||||
|
|
||||||
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
|
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
|
||||||
|
|
||||||
_avatarManager.updateOtherAvatars(deltaTime); //loop through all the other avatars and simulate them...
|
DependencyManager::get<AvatarManager>()->updateOtherAvatars(deltaTime); //loop through all the other avatars and simulate them...
|
||||||
|
|
||||||
updateMetavoxels(deltaTime); // update metavoxels
|
updateMetavoxels(deltaTime); // update metavoxels
|
||||||
updateCamera(deltaTime); // handle various camera tweaks like off axis projection
|
updateCamera(deltaTime); // handle various camera tweaks like off axis projection
|
||||||
|
@ -2012,7 +2018,7 @@ void Application::update(float deltaTime) {
|
||||||
{
|
{
|
||||||
PerformanceTimer perfTimer("myAvatar");
|
PerformanceTimer perfTimer("myAvatar");
|
||||||
updateMyAvatarLookAtPosition();
|
updateMyAvatarLookAtPosition();
|
||||||
updateMyAvatar(deltaTime); // Sample hardware, update view frustum if needed, and send avatar data to mixer/nodes
|
DependencyManager::get<AvatarManager>()->updateMyAvatar(deltaTime); // Sample hardware, update view frustum if needed, and send avatar data to mixer/nodes
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -2069,31 +2075,11 @@ void Application::update(float deltaTime) {
|
||||||
if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS) {
|
if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS) {
|
||||||
_lastSendDownstreamAudioStats = now;
|
_lastSendDownstreamAudioStats = now;
|
||||||
|
|
||||||
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
|
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Application::updateMyAvatar(float deltaTime) {
|
|
||||||
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
|
|
||||||
PerformanceWarning warn(showWarnings, "Application::updateMyAvatar()");
|
|
||||||
|
|
||||||
_myAvatar->update(deltaTime);
|
|
||||||
|
|
||||||
quint64 now = usecTimestampNow();
|
|
||||||
quint64 dt = now - _lastSendAvatarDataTime;
|
|
||||||
|
|
||||||
if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS) {
|
|
||||||
// send head/hand data to the avatar mixer and voxel server
|
|
||||||
PerformanceTimer perfTimer("send");
|
|
||||||
QByteArray packet = byteArrayWithPopulatedHeader(PacketTypeAvatarData);
|
|
||||||
packet.append(_myAvatar->toByteArray());
|
|
||||||
controlledBroadcastToNodes(packet, NodeSet() << NodeType::AvatarMixer);
|
|
||||||
|
|
||||||
_lastSendAvatarDataTime = now;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
int Application::sendNackPackets() {
|
int Application::sendNackPackets() {
|
||||||
|
|
||||||
if (Menu::getInstance()->isOptionChecked(MenuOption::DisableNackPackets)) {
|
if (Menu::getInstance()->isOptionChecked(MenuOption::DisableNackPackets)) {
|
||||||
|
@ -2533,7 +2519,7 @@ void Application::updateShadowMap() {
|
||||||
|
|
||||||
{
|
{
|
||||||
PerformanceTimer perfTimer("avatarManager");
|
PerformanceTimer perfTimer("avatarManager");
|
||||||
_avatarManager.renderAvatars(Avatar::SHADOW_RENDER_MODE);
|
DependencyManager::get<AvatarManager>()->renderAvatars(Avatar::SHADOW_RENDER_MODE);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -2785,7 +2771,7 @@ void Application::displaySide(Camera& theCamera, bool selfAvatarOnly, RenderArgs
|
||||||
bool mirrorMode = (theCamera.getMode() == CAMERA_MODE_MIRROR);
|
bool mirrorMode = (theCamera.getMode() == CAMERA_MODE_MIRROR);
|
||||||
{
|
{
|
||||||
PerformanceTimer perfTimer("avatars");
|
PerformanceTimer perfTimer("avatars");
|
||||||
_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
|
DependencyManager::get<AvatarManager>()->renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
|
||||||
false, selfAvatarOnly);
|
false, selfAvatarOnly);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2800,7 +2786,7 @@ void Application::displaySide(Camera& theCamera, bool selfAvatarOnly, RenderArgs
|
||||||
|
|
||||||
{
|
{
|
||||||
PerformanceTimer perfTimer("avatarsPostLighting");
|
PerformanceTimer perfTimer("avatarsPostLighting");
|
||||||
_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
|
DependencyManager::get<AvatarManager>()->renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
|
||||||
true, selfAvatarOnly);
|
true, selfAvatarOnly);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3064,7 +3050,7 @@ void Application::resetSensors() {
|
||||||
|
|
||||||
_myAvatar->reset();
|
_myAvatar->reset();
|
||||||
|
|
||||||
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "reset", Qt::QueuedConnection);
|
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "reset", Qt::QueuedConnection);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void setShortcutsEnabled(QWidget* widget, bool enabled) {
|
static void setShortcutsEnabled(QWidget* widget, bool enabled) {
|
||||||
|
@ -3204,7 +3190,7 @@ void Application::nodeKilled(SharedNodePointer node) {
|
||||||
_entityEditSender.nodeKilled(node);
|
_entityEditSender.nodeKilled(node);
|
||||||
|
|
||||||
if (node->getType() == NodeType::AudioMixer) {
|
if (node->getType() == NodeType::AudioMixer) {
|
||||||
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "audioMixerKilled");
|
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "audioMixerKilled");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (node->getType() == NodeType::EntityServer) {
|
if (node->getType() == NodeType::EntityServer) {
|
||||||
|
@ -3247,7 +3233,7 @@ void Application::nodeKilled(SharedNodePointer node) {
|
||||||
|
|
||||||
} else if (node->getType() == NodeType::AvatarMixer) {
|
} else if (node->getType() == NodeType::AvatarMixer) {
|
||||||
// our avatar mixer has gone away - clear the hash of avatars
|
// our avatar mixer has gone away - clear the hash of avatars
|
||||||
_avatarManager.clearOtherAvatars();
|
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3396,7 +3382,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
||||||
|
|
||||||
// hook our avatar and avatar hash map object into this script engine
|
// hook our avatar and avatar hash map object into this script engine
|
||||||
scriptEngine->setAvatarData(_myAvatar, "MyAvatar"); // leave it as a MyAvatar class to expose thrust features
|
scriptEngine->setAvatarData(_myAvatar, "MyAvatar"); // leave it as a MyAvatar class to expose thrust features
|
||||||
scriptEngine->setAvatarHashMap(&_avatarManager, "AvatarList");
|
scriptEngine->setAvatarHashMap(DependencyManager::get<AvatarManager>().data(), "AvatarList");
|
||||||
|
|
||||||
scriptEngine->registerGlobalObject("Camera", &_myCamera);
|
scriptEngine->registerGlobalObject("Camera", &_myCamera);
|
||||||
|
|
||||||
|
@ -3437,7 +3423,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
||||||
scriptEngine->registerGlobalObject("GlobalServices", GlobalServicesScriptingInterface::getInstance());
|
scriptEngine->registerGlobalObject("GlobalServices", GlobalServicesScriptingInterface::getInstance());
|
||||||
qScriptRegisterMetaType(scriptEngine, DownloadInfoResultToScriptValue, DownloadInfoResultFromScriptValue);
|
qScriptRegisterMetaType(scriptEngine, DownloadInfoResultToScriptValue, DownloadInfoResultFromScriptValue);
|
||||||
|
|
||||||
scriptEngine->registerGlobalObject("AvatarManager", &_avatarManager);
|
scriptEngine->registerGlobalObject("AvatarManager", DependencyManager::get<AvatarManager>().data());
|
||||||
|
|
||||||
scriptEngine->registerGlobalObject("Joysticks", &JoystickScriptingInterface::getInstance());
|
scriptEngine->registerGlobalObject("Joysticks", &JoystickScriptingInterface::getInstance());
|
||||||
qScriptRegisterMetaType(scriptEngine, joystickToScriptValue, joystickFromScriptValue);
|
qScriptRegisterMetaType(scriptEngine, joystickToScriptValue, joystickFromScriptValue);
|
||||||
|
@ -3737,10 +3723,6 @@ void Application::toggleLogDialog() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Application::initAvatarAndViewFrustum() {
|
|
||||||
updateMyAvatar(0.0f);
|
|
||||||
}
|
|
||||||
|
|
||||||
void Application::checkVersion() {
|
void Application::checkVersion() {
|
||||||
QNetworkRequest latestVersionRequest((QUrl(CHECK_VERSION_URL)));
|
QNetworkRequest latestVersionRequest((QUrl(CHECK_VERSION_URL)));
|
||||||
latestVersionRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
latestVersionRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||||
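The Application hunks above all apply one pattern: the old Audio singleton and the _avatarManager member give way to shared objects that setupEssentials() registers with DependencyManager::set<T>() and every call site fetches with DependencyManager::get<T>(). Below is a minimal, self-contained sketch of that kind of type-keyed registry; it is not the actual DependencyManager implementation, and Registry, AudioStub and AvatarStub are hypothetical stand-ins.

    // registry_sketch.cpp -- hypothetical stand-in for the set<T>()/get<T>() pattern.
    #include <iostream>
    #include <memory>
    #include <typeindex>
    #include <unordered_map>

    class Registry {
    public:
        // set<T>(): construct the shared instance once, typically at startup.
        template <typename T, typename... Args>
        static std::shared_ptr<T> set(Args&&... args) {
            auto instance = std::make_shared<T>(std::forward<Args>(args)...);
            instances()[std::type_index(typeid(T))] = instance;
            return instance;
        }
        // get<T>(): fetch that same instance from anywhere else in the code base.
        template <typename T>
        static std::shared_ptr<T> get() {
            return std::static_pointer_cast<T>(instances().at(std::type_index(typeid(T))));
        }
    private:
        static std::unordered_map<std::type_index, std::shared_ptr<void>>& instances() {
            static std::unordered_map<std::type_index, std::shared_ptr<void>> map;
            return map;
        }
    };

    struct AudioStub  { bool muted = false; void toggleMute() { muted = !muted; } };
    struct AvatarStub { float scale = 1.0f; };

    int main() {
        Registry::set<AudioStub>();                // done once, as in setupEssentials()
        Registry::set<AvatarStub>();
        Registry::get<AudioStub>()->toggleMute();  // done at any call site
        std::cout << "muted: " << Registry::get<AudioStub>()->muted
                  << ", scale: " << Registry::get<AvatarStub>()->scale << "\n";
        return 0;
    }

The payoff visible in the rest of the diff is that call sites no longer need a pointer to Application to reach these objects, which is what lets files such as Hand.cpp and SixenseManager.cpp drop their Application::getInstance() calls.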
@@ -37,6 +37,7 @@
 #include <TextureCache.h>
 #include <ViewFrustum.h>
 
+#include "AudioClient.h"
 #include "Bookmarks.h"
 #include "Camera.h"
 #include "DatagramProcessor.h"
@@ -49,7 +50,6 @@
 #include "Physics.h"
 #include "Stars.h"
 #include "avatar/Avatar.h"
-#include "avatar/AvatarManager.h"
 #include "avatar/MyAvatar.h"
 #include "devices/PrioVR.h"
 #include "devices/SixenseManager.h"
@@ -109,10 +109,6 @@ static const float MIRROR_REARVIEW_DISTANCE = 0.722f;
 static const float MIRROR_REARVIEW_BODY_DISTANCE = 2.56f;
 static const float MIRROR_FIELD_OF_VIEW = 30.0f;
 
-// 70 times per second - target is 60hz, but this helps account for any small deviations
-// in the update loop
-static const quint64 MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS = (1000 * 1000) / 70;
-
 static const quint64 TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS = 1 * USECS_PER_SECOND;
 
 static const QString INFO_HELP_PATH = "html/interface-welcome-allsvg.html";
@@ -134,6 +130,8 @@ public:
 static Application* getInstance() { return qApp; } // TODO: replace fully by qApp
 static const glm::vec3& getPositionForPath() { return getInstance()->_myAvatar->getPosition(); }
 static glm::quat getOrientationForPath() { return getInstance()->_myAvatar->getOrientation(); }
+static glm::vec3 getPositionForAudio() { return getInstance()->_myAvatar->getHead()->getPosition(); }
+static glm::quat getOrientationForAudio() { return getInstance()->_myAvatar->getHead()->getFinalOrientationInWorldFrame(); }
 
 Application(int& argc, char** argv, QElapsedTimer &startup_time);
 ~Application();
@@ -168,8 +166,6 @@ public:
 
 bool isThrottleRendering() const { return DependencyManager::get<GLCanvas>()->isThrottleRendering(); }
 
-MyAvatar* getAvatar() { return _myAvatar; }
-const MyAvatar* getAvatar() const { return _myAvatar; }
 Camera* getCamera() { return &_myCamera; }
 ViewFrustum* getViewFrustum() { return &_viewFrustum; }
 ViewFrustum* getDisplayViewFrustum() { return &_displayViewFrustum; }
@@ -224,8 +220,6 @@ public:
 virtual AbstractControllerScriptingInterface* getControllerScriptingInterface() { return &_controllerScriptingInterface; }
 virtual void registerScriptEngineWithApplicationServices(ScriptEngine* scriptEngine);
 
-
-AvatarManager& getAvatarManager() { return _avatarManager; }
 void resetProfile(const QString& username);
 
 void controlledBroadcastToNodes(const QByteArray& packet, const NodeSet& destinationNodeTypes);
@@ -263,7 +257,7 @@ public:
 virtual float getSizeScale() const;
 virtual int getBoundaryLevelAdjust() const;
 virtual PickRay computePickRay(float x, float y);
-virtual const glm::vec3& getAvatarPosition() const { return getAvatar()->getPosition(); }
+virtual const glm::vec3& getAvatarPosition() const { return _myAvatar->getPosition(); }
 
 NodeBounds& getNodeBoundsDisplay() { return _nodeBoundsDisplay; }
 
@@ -333,7 +327,6 @@ public slots:
 void loadDialog();
 void loadScriptURLDialog();
 void toggleLogDialog();
-void initAvatarAndViewFrustum();
 ScriptEngine* loadScript(const QString& scriptFilename = QString(), bool isUserLoaded = true,
 bool loadScriptFromEditor = false, bool activateMainWindow = false);
 void scriptFinished(const QString& scriptName);
@@ -430,7 +423,6 @@ private:
 
 void renderLookatIndicator(glm::vec3 pointOfInterest);
 
-void updateMyAvatar(float deltaTime);
 void queryOctree(NodeType_t serverType, PacketType packetType, NodeToJurisdictionMap& jurisdictions);
 void loadViewFrustum(Camera& camera, ViewFrustum& viewFrustum);
 
@@ -487,7 +479,6 @@ private:
 
 OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers
 
-AvatarManager _avatarManager;
 MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
 
 PrioVR _prioVR;
@@ -581,8 +572,6 @@ private:
 quint64 _lastNackTime;
 quint64 _lastSendDownstreamAudioStats;
 
-quint64 _lastSendAvatarDataTime;
-
 bool _isVSyncOn;
 
 bool _aboutToQuit;
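The header hunks above give Application static getPositionForAudio()/getOrientationForAudio() helpers that are handed to the audio client, while the Application constructor hunks move that client onto its own QThread and talk to it only through queued calls. Below is a small Qt sketch of that worker-object pattern; the Client class is a stand-in, not the real AudioClient, and the functor overload of QMetaObject::invokeMethod assumes Qt 5.10 or newer.

    // worker_client_sketch.cpp -- "client object on its own thread" pattern.
    #include <QCoreApplication>
    #include <QDebug>
    #include <QMetaObject>
    #include <QThread>

    class Client : public QObject {
    public:
        void start() { qDebug() << "client started on" << QThread::currentThread(); }
        void stop()  { qDebug() << "client stopping"; QThread::currentThread()->quit(); }
    };

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        QThread clientThread;
        clientThread.setObjectName("Client Thread");

        Client client;
        client.moveToThread(&clientThread);   // like audioIO->moveToThread(audioThread)
        QObject::connect(&clientThread, &QThread::started, &client, &Client::start);
        clientThread.start();

        // From the main thread, never call the client directly; queue the call so it
        // runs on the client's own thread, as the diff does with invokeMethod.
        QMetaObject::invokeMethod(&client, &Client::stop, Qt::QueuedConnection);

        // once the client's event loop quits, shut the application down
        QObject::connect(&clientThread, &QThread::finished, &app, &QCoreApplication::quit);
        int rc = app.exec();
        clientThread.wait();   // make sure the thread has fully finished before exit
        return rc;
    }

Because the object lives on another thread, direct calls from the GUI thread would race; routing everything through the event loop (queued connections, or blocking-queued when the caller must wait, as the destructor's "stop" call does) keeps the client effectively single-threaded internally.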
@@ -15,7 +15,8 @@
 #include <PerfStat.h>
 
 #include "Application.h"
-#include "Audio.h"
+#include "avatar/AvatarManager.h"
+#include "AudioClient.h"
 #include "Menu.h"
 
 #include "DatagramProcessor.h"
@@ -54,15 +55,15 @@ void DatagramProcessor::processDatagrams() {
 case PacketTypeMixedAudio:
 case PacketTypeSilentAudioFrame: {
 if (incomingType == PacketTypeAudioStreamStats) {
-QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "parseAudioStreamStatsPacket",
+QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "parseAudioStreamStatsPacket",
 Qt::QueuedConnection,
 Q_ARG(QByteArray, incomingPacket));
 } else if (incomingType == PacketTypeAudioEnvironment) {
-QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "parseAudioEnvironmentData",
+QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "parseAudioEnvironmentData",
 Qt::QueuedConnection,
 Q_ARG(QByteArray, incomingPacket));
 } else {
-QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "addReceivedAudioToStream",
+QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "addReceivedAudioToStream",
 Qt::QueuedConnection,
 Q_ARG(QByteArray, incomingPacket));
 }
@@ -107,7 +108,8 @@ void DatagramProcessor::processDatagrams() {
 
 if (avatarMixer) {
 avatarMixer->setLastHeardMicrostamp(usecTimestampNow());
-QMetaObject::invokeMethod(&application->getAvatarManager(), "processAvatarMixerDatagram",
+QMetaObject::invokeMethod(DependencyManager::get<AvatarManager>().data(), "processAvatarMixerDatagram",
 Q_ARG(const QByteArray&, incomingPacket),
 Q_ARG(const QWeakPointer<Node>&, avatarMixer));
 }
@@ -123,7 +125,7 @@ void DatagramProcessor::processDatagrams() {
 }
 case PacketTypeNoisyMute:
 case PacketTypeMuteEnvironment: {
-bool mute = !DependencyManager::get<Audio>()->isMuted();
+bool mute = !DependencyManager::get<AudioClient>()->isMuted();
 
 if (incomingType == PacketTypeMuteEnvironment) {
 glm::vec3 position;
@@ -132,13 +134,14 @@ void DatagramProcessor::processDatagrams() {
 int headerSize = numBytesForPacketHeaderGivenPacketType(PacketTypeMuteEnvironment);
 memcpy(&position, incomingPacket.constData() + headerSize, sizeof(glm::vec3));
 memcpy(&radius, incomingPacket.constData() + headerSize + sizeof(glm::vec3), sizeof(float));
-distance = glm::distance(Application::getInstance()->getAvatar()->getPosition(), position);
+distance = glm::distance(DependencyManager::get<AvatarManager>()->getMyAvatar()->getPosition(),
+position);
 
 mute = mute && (distance < radius);
 }
 
 if (mute) {
-DependencyManager::get<Audio>()->toggleMute();
+DependencyManager::get<AudioClient>()->toggleMute();
 if (incomingType == PacketTypeMuteEnvironment) {
 AudioScriptingInterface::getInstance().environmentMuted();
 } else {
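In the DatagramProcessor hunks above, PacketTypeMuteEnvironment carries a position and a radius right after the packet header, and the client toggles mute only when the local avatar stands inside that sphere. Below is a standalone sketch of that payload parse and distance test; the one-byte header used here is a placeholder, not the real packet header size, and the listener position stands in for MyAvatar's.

    // mute_environment_sketch.cpp -- payload parse and distance test, standalone.
    #include <cstring>
    #include <iostream>
    #include <glm/glm.hpp>

    struct MuteEnvironment { glm::vec3 position; float radius; };

    MuteEnvironment parseMuteEnvironment(const char* packet, int headerSize) {
        MuteEnvironment env;
        // payload layout used by the diff: glm::vec3 position, then float radius
        std::memcpy(&env.position, packet + headerSize, sizeof(glm::vec3));
        std::memcpy(&env.radius, packet + headerSize + sizeof(glm::vec3), sizeof(float));
        return env;
    }

    int main() {
        // build a fake packet: 1 placeholder header byte + position + radius
        char packet[1 + sizeof(glm::vec3) + sizeof(float)] = { 0x7f };
        glm::vec3 source(5.0f, 0.0f, 0.0f);
        float radius = 10.0f;
        std::memcpy(packet + 1, &source, sizeof(source));
        std::memcpy(packet + 1 + sizeof(source), &radius, sizeof(radius));

        MuteEnvironment env = parseMuteEnvironment(packet, 1);
        glm::vec3 listener(2.0f, 0.0f, 0.0f);   // stands in for the avatar position
        bool shouldMute = glm::distance(listener, env.position) < env.radius;
        std::cout << "mute: " << std::boolalpha << shouldMute << "\n";   // true: 3 < 10
        return 0;
    }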
@@ -1,6 +1,6 @@
 //
 // FileUtils.cpp
-// libraries/shared/src
+// interface/src
 //
 // Created by Stojce Slavkovski on 12/23/13.
 // Copyright 2013 High Fidelity, Inc.
@@ -9,9 +9,14 @@
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //
 
+#include <qdir.h>
+#include <qfileinfo.h>
+#include <qdesktopservices.h>
+#include <qprocess.h>
+#include <qurl.h>
+
 #include "FileUtils.h"
-#include <QtCore>
-#include <QDesktopServices>
 
 void FileUtils::locateFile(QString filePath) {

@@ -1,6 +1,6 @@
 //
 // FileUtils.h
-// libraries/shared/src
+// interface/src
 //
 // Created by Stojce Slavkovski on 12/23/13.
 // Copyright 2013 High Fidelity, Inc.

@@ -14,6 +14,7 @@
 #include <QShortcut>
 
 #include <AddressManager.h>
+#include <AudioClient.h>
 #include <DependencyManager.h>
 #include <GlowEffect.h>
 #include <PathUtils.h>
@@ -23,9 +24,9 @@
 
 #include "Application.h"
 #include "AccountManager.h"
-#include "Audio.h"
 #include "audio/AudioIOStatsRenderer.h"
 #include "audio/AudioScope.h"
+#include "avatar/AvatarManager.h"
 #include "devices/Faceshift.h"
 #include "devices/RealSense.h"
 #include "devices/SixenseManager.h"
@@ -186,26 +187,26 @@ Menu::Menu() {
 SLOT(resetSensors()));
 
 QMenu* avatarMenu = addMenu("Avatar");
+QObject* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
 
 QMenu* avatarSizeMenu = avatarMenu->addMenu("Size");
 addActionToQMenuAndActionHash(avatarSizeMenu,
 MenuOption::IncreaseAvatarSize,
 Qt::Key_Plus,
-qApp->getAvatar(),
+avatar,
 SLOT(increaseSize()));
 addActionToQMenuAndActionHash(avatarSizeMenu,
 MenuOption::DecreaseAvatarSize,
 Qt::Key_Minus,
-qApp->getAvatar(),
+avatar,
 SLOT(decreaseSize()));
 addActionToQMenuAndActionHash(avatarSizeMenu,
 MenuOption::ResetAvatarSize,
 Qt::Key_Equal,
-qApp->getAvatar(),
+avatar,
 SLOT(resetSize()));
 
-QObject* avatar = qApp->getAvatar();
 addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::KeyboardMotorControl,
 Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehavior()));
 addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
 avatar, SLOT(updateMotionBehavior()));
@@ -447,7 +448,7 @@ Menu::Menu() {
 addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::PipelineWarnings);
 addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::SuppressShortTimings);
 
-auto audioIO = DependencyManager::get<Audio>();
+auto audioIO = DependencyManager::get<AudioClient>();
 QMenu* audioDebugMenu = developerMenu->addMenu("Audio");
 addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioNoiseReduction,
 0,
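The Menu::Menu() hunks above now look the avatar object up once through the dependency registry and wire every size action to its slots, instead of calling qApp->getAvatar() per action. Below is a plain-Qt sketch of the same wiring; AvatarStub and its slots are hypothetical, and the real code goes through the menu's addActionToQMenuAndActionHash() helper rather than raw QAction calls.

    // menu_wiring_sketch.cpp -- wiring menu actions to one shared avatar object.
    #include <QApplication>
    #include <QDebug>
    #include <QMainWindow>
    #include <QMenu>
    #include <QMenuBar>

    class AvatarStub : public QObject {
    public:
        float scale = 1.0f;
        void increaseSize() { scale *= 1.1f; qDebug() << "scale" << scale; }
        void decreaseSize() { scale *= 0.9f; qDebug() << "scale" << scale; }
        void resetSize()    { scale = 1.0f;  qDebug() << "scale reset"; }
    };

    int main(int argc, char** argv) {
        QApplication app(argc, argv);
        QMainWindow window;
        AvatarStub avatar;   // the commit gets this from DependencyManager::get<AvatarManager>()->getMyAvatar()

        QMenu* avatarMenu = window.menuBar()->addMenu("Avatar");
        QMenu* sizeMenu = avatarMenu->addMenu("Size");

        // one action per slot, same shortcut keys as the diff
        QAction* grow = sizeMenu->addAction("Increase Avatar Size");
        grow->setShortcut(Qt::Key_Plus);
        QObject::connect(grow, &QAction::triggered, &avatar, [&avatar] { avatar.increaseSize(); });

        QAction* shrink = sizeMenu->addAction("Decrease Avatar Size");
        shrink->setShortcut(Qt::Key_Minus);
        QObject::connect(shrink, &QAction::triggered, &avatar, [&avatar] { avatar.decreaseSize(); });

        QAction* reset = sizeMenu->addAction("Reset Avatar Size");
        reset->setShortcut(Qt::Key_Equal);
        QObject::connect(reset, &QAction::triggered, &avatar, [&avatar] { avatar.resetSize(); });

        window.show();
        return app.exec();
    }

Resolving the avatar once and reusing the pointer also makes it obvious that all of these actions act on the same object, which was easy to miss when each call site fetched it separately.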
@@ -1,6 +1,6 @@
 //
 // UIUtil.cpp
-// library/shared/src
+// interface/src
 //
 // Created by Ryan Huffman on 09/02/2014.
 // Copyright 2014 High Fidelity, Inc.

@@ -1,6 +1,6 @@
 //
 // UIUtil.h
-// library/shared/src
+// interface/src
 //
 // Created by Ryan Huffman on 09/02/2014.
 // Copyright 2014 High Fidelity, Inc.

@@ -11,15 +11,14 @@
 
 #include "InterfaceConfig.h"
 
+#include <AudioClient.h>
 #include <AudioConstants.h>
+#include <AudioIOStats.h>
 #include <DependencyManager.h>
 #include <GeometryCache.h>
 #include <NodeList.h>
 #include <Util.h>
 
-#include "Audio.h"
-#include "AudioIOStats.h"
-
 #include "AudioIOStatsRenderer.h"
 
 AudioIOStatsRenderer::AudioIOStatsRenderer() :
@@ -28,7 +27,7 @@ AudioIOStatsRenderer::AudioIOStatsRenderer() :
 _shouldShowInjectedStreams(false)
 {
 // grab the stats object from the audio I/O singleton
-_stats = &DependencyManager::get<Audio>()->getStats();
+_stats = &DependencyManager::get<AudioClient>()->getStats();
 }
 
 #ifdef _WIN32

@@ -13,11 +13,10 @@
 
 #include <limits>
 
+#include <AudioClient.h>
 #include <AudioConstants.h>
 #include <GeometryCache.h>
 
-#include "Audio.h"
-
 #include "AudioScope.h"
 
 static const unsigned int DEFAULT_FRAMES_PER_SCOPE = 5;
@@ -41,14 +40,14 @@ AudioScope::AudioScope() :
 _outputLeftID(DependencyManager::get<GeometryCache>()->allocateID()),
 _outputRightD(DependencyManager::get<GeometryCache>()->allocateID())
 {
-auto audioIO = DependencyManager::get<Audio>();
+auto audioIO = DependencyManager::get<AudioClient>();
 connect(&audioIO->getReceivedAudioStream(), &MixedProcessedAudioStream::addedSilence,
 this, &AudioScope::addStereoSilenceToScope);
 connect(&audioIO->getReceivedAudioStream(), &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade,
 this, &AudioScope::addLastFrameRepeatedWithFadeToScope);
 connect(&audioIO->getReceivedAudioStream(), &MixedProcessedAudioStream::addedStereoSamples,
 this, &AudioScope::addStereoSamplesToScope);
-connect(audioIO.data(), &Audio::inputReceived, this, &AudioScope::addInputToScope);
+connect(audioIO.data(), &AudioClient::inputReceived, this, &AudioScope::addInputToScope);
 }
 
 void AudioScope::toggle() {

@@ -11,12 +11,11 @@
 
 #include "InterfaceConfig.h"
 
+#include <AudioClient.h>
 #include <GLCanvas.h>
 #include <PathUtils.h>
 #include <GeometryCache.h>
 
-#include "Audio.h"
-
 #include "AudioToolBox.h"
 
 // Mute icon configration
@@ -29,7 +28,7 @@ AudioToolBox::AudioToolBox() :
 
 bool AudioToolBox::mousePressEvent(int x, int y) {
 if (_iconBounds.contains(x, y)) {
-DependencyManager::get<Audio>()->toggleMute();
+DependencyManager::get<AudioClient>()->toggleMute();
 return true;
 }
 return false;
@@ -49,7 +48,7 @@ void AudioToolBox::render(int x, int y, bool boxed) {
 _boxTextureId = glCanvas->bindTexture(QImage(PathUtils::resourcesPath() + "images/audio-box.svg"));
 }
 
-auto audioIO = DependencyManager::get<Audio>();
+auto audioIO = DependencyManager::get<AudioClient>();
 
 if (boxed) {
 bool isClipping = ((audioIO->getTimeSinceLastClip() > 0.0f) && (audioIO->getTimeSinceLastClip() < 1.0f));

@@ -36,6 +36,7 @@
 
 #include "Application.h"
 #include "Avatar.h"
+#include "AvatarManager.h"
 #include "Hand.h"
 #include "Head.h"
 #include "Menu.h"
@@ -272,7 +273,8 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode, bool
 _referential->update();
 }
 
-if (postLighting && glm::distance(Application::getInstance()->getAvatar()->getPosition(), _position) < 10.0f) {
+if (postLighting &&
+glm::distance(DependencyManager::get<AvatarManager>()->getMyAvatar()->getPosition(), _position) < 10.0f) {
 auto geometryCache = DependencyManager::get<GeometryCache>();
 
 // render pointing lasers
@@ -351,7 +353,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode, bool
 const float GLOW_MAX_LOUDNESS = 2500.0f;
 const float MAX_GLOW = 0.5f;
 
-float GLOW_FROM_AVERAGE_LOUDNESS = ((this == Application::getInstance()->getAvatar())
+float GLOW_FROM_AVERAGE_LOUDNESS = ((this == DependencyManager::get<AvatarManager>()->getMyAvatar())
 ? 0.0f
 : MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
 if (!Menu::getInstance()->isOptionChecked(MenuOption::GlowWhenSpeaking)) {
@@ -375,7 +377,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode, bool
 float distance = BASE_LIGHT_DISTANCE * _scale;
 glm::vec3 position = glm::mix(_skeletonModel.getTranslation(), getHead()->getFaceModel().getTranslation(), 0.9f);
 glm::quat orientation = getOrientation();
-foreach (const AvatarManager::LocalLight& light, Application::getInstance()->getAvatarManager().getLocalLights()) {
+foreach (const AvatarManager::LocalLight& light, DependencyManager::get<AvatarManager>()->getLocalLights()) {
 glm::vec3 direction = orientation * light.direction;
 DependencyManager::get<DeferredLightingEffect>()->addSpotLight(position - direction * distance,
 distance * 2.0f, glm::vec3(), light.color, light.color, 1.0f, 0.5f, 0.0f, direction,
@@ -1048,7 +1050,7 @@ void Avatar::setShowDisplayName(bool showDisplayName) {
 }
 
 // For myAvatar, the alpha update is not done (called in simulate for other avatars)
-if (Application::getInstance()->getAvatar() == this) {
+if (DependencyManager::get<AvatarManager>()->getMyAvatar() == this) {
 if (showDisplayName) {
 _displayNameAlpha = DISPLAYNAME_ALPHA;
 } else {

@@ -55,10 +55,6 @@ enum ScreenTintLayer {
 NUM_SCREEN_TINT_LAYERS
 };
 
-// Where one's own Avatar begins in the world (will be overwritten if avatar data file is found).
-// This is the start location in the Sandbox (xyz: 6270, 211, 6000).
-const glm::vec3 START_LOCATION(0.38269043f * TREE_SCALE, 0.01287842f * TREE_SCALE, 0.36621094f * TREE_SCALE);
-
 class Texture;
 
 class Avatar : public AvatarData {
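The audio UI hunks above (the stats renderer, AudioScope, AudioToolBox) all fetch the one shared AudioClient and either call it directly, such as toggleMute() when the icon is clicked, or subscribe to its signals instead of polling it. Below is a sketch of that toggle-and-notify flow with stand-in classes; it is not the real AudioClient API surface, and it assumes the file is run through moc (for example via CMake AUTOMOC).

    // mute_signal_sketch.cpp -- toggleMute()/muteToggled() flow with stand-ins.
    #include <QCoreApplication>
    #include <QDebug>
    #include <QObject>

    class AudioClientStub : public QObject {
        Q_OBJECT
    public:
        bool isMuted() const { return _muted; }
    public slots:
        void toggleMute() { _muted = !_muted; emit muteToggled(); }
    signals:
        void muteToggled();
    private:
        bool _muted = false;
    };

    #include "mute_signal_sketch.moc"   // assumes moc runs on this file

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        AudioClientStub audioClient;   // the commit shares one instance via DependencyManager

        // anything in the UI can observe the state change without polling
        QObject::connect(&audioClient, &AudioClientStub::muteToggled, [&audioClient] {
            qDebug() << "mute is now" << audioClient.isMuted();
        });

        audioClient.toggleMute();   // what AudioToolBox::mousePressEvent does on a hit
        audioClient.toggleMute();
        return 0;
    }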
@@ -26,6 +26,9 @@
 #include "Menu.h"
 #include "MyAvatar.h"
 
+// 70 times per second - target is 60hz, but this helps account for any small deviations
+// in the update loop
+static const quint64 MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS = (1000 * 1000) / 70;
 
 // We add _myAvatar into the hash with all the other AvatarData, and we use the default NULL QUid as the key.
 const QUuid MY_AVATAR_KEY; // NULL key
@@ -56,11 +59,26 @@ AvatarManager::AvatarManager(QObject* parent) :
 
 void AvatarManager::init() {
 _myAvatar->init();
-_myAvatar->setPosition(START_LOCATION);
-_myAvatar->setDisplayingLookatVectors(false);
 _avatarHash.insert(MY_AVATAR_KEY, _myAvatar);
 }
 
+void AvatarManager::updateMyAvatar(float deltaTime) {
+bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
+PerformanceWarning warn(showWarnings, "AvatarManager::updateMyAvatar()");
+
+_myAvatar->update(deltaTime);
+
+quint64 now = usecTimestampNow();
+quint64 dt = now - _lastSendAvatarDataTime;
+
+if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS) {
+// send head/hand data to the avatar mixer and voxel server
+PerformanceTimer perfTimer("send");
+_myAvatar->sendAvatarDataPacket();
+_lastSendAvatarDataTime = now;
+}
+}
+
 void AvatarManager::updateOtherAvatars(float deltaTime) {
 if (_avatarHash.size() < 2 && _avatarFades.isEmpty()) {
 return;

@@ -24,18 +24,18 @@ class MyAvatar;
 
 class AvatarManager : public AvatarHashMap {
 Q_OBJECT
+SINGLETON_DEPENDENCY
 
 public:
 
 /// Registers the script types associated with the avatar manager.
 static void registerMetaTypes(QScriptEngine* engine);
 
-AvatarManager(QObject* parent = 0);
-
 void init();
 
 MyAvatar* getMyAvatar() { return _myAvatar.data(); }
 
+void updateMyAvatar(float deltaTime);
 void updateOtherAvatars(float deltaTime);
 void renderAvatars(Avatar::RenderMode renderMode, bool postLighting = false, bool selfAvatarOnly = false);
 
@@ -51,6 +51,7 @@ public:
 Q_INVOKABLE QVector<AvatarManager::LocalLight> getLocalLights() const;
 
 private:
+AvatarManager(QObject* parent = 0);
 AvatarManager(const AvatarManager& other);
 
 void simulateAvatarFades(float deltaTime);
@@ -63,6 +64,7 @@ private:
 
 QVector<AvatarSharedPointer> _avatarFades;
 QSharedPointer<MyAvatar> _myAvatar;
+quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
 
 QVector<AvatarManager::LocalLight> _localLights;
 };
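The AvatarManager hunks above pick up the updateMyAvatar() body that was removed from Application.cpp: the avatar is updated every frame, but a packet goes to the avatar mixer only when at least MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS (1/70 of a second) has passed since the last send. Below is a standalone sketch of that throttle, with a std::chrono clock standing in for usecTimestampNow() and a counter standing in for sendAvatarDataPacket().

    // send_throttle_sketch.cpp -- the 70 Hz send cap, standalone.
    #include <chrono>
    #include <iostream>

    using Clock = std::chrono::steady_clock;

    // same constant as the diff: at most one send per 1/70th of a second
    constexpr std::chrono::microseconds MIN_TIME_BETWEEN_SENDS{1000 * 1000 / 70};

    class AvatarSender {
    public:
        // called once per update tick, which may run much faster than 70 Hz
        void update() {
            auto now = Clock::now();
            if (now - _lastSendTime > MIN_TIME_BETWEEN_SENDS) {
                sendAvatarDataPacket();
                _lastSendTime = now;
            }
        }
    private:
        void sendAvatarDataPacket() { ++_sent; std::cout << "sent packet " << _sent << "\n"; }
        Clock::time_point _lastSendTime{};
        int _sent = 0;
    };

    int main() {
        AvatarSender sender;
        auto start = Clock::now();
        // simulate a tight update loop for ~100 ms; roughly 7 packets should go out
        while (Clock::now() - start < std::chrono::milliseconds(100)) {
            sender.update();
        }
        return 0;
    }

Capping sends near 70 Hz keeps the outgoing rate close to the 60 Hz target even when the render loop runs faster, which is what the comment carried over from Application.h says the constant is for.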
@@ -12,15 +12,14 @@
 
 #include <QImage>
 
-#include <NodeList.h>
-
 #include <GeometryUtil.h>
+#include <NodeList.h>
 #include <ProgramObject.h>
 
-#include "Application.h"
-#include "Avatar.h"
+#include "AvatarManager.h"
 #include "Hand.h"
 #include "Menu.h"
+#include "MyAvatar.h"
 #include "Util.h"
 
 using namespace std;
@@ -131,7 +130,7 @@ void Hand::render(bool isMine, Model::RenderMode renderMode) {
 void Hand::renderHandTargets(bool isMine) {
 glPushMatrix();
 
-const float avatarScale = Application::getInstance()->getAvatar()->getScale();
+const float avatarScale = DependencyManager::get<AvatarManager>()->getMyAvatar()->getScale();
 
 const float alpha = 1.0f;
 const glm::vec3 handColor(1.0, 0.0, 0.0); // Color the hand targets red to be different than skin

@@ -18,8 +18,6 @@
 
 #include <HeadData.h>
 
-#include <OctreeConstants.h> // for IDENTITY_*
-
 #include "FaceModel.h"
 #include "InterfaceConfig.h"
 #include "world.h"

@@ -23,6 +23,7 @@
 #include <AccountManager.h>
 #include <AddressManager.h>
 #include <AnimationHandle.h>
+#include <AudioClient.h>
 #include <DependencyManager.h>
 #include <GeometryUtil.h>
 #include <NodeList.h>
@@ -33,7 +34,7 @@
 #include <TextRenderer.h>
 
 #include "Application.h"
-#include "Audio.h"
+#include "AvatarManager.h"
 #include "Environment.h"
 #include "Menu.h"
 #include "ModelReferential.h"
@@ -149,7 +150,7 @@ void MyAvatar::update(float deltaTime) {
 head->relaxLean(deltaTime);
 updateFromTrackers(deltaTime);
 // Get audio loudness data from audio input device
-auto audio = DependencyManager::get<Audio>();
+auto audio = DependencyManager::get<AudioClient>();
 head->setAudioLoudness(audio->getLastInputLoudness());
 head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
 
@@ -492,9 +493,12 @@ void MyAvatar::startRecording() {
 if (!_recorder) {
 _recorder = RecorderPointer(new Recorder(this));
 }
-DependencyManager::get<Audio>()->setRecorder(_recorder);
-_recorder->startRecording();
+// connect to AudioClient's signal so we get input audio
+auto audioClient = DependencyManager::get<AudioClient>();
+connect(audioClient.data(), &AudioClient::inputReceived, _recorder.data(),
+&Recorder::recordAudio, Qt::BlockingQueuedConnection);
 
+_recorder->startRecording();
 }
 
 void MyAvatar::stopRecording() {
@@ -506,6 +510,10 @@ void MyAvatar::stopRecording() {
 return;
 }
 if (_recorder) {
+// stop grabbing audio from the AudioClient
+auto audioClient = DependencyManager::get<AudioClient>();
+disconnect(audioClient.data(), 0, _recorder.data(), 0);
+
 _recorder->stopRecording();
 }
 }
@@ -898,7 +906,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
 const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;
 
 int howManyLookingAtMe = 0;
-foreach (const AvatarSharedPointer& avatarPointer, Application::getInstance()->getAvatarManager().getAvatarHash()) {
+foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
 Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
 bool isCurrentTarget = avatar->getIsLookAtTarget();
 float distanceTo = glm::length(avatar->getHead()->getEyePosition() - cameraPosition);
@@ -1615,7 +1623,7 @@ bool findAvatarAvatarPenetration(const glm::vec3 positionA, float radiusA, float
 void MyAvatar::updateCollisionWithAvatars(float deltaTime) {
 // Reset detector for nearest avatar
 _distanceToNearestAvatar = std::numeric_limits<float>::max();
-const AvatarHash& avatars = Application::getInstance()->getAvatarManager().getAvatarHash();
+const AvatarHash& avatars = DependencyManager::get<AvatarManager>()->getAvatarHash();
 if (avatars.size() <= 1) {
 // no need to compute a bunch of stuff if we have one or fewer avatars
 return;
@@ -1690,7 +1698,7 @@ void MyAvatar::updateChatCircle(float deltaTime) {
 // find all circle-enabled members and sort by distance
 QVector<SortedAvatar> sortedAvatars;
 
-foreach (const AvatarSharedPointer& avatarPointer, Application::getInstance()->getAvatarManager().getAvatarHash()) {
+foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
 Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
 if ( ! avatar->isChatCirclingEnabled() ||
 avatar == static_cast<Avatar*>(this)) {
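In the MyAvatar hunks above, startRecording() no longer hands the recorder to the audio object; it subscribes the recorder to AudioClient::inputReceived with a blocking queued connection (the two objects live on different threads), and stopRecording() simply disconnects them. Below is a single-threaded Qt sketch of that subscribe/unsubscribe pattern with stand-in classes, so it uses the default connection type; it assumes the file is run through moc.

    // recorder_wiring_sketch.cpp -- subscribe a recorder to an audio signal,
    // unsubscribe on stop; both classes are stand-ins for AudioClient/Recorder.
    #include <QByteArray>
    #include <QCoreApplication>
    #include <QDebug>
    #include <QObject>

    class AudioSource : public QObject {
        Q_OBJECT
    signals:
        void inputReceived(const QByteArray& samples);
    public:
        void produce(const QByteArray& samples) { emit inputReceived(samples); }
    };

    class Recorder : public QObject {
        Q_OBJECT
    public slots:
        void recordAudio(const QByteArray& samples) { _bytes += samples.size(); }
    public:
        int recordedBytes() const { return _bytes; }
    private:
        int _bytes = 0;
    };

    #include "recorder_wiring_sketch.moc"   // assumes moc runs on this file

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        AudioSource source;
        Recorder recorder;

        // startRecording(): subscribe to the audio input signal
        QObject::connect(&source, &AudioSource::inputReceived, &recorder, &Recorder::recordAudio);
        source.produce(QByteArray(480, '\0'));
        source.produce(QByteArray(480, '\0'));

        // stopRecording(): drop every connection between the two objects
        QObject::disconnect(&source, nullptr, &recorder, nullptr);
        source.produce(QByteArray(480, '\0'));   // no longer recorded

        qDebug() << "recorded bytes:" << recorder.recordedBytes();   // 960
        return 0;
    }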
|
|
@ -22,6 +22,8 @@
|
||||||
|
|
||||||
#include <glm/glm.hpp>
|
#include <glm/glm.hpp>
|
||||||
|
|
||||||
|
#include <avatar/AvatarManager.h>
|
||||||
|
#include <avatar/MyAvatar.h>
|
||||||
#include <GlowEffect.h>
|
#include <GlowEffect.h>
|
||||||
#include <PathUtils.h>
|
#include <PathUtils.h>
|
||||||
#include <SharedUtil.h>
|
#include <SharedUtil.h>
|
||||||
|
@ -198,11 +200,12 @@ void OculusManager::disconnect() {
|
||||||
|
|
||||||
#ifdef HAVE_LIBOVR
|
#ifdef HAVE_LIBOVR
|
||||||
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
|
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
|
||||||
glm::quat headOrientation = Application::getInstance()->getAvatar()->getHeadOrientation();
|
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||||
|
glm::quat headOrientation = myAvatar->getHeadOrientation();
|
||||||
headOrientation.x = 0;
|
headOrientation.x = 0;
|
||||||
headOrientation.z = 0;
|
headOrientation.z = 0;
|
||||||
glm::normalize(headOrientation);
|
glm::normalize(headOrientation);
|
||||||
billboard->setPosition(Application::getInstance()->getAvatar()->getHeadPosition()
|
billboard->setPosition(myAvatar->getHeadPosition()
|
||||||
+ headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
|
+ headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
|
||||||
billboard->setRotation(headOrientation);
|
billboard->setRotation(headOrientation);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -12,6 +12,7 @@
 #include <QTimer>
 #include <QtDebug>

+#include <avatar/AvatarManager.h>
 #include <FBXReader.h>
 #include <PerfStat.h>
 #include <TextRenderer.h>

@@ -42,7 +43,7 @@ static int indexOfHumanIKJoint(const char* jointName) {
 static void setPalm(float deltaTime, int index) {
-    MyAvatar* avatar = Application::getInstance()->getAvatar();
+    MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     Hand* hand = avatar->getHand();
     PalmData* palm;
     bool foundHand = false;

@@ -86,9 +87,9 @@ static void setPalm(float deltaTime, int index) {
     // TODO: transfom this to stay in the model-frame.
     glm::vec3 position;
     glm::quat rotation;
-    SkeletonModel* skeletonModel = &Application::getInstance()->getAvatar()->getSkeletonModel();
+    SkeletonModel* skeletonModel = &DependencyManager::get<AvatarManager>()->getMyAvatar()->getSkeletonModel();
     int jointIndex;
-    glm::quat inverseRotation = glm::inverse(Application::getInstance()->getAvatar()->getOrientation());
+    glm::quat inverseRotation = glm::inverse(DependencyManager::get<AvatarManager>()->getMyAvatar()->getOrientation());
     if (index == LEFT_HAND_INDEX) {
         jointIndex = skeletonModel->getLeftHandJointIndex();
         skeletonModel->getJointRotationInWorldFrame(jointIndex, rotation);
@@ -11,6 +11,7 @@
 #include <vector>

+#include <avatar/AvatarManager.h>
 #include <PerfStat.h>

 #include "Application.h"

@@ -144,7 +145,7 @@ void SixenseManager::setFilter(bool filter) {
 void SixenseManager::update(float deltaTime) {
 #ifdef HAVE_SIXENSE
-    Hand* hand = Application::getInstance()->getAvatar()->getHand();
+    Hand* hand = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHand();
     if (_isInitialized && _isEnabled) {
 #ifdef __APPLE__
         SixenseBaseFunction sixenseGetNumActiveControllers =

@@ -458,8 +459,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
 //Injecting mouse movements and clicks
 void SixenseManager::emulateMouse(PalmData* palm, int index) {
-    Application* application = Application::getInstance();
-    MyAvatar* avatar = application->getAvatar();
+    MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     auto glCanvas = DependencyManager::get<GLCanvas>();
     QPoint pos;

@@ -478,7 +478,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
     if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)
         || Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
-        pos = application->getApplicationOverlay().getPalmClickLocation(palm);
+        pos = qApp->getApplicationOverlay().getPalmClickLocation(palm);
     } else {
         // Get directon relative to avatar orientation
         glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();

@@ -501,14 +501,14 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
     if (_bumperPressed[index]) {
         QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);
-        application->mouseReleaseEvent(&mouseEvent, deviceID);
+        qApp->mouseReleaseEvent(&mouseEvent, deviceID);
         _bumperPressed[index] = false;
     }
     if (_triggerPressed[index]) {
         QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, triggerButton, triggerButton, 0);
-        application->mouseReleaseEvent(&mouseEvent, deviceID);
+        qApp->mouseReleaseEvent(&mouseEvent, deviceID);
         _triggerPressed[index] = false;
     }

@@ -522,11 +522,11 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
     //Only send the mouse event if the opposite left button isnt held down.
     if (triggerButton == Qt::LeftButton) {
         if (!_triggerPressed[(int)(!index)]) {
-            application->mouseMoveEvent(&mouseEvent, deviceID);
+            qApp->mouseMoveEvent(&mouseEvent, deviceID);
         }
     } else {
         if (!_bumperPressed[(int)(!index)]) {
-            application->mouseMoveEvent(&mouseEvent, deviceID);
+            qApp->mouseMoveEvent(&mouseEvent, deviceID);
         }
     }

@@ -549,12 +549,12 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
             QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, bumperButton, bumperButton, 0);
-            application->mousePressEvent(&mouseEvent, deviceID);
+            qApp->mousePressEvent(&mouseEvent, deviceID);
         }
     } else if (_bumperPressed[index]) {
         QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);
-        application->mouseReleaseEvent(&mouseEvent, deviceID);
+        qApp->mouseReleaseEvent(&mouseEvent, deviceID);
         _bumperPressed[index] = false;
     }

@@ -566,12 +566,12 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
             QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, triggerButton, triggerButton, 0);
-            application->mousePressEvent(&mouseEvent, deviceID);
+            qApp->mousePressEvent(&mouseEvent, deviceID);
         }
     } else if (_triggerPressed[index]) {
         QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, triggerButton, triggerButton, 0);
-        application->mouseReleaseEvent(&mouseEvent, deviceID);
+        qApp->mouseReleaseEvent(&mouseEvent, deviceID);
         _triggerPressed[index] = false;
     }
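The Sixense hunks keep the existing math that maps controller data into the avatar's local frame, e.g. `glm::inverse(avatar->getOrientation()) * palm->getFingerDirection()`. A small standalone sketch of that transform — the 90° orientation and the direction values are made up for illustration, and it assumes a radians-based GLM build:

```cpp
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

int main() {
    // Avatar facing 90 degrees to the left around the Y (up) axis.
    glm::quat orientation = glm::angleAxis(glm::half_pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));

    // A world-space direction a controller might report, here pointing down -Z.
    glm::vec3 worldDirection(0.0f, 0.0f, -1.0f);

    // Rotating by the inverse orientation expresses the direction in the
    // avatar's local frame, which is what the mouse-emulation code works with.
    glm::vec3 localDirection = glm::inverse(orientation) * worldDirection;

    std::printf("local direction: %.2f %.2f %.2f\n",
                localDirection.x, localDirection.y, localDirection.z);
    return 0;
}
```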
@@ -9,7 +9,7 @@
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //
-#include "Audio.h"
+#include "AudioClient.h"
 #include "AudioDeviceScriptingInterface.h"

@@ -19,15 +19,15 @@ AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() {
 AudioDeviceScriptingInterface::AudioDeviceScriptingInterface() {
-    connect(DependencyManager::get<Audio>().data(), &Audio::muteToggled,
+    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::muteToggled,
         this, &AudioDeviceScriptingInterface::muteToggled);
-    connect(DependencyManager::get<Audio>().data(), &Audio::deviceChanged,
+    connect(DependencyManager::get<AudioClient>().data(), &AudioClient::deviceChanged,
         this, &AudioDeviceScriptingInterface::deviceChanged);
 }

 bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
     bool result;
-    QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "switchInputToAudioDevice",
+    QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchInputToAudioDevice",
         Qt::BlockingQueuedConnection,
         Q_RETURN_ARG(bool, result),
         Q_ARG(const QString&, deviceName));

@@ -37,7 +37,7 @@ bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
 bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {
     bool result;
-    QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "switchOutputToAudioDevice",
+    QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchOutputToAudioDevice",
         Qt::BlockingQueuedConnection,
         Q_RETURN_ARG(bool, result),
         Q_ARG(const QString&, deviceName));

@@ -46,50 +46,50 @@ bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {
 QString AudioDeviceScriptingInterface::getInputDevice() {
-    return DependencyManager::get<Audio>()->getDeviceName(QAudio::AudioInput);
+    return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioInput);
 }

 QString AudioDeviceScriptingInterface::getOutputDevice() {
-    return DependencyManager::get<Audio>()->getDeviceName(QAudio::AudioOutput);
+    return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioOutput);
 }

 QString AudioDeviceScriptingInterface::getDefaultInputDevice() {
-    return DependencyManager::get<Audio>()->getDefaultDeviceName(QAudio::AudioInput);
+    return DependencyManager::get<AudioClient>()->getDefaultDeviceName(QAudio::AudioInput);
 }

 QString AudioDeviceScriptingInterface::getDefaultOutputDevice() {
-    return DependencyManager::get<Audio>()->getDefaultDeviceName(QAudio::AudioOutput);
+    return DependencyManager::get<AudioClient>()->getDefaultDeviceName(QAudio::AudioOutput);
 }

 QVector<QString> AudioDeviceScriptingInterface::getInputDevices() {
-    return DependencyManager::get<Audio>()->getDeviceNames(QAudio::AudioInput);
+    return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioInput);
 }

 QVector<QString> AudioDeviceScriptingInterface::getOutputDevices() {
-    return DependencyManager::get<Audio>()->getDeviceNames(QAudio::AudioOutput);
+    return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioOutput);
 }

 float AudioDeviceScriptingInterface::getInputVolume() {
-    return DependencyManager::get<Audio>()->getInputVolume();
+    return DependencyManager::get<AudioClient>()->getInputVolume();
 }

 void AudioDeviceScriptingInterface::setInputVolume(float volume) {
-    DependencyManager::get<Audio>()->setInputVolume(volume);
+    DependencyManager::get<AudioClient>()->setInputVolume(volume);
 }

 void AudioDeviceScriptingInterface::setReverb(bool reverb) {
-    DependencyManager::get<Audio>()->setReverb(reverb);
+    DependencyManager::get<AudioClient>()->setReverb(reverb);
 }

 void AudioDeviceScriptingInterface::setReverbOptions(const AudioEffectOptions* options) {
-    DependencyManager::get<Audio>()->setReverbOptions(options);
+    DependencyManager::get<AudioClient>()->setReverbOptions(options);
 }

 void AudioDeviceScriptingInterface::toggleMute() {
-    DependencyManager::get<Audio>()->toggleMute();
+    DependencyManager::get<AudioClient>()->toggleMute();
 }

 bool AudioDeviceScriptingInterface::getMuted() {
-    return DependencyManager::get<Audio>()->isMuted();
+    return DependencyManager::get<AudioClient>()->isMuted();
 }
@@ -9,10 +9,12 @@
+#include <avatar/AvatarManager.h>
+#include <avatar/MyAvatar.h>
+#include <GLCanvas.h>
 #include <HandData.h>
 #include <HFBackEvent.h>

-#include "Application.h"
 #include "devices/MotionTracker.h"
 #include "devices/SixenseManager.h"
 #include "ControllerScriptingInterface.h"

@@ -41,7 +43,7 @@ void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
 const PalmData* ControllerScriptingInterface::getPrimaryPalm() const {
     int leftPalmIndex, rightPalmIndex;

-    const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
+    const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
     handData->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);

     if (rightPalmIndex != -1) {

@@ -52,7 +54,7 @@ const PalmData* ControllerScriptingInterface::getPrimaryPalm() const {
 int ControllerScriptingInterface::getNumberOfActivePalms() const {
-    const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
+    const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
     int numberOfPalms = handData->getNumPalms();
     int numberOfActivePalms = 0;
     for (int i = 0; i < numberOfPalms; i++) {

@@ -64,12 +66,12 @@ int ControllerScriptingInterface::getNumberOfActivePalms() const {
 const PalmData* ControllerScriptingInterface::getPalm(int palmIndex) const {
-    const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
+    const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
     return &handData->getPalms()[palmIndex];
 }

 const PalmData* ControllerScriptingInterface::getActivePalm(int palmIndex) const {
-    const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
+    const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
     int numberOfPalms = handData->getNumPalms();
     int numberOfActivePalms = 0;
     for (int i = 0; i < numberOfPalms; i++) {
@@ -16,17 +16,18 @@
 #include <QFileDialog>
 #include <QFormLayout>
 #include <QLineEdit>
+#include <QMenu>
 #include <QPushButton>
 #include <QScrollArea>
 #include <QVBoxLayout>

+#include "avatar/AvatarManager.h"

 #include "AnimationsDialog.h"
-#include "Application.h"
-#include "MainWindow.h"

 AnimationsDialog::AnimationsDialog(QWidget* parent) :
-    QDialog(parent) {
+    QDialog(parent)
+{
     setWindowTitle("Edit Animations");
     setAttribute(Qt::WA_DeleteOnClose);

@@ -42,7 +43,8 @@ AnimationsDialog::AnimationsDialog(QWidget* parent) :
     area->setWidget(container);
     _animations->addStretch(1);

-    foreach (const AnimationHandlePointer& handle, Application::getInstance()->getAvatar()->getAnimationHandles()) {
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    foreach (const AnimationHandlePointer& handle, myAvatar->getAnimationHandles()) {
         _animations->insertWidget(_animations->count() - 1, new AnimationPanel(this, handle));
     }

@@ -68,8 +70,8 @@ void AnimationsDialog::setVisible(bool visible) {
 void AnimationsDialog::addAnimation() {
-    _animations->insertWidget(_animations->count() - 1, new AnimationPanel(
-        this, Application::getInstance()->getAvatar()->addAnimationHandle()));
+    _animations->insertWidget(_animations->count() - 1, new AnimationPanel(this,
+        DependencyManager::get<AvatarManager>()->getMyAvatar()->addAnimationHandle()));
 }

 Setting::Handle<QString> AnimationPanel::_animationDirectory("animation_directory", QString());

@@ -172,7 +174,7 @@ void AnimationPanel::chooseURL() {
 void AnimationPanel::chooseMaskedJoints() {
     QMenu menu;
     QStringList maskedJoints = _handle->getMaskedJoints();
-    foreach (const QString& jointName, Application::getInstance()->getAvatar()->getJointNames()) {
+    foreach (const QString& jointName, DependencyManager::get<AvatarManager>()->getMyAvatar()->getJointNames()) {
         QAction* action = menu.addAction(jointName);
         action->setCheckable(true);
         action->setChecked(maskedJoints.contains(jointName));

@@ -203,6 +205,6 @@ void AnimationPanel::updateHandle() {
 void AnimationPanel::removeHandle() {
-    Application::getInstance()->getAvatar()->removeAnimationHandle(_handle);
+    DependencyManager::get<AvatarManager>()->getMyAvatar()->removeAnimationHandle(_handle);
     deleteLater();
 }
@@ -13,10 +13,11 @@
 #include <QOpenGLFramebufferObject>

+#include <avatar/AvatarManager.h>
 #include <PathUtils.h>
 #include <PerfStat.h>

-#include "Audio.h"
+#include "AudioClient.h"
 #include "audio/AudioIOStatsRenderer.h"
 #include "audio/AudioScope.h"
 #include "audio/AudioToolBox.h"

@@ -167,8 +168,7 @@ ApplicationOverlay::~ApplicationOverlay() {
 // Renders the overlays either to a texture or to the screen
 void ApplicationOverlay::renderOverlay(bool renderToTexture) {
     PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");
-    Application* application = Application::getInstance();
-    Overlays& overlays = application->getOverlays();
+    Overlays& overlays = qApp->getOverlays();
     auto glCanvas = DependencyManager::get<GLCanvas>();

     _textureFov = glm::radians(_oculusUIAngularSize);

@@ -212,7 +212,7 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
     renderStatsAndLogs();

     // give external parties a change to hook in
-    emit application->renderingOverlay();
+    emit qApp->renderingOverlay();

     overlays.renderHUD();

@@ -284,7 +284,7 @@ void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
     //Update and draw the magnifiers
-    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     const glm::quat& orientation = myAvatar->getOrientation();
     const glm::vec3& position = myAvatar->getDefaultEyePosition();
     const float scale = myAvatar->getScale() * _oculusUIRadius;

@@ -348,10 +348,8 @@ void ApplicationOverlay::displayOverlayTexture3DTV(Camera& whichCamera, float as
         return;
     }
-    Application* application = Application::getInstance();
-    MyAvatar* myAvatar = application->getAvatar();
-    const glm::vec3& viewMatrixTranslation = application->getViewMatrixTranslation();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    const glm::vec3& viewMatrixTranslation = qApp->getViewMatrixTranslation();

     glActiveTexture(GL_TEXTURE0);

@@ -413,8 +411,8 @@ void ApplicationOverlay::displayOverlayTexture3DTV(Camera& whichCamera, float as
     const float reticleSize = 40.0f / glCanvas->width() * quadWidth;
     x -= reticleSize / 2.0f;
     y += reticleSize / 2.0f;
-    const float mouseX = (application->getMouseX() / (float)glCanvas->width()) * quadWidth;
-    const float mouseY = (1.0 - (application->getMouseY() / (float)glCanvas->height())) * quadHeight;
+    const float mouseX = (qApp->getMouseX() / (float)glCanvas->width()) * quadWidth;
+    const float mouseY = (1.0 - (qApp->getMouseY() / (float)glCanvas->height())) * quadHeight;

     glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };

@@ -445,22 +443,21 @@ void ApplicationOverlay::computeOculusPickRay(float x, float y, glm::vec3& origi
     const glm::vec3 localDirection = orientation * IDENTITY_FRONT;

     //Rotate the UI pick ray by the avatar orientation
-    const MyAvatar* myAvatar = Application::getInstance()->getAvatar();
+    const MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     origin = myAvatar->getDefaultEyePosition();
     direction = myAvatar->getOrientation() * localDirection;
 }

 //Caculate the click location using one of the sixense controllers. Scale is not applied
 QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
-    Application* application = Application::getInstance();
     auto glCanvas = DependencyManager::get<GLCanvas>();
-    MyAvatar* myAvatar = application->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

     glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
     glm::vec3 eyePos = myAvatar->getHead()->getEyePosition();
     glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
     //direction of ray goes towards camera
-    glm::vec3 dir = invOrientation * glm::normalize(application->getCamera()->getPosition() - tip);
+    glm::vec3 dir = invOrientation * glm::normalize(qApp->getCamera()->getPosition() - tip);
     glm::vec3 tipPos = invOrientation * (tip - eyePos);

     QPoint rv;

@@ -494,7 +491,7 @@ QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
     } else {
         glm::dmat4 projection;
-        application->getProjectionMatrix(&projection);
+        qApp->getProjectionMatrix(&projection);

         glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
         glm::vec3 ndcSpacePos;

@@ -510,8 +507,7 @@ QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
 //Finds the collision point of a world space ray
 bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
-    Application* application = Application::getInstance();
-    MyAvatar* myAvatar = application->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

     glm::quat orientation = myAvatar->getOrientation();

@@ -531,7 +527,6 @@ bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position,
 //Renders optional pointers
 void ApplicationOverlay::renderPointers() {
-    Application* application = Application::getInstance();
     auto glCanvas = DependencyManager::get<GLCanvas>();

     //lazily load crosshair texture

@@ -543,12 +538,12 @@ void ApplicationOverlay::renderPointers() {
     glActiveTexture(GL_TEXTURE0);
     glBindTexture(GL_TEXTURE_2D, _crosshairTexture);

-    if (OculusManager::isConnected() && !application->getLastMouseMoveWasSimulated() && !application->isMouseHidden()) {
+    if (OculusManager::isConnected() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
         //If we are in oculus, render reticle later
         if (_lastMouseMove == 0) {
             _lastMouseMove = usecTimestampNow();
         }
-        QPoint position = QPoint(application->getTrueMouseX(), application->getTrueMouseY());
+        QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());

         static const int MAX_IDLE_TIME = 3;
         if (_reticlePosition[MOUSE] != position) {

@@ -567,7 +562,7 @@ void ApplicationOverlay::renderPointers() {
         _magActive[MOUSE] = true;
         _reticleActive[LEFT_CONTROLLER] = false;
         _reticleActive[RIGHT_CONTROLLER] = false;
-    } else if (application->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
+    } else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
         _lastMouseMove = 0;
         //only render controller pointer if we aren't already rendering a mouse pointer
         _reticleActive[MOUSE] = false;

@@ -579,16 +574,15 @@ void ApplicationOverlay::renderPointers() {
 void ApplicationOverlay::renderControllerPointers() {
-    Application* application = Application::getInstance();
     auto glCanvas = DependencyManager::get<GLCanvas>();
-    MyAvatar* myAvatar = application->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

     //Static variables used for storing controller state
     static quint64 pressedTime[NUMBER_OF_RETICLES] = { 0ULL, 0ULL, 0ULL };
     static bool isPressed[NUMBER_OF_RETICLES] = { false, false, false };
     static bool stateWhenPressed[NUMBER_OF_RETICLES] = { false, false, false };

-    const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
+    const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();

     for (unsigned int palmIndex = 2; palmIndex < 4; palmIndex++) {
         const int index = palmIndex - 1;

@@ -695,7 +689,7 @@ void ApplicationOverlay::renderPointersOculus(const glm::vec3& eyePos) {
     glMatrixMode(GL_MODELVIEW);

     //Controller Pointers
-    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {

         PalmData& palm = myAvatar->getHand()->getPalms()[i];

@@ -790,7 +784,7 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
 void ApplicationOverlay::renderAudioMeter() {
     auto glCanvas = DependencyManager::get<GLCanvas>();
-    auto audio = DependencyManager::get<Audio>();
+    auto audio = DependencyManager::get<AudioClient>();

     // Audio VU Meter and Mute Icon
     const int MUTE_ICON_SIZE = 24;

@@ -903,7 +897,6 @@ void ApplicationOverlay::renderAudioMeter() {
 void ApplicationOverlay::renderStatsAndLogs() {
     Application* application = Application::getInstance();
     QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
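The getPalmClickLocation hunks keep their existing projection math (`clipSpacePos = projection * dvec4(tipPos, 1.0)` followed by a perspective divide into `ndcSpacePos`); only the way the projection matrix is fetched changes. A minimal standalone sketch of that projection step, with invented camera parameters and point values (not code from the repository, and assuming a radians-based GLM build):

```cpp
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

int main() {
    // Invented camera parameters: 45-degree vertical FOV, 16:9 aspect ratio.
    glm::mat4 projection = glm::perspective(glm::radians(45.0f), 16.0f / 9.0f, 0.1f, 100.0f);

    // A point in view space, roughly two meters in front of the camera.
    glm::vec3 tipPos(0.25f, -0.1f, -2.0f);

    // Same shape as the overlay code: project, then perspective-divide to NDC.
    glm::vec4 clipSpacePos = projection * glm::vec4(tipPos, 1.0f);
    glm::vec3 ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;

    // NDC x/y in [-1, 1] map linearly onto the overlay quad / window coordinates.
    std::printf("ndc: %.3f %.3f %.3f\n", ndcSpacePos.x, ndcSpacePos.y, ndcSpacePos.z);
    return 0;
}
```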
@@ -18,9 +18,12 @@
 #include <QScrollArea>
 #include <QVBoxLayout>

-#include "Application.h"
+#include <avatar/AvatarManager.h>
+#include <avatar/MyAvatar.h>
+#include <DependencyManager.h>

 #include "AttachmentsDialog.h"
-#include "MainWindow.h"
+#include "ModelsBrowser.h"

 AttachmentsDialog::AttachmentsDialog(QWidget* parent) :
     QDialog(parent) {

@@ -40,7 +43,7 @@ AttachmentsDialog::AttachmentsDialog(QWidget* parent) :
     area->setWidget(container);
     _attachments->addStretch(1);

-    foreach (const AttachmentData& data, Application::getInstance()->getAvatar()->getAttachmentData()) {
+    foreach (const AttachmentData& data, DependencyManager::get<AvatarManager>()->getMyAvatar()->getAttachmentData()) {
         addAttachment(data);
     }

@@ -70,7 +73,7 @@ void AttachmentsDialog::updateAttachmentData() {
     for (int i = 0; i < _attachments->count() - 1; i++) {
         data.append(static_cast<AttachmentPanel*>(_attachments->itemAt(i)->widget())->getAttachmentData());
     }
-    Application::getInstance()->getAvatar()->setAttachmentData(data);
+    DependencyManager::get<AvatarManager>()->getMyAvatar()->setAttachmentData(data);
 }

 void AttachmentsDialog::addAttachment(const AttachmentData& data) {

@@ -116,7 +119,7 @@ AttachmentPanel::AttachmentPanel(AttachmentsDialog* dialog, const AttachmentData
     connect(chooseURL, SIGNAL(clicked(bool)), SLOT(chooseModelURL()));

     layout->addRow("Joint:", _jointName = new QComboBox());
-    QSharedPointer<NetworkGeometry> geometry = Application::getInstance()->getAvatar()->getSkeletonModel().getGeometry();
+    QSharedPointer<NetworkGeometry> geometry = DependencyManager::get<AvatarManager>()->getMyAvatar()->getSkeletonModel().getGeometry();
     if (geometry && geometry->isLoaded()) {
         foreach (const FBXJoint& joint, geometry->getFBXGeometry().joints) {
             _jointName->addItem(joint.name);

@@ -177,7 +180,7 @@ void AttachmentPanel::modelURLChanged() {
         _dialog->updateAttachmentData();
         return;
     }
-    AttachmentData attachment = Application::getInstance()->getAvatar()->loadAttachmentData(_modelURL->text());
+    AttachmentData attachment = DependencyManager::get<AvatarManager>()->getMyAvatar()->loadAttachmentData(_modelURL->text());
     if (attachment.isValid()) {
         _applying = true;
         _jointName->setCurrentText(attachment.jointName);

@@ -195,7 +198,7 @@ void AttachmentPanel::jointNameChanged() {
         _dialog->updateAttachmentData();
         return;
     }
-    AttachmentData attachment = Application::getInstance()->getAvatar()->loadAttachmentData(
+    AttachmentData attachment = DependencyManager::get<AvatarManager>()->getMyAvatar()->loadAttachmentData(
         _modelURL->text(), _jointName->currentText());
     if (attachment.isValid()) {
         applyAttachmentData(attachment);

@@ -209,7 +212,7 @@ void AttachmentPanel::updateAttachmentData() {
     // save the attachment data under the model URL (if any)
     if (!_modelURL->text().isEmpty()) {
-        Application::getInstance()->getAvatar()->saveAttachmentData(getAttachmentData());
+        DependencyManager::get<AvatarManager>()->getMyAvatar()->saveAttachmentData(getAttachmentData());
     }
     _dialog->updateAttachmentData();
 }
@@ -33,13 +33,13 @@
 #include <QVBoxLayout>

 #include <AttributeRegistry.h>
+#include <avatar/AvatarManager.h>
 #include <GeometryCache.h>
 #include <MetavoxelMessages.h>
 #include <MetavoxelUtil.h>
 #include <PathUtils.h>

 #include "Application.h"
-#include "MainWindow.h"
 #include "MetavoxelEditor.h"

 using namespace std;

@@ -267,7 +267,7 @@ void MetavoxelEditor::deleteSelectedAttribute() {
 void MetavoxelEditor::centerGridPosition() {
     const float CENTER_OFFSET = 0.625f;
     float eyePosition = (glm::inverse(getGridRotation()) * Application::getInstance()->getCamera()->getPosition()).z -
-        Application::getInstance()->getAvatar()->getScale() * CENTER_OFFSET;
+        DependencyManager::get<AvatarManager>()->getMyAvatar()->getScale() * CENTER_OFFSET;
     double step = getGridSpacing();
     _gridPosition->setValue(step * floor(eyePosition / step));
 }
@@ -11,11 +11,12 @@
 #include <QFileDialog>

+#include <AudioClient.h>
+#include <avatar/AvatarManager.h>
 #include <devices/Faceshift.h>
 #include <devices/SixenseManager.h>

 #include "Application.h"
-#include "Audio.h"
 #include "MainWindow.h"
 #include "Menu.h"
 #include "ModelsBrowser.h"

@@ -107,7 +108,7 @@ void PreferencesDialog::resizeEvent(QResizeEvent *resizeEvent) {
 void PreferencesDialog::loadPreferences() {

-    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     Menu* menuInstance = Menu::getInstance();

     _displayNameString = myAvatar->getDisplayName();

@@ -133,8 +134,8 @@ void PreferencesDialog::loadPreferences() {
         ui.faceshiftEyeDeflectionSider->maximum());

     ui.faceshiftHostnameEdit->setText(faceshift->getHostname());

-    auto audio = DependencyManager::get<Audio>();
+    auto audio = DependencyManager::get<AudioClient>();
     MixedProcessedAudioStream& stream = audio->getReceivedAudioStream();

     ui.dynamicJitterBuffersCheckBox->setChecked(stream.getDynamicJitterBuffers());

@@ -153,7 +154,7 @@ void PreferencesDialog::loadPreferences() {
     ui.outputStarveDetectionThresholdSpinner->setValue(audio->getOutputStarveDetectionThreshold());
     ui.outputStarveDetectionPeriodSpinner->setValue(audio->getOutputStarveDetectionPeriod());

-    ui.realWorldFieldOfViewSpin->setValue(qApp->getAvatar()->getRealWorldFieldOfView());
+    ui.realWorldFieldOfViewSpin->setValue(DependencyManager::get<AvatarManager>()->getMyAvatar()->getRealWorldFieldOfView());

     ui.fieldOfViewSpin->setValue(qApp->getFieldOfView());

@@ -173,7 +174,7 @@ void PreferencesDialog::loadPreferences() {
 void PreferencesDialog::savePreferences() {

-    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     bool shouldDispatchIdentityPacket = false;

     QString displayNameStr(ui.displayNameEdit->text());

@@ -233,7 +234,7 @@ void PreferencesDialog::savePreferences() {
     auto glCanvas = DependencyManager::get<GLCanvas>();
     Application::getInstance()->resizeGL(glCanvas->width(), glCanvas->height());

-    qApp->getAvatar()->setRealWorldFieldOfView(ui.realWorldFieldOfViewSpin->value());
+    DependencyManager::get<AvatarManager>()->getMyAvatar()->setRealWorldFieldOfView(ui.realWorldFieldOfViewSpin->value());

     qApp->setFieldOfView(ui.fieldOfViewSpin->value());

@@ -251,7 +252,7 @@ void PreferencesDialog::savePreferences() {
     sixense.setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());
     sixense.setInvertButtons(ui.invertSixenseButtonsCheckBox->isChecked());

-    auto audio = DependencyManager::get<Audio>();
+    auto audio = DependencyManager::get<AudioClient>();
     MixedProcessedAudioStream& stream = audio->getReceivedAudioStream();

     stream.setDynamicJitterBuffers(ui.dynamicJitterBuffersCheckBox->isChecked());
@@ -17,8 +17,11 @@
 #include <QTemporaryFile>

 #include <AccountManager.h>
-#include <Application.h>
+#include <avatar/AvatarManager.h>
+#include <avatar/MyAvatar.h>
 #include <FileUtils.h>
+#include <GLCanvas.h>
+#include <NodeList.h>

 #include "Snapshot.h"

@@ -93,7 +96,7 @@ QFile* Snapshot::savedFileForSnapshot(bool isTemporary) {
     auto glCanvas = DependencyManager::get<GLCanvas>();
     QImage shot = glCanvas->grabFrameBuffer();

-    Avatar* avatar = qApp->getAvatar();
+    Avatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

     glm::vec3 location = avatar->getPosition();
     glm::quat orientation = avatar->getHead()->getOrientation();
@@ -15,6 +15,7 @@
 #include <glm/glm.hpp>

 #include <QString>
+#include <QStandardPaths>

 #include <SettingHandle.h>
@@ -18,6 +18,7 @@
 #include <glm/gtx/quaternion.hpp>
 #include <glm/gtx/vector_angle.hpp>

+#include <avatar/AvatarManager.h>
 #include <Application.h>
 #include <GeometryCache.h>
 #include <GLCanvas.h>

@@ -227,7 +228,7 @@ void Stats::display(
     glPointSize(1.0f);

     // we need to take one avatar out so we don't include ourselves
-    int totalAvatars = Application::getInstance()->getAvatarManager().size() - 1;
+    int totalAvatars = DependencyManager::get<AvatarManager>()->size() - 1;
     int totalServers = DependencyManager::get<NodeList>()->size();

     lines = _expanded ? 5 : 3;

@@ -413,7 +414,7 @@ void Stats::display(
         horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + _bandwidthStatsWidth + _pingStatsWidth + 2;
     }

-    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
+    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     glm::vec3 avatarPos = myAvatar->getPosition();

     lines = _expanded ? 8 : 3;
@@ -14,6 +14,7 @@
 #include <typeinfo>

 #include <Application.h>
+#include <avatar/AvatarManager.h>
 #include <devices/OculusManager.h>
 #include <LODManager.h>

@@ -114,7 +115,7 @@ void Overlays::renderWorld(bool drawFront, RenderArgs::RenderMode renderMode, Re
         return;
     }
     bool myAvatarComputed = false;
-    MyAvatar* avatar = NULL;
+    MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     glm::quat myAvatarRotation;
     glm::vec3 myAvatarPosition(0.0f);
     float angle = 0.0f;

@@ -138,7 +139,6 @@ void Overlays::renderWorld(bool drawFront, RenderArgs::RenderMode renderMode, Re
         switch (thisOverlay->getAnchor()) {
             case Overlay::MY_AVATAR:
                 if (!myAvatarComputed) {
-                    avatar = Application::getInstance()->getAvatar();
                     myAvatarRotation = avatar->getOrientation();
                     myAvatarPosition = avatar->getPosition();
                     angle = glm::degrees(glm::angle(myAvatarRotation));
 35  libraries/audio-client/CMakeLists.txt  Normal file
@@ -0,0 +1,35 @@
+set(TARGET_NAME audio-client)
+
+# use setup_hifi_library macro to setup our project and link appropriate Qt modules
+setup_hifi_library(Network Multimedia)
+
+link_hifi_libraries(audio)
+
+# append audio includes to our list of includes to bubble
+list(APPEND ${TARGET_NAME}_DEPENDENCY_INCLUDES "${HIFI_LIBRARY_DIR}/audio/src")
+
+set(GVERB_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/gverb")
+
+# As Gverb is currently the only reverb library, it's required.
+find_package(Gverb REQUIRED)
+
+file(GLOB GVERB_SRCS ${GVERB_SRC_DIRS}/*.c)
+add_library(gverb STATIC ${GVERB_SRCS})
+target_link_libraries(${TARGET_NAME} gverb)
+
+# append gverb includes to our list of includes to bubble
+list(APPEND ${TARGET_NAME}_DEPENDENCY_INCLUDES "${GVERB_INCLUDE_DIRS}")
+
+# we use libsoxr for resampling
+find_package(Soxr REQUIRED)
+target_link_libraries(${TARGET_NAME} ${SOXR_LIBRARIES})
+include_directories(SYSTEM ${SOXR_INCLUDE_DIRS})
+
+if (APPLE)
+  find_library(CoreAudio CoreAudio)
+  find_library(CoreFoundation CoreFoundation)
+  target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation})
+endif ()
+
+# call macro to include our dependency includes and bubble them up via a property on our target
+include_dependency_includes()
@@ -1,5 +1,4 @@
-Instructions for adding the Gverb library to Interface
+Instructions for adding the Gverb library to audio-client
 (This is a required library)
 Clément Brisset, October 22nd, 2014

@@ -9,7 +8,7 @@ Clément Brisset, October 22nd, 2014
 2. Extract the archive

-3. Place the directories “include” and “src” in interface/external/gverb
+3. Place the directories “include” and “src” in libraries/audio-client/external/gverb
    (Normally next to this readme)

 4. Clear your build directory, run cmake, build and you should be all set.
@ -1,5 +1,5 @@
|
||||||
//
|
//
|
||||||
// Audio.cpp
|
// AudioClient.cpp
|
||||||
// interface/src
|
// interface/src
|
||||||
//
|
//
|
||||||
// Created by Stephen Birarda on 1/22/13.
|
// Created by Stephen Birarda on 1/22/13.
|
||||||
|
@ -33,8 +33,8 @@
|
||||||
#include <QtMultimedia/QAudioInput>
|
#include <QtMultimedia/QAudioInput>
|
||||||
#include <QtMultimedia/QAudioOutput>
|
#include <QtMultimedia/QAudioOutput>
|
||||||
|
|
||||||
#include <AudioConstants.h>
|
#include <soxr.h>
|
||||||
#include <AudioInjector.h>
|
|
||||||
#include <NodeList.h>
|
#include <NodeList.h>
|
||||||
#include <PacketHeaders.h>
|
#include <PacketHeaders.h>
|
||||||
#include <PositionalAudioStream.h>
|
#include <PositionalAudioStream.h>
|
||||||
|
@ -42,8 +42,11 @@
|
||||||
#include <SharedUtil.h>
|
#include <SharedUtil.h>
|
||||||
#include <UUID.h>
|
#include <UUID.h>
|
||||||
|
|
||||||
#include "Application.h"
|
#include "AudioInjector.h"
|
||||||
#include "Audio.h"
|
#include "AudioConstants.h"
|
||||||
|
#include "PositionalAudioStream.h"
|
||||||
|
|
||||||
|
#include "AudioClient.h"
|
||||||
|
|
||||||
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 100;
|
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 100;
|
||||||
|
|
||||||
|
@ -59,7 +62,7 @@ Setting::Handle<int> windowSecondsForDesiredReduction("windowSecondsForDesiredRe
|
||||||
DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION);
|
DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION);
|
||||||
Setting::Handle<bool> repetitionWithFade("repetitionWithFade", DEFAULT_REPETITION_WITH_FADE);
|
Setting::Handle<bool> repetitionWithFade("repetitionWithFade", DEFAULT_REPETITION_WITH_FADE);
|
||||||
|
|
||||||
Audio::Audio() :
|
AudioClient::AudioClient() :
|
||||||
AbstractAudioInterface(),
|
AbstractAudioInterface(),
|
||||||
_audioInput(NULL),
|
_audioInput(NULL),
|
||||||
_desiredInputFormat(),
|
_desiredInputFormat(),
|
||||||
|
@ -79,8 +82,14 @@ Audio::Audio() :
|
||||||
_outputStarveDetectionCount(0),
|
_outputStarveDetectionCount(0),
|
||||||
_outputBufferSizeFrames("audioOutputBufferSize",
|
_outputBufferSizeFrames("audioOutputBufferSize",
|
||||||
DEFAULT_MAX_FRAMES_OVER_DESIRED),
|
DEFAULT_MAX_FRAMES_OVER_DESIRED),
|
||||||
|
#ifdef Q_OS_ANDROID
|
||||||
|
_outputStarveDetectionEnabled("audioOutputStarveDetectionEnabled",
|
||||||
|
false),
|
||||||
|
#else
|
||||||
_outputStarveDetectionEnabled("audioOutputStarveDetectionEnabled",
|
_outputStarveDetectionEnabled("audioOutputStarveDetectionEnabled",
|
||||||
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED),
|
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED),
|
||||||
|
#endif
|
||||||
|
|
||||||
_outputStarveDetectionPeriodMsec("audioOutputStarveDetectionPeriod",
|
_outputStarveDetectionPeriodMsec("audioOutputStarveDetectionPeriod",
|
||||||
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD),
|
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD),
|
||||||
_outputStarveDetectionThreshold("audioOutputStarveDetectionThreshold",
|
_outputStarveDetectionThreshold("audioOutputStarveDetectionThreshold",
|
||||||
|
@ -97,6 +106,9 @@ Audio::Audio() :
|
||||||
_reverbOptions(&_scriptReverbOptions),
|
_reverbOptions(&_scriptReverbOptions),
|
||||||
_gverbLocal(NULL),
|
_gverbLocal(NULL),
|
||||||
_gverb(NULL),
|
_gverb(NULL),
|
||||||
|
_inputToNetworkResampler(NULL),
|
||||||
|
_networkToOutputResampler(NULL),
|
||||||
|
_loopbackResampler(NULL),
|
||||||
_noiseSourceEnabled(false),
|
_noiseSourceEnabled(false),
|
||||||
_toneSourceEnabled(true),
|
_toneSourceEnabled(true),
|
||||||
_outgoingAvatarAudioSequenceNumber(0),
|
_outgoingAvatarAudioSequenceNumber(0),
|
||||||
|
@ -107,7 +119,8 @@ Audio::Audio() :
|
||||||
// clear the array of locally injected samples
|
// clear the array of locally injected samples
|
||||||
memset(_localProceduralSamples, 0, AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL);
|
memset(_localProceduralSamples, 0, AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL);
|
||||||
|
|
||||||
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedSamples, Qt::DirectConnection);
|
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples,
|
||||||
|
this, &AudioClient::processReceivedSamples, Qt::DirectConnection);
|
||||||
|
|
||||||
// Initialize GVerb
|
// Initialize GVerb
|
||||||
initGverb();
|
initGverb();
|
||||||
|
@ -118,11 +131,11 @@ Audio::Audio() :
|
||||||
_outputDevices = getDeviceNames(QAudio::AudioOutput);
|
_outputDevices = getDeviceNames(QAudio::AudioOutput);
|
||||||
|
|
||||||
QTimer* updateTimer = new QTimer(this);
|
QTimer* updateTimer = new QTimer(this);
|
||||||
connect(updateTimer, &QTimer::timeout, this, &Audio::checkDevices);
|
connect(updateTimer, &QTimer::timeout, this, &AudioClient::checkDevices);
|
||||||
updateTimer->start(DEVICE_CHECK_INTERVAL_MSECS);
|
updateTimer->start(DEVICE_CHECK_INTERVAL_MSECS);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::reset() {
|
void AudioClient::reset() {
|
||||||
_receivedAudioStream.reset();
|
_receivedAudioStream.reset();
|
||||||
_stats.reset();
|
_stats.reset();
|
||||||
_noiseSource.reset();
|
_noiseSource.reset();
|
||||||
|
@ -131,7 +144,7 @@ void Audio::reset() {
|
||||||
_inputGain.reset();
|
_inputGain.reset();
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::audioMixerKilled() {
|
void AudioClient::audioMixerKilled() {
|
||||||
_outgoingAvatarAudioSequenceNumber = 0;
|
_outgoingAvatarAudioSequenceNumber = 0;
|
||||||
_stats.reset();
|
_stats.reset();
|
||||||
}
|
}
|
||||||
|
@ -149,6 +162,26 @@ QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& de
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
soxr_datatype_t soxrDataTypeFromQAudioFormat(const QAudioFormat& audioFormat) {
|
||||||
|
if (audioFormat.sampleType() == QAudioFormat::Float) {
|
||||||
|
return SOXR_FLOAT32_I;
|
||||||
|
} else {
|
||||||
|
if (audioFormat.sampleSize() == 16) {
|
||||||
|
return SOXR_INT16_I;
|
||||||
|
} else {
|
||||||
|
return SOXR_INT32_I;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
int numDestinationSamplesRequired(const QAudioFormat& sourceFormat, const QAudioFormat& destinationFormat,
|
||||||
|
int numSourceSamples) {
|
||||||
|
float ratio = (float) destinationFormat.channelCount() / sourceFormat.channelCount();
|
||||||
|
ratio *= (float) destinationFormat.sampleRate() / sourceFormat.sampleRate();
|
||||||
|
|
||||||
|
return (numSourceSamples * ratio) + 0.5f;
|
||||||
|
}
|
||||||
|
|
||||||
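A note on the two helpers added just above: soxrDataTypeFromQAudioFormat maps 16-bit integer formats to SOXR_INT16_I (and float formats to SOXR_FLOAT32_I), while numDestinationSamplesRequired scales the sample count by both the channel-count ratio and the sample-rate ratio. A minimal sketch of that arithmetic with concrete numbers; the 44100 Hz stereo capture format and 24000 Hz mono network format here are assumptions for illustration, not values taken from this commit:

```cpp
// Illustrative only: a 44100 Hz stereo device format against an assumed
// 24000 Hz mono network format.
#include <QtCore/QDebug>
#include <QtMultimedia/QAudioFormat>

int main() {
    QAudioFormat device;
    device.setSampleRate(44100);
    device.setChannelCount(2);

    QAudioFormat network;
    network.setSampleRate(24000);
    network.setChannelCount(1);

    // same arithmetic as numDestinationSamplesRequired: channel ratio * rate ratio
    float ratio = (float) network.channelCount() / device.channelCount(); // 0.5
    ratio *= (float) network.sampleRate() / device.sampleRate();          // * 0.544...

    int numSourceSamples = 4410; // 50 ms of interleaved stereo device audio
    int numDestinationSamples = (int) ((numSourceSamples * ratio) + 0.5f); // 1200

    qDebug() << "destination samples required:" << numDestinationSamples;
    return 0;
}
```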
QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
|
QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
|
||||||
#ifdef __APPLE__
|
#ifdef __APPLE__
|
||||||
if (QAudioDeviceInfo::availableDevices(mode).size() > 1) {
|
if (QAudioDeviceInfo::availableDevices(mode).size() > 1) {
|
||||||
|
@ -272,18 +305,35 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
|
||||||
adjustedAudioFormat.setChannelCount(1);
|
adjustedAudioFormat.setChannelCount(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const int FORTY_FOUR = 44100;
|
||||||
|
|
||||||
|
adjustedAudioFormat = desiredAudioFormat;
|
||||||
|
|
||||||
|
#ifdef Q_OS_ANDROID
|
||||||
|
adjustedAudioFormat.setSampleRate(FORTY_FOUR);
|
||||||
|
#else
|
||||||
|
const int HALF_FORTY_FOUR = FORTY_FOUR / 2;
|
||||||
|
|
||||||
if (audioDevice.supportedSampleRates().contains(AudioConstants::SAMPLE_RATE * 2)) {
|
if (audioDevice.supportedSampleRates().contains(AudioConstants::SAMPLE_RATE * 2)) {
|
||||||
// use 48 kHz, which is a simple downsample/upsample
|
// use 48 kHz, which is a simple downsample/upsample
|
||||||
adjustedAudioFormat = desiredAudioFormat;
|
|
||||||
adjustedAudioFormat.setSampleRate(AudioConstants::SAMPLE_RATE * 2);
|
adjustedAudioFormat.setSampleRate(AudioConstants::SAMPLE_RATE * 2);
|
||||||
|
} else if (audioDevice.supportedSampleRates().contains(HALF_FORTY_FOUR)) {
|
||||||
|
// use 22050, resample but closer to 24
|
||||||
|
adjustedAudioFormat.setSampleRate(HALF_FORTY_FOUR);
|
||||||
|
} else if (audioDevice.supportedSampleRates().contains(FORTY_FOUR)) {
|
||||||
|
// use 44100, libsoxr will resample
|
||||||
|
adjustedAudioFormat.setSampleRate(FORTY_FOUR);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
if (adjustedAudioFormat != desiredAudioFormat) {
|
||||||
// return the nearest in case it needs 2 channels
|
// return the nearest in case it needs 2 channels
|
||||||
adjustedAudioFormat = audioDevice.nearestFormat(adjustedAudioFormat);
|
adjustedAudioFormat = audioDevice.nearestFormat(adjustedAudioFormat);
|
||||||
return true;
|
return true;
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
return false;
|
|
||||||
} else {
|
} else {
|
||||||
// set the adjustedAudioFormat to the desiredAudioFormat, since it will work
|
// set the adjustedAudioFormat to the desiredAudioFormat, since it will work
|
||||||
adjustedAudioFormat = desiredAudioFormat;
|
adjustedAudioFormat = desiredAudioFormat;
|
||||||
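The hunk above changes how adjustedFormatForAudioDevice picks a sample rate: on Android it goes straight to 44100 Hz, while desktop builds prefer 48000 Hz (an exact 2x of the 24 kHz network rate), then 22050, then 44100, and let soxr bridge whatever was chosen. A minimal sketch of that preference order, using a plain list as a stand-in for QAudioDeviceInfo::supportedSampleRates() (the helper name and rates are illustrative):

```cpp
// Sketch of the desktop-path sample-rate preference introduced above.
#include <QtCore/QDebug>
#include <QtCore/QList>

static int pickDeviceSampleRate(const QList<int>& supportedRates, int networkRate) {
    if (supportedRates.contains(networkRate * 2)) {
        return networkRate * 2;   // 48000: an exact 2x of the network rate
    } else if (supportedRates.contains(22050)) {
        return 22050;             // close to the 24000 Hz network rate
    } else if (supportedRates.contains(44100)) {
        return 44100;             // common fallback; soxr resamples
    }
    return networkRate;           // otherwise stick with the desired network rate
}

int main() {
    qDebug() << pickDeviceSampleRate({44100, 48000}, 24000); // 48000
    qDebug() << pickDeviceSampleRate({44100}, 24000);        // 44100
    return 0;
}
```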
|
@ -291,87 +341,104 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void linearResampling(const int16_t* sourceSamples, int16_t* destinationSamples,
|
bool sampleChannelConversion(const int16_t* sourceSamples, int16_t* destinationSamples, unsigned int numSourceSamples,
|
||||||
unsigned int numSourceSamples, unsigned int numDestinationSamples,
|
const QAudioFormat& sourceAudioFormat, const QAudioFormat& destinationAudioFormat) {
|
||||||
const QAudioFormat& sourceAudioFormat, const QAudioFormat& destinationAudioFormat) {
|
if (sourceAudioFormat.channelCount() == 2 && destinationAudioFormat.channelCount() == 1) {
|
||||||
if (sourceAudioFormat == destinationAudioFormat) {
|
// loop through the stereo input audio samples and average every two samples
|
||||||
memcpy(destinationSamples, sourceSamples, numSourceSamples * sizeof(int16_t));
|
for (uint i = 0; i < numSourceSamples; i += 2) {
|
||||||
} else {
|
destinationSamples[i / 2] = (sourceSamples[i] / 2) + (sourceSamples[i + 1] / 2);
|
||||||
float sourceToDestinationFactor = (sourceAudioFormat.sampleRate() / (float) destinationAudioFormat.sampleRate())
|
|
||||||
* (sourceAudioFormat.channelCount() / (float) destinationAudioFormat.channelCount());
|
|
||||||
|
|
||||||
// take into account the number of channels in source and destination
|
|
||||||
// accomodate for the case where have an output with > 2 channels
|
|
||||||
// this is the case with our HDMI capture
|
|
||||||
|
|
||||||
if (sourceToDestinationFactor >= 2) {
|
|
||||||
// we need to downsample from 48 to 24
|
|
||||||
// for now this only supports a mono output - this would be the case for audio input
|
|
||||||
if (destinationAudioFormat.channelCount() == 1) {
|
|
||||||
for (unsigned int i = sourceAudioFormat.channelCount(); i < numSourceSamples; i += 2 * sourceAudioFormat.channelCount()) {
|
|
||||||
if (i + (sourceAudioFormat.channelCount()) >= numSourceSamples) {
|
|
||||||
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
|
|
||||||
(sourceSamples[i - sourceAudioFormat.channelCount()] / 2)
|
|
||||||
+ (sourceSamples[i] / 2);
|
|
||||||
} else {
|
|
||||||
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
|
|
||||||
(sourceSamples[i - sourceAudioFormat.channelCount()] / 4)
|
|
||||||
+ (sourceSamples[i] / 2)
|
|
||||||
+ (sourceSamples[i + sourceAudioFormat.channelCount()] / 4);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// this is a 48 to 24 resampling but both source and destination are two channels
|
|
||||||
// squish two samples into one in each channel
|
|
||||||
for (unsigned int i = 0; i < numSourceSamples; i += 4) {
|
|
||||||
destinationSamples[i / 2] = (sourceSamples[i] / 2) + (sourceSamples[i + 2] / 2);
|
|
||||||
destinationSamples[(i / 2) + 1] = (sourceSamples[i + 1] / 2) + (sourceSamples[i + 3] / 2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (sourceAudioFormat.sampleRate() == destinationAudioFormat.sampleRate()) {
|
|
||||||
// mono to stereo, same sample rate
|
|
||||||
if (!(sourceAudioFormat.channelCount() == 1 && destinationAudioFormat.channelCount() == 2)) {
|
|
||||||
qWarning() << "Unsupported format conversion" << sourceAudioFormat << destinationAudioFormat;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
for (const int16_t* sourceEnd = sourceSamples + numSourceSamples; sourceSamples != sourceEnd;
|
|
||||||
sourceSamples++) {
|
|
||||||
*destinationSamples++ = *sourceSamples;
|
|
||||||
*destinationSamples++ = *sourceSamples;
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// upsample from 24 to 48
|
|
||||||
// for now this only supports a stereo to stereo conversion - this is our case for network audio to output
|
|
||||||
int sourceIndex = 0;
|
|
||||||
int dtsSampleRateFactor = (destinationAudioFormat.sampleRate() / sourceAudioFormat.sampleRate());
|
|
||||||
int sampleShift = destinationAudioFormat.channelCount() * dtsSampleRateFactor;
|
|
||||||
int destinationToSourceFactor = (1 / sourceToDestinationFactor);
|
|
||||||
|
|
||||||
for (unsigned int i = 0; i < numDestinationSamples; i += sampleShift) {
|
|
||||||
sourceIndex = (i / destinationToSourceFactor);
|
|
||||||
|
|
||||||
// fill the L/R channels and make the rest silent
|
|
||||||
for (unsigned int j = i; j < i + sampleShift; j++) {
|
|
||||||
if (j % destinationAudioFormat.channelCount() == 0) {
|
|
||||||
// left channel
|
|
||||||
destinationSamples[j] = sourceSamples[sourceIndex];
|
|
||||||
} else if (j % destinationAudioFormat.channelCount() == 1) {
|
|
||||||
// right channel
|
|
||||||
destinationSamples[j] = sourceSamples[sourceIndex + (sourceAudioFormat.channelCount() > 1 ? 1 : 0)];
|
|
||||||
} else {
|
|
||||||
// channels above 2, fill with silence
|
|
||||||
destinationSamples[j] = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} else if (sourceAudioFormat.channelCount() == 1 && destinationAudioFormat.channelCount() == 2) {
|
||||||
|
|
||||||
|
// loop through the mono input audio and repeat each sample twice
|
||||||
|
for (uint i = 0; i < numSourceSamples; ++i) {
|
||||||
|
destinationSamples[i * 2] = destinationSamples[(i * 2) + 1] = sourceSamples[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
soxr_error_t possibleResampling(soxr_t resampler,
|
||||||
|
const int16_t* sourceSamples, int16_t* destinationSamples,
|
||||||
|
unsigned int numSourceSamples, unsigned int numDestinationSamples,
|
||||||
|
const QAudioFormat& sourceAudioFormat, const QAudioFormat& destinationAudioFormat) {
|
||||||
|
|
||||||
|
if (numSourceSamples > 0) {
|
||||||
|
if (!resampler) {
|
||||||
|
if (!sampleChannelConversion(sourceSamples, destinationSamples, numSourceSamples,
|
||||||
|
sourceAudioFormat, destinationAudioFormat)) {
|
||||||
|
// no conversion, we can copy the samples directly across
|
||||||
|
memcpy(destinationSamples, sourceSamples, numSourceSamples * sizeof(int16_t));
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
} else {
|
||||||
|
soxr_error_t resampleError = 0;
|
||||||
|
|
||||||
|
if (sourceAudioFormat.channelCount() != destinationAudioFormat.channelCount()) {
|
||||||
|
float channelCountRatio = (float) destinationAudioFormat.channelCount() / sourceAudioFormat.channelCount();
|
||||||
|
|
||||||
|
int numChannelCoversionSamples = (int) (numSourceSamples * channelCountRatio);
|
||||||
|
int16_t* channelConversionSamples = new int16_t[numChannelCoversionSamples];
|
||||||
|
|
||||||
|
sampleChannelConversion(sourceSamples, channelConversionSamples,
|
||||||
|
numSourceSamples,
|
||||||
|
sourceAudioFormat, destinationAudioFormat);
|
||||||
|
|
||||||
|
qDebug() << "resample from" << sourceAudioFormat << "to" << destinationAudioFormat
|
||||||
|
<< "from" << numChannelCoversionSamples << "to" << numDestinationSamples;
|
||||||
|
|
||||||
|
resampleError = soxr_process(resampler,
|
||||||
|
channelConversionSamples, numChannelCoversionSamples, NULL,
|
||||||
|
destinationSamples, numDestinationSamples, NULL);
|
||||||
|
|
||||||
|
delete[] channelConversionSamples;
|
||||||
|
} else {
|
||||||
|
resampleError = soxr_process(resampler,
|
||||||
|
sourceSamples, numSourceSamples, NULL,
|
||||||
|
destinationSamples, numDestinationSamples, NULL);
|
||||||
|
}
|
||||||
|
|
||||||
|
return resampleError;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::start() {
|
soxr_t soxrResamplerFromInputFormatToOutputFormat(const QAudioFormat& sourceAudioFormat,
|
||||||
|
const QAudioFormat& destinationAudioFormat) {
|
||||||
|
soxr_error_t soxrError;
|
||||||
|
|
||||||
|
// setup soxr_io_spec_t for input and output
|
||||||
|
soxr_io_spec_t inputToNetworkSpec = soxr_io_spec(soxrDataTypeFromQAudioFormat(sourceAudioFormat),
|
||||||
|
soxrDataTypeFromQAudioFormat(destinationAudioFormat));
|
||||||
|
|
||||||
|
// setup soxr_quality_spec_t for quality options
|
||||||
|
soxr_quality_spec_t qualitySpec = soxr_quality_spec(SOXR_MQ, 0);
|
||||||
|
|
||||||
|
soxr_t newResampler = soxr_create(sourceAudioFormat.sampleRate(),
|
||||||
|
destinationAudioFormat.sampleRate(),
|
||||||
|
1,
|
||||||
|
&soxrError, &inputToNetworkSpec, &qualitySpec, 0);
|
||||||
|
|
||||||
|
if (soxrError) {
|
||||||
|
qDebug() << "There was an error setting up the soxr resampler -" << "soxr error code was " << soxrError;
|
||||||
|
|
||||||
|
soxr_delete(newResampler);
|
||||||
|
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
return newResampler;
|
||||||
|
}
|
||||||
|
|
||||||
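soxrResamplerFromInputFormatToOutputFormat and possibleResampling above wrap libsoxr's one-shot API. For readers new to that library, here is a self-contained example of the same create/process/delete pattern; the 48 kHz to 24 kHz rates, the buffer size, and the test tone are illustrative assumptions, not values from this commit:

```cpp
// Minimal libsoxr one-shot example mirroring the calls used above.
#include <soxr.h>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
    const double inputRate = 48000.0;
    const double outputRate = 24000.0;
    const double PI = 3.14159265358979323846;

    // interleaved int16 in and out, medium quality, mono
    soxr_io_spec_t ioSpec = soxr_io_spec(SOXR_INT16_I, SOXR_INT16_I);
    soxr_quality_spec_t qualitySpec = soxr_quality_spec(SOXR_MQ, 0);

    soxr_error_t soxrError = 0;
    soxr_t resampler = soxr_create(inputRate, outputRate, 1,
                                   &soxrError, &ioSpec, &qualitySpec, NULL);
    if (soxrError) {
        fprintf(stderr, "soxr_create failed: %s\n", soxrError);
        return 1;
    }

    // 10 ms of a 440 Hz tone at 48 kHz
    std::vector<int16_t> input(480);
    for (size_t i = 0; i < input.size(); ++i) {
        input[i] = (int16_t) (10000 * sin(2.0 * PI * 440.0 * i / inputRate));
    }
    std::vector<int16_t> output(input.size() / 2);

    size_t inputDone = 0, outputDone = 0;
    soxrError = soxr_process(resampler,
                             input.data(), input.size(), &inputDone,
                             output.data(), output.size(), &outputDone);
    if (!soxrError) {
        // expect a bit fewer than 240 samples on the first call (filter latency)
        printf("consumed %zu in, produced %zu out\n", inputDone, outputDone);
    }

    soxr_delete(resampler);
    return 0;
}
```

possibleResampling in the diff adds one wrinkle on top of this: when the channel counts also differ, it runs sampleChannelConversion first and only then hands the intermediate buffer to soxr_process.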
|
void AudioClient::start() {
|
||||||
|
|
||||||
// set up the desired audio format
|
// set up the desired audio format
|
||||||
_desiredInputFormat.setSampleRate(AudioConstants::SAMPLE_RATE);
|
_desiredInputFormat.setSampleRate(AudioConstants::SAMPLE_RATE);
|
||||||
|
@ -394,9 +461,11 @@ void Audio::start() {
|
||||||
|
|
||||||
if (!inputFormatSupported) {
|
if (!inputFormatSupported) {
|
||||||
qDebug() << "Unable to set up audio input because of a problem with input format.";
|
qDebug() << "Unable to set up audio input because of a problem with input format.";
|
||||||
|
qDebug() << "The closest format available is" << inputDeviceInfo.nearestFormat(_desiredInputFormat);
|
||||||
}
|
}
|
||||||
if (!outputFormatSupported) {
|
if (!outputFormatSupported) {
|
||||||
qDebug() << "Unable to set up audio output because of a problem with output format.";
|
qDebug() << "Unable to set up audio output because of a problem with output format.";
|
||||||
|
qDebug() << "The closest format available is" << outputDeviceInfo.nearestFormat(_desiredOutputFormat);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (_audioInput) {
|
if (_audioInput) {
|
||||||
|
@ -410,7 +479,7 @@ void Audio::start() {
|
||||||
_inputGain.setParameters(1.0f,0.0f);
|
_inputGain.setParameters(1.0f,0.0f);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::stop() {
|
void AudioClient::stop() {
|
||||||
|
|
||||||
_inputFrameBuffer.finalize();
|
_inputFrameBuffer.finalize();
|
||||||
_inputGain.finalize();
|
_inputGain.finalize();
|
||||||
|
@ -421,14 +490,19 @@ void Audio::stop() {
|
||||||
// "switch" to invalid devices in order to shut down the state
|
// "switch" to invalid devices in order to shut down the state
|
||||||
switchInputToAudioDevice(QAudioDeviceInfo());
|
switchInputToAudioDevice(QAudioDeviceInfo());
|
||||||
switchOutputToAudioDevice(QAudioDeviceInfo());
|
switchOutputToAudioDevice(QAudioDeviceInfo());
|
||||||
|
|
||||||
|
if (_loopbackResampler) {
|
||||||
|
soxr_delete(_loopbackResampler);
|
||||||
|
_loopbackResampler = NULL;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
QString Audio::getDefaultDeviceName(QAudio::Mode mode) {
|
QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) {
|
||||||
QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
|
QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
|
||||||
return deviceInfo.deviceName();
|
return deviceInfo.deviceName();
|
||||||
}
|
}
|
||||||
|
|
||||||
QVector<QString> Audio::getDeviceNames(QAudio::Mode mode) {
|
QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) {
|
||||||
QVector<QString> deviceNames;
|
QVector<QString> deviceNames;
|
||||||
foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
|
foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
|
||||||
deviceNames << audioDevice.deviceName().trimmed();
|
deviceNames << audioDevice.deviceName().trimmed();
|
||||||
|
@ -436,17 +510,17 @@ QVector<QString> Audio::getDeviceNames(QAudio::Mode mode) {
|
||||||
return deviceNames;
|
return deviceNames;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Audio::switchInputToAudioDevice(const QString& inputDeviceName) {
|
bool AudioClient::switchInputToAudioDevice(const QString& inputDeviceName) {
|
||||||
qDebug() << "DEBUG [" << inputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName).deviceName() << "]";
|
qDebug() << "DEBUG [" << inputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName).deviceName() << "]";
|
||||||
return switchInputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName));
|
return switchInputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName));
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Audio::switchOutputToAudioDevice(const QString& outputDeviceName) {
|
bool AudioClient::switchOutputToAudioDevice(const QString& outputDeviceName) {
|
||||||
qDebug() << "DEBUG [" << outputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName).deviceName() << "]";
|
qDebug() << "DEBUG [" << outputDeviceName << "] [" << getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName).deviceName() << "]";
|
||||||
return switchOutputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName));
|
return switchOutputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName));
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::initGverb() {
|
void AudioClient::initGverb() {
|
||||||
// Initialize a new gverb instance
|
// Initialize a new gverb instance
|
||||||
_gverbLocal = gverb_new(_outputFormat.sampleRate(), _reverbOptions->getMaxRoomSize(), _reverbOptions->getRoomSize(),
|
_gverbLocal = gverb_new(_outputFormat.sampleRate(), _reverbOptions->getMaxRoomSize(), _reverbOptions->getRoomSize(),
|
||||||
_reverbOptions->getReverbTime(), _reverbOptions->getDamping(), _reverbOptions->getSpread(),
|
_reverbOptions->getReverbTime(), _reverbOptions->getDamping(), _reverbOptions->getSpread(),
|
||||||
|
@ -473,7 +547,7 @@ void Audio::initGverb() {
|
||||||
gverb_set_taillevel(_gverb, DB_CO(_reverbOptions->getTailLevel()));
|
gverb_set_taillevel(_gverb, DB_CO(_reverbOptions->getTailLevel()));
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::updateGverbOptions() {
|
void AudioClient::updateGverbOptions() {
|
||||||
bool reverbChanged = false;
|
bool reverbChanged = false;
|
||||||
if (_receivedAudioStream.hasReverb()) {
|
if (_receivedAudioStream.hasReverb()) {
|
||||||
|
|
||||||
|
@ -500,7 +574,7 @@ void Audio::updateGverbOptions() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::setReverbOptions(const AudioEffectOptions* options) {
|
void AudioClient::setReverbOptions(const AudioEffectOptions* options) {
|
||||||
// Save the new options
|
// Save the new options
|
||||||
_scriptReverbOptions.setMaxRoomSize(options->getMaxRoomSize());
|
_scriptReverbOptions.setMaxRoomSize(options->getMaxRoomSize());
|
||||||
_scriptReverbOptions.setRoomSize(options->getRoomSize());
|
_scriptReverbOptions.setRoomSize(options->getRoomSize());
|
||||||
|
@ -520,7 +594,7 @@ void Audio::setReverbOptions(const AudioEffectOptions* options) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::addReverb(ty_gverb* gverb, int16_t* samplesData, int numSamples, QAudioFormat& audioFormat, bool noEcho) {
|
void AudioClient::addReverb(ty_gverb* gverb, int16_t* samplesData, int numSamples, QAudioFormat& audioFormat, bool noEcho) {
|
||||||
float wetFraction = DB_CO(_reverbOptions->getWetLevel());
|
float wetFraction = DB_CO(_reverbOptions->getWetLevel());
|
||||||
float dryFraction = (noEcho) ? 0.0f : (1.0f - wetFraction);
|
float dryFraction = (noEcho) ? 0.0f : (1.0f - wetFraction);
|
||||||
|
|
||||||
|
@ -549,7 +623,7 @@ void Audio::addReverb(ty_gverb* gverb, int16_t* samplesData, int numSamples, QAu
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
||||||
// If there is server echo, reverb will be applied to the received audio stream so no need to have it here.
|
// If there is server echo, reverb will be applied to the received audio stream so no need to have it here.
|
||||||
bool hasLocalReverb = (_reverb || _receivedAudioStream.hasReverb()) &&
|
bool hasLocalReverb = (_reverb || _receivedAudioStream.hasReverb()) &&
|
||||||
!_shouldEchoToServer;
|
!_shouldEchoToServer;
|
||||||
|
@ -565,18 +639,26 @@ void Audio::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
||||||
_loopbackOutputDevice = _loopbackAudioOutput->start();
|
_loopbackOutputDevice = _loopbackAudioOutput->start();
|
||||||
}
|
}
|
||||||
|
|
||||||
QByteArray loopBackByteArray(inputByteArray);
|
// do we need to setup a resampler?
|
||||||
if (_inputFormat != _outputFormat) {
|
if (_inputFormat.sampleRate() != _outputFormat.sampleRate() && !_loopbackResampler) {
|
||||||
float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate()) *
|
qDebug() << "Attemping to create a resampler for input format to output format for audio loopback.";
|
||||||
(_outputFormat.channelCount() / _inputFormat.channelCount());
|
_loopbackResampler = soxrResamplerFromInputFormatToOutputFormat(_inputFormat, _outputFormat);
|
||||||
loopBackByteArray.resize(inputByteArray.size() * loopbackOutputToInputRatio);
|
|
||||||
loopBackByteArray.fill(0);
|
if (!_loopbackResampler) {
|
||||||
linearResampling(reinterpret_cast<int16_t*>(inputByteArray.data()),
|
return;
|
||||||
reinterpret_cast<int16_t*>(loopBackByteArray.data()),
|
}
|
||||||
inputByteArray.size() / sizeof(int16_t), loopBackByteArray.size() / sizeof(int16_t),
|
|
||||||
_inputFormat, _outputFormat);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static QByteArray loopBackByteArray;
|
||||||
|
loopBackByteArray.resize(numDestinationSamplesRequired(_inputFormat, _outputFormat,
|
||||||
|
inputByteArray.size() / sizeof(int16_t)) * sizeof(int16_t));
|
||||||
|
|
||||||
|
possibleResampling(_loopbackResampler,
|
||||||
|
reinterpret_cast<int16_t*>(inputByteArray.data()),
|
||||||
|
reinterpret_cast<int16_t*>(loopBackByteArray.data()),
|
||||||
|
inputByteArray.size() / sizeof(int16_t), loopBackByteArray.size() / sizeof(int16_t),
|
||||||
|
_inputFormat, _outputFormat);
|
||||||
|
|
||||||
if (hasLocalReverb) {
|
if (hasLocalReverb) {
|
||||||
int16_t* loopbackSamples = reinterpret_cast<int16_t*>(loopBackByteArray.data());
|
int16_t* loopbackSamples = reinterpret_cast<int16_t*>(loopBackByteArray.data());
|
||||||
int numLoopbackSamples = loopBackByteArray.size() / sizeof(int16_t);
|
int numLoopbackSamples = loopBackByteArray.size() / sizeof(int16_t);
|
||||||
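The loopback path above now builds its soxr resampler lazily, on the first frame where the input and output sample rates actually differ, and then keeps reusing it (along with a static QByteArray for the converted samples). A control-flow sketch of that lazy-create pattern; createResampler here is a hypothetical stand-in for soxrResamplerFromInputFormatToOutputFormat, and only the guard logic mirrors the diff:

```cpp
// Lazy creation and reuse of a loopback resampler.
#include <soxr.h>
#include <cstdio>

static soxr_t gLoopbackResampler = NULL;

static soxr_t createResampler(double inputRate, double outputRate) {
    soxr_error_t error = 0;
    // default io/quality/runtime specs are fine for this sketch
    soxr_t resampler = soxr_create(inputRate, outputRate, 1, &error, NULL, NULL, NULL);
    return error ? NULL : resampler;
}

// called once per captured frame
static bool ensureLoopbackResampler(double inputRate, double outputRate) {
    if (inputRate != outputRate && !gLoopbackResampler) {
        printf("Attempting to create a loopback resampler (%g -> %g Hz)\n",
               inputRate, outputRate);
        gLoopbackResampler = createResampler(inputRate, outputRate);
        return gLoopbackResampler != NULL;
    }
    // matching rates need no resampler; a previously created one is reused
    return true;
}

int main() {
    ensureLoopbackResampler(44100.0, 44100.0); // no resampler needed
    ensureLoopbackResampler(44100.0, 24000.0); // created on the first mismatch
    ensureLoopbackResampler(44100.0, 24000.0); // reused
    if (gLoopbackResampler) {
        soxr_delete(gLoopbackResampler);       // the diff does this in stop()
    }
    return 0;
}
```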
|
@ -589,7 +671,7 @@ void Audio::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::handleAudioInput() {
|
void AudioClient::handleAudioInput() {
|
||||||
static char audioDataPacket[MAX_PACKET_SIZE];
|
static char audioDataPacket[MAX_PACKET_SIZE];
|
||||||
|
|
||||||
static int numBytesPacketHeader = numBytesForPacketHeaderGivenPacketType(PacketTypeMicrophoneAudioNoEcho);
|
static int numBytesPacketHeader = numBytesForPacketHeaderGivenPacketType(PacketTypeMicrophoneAudioNoEcho);
|
||||||
|
@ -600,7 +682,7 @@ void Audio::handleAudioInput() {
|
||||||
static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
|
static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
|
||||||
static int16_t* networkAudioSamples = (int16_t*)(audioDataPacket + leadingBytes);
|
static int16_t* networkAudioSamples = (int16_t*)(audioDataPacket + leadingBytes);
|
||||||
|
|
||||||
float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio(_numInputCallbackBytes);
|
float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio();
|
||||||
|
|
||||||
int inputSamplesRequired = (int)((float)AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * inputToNetworkInputRatio);
|
int inputSamplesRequired = (int)((float)AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * inputToNetworkInputRatio);
|
||||||
|
|
||||||
|
@ -639,9 +721,6 @@ void Audio::handleAudioInput() {
|
||||||
|
|
||||||
while (_inputRingBuffer.samplesAvailable() >= inputSamplesRequired) {
|
while (_inputRingBuffer.samplesAvailable() >= inputSamplesRequired) {
|
||||||
|
|
||||||
int16_t* inputAudioSamples = new int16_t[inputSamplesRequired];
|
|
||||||
_inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);
|
|
||||||
|
|
||||||
const int numNetworkBytes = _isStereoInput
|
const int numNetworkBytes = _isStereoInput
|
||||||
? AudioConstants::NETWORK_FRAME_BYTES_STEREO
|
? AudioConstants::NETWORK_FRAME_BYTES_STEREO
|
||||||
: AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
: AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||||
|
@ -649,20 +728,25 @@ void Audio::handleAudioInput() {
|
||||||
? AudioConstants::NETWORK_FRAME_SAMPLES_STEREO
|
? AudioConstants::NETWORK_FRAME_SAMPLES_STEREO
|
||||||
: AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
|
: AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
|
||||||
|
|
||||||
// zero out the monoAudioSamples array and the locally injected audio
|
|
||||||
memset(networkAudioSamples, 0, numNetworkBytes);
|
|
||||||
|
|
||||||
if (!_muted) {
|
if (!_muted) {
|
||||||
|
|
||||||
|
// zero out the monoAudioSamples array and the locally injected audio
|
||||||
|
memset(networkAudioSamples, 0, numNetworkBytes);
|
||||||
|
|
||||||
// Increment the time since the last clip
|
// Increment the time since the last clip
|
||||||
if (_timeSinceLastClip >= 0.0f) {
|
if (_timeSinceLastClip >= 0.0f) {
|
||||||
_timeSinceLastClip += (float) numNetworkSamples / (float) AudioConstants::SAMPLE_RATE;
|
_timeSinceLastClip += (float) numNetworkSamples / (float) AudioConstants::SAMPLE_RATE;
|
||||||
}
|
}
|
||||||
|
|
||||||
// we aren't muted, downsample the input audio
|
int16_t* inputAudioSamples = new int16_t[inputSamplesRequired];
|
||||||
linearResampling((int16_t*) inputAudioSamples, networkAudioSamples,
|
_inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);
|
||||||
inputSamplesRequired, numNetworkSamples,
|
|
||||||
_inputFormat, _desiredInputFormat);
|
possibleResampling(_inputToNetworkResampler,
|
||||||
|
inputAudioSamples, networkAudioSamples,
|
||||||
|
inputSamplesRequired, numNetworkSamples,
|
||||||
|
_inputFormat, _desiredInputFormat);
|
||||||
|
|
||||||
|
delete[] inputAudioSamples;
|
||||||
|
|
||||||
// only impose the noise gate and perform tone injection if we are sending mono audio
|
// only impose the noise gate and perform tone injection if we are sending mono audio
|
||||||
if (!_isStereoInput && !_audioSourceInjectEnabled && _isNoiseGateEnabled) {
|
if (!_isStereoInput && !_audioSourceInjectEnabled && _isNoiseGateEnabled) {
|
||||||
|
@ -688,27 +772,24 @@ void Audio::handleAudioInput() {
|
||||||
|
|
||||||
_lastInputLoudness = fabs(loudness / numNetworkSamples);
|
_lastInputLoudness = fabs(loudness / numNetworkSamples);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
emit inputReceived(QByteArray(reinterpret_cast<const char*>(networkAudioSamples),
|
||||||
|
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL));
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
// our input loudness is 0, since we're muted
|
// our input loudness is 0, since we're muted
|
||||||
_lastInputLoudness = 0;
|
_lastInputLoudness = 0;
|
||||||
_timeSinceLastClip = 0.0f;
|
_timeSinceLastClip = 0.0f;
|
||||||
|
|
||||||
|
_inputRingBuffer.shiftReadPosition(inputSamplesRequired);
|
||||||
}
|
}
|
||||||
|
|
||||||
emit inputReceived(QByteArray(reinterpret_cast<const char*>(networkAudioSamples),
|
|
||||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL));
|
|
||||||
|
|
||||||
auto nodeList = DependencyManager::get<NodeList>();
|
auto nodeList = DependencyManager::get<NodeList>();
|
||||||
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||||
|
|
||||||
if (_recorder && _recorder.data()->isRecording()) {
|
|
||||||
_recorder.data()->record(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (audioMixer && audioMixer->getActiveSocket()) {
|
if (audioMixer && audioMixer->getActiveSocket()) {
|
||||||
const MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar();
|
glm::vec3 headPosition = _positionGetter();
|
||||||
glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition();
|
glm::quat headOrientation = _orientationGetter();
|
||||||
glm::quat headOrientation = interfaceAvatar->getHead()->getFinalOrientationInWorldFrame();
|
|
||||||
quint8 isStereo = _isStereoInput ? 1 : 0;
|
quint8 isStereo = _isStereoInput ? 1 : 0;
|
||||||
|
|
||||||
PacketType packetType;
|
PacketType packetType;
|
||||||
|
@ -764,30 +845,23 @@ void Audio::handleAudioInput() {
|
||||||
nodeList->writeDatagram(audioDataPacket, packetBytes, audioMixer);
|
nodeList->writeDatagram(audioDataPacket, packetBytes, audioMixer);
|
||||||
_outgoingAvatarAudioSequenceNumber++;
|
_outgoingAvatarAudioSequenceNumber++;
|
||||||
}
|
}
|
||||||
delete[] inputAudioSamples;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
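handleAudioInput above now reads device samples from the ring buffer and resamples them into the network frame only when unmuted; when muted it simply advances the ring buffer's read position so captured audio never piles up. A stand-in sketch of that read-or-skip branch; RingBuffer here is not the real AudioRingBuffer, and the sizes are example values only:

```cpp
// Read-or-skip control flow for the muted branch introduced above.
#include <cstdint>
#include <cstring>
#include <vector>

struct RingBuffer {
    std::vector<int16_t> samples;
    size_t readPos = 0;
    size_t samplesAvailable() const { return samples.size() - readPos; }
    void readSamples(int16_t* dest, size_t n) {
        std::memcpy(dest, samples.data() + readPos, n * sizeof(int16_t));
        readPos += n;
    }
    void shiftReadPosition(size_t n) { readPos += n; } // drop without copying
};

static void handleAudioInputSketch(RingBuffer& input, bool muted, size_t inputSamplesRequired) {
    while (input.samplesAvailable() >= inputSamplesRequired) {
        if (!muted) {
            std::vector<int16_t> deviceSamples(inputSamplesRequired);
            input.readSamples(deviceSamples.data(), inputSamplesRequired);
            // ...resample deviceSamples to the network frame, apply the noise
            // gate, emit inputReceived, and send the packet...
        } else {
            // muted: loudness stays at zero and the captured samples are
            // skipped outright instead of being read and discarded
            input.shiftReadPosition(inputSamplesRequired);
        }
    }
}

int main() {
    RingBuffer rb;
    rb.samples.assign(882, 0);                   // two frames at the example ratio
    handleAudioInputSketch(rb, /*muted=*/true, 441);
    return 0;
}
```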
void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
|
void AudioClient::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
|
||||||
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
|
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
|
||||||
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
|
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
|
||||||
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
|
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
|
||||||
|
|
||||||
outputBuffer.resize(numDeviceOutputSamples * sizeof(int16_t));
|
outputBuffer.resize(numDeviceOutputSamples * sizeof(int16_t));
|
||||||
|
|
||||||
const int16_t* receivedSamples;
|
const int16_t* receivedSamples = reinterpret_cast<const int16_t*>(inputBuffer.data());
|
||||||
// copy the samples we'll resample from the ring buffer - this also
|
|
||||||
// pushes the read pointer of the ring buffer forwards
|
|
||||||
//receivedAudioStreamPopOutput.readSamples(receivedSamples, numNetworkOutputSamples);
|
|
||||||
|
|
||||||
receivedSamples = reinterpret_cast<const int16_t*>(inputBuffer.data());
|
|
||||||
|
|
||||||
// copy the packet from the RB to the output
|
// copy the packet from the RB to the output
|
||||||
linearResampling(receivedSamples,
|
possibleResampling(_networkToOutputResampler, receivedSamples,
|
||||||
(int16_t*)outputBuffer.data(),
|
reinterpret_cast<int16_t*>(outputBuffer.data()),
|
||||||
numNetworkOutputSamples,
|
numNetworkOutputSamples, numDeviceOutputSamples,
|
||||||
numDeviceOutputSamples,
|
_desiredOutputFormat, _outputFormat);
|
||||||
_desiredOutputFormat, _outputFormat);
|
|
||||||
|
|
||||||
if(_reverb || _receivedAudioStream.hasReverb()) {
|
if(_reverb || _receivedAudioStream.hasReverb()) {
|
||||||
updateGverbOptions();
|
updateGverbOptions();
|
||||||
|
@ -795,14 +869,14 @@ void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& ou
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::sendMuteEnvironmentPacket() {
|
void AudioClient::sendMuteEnvironmentPacket() {
|
||||||
QByteArray mutePacket = byteArrayWithPopulatedHeader(PacketTypeMuteEnvironment);
|
QByteArray mutePacket = byteArrayWithPopulatedHeader(PacketTypeMuteEnvironment);
|
||||||
QDataStream mutePacketStream(&mutePacket, QIODevice::Append);
|
QDataStream mutePacketStream(&mutePacket, QIODevice::Append);
|
||||||
|
|
||||||
const float MUTE_RADIUS = 50;
|
const float MUTE_RADIUS = 50;
|
||||||
|
|
||||||
mutePacketStream.writeBytes(reinterpret_cast<const char *>(&Application::getInstance()->getAvatar()->getPosition()),
|
glm::vec3 currentSourcePosition = _positionGetter();
|
||||||
sizeof(glm::vec3));
|
mutePacketStream.writeBytes(reinterpret_cast<const char *>(¤tSourcePosition), sizeof(glm::vec3));
|
||||||
mutePacketStream.writeBytes(reinterpret_cast<const char *>(&MUTE_RADIUS), sizeof(float));
|
mutePacketStream.writeBytes(reinterpret_cast<const char *>(&MUTE_RADIUS), sizeof(float));
|
||||||
|
|
||||||
// grab our audio mixer from the NodeList, if it exists
|
// grab our audio mixer from the NodeList, if it exists
|
||||||
|
@ -815,14 +889,14 @@ void Audio::sendMuteEnvironmentPacket() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
|
void AudioClient::addReceivedAudioToStream(const QByteArray& audioByteArray) {
|
||||||
if (_audioOutput) {
|
if (_audioOutput) {
|
||||||
// Audio output must exist and be correctly set up if we're going to process received audio
|
// Audio output must exist and be correctly set up if we're going to process received audio
|
||||||
_receivedAudioStream.parseData(audioByteArray);
|
_receivedAudioStream.parseData(audioByteArray);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::parseAudioEnvironmentData(const QByteArray &packet) {
|
void AudioClient::parseAudioEnvironmentData(const QByteArray &packet) {
|
||||||
int numBytesPacketHeader = numBytesForPacketHeader(packet);
|
int numBytesPacketHeader = numBytesForPacketHeader(packet);
|
||||||
const char* dataAt = packet.constData() + numBytesPacketHeader;
|
const char* dataAt = packet.constData() + numBytesPacketHeader;
|
||||||
|
|
||||||
|
@ -843,12 +917,12 @@ void Audio::parseAudioEnvironmentData(const QByteArray &packet) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::toggleMute() {
|
void AudioClient::toggleMute() {
|
||||||
_muted = !_muted;
|
_muted = !_muted;
|
||||||
muteToggled();
|
emit muteToggled();
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::setIsStereoInput(bool isStereoInput) {
|
void AudioClient::setIsStereoInput(bool isStereoInput) {
|
||||||
if (isStereoInput != _isStereoInput) {
|
if (isStereoInput != _isStereoInput) {
|
||||||
_isStereoInput = isStereoInput;
|
_isStereoInput = isStereoInput;
|
||||||
|
|
||||||
|
@ -863,35 +937,35 @@ void Audio::setIsStereoInput(bool isStereoInput) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::toggleAudioSourceInject() {
|
void AudioClient::toggleAudioSourceInject() {
|
||||||
_audioSourceInjectEnabled = !_audioSourceInjectEnabled;
|
_audioSourceInjectEnabled = !_audioSourceInjectEnabled;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::selectAudioSourcePinkNoise() {
|
void AudioClient::selectAudioSourcePinkNoise() {
|
||||||
_noiseSourceEnabled = true;
|
_noiseSourceEnabled = true;
|
||||||
_toneSourceEnabled = false;
|
_toneSourceEnabled = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Audio::selectAudioSourceSine440() {
|
void AudioClient::selectAudioSourceSine440() {
|
||||||
_toneSourceEnabled = true;
|
_toneSourceEnabled = true;
|
||||||
_noiseSourceEnabled = false;
|
_noiseSourceEnabled = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Audio::outputLocalInjector(bool isStereo, qreal volume, AudioInjector* injector) {
|
bool AudioClient::outputLocalInjector(bool isStereo, qreal volume, AudioInjector* injector) {
|
||||||
if (injector->getLocalBuffer()) {
|
if (injector->getLocalBuffer()) {
|
||||||
QAudioFormat localFormat = _desiredOutputFormat;
|
QAudioFormat localFormat = _desiredOutputFormat;
|
||||||
localFormat.setChannelCount(isStereo ? 2 : 1);
|
localFormat.setChannelCount(isStereo ? 2 : 1);
|
||||||
|
|
||||||
QAudioOutput* localOutput = new QAudioOutput(getNamedAudioDeviceForMode(QAudio::AudioOutput, _outputAudioDeviceName),
|
QAudioOutput* localOutput = new QAudioOutput(getNamedAudioDeviceForMode(QAudio::AudioOutput, _outputAudioDeviceName),
|
||||||
localFormat);
|
localFormat,
|
||||||
|
injector);
|
||||||
localOutput->setVolume(volume);
|
localOutput->setVolume(volume);
|
||||||
|
|
||||||
// move the localOutput to the same thread as the local injector buffer
|
// move the localOutput to the same thread as the local injector buffer
|
||||||
localOutput->moveToThread(injector->getLocalBuffer()->thread());
|
localOutput->moveToThread(injector->getLocalBuffer()->thread());
|
||||||
|
|
||||||
// have it be cleaned up when that thread is done
|
// have it be cleaned up when that injector is done
|
||||||
connect(injector->thread(), &QThread::finished, localOutput, &QAudioOutput::stop);
|
connect(injector, &AudioInjector::finished, localOutput, &QAudioOutput::stop);
|
||||||
connect(injector->thread(), &QThread::finished, localOutput, &QAudioOutput::deleteLater);
|
|
||||||
|
|
||||||
qDebug() << "Starting QAudioOutput for local injector" << localOutput;
|
qDebug() << "Starting QAudioOutput for local injector" << localOutput;
|
||||||
|
|
||||||
|
@ -903,13 +977,13 @@ bool Audio::outputLocalInjector(bool isStereo, qreal volume, AudioInjector* inje
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
void Audio::outputFormatChanged() {
|
void AudioClient::outputFormatChanged() {
|
||||||
int outputFormatChannelCountTimesSampleRate = _outputFormat.channelCount() * _outputFormat.sampleRate();
|
int outputFormatChannelCountTimesSampleRate = _outputFormat.channelCount() * _outputFormat.sampleRate();
|
||||||
_outputFrameSize = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * outputFormatChannelCountTimesSampleRate / _desiredOutputFormat.sampleRate();
|
_outputFrameSize = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * outputFormatChannelCountTimesSampleRate / _desiredOutputFormat.sampleRate();
|
||||||
_receivedAudioStream.outputFormatChanged(outputFormatChannelCountTimesSampleRate);
|
_receivedAudioStream.outputFormatChanged(outputFormatChannelCountTimesSampleRate);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Audio::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
|
bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
|
||||||
bool supportedFormat = false;
|
bool supportedFormat = false;
|
||||||
|
|
||||||
// cleanup any previously initialized device
|
// cleanup any previously initialized device
|
||||||
|
@ -926,16 +1000,36 @@ bool Audio::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
|
||||||
|
|
||||||
_inputAudioDeviceName = "";
|
_inputAudioDeviceName = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (_inputToNetworkResampler) {
|
||||||
|
// if we were using an input to network resampler, delete it here
|
||||||
|
soxr_delete(_inputToNetworkResampler);
|
||||||
|
_inputToNetworkResampler = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
if (!inputDeviceInfo.isNull()) {
|
if (!inputDeviceInfo.isNull()) {
|
||||||
qDebug() << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
|
qDebug() << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
|
||||||
_inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();
|
_inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();
|
||||||
|
|
||||||
if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
|
if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
|
||||||
qDebug() << "The format to be used for audio input is" << _inputFormat;
|
qDebug() << "The format to be used for audio input is" << _inputFormat;
|
||||||
|
|
||||||
|
// we've got the best we can get for input
|
||||||
|
// if required, setup a soxr resampler for this input to our desired network format
|
||||||
|
if (_inputFormat != _desiredInputFormat
|
||||||
|
&& _inputFormat.sampleRate() != _desiredInputFormat.sampleRate()) {
|
||||||
|
qDebug() << "Attemping to create a soxr resampler for input format to network format.";
|
||||||
|
_inputToNetworkResampler = soxrResamplerFromInputFormatToOutputFormat(_inputFormat, _desiredInputFormat);
|
||||||
|
|
||||||
|
if (!_inputToNetworkResampler) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
qDebug() << "No resampling required for audio input to match desired network format.";
|
||||||
|
}
|
||||||
|
|
||||||
// if the user wants stereo but this device can't provide then bail
|
// if the user wants stereo but this device can't provide then bail
|
||||||
if (!_isStereoInput || _inputFormat.channelCount() == 2) {
|
if (!_isStereoInput || _inputFormat.channelCount() == 2) {
|
||||||
_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
|
_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
|
||||||
_numInputCallbackBytes = calculateNumberOfInputCallbackBytes(_inputFormat);
|
_numInputCallbackBytes = calculateNumberOfInputCallbackBytes(_inputFormat);
|
||||||
_audioInput->setBufferSize(_numInputCallbackBytes);
|
_audioInput->setBufferSize(_numInputCallbackBytes);
|
||||||
|
@ -943,10 +1037,15 @@ bool Audio::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
|
||||||
// how do we want to handle input working, but output not working?
|
// how do we want to handle input working, but output not working?
|
||||||
int numFrameSamples = calculateNumberOfFrameSamples(_numInputCallbackBytes);
|
int numFrameSamples = calculateNumberOfFrameSamples(_numInputCallbackBytes);
|
||||||
_inputRingBuffer.resizeForFrameSize(numFrameSamples);
|
_inputRingBuffer.resizeForFrameSize(numFrameSamples);
|
||||||
_inputDevice = _audioInput->start();
|
|
||||||
connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));
|
|
||||||
|
|
||||||
supportedFormat = true;
|
_inputDevice = _audioInput->start();
|
||||||
|
|
||||||
|
if (_inputDevice) {
|
||||||
|
connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));
|
||||||
|
supportedFormat = true;
|
||||||
|
} else {
|
||||||
|
qDebug() << "Error starting audio input -" << _audioInput->error();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -954,7 +1053,7 @@ bool Audio::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
|
||||||
return supportedFormat;
|
return supportedFormat;
|
||||||
}
|
}
|
||||||
|
|
||||||
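switchInputToAudioDevice above now tears down any existing input-to-network resampler, creates a new one only when the negotiated device rate differs from the network rate, and checks that QAudioInput::start() actually returned an IO device before connecting readyRead. A minimal guarded-start sketch along the same lines; the 44.1 kHz mono format and default device are assumptions, and format negotiation and resampler setup are elided:

```cpp
// Guarded QAudioInput start-up (Qt 5 APIs).
#include <QtCore/QCoreApplication>
#include <QtCore/QDebug>
#include <QtCore/QIODevice>
#include <QtMultimedia/QAudioDeviceInfo>
#include <QtMultimedia/QAudioFormat>
#include <QtMultimedia/QAudioInput>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QAudioFormat format;
    format.setSampleRate(44100);
    format.setChannelCount(1);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setByteOrder(QAudioFormat::LittleEndian);
    format.setSampleType(QAudioFormat::SignedInt);

    QAudioInput audioInput(QAudioDeviceInfo::defaultInputDevice(), format);

    // start() can hand back a null QIODevice* when the device cannot be opened;
    // the diff now checks for that instead of assuming success
    QIODevice* inputDevice = audioInput.start();
    if (!inputDevice) {
        qDebug() << "Error starting audio input -" << audioInput.error();
        return 1;
    }

    QObject::connect(inputDevice, &QIODevice::readyRead, [inputDevice]() {
        qDebug() << "captured" << inputDevice->readAll().size() << "bytes";
    });

    return app.exec();
}
```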
void Audio::outputNotify() {
|
void AudioClient::outputNotify() {
|
||||||
int recentUnfulfilled = _audioOutputIODevice.getRecentUnfulfilledReads();
|
int recentUnfulfilled = _audioOutputIODevice.getRecentUnfulfilledReads();
|
||||||
if (recentUnfulfilled > 0) {
|
if (recentUnfulfilled > 0) {
|
||||||
if (_outputStarveDetectionEnabled.get()) {
|
if (_outputStarveDetectionEnabled.get()) {
|
||||||
|
@ -978,7 +1077,7 @@ void Audio::outputNotify() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
|
bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
|
||||||
bool supportedFormat = false;
|
bool supportedFormat = false;
|
||||||
|
|
||||||
// cleanup any previously initialized device
|
// cleanup any previously initialized device
|
||||||
|
@ -992,6 +1091,18 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo)
|
||||||
delete _loopbackAudioOutput;
|
delete _loopbackAudioOutput;
|
||||||
_loopbackAudioOutput = NULL;
|
_loopbackAudioOutput = NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (_networkToOutputResampler) {
|
||||||
|
// if we were using a network to output resampler, delete it here
|
||||||
|
soxr_delete(_networkToOutputResampler);
|
||||||
|
_networkToOutputResampler = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_loopbackResampler) {
|
||||||
|
// if we were using a loopback (input to output) resampler, delete it here
|
||||||
|
soxr_delete(_loopbackResampler);
|
||||||
|
_loopbackResampler = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
if (!outputDeviceInfo.isNull()) {
|
if (!outputDeviceInfo.isNull()) {
|
||||||
qDebug() << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
|
qDebug() << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
|
||||||
|
@ -1000,13 +1111,27 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo)
|
||||||
if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
|
if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
|
||||||
qDebug() << "The format to be used for audio output is" << _outputFormat;
|
qDebug() << "The format to be used for audio output is" << _outputFormat;
|
||||||
|
|
||||||
|
// we've got the best we can get for output
|
||||||
|
// if required, setup a soxr resampler from our desired network format to this output format
|
||||||
|
if (_desiredOutputFormat != _outputFormat
|
||||||
|
&& _desiredOutputFormat.sampleRate() != _outputFormat.sampleRate()) {
|
||||||
|
qDebug() << "Attemping to create a resampler for network format to output format.";
|
||||||
|
_networkToOutputResampler = soxrResamplerFromInputFormatToOutputFormat(_desiredOutputFormat, _outputFormat);
|
||||||
|
|
||||||
|
if (!_networkToOutputResampler) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
qDebug() << "No resampling required for network output to match actual output format.";
|
||||||
|
}
|
||||||
|
|
||||||
outputFormatChanged();
|
outputFormatChanged();
|
||||||
|
|
||||||
// setup our general output device for audio-mixer audio
|
// setup our general output device for audio-mixer audio
|
||||||
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
||||||
_audioOutput->setBufferSize(_outputBufferSizeFrames.get() * _outputFrameSize * sizeof(int16_t));
|
_audioOutput->setBufferSize(_outputBufferSizeFrames.get() * _outputFrameSize * sizeof(int16_t));
|
||||||
|
|
||||||
connect(_audioOutput, &QAudioOutput::notify, this, &Audio::outputNotify);
|
connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);
|
||||||
|
|
||||||
qDebug() << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;
|
qDebug() << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;
|
||||||
|
|
||||||
|
@ -1015,6 +1140,7 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo)
|
||||||
|
|
||||||
// setup a loopback audio output device
|
// setup a loopback audio output device
|
||||||
_loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
_loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
||||||
|
|
||||||
|
|
||||||
_timeSinceLastReceived.start();
|
_timeSinceLastReceived.start();
|
||||||
|
|
||||||
|
@ -1025,7 +1151,7 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo)
|
||||||
return supportedFormat;
|
return supportedFormat;
|
||||||
}
|
}
|
||||||
|
|
||||||
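setOutputBufferSize below only clamps and stores a frame count; the byte size handed to QAudioOutput above comes from _outputFrameSize, which outputFormatChanged() derives from the device format. Worked numbers for that sizing, using assumed constants (the real values live in AudioConstants.h and the saved settings, not in this diff):

```cpp
// Output buffer sizing arithmetic, with assumed example values.
#include <cstdint>
#include <cstdio>

int main() {
    const int NETWORK_FRAME_SAMPLES_PER_CHANNEL = 240;   // assumed 10 ms network frame
    const int networkSampleRate = 24000;                 // assumed network rate

    const int deviceSampleRate = 44100;                  // example stereo output device
    const int deviceChannels = 2;
    const int outputBufferSizeFrames = 3;                // example setting value

    // same shape as outputFormatChanged(): device samples per network frame
    int outputFrameSize = NETWORK_FRAME_SAMPLES_PER_CHANNEL
        * deviceChannels * deviceSampleRate / networkSampleRate;          // 882

    size_t bufferBytes = outputBufferSizeFrames * outputFrameSize * sizeof(int16_t);
    printf("frame size %d samples, buffer %zu bytes (%.1f frames)\n",
           outputFrameSize, bufferBytes,
           bufferBytes / sizeof(int16_t) / (float) outputFrameSize);      // 3.0 frames
    return 0;
}
```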
void Audio::setOutputBufferSize(int numFrames) {
|
void AudioClient::setOutputBufferSize(int numFrames) {
|
||||||
numFrames = std::min(std::max(numFrames, MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES), MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES);
|
numFrames = std::min(std::max(numFrames, MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES), MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES);
|
||||||
if (numFrames != _outputBufferSizeFrames.get()) {
|
if (numFrames != _outputBufferSizeFrames.get()) {
|
||||||
qDebug() << "Audio output buffer size (frames): " << numFrames;
|
qDebug() << "Audio output buffer size (frames): " << numFrames;
|
||||||
|
@ -1045,27 +1171,27 @@ void Audio::setOutputBufferSize(int numFrames) {
|
||||||
// proportional to the accelerator ratio.
|
// proportional to the accelerator ratio.
|
||||||
|
|
||||||
#ifdef Q_OS_WIN
|
#ifdef Q_OS_WIN
|
||||||
const float Audio::CALLBACK_ACCELERATOR_RATIO = 0.1f;
|
const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 0.1f;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef Q_OS_MAC
|
#ifdef Q_OS_MAC
|
||||||
const float Audio::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef Q_OS_LINUX
|
#ifdef Q_OS_LINUX
|
||||||
const float Audio::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
int Audio::calculateNumberOfInputCallbackBytes(const QAudioFormat& format) const {
|
int AudioClient::calculateNumberOfInputCallbackBytes(const QAudioFormat& format) const {
|
||||||
int numInputCallbackBytes = (int)(((AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
|
int numInputCallbackBytes = (int)(((AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
|
||||||
* format.channelCount()
|
* format.channelCount()
|
||||||
* (format.sampleRate() / AudioConstants::SAMPLE_RATE))
|
* ((float) format.sampleRate() / AudioConstants::SAMPLE_RATE))
|
||||||
/ CALLBACK_ACCELERATOR_RATIO) + 0.5f);
|
/ CALLBACK_ACCELERATOR_RATIO) + 0.5f);
|
||||||
|
|
||||||
return numInputCallbackBytes;
|
return numInputCallbackBytes;
|
||||||
}
|
}
|
||||||
|
|
||||||
float Audio::calculateDeviceToNetworkInputRatio(int numBytes) const {
|
float AudioClient::calculateDeviceToNetworkInputRatio() const {
|
||||||
float inputToNetworkInputRatio = (int)((_numInputCallbackBytes
|
float inputToNetworkInputRatio = (int)((_numInputCallbackBytes
|
||||||
* CALLBACK_ACCELERATOR_RATIO
|
* CALLBACK_ACCELERATOR_RATIO
|
||||||
/ AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL) + 0.5f);
|
/ AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL) + 0.5f);
|
||||||
|
@ -1073,18 +1199,18 @@ float Audio::calculateDeviceToNetworkInputRatio(int numBytes) const {
|
||||||
return inputToNetworkInputRatio;
|
return inputToNetworkInputRatio;
|
||||||
}
|
}
|
||||||
|
|
||||||
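The (float) cast added to calculateNumberOfInputCallbackBytes above is the substantive change in that hunk: without it, format.sampleRate() / AudioConstants::SAMPLE_RATE is integer division and truncates for common device rates. A worked comparison with assumed constants (NETWORK_FRAME_BYTES_PER_CHANNEL, the 24000 Hz network rate, and the device format here are illustrative, not taken from this diff):

```cpp
// Why the (float) cast matters for the input callback buffer size.
#include <cstdio>

int main() {
    const int NETWORK_FRAME_BYTES_PER_CHANNEL = 480;   // assumed: 240 samples * 2 bytes
    const int NETWORK_SAMPLE_RATE = 24000;             // assumed network rate
    const float CALLBACK_ACCELERATOR_RATIO = 2.0f;     // the Mac/Linux value above

    const int deviceSampleRate = 44100;
    const int deviceChannels = 1;

    // old behaviour: integer division, 44100 / 24000 == 1
    int oldBytes = (int) (((NETWORK_FRAME_BYTES_PER_CHANNEL * deviceChannels
        * (deviceSampleRate / NETWORK_SAMPLE_RATE)) / CALLBACK_ACCELERATOR_RATIO) + 0.5f);

    // new behaviour: the cast keeps the fractional rate ratio (1.8375)
    int newBytes = (int) (((NETWORK_FRAME_BYTES_PER_CHANNEL * deviceChannels
        * ((float) deviceSampleRate / NETWORK_SAMPLE_RATE)) / CALLBACK_ACCELERATOR_RATIO) + 0.5f);

    printf("callback bytes: old %d vs new %d\n", oldBytes, newBytes);   // 240 vs 441
    return 0;
}
```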
int Audio::calculateNumberOfFrameSamples(int numBytes) const {
|
int AudioClient::calculateNumberOfFrameSamples(int numBytes) const {
|
||||||
int frameSamples = (int)(numBytes * CALLBACK_ACCELERATOR_RATIO + 0.5f) / sizeof(int16_t);
|
int frameSamples = (int)(numBytes * CALLBACK_ACCELERATOR_RATIO + 0.5f) / sizeof(int16_t);
|
||||||
return frameSamples;
|
return frameSamples;
|
||||||
}
|
}
|
||||||
|
|
||||||
float Audio::getInputRingBufferMsecsAvailable() const {
|
float AudioClient::getInputRingBufferMsecsAvailable() const {
|
||||||
int bytesInInputRingBuffer = _inputRingBuffer.samplesAvailable() * sizeof(int16_t);
|
int bytesInInputRingBuffer = _inputRingBuffer.samplesAvailable() * sizeof(int16_t);
|
||||||
float msecsInInputRingBuffer = bytesInInputRingBuffer / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
|
float msecsInInputRingBuffer = bytesInInputRingBuffer / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
|
||||||
return msecsInInputRingBuffer;
|
return msecsInInputRingBuffer;
|
||||||
}
|
}
|
||||||
|
|
||||||
float Audio::getAudioOutputMsecsUnplayed() const {
|
float AudioClient::getAudioOutputMsecsUnplayed() const {
|
||||||
if (!_audioOutput) {
|
if (!_audioOutput) {
|
||||||
return 0.0f;
|
return 0.0f;
|
||||||
}
|
}
|
||||||
|
@ -1093,7 +1219,7 @@ float Audio::getAudioOutputMsecsUnplayed() const {
|
||||||
return msecsAudioOutputUnplayed;
|
return msecsAudioOutputUnplayed;
|
||||||
}
|
}
|
||||||
|
|
||||||
-qint64 Audio::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
+qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
     int samplesRequested = maxSize / sizeof(int16_t);
     int samplesPopped;
     int bytesWritten;
@@ -1115,7 +1241,7 @@ qint64 Audio::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
     return bytesWritten;
 }

-void Audio::checkDevices() {
+void AudioClient::checkDevices() {
     QVector<QString> inputDevices = getDeviceNames(QAudio::AudioInput);
     QVector<QString> outputDevices = getDeviceNames(QAudio::AudioOutput);

@@ -1127,26 +1253,25 @@ void Audio::checkDevices() {
     }
 }

-void Audio::loadSettings() {
+void AudioClient::loadSettings() {
     _receivedAudioStream.setDynamicJitterBuffers(dynamicJitterBuffers.get());
     _receivedAudioStream.setMaxFramesOverDesired(maxFramesOverDesired.get());
     _receivedAudioStream.setStaticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames.get());
     _receivedAudioStream.setUseStDevForJitterCalc(useStDevForJitterCalc.get());
     _receivedAudioStream.setWindowStarveThreshold(windowStarveThreshold.get());
     _receivedAudioStream.setWindowSecondsForDesiredCalcOnTooManyStarves(
         windowSecondsForDesiredCalcOnTooManyStarves.get());
     _receivedAudioStream.setWindowSecondsForDesiredReduction(windowSecondsForDesiredReduction.get());
     _receivedAudioStream.setRepetitionWithFade(repetitionWithFade.get());
 }

-void Audio::saveSettings() {
+void AudioClient::saveSettings() {
     dynamicJitterBuffers.set(_receivedAudioStream.getDynamicJitterBuffers());
     maxFramesOverDesired.set(_receivedAudioStream.getMaxFramesOverDesired());
     staticDesiredJitterBufferFrames.set(_receivedAudioStream.getDesiredJitterBufferFrames());
     windowStarveThreshold.set(_receivedAudioStream.getWindowStarveThreshold());
     windowSecondsForDesiredCalcOnTooManyStarves.set(_receivedAudioStream.
         getWindowSecondsForDesiredCalcOnTooManyStarves());
     windowSecondsForDesiredReduction.set(_receivedAudioStream.getWindowSecondsForDesiredReduction());
     repetitionWithFade.set(_receivedAudioStream.getRepetitionWithFade());
 }
@@ -1,5 +1,5 @@
 //
-// Audio.h
+// AudioClient.h
 // interface/src
 //
 // Created by Stephen Birarda on 1/22/13.
@@ -9,43 +9,38 @@
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //

-#ifndef hifi_Audio_h
-#define hifi_Audio_h
+#ifndef hifi_AudioClient_h
+#define hifi_AudioClient_h

 #include <fstream>
 #include <vector>

-#include <QAudio>
-#include <QAudioInput>
-#include <QElapsedTimer>
-#include <QGLWidget>
+#include <QtCore/QByteArray>
+#include <QtCore/QElapsedTimer>
 #include <QtCore/QObject>
 #include <QtCore/QVector>
+#include <QtMultimedia/QAudio>
 #include <QtMultimedia/QAudioFormat>
-#include <QVector>
-#include <QByteArray>
+#include <QtMultimedia/QAudioInput>

 #include <AbstractAudioInterface.h>
+#include <AudioBuffer.h>
+#include <AudioEffectOptions.h>
+#include <AudioFormat.h>
+#include <AudioGain.h>
 #include <AudioRingBuffer.h>
+#include <AudioSourceTone.h>
+#include <AudioSourceNoise.h>
+#include <AudioStreamStats.h>
 #include <DependencyManager.h>

+#include <MixedProcessedAudioStream.h>
+#include <RingBufferHistory.h>
 #include <SettingHandle.h>
 #include <StDev.h>

-#include "audio/AudioIOStats.h"
-#include "audio/AudioNoiseGate.h"
-#include "AudioStreamStats.h"
-#include "Recorder.h"
-#include "RingBufferHistory.h"
-#include "AudioRingBuffer.h"
-#include "AudioFormat.h"
-#include "AudioBuffer.h"
-#include "AudioSourceTone.h"
-#include "AudioSourceNoise.h"
-#include "AudioGain.h"
+#include "AudioIOStats.h"
+#include "AudioNoiseGate.h"

-#include "MixedProcessedAudioStream.h"
-#include "AudioEffectOptions.h"


 #ifdef _WIN32
 #pragma warning( push )
@@ -67,20 +62,24 @@ static const int MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 1;
 static const int MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 20;
 static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED = true;
 static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_THRESHOLD = 3;
-static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD = 10 * 1000; // 10 Seconds
+static const quint64 DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD = 10 * 1000; // 10 Seconds

 class QAudioInput;
 class QAudioOutput;
 class QIODevice;
+struct soxr;

-class Audio : public AbstractAudioInterface, public Dependency {
+typedef glm::vec3 (*AudioPositionGetter)();
+typedef glm::quat (*AudioOrientationGetter)();

+class AudioClient : public AbstractAudioInterface, public Dependency {
     Q_OBJECT
     SINGLETON_DEPENDENCY
 public:

     class AudioOutputIODevice : public QIODevice {
     public:
-        AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream, Audio* audio) :
+        AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream, AudioClient* audio) :
             _receivedAudioStream(receivedAudioStream), _audio(audio), _unfulfilledReads(0) {};

         void start() { open(QIODevice::ReadOnly); }
@@ -91,7 +90,7 @@ public:
         int getRecentUnfulfilledReads() { int unfulfilledReads = _unfulfilledReads; _unfulfilledReads = 0; return unfulfilledReads; }
     private:
         MixedProcessedAudioStream& _receivedAudioStream;
-        Audio* _audio;
+        AudioClient* _audio;
         int _unfulfilledReads;
     };

@@ -113,19 +112,20 @@ public:

     float getInputRingBufferMsecsAvailable() const;
     float getAudioOutputMsecsUnplayed() const;

-    void setRecorder(RecorderPointer recorder) { _recorder = recorder; }

     int getOutputBufferSize() { return _outputBufferSizeFrames.get(); }

     bool getOutputStarveDetectionEnabled() { return _outputStarveDetectionEnabled.get(); }
     void setOutputStarveDetectionEnabled(bool enabled) { _outputStarveDetectionEnabled.set(enabled); }

     int getOutputStarveDetectionPeriod() { return _outputStarveDetectionPeriodMsec.get(); }
     void setOutputStarveDetectionPeriod(int msecs) { _outputStarveDetectionPeriodMsec.set(msecs); }

     int getOutputStarveDetectionThreshold() { return _outputStarveDetectionThreshold.get(); }
     void setOutputStarveDetectionThreshold(int threshold) { _outputStarveDetectionThreshold.set(threshold); }

+    void setPositionGetter(AudioPositionGetter positionGetter) { _positionGetter = positionGetter; }
+    void setOrientationGetter(AudioOrientationGetter orientationGetter) { _orientationGetter = orientationGetter; }

     static const float CALLBACK_ACCELERATOR_RATIO;

@@ -179,10 +179,13 @@ public slots:
 signals:
     bool muteToggled();
     void inputReceived(const QByteArray& inputSamples);
+    void outputBytesToNetwork(int numBytes);
+    void inputBytesFromNetwork(int numBytes);

     void deviceChanged();

 protected:
-    Audio();
+    AudioClient();

 private:
     void outputFormatChanged();
@@ -208,7 +211,7 @@ private:

     QString _inputAudioDeviceName;
     QString _outputAudioDeviceName;

     quint64 _outputStarveDetectionStartTimeMsec;
     int _outputStarveDetectionCount;

@@ -237,6 +240,11 @@ private:
     AudioEffectOptions* _reverbOptions;
     ty_gverb* _gverbLocal;
     ty_gverb* _gverb;

+    // possible soxr streams needed for resample
+    soxr* _inputToNetworkResampler;
+    soxr* _networkToOutputResampler;
+    soxr* _loopbackResampler;

     // Adds Reverb
     void initGverb();
@@ -251,7 +259,7 @@ private:
     // Callback acceleration dependent calculations
     int calculateNumberOfInputCallbackBytes(const QAudioFormat& format) const;
     int calculateNumberOfFrameSamples(int numBytes) const;
-    float calculateDeviceToNetworkInputRatio(int numBytes) const;
+    float calculateDeviceToNetworkInputRatio() const;

     // Input framebuffer
     AudioBufferFloat32 _inputFrameBuffer;
@@ -274,16 +282,17 @@ private:

     AudioOutputIODevice _audioOutputIODevice;

-    WeakRecorderPointer _recorder;

     AudioIOStats _stats;

     AudioNoiseGate _inputGate;

+    AudioPositionGetter _positionGetter;
+    AudioOrientationGetter _orientationGetter;

     QVector<QString> _inputDevices;
     QVector<QString> _outputDevices;
     void checkDevices();
 };


-#endif // hifi_Audio_h
+#endif // hifi_AudioClient_h
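The hunks above replace the old Audio singleton's hard dependency on interface code with two plain function-pointer typedefs, AudioPositionGetter and AudioOrientationGetter, plus setters on AudioClient. A minimal caller-side sketch follows (not part of this commit; wireAudioListenerCallbacks and the two getter functions are hypothetical placeholders, and the DependencyManager accessor is the one used elsewhere in this diff):

// Minimal sketch, assuming the DependencyManager singleton accessor shown in this diff;
// the listener getter functions below are hypothetical placeholders.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

#include <DependencyManager.h>
#include "AudioClient.h"

static glm::vec3 getListenerPosition() { return glm::vec3(0.0f); }   // placeholder value
static glm::quat getListenerOrientation() { return glm::quat(); }    // placeholder value

void wireAudioListenerCallbacks() {
    auto audioClient = DependencyManager::get<AudioClient>();
    // AudioPositionGetter / AudioOrientationGetter are plain function pointers,
    // so free functions (or capture-less lambdas) fit the new setters.
    audioClient->setPositionGetter(getListenerPosition);
    audioClient->setOrientationGetter(getListenerOrientation);
}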
@@ -9,14 +9,12 @@
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //

-#include "InterfaceConfig.h"

 #include <AudioConstants.h>
 #include <MixedProcessedAudioStream.h>
 #include <NodeList.h>
 #include <PositionalAudioStream.h>

-#include "Audio.h"
+#include "AudioClient.h"

 #include "AudioIOStats.h"

@@ -27,7 +25,7 @@ const int APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS = (int)(30.0f * 1000.0f / Au

 AudioIOStats::AudioIOStats(MixedProcessedAudioStream* receivedAudioStream) :
     _receivedAudioStream(receivedAudioStream),
-    _audioInputMsecsReadStats(MSECS_PER_SECOND / (float)AudioConstants::NETWORK_FRAME_MSECS * Audio::CALLBACK_ACCELERATOR_RATIO, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
+    _audioInputMsecsReadStats(MSECS_PER_SECOND / (float)AudioConstants::NETWORK_FRAME_MSECS * AudioClient::CALLBACK_ACCELERATOR_RATIO, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
     _inputRingBufferMsecsAvailableStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
     _audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
     _lastSentAudioPacket(0),
@@ -97,7 +95,7 @@ void AudioIOStats::parseAudioStreamStatsPacket(const QByteArray& packet) {

 void AudioIOStats::sendDownstreamAudioStatsPacket() {

-    auto audioIO = DependencyManager::get<Audio>();
+    auto audioIO = DependencyManager::get<AudioClient>();

     // since this function is called every second, we'll sample for some of our stats here
     _inputRingBufferMsecsAvailableStats.update(audioIO->getInputRingBufferMsecsAvailable());
@@ -1,6 +1,6 @@
 //
 // AudioBuffer.h
-// hifi
+// libraries/audio/src
 //
 // Created by Craig Hansen-Sturm on 8/29/14.
 // Copyright 2014 High Fidelity, Inc.
@@ -16,6 +16,8 @@

 #include <QDebug>

+#include "AudioFormat.h"
+
 template< typename T >
 class AudioFrameBuffer {

@@ -108,12 +108,7 @@ void AudioInjector::injectLocally() {
         // give our current send position to the local buffer
         _localBuffer->setCurrentOffset(_currentSendPosition);

-        QMetaObject::invokeMethod(_localAudioInterface, "outputLocalInjector",
-                                  Qt::BlockingQueuedConnection,
-                                  Q_RETURN_ARG(bool, success),
-                                  Q_ARG(bool, _options.stereo),
-                                  Q_ARG(qreal, _options.volume),
-                                  Q_ARG(AudioInjector*, this));
+        success = _localAudioInterface->outputLocalInjector(_options.stereo, _options.volume, this);

         // if we're not looping and the buffer tells us it is empty then emit finished
         connect(_localBuffer, &AudioInjectorLocalBuffer::bufferEmpty, this, &AudioInjector::stop);
@@ -14,7 +14,7 @@

 #include "InboundAudioStream.h"

-class Audio;
+class AudioClient;

 class MixedProcessedAudioStream : public InboundAudioStream {
     Q_OBJECT
@@ -5,7 +5,7 @@ setup_hifi_library(Network Script)

 include_glm()

-link_hifi_libraries(audio shared octree networking gpu model fbx)
+link_hifi_libraries(audio shared networking)

 # call macro to include our dependency includes and bubble them up via a property on our target
 include_dependency_includes()
@@ -1068,6 +1068,13 @@ void AvatarData::setJointMappingsFromNetworkReply() {
     networkReply->deleteLater();
 }

+void AvatarData::sendAvatarDataPacket() {
+    QByteArray dataPacket = byteArrayWithPopulatedHeader(PacketTypeAvatarData);
+    dataPacket.append(toByteArray());
+
+    DependencyManager::get<NodeList>()->broadcastToNodes(dataPacket, NodeSet() << NodeType::AvatarMixer);
+}
+
 void AvatarData::sendIdentityPacket() {
     QByteArray identityPacket = byteArrayWithPopulatedHeader(PacketTypeAvatarIdentity);
     identityPacket.append(identityByteArray());
@@ -37,12 +37,12 @@ typedef unsigned long long quint64;
 #include <QHash>
 #include <QObject>
 #include <QRect>
-#include <QScriptable>
 #include <QStringList>
 #include <QUrl>
 #include <QUuid>
 #include <QVariantMap>
 #include <QVector>
+#include <QtScript/QScriptable>

 #include <CollisionInfo.h>
 #include <RegisteredMetaTypes.h>
@@ -110,6 +110,10 @@ const int AVATAR_BILLBOARD_PACKET_SEND_INTERVAL_MSECS = 5000;
 const QUrl DEFAULT_HEAD_MODEL_URL = QUrl("http://public.highfidelity.io/models/heads/defaultAvatar_head.fst");
 const QUrl DEFAULT_BODY_MODEL_URL = QUrl("http://public.highfidelity.io/models/skeletons/defaultAvatar_body.fst");

+// Where one's own Avatar begins in the world (will be overwritten if avatar data file is found).
+// This is the start location in the Sandbox (xyz: 6270, 211, 6000).
+const glm::vec3 START_LOCATION(6270, 211, 6000);
+
 enum KeyState {
     NO_KEY_DOWN = 0,
     INSERT_KEY_DOWN,
@@ -297,8 +301,10 @@ public:
     const Referential* getReferential() const { return _referential; }

 public slots:
+    void sendAvatarDataPacket();
     void sendIdentityPacket();
     void sendBillboardPacket();

     void setBillboardFromNetworkReply();
     void setJointMappingsFromNetworkReply();
     void setSessionUUID(const QUuid& sessionUUID) { _sessionUUID = sessionUUID; }
@@ -329,7 +335,7 @@ public slots:

 protected:
     QUuid _sessionUUID;
-    glm::vec3 _position;
+    glm::vec3 _position = START_LOCATION;
     glm::vec3 _handPosition;

     Referential* _referential;
@@ -357,8 +363,8 @@ protected:
     HeadData* _headData;
     HandData* _handData;

-    QUrl _faceModelURL;
-    QUrl _skeletonModelURL;
+    QUrl _faceModelURL = DEFAULT_HEAD_MODEL_URL;
+    QUrl _skeletonModelURL = DEFAULT_BODY_MODEL_URL;
     QVector<AttachmentData> _attachmentData;
     QString _displayName;

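Because sendAvatarDataPacket() is added as a public slot next to sendIdentityPacket() and sendBillboardPacket(), it can also be driven through Qt's signal/slot machinery. A hedged wiring sketch (not from this commit; startAvatarBroadcast and the 16 ms interval are assumptions for illustration only):

#include <QTimer>

#include "AvatarData.h"

// Hypothetical wiring: drive the avatar-data broadcast from a timer instead of
// calling it inline each frame. The interval below is an assumed value.
void startAvatarBroadcast(AvatarData* myAvatar) {
    const int AVATAR_DATA_SEND_INTERVAL_MSECS = 16; // assumption: roughly 60 Hz
    QTimer* sendTimer = new QTimer(myAvatar);
    QObject::connect(sendTimer, &QTimer::timeout, myAvatar, &AvatarData::sendAvatarDataPacket);
    sendTimer->start(AVATAR_DATA_SEND_INTERVAL_MSECS);
}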
@@ -14,10 +14,7 @@

 #include "AvatarHashMap.h"

-AvatarHashMap::AvatarHashMap() :
-    _avatarHash(),
-    _lastOwnerSessionUUID()
-{
+AvatarHashMap::AvatarHashMap() {
     connect(DependencyManager::get<NodeList>().data(), &NodeList::uuidChanged, this, &AvatarHashMap::sessionUUIDChanged);
 }

@@ -54,26 +51,26 @@ void AvatarHashMap::processAvatarMixerDatagram(const QByteArray& datagram, const
 }

 bool AvatarHashMap::containsAvatarWithDisplayName(const QString& displayName) {
-    return avatarWithDisplayName(displayName) == NULL ? false : true;
+    return !avatarWithDisplayName(displayName).isNull();
 }

-AvatarData* AvatarHashMap::avatarWithDisplayName(const QString& displayName) {
+AvatarWeakPointer AvatarHashMap::avatarWithDisplayName(const QString& displayName) {
     foreach(const AvatarSharedPointer& sharedAvatar, _avatarHash) {
         if (sharedAvatar->getDisplayName() == displayName) {
             // this is a match
             // check if this avatar should still be around
             if (!shouldKillAvatar(sharedAvatar)) {
                 // we have a match, return the AvatarData
-                return sharedAvatar.data();
+                return sharedAvatar;
             } else {
                 // we should remove this avatar, but we might not be on a thread that is allowed
                 // so we just return NULL to the caller
-                return NULL;
+                return AvatarWeakPointer();
             }
         }
     }

-    return NULL;
+    return AvatarWeakPointer();
 }

 AvatarSharedPointer AvatarHashMap::newSharedAvatar() {
@@ -16,6 +16,7 @@
 #include <QtCore/QSharedPointer>
 #include <QtCore/QUuid>

+#include <DependencyManager.h>
 #include <Node.h>

 #include "AvatarData.h"
@@ -24,23 +25,24 @@ typedef QSharedPointer<AvatarData> AvatarSharedPointer;
 typedef QWeakPointer<AvatarData> AvatarWeakPointer;
 typedef QHash<QUuid, AvatarSharedPointer> AvatarHash;

-class AvatarHashMap : public QObject {
+class AvatarHashMap : public QObject, public Dependency {
     Q_OBJECT
-public:
-    AvatarHashMap();
+    SINGLETON_DEPENDENCY

+public:
     const AvatarHash& getAvatarHash() { return _avatarHash; }
-    int size() const { return _avatarHash.size(); }
+    int size() { return _avatarHash.size(); }

 public slots:
     void processAvatarMixerDatagram(const QByteArray& datagram, const QWeakPointer<Node>& mixerWeakPointer);
     bool containsAvatarWithDisplayName(const QString& displayName);
-    AvatarData* avatarWithDisplayName(const QString& displayname);
+    AvatarWeakPointer avatarWithDisplayName(const QString& displayname);

 private slots:
     void sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID);

 protected:
+    AvatarHashMap();
     virtual AvatarHash::iterator erase(const AvatarHash::iterator& iterator);

     bool shouldKillAvatar(const AvatarSharedPointer& sharedAvatar);
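avatarWithDisplayName() now hands back an AvatarWeakPointer instead of a raw AvatarData*, so callers are expected to promote the weak pointer before touching the avatar. A brief caller-side sketch (not part of the commit; greetAvatar is a hypothetical helper, and it relies on the DependencyManager singleton access that the SINGLETON_DEPENDENCY change above enables):

#include <QDebug>

#include "AvatarHashMap.h"

// Hypothetical caller: lock the weak pointer into a shared pointer and bail out
// if the avatar has already been removed from the hash.
void greetAvatar(const QString& displayName) {
    auto avatars = DependencyManager::get<AvatarHashMap>();
    AvatarSharedPointer avatar = avatars->avatarWithDisplayName(displayName).lock();
    if (avatar) {
        qDebug() << "Found avatar with display name" << avatar->getDisplayName();
    }
}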
@@ -11,13 +11,17 @@

 #include <glm/gtx/quaternion.hpp>

-#include <FBXReader.h>
+#include <FaceshiftConstants.h>
 #include <GLMHelpers.h>
-#include <OctreeConstants.h>

 #include "AvatarData.h"
 #include "HeadData.h"

+/// The names of the blendshapes expected by Faceshift, terminated with an empty string.
+extern const char* FACESHIFT_BLENDSHAPES[];
+/// The size of FACESHIFT_BLENDSHAPES
+extern const int NUM_FACESHIFT_BLENDSHAPES;
+
 HeadData::HeadData(AvatarData* owningAvatar) :
     _baseYaw(0.0f),
     _basePitch(0.0f),
@@ -63,6 +67,7 @@ void HeadData::setBlendshape(QString name, float val) {
         for (int i = 0; i < NUM_FACESHIFT_BLENDSHAPES; i++) {
             blendshapeLookupMap[FACESHIFT_BLENDSHAPES[i]] = i;
         }
+        hasInitializedLookupMap = true;
     }

     //Check to see if the named blendshape exists, and then set its value if it does
@@ -136,7 +136,6 @@ void Recorder::record() {
     }
 }

-void Recorder::record(char* samples, int size) {
-    QByteArray byteArray(samples, size);
-    _recording->addAudioPacket(byteArray);
+void Recorder::recordAudio(const QByteArray& audioByteArray) {
+    _recording->addAudioPacket(audioByteArray);
 }
@@ -1,6 +1,6 @@
 //
 // Recorder.h
-//
+// libraries/avatars/src
 //
 // Created by Clement on 8/7/14.
 // Copyright 2014 High Fidelity, Inc.
@@ -26,7 +26,8 @@ typedef QSharedPointer<Recorder> RecorderPointer;
 typedef QWeakPointer<Recorder> WeakRecorderPointer;

 /// Records a recording
-class Recorder {
+class Recorder : public QObject {
+    Q_OBJECT
 public:
     Recorder(AvatarData* avatar);

@@ -40,7 +41,8 @@ public slots:
     void stopRecording();
     void saveToFile(const QString& file);
     void record();
-    void record(char* samples, int size);
+    void recordAudio(const QByteArray& audioArray);
+

 private:
     QElapsedTimer _timer;
@@ -50,4 +52,4 @@ private:
 };


 #endif // hifi_Recorder_h
@@ -14,6 +14,7 @@
 #include <gpu/GPUConfig.h>

 #include <DeferredLightingEffect.h>
+#include <GLMHelpers.h>
 #include <PerfStat.h>

 #include "RenderableLightEntityItem.h"
@@ -22,6 +22,7 @@
 #include <glm/gtx/quaternion.hpp>
 #include <glm/gtx/transform.hpp>

+#include <FaceshiftConstants.h>
 #include <GeometryUtil.h>
 #include <GLMHelpers.h>
 #include <OctalCode.h>
@@ -610,60 +611,6 @@ QString getID(const QVariantList& properties, int index = 0) {
     return processID(properties.at(index).toString());
 }

-const char* FACESHIFT_BLENDSHAPES[] = {
-    "EyeBlink_L",
-    "EyeBlink_R",
-    "EyeSquint_L",
-    "EyeSquint_R",
-    "EyeDown_L",
-    "EyeDown_R",
-    "EyeIn_L",
-    "EyeIn_R",
-    "EyeOpen_L",
-    "EyeOpen_R",
-    "EyeOut_L",
-    "EyeOut_R",
-    "EyeUp_L",
-    "EyeUp_R",
-    "BrowsD_L",
-    "BrowsD_R",
-    "BrowsU_C",
-    "BrowsU_L",
-    "BrowsU_R",
-    "JawFwd",
-    "JawLeft",
-    "JawOpen",
-    "JawChew",
-    "JawRight",
-    "MouthLeft",
-    "MouthRight",
-    "MouthFrown_L",
-    "MouthFrown_R",
-    "MouthSmile_L",
-    "MouthSmile_R",
-    "MouthDimple_L",
-    "MouthDimple_R",
-    "LipsStretch_L",
-    "LipsStretch_R",
-    "LipsUpperClose",
-    "LipsLowerClose",
-    "LipsUpperUp",
-    "LipsLowerDown",
-    "LipsUpperOpen",
-    "LipsLowerOpen",
-    "LipsFunnel",
-    "LipsPucker",
-    "ChinLowerRaise",
-    "ChinUpperRaise",
-    "Sneer",
-    "Puff",
-    "CheekSquint_L",
-    "CheekSquint_R",
-    ""
-};
-
-const int NUM_FACESHIFT_BLENDSHAPES = sizeof(FACESHIFT_BLENDSHAPES) / sizeof(char*);
-
 const char* HUMANIK_JOINTS[] = {
     "RightHand",
     "RightForeArm",
@@ -1718,13 +1665,13 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
 #endif
         }
         material.id = getID(object.properties);

         material._material = model::MaterialPointer(new model::Material());
         material._material->setEmissive(material.emissive);
         material._material->setDiffuse(material.diffuse);
         material._material->setSpecular(material.specular);
         material._material->setShininess(material.shininess);
         material._material->setOpacity(material.opacity);

         materials.insert(material.id, material);

Some files were not shown because too many files have changed in this diff.