Commit 805a88b73b

Merge branch 'master' of https://github.com/highfidelity/hifi into virtualEntities

Conflicts:
    examples/editModels.js
    interface/src/entities/EntityTreeRenderer.h
    libraries/models/src/ModelsScriptingInterface.cpp
    libraries/script-engine/src/ScriptEngine.cpp
184 changed files with 7622 additions and 3055 deletions. Changed files shown:

.gitignore
BUILD.md
assignment-client/src
cmake
    macros
    modules
examples
    avatarLocalLight.js, bot_randomExpression.js, editModels.js, editVoxels.js, fallingSand.js, grenadeLauncher.js, laserPointer.js, leapOfFaith.js, multipleCursorsExample.js, toolBars.js, voxelsoundwaves.js, voxelwall.js
images
    bg_hr.png, blacktocat.png, body-bg.png, highlight-bg.jpg, hr.png, icon_download.png, octocat-icon.png, sprite_download.png, tar-gz-icon.png, zip-icon.png
index.html
interface
    CMakeLists.txt
    external
    resources/shaders
        model.frag, model_cascaded_shadow_map.frag, model_cascaded_shadow_normal_map.frag, model_cascaded_shadow_normal_specular_map.frag, model_cascaded_shadow_specular_map.frag, model_normal_map.frag, model_normal_specular_map.frag, model_shadow_map.frag, model_shadow_normal_map.frag, model_shadow_normal_specular_map.frag, model_shadow_specular_map.frag, model_specular_map.frag
    src
        Application.cpp, Application.h, Audio.cpp, Audio.h, Camera.cpp, Hair.cpp, Hair.h, Menu.cpp, Menu.h, SignedWalletTransaction.cpp, SignedWalletTransaction.h, XmppClient.cpp, XmppClient.h
        avatar
            Avatar.cpp, Avatar.h, AvatarManager.cpp, AvatarManager.h, Hand.cpp, Head.cpp, Head.h, MuscleConstraint.cpp, MuscleConstraint.h, MyAvatar.cpp, MyAvatar.h, SkeletonModel.cpp, SkeletonModel.h
        devices
.gitignore (28 changes)
@@ -28,31 +28,9 @@ DerivedData
*.hmap

# ignore oculus
interface/external/oculus/*
!interface/external/oculus/readme.txt

# Ignore Sixense
interface/external/sixense/*
!interface/external/sixense/readme.txt

# Ignore Visage
interface/external/visage/*
!interface/external/visage/readme.txt
interface/resources/visage/*
!interface/resources/visage/tracker.cfg

# Ignore Faceplus
interface/external/faceplus/*
!interface/external/faceplus/readme.txt

# Ignore PrioVR
interface/external/priovr/*
!interface/external/priovr/readme.txt

# Ignore RtMidi
interface/external/rtmidi/*
!interface/external/rtmidi/readme.txt
# ignore interface optional externals
interface/external/*/*
!interface/external/*/readme.txt

# Ignore interfaceCache for Linux users
interface/interfaceCache/
BUILD.md (80 changes)
@@ -2,25 +2,25 @@ Dependencies
===
* [cmake](http://www.cmake.org/cmake/resources/software.html) ~> 2.8.12.2
* [Qt](http://qt-project.org/downloads) ~> 5.2.0
* [zLib](http://www.zlib.net/) ~> 1.2.8
* [glm](http://glm.g-truc.net/0.9.5/index.html) ~> 0.9.5.2
* [qxmpp](https://github.com/qxmpp-project/qxmpp/) ~> 0.7.6
* [GnuTLS](http://gnutls.org/download.html) ~> 3.2.12
* IMPORTANT: GnuTLS 3.2.12 is critical to avoid a security vulnerability.
* [OpenSSL](https://www.openssl.org/related/binaries.html) ~> 1.0.1g
* IMPORTANT: OpenSSL 1.0.1g is critical to avoid a security vulnerability.

#####Linux only
* [freeglut](http://freeglut.sourceforge.net/) ~> 2.8.0
* [zLib](http://www.zlib.net/) ~> 1.2.8

#####Windows only
* [GLEW](http://glew.sourceforge.net/) ~> 1.10.0
* [freeglut MSVC](http://www.transmissionzero.co.uk/software/freeglut-devel/) ~> 2.8.1
* [zLib](http://www.zlib.net/) ~> 1.2.8

CMake
===
Hifi uses CMake to generate build files and project files for your platform.

####Qt
In order for CMake to find the Qt5 find modules, you will need to set an ENV variable pointing to your CMake installation.
In order for CMake to find the Qt5 find modules, you will need to set an ENV variable pointing to your Qt installation.

For example, a Qt5 5.2.0 installation to /usr/local/qt5 would require that QT_CMAKE_PREFIX_PATH be set with the following command. This can either be entered directly into your shell session before you build or in your shell profile (e.g.: ~/.bash_profile, ~/.bashrc, ~/.zshrc - this depends on your shell and environment).

@@ -30,7 +30,6 @@ The path it needs to be set to will depend on where and how Qt5 was installed. e
    export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.2.1/lib/cmake
    export QT_CMAKE_PREFIX_PATH=/usr/local/opt/qt5/lib/cmake

####Generating build files
Create a build directory in the root of your checkout and then run the CMake build from there. This will keep the rest of the directory clean.

@@ -43,8 +42,16 @@ Any variables that need to be set for CMake to find dependencies can be set as E
For example, to pass the QT_CMAKE_PREFIX_PATH variable during build file generation:

    cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/lib/cmake
    cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.1/lib/cmake

####Finding Dependencies
You can point our [Cmake find modules](cmake/modules/) to the correct version of dependencies by setting one of the three following variables to the location of the correct version of the dependency.

In the examples below the variable $NAME would be replaced by the name of the dependency in uppercase, and $name would be replaced by the name of the dependency in lowercase (ex: OPENSSL_ROOT_DIR, openssl).

* $NAME_ROOT_DIR - pass this variable to Cmake with the -DNAME_ROOT_DIR= flag when running Cmake to generate build files
* $NAME_ROOT_DIR - set this variable in your ENV
* HIFI_LIB_DIR - set this variable in your ENV to your High Fidelity lib folder, should contain a folder '$name'
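Editorial note (not part of BUILD.md): these three variables feed the HifiLibrarySearchHints macro added later in this commit, which turns them into a per-dependency list of search hints for the find modules. A minimal sketch of the module side, loosely following the FindOpenSSL.cmake file in this same diff and using OpenSSL as the example dependency:

# Sketch: how a find module consumes $NAME_ROOT_DIR / HIFI_LIB_DIR (assumed dependency: openssl).
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("openssl")
# OPENSSL_SEARCH_DIRS now lists, in order: OPENSSL_ROOT_DIR (e.g. set via -DOPENSSL_ROOT_DIR=...),
# $ENV{OPENSSL_ROOT_DIR}, and $ENV{HIFI_LIB_DIR}/openssl, skipping any that are unset.
find_path(OPENSSL_INCLUDE_DIR NAMES openssl/ssl.h HINTS ${OPENSSL_SEARCH_DIRS} PATH_SUFFIXES include)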
UNIX
===

@@ -55,23 +62,16 @@ Should you choose not to install Qt5 via a package manager that handles dependen

    libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack-dev

#####GnuTLS

If `libgnutls28-dev` 3.2.12 or higher is available via your package manager, it would be easiest to grab it from there. At the time of this writing that is not the case for any version of Ubuntu, so it will need to be built from source.

`gmplib` is a dependency for GnuTLS. On Ubuntu, we were unable to build `hogweed` (part of `libnettle`) with `gmpib` 6.x.x. If nettle is not built with `hogweed`, GnuTLS will fail to build. If you run into this problem, try version 4.2.1 of `gmplib`.

####OS X
#####Package Managers
[Homebrew](http://brew.sh/) is an excellent package manager for OS X. It makes install of all hifi dependencies very simple.

    brew tap highfidelity/homebrew-formulas
    brew install cmake glm gnutls
    brew install cmake glm openssl
    brew install highfidelity/formulas/qt5
    brew link qt5 --force
    brew install highfidelity/formulas/qxmpp

We have a [homebrew formulas repository](https://github.com/highfidelity/homebrew-formulas) that you can use/tap to install some of the dependencies. In the code block above qt5 and qxmpp are installed from formulas in this repository.
We have a [homebrew formulas repository](https://github.com/highfidelity/homebrew-formulas) that you can use/tap to install some of the dependencies. In the code block above qt5 is installed from a formula in this repository.

*Our [qt5 homebrew formula](https://raw.github.com/highfidelity/homebrew-formulas/master/qt5.rb) is for a patched version of Qt 5.2.0 stable that removes wireless network scanning that can reduce real-time audio performance. We recommended you use this formula to install Qt.*

@@ -149,13 +149,10 @@ The recommended route for CMake to find the external dependencies is to place al
    -> glm
        -> glm
            -> glm.hpp
    -> gnutls
    -> openssl
        -> bin
        -> include
        -> lib
    -> qxmpp
        -> include
        -> lib
    -> zlib
        -> include
        -> lib

@@ -217,35 +214,6 @@ This package contains only headers, so there's nothing to add to the PATH.

Be careful with glm. For the folder other libraries would normally call 'include', the folder containing the headers, glm opts to use 'glm'. You will have a glm folder nested inside the top-level glm folder.

#### GnuTLS

You can get a precompiled version of GnuTLS for Windows [here](http://gnutls.org/download.html).

To use GnuTLS with Visual Studio, you will need to create `libgnutls-28.lib`, the import library for Visual Studio projects. This is done using the `lib` command in the `bin` folder of your GnuTLS download. Start a Visual Studio Command Prompt, and then run:

    cd %HIFI_LIB_DIR%\gnutls\bin
    lib /def:libgnutls-28.def
    copy libgnutls-28.lib ..\lib

The Cmake FindGnuTLS module will now find libgnutls-28.lib during the Cmake run.

Add to the PATH: `%HIFI_LIB_DIR%\gnutls\bin`

#### qxmpp

Download a source-code release from the [qxmpp GitHub page](https://github.com/qxmpp-project/qxmpp/releases).

Start a Visual Studio Command Prompt.

    mkdir %HIFI_LIB_DIR%\build
    tar xfz qxmpp-0.7.6.tar.gz -C %HIFI_LIB_DIR%\build
    cd %HIFI_LIB_DIR%\build\qxmpp-0.7.6
    qmake PREFIX=%HIFI_LIB_DIR%\qxmpp # This creates "Makefile"
    nmake
    nmake install

Add to the PATH: `%HIFI_LIB_DIR%\qxmpp\lib`

#### Build High Fidelity using Visual Studio
Follow the same build steps from the CMake section, but pass a different generator to CMake.

@@ -262,3 +230,17 @@ If you need to debug Interface, you can run interface from within Visual Studio
* In the Solution Explorer, right click interface and click Set as StartUp Project
* Set the "Working Directory" for the Interface debugging sessions to the Debug output directory so that your application can load resources. Do this: right click interface and click Properties, choose Debugging from Configuration Properties, set Working Directory to .\Debug
* Now you can run and debug interface through Visual Studio

Optional Components
===

####QXmpp

You can find QXmpp [here](https://github.com/qxmpp-project/qxmpp). The inclusion of the QXmpp enables text chat in the Interface client.

OS X users who tap our [homebrew formulas repository](https://github.com/highfidelity/homebrew-formulas) can install QXmpp via homebrew - `brew install highfidelity/formulas/qxmpp`.

#### Devices

You can support external input/output devices such as Leap Motion, Faceplus, Faceshift, PrioVR, MIDI, Razr Hydra and more by adding each individual SDK in the visible building path. Refer to the readme file available in each device folder in [interface/external/](interface/external) for the detailed explanation of the requirements to use the device.
@@ -229,7 +229,8 @@ void Agent::run() {

// setup an Avatar for the script to use
AvatarData scriptedAvatar;

scriptedAvatar.setForceFaceshiftConnected(true);

// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar.setFaceModelURL(QUrl());
scriptedAvatar.setSkeletonModelURL(QUrl());
@@ -231,32 +231,12 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
delayBufferSample[0] = correctBufferSample[0] * weakChannelAmplitudeRatio;
delayBufferSample[1] = correctBufferSample[1] * weakChannelAmplitudeRatio;

__m64 bufferSamples = _mm_set_pi16(_clientSamples[s + goodChannelOffset],
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET],
_clientSamples[delayedChannelIndex],
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET]);
__m64 addedSamples = _mm_set_pi16(correctBufferSample[0], correctBufferSample[1],
delayBufferSample[0], delayBufferSample[1]);

// perform the MMX add (with saturation) of two correct and delayed samples
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addedSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);

// assign the results from the result of the mmx arithmetic
_clientSamples[s + goodChannelOffset] = shortResults[3];
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET] = shortResults[2];
_clientSamples[delayedChannelIndex] = shortResults[1];
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET] = shortResults[0];
_clientSamples[s + goodChannelOffset] += correctBufferSample[0];
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET] += correctBufferSample[1];
_clientSamples[delayedChannelIndex] += delayBufferSample[0];
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET] += delayBufferSample[1];
}

// The following code is pretty gross and redundant, but AFAIK it's the best way to avoid
// too many conditionals in handling the delay samples at the beginning of _clientSamples.
// Basically we try to take the samples in batches of four, and then handle the remainder
// conditionally to get rid of the rest.

const int DOUBLE_STEREO_OFFSET = 4;
const int TRIPLE_STEREO_OFFSET = 6;

if (numSamplesDelay > 0) {
// if there was a sample delay for this buffer, we need to pull samples prior to the nextOutput
// to stick at the beginning

@@ -266,74 +246,9 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
delayNextOutputStart = bufferStart + ringBufferSampleCapacity - numSamplesDelay;
}

int i = 0;

while (i + 3 < numSamplesDelay) {
// handle the first cases where we can MMX add four samples at once
for (int i = 0; i < numSamplesDelay; i++) {
int parentIndex = i * 2;
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + TRIPLE_STEREO_OFFSET + delayedChannelOffset]);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 2] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 3] * attenuationAndWeakChannelRatio);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);

_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[1];
_clientSamples[parentIndex + TRIPLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[0];

// push the index
i += 4;
}

int parentIndex = i * 2;

if (i + 2 < numSamplesDelay) {
// MMX add only three delayed samples

__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset],
0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 2] * attenuationAndWeakChannelRatio,
0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);

_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[1];

} else if (i + 1 < numSamplesDelay) {
// MMX add two delayed samples
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
0, 0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio, 0, 0);

__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);

_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];

} else if (i < numSamplesDelay) {
// MMX add a single delayed sample
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset], 0, 0, 0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio, 0, 0, 0);

__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);

_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + delayedChannelOffset] += delayNextOutputStart[i] * attenuationAndWeakChannelRatio;
}
}
} else {

@@ -429,6 +344,8 @@ void AudioMixer::readPendingDatagrams() {

void AudioMixer::sendStatsPacket() {
static QJsonObject statsObject;

statsObject["useDynamicJitterBuffers"] = _useDynamicJitterBuffers;
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;

@@ -565,6 +482,7 @@ void AudioMixer::run() {
_useDynamicJitterBuffers = true;
} else {
qDebug() << "Dynamic jitter buffers disabled, using old behavior.";
_useDynamicJitterBuffers = false;
}
}
@@ -61,7 +61,7 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
|| packetType == PacketTypeMicrophoneAudioNoEcho
|| packetType == PacketTypeSilentAudioFrame) {

_incomingAvatarAudioSequenceNumberStats.sequenceNumberReceived(sequence);
SequenceNumberStats::ArrivalInfo packetArrivalInfo = _incomingAvatarAudioSequenceNumberStats.sequenceNumberReceived(sequence);

// grab the AvatarAudioRingBuffer from the vector (or create it if it doesn't exist)
AvatarAudioRingBuffer* avatarRingBuffer = getAvatarAudioRingBuffer();

@@ -84,8 +84,24 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
_ringBuffers.push_back(avatarRingBuffer);
}

// ask the AvatarAudioRingBuffer instance to parse the data
avatarRingBuffer->parseData(packet);

// for now, late packets are simply discarded. In the future, it may be good to insert them into their correct place
// in the ring buffer (if that frame hasn't been mixed yet)
switch (packetArrivalInfo._status) {
case SequenceNumberStats::Early: {
int packetsLost = packetArrivalInfo._seqDiffFromExpected;
avatarRingBuffer->parseDataAndHandleDroppedPackets(packet, packetsLost);
break;
}
case SequenceNumberStats::OnTime: {
// ask the AvatarAudioRingBuffer instance to parse the data
avatarRingBuffer->parseDataAndHandleDroppedPackets(packet, 0);
break;
}
default: {
break;
}
}
} else if (packetType == PacketTypeInjectAudio) {
// this is injected audio

@@ -95,7 +111,8 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
if (!_incomingInjectedAudioSequenceNumberStatsMap.contains(streamIdentifier)) {
_incomingInjectedAudioSequenceNumberStatsMap.insert(streamIdentifier, SequenceNumberStats(INCOMING_SEQ_STATS_HISTORY_LENGTH));
}
_incomingInjectedAudioSequenceNumberStatsMap[streamIdentifier].sequenceNumberReceived(sequence);
SequenceNumberStats::ArrivalInfo packetArrivalInfo =
_incomingInjectedAudioSequenceNumberStatsMap[streamIdentifier].sequenceNumberReceived(sequence);

InjectedAudioRingBuffer* matchingInjectedRingBuffer = NULL;

@@ -112,7 +129,23 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
_ringBuffers.push_back(matchingInjectedRingBuffer);
}

matchingInjectedRingBuffer->parseData(packet);
// for now, late packets are simply discarded. In the future, it may be good to insert them into their correct place
// in the ring buffer (if that frame hasn't been mixed yet)
switch (packetArrivalInfo._status) {
case SequenceNumberStats::Early: {
int packetsLost = packetArrivalInfo._seqDiffFromExpected;
matchingInjectedRingBuffer->parseDataAndHandleDroppedPackets(packet, packetsLost);
break;
}
case SequenceNumberStats::OnTime: {
// ask the AvatarAudioRingBuffer instance to parse the data
matchingInjectedRingBuffer->parseDataAndHandleDroppedPackets(packet, 0);
break;
}
default: {
break;
}
}
} else if (packetType == PacketTypeAudioStreamStats) {

const char* dataAt = packet.data();

@@ -198,7 +231,7 @@ AudioStreamStats AudioMixerClientData::getAudioStreamStatsOfStream(const Positio
streamStats._timeGapWindowAverage = timeGapStats.getWindowAverage();

streamStats._ringBufferFramesAvailable = ringBuffer->framesAvailable();
streamStats._ringBufferCurrentJitterBufferFrames = ringBuffer->getCurrentJitterBufferFrames();
streamStats._ringBufferFramesAvailableAverage = ringBuffer->getFramesAvailableAverage();
streamStats._ringBufferDesiredJitterBufferFrames = ringBuffer->getDesiredJitterBufferFrames();
streamStats._ringBufferStarveCount = ringBuffer->getStarveCount();
streamStats._ringBufferConsecutiveNotMixedCount = ringBuffer->getConsecutiveNotMixedCount();

@@ -272,7 +305,7 @@ QString AudioMixerClientData::getAudioStreamStatsString() const {
QString result;
AudioStreamStats streamStats = _downstreamAudioStreamStats;
result += "DOWNSTREAM.desired:" + QString::number(streamStats._ringBufferDesiredJitterBufferFrames)
+ " current: ?"
+ " available_avg_10s:" + QString::number(streamStats._ringBufferFramesAvailableAverage)
+ " available:" + QString::number(streamStats._ringBufferFramesAvailable)
+ " starves:" + QString::number(streamStats._ringBufferStarveCount)
+ " not_mixed:" + QString::number(streamStats._ringBufferConsecutiveNotMixedCount)

@@ -291,7 +324,8 @@ QString AudioMixerClientData::getAudioStreamStatsString() const {
if (avatarRingBuffer) {
AudioStreamStats streamStats = getAudioStreamStatsOfStream(avatarRingBuffer);
result += " UPSTREAM.mic.desired:" + QString::number(streamStats._ringBufferDesiredJitterBufferFrames)
+ " current:" + QString::number(streamStats._ringBufferCurrentJitterBufferFrames)
+ " desired_calc:" + QString::number(avatarRingBuffer->getCalculatedDesiredJitterBufferFrames())
+ " available_avg_10s:" + QString::number(streamStats._ringBufferFramesAvailableAverage)
+ " available:" + QString::number(streamStats._ringBufferFramesAvailable)
+ " starves:" + QString::number(streamStats._ringBufferStarveCount)
+ " not_mixed:" + QString::number(streamStats._ringBufferConsecutiveNotMixedCount)

@@ -313,7 +347,8 @@ QString AudioMixerClientData::getAudioStreamStatsString() const {
if (_ringBuffers[i]->getType() == PositionalAudioRingBuffer::Injector) {
AudioStreamStats streamStats = getAudioStreamStatsOfStream(_ringBuffers[i]);
result += " UPSTREAM.inj.desired:" + QString::number(streamStats._ringBufferDesiredJitterBufferFrames)
+ " current:" + QString::number(streamStats._ringBufferCurrentJitterBufferFrames)
+ " desired_calc:" + QString::number(_ringBuffers[i]->getCalculatedDesiredJitterBufferFrames())
+ " available_avg_10s:" + QString::number(streamStats._ringBufferFramesAvailableAverage)
+ " available:" + QString::number(streamStats._ringBufferFramesAvailable)
+ " starves:" + QString::number(streamStats._ringBufferStarveCount)
+ " not_mixed:" + QString::number(streamStats._ringBufferConsecutiveNotMixedCount)
@@ -18,10 +18,53 @@ AvatarAudioRingBuffer::AvatarAudioRingBuffer(bool isStereo, bool dynamicJitterBu

}

int AvatarAudioRingBuffer::parseData(const QByteArray& packet) {
timeGapStatsFrameReceived();
updateDesiredJitterBufferFrames();
int AvatarAudioRingBuffer::parseDataAndHandleDroppedPackets(const QByteArray& packet, int packetsSkipped) {
frameReceivedUpdateTimingStats();

_shouldLoopbackForNode = (packetTypeForPacket(packet) == PacketTypeMicrophoneAudioWithEcho);
return PositionalAudioRingBuffer::parseData(packet);

// skip the packet header (includes the source UUID)
int readBytes = numBytesForPacketHeader(packet);

// skip the sequence number
readBytes += sizeof(quint16);

// hop over the channel flag that has already been read in AudioMixerClientData
readBytes += sizeof(quint8);
// read the positional data
readBytes += parsePositionalData(packet.mid(readBytes));

if (packetTypeForPacket(packet) == PacketTypeSilentAudioFrame) {
// this source had no audio to send us, but this counts as a packet
// write silence equivalent to the number of silent samples they just sent us
int16_t numSilentSamples;

memcpy(&numSilentSamples, packet.data() + readBytes, sizeof(int16_t));
readBytes += sizeof(int16_t);

// add silent samples for the dropped packets as well.
// ASSUME that each dropped packet had same number of silent samples as this one
numSilentSamples *= (packetsSkipped + 1);

// NOTE: fixes a bug in old clients that would send garbage for their number of silentSamples
// CAN'T DO THIS because ScriptEngine.cpp sends frames of different size due to having a different sending interval
// (every 16.667ms) than Audio.cpp (every 10.667ms)
//numSilentSamples = getSamplesPerFrame();

addDroppableSilentSamples(numSilentSamples);

} else {
int numAudioBytes = packet.size() - readBytes;
int numAudioSamples = numAudioBytes / sizeof(int16_t);

// add silent samples for the dropped packets.
// ASSUME that each dropped packet had same number of samples as this one
if (packetsSkipped > 0) {
addDroppableSilentSamples(packetsSkipped * numAudioSamples);
}

// there is audio data to read
readBytes += writeData(packet.data() + readBytes, numAudioBytes);
}
return readBytes;
}
@@ -20,7 +20,7 @@ class AvatarAudioRingBuffer : public PositionalAudioRingBuffer {
public:
AvatarAudioRingBuffer(bool isStereo = false, bool dynamicJitterBuffer = false);

int parseData(const QByteArray& packet);
int parseDataAndHandleDroppedPackets(const QByteArray& packet, int packetsSkipped);
private:
// disallow copying of AvatarAudioRingBuffer objects
AvatarAudioRingBuffer(const AvatarAudioRingBuffer&);
@@ -55,6 +55,7 @@ void MetavoxelServer::run() {
_persister = new MetavoxelPersister(this);
QThread* persistenceThread = new QThread(this);
_persister->moveToThread(persistenceThread);
_persister->connect(persistenceThread, SIGNAL(finished()), SLOT(deleteLater()));
persistenceThread->start();

// queue up the load
cmake/macros/HifiLibrarySearchHints.cmake (new file, 27 lines)

@@ -0,0 +1,27 @@
#
# HifiLibrarySearchHints.cmake
#
# Created by Stephen Birarda on July 24th, 2014
# Copyright 2014 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

macro(HIFI_LIBRARY_SEARCH_HINTS LIBRARY_FOLDER)
string(TOUPPER ${LIBRARY_FOLDER} LIBRARY_PREFIX)
set(${LIBRARY_PREFIX}_SEARCH_DIRS "")

if (${LIBRARY_PREFIX}_ROOT_DIR)
set(${LIBRARY_PREFIX}_SEARCH_DIRS "${${LIBRARY_PREFIX}_ROOT_DIR}")
endif ()

if (DEFINED ENV{${LIBRARY_PREFIX}_ROOT_DIR})
set(${LIBRARY_PREFIX}_SEARCH_DIRS "${${LIBRARY_PREFIX}_SEARCH_DIRS}" "$ENV{${LIBRARY_PREFIX}_ROOT_DIR}")
endif ()

if (DEFINED ENV{HIFI_LIB_DIR})
set(${LIBRARY_PREFIX}_SEARCH_DIRS "${${LIBRARY_PREFIX}_SEARCH_DIRS}" "$ENV{HIFI_LIB_DIR}/${LIBRARY_FOLDER}")
endif ()

endmacro(HIFI_LIBRARY_SEARCH_HINTS _library_folder)
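Editorial aside: a minimal sketch of a caller of this macro, mirroring the FindFaceshift.cmake change further down in this diff; nothing here is new API, only the combination is illustrative.

# Caller sketch: derive FACESHIFT_SEARCH_DIRS from the macro, then search with those hints.
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("faceshift")
# string(TOUPPER "faceshift") yields the FACESHIFT prefix, so the macro fills
# FACESHIFT_SEARCH_DIRS from FACESHIFT_ROOT_DIR, $ENV{FACESHIFT_ROOT_DIR} and
# $ENV{HIFI_LIB_DIR}/faceshift, leaving out whichever of those are unset.
find_path(FACESHIFT_INCLUDE_DIRS fsbinarystream.h PATH_SUFFIXES include HINTS ${FACESHIFT_SEARCH_DIRS})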
@@ -12,31 +12,13 @@
# Copyright (c) 2014 High Fidelity
#

if (FACEPLUS_LIBRARIES AND FACEPLUS_INCLUDE_DIRS)
# in cache already
set(FACEPLUS_FOUND TRUE)
else (FACEPLUS_LIBRARIES AND FACEPLUS_INCLUDE_DIRS)
find_path(FACEPLUS_INCLUDE_DIRS faceplus.h ${FACEPLUS_ROOT_DIR}/include)
find_path(FACEPLUS_INCLUDE_DIRS faceplus.h ${FACEPLUS_ROOT_DIR}/include)

if (WIN32)
find_library(FACEPLUS_LIBRARIES faceplus.lib ${FACEPLUS_ROOT_DIR}/win32/)
endif (WIN32)

if (FACEPLUS_INCLUDE_DIRS AND FACEPLUS_LIBRARIES)
set(FACEPLUS_FOUND TRUE)
endif (FACEPLUS_INCLUDE_DIRS AND FACEPLUS_LIBRARIES)
if (WIN32)
find_library(FACEPLUS_LIBRARIES faceplus.lib ${FACEPLUS_ROOT_DIR}/win32/)
endif (WIN32)

if (FACEPLUS_FOUND)
if (NOT FACEPLUS_FIND_QUIETLY)
message(STATUS "Found Faceplus... ${FACEPLUS_LIBRARIES}")
endif (NOT FACEPLUS_FIND_QUIETLY)
else ()
if (FACEPLUS_FIND_REQUIRED)
message(FATAL_ERROR "Could not find Faceplus")
endif (FACEPLUS_FIND_REQUIRED)
endif ()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(FACEPLUS DEFAULT_MSG FACEPLUS_INCLUDE_DIRS FACEPLUS_LIBRARIES)

# show the FACEPLUS_INCLUDE_DIRS and FACEPLUS_LIBRARIES variables only in the advanced view
mark_as_advanced(FACEPLUS_INCLUDE_DIRS FACEPLUS_LIBRARIES)

endif (FACEPLUS_LIBRARIES AND FACEPLUS_INCLUDE_DIRS)
mark_as_advanced(FACEPLUS_INCLUDE_DIRS FACEPLUS_LIBRARIES)
@@ -18,47 +18,28 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

if (FACESHIFT_LIBRARIES AND FACESHIFT_INCLUDE_DIRS)
# in cache already
set(FACESHIFT_FOUND TRUE)
else ()
find_path(FACESHIFT_INCLUDE_DIRS fsbinarystream.h ${FACESHIFT_ROOT_DIR}/include)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("faceshift")

if (APPLE)
find_library(FACESHIFT_LIBRARIES libfaceshift.a ${FACESHIFT_ROOT_DIR}/lib/MacOS/)
elseif (UNIX)
find_library(FACESHIFT_LIBRARIES libfaceshift.a ${FACESHIFT_ROOT_DIR}/lib/UNIX/)
elseif (WIN32)
# For windows, we're going to build the faceshift sources directly into the interface build
# and not link to a prebuilt library. This is because the VS2010 linker doesn't like cross-linking
# between release and debug libraries. If we change that in the future we can make win32 more
# like the other platforms
#find_library(FACESHIFT_LIBRARIES faceshift.lib ${FACESHIFT_ROOT_DIR}/lib/WIN32/)
endif ()

if (WIN32)
# Windows only cares about the headers
if (FACESHIFT_INCLUDE_DIRS)
set(FACESHIFT_FOUND TRUE)
endif (FACESHIFT_INCLUDE_DIRS)
else ()
# Mac and Unix requires libraries
if (FACESHIFT_INCLUDE_DIRS AND FACESHIFT_LIBRARIES)
set(FACESHIFT_FOUND TRUE)
endif (FACESHIFT_INCLUDE_DIRS AND FACESHIFT_LIBRARIES)
endif ()

if (FACESHIFT_FOUND)
if (NOT FACESHIFT_FIND_QUIETLY)
message(STATUS "Found Faceshift... ${FACESHIFT_LIBRARIES}")
endif (NOT FACESHIFT_FIND_QUIETLY)
else ()
if (FACESHIFT_FIND_REQUIRED)
message(FATAL_ERROR "Could not find Faceshift")
endif (FACESHIFT_FIND_REQUIRED)
endif ()

# show the FACESHIFT_INCLUDE_DIRS and FACESHIFT_LIBRARIES variables only in the advanced view
mark_as_advanced(FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES)
find_path(FACESHIFT_INCLUDE_DIRS fsbinarystream.h PATH_SUFFIXES include HINTS ${FACESHIFT_SEARCH_DIRS})

if (APPLE)
set(ARCH_DIR "MacOS")
elseif (UNIX)
set(ARCH_DIR "UNIX")
elseif (WIN32)
set(ARCH_DIR "Win32")
endif ()

find_library(FACESHIFT_LIBRARY_RELEASE NAME faceshift PATH_SUFFIXES lib/${ARCH_DIR} HINTS ${FACESHIFT_SEARCH_DIRS})
find_library(FACESHIFT_LIBRARY_DEBUG NAME faceshiftd PATH_SUFFIXES lib/${ARCH_DIR} HINTS ${FACESHIFT_SEARCH_DIRS})

include(SelectLibraryConfigurations)
select_library_configurations(FACESHIFT)

set(FACESHIFT_LIBRARIES ${FACESHIFT_LIBRARY})

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(FACESHIFT DEFAULT_MSG FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES)

mark_as_advanced(FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES FACESHIFT_SEARCH_DIRS)
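Editorial aside: several of the rewritten modules in this commit (Faceshift above, and GLEW, LeapMotion and LibOVR below) adopt the same release/debug pattern around CMake's SelectLibraryConfigurations. A minimal sketch of that pattern with hypothetical FOO names, not tied to any real SDK:

# Hypothetical FOO library, illustrating the release/debug selection the modules rely on.
find_library(FOO_LIBRARY_RELEASE foo PATH_SUFFIXES lib HINTS ${FOO_SEARCH_DIRS})
find_library(FOO_LIBRARY_DEBUG foo_d PATH_SUFFIXES lib HINTS ${FOO_SEARCH_DIRS})

include(SelectLibraryConfigurations)
select_library_configurations(FOO)
# Roughly: if both variants are found, FOO_LIBRARY / FOO_LIBRARIES carry the
# "optimized <release>;debug <debug>" form so multi-config generators such as
# Visual Studio link the matching variant per configuration; if only one variant
# is found, that single library is used for every configuration.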
@@ -6,7 +6,7 @@
#
# GLEW_FOUND
# GLEW_INCLUDE_DIRS
# GLEW_LIBRARY
# GLEW_LIBRARIES
#
# Created on 2/6/2014 by Stephen Birarda
# Copyright 2014 High Fidelity, Inc.

@@ -18,27 +18,22 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

if (GLEW_INCLUDE_DIRS AND GLEW_LIBRARY)
set(GLEW_FOUND TRUE)
else ()
if (WIN32)
set(WIN_GLEW_SEARCH_DIRS "${GLEW_ROOT_DIR}" "$ENV{GLEW_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/glew")

find_path(GLEW_INCLUDE_DIRS GL/glew.h PATH_SUFFIXES include HINTS ${WIN_GLEW_SEARCH_DIRS})

find_library(GLEW_LIBRARY glew32s PATH_SUFFIXES "lib/Release/Win32" "lib" HINTS ${WIN_GLEW_SEARCH_DIRS})
endif ()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW DEFAULT_MSG GLEW_INCLUDE_DIRS GLEW_LIBRARY)
if (WIN32)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("glew")

find_path(GLEW_INCLUDE_DIRS GL/glew.h PATH_SUFFIXES include HINTS ${GLEW_SEARCH_DIRS})

if (GLEW_FOUND)
if (NOT GLEW_FIND_QUIETLY)
message(STATUS "Found GLEW: ${GLEW_LIBRARY}")
endif ()
else ()
if (GLEW_FIND_REQUIRED)
message(FATAL_ERROR "Could not find GLEW")
endif ()
endif ()
endif ()
find_library(GLEW_LIBRARY_RELEASE glew32s PATH_SUFFIXES "lib/Release/Win32" "lib" HINTS ${GLEW_SEARCH_DIRS})
find_library(GLEW_LIBRARY_DEBUG glew32s PATH_SUFFIXES "lib/Debug/Win32" "lib" HINTS ${GLEW_SEARCH_DIRS})

include(SelectLibraryConfigurations)
select_library_configurations(GLEW)
endif ()

set(GLEW_LIBRARIES "${GLEW_LIBRARY}")

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW DEFAULT_MSG GLEW_INCLUDE_DIRS GLEW_LIBRARIES)

mark_as_advanced(GLEW_INCLUDE_DIRS GLEW_LIBRARIES GLEW_SEARCH_DIRS)
@@ -1,67 +1,27 @@
# FindGLM - attempts to locate the glm matrix/vector library.
#
# This module defines the following variables (on success):
# GLM_INCLUDE_DIRS - where to find glm/glm.hpp
# GLM_FOUND - if the library was successfully located
#
# It is trying a few standard installation locations, but can be customized
# with the following variables:
# GLM_ROOT_DIR - root directory of a glm installation
# Headers are expected to be found in either:
# <GLM_ROOT_DIR>/glm/glm.hpp OR
# <GLM_ROOT_DIR>/include/glm/glm.hpp
# This variable can either be a cmake or environment
# variable. Note however that changing the value
# of the environment varible will NOT result in
# re-running the header search and therefore NOT
# adjust the variables set by this module.

# This is a modified version of the FindGLM module included with CMake.
# Copyright 2014 High Fidelity
# FindGLM.cmake
#
#=============================================================================
# Copyright 2012 Carsten Neumann
# Try to find GLM include path.
# Once done this will define
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
# GLM_INCLUDE_DIRS
#
# Created on 7/17/2014 by Stephen Birarda
# Copyright 2014 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

if (GLM_INCLUDE_DIR)
set(GLM_INCLUDE_DIRS "${GLM_INCLUDE_DIR}")
set(GLM_FOUND TRUE)
else ()
# default search dirs
set(_glm_HEADER_SEARCH_DIRS "$ENV{HIFI_LIB_DIR}/glm" "/usr/include" "/usr/local/include")
# setup hints for GLM search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("glm")

# check environment variable
set(_glm_ENV_ROOT_DIR "$ENV{GLM_ROOT_DIR}")
# locate header
find_path(GLM_INCLUDE_DIR "glm/glm.hpp" HINTS ${GLM_SEARCH_DIRS})
set(GLM_INCLUDE_DIRS "${GLM_INCLUDE_DIR}")

if (NOT GLM_ROOT_DIR AND _glm_ENV_ROOT_DIR)
set(GLM_ROOT_DIR "${_glm_ENV_ROOT_DIR}")
endif ()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLM DEFAULT_MSG GLM_INCLUDE_DIRS)

# put user specified location at beginning of search
if (GLM_ROOT_DIR)
set(_glm_HEADER_SEARCH_DIRS "${GLM_ROOT_DIR}" "${GLM_ROOT_DIR}/include" "$ENV{HIFI_LIB_DIR}/glm" ${_glm_HEADER_SEARCH_DIRS})
ENDIF()

# locate header
find_path(GLM_INCLUDE_DIR "glm/glm.hpp" PATHS ${_glm_HEADER_SEARCH_DIRS})

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLM DEFAULT_MSG GLM_INCLUDE_DIR)

if (GLM_FOUND)
set(GLM_INCLUDE_DIRS "${GLM_INCLUDE_DIR}")

if (NOT GLM_FIND_QUIETLY)
MESSAGE(STATUS "GLM_INCLUDE_DIR = ${GLM_INCLUDE_DIR}")
endif (NOT GLM_FIND_QUIETLY)
endif ()
endif ()
mark_as_advanced(GLM_INCLUDE_DIRS GLM_SEARCH_DIRS)
@@ -5,9 +5,8 @@
# Once done this will define
#
# GLUT_FOUND
# GLUT_INCLUDE_DIR
# GLUT_INCLUDE_DIRS
# GLUT_LIBRARIES
# GLUT_DLL_PATH - Optionally defined for Win32, if not in path
#
# Created on 2/6/2014 by Stephen Birarda
# Copyright 2014 High Fidelity, Inc.

@@ -19,73 +18,30 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

if (GLUT_INCLUDE_DIR AND GLUT_LIBRARIES)
set(GLUT_FOUND TRUE)
else ()
if (WIN32)
set(WIN_GLUT_SEARCH_DIRS "${GLUT_ROOT_DIR}" "$ENV{GLUT_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/freeglut" "${OPENGL_INCLUDE_DIR}")
find_path(GLUT_INCLUDE_DIR GL/glut.h PATH_SUFFIXES include HINTS ${WIN_GLUT_SEARCH_DIRS})
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("freeglut")

find_library(GLUT_glut_LIBRARY freeglut PATH_SUFFIXES lib HINTS ${WIN_GLUT_SEARCH_DIRS})
else ()
find_path(GLUT_INCLUDE_DIR GL/glut.h
"${GLUT_LOCATION}/include"
"$ENV{GLUT_LOCATION}/include"
/usr/include
/usr/include/GL
/usr/local/include
/usr/openwin/share/include
/usr/openwin/include
/usr/X11R6/include
/usr/include/X11
/opt/graphics/OpenGL/include
/opt/graphics/OpenGL/contrib/libglut
)
find_library(GLUT_glut_LIBRARY glut
"${GLUT_LOCATION}/lib"
"$ENV{GLUT_LOCATION}/lib"
/usr/lib
/usr/local/lib
/usr/openwin/lib
/usr/X11R6/lib
)
find_library(GLUT_Xi_LIBRARY Xi
"${GLUT_LOCATION}/lib"
"$ENV{GLUT_LOCATION}/lib"
/usr/lib
/usr/local/lib
/usr/openwin/lib
/usr/X11R6/lib
)
find_library(GLUT_Xmu_LIBRARY Xmu
"${GLUT_LOCATION}/lib"
"$ENV{GLUT_LOCATION}/lib"
/usr/lib
/usr/local/lib
/usr/openwin/lib
/usr/X11R6/lib
)
endif ()

if (GLUT_INCLUDE_DIR AND GLUT_glut_LIBRARY)
# Is -lXi and -lXmu required on all platforms that have it?
# If not, we need some way to figure out what platform we are on.
set(GLUT_LIBRARIES ${GLUT_glut_LIBRARY} ${GLUT_Xmu_LIBRARY} ${GLUT_Xi_LIBRARY})
endif ()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLUT DEFAULT_MSG GLUT_INCLUDE_DIR GLUT_LIBRARIES)
if (WIN32)
set(GLUT_HINT_DIRS "${FREEGLUT_SEARCH_DIRS} ${OPENGL_INCLUDE_DIR}")

if (GLUT_FOUND)
if (NOT GLUT_FIND_QUIETLY)
message(STATUS "Found GLUT: ${GLUT_LIBRARIES}")
endif ()
else ()
if (GLUT_FIND_REQUIRED)
message(FATAL_ERROR "Could not find GLUT")
endif ()
endif ()
find_path(GLUT_INCLUDE_DIRS GL/glut.h PATH_SUFFIXES include HINTS ${FREEGLUT_SEARCH_DIRS})
find_library(GLUT_LIBRARY freeglut PATH_SUFFIXES lib HINTS ${FREEGLUT_SEARCH_DIRS})
else ()
find_path(GLUT_INCLUDE_DIRS GL/glut.h PATH_SUFFIXES include HINTS ${FREEGLUT_SEARCH_DIRS})
find_library(GLUT_LIBRARY glut PATH_SUFFIXES lib HINTS ${FREEGLUT_SEARCH_DIRS})
endif ()

mark_as_advanced(GLUT_glut_LIBRARY GLUT_Xmu_LIBRARY GLUT_Xi_LIBRARY)
include(FindPackageHandleStandardArgs)

endif ()
set(GLUT_LIBRARIES "${GLUT_LIBRARY}" "${XMU_LIBRARY}" "${XI_LIBRARY}")

if (UNIX)
find_library(XI_LIBRARY Xi PATH_SUFFIXES lib HINTS ${FREEGLUT_SEARCH_DIRS})
find_library(XMU_LIBRARY Xmu PATH_SUFFIXES lib HINTS ${FREEGLUT_SEARCH_DIRS})

find_package_handle_standard_args(GLUT DEFAULT_MSG GLUT_INCLUDE_DIRS GLUT_LIBRARIES XI_LIBRARY XMU_LIBRARY)
else ()
find_package_handle_standard_args(GLUT DEFAULT_MSG GLUT_INCLUDE_DIRS GLUT_LIBRARIES)
endif ()

mark_as_advanced(GLUT_INCLUDE_DIRS GLUT_LIBRARIES GLUT_LIBRARY XI_LIBRARY XMU_LIBRARY FREEGLUT_SEARCH_DIRS)
cmake/modules/FindLeapMotion.cmake (new file, 35 lines)

@@ -0,0 +1,35 @@
# Try to find the LeapMotion library
#
# You must provide a LEAPMOTION_ROOT_DIR which contains lib and include directories
#
# Once done this will define
#
# LEAPMOTION_FOUND - system found LEAPMOTION
# LEAPMOTION_INCLUDE_DIRS - the LEAPMOTION include directory
# LEAPMOTION_LIBRARIES - Link this to use LEAPMOTION
#
# Created on 6/2/2014 by Sam Cake
# Copyright (c) 2014 High Fidelity
#

include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("leapmotion")

find_path(LEAPMOTION_INCLUDE_DIRS Leap.h PATH_SUFFIXES include HINTS ${LEAPMOTION_SEARCH_DIRS})

if (WIN32)
find_library(LEAPMOTION_LIBRARY_DEBUG Leapd PATH_SUFFIXES lib/x86 HINTS ${LEAPMOTION_SEARCH_DIRS})
find_library(LEAPMOTION_LIBRARY_RELEASE Leap PATH_SUFFIXES lib/x86 HINTS ${LEAPMOTION_SEARCH_DIRS})
elseif (APPLE)
find_library(LEAPMOTION_LIBRARY_RELEASE Leap PATH_SUFFIXES lib HINTS ${LEAPMOTION_SEARCH_DIRS})
endif ()

include(SelectLibraryConfigurations)
select_library_configurations(LEAPMOTION)

set(LEAPMOTION_LIBRARIES "${LEAPMOTION_LIBRARY}")

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LEAPMOTION DEFAULT_MSG LEAPMOTION_INCLUDE_DIRS LEAPMOTION_LIBRARIES)

mark_as_advanced(LEAPMOTION_INCLUDE_DIRS LEAPMOTION_LIBRARIES LEAPMOTION_SEARCH_DIRS)
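Editorial aside: a minimal sketch of how a CMakeLists.txt could consume this new module, assuming cmake/modules is on CMAKE_MODULE_PATH and a target named interface exists (the target name and the HAVE_LEAPMOTION definition are stand-ins, not taken from this commit):

# Hypothetical consumer of FindLeapMotion.cmake; expects LEAPMOTION_ROOT_DIR or
# $ENV{HIFI_LIB_DIR}/leapmotion to point at the Leap Motion SDK.
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules")
find_package(LeapMotion)

if (LEAPMOTION_FOUND)
  include_directories(SYSTEM ${LEAPMOTION_INCLUDE_DIRS})
  target_link_libraries(interface ${LEAPMOTION_LIBRARIES})
  add_definitions(-DHAVE_LEAPMOTION)   # hypothetical flag gating the optional device code
endif ()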
@@ -18,52 +18,50 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

if (LIBOVR_LIBRARIES AND LIBOVR_INCLUDE_DIRS)
# in cache already
set(LIBOVR_FOUND TRUE)
else (LIBOVR_LIBRARIES AND LIBOVR_INCLUDE_DIRS)
set(LIBOVR_SEARCH_DIRS "${LIBOVR_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/oculus")
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("oculus")

find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${OCULUS_SEARCH_DIRS})
find_path(LIBOVR_UTIL_INCLUDE_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${OCULUS_SEARCH_DIRS})

# add the util include dir to the general include dirs
set(LIBOVR_INCLUDE_DIRS "${LIBOVR_INCLUDE_DIRS}" "${LIBOVR_UTIL_INCLUDE_DIR}")

include(SelectLibraryConfigurations)

if (APPLE)
find_library(LIBOVR_LIBRARY_DEBUG "Lib/MacOS/Debug/libovr.a" HINTS ${OCULUS_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE "Lib/MacOS/Release/libovr.a" HINTS ${OCULUS_SEARCH_DIRS})
elseif (UNIX)
find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/)
find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/)

find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})
if (CMAKE_CL_64)
set(LINUX_ARCH_DIR "i386")
else()
set(LINUX_ARCH_DIR "x86_64")
endif()

if (APPLE)
find_library(LIBOVR_LIBRARIES "Lib/MacOS/Release/libovr.a" HINTS ${LIBOVR_SEARCH_DIRS})
elseif (UNIX)
find_library(UDEV_LIBRARY libudev.a /usr/lib/x86_64-linux-gnu/)
find_library(XINERAMA_LIBRARY libXinerama.a /usr/lib/x86_64-linux-gnu/)

if (CMAKE_CL_64)
set(LINUX_ARCH_DIR "i386")
else()
set(LINUX_ARCH_DIR "x86_64")
endif()

find_library(OVR_LIBRARY "Lib/Linux/${CMAKE_BUILD_TYPE}/${LINUX_ARCH_DIR}/libovr.a" HINTS ${LIBOVR_SEARCH_DIRS})
if (UDEV_LIBRARY AND XINERAMA_LIBRARY AND OVR_LIBRARY)
set(LIBOVR_LIBRARIES "${OVR_LIBRARY};${UDEV_LIBRARY};${XINERAMA_LIBRARY}" CACHE INTERNAL "Oculus libraries")
endif (UDEV_LIBRARY AND XINERAMA_LIBRARY AND OVR_LIBRARY)
elseif (WIN32)
find_library(LIBOVR_RELEASE_LIBRARIES "Lib/Win32/libovr.lib" HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_DEBUG_LIBRARIES "Lib/Win32/libovrd.lib" HINTS ${LIBOVR_SEARCH_DIRS})

set(LIBOVR_LIBRARIES "${LIBOVR_RELEASE_LIBRARIES} ${LIBOVR_DEBUG_LIBRARIES}")
endif ()
find_library(LIBOVR_LIBRARY_DEBUG "Lib/Linux/Debug/${LINUX_ARCH_DIR}/libovr.a" HINTS ${OCULUS_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE "Lib/Linux/Release/${LINUX_ARCH_DIR}/libovr.a" HINTS ${OCULUS_SEARCH_DIRS})

select_library_configurations(UDEV)
select_library_configurations(XINERAMA)

elseif (WIN32)
find_library(LIBOVR_LIBRARY_DEBUG "Lib/Win32/libovrd.lib" HINTS ${OCULUS_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE "Lib/Win32/libovr.lib" HINTS ${OCULUS_SEARCH_DIRS})
endif ()

if (LIBOVR_INCLUDE_DIRS AND LIBOVR_LIBRARIES)
set(LIBOVR_FOUND TRUE)
endif (LIBOVR_INCLUDE_DIRS AND LIBOVR_LIBRARIES)

if (LIBOVR_FOUND)
if (NOT LibOVR_FIND_QUIETLY)
message(STATUS "Found LibOVR: ${LIBOVR_LIBRARIES}")
endif (NOT LibOVR_FIND_QUIETLY)
else (LIBOVR_FOUND)
if (LibOVR_FIND_REQUIRED)
message(FATAL_ERROR "Could not find LibOVR")
endif (LibOVR_FIND_REQUIRED)
endif (LIBOVR_FOUND)
select_library_configurations(LIBOVR)

# show the LIBOVR_INCLUDE_DIRS and LIBOVR_LIBRARIES variables only in the advanced view
mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES)
set(LIBOVR_LIBRARIES "${LIBOVR_LIBRARIES}" "${UDEV_LIBRARY}" "${XINERAMA_LIBRARY}")

endif (LIBOVR_LIBRARIES AND LIBOVR_INCLUDE_DIRS)
include(FindPackageHandleStandardArgs)
if (UNIX)
find_package_handle_standard_args(LIBOVR DEFAULT_MSG LIBOVR_INCLUDE_DIRS LIBOVR_UTIL_INCLUDE_DIR LIBOVR_LIBRARIES)
elseif ()
find_package_handle_standard_args(LIBOVR DEFAULT_MSG LIBOVR_INCLUDE_DIRS LIBOVR_UTIL_INCLUDE_DIR LIBOVR_LIBRARIES UDEV_LIBRARY XINERAMA_LIBRARY)
endif ()

mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES OCULUS_SEARCH_DIRS)
234
cmake/modules/FindOpenSSL.cmake
Normal file
234
cmake/modules/FindOpenSSL.cmake
Normal file
|
@ -0,0 +1,234 @@
|
|||
# - Try to find the OpenSSL encryption library
|
||||
# Once done this will define
|
||||
#
|
||||
# OPENSSL_ROOT_DIR - Set this variable to the root installation of OpenSSL
|
||||
#
|
||||
# Read-Only variables:
|
||||
# OPENSSL_FOUND - system has the OpenSSL library
|
||||
# OPENSSL_INCLUDE_DIR - the OpenSSL include directory
|
||||
# OPENSSL_LIBRARIES - The libraries needed to use OpenSSL
|
||||
# OPENSSL_VERSION - This is set to $major.$minor.$revision$path (eg. 0.9.8s)
|
||||
#
|
||||
# Modified on 7/16/2014 by Stephen Birarda
|
||||
# This is an adapted version of the FindOpenSSL.cmake module distributed with Cmake 2.8.12.2
|
||||
# The original license for that file is displayed below.
|
||||
#
|
||||
#=============================================================================
|
||||
# Copyright 2006-2009 Kitware, Inc.
|
||||
# Copyright 2006 Alexander Neundorf <neundorf@kde.org>
|
||||
# Copyright 2009-2011 Mathieu Malaterre <mathieu.malaterre@gmail.com>
|
||||
#
|
||||
# Distributed under the OSI-approved BSD License (the "License");
|
||||
# see accompanying file Copyright.txt for details.
|
||||
#
|
||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the License for more information.
|
||||
#=============================================================================
|
||||
# (To distribute this file outside of CMake, substitute the full
|
||||
# License text for the above reference.)
|
||||
|
||||
if (UNIX)
|
||||
find_package(PkgConfig QUIET)
|
||||
pkg_check_modules(_OPENSSL QUIET openssl)
|
||||
endif ()
|
||||
|
||||
if (WIN32)
|
||||
# http://www.slproweb.com/products/Win32OpenSSL.html
|
||||
set(_OPENSSL_ROOT_HINTS ${OPENSSL_ROOT_DIR} $ENV{OPENSSL_ROOT_DIR} $ENV{HIFI_LIB_DIR}/openssl
|
||||
"[HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\OpenSSL (32-bit)_is1;Inno Setup: App Path]"
|
||||
"[HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\OpenSSL (64-bit)_is1;Inno Setup: App Path]"
|
||||
)
|
||||
file(TO_CMAKE_PATH "$ENV{PROGRAMFILES}" _programfiles)
|
||||
set(_OPENSSL_ROOT_PATHS "${_programfiles}/OpenSSL" "${_programfiles}/OpenSSL-Win32" "${_programfiles}/OpenSSL-Win64"
|
||||
"C:/OpenSSL/" "C:/OpenSSL-Win32/" "C:/OpenSSL-Win64/"
|
||||
)
|
||||
unset(_programfiles)
|
||||
set(_OPENSSL_ROOT_HINTS_AND_PATHS HINTS ${_OPENSSL_ROOT_HINTS} PATHS ${_OPENSSL_ROOT_PATHS})
|
||||
else ()
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("openssl")
|
||||
|
||||
set(_OPENSSL_ROOT_HINTS_AND_PATHS ${OPENSSL_SEARCH_DIRS})
|
||||
endif ()
|
||||
|
||||
find_path(OPENSSL_INCLUDE_DIR NAMES openssl/ssl.h HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_INCLUDEDIR} PATH_SUFFIXES include)
|
||||
|
||||
if (WIN32 AND NOT CYGWIN)
|
||||
if (MSVC)
|
||||
# /MD and /MDd are the standard values - if someone wants to use
|
||||
# others, the libnames have to change here too
|
||||
# use also ssl and ssleay32 in debug as fallback for openssl < 0.9.8b
|
||||
# TODO: handle /MT and static lib
|
||||
# In Visual C++ naming convention each of these four kinds of Windows libraries has it's standard suffix:
|
||||
# * MD for dynamic-release
|
||||
# * MDd for dynamic-debug
|
||||
# * MT for static-release
|
||||
# * MTd for static-debug
|
||||
|
||||
# Implementation details:
|
||||
# We are using the libraries located in the VC subdir instead of the parent directory eventhough :
|
||||
# libeay32MD.lib is identical to ../libeay32.lib, and
|
||||
# ssleay32MD.lib is identical to ../ssleay32.lib
|
||||
find_library(LIB_EAY_DEBUG NAMES libeay32MDd libeay32d
|
||||
${_OPENSSL_ROOT_HINTS_AND_PATHS} PATH_SUFFIXES "lib" "VC" "lib/VC"
|
||||
)
|
||||
|
||||
find_library(LIB_EAY_RELEASE NAMES libeay32MD libeay32
|
||||
${_OPENSSL_ROOT_HINTS_AND_PATHS} PATH_SUFFIXES "lib" "VC" "lib/VC"
|
||||
)
|
||||
|
||||
find_library(SSL_EAY_DEBUG NAMES ssleay32MDd ssleay32d
|
||||
${_OPENSSL_ROOT_HINTS_AND_PATHS} PATH_SUFFIXES "lib" "VC" "lib/VC"
|
||||
)
|
||||
|
||||
find_library(SSL_EAY_RELEASE NAMES ssleay32MD ssleay32 ssl
|
||||
${_OPENSSL_ROOT_HINTS_AND_PATHS} PATH_SUFFIXES "lib" "VC" "lib/VC"
|
||||
)
|
||||
|
||||
set(LIB_EAY_LIBRARY_DEBUG "${LIB_EAY_DEBUG}")
|
||||
set(LIB_EAY_LIBRARY_RELEASE "${LIB_EAY_RELEASE}")
|
||||
set(SSL_EAY_LIBRARY_DEBUG "${SSL_EAY_DEBUG}")
|
||||
set(SSL_EAY_LIBRARY_RELEASE "${SSL_EAY_RELEASE}")
|
||||
|
||||
include(SelectLibraryConfigurations)
|
||||
select_library_configurations(LIB_EAY)
|
||||
select_library_configurations(SSL_EAY)
|
||||
|
||||
set(OPENSSL_LIBRARIES ${SSL_EAY_LIBRARY} ${LIB_EAY_LIBRARY})
|
||||
elseif (MINGW)
|
||||
# same player, for MinGW
|
||||
set(LIB_EAY_NAMES libeay32)
|
||||
set(SSL_EAY_NAMES ssleay32)
|
||||
|
||||
if (CMAKE_CROSSCOMPILING)
|
||||
list(APPEND LIB_EAY_NAMES crypto)
|
||||
list(APPEND SSL_EAY_NAMES ssl)
|
||||
endif ()
|
||||
|
||||
find_library(LIB_EAY NAMES ${LIB_EAY_NAMES}
|
||||
${_OPENSSL_ROOT_HINTS_AND_PATHS} PATH_SUFFIXES "lib" "lib/MinGW"
|
||||
)
|
||||
|
||||
find_library(SSL_EAY NAMES ${SSL_EAY_NAMES}
|
||||
${_OPENSSL_ROOT_HINTS_AND_PATHS} PATH_SUFFIXES "lib" "lib/MinGW"
|
||||
)
|
||||
|
||||
mark_as_advanced(SSL_EAY LIB_EAY)
|
||||
set(OPENSSL_LIBRARIES ${SSL_EAY} ${LIB_EAY})
|
||||
unset(LIB_EAY_NAMES)
|
||||
unset(SSL_EAY_NAMES)
|
||||
else ()
|
||||
# Not sure what to pick for -say- intel, let's use the toplevel ones and hope someone report issues:
|
||||
find_library(LIB_EAY NAMES libeay32 HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_LIBDIR} PATH_SUFFIXES lib)
|
||||
|
||||
find_library(SSL_EAY NAMES ssleay32 HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_LIBDIR} PATH_SUFFIXES lib)
|
||||
|
||||
mark_as_advanced(SSL_EAY LIB_EAY)
|
||||
set(OPENSSL_LIBRARIES ${SSL_EAY} ${LIB_EAY})
|
||||
endif()
|
||||
else()
|
||||
|
||||
find_library(OPENSSL_SSL_LIBRARY NAMES ssl ssleay32 ssleay32MD HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_LIBDIR}
|
||||
PATH_SUFFIXES lib
|
||||
)
|
||||
|
||||
find_library(OPENSSL_CRYPTO_LIBRARY NAMES crypto HINTS ${_OPENSSL_ROOT_HINTS_AND_PATHS} ${_OPENSSL_LIBDIR} PATH_SUFFIXES lib)
|
||||
|
||||
mark_as_advanced(OPENSSL_CRYPTO_LIBRARY OPENSSL_SSL_LIBRARY)
|
||||
|
||||
# compat defines
|
||||
set(OPENSSL_SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY})
|
||||
set(OPENSSL_CRYPTO_LIBRARIES ${OPENSSL_CRYPTO_LIBRARY})
|
||||
|
||||
set(OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
|
||||
|
||||
endif ()
|
||||
|
||||
function(from_hex HEX DEC)
|
||||
string(TOUPPER "${HEX}" HEX)
|
||||
set(_res 0)
|
||||
string(LENGTH "${HEX}" _strlen)
|
||||
|
||||
while (_strlen GREATER 0)
|
||||
math(EXPR _res "${_res} * 16")
|
||||
string(SUBSTRING "${HEX}" 0 1 NIBBLE)
|
||||
string(SUBSTRING "${HEX}" 1 -1 HEX)
|
||||
if (NIBBLE STREQUAL "A")
|
||||
math(EXPR _res "${_res} + 10")
|
||||
elseif (NIBBLE STREQUAL "B")
|
||||
math(EXPR _res "${_res} + 11")
|
||||
elseif (NIBBLE STREQUAL "C")
|
||||
math(EXPR _res "${_res} + 12")
|
||||
elseif (NIBBLE STREQUAL "D")
|
||||
math(EXPR _res "${_res} + 13")
|
||||
elseif (NIBBLE STREQUAL "E")
|
||||
math(EXPR _res "${_res} + 14")
|
||||
elseif (NIBBLE STREQUAL "F")
|
||||
math(EXPR _res "${_res} + 15")
|
||||
else()
|
||||
math(EXPR _res "${_res} + ${NIBBLE}")
|
||||
endif()
|
||||
|
||||
string(LENGTH "${HEX}" _strlen)
|
||||
endwhile()
|
||||
|
||||
set(${DEC} ${_res} PARENT_SCOPE)
|
||||
endfunction()
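The from_hex() helper above accumulates the result one hex nibble at a time: multiply the running total by 16, then add the value of the next digit. A minimal JavaScript sketch of the same idea, written in the style of the example scripts later in this diff (the fromHex name is hypothetical and not part of the CMake module):

// Hypothetical JavaScript equivalent of the from_hex() macro above:
// repeatedly multiply the accumulator by 16 and add the next nibble's value.
function fromHex(hex) {
    var DIGITS = "0123456789ABCDEF";
    var result = 0;
    hex = hex.toUpperCase();
    for (var i = 0; i < hex.length; i++) {
        result = result * 16 + DIGITS.indexOf(hex.charAt(i));
    }
    return result;
}
print(fromHex("1F")); // 31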
|
||||
|
||||
if (OPENSSL_INCLUDE_DIR)
|
||||
if(OPENSSL_INCLUDE_DIR AND EXISTS "${OPENSSL_INCLUDE_DIR}/openssl/opensslv.h")
|
||||
file(STRINGS "${OPENSSL_INCLUDE_DIR}/openssl/opensslv.h" openssl_version_str
|
||||
REGEX "^#define[\t ]+OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])+.*")
|
||||
|
||||
# The version number is encoded as 0xMNNFFPPS: major minor fix patch status
|
||||
# The status indicates whether this is a developer or prerelease build and is ignored here.
|
||||
# Major, minor, and fix directly translate into the version numbers shown in
|
||||
# the string. The patch field translates to the single character suffix that
|
||||
# indicates the bug fix state, where 00 -> nothing, 01 -> a, 02 -> b, and so
|
||||
# on.
|
||||
|
||||
string(REGEX REPLACE "^.*OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F]).*$"
|
||||
"\\1;\\2;\\3;\\4;\\5" OPENSSL_VERSION_LIST "${openssl_version_str}")
|
||||
list(GET OPENSSL_VERSION_LIST 0 OPENSSL_VERSION_MAJOR)
|
||||
list(GET OPENSSL_VERSION_LIST 1 OPENSSL_VERSION_MINOR)
|
||||
from_hex("${OPENSSL_VERSION_MINOR}" OPENSSL_VERSION_MINOR)
|
||||
list(GET OPENSSL_VERSION_LIST 2 OPENSSL_VERSION_FIX)
|
||||
from_hex("${OPENSSL_VERSION_FIX}" OPENSSL_VERSION_FIX)
|
||||
list(GET OPENSSL_VERSION_LIST 3 OPENSSL_VERSION_PATCH)
|
||||
|
||||
if (NOT OPENSSL_VERSION_PATCH STREQUAL "00")
|
||||
from_hex("${OPENSSL_VERSION_PATCH}" _tmp)
|
||||
# 96 is the ASCII code of 'a' minus 1
|
||||
math(EXPR OPENSSL_VERSION_PATCH_ASCII "${_tmp} + 96")
|
||||
unset(_tmp)
|
||||
# Once it is known what OpenSSL will call the patch versions beyond 'z',
|
||||
# this should be updated to handle that, too. This has not happened yet
|
||||
# so it is simply ignored here for now.
|
||||
string(ASCII "${OPENSSL_VERSION_PATCH_ASCII}" OPENSSL_VERSION_PATCH_STRING)
|
||||
endif ()
|
||||
|
||||
set(OPENSSL_VERSION "${OPENSSL_VERSION_MAJOR}.${OPENSSL_VERSION_MINOR}.${OPENSSL_VERSION_FIX}${OPENSSL_VERSION_PATCH_STRING}")
|
||||
endif ()
|
||||
endif ()
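As a worked example of the 0xMNNFFPPS encoding described above: OpenSSL 1.0.1g defines OPENSSL_VERSION_NUMBER as 0x1000107f, which splits into major 1, minor 00, fix 01, patch 07 (the seventh letter, 'g'), and status f. A small JavaScript sketch of that decoding, mirroring what the CMake above computes (the function name is hypothetical):

// Illustrative decode of the 0xMNNFFPPS layout used by opensslv.h.
function decodeOpenSSLVersion(versionNumber) {
    var major = (versionNumber >> 28) & 0xF;
    var minor = (versionNumber >> 20) & 0xFF;
    var fix = (versionNumber >> 12) & 0xFF;
    var patch = (versionNumber >> 4) & 0xFF;
    var patchLetter = (patch > 0) ? String.fromCharCode(96 + patch) : ""; // 96 = 'a' - 1
    return major + "." + minor + "." + fix + patchLetter;
}
print(decodeOpenSSLVersion(0x1000107f)); // 1.0.1g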
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
if (OPENSSL_VERSION)
|
||||
find_package_handle_standard_args(OpenSSL
|
||||
REQUIRED_VARS
|
||||
OPENSSL_LIBRARIES
|
||||
OPENSSL_INCLUDE_DIR
|
||||
VERSION_VAR
|
||||
OPENSSL_VERSION
|
||||
FAIL_MESSAGE
|
||||
"Could NOT find OpenSSL, try to set the path to OpenSSL root folder in the system variable OPENSSL_ROOT_DIR"
|
||||
)
|
||||
else ()
|
||||
find_package_handle_standard_args(OpenSSL "Could NOT find OpenSSL, try to set the path to OpenSSL root folder in the system variable OPENSSL_ROOT_DIR"
|
||||
OPENSSL_LIBRARIES
|
||||
OPENSSL_INCLUDE_DIR
|
||||
)
|
||||
endif ()
|
||||
|
||||
mark_as_advanced(OPENSSL_INCLUDE_DIR OPENSSL_LIBRARIES OPENSSL_SEARCH_DIRS)
|
|
@@ -1,4 +1,4 @@
|
|||
# Try to find the PrioVT library
|
||||
# Try to find the PrioVR library
|
||||
#
|
||||
# You must provide a PRIOVR_ROOT_DIR which contains lib and include directories
|
||||
#
|
||||
|
@@ -12,31 +12,13 @@
|
|||
# Copyright (c) 2014 High Fidelity
|
||||
#
|
||||
|
||||
if (PRIOVR_LIBRARIES AND PRIOVR_INCLUDE_DIRS)
|
||||
# in cache already
|
||||
set(PRIOVR_FOUND TRUE)
|
||||
else (PRIOVR_LIBRARIES AND PRIOVR_INCLUDE_DIRS)
|
||||
find_path(PRIOVR_INCLUDE_DIRS yei_skeletal_api.h ${PRIOVR_ROOT_DIR}/include)
|
||||
find_path(PRIOVR_INCLUDE_DIRS yei_skeletal_api.h ${PRIOVR_ROOT_DIR}/include)
|
||||
|
||||
if (WIN32)
|
||||
find_library(PRIOVR_LIBRARIES Skeletal_API.lib ${PRIOVR_ROOT_DIR}/lib)
|
||||
endif (WIN32)
|
||||
|
||||
if (PRIOVR_INCLUDE_DIRS AND PRIOVR_LIBRARIES)
|
||||
set(PRIOVR_FOUND TRUE)
|
||||
endif (PRIOVR_INCLUDE_DIRS AND PRIOVR_LIBRARIES)
|
||||
if (WIN32)
|
||||
find_library(PRIOVR_LIBRARIES Skeletal_API.lib ${PRIOVR_ROOT_DIR}/lib)
|
||||
endif (WIN32)
|
||||
|
||||
if (PRIOVR_FOUND)
|
||||
if (NOT PRIOVR_FIND_QUIETLY)
|
||||
message(STATUS "Found PrioVR... ${PRIOVR_LIBRARIES}")
|
||||
endif (NOT PRIOVR_FIND_QUIETLY)
|
||||
else ()
|
||||
if (PRIOVR_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "Could not find PrioVR")
|
||||
endif (PRIOVR_FIND_REQUIRED)
|
||||
endif ()
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(PRIOVR DEFAULT_MSG PRIOVR_INCLUDE_DIRS PRIOVR_LIBRARIES)
|
||||
|
||||
# show the PRIOVR_INCLUDE_DIRS and PRIOVR_LIBRARIES variables only in the advanced view
|
||||
mark_as_advanced(PRIOVR_INCLUDE_DIRS PRIOVR_LIBRARIES)
|
||||
|
||||
endif (PRIOVR_LIBRARIES AND PRIOVR_INCLUDE_DIRS)
|
||||
mark_as_advanced(PRIOVR_INCLUDE_DIRS PRIOVR_LIBRARIES)
|
|
@@ -9,7 +9,7 @@
|
|||
#
|
||||
# QXMPP_FOUND - system found qxmpp
|
||||
# QXMPP_INCLUDE_DIRS - the qxmpp include directory
|
||||
# QXMPP_LIBRARY - Link this to use qxmpp
|
||||
# QXMPP_LIBRARIES - Link this to use qxmpp
|
||||
#
|
||||
# Created on 3/10/2014 by Stephen Birarda
|
||||
# Copyright 2014 High Fidelity, Inc.
|
||||
|
@@ -18,17 +18,20 @@
|
|||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
if (QXMPP_LIBRARIES AND QXMPP_INCLUDE_DIRS)
|
||||
# in cache already
|
||||
set(QXMPP_FOUND TRUE)
|
||||
else ()
|
||||
|
||||
set(QXMPP_SEARCH_DIRS "${QXMPP_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/qxmpp")
|
||||
|
||||
find_path(QXMPP_INCLUDE_DIR QXmppClient.h PATH_SUFFIXES include/qxmpp HINTS ${QXMPP_SEARCH_DIRS})
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("qxmpp")
|
||||
|
||||
find_library(QXMPP_LIBRARY NAMES qxmpp qxmpp0 qxmpp_d PATH_SUFFIXES lib HINTS ${QXMPP_SEARCH_DIRS})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(QXMPP DEFAULT_MSG QXMPP_INCLUDE_DIR QXMPP_LIBRARY)
|
||||
endif ()
|
||||
find_path(QXMPP_INCLUDE_DIRS QXmppClient.h PATH_SUFFIXES include/qxmpp HINTS ${QXMPP_SEARCH_DIRS})
|
||||
|
||||
find_library(QXMPP_LIBRARY_RELEASE NAMES qxmpp PATH_SUFFIXES lib HINTS ${QXMPP_SEARCH_DIRS})
|
||||
find_library(QXMPP_LIBRARY_DEBUG NAMES qxmpp_d PATH_SUFFIXES lib HINTS ${QXMPP_SEARCH_DIRS})
|
||||
|
||||
include(SelectLibraryConfigurations)
|
||||
select_library_configurations(QXMPP)
|
||||
|
||||
set(QXMPP_LIBRARIES "${QXMPP_LIBRARY}")
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(QXMPP DEFAULT_MSG QXMPP_INCLUDE_DIRS QXMPP_LIBRARIES)
|
||||
|
||||
mark_as_advanced(QXMPP_INCLUDE_DIRS QXMPP_LIBRARIES QXMPP_SEARCH_DIRS)
|
|
@@ -9,7 +9,7 @@
|
|||
#
|
||||
# RTMIDI_FOUND - system found RtMidi
|
||||
# RTMIDI_INCLUDE_DIRS - the RtMidi include directory
|
||||
# RTMIDI_CPP - Include this with src to use RtMidi
|
||||
# RTMIDI_LIBRARIES - link to this to use RtMidi
|
||||
#
|
||||
# Created on 6/30/2014 by Stephen Birarda
|
||||
# Copyright 2014 High Fidelity, Inc.
|
||||
|
@@ -18,16 +18,13 @@
|
|||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
if (RTMIDI_LIBRARIES AND RTMIDI_INCLUDE_DIRS)
|
||||
# in cache already
|
||||
set(RTMIDI_FOUND TRUE)
|
||||
else ()
|
||||
|
||||
set(RTMIDI_SEARCH_DIRS "${RTMIDI_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/rtmidi")
|
||||
|
||||
find_path(RTMIDI_INCLUDE_DIR RtMidi.h PATH_SUFFIXES include HINTS ${RTMIDI_SEARCH_DIRS})
|
||||
find_library(RTMIDI_LIBRARY NAMES rtmidi PATH_SUFFIXES lib HINTS ${RTMIDI_SEARCH_DIRS})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(RTMIDI DEFAULT_MSG RTMIDI_INCLUDE_DIR RTMIDI_LIBRARY)
|
||||
endif ()
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("rtmidi")
|
||||
|
||||
find_path(RTMIDI_INCLUDE_DIRS RtMidi.h PATH_SUFFIXES include HINTS ${RTMIDI_SEARCH_DIRS})
|
||||
find_library(RTMIDI_LIBRARIES NAMES rtmidi PATH_SUFFIXES lib HINTS ${RTMIDI_SEARCH_DIRS})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(RTMIDI DEFAULT_MSG RTMIDI_INCLUDE_DIRS RTMIDI_LIBRARIES)
|
||||
|
||||
mark_as_advanced(RTMIDI_INCLUDE_DIRS RTMIDI_LIBRARIES RTMIDI_SEARCH_DIRS)
|
|
@@ -18,34 +18,28 @@
|
|||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
if (SIXENSE_LIBRARIES AND SIXENSE_INCLUDE_DIRS)
|
||||
# in cache already
|
||||
set(SIXENSE_FOUND TRUE)
|
||||
else ()
|
||||
|
||||
set(SIXENSE_SEARCH_DIRS "${SIXENSE_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/sixense")
|
||||
|
||||
find_path(SIXENSE_INCLUDE_DIRS sixense.h PATH_SUFFIXES include HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("sixense")
|
||||
|
||||
if (APPLE)
|
||||
find_library(SIXENSE_LIBRARIES lib/osx_x64/release_dll/libsixense_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
elseif (UNIX)
|
||||
find_library(SIXENSE_LIBRARIES lib/linux_x64/release/libsixense_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
elseif (WIN32)
|
||||
find_library(SIXENSE_LIBRARIES lib/win32/release_dll/sixense.lib HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
endif ()
|
||||
find_path(SIXENSE_INCLUDE_DIRS sixense.h PATH_SUFFIXES include HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
|
||||
if (SIXENSE_INCLUDE_DIRS AND SIXENSE_LIBRARIES)
|
||||
set(SIXENSE_FOUND TRUE)
|
||||
endif ()
|
||||
|
||||
if (SIXENSE_FOUND)
|
||||
if (NOT SIXENSE_FIND_QUIETLY)
|
||||
message(STATUS "Found Sixense: ${SIXENSE_LIBRARIES}")
|
||||
endif (NOT SIXENSE_FIND_QUIETLY)
|
||||
else ()
|
||||
if (SIXENSE_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "Could not find Sixense")
|
||||
endif (SIXENSE_FIND_REQUIRED)
|
||||
endif ()
|
||||
if (APPLE)
|
||||
find_library(SIXENSE_LIBRARY_RELEASE lib/osx_x64/release_dll/libsixense_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
find_library(SIXENSE_LIBRARY_DEBUG lib/osx_x64/debug_dll/libsixensed_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
elseif (UNIX)
|
||||
find_library(SIXENSE_LIBRARY_RELEASE lib/linux_x64/release/libsixense_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
# find_library(SIXENSE_LIBRARY_DEBUG lib/linux_x64/debug/libsixensed_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
elseif (WIN32)
|
||||
find_library(SIXENSE_LIBRARY_RELEASE lib/win32/release_dll/sixense.lib HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
find_library(SIXENSE_LIBRARY_DEBUG lib/win32/debug_dll/sixensed.lib HINTS ${SIXENSE_SEARCH_DIRS})
|
||||
endif ()
|
||||
|
||||
include(SelectLibraryConfigurations)
|
||||
select_library_configurations(SIXENSE)
|
||||
|
||||
set(SIXENSE_LIBRARIES "${SIXENSE_LIBRARY}")
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(SIXENSE DEFAULT_MSG SIXENSE_INCLUDE_DIRS SIXENSE_LIBRARIES)
|
||||
|
||||
mark_as_advanced(SIXENSE_LIBRARIES SIXENSE_INCLUDE_DIRS SIXENSE_SEARCH_DIRS)
|
||||
|
|
|
@@ -18,65 +18,41 @@
|
|||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
if (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)
|
||||
# in cache already
|
||||
set(VISAGE_FOUND TRUE)
|
||||
else ()
|
||||
find_path(VISAGE_INCLUDE_DIR VisageTracker2.h ${VISAGE_ROOT_DIR}/include)
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("visage")
|
||||
|
||||
find_path(VISAGE_BASE_INCLUDE_DIR VisageTracker2.h PATH_SUFFIXES include HINTS ${VISAGE_SEARCH_DIRS})
|
||||
|
||||
if (APPLE)
|
||||
find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h HINTS /usr/include/libxml2 ${VISAGE_SEARCH_DIRS})
|
||||
find_path(VISAGE_OPENCV_INCLUDE_DIR cv.h PATH_SUFFIXES dependencies/OpenCV_MacOSX/include HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_path(VISAGE_OPENCV2_INCLUDE_DIR opencv.hpp PATH_SUFFIXES dependencies/OpenCV_MacOSX/include/opencv2 HINTS ${VISAGE_SEARCH_DIRS})
|
||||
|
||||
if (APPLE)
|
||||
find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h /usr/include/libxml2)
|
||||
find_path(VISAGE_OPENCV_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include)
|
||||
find_path(VISAGE_OPENCV2_INCLUDE_DIR opencv.hpp ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include/opencv2)
|
||||
if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
|
||||
set(VISAGE_INCLUDE_DIRS
|
||||
"${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}"
|
||||
CACHE INTERNAL "Visage include dirs")
|
||||
endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
|
||||
|
||||
find_library(VISAGE_CORE_LIBRARY libvscore.a ${VISAGE_ROOT_DIR}/lib)
|
||||
find_library(VISAGE_VISION_LIBRARY libvsvision.a ${VISAGE_ROOT_DIR}/lib)
|
||||
find_library(VISAGE_OPENCV_LIBRARY libOpenCV.a ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/lib)
|
||||
if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
|
||||
set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}"
|
||||
CACHE INTERNAL "Visage libraries")
|
||||
endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
|
||||
|
||||
elseif (WIN32)
|
||||
find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h ${VISAGE_ROOT_DIR}/dependencies/libxml2/include)
|
||||
find_path(VISAGE_OPENCV_INCLUDE_DIR opencv/cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include)
|
||||
find_path(VISAGE_OPENCV2_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include/opencv)
|
||||
if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
|
||||
set(VISAGE_INCLUDE_DIRS
|
||||
"${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}"
|
||||
CACHE INTERNAL "Visage include dirs")
|
||||
endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
|
||||
|
||||
find_library(VISAGE_CORE_LIBRARY vscore.lib ${VISAGE_ROOT_DIR}/lib)
|
||||
find_library(VISAGE_VISION_LIBRARY vsvision.lib ${VISAGE_ROOT_DIR}/lib)
|
||||
find_library(VISAGE_OPENCV_LIBRARY opencv_core243.lib ${VISAGE_ROOT_DIR}/dependencies/OpenCV/lib)
|
||||
if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
|
||||
set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}"
|
||||
CACHE INTERNAL "Visage libraries")
|
||||
endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
|
||||
|
||||
endif ()
|
||||
|
||||
if (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES)
|
||||
set(VISAGE_FOUND TRUE)
|
||||
endif (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES)
|
||||
|
||||
if (VISAGE_FOUND)
|
||||
if (NOT VISAGE_FIND_QUIETLY)
|
||||
message(STATUS "Found Visage: ${VISAGE_LIBRARIES}")
|
||||
endif (NOT VISAGE_FIND_QUIETLY)
|
||||
else (VISAGE_FOUND)
|
||||
if (VISAGE_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "Could not find Visage")
|
||||
endif (VISAGE_FIND_REQUIRED)
|
||||
endif (VISAGE_FOUND)
|
||||
|
||||
# show the VISAGE_INCLUDE_DIRS and VISAGE_LIBRARIES variables only in the advanced view
|
||||
mark_as_advanced(VISAGE_INCLUDE_DIRS VISAGE_LIBRARIES)
|
||||
|
||||
find_library(VISAGE_CORE_LIBRARY NAME vscore PATH_SUFFIXES lib HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_library(VISAGE_VISION_LIBRARY NAME vsvision PATH_SUFFIXES lib HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_library(VISAGE_OPENCV_LIBRARY NAME OpenCV PATH_SUFFIXES dependencies/OpenCV_MacOSX/lib ${VISAGE_SEARCH_DIRS})
|
||||
|
||||
elseif (WIN32)
|
||||
find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h PATH_SUFFIXES dependencies/libxml2/include HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_path(VISAGE_OPENCV_INCLUDE_DIR opencv/cv.h PATH_SUFFIXES dependencies/OpenCV/include HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_path(VISAGE_OPENCV2_INCLUDE_DIR cv.h PATH_SUFFIXES dependencies/OpenCV/include/opencv HINTS ${VISAGE_SEARCH_DIRS})
|
||||
|
||||
find_library(VISAGE_CORE_LIBRARY NAME vscore PATH_SUFFIXES lib HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_library(VISAGE_VISION_LIBRARY NAME vsvision PATH_SUFFIXES lib HINTS ${VISAGE_SEARCH_DIRS})
|
||||
find_library(VISAGE_OPENCV_LIBRARY NAME opencv_core243 PATH_SUFFIXES dependencies/OpenCV/lib HINTS ${VISAGE_SEARCH_DIRS})
|
||||
endif ()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(VISAGE DEFAULT_MSG
|
||||
VISAGE_BASE_INCLUDE_DIR VISAGE_XML_INCLUDE_DIR VISAGE_OPENCV_INCLUDE_DIR VISAGE_OPENCV2_INCLUDE_DIR
|
||||
VISAGE_CORE_LIBRARY VISAGE_VISION_LIBRARY VISAGE_OPENCV_LIBRARY
|
||||
)
|
||||
|
||||
set(VISAGE_INCLUDE_DIRS "${VISAGE_XML_INCLUDE_DIR}" "${VISAGE_OPENCV_INCLUDE_DIR}" "${VISAGE_OPENCV2_INCLUDE_DIR}" "${VISAGE_BASE_INCLUDE_DIR}")
|
||||
set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY}" "${VISAGE_VISION_LIBRARY}" "${VISAGE_OPENCV_LIBRARY}")
|
||||
|
||||
mark_as_advanced(
|
||||
VISAGE_INCLUDE_DIRS VISAGE_LIBRARIES
|
||||
VISAGE_BASE_INCLUDE_DIR VISAGE_XML_INCLUDE_DIR VISAGE_OPENCV_INCLUDE_DIR VISAGE_OPENCV2_INCLUDE_DIR
|
||||
VISAGE_CORE_LIBRARY VISAGE_VISION_LIBRARY VISAGE_OPENCV_LIBRARY VISAGE_SEARCH_DIRS
|
||||
)
|
||||
|
|
|
@@ -10,185 +10,127 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var localLightDirections = [ {x: 1.0, y:1.0, z: 0.0}, {x: 0.0, y:1.0, z: 1.0} ];
|
||||
var localLightColors = [ {x: 0.0, y:1.0, z: 0.0}, {x: 1.0, y:0.0, z: 0.0} ];
|
||||
var localLightDirections = [ {x: 1.0, y:0.0, z: 0.0}, {x: 0.0, y:0.0, z: 1.0} ];
|
||||
var localLightColors = [ {x: 0.4, y:0.335, z: 0.266}, {x: 0.4, y:0.335, z: 0.266} ];
|
||||
|
||||
var currentSelection = 0;
|
||||
var currentNumLights = 1;
|
||||
var currentNumLights = 2;
|
||||
var maxNumLights = 2;
|
||||
var currentNumAvatars = 0;
|
||||
var avatarHashIDs = [];
|
||||
var changeDelta = 0.1;
|
||||
var lightsDirty = true;
|
||||
|
||||
function keyPressEvent(event) {
|
||||
|
||||
var choice = parseInt(event.text);
|
||||
|
||||
if (event.text == "1") {
|
||||
currentSelection = 0;
|
||||
print("light selection = " + currentSelection);
|
||||
currentSelection = 0;
|
||||
print("light selection = " + currentSelection);
|
||||
}
|
||||
else if (event.text == "2" ) {
|
||||
currentSelection = 1;
|
||||
print("light selection = " + currentSelection);
|
||||
currentSelection = 1;
|
||||
print("light selection = " + currentSelection);
|
||||
}
|
||||
else if (event.text == "3" ) {
|
||||
currentSelection = 2;
|
||||
print("light selection = " + currentSelection);
|
||||
currentSelection = 2;
|
||||
print("light selection = " + currentSelection);
|
||||
}
|
||||
else if (event.text == "4" ) {
|
||||
currentSelection = 3;
|
||||
print("light selection = " + currentSelection);
|
||||
currentSelection = 3;
|
||||
print("light selection = " + currentSelection);
|
||||
}
|
||||
else if (event.text == "5" ) {
|
||||
localLightColors[currentSelection].x += 0.01;
|
||||
if ( localLightColors[currentSelection].x > 1.0) {
|
||||
localLightColors[currentSelection].x = 0.0;
|
||||
}
|
||||
|
||||
setAllLightColors();
|
||||
print("CHANGE RED light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
|
||||
localLightColors[currentSelection].x += changeDelta;
|
||||
if ( localLightColors[currentSelection].x > 1.0) {
|
||||
localLightColors[currentSelection].x = 0.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("CHANGE RED light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "6" ) {
|
||||
localLightColors[currentSelection].y += 0.01;
|
||||
if ( localLightColors[currentSelection].y > 1.0) {
|
||||
localLightColors[currentSelection].y = 0.0;
|
||||
}
|
||||
|
||||
setAllLightColors();
|
||||
print("CHANGE GREEN light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
|
||||
localLightColors[currentSelection].y += changeDelta;
|
||||
if ( localLightColors[currentSelection].y > 1.0) {
|
||||
localLightColors[currentSelection].y = 0.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("CHANGE GREEN light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "7" ) {
|
||||
localLightColors[currentSelection].z += 0.01;
|
||||
if ( localLightColors[currentSelection].z > 1.0) {
|
||||
localLightColors[currentSelection].z = 0.0;
|
||||
}
|
||||
|
||||
setAllLightColors();
|
||||
print("CHANGE BLUE light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
|
||||
localLightColors[currentSelection].z += changeDelta;
|
||||
if ( localLightColors[currentSelection].z > 1.0) {
|
||||
localLightColors[currentSelection].z = 0.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("CHANGE BLUE light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "8" ) {
|
||||
localLightDirections[currentSelection].x += 0.01;
|
||||
if (localLightDirections[currentSelection].x > 1.0) {
|
||||
localLightDirections[currentSelection].x = -1.0;
|
||||
}
|
||||
|
||||
setAllLightDirections();
|
||||
print("PLUS X light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
localLightDirections[currentSelection].x += changeDelta;
|
||||
if (localLightDirections[currentSelection].x > 1.0) {
|
||||
localLightDirections[currentSelection].x = -1.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("PLUS X light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "9" ) {
|
||||
localLightDirections[currentSelection].x -= 0.01;
|
||||
if (localLightDirections[currentSelection].x < -1.0) {
|
||||
localLightDirections[currentSelection].x = 1.0;
|
||||
}
|
||||
|
||||
setAllLightDirections();
|
||||
print("MINUS X light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
localLightDirections[currentSelection].x -= changeDelta;
|
||||
if (localLightDirections[currentSelection].x < -1.0) {
|
||||
localLightDirections[currentSelection].x = 1.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("MINUS X light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "0" ) {
|
||||
localLightDirections[currentSelection].y += 0.01;
|
||||
if (localLightDirections[currentSelection].y > 1.0) {
|
||||
localLightDirections[currentSelection].y = -1.0;
|
||||
}
|
||||
|
||||
setAllLightDirections();
|
||||
print("PLUS Y light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
localLightDirections[currentSelection].y += changeDelta;
|
||||
if (localLightDirections[currentSelection].y > 1.0) {
|
||||
localLightDirections[currentSelection].y = -1.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("PLUS Y light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "-" ) {
|
||||
localLightDirections[currentSelection].y -= 0.01;
|
||||
if (localLightDirections[currentSelection].y < -1.0) {
|
||||
localLightDirections[currentSelection].y = 1.0;
|
||||
}
|
||||
|
||||
setAllLightDirections();
|
||||
print("MINUS Y light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
localLightDirections[currentSelection].y -= changeDelta;
|
||||
if (localLightDirections[currentSelection].y < -1.0) {
|
||||
localLightDirections[currentSelection].y = 1.0;
|
||||
}
|
||||
|
||||
lightsDirty = true;
|
||||
print("MINUS Y light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
|
||||
}
|
||||
else if (event.text == "," ) {
|
||||
if (currentNumLights + 1 <= maxNumLights) {
|
||||
++currentNumLights;
|
||||
|
||||
for (var i = 0; i < currentNumAvatars; i++) {
|
||||
AvatarManager.addAvatarLocalLight(i);
|
||||
|
||||
for (var j = 0; j < currentNumLights; j++) {
|
||||
AvatarManager.setAvatarLightColor(localLightColors[j], j, i);
|
||||
AvatarManager.setAvatarLightDirection(localLightDirections[j], j, i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
print("ADD LIGHT, number of lights " + currentNumLights);
|
||||
if (currentNumLights + 1 <= maxNumLights) {
|
||||
++currentNumLights;
|
||||
lightsDirty = true;
|
||||
}
|
||||
|
||||
print("ADD LIGHT, number of lights " + currentNumLights);
|
||||
}
|
||||
else if (event.text == "." ) {
|
||||
if (currentNumLights - 1 >= 0 ) {
|
||||
--currentNumLights;
|
||||
|
||||
for (var i = 0; i < currentNumAvatars; i++) {
|
||||
AvatarManager.removeAvatarLocalLight(i);
|
||||
|
||||
for (var j = 0; j < currentNumLights; j++) {
|
||||
AvatarManager.setAvatarLightColor(localLightColors[j], j, i);
|
||||
AvatarManager.setAvatarLightDirection(localLightDirections[j], j, i);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
print("REMOVE LIGHT, number of lights " + currentNumLights);
|
||||
if (currentNumLights - 1 >= 0 ) {
|
||||
--currentNumLights;
|
||||
lightsDirty = true;
|
||||
}
|
||||
|
||||
print("REMOVE LIGHT, number of lights " + currentNumLights);
|
||||
}
|
||||
}
|
||||
|
||||
function updateLocalLights()
|
||||
{
|
||||
// new avatars, so add lights
|
||||
var numAvatars = AvatarManager.getNumAvatars();
|
||||
if (numAvatars != currentNumAvatars) {
|
||||
|
||||
for (var i = 0; i < numAvatars; i++) {
|
||||
var id = AvatarManager.getAvatarHashKey(i);
|
||||
|
||||
// check if avatar has already been registered
|
||||
var hasRegistered = false;
|
||||
for (var j = 0; j < numAvatars; j++) {
|
||||
if (avatarHashIDs[j] == id) {
|
||||
hasRegistered = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// add new id and set light params
|
||||
if (!hasRegistered) {
|
||||
|
||||
avatarHashIDs.push(id);
|
||||
AvatarManager.addAvatarLocalLight(i);
|
||||
|
||||
// set color and direction for new avatar
|
||||
for (var j = 0; j < maxNumLights; j++) {
|
||||
AvatarManager.setAvatarLightColor(localLightColors[j], j, i);
|
||||
AvatarManager.setAvatarLightDirection(localLightDirections[j], j, i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currentNumAvatars = numAvatars;
|
||||
}
|
||||
}
|
||||
|
||||
function setAllLightColors()
|
||||
{
|
||||
for (var i = 0; i < currentNumAvatars; i++) {
|
||||
for (var j = 0; j < maxNumLights; j++) {
|
||||
AvatarManager.setAvatarLightColor(localLightColors[j], j, i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function setAllLightDirections()
|
||||
{
|
||||
for (var i = 0; i < currentNumAvatars; i++) {
|
||||
for (var j = 0; j < maxNumLights; j++) {
|
||||
AvatarManager.setAvatarLightDirection(localLightDirections[j], j, i);
|
||||
}
|
||||
}
|
||||
if (lightsDirty) {
|
||||
var localLights = [];
|
||||
for (var i = 0; i < currentNumLights; i++) {
|
||||
localLights.push({ direction: localLightDirections[i], color: localLightColors[i] });
|
||||
}
|
||||
AvatarManager.setLocalLights(localLights);
|
||||
lightsDirty = false;
|
||||
}
|
||||
}
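With the lightsDirty flag above, the per-avatar addAvatarLocalLight/setAvatarLightColor/setAvatarLightDirection loops from the old code collapse into one batched call. A minimal usage sketch, assuming only the AvatarManager.setLocalLights() API that this diff introduces:

// Build the light list once and hand it to the avatar manager in a single call;
// the colors and directions below are the defaults declared at the top of this script.
var lights = [
    { direction: { x: 1.0, y: 0.0, z: 0.0 }, color: { x: 0.4, y: 0.335, z: 0.266 } },
    { direction: { x: 0.0, y: 0.0, z: 1.0 }, color: { x: 0.4, y: 0.335, z: 0.266 } }
];
AvatarManager.setLocalLights(lights); // replaces the removed per-avatar loops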
|
||||
|
||||
// main
|
||||
|
|
144
examples/bot_randomExpression.js
Normal file
|
@@ -0,0 +1,144 @@
|
|||
//
|
||||
// bot_randomExpression.js
|
||||
// examples
|
||||
//
|
||||
// Created by Ben Arnold on 7/23/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example script that demonstrates an NPC avatar with
|
||||
// random facial expressions.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
function getRandomFloat(min, max) {
|
||||
return Math.random() * (max - min) + min;
|
||||
}
|
||||
|
||||
function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
}
|
||||
|
||||
function printVector(string, vector) {
|
||||
print(string + " " + vector.x + ", " + vector.y + ", " + vector.z);
|
||||
}
|
||||
|
||||
var timePassed = 0.0;
|
||||
var updateSpeed = 3.0;
|
||||
|
||||
var X_MIN = 5.0;
|
||||
var X_MAX = 15.0;
|
||||
var Z_MIN = 5.0;
|
||||
var Z_MAX = 15.0;
|
||||
var Y_PELVIS = 1.0;
|
||||
|
||||
// pick an integer between 1 and 100 for the body model for this bot
|
||||
botNumber = getRandomInt(1, 100);
|
||||
|
||||
newFaceFilePrefix = "ron";
|
||||
|
||||
newBodyFilePrefix = "bot" + botNumber;
|
||||
|
||||
// set the face model fst to the default "ron" face
|
||||
// the body model and billboard are chosen using the bot number
|
||||
Avatar.faceModelURL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/meshes/" + newFaceFilePrefix + ".fst";
|
||||
Avatar.skeletonModelURL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/meshes/" + newBodyFilePrefix + ".fst";
|
||||
Avatar.billboardURL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/meshes/billboards/bot" + botNumber + ".png";
|
||||
|
||||
Agent.isAvatar = true;
|
||||
Agent.isListeningToAudioStream = true;
|
||||
|
||||
// change the avatar's position to a random one
|
||||
Avatar.position = { x: getRandomFloat(X_MIN, X_MAX), y: Y_PELVIS, z: getRandomFloat(Z_MIN, Z_MAX) };;
|
||||
printVector("New bot, position = ", Avatar.position);
|
||||
|
||||
var allBlendShapes = [];
|
||||
var targetBlendCoefficient = [];
|
||||
var currentBlendCoefficient = [];
|
||||
|
||||
function addBlendShape(s) {
|
||||
allBlendShapes[allBlendShapes.length] = s;
|
||||
}
|
||||
|
||||
// It is imperative that the following blendshapes are all present and in the correct order
|
||||
addBlendShape("EyeBlink_L");
|
||||
addBlendShape("EyeBlink_R");
|
||||
addBlendShape("EyeSquint_L");
|
||||
addBlendShape("EyeSquint_R");
|
||||
addBlendShape("EyeDown_L");
|
||||
addBlendShape("EyeDown_R");
|
||||
addBlendShape("EyeIn_L");
|
||||
addBlendShape("EyeIn_R");
|
||||
addBlendShape("EyeOpen_L");
|
||||
addBlendShape("EyeOpen_R");
|
||||
addBlendShape("EyeOut_L");
|
||||
addBlendShape("EyeOut_R");
|
||||
addBlendShape("EyeUp_L");
|
||||
addBlendShape("EyeUp_R");
|
||||
addBlendShape("BrowsD_L");
|
||||
addBlendShape("BrowsD_R");
|
||||
addBlendShape("BrowsU_C");
|
||||
addBlendShape("BrowsU_L");
|
||||
addBlendShape("BrowsU_R");
|
||||
addBlendShape("JawFwd");
|
||||
addBlendShape("JawLeft");
|
||||
addBlendShape("JawOpen");
|
||||
addBlendShape("JawChew");
|
||||
addBlendShape("JawRight");
|
||||
addBlendShape("MouthLeft");
|
||||
addBlendShape("MouthRight");
|
||||
addBlendShape("MouthFrown_L");
|
||||
addBlendShape("MouthFrown_R");
|
||||
addBlendShape("MouthSmile_L");
|
||||
addBlendShape("MouthSmile_R");
|
||||
addBlendShape("MouthDimple_L");
|
||||
addBlendShape("MouthDimple_R");
|
||||
addBlendShape("LipsStretch_L");
|
||||
addBlendShape("LipsStretch_R");
|
||||
addBlendShape("LipsUpperClose");
|
||||
addBlendShape("LipsLowerClose");
|
||||
addBlendShape("LipsUpperUp");
|
||||
addBlendShape("LipsLowerDown");
|
||||
addBlendShape("LipsUpperOpen");
|
||||
addBlendShape("LipsLowerOpen");
|
||||
addBlendShape("LipsFunnel");
|
||||
addBlendShape("LipsPucker");
|
||||
addBlendShape("ChinLowerRaise");
|
||||
addBlendShape("ChinUpperRaise");
|
||||
addBlendShape("Sneer");
|
||||
addBlendShape("Puff");
|
||||
addBlendShape("CheekSquint_L");
|
||||
addBlendShape("CheekSquint_R");
|
||||
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
targetBlendCoefficient[i] = 0;
|
||||
currentBlendCoefficient[i] = 0;
|
||||
}
|
||||
|
||||
function setRandomExpression() {
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
targetBlendCoefficient[i] = Math.random();
|
||||
}
|
||||
}
|
||||
|
||||
var expressionChangeSpeed = 0.1;
|
||||
|
||||
function updateBlendShapes(deltaTime) {
|
||||
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
currentBlendCoefficient[i] += (targetBlendCoefficient[i] - currentBlendCoefficient[i]) * expressionChangeSpeed;
|
||||
Avatar.setBlendshape(allBlendShapes[i], currentBlendCoefficient[i]);
|
||||
}
|
||||
}
|
||||
|
||||
function update(deltaTime) {
|
||||
timePassed += deltaTime;
|
||||
if (timePassed > updateSpeed) {
|
||||
timePassed = 0;
|
||||
setRandomExpression();
|
||||
}
|
||||
updateBlendShapes(deltaTime);
|
||||
}
|
||||
|
||||
Script.update.connect(update);
|
|
@@ -665,7 +665,8 @@ function checkController(deltaTime) {
|
|||
|
||||
moveOverlays();
|
||||
}
|
||||
|
||||
var newModel;
|
||||
var browser;
|
||||
function initToolBar() {
|
||||
toolBar = new ToolBar(0, 0, ToolBar.VERTICAL);
|
||||
// New Model
|
||||
|
@@ -676,6 +677,12 @@ function initToolBar() {
|
|||
visible: true,
|
||||
alpha: 0.9
|
||||
});
|
||||
browser = toolBar.addTool({
|
||||
imageURL: toolIconUrl + "list-icon.png",
|
||||
width: toolWidth, height: toolHeight,
|
||||
visible: true,
|
||||
alpha: 0.7
|
||||
});
|
||||
}
|
||||
|
||||
function moveOverlays() {
|
||||
|
@ -780,8 +787,25 @@ function mousePressEvent(event) {
|
|||
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
|
||||
|
||||
if (newModel == toolBar.clicked(clickedOverlay)) {
|
||||
var url = Window.prompt("Model url", modelURLs[Math.floor(Math.random() * modelURLs.length)]);
|
||||
if (url == null) {
|
||||
var url = Window.prompt("Model URL", modelURLs[Math.floor(Math.random() * modelURLs.length)]);
|
||||
if (url == null || url == "") {
|
||||
return;
|
||||
}
|
||||
|
||||
var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE));
|
||||
|
||||
if (position.x > 0 && position.y > 0 && position.z > 0) {
|
||||
Models.addModel({ position: position,
|
||||
radius: radiusDefault,
|
||||
modelURL: url
|
||||
});
|
||||
} else {
|
||||
print("Can't create model: Model would be out of bounds.");
|
||||
}
|
||||
|
||||
} else if (browser == toolBar.clicked(clickedOverlay)) {
|
||||
var url = Window.s3Browse(".*(fbx|FBX)");
|
||||
if (url == null || url == "") {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@@ -1119,25 +1143,37 @@ function handeMenuEvent(menuItem){
|
|||
}
|
||||
if (editModelID != -1) {
|
||||
print(" Edit Properties.... about to edit properties...");
|
||||
var propertyName = Window.prompt("Which property would you like to change?", "modelURL");
|
||||
|
||||
var properties = Entities.getEntityProperties(editModelID);
|
||||
var oldValue = properties[propertyName];
|
||||
var newValue = Window.prompt("New value for: " + propertyName, oldValue);
|
||||
if (newValue != "") {
|
||||
if (propertyName == "color") {
|
||||
if (newValue == "red") {
|
||||
newValue = { red: 255, green: 0, blue: 0 };
|
||||
} else if (newValue == "green") {
|
||||
newValue = { red: 0, green: 255, blue: 0 };
|
||||
} else if (newValue == "blue") {
|
||||
newValue = { red: 0, green: 0, blue: 255 };
|
||||
} else {
|
||||
newValue = { red: 0, green: 0, blue: 0 };
|
||||
}
|
||||
}
|
||||
properties[propertyName] = newValue;
|
||||
Entities.editEntity(editModelID, properties);
|
||||
}
|
||||
|
||||
var array = new Array();
|
||||
var decimals = 3;
|
||||
array.push({ label: "Model URL:", value: properties.modelURL });
|
||||
array.push({ label: "Animation URL:", value: properties.animationURL });
|
||||
array.push({ label: "X:", value: properties.position.x.toFixed(decimals) });
|
||||
array.push({ label: "Y:", value: properties.position.y.toFixed(decimals) });
|
||||
array.push({ label: "Z:", value: properties.position.z.toFixed(decimals) });
|
||||
var angles = Quat.safeEulerAngles(properties.modelRotation);
|
||||
array.push({ label: "Pitch:", value: angles.x.toFixed(decimals) });
|
||||
array.push({ label: "Yaw:", value: angles.y.toFixed(decimals) });
|
||||
array.push({ label: "Roll:", value: angles.z.toFixed(decimals) });
|
||||
array.push({ label: "Scale:", value: 2 * properties.radius.toFixed(decimals) });
|
||||
|
||||
var propertyName = Window.form("Edit Properties", array);
|
||||
modelSelected = false;
|
||||
|
||||
properties.modelURL = array[0].value;
|
||||
properties.animationURL = array[1].value;
|
||||
properties.position.x = array[2].value;
|
||||
properties.position.y = array[3].value;
|
||||
properties.position.z = array[4].value;
|
||||
angles.x = array[5].value;
|
||||
angles.y = array[6].value;
|
||||
angles.z = array[7].value;
|
||||
properties.modelRotation = Quat.fromVec3Degrees(angles);
|
||||
properties.radius = array[8].value / 2;
|
||||
|
||||
Entities.editEntity(editModelID, properties);
|
||||
}
|
||||
}
|
||||
tooltip.show(false);
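The edit flow above builds an array of { label, value } rows, passes it to Window.form(), and then reads the (possibly edited) value fields back out of the same array. A stripped-down sketch of that pattern, assuming only the Window.form() behaviour visible in this hunk (the URL below is a placeholder):

// Minimal sketch of the Window.form() label/value pattern used above.
var rows = new Array();
rows.push({ label: "Model URL:", value: "http://example.com/model.fbx" }); // placeholder
rows.push({ label: "Scale:", value: "1.000" });
Window.form("Edit Properties", rows);
// the hunk above reads the fields back right after the call returns
print("New model URL: " + rows[0].value);
print("New scale: " + rows[1].value);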
|
||||
|
|
|
@@ -37,7 +37,7 @@ var WHITE_COLOR = { red: 255, green: 255, blue: 255 };
|
|||
var MAX_PASTE_VOXEL_SCALE = 256;
|
||||
var MIN_PASTE_VOXEL_SCALE = .256;
|
||||
|
||||
var zFightingSizeAdjust = 0.002; // used to adjust preview voxels to prevent z fighting
|
||||
var zFightingSizeAdjustRatio = 0.004; // used to adjust preview voxels to prevent z fighting
|
||||
var previewLineWidth = 1.5;
|
||||
|
||||
var inspectJsIsRunning = false;
|
||||
|
@@ -51,7 +51,6 @@ var lastVoxelScale = 0;
|
|||
var dragStart = { x: 0, y: 0 };
|
||||
var wheelPixelsMoved = 0;
|
||||
|
||||
|
||||
var mouseX = 0;
|
||||
var mouseY = 0;
|
||||
|
||||
|
@@ -65,7 +64,7 @@ colors[4] = { red: 236, green: 174, blue: 0 };
|
|||
colors[5] = { red: 234, green: 133, blue: 0 };
|
||||
colors[6] = { red: 211, green: 115, blue: 0 };
|
||||
colors[7] = { red: 48, green: 116, blue: 119 };
|
||||
colors[8] = { red: 31, green: 64, blue: 64 };
|
||||
colors[8] = { red: 36, green: 64, blue: 64 };
|
||||
var numColors = 9;
|
||||
var whichColor = 0; // Starting color is 'Copy' mode
|
||||
|
||||
|
@@ -168,7 +167,16 @@ var voxelPreview = Overlays.addOverlay("cube", {
|
|||
lineWidth: 4
|
||||
});
|
||||
|
||||
var linePreviewTop = Overlays.addOverlay("line3d", {
|
||||
var linePreviewTop = [];
|
||||
var linePreviewBottom = [];
|
||||
var linePreviewLeft = [];
|
||||
var linePreviewRight = [];
|
||||
|
||||
// Current cursor index
|
||||
var currentCursor = 0;
|
||||
|
||||
function addLineOverlay() {
|
||||
return Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 255, green: 255, blue: 255},
|
||||
|
@@ -176,34 +184,24 @@ var linePreviewTop = Overlays.addOverlay("line3d", {
|
|||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
}
|
||||
|
||||
// Cursor line previews for up to three cursors
|
||||
linePreviewTop[0] = addLineOverlay();
|
||||
linePreviewTop[1] = addLineOverlay();
|
||||
linePreviewTop[2] = addLineOverlay();
|
||||
|
||||
var linePreviewBottom = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 255, green: 255, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
var linePreviewLeft = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 255, green: 255, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
var linePreviewRight = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 255, green: 255, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
linePreviewBottom[0] = addLineOverlay();
|
||||
linePreviewBottom[1] = addLineOverlay();
|
||||
linePreviewBottom[2] = addLineOverlay();
|
||||
|
||||
linePreviewLeft[0] = addLineOverlay();
|
||||
linePreviewLeft[1] = addLineOverlay();
|
||||
linePreviewLeft[2] = addLineOverlay();
|
||||
|
||||
linePreviewRight[0] = addLineOverlay();
|
||||
linePreviewRight[1] = addLineOverlay();
|
||||
linePreviewRight[2] = addLineOverlay();
|
||||
|
||||
// these will be used below
|
||||
var scaleSelectorWidth = 144;
|
||||
|
@@ -698,79 +696,99 @@ function calculateVoxelFromIntersection(intersection, operation) {
|
|||
if (wantDebug) {
|
||||
print("wantAddAdjust="+wantAddAdjust);
|
||||
}
|
||||
|
||||
var zFightingSizeAdjust = zFightingSizeAdjustRatio * intersection.distance;
|
||||
|
||||
// now we also want to calculate the "edge square" for the face for this voxel
|
||||
if (intersection.face == "MIN_X_FACE") {
|
||||
|
||||
|
||||
highlightAt.x = x - zFightingSizeAdjust;
|
||||
highlightAt.y = y + zFightingSizeAdjust;
|
||||
highlightAt.z = z + zFightingSizeAdjust;
|
||||
voxelSize -= 2 * zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.x -= voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z + voxelSize };
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize, z: highlightAt.z };
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize, z: highlightAt.z + voxelSize };
|
||||
|
||||
} else if (intersection.face == "MAX_X_FACE") {
|
||||
|
||||
highlightAt.x = x + voxelSize + zFightingSizeAdjust;
|
||||
highlightAt.y = y + zFightingSizeAdjust;
|
||||
highlightAt.z = z + zFightingSizeAdjust;
|
||||
voxelSize -= 2 * zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.x += resultVoxel.s;
|
||||
}
|
||||
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z + voxelSize };
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize, z: highlightAt.z };
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize, z: highlightAt.z + voxelSize };
|
||||
|
||||
} else if (intersection.face == "MIN_Y_FACE") {
|
||||
|
||||
highlightAt.x = x + zFightingSizeAdjust;
|
||||
highlightAt.y = y - zFightingSizeAdjust;
|
||||
highlightAt.z = z + zFightingSizeAdjust;
|
||||
voxelSize -= 2 * zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.y -= voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.topRight = {x: highlightAt.x + zFightingSizeAdjust , y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + zFightingSizeAdjust , y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust , y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.topRight = {x: highlightAt.x , y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize, y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x , y: highlightAt.y, z: highlightAt.z + voxelSize };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize , y: highlightAt.y, z: highlightAt.z + voxelSize };
|
||||
|
||||
} else if (intersection.face == "MAX_Y_FACE") {
|
||||
|
||||
highlightAt.x = x + zFightingSizeAdjust;
|
||||
highlightAt.y = y + voxelSize + zFightingSizeAdjust;
|
||||
highlightAt.z = z + zFightingSizeAdjust;
|
||||
voxelSize -= 2 * zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.y += voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust};
|
||||
resultVoxel.topRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust};
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust};
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize, y: highlightAt.y, z: highlightAt.z};
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z + voxelSize};
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize, y: highlightAt.y, z: highlightAt.z + voxelSize};
|
||||
|
||||
} else if (intersection.face == "MIN_Z_FACE") {
|
||||
|
||||
highlightAt.x = x + zFightingSizeAdjust;
|
||||
highlightAt.y = y + zFightingSizeAdjust;
|
||||
highlightAt.z = z - zFightingSizeAdjust;
|
||||
voxelSize -= 2 * zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.z -= voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z};
|
||||
resultVoxel.topRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z};
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize, y: highlightAt.y, z: highlightAt.z};
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize, z: highlightAt.z };
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize, y: highlightAt.y + voxelSize, z: highlightAt.z};
|
||||
|
||||
} else if (intersection.face == "MAX_Z_FACE") {
|
||||
|
||||
highlightAt.x = x + zFightingSizeAdjust;
|
||||
highlightAt.y = y + zFightingSizeAdjust;
|
||||
highlightAt.z = z + voxelSize + zFightingSizeAdjust;
|
||||
voxelSize -= 2 * zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.z += voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z};
|
||||
resultVoxel.topLeft = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.topRight = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z};
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y, z: highlightAt.z };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + voxelSize, y: highlightAt.y, z: highlightAt.z};
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize, z: highlightAt.z };
|
||||
resultVoxel.topRight = {x: highlightAt.x + voxelSize, y: highlightAt.y + voxelSize, z: highlightAt.z};
|
||||
|
||||
}
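The key change in this hunk is that the fixed zFightingSizeAdjust constant is replaced by a value proportional to the intersection distance, so the highlight quad is pushed off the voxel face by an amount that stays roughly constant in screen space instead of being too small far away and too large up close. A short sketch of that computation, using only the ratio visible in this diff:

// Distance-proportional offset used to keep the preview lines from
// z-fighting with the voxel face they highlight.
var zFightingSizeAdjustRatio = 0.004;
function previewOffsetFor(intersection) {
    // farther voxels need a larger absolute offset for the same on-screen separation
    return zFightingSizeAdjustRatio * intersection.distance;
}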
|
||||
|
||||
|
@@ -809,21 +827,21 @@ function showPreviewLines() {
|
|||
var pasteVoxel = getNewPasteVoxel(pickRay);
|
||||
|
||||
// X axis
|
||||
Overlays.editOverlay(linePreviewBottom, {
|
||||
Overlays.editOverlay(linePreviewBottom[currentCursor], {
|
||||
position: pasteVoxel.origin,
|
||||
end: {x: pasteVoxel.origin.x + pasteVoxel.voxelSize, y: pasteVoxel.origin.y, z: pasteVoxel.origin.z },
|
||||
visible: true
|
||||
});
|
||||
|
||||
// Y axis
|
||||
Overlays.editOverlay(linePreviewRight, {
|
||||
Overlays.editOverlay(linePreviewRight[currentCursor], {
|
||||
position: pasteVoxel.origin,
|
||||
end: {x: pasteVoxel.origin.x, y: pasteVoxel.origin.y + pasteVoxel.voxelSize, z: pasteVoxel.origin.z },
|
||||
visible: true
|
||||
});
|
||||
|
||||
// Z axis
|
||||
Overlays.editOverlay(linePreviewTop, {
|
||||
Overlays.editOverlay(linePreviewTop[currentCursor], {
|
||||
position: pasteVoxel.origin,
|
||||
end: {x: pasteVoxel.origin.x, y: pasteVoxel.origin.y, z: pasteVoxel.origin.z - pasteVoxel.voxelSize },
|
||||
visible: true
|
||||
|
@@ -837,22 +855,22 @@ function showPreviewLines() {
|
|||
if (intersection.intersects) {
|
||||
resultVoxel = calculateVoxelFromIntersection(intersection,"");
|
||||
Overlays.editOverlay(voxelPreview, { visible: false });
|
||||
Overlays.editOverlay(linePreviewTop, { position: resultVoxel.topLeft, end: resultVoxel.topRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewBottom, { position: resultVoxel.bottomLeft, end: resultVoxel.bottomRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewLeft, { position: resultVoxel.topLeft, end: resultVoxel.bottomLeft, visible: true });
|
||||
Overlays.editOverlay(linePreviewRight, { position: resultVoxel.topRight, end: resultVoxel.bottomRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewTop[currentCursor], { position: resultVoxel.topLeft, end: resultVoxel.topRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewBottom[currentCursor], { position: resultVoxel.bottomLeft, end: resultVoxel.bottomRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewLeft[currentCursor], { position: resultVoxel.topLeft, end: resultVoxel.bottomLeft, visible: true });
|
||||
Overlays.editOverlay(linePreviewRight[currentCursor], { position: resultVoxel.topRight, end: resultVoxel.bottomRight, visible: true });
|
||||
colors[0] = {red: intersection.voxel.red, green: intersection.voxel.green , blue: intersection.voxel.blue };
|
||||
|
||||
if (copyScale) {
|
||||
scaleSelector.setScale(intersection.voxel.s);
|
||||
}
|
||||
moveTools();
|
||||
} else {
|
||||
} else if (intersection.accurate) {
|
||||
Overlays.editOverlay(voxelPreview, { visible: false });
|
||||
Overlays.editOverlay(linePreviewTop, { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom, { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft, { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight, { visible: false });
|
||||
Overlays.editOverlay(linePreviewTop[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight[currentCursor], { visible: false });
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -862,20 +880,20 @@ function showPreviewGuides() {
|
|||
showPreviewVoxel();
|
||||
|
||||
// make sure alternative is hidden
|
||||
Overlays.editOverlay(linePreviewTop, { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom, { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft, { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight, { visible: false });
|
||||
Overlays.editOverlay(linePreviewTop[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight[currentCursor], { visible: false });
|
||||
} else {
|
||||
showPreviewLines();
|
||||
}
|
||||
} else {
|
||||
// make sure all previews are off
|
||||
Overlays.editOverlay(voxelPreview, { visible: false });
|
||||
Overlays.editOverlay(linePreviewTop, { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom, { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft, { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight, { visible: false });
|
||||
Overlays.editOverlay(linePreviewTop[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft[currentCursor], { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight[currentCursor], { visible: false });
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -968,6 +986,14 @@ function mousePressEvent(event) {
|
|||
return;
|
||||
}
|
||||
|
||||
if (event.deviceID == 1500) { // Left Hydra Controller
|
||||
currentCursor = 0;
|
||||
} else if (event.deviceID == 1501) { // Right Hydra Controller
|
||||
currentCursor = 1;
|
||||
} else {
|
||||
currentCursor = 2;
|
||||
}
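The deviceID checks above, and the identical block added to mouseMoveEvent further down, map each Hydra controller to its own cursor slot, with slot 2 reserved for the mouse and any other device. A hedged helper sketch of that mapping (cursorIndexForDevice is a hypothetical name, not part of the script):

// Hypothetical helper equivalent to the repeated deviceID checks in this diff.
var LEFT_HYDRA_DEVICE_ID = 1500;
var RIGHT_HYDRA_DEVICE_ID = 1501;
function cursorIndexForDevice(deviceID) {
    if (deviceID == LEFT_HYDRA_DEVICE_ID) {
        return 0;
    } else if (deviceID == RIGHT_HYDRA_DEVICE_ID) {
        return 1;
    }
    return 2; // mouse or any other device
}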
|
||||
|
||||
var clickedOnSomething = false;
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
|
||||
|
||||
|
@@ -1220,11 +1246,16 @@ function menuItemEvent(menuItem) {
|
|||
}
|
||||
|
||||
function mouseMoveEvent(event) {
|
||||
if (!editToolsOn) {
|
||||
return;
|
||||
if (!editToolsOn || inspectJsIsRunning) {
|
||||
return;
|
||||
}
|
||||
if (inspectJsIsRunning) {
|
||||
return;
|
||||
|
||||
if (event.deviceID == 1500) { // Left Hydra Controller
|
||||
currentCursor = 0;
|
||||
} else if (event.deviceID == 1501) { // Right Hydra Controller
|
||||
currentCursor = 1;
|
||||
} else {
|
||||
currentCursor = 2;
|
||||
}
|
||||
|
||||
// Move Import Preview
|
||||
|
@@ -1475,10 +1506,12 @@ Controller.captureKeyEvents({ text: "-" });
|
|||
|
||||
function scriptEnding() {
|
||||
Overlays.deleteOverlay(voxelPreview);
|
||||
Overlays.deleteOverlay(linePreviewTop);
|
||||
Overlays.deleteOverlay(linePreviewBottom);
|
||||
Overlays.deleteOverlay(linePreviewLeft);
|
||||
Overlays.deleteOverlay(linePreviewRight);
|
||||
for (var i = 0; i < linePreviewTop.length; i++) {
|
||||
Overlays.deleteOverlay(linePreviewTop[i]);
|
||||
Overlays.deleteOverlay(linePreviewBottom[i]);
|
||||
Overlays.deleteOverlay(linePreviewLeft[i]);
|
||||
Overlays.deleteOverlay(linePreviewRight[i]);
|
||||
}
|
||||
for (s = 0; s < numColors; s++) {
|
||||
Overlays.deleteOverlay(swatches[s]);
|
||||
}
|
||||
|
|
646  examples/fallingSand.js  Normal file
@@ -0,0 +1,646 @@
|
|||
//
|
||||
// fallingSand.js
|
||||
// examples
|
||||
//
|
||||
// Created by Ben Arnold on 7/14/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// This sample script allows the user to place sand voxels that will undergo
|
||||
// cellular automata physics.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var zFightingSizeAdjust = 0.002; // used to adjust preview voxels to prevent z fighting
|
||||
var previewLineWidth = 2.0;
|
||||
|
||||
var voxelSize = 1;
|
||||
var windowDimensions = Controller.getViewportDimensions();
|
||||
var toolIconUrl = "http://highfidelity-public.s3-us-west-1.amazonaws.com/images/tools/";
|
||||
|
||||
var MAX_VOXEL_SCALE_POWER = 5;
|
||||
var MIN_VOXEL_SCALE_POWER = -8;
|
||||
var MAX_VOXEL_SCALE = Math.pow(2.0, MAX_VOXEL_SCALE_POWER);
|
||||
var MIN_VOXEL_SCALE = Math.pow(2.0, MIN_VOXEL_SCALE_POWER);
|
||||
|
||||
|
||||
var linePreviewTop = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 0, green: 0, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
var linePreviewBottom = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 0, green: 0, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
var linePreviewLeft = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 0, green: 0, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
var linePreviewRight = Overlays.addOverlay("line3d", {
|
||||
position: { x: 0, y: 0, z: 0},
|
||||
end: { x: 0, y: 0, z: 0},
|
||||
color: { red: 0, green: 0, blue: 255},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: previewLineWidth
|
||||
});
|
||||
|
||||
|
||||
var UIColor = { red: 119, green: 103, blue: 53};
|
||||
var activeUIColor = { red: 234, green: 206, blue: 106};
|
||||
|
||||
var toolHeight = 50;
|
||||
var toolWidth = 50;
|
||||
|
||||
var editToolsOn = true;
|
||||
|
||||
var voxelToolSelected = false;
|
||||
|
||||
var scaleSelectorWidth = 144;
|
||||
var scaleSelectorHeight = 37;
|
||||
|
||||
var scaleSelectorX = windowDimensions.x / 5.0;
|
||||
var scaleSelectorY = windowDimensions.y - scaleSelectorHeight;
|
||||
|
||||
var voxelTool = Overlays.addOverlay("image", {
|
||||
x: scaleSelectorX + scaleSelectorWidth + 1, y: windowDimensions.y - toolHeight, width: toolWidth, height: toolHeight,
|
||||
subImage: { x: 0, y: toolHeight, width: toolWidth, height: toolHeight },
|
||||
imageURL: toolIconUrl + "voxel-tool.svg",
|
||||
visible: editToolsOn,
|
||||
color: UIColor,
|
||||
alpha: 0.9
|
||||
});
|
||||
|
||||
var copyScale = true;
|
||||
function ScaleSelector() {
|
||||
this.x = scaleSelectorX;
|
||||
this.y = scaleSelectorY;
|
||||
this.width = scaleSelectorWidth;
|
||||
this.height = scaleSelectorHeight;
|
||||
|
||||
this.displayPower = false;
|
||||
this.scale = 1.0;
|
||||
this.power = 0;
|
||||
|
||||
this.FIRST_PART = this.width * 40.0 / 100.0;
|
||||
this.SECOND_PART = this.width * 37.0 / 100.0;
|
||||
|
||||
this.buttonsOverlay = Overlays.addOverlay("image", {
|
||||
x: this.x, y: this.y,
|
||||
width: this.width, height: this.height,
|
||||
//subImage: { x: 0, y: toolHeight, width: toolWidth, height: toolHeight },
|
||||
imageURL: toolIconUrl + "voxel-size-selector.svg",
|
||||
alpha: 0.9,
|
||||
visible: editToolsOn,
|
||||
color: activeUIColor
|
||||
});
|
||||
this.textOverlay = Overlays.addOverlay("text", {
|
||||
x: this.x + this.FIRST_PART, y: this.y,
|
||||
width: this.SECOND_PART, height: this.height,
|
||||
topMargin: 13,
|
||||
text: this.scale.toString(),
|
||||
alpha: 0.0,
|
||||
visible: editToolsOn,
|
||||
color: activeUIColor
|
||||
});
|
||||
this.powerOverlay = Overlays.addOverlay("text", {
|
||||
x: this.x + this.FIRST_PART, y: this.y,
|
||||
width: this.SECOND_PART, height: this.height,
|
||||
leftMargin: 28,
|
||||
text: this.power.toString(),
|
||||
alpha: 0.0,
|
||||
visible: false,
|
||||
color: activeUIColor
|
||||
});
|
||||
this.setScale = function(scale) {
|
||||
if (scale > MAX_VOXEL_SCALE) {
|
||||
scale = MAX_VOXEL_SCALE;
|
||||
}
|
||||
if (scale < MIN_VOXEL_SCALE) {
|
||||
scale = MIN_VOXEL_SCALE;
|
||||
}
|
||||
|
||||
this.scale = scale;
|
||||
this.power = Math.floor(Math.log(scale) / Math.log(2));
|
||||
this.update();
|
||||
}
|
||||
|
||||
this.show = function(doShow) {
|
||||
Overlays.editOverlay(this.buttonsOverlay, {visible: doShow});
|
||||
Overlays.editOverlay(this.textOverlay, {visible: doShow});
|
||||
Overlays.editOverlay(this.powerOverlay, {visible: doShow && this.displayPower});
|
||||
}
|
||||
|
||||
this.move = function() {
|
||||
this.x = swatchesX + swatchesWidth;
|
||||
this.y = swatchesY;
|
||||
|
||||
Overlays.editOverlay(this.buttonsOverlay, {
|
||||
x: this.x, y: this.y,
|
||||
});
|
||||
Overlays.editOverlay(this.textOverlay, {
|
||||
x: this.x + this.FIRST_PART, y: this.y,
|
||||
});
|
||||
Overlays.editOverlay(this.powerOverlay, {
|
||||
x: this.x + this.FIRST_PART, y: this.y,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
this.switchDisplay = function() {
|
||||
this.displayPower = !this.displayPower;
|
||||
|
||||
if (this.displayPower) {
|
||||
Overlays.editOverlay(this.textOverlay, {
|
||||
leftMargin: 18,
|
||||
text: "2"
|
||||
});
|
||||
Overlays.editOverlay(this.powerOverlay, {
|
||||
text: this.power.toString(),
|
||||
visible: editToolsOn
|
||||
});
|
||||
} else {
|
||||
Overlays.editOverlay(this.textOverlay, {
|
||||
leftMargin: 13,
|
||||
text: this.scale.toString()
|
||||
});
|
||||
Overlays.editOverlay(this.powerOverlay, {
|
||||
visible: false
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this.update = function() {
|
||||
if (this.displayPower) {
|
||||
Overlays.editOverlay(this.powerOverlay, {text: this.power.toString()});
|
||||
} else {
|
||||
Overlays.editOverlay(this.textOverlay, {text: this.scale.toString()});
|
||||
}
|
||||
}
|
||||
|
||||
this.incrementScale = function() {
|
||||
copyScale = false;
|
||||
if (this.power < MAX_VOXEL_SCALE_POWER) {
|
||||
++this.power;
|
||||
this.scale *= 2.0;
|
||||
this.update();
|
||||
}
|
||||
}
|
||||
|
||||
this.decrementScale = function() {
|
||||
copyScale = false;
|
||||
if (MIN_VOXEL_SCALE_POWER < this.power) {
|
||||
--this.power;
|
||||
this.scale /= 2.0;
|
||||
this.update();
|
||||
}
|
||||
}
|
||||
|
||||
this.clicked = function(x, y) {
|
||||
if (this.x < x && x < this.x + this.width &&
|
||||
this.y < y && y < this.y + this.height) {
|
||||
|
||||
if (x < this.x + this.FIRST_PART) {
|
||||
this.decrementScale();
|
||||
} else if (x < this.x + this.FIRST_PART + this.SECOND_PART) {
|
||||
this.switchDisplay();
|
||||
} else {
|
||||
this.incrementScale();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
this.cleanup = function() {
|
||||
Overlays.deleteOverlay(this.buttonsOverlay);
|
||||
Overlays.deleteOverlay(this.textOverlay);
|
||||
Overlays.deleteOverlay(this.powerOverlay);
|
||||
}
|
||||
|
||||
}
|
||||
var scaleSelector = new ScaleSelector();
|
||||
|
||||
function calculateVoxelFromIntersection(intersection, operation) {
|
||||
|
||||
var resultVoxel;
|
||||
|
||||
var x;
|
||||
var y;
|
||||
var z;
|
||||
|
||||
// if our "target voxel size" is larger than the voxel we intersected with, then we need to find the closest
|
||||
// ancestor voxel of our target size that contains our intersected voxel.
|
||||
if (voxelSize > intersection.voxel.s) {
|
||||
x = Math.floor(intersection.voxel.x / voxelSize) * voxelSize;
|
||||
y = Math.floor(intersection.voxel.y / voxelSize) * voxelSize;
|
||||
z = Math.floor(intersection.voxel.z / voxelSize) * voxelSize;
|
||||
} else {
|
||||
// otherwise, calculate the enclosed voxel of size voxelSize that the intersection point falls inside of.
|
||||
// if you have a voxelSize that's smaller than the voxel you're intersecting, this calculation will result
|
||||
// in the subvoxel that the intersection point falls in; if the target voxelSize matches the intersecting
|
||||
// voxel, this still works and returns the intersecting voxel, which is what we want.
|
||||
var adjustToCenter = Vec3.multiply(Voxels.getFaceVector(intersection.face), (voxelSize * -0.5));
|
||||
|
||||
var centerOfIntersectingVoxel = Vec3.sum(intersection.intersection, adjustToCenter);
|
||||
x = Math.floor(centerOfIntersectingVoxel.x / voxelSize) * voxelSize;
|
||||
y = Math.floor(centerOfIntersectingVoxel.y / voxelSize) * voxelSize;
|
||||
z = Math.floor(centerOfIntersectingVoxel.z / voxelSize) * voxelSize;
|
||||
}
|
||||
resultVoxel = { x: x, y: y, z: z, s: voxelSize };
|
||||
var highlightAt = { x: x, y: y, z: z, s: voxelSize };
|
||||
|
||||
|
||||
|
||||
// we only do the "add to the face we're pointing at" adjustment, if the operation is an add
|
||||
// operation, and the target voxel size is equal to or smaller than the intersecting voxel.
|
||||
var wantAddAdjust = (operation == "add" && (voxelSize <= intersection.voxel.s));
|
||||
|
||||
// now we also want to calculate the "edge square" for the face for this voxel
|
||||
if (intersection.face == "MIN_X_FACE") {
|
||||
|
||||
highlightAt.x = x - zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.x -= voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
|
||||
} else if (intersection.face == "MAX_X_FACE") {
|
||||
|
||||
highlightAt.x = x + voxelSize + zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.x += resultVoxel.s;
|
||||
}
|
||||
|
||||
resultVoxel.bottomRight = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.topRight = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.topLeft = {x: highlightAt.x, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
|
||||
} else if (intersection.face == "MIN_Y_FACE") {
|
||||
|
||||
highlightAt.y = y - zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.y -= voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.topRight = {x: highlightAt.x + zFightingSizeAdjust , y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + zFightingSizeAdjust , y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust , y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust };
|
||||
|
||||
} else if (intersection.face == "MAX_Y_FACE") {
|
||||
|
||||
highlightAt.y = y + voxelSize + zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.y += voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + zFightingSizeAdjust};
|
||||
resultVoxel.topRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust};
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y, z: highlightAt.z + voxelSize - zFightingSizeAdjust};
|
||||
|
||||
} else if (intersection.face == "MIN_Z_FACE") {
|
||||
|
||||
highlightAt.z = z - zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.z -= voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z};
|
||||
resultVoxel.topRight = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.topLeft = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z};
|
||||
|
||||
} else if (intersection.face == "MAX_Z_FACE") {
|
||||
|
||||
highlightAt.z = z + voxelSize + zFightingSizeAdjust;
|
||||
if (wantAddAdjust) {
|
||||
resultVoxel.z += voxelSize;
|
||||
}
|
||||
|
||||
resultVoxel.bottomLeft = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.bottomRight = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + zFightingSizeAdjust, z: highlightAt.z};
|
||||
resultVoxel.topLeft = {x: highlightAt.x + zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z };
|
||||
resultVoxel.topRight = {x: highlightAt.x + voxelSize - zFightingSizeAdjust, y: highlightAt.y + voxelSize - zFightingSizeAdjust, z: highlightAt.z};
|
||||
|
||||
}
|
||||
return resultVoxel;
|
||||
}
|
||||
|
||||
var trackLastMouseX = 0;
|
||||
var trackLastMouseY = 0;
|
||||
|
||||
function showPreviewLines() {
|
||||
|
||||
var pickRay = Camera.computePickRay(trackLastMouseX, trackLastMouseY);
|
||||
|
||||
var intersection = Voxels.findRayIntersection(pickRay);
|
||||
|
||||
if (intersection.intersects) {
|
||||
var resultVoxel = calculateVoxelFromIntersection(intersection, "");
|
||||
Overlays.editOverlay(linePreviewTop, { position: resultVoxel.topLeft, end: resultVoxel.topRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewBottom, { position: resultVoxel.bottomLeft, end: resultVoxel.bottomRight, visible: true });
|
||||
Overlays.editOverlay(linePreviewLeft, { position: resultVoxel.topLeft, end: resultVoxel.bottomLeft, visible: true });
|
||||
Overlays.editOverlay(linePreviewRight, { position: resultVoxel.topRight, end: resultVoxel.bottomRight, visible: true });
|
||||
} else {
|
||||
Overlays.editOverlay(linePreviewTop, { visible: false });
|
||||
Overlays.editOverlay(linePreviewBottom, { visible: false });
|
||||
Overlays.editOverlay(linePreviewLeft, { visible: false });
|
||||
Overlays.editOverlay(linePreviewRight, { visible: false });
|
||||
}
|
||||
}
|
||||
|
||||
function mouseMoveEvent(event) {
|
||||
trackLastMouseX = event.x;
|
||||
trackLastMouseY = event.y;
|
||||
if (!voxelToolSelected) {
|
||||
return;
|
||||
}
|
||||
showPreviewLines();
|
||||
}
|
||||
|
||||
var BRUSH_RADIUS = 2;
|
||||
|
||||
function mousePressEvent(event) {
|
||||
var mouseX = event.x;
|
||||
var mouseY = event.y;
|
||||
|
||||
var clickedOnSomething = false;
|
||||
// Check if we clicked an overlay
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({x: mouseX, y: mouseY});
|
||||
|
||||
if (clickedOverlay == voxelTool) {
|
||||
voxelToolSelected = !voxelToolSelected;
|
||||
|
||||
if (voxelToolSelected == true) {
|
||||
Overlays.editOverlay(voxelTool, {
|
||||
color: activeUIColor
|
||||
});
|
||||
} else {
|
||||
Overlays.editOverlay(voxelTool, {
|
||||
color: UIColor
|
||||
});
|
||||
}
|
||||
|
||||
clickedOnSomething = true;
|
||||
} else if (scaleSelector.clicked(event.x, event.y)) {
|
||||
clickedOnSomething = true;
|
||||
voxelSize = scaleSelector.scale;
|
||||
}
|
||||
|
||||
// Return if we clicked on the UI or the voxel tool is disabled
|
||||
if (clickedOnSomething || !voxelToolSelected) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Compute the picking ray for the click
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var intersection = Voxels.findRayIntersection(pickRay);
|
||||
var resultVoxel = calculateVoxelFromIntersection(intersection, "add");
|
||||
|
||||
|
||||
//Add a clump of sand voxels
|
||||
makeSphere(resultVoxel.x, resultVoxel.y, resultVoxel.z, voxelSize * BRUSH_RADIUS, voxelSize);
|
||||
}
|
||||
|
||||
var sandArray = [];
|
||||
var numSand = 0;
|
||||
|
||||
|
||||
//These arrays are used to buffer add/remove operations so they can be batched together
|
||||
var addArray = [];
|
||||
var addArraySize = 0;
|
||||
var removeArray = [];
|
||||
var removeArraySize = 0;
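//moveVoxel() and deactivateSand() only record changes in these buffers; update() flushes the
//buffered erases first and the buffered adds second, once per simulation step, rather than
//editing voxels one at a time while the automaton is being stepped.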
|
||||
|
||||
//The colors must be different
|
||||
var activeSandColor = { r: 234, g: 206, b: 106};
|
||||
var inactiveSandColor = { r: 233, g: 206, b: 106};
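//Active and inactive sand are told apart purely by voxel color (see activateNeighbors below),
//so the two colors must differ in at least one channel even though they look nearly identical.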
|
||||
|
||||
//This is used as an optimization, so that we
|
||||
//will check our 6 neighbors at most once.
|
||||
var adjacentVoxels = [];
|
||||
var numAdjacentVoxels = 0;
|
||||
//Stores a list of voxels we need to activate
|
||||
var activateMap = {};
|
||||
|
||||
var UPDATES_PER_SECOND = 12.0; // frames per second
|
||||
var frameIndex = 0.0;
|
||||
var oldFrameIndex = 0;
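//update() accumulates deltaTime scaled by UPDATES_PER_SECOND and only steps the simulation when
//the integer part of frameIndex advances, so the automaton runs at roughly UPDATES_PER_SECOND
//steps per second regardless of the render frame rate.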
|
||||
|
||||
function update(deltaTime) {
|
||||
frameIndex += deltaTime * UPDATES_PER_SECOND;
|
||||
if (Math.floor(frameIndex) == oldFrameIndex) {
|
||||
return;
|
||||
}
|
||||
oldFrameIndex++;
|
||||
|
||||
//Clear the activate map each frame
|
||||
activateMap = {};
|
||||
|
||||
//Update all sand in our sandArray
|
||||
var i = 0;
|
||||
while (i < numSand) {
|
||||
//Update the sand voxel and if it doesn't move, deactivate it
|
||||
if (updateSand(i) == false) {
|
||||
deactivateSand(i);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
for (var i = 0; i < removeArraySize; i++) {
|
||||
var voxel = removeArray[i];
|
||||
Voxels.eraseVoxel(voxel.x, voxel.y, voxel.z, voxel.s);
|
||||
}
|
||||
removeArraySize = 0;
|
||||
|
||||
//Add all voxels that have moved
|
||||
for (var i = 0; i < addArraySize; i++) {
|
||||
var voxel = addArray[i];
|
||||
Voxels.setVoxel(voxel.x, voxel.y, voxel.z, voxel.s, voxel.r, voxel.g, voxel.b);
|
||||
}
|
||||
addArraySize = 0;
|
||||
|
||||
for (var key in activateMap) {
|
||||
var voxel = activateMap[key];
|
||||
Voxels.setVoxel(voxel.x, voxel.y, voxel.z, voxel.s, activeSandColor.r, activeSandColor.g, activeSandColor.b);
|
||||
sandArray[numSand++] = { x: voxel.x, y: voxel.y, z: voxel.z, s: voxel.s, r: activeSandColor.r, g: activeSandColor.g, b: activeSandColor.b };
|
||||
}
|
||||
}
|
||||
|
||||
//Adds a sphere of sand at the center cx,cy,cz
|
||||
function makeSphere(cx, cy, cz, r, voxelSize) {
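//Walk a cube of side 2r in voxelSize steps and fill every cell whose center lies within r
//of (cx, cy, cz), skipping cells that are already occupied.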
|
||||
|
||||
var r2 = r * r;
|
||||
var distance2;
|
||||
var dx;
|
||||
var dy;
|
||||
var dz;
|
||||
|
||||
for (var x = cx - r; x <= cx + r; x += voxelSize) {
|
||||
for (var y = cy - r; y <= cy + r; y += voxelSize) {
|
||||
for (var z = cz - r; z <= cz + r; z += voxelSize) {
|
||||
dx = Math.abs(x - cx);
|
||||
dy = Math.abs(y - cy);
|
||||
dz = Math.abs(z - cz);
|
||||
distance2 = dx * dx + dy * dy + dz * dz;
|
||||
if (distance2 <= r2 && isVoxelEmpty(x, y, z, voxelSize)) {
|
||||
Voxels.setVoxel(x, y, z, voxelSize, activeSandColor.r, activeSandColor.g, activeSandColor.b);
|
||||
sandArray[numSand++] = { x: x, y: y, z: z, s: voxelSize, r: activeSandColor.r, g: activeSandColor.g, b: activeSandColor.b };
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Check if a given voxel is empty
|
||||
function isVoxelEmpty(x, y, z, s, isAdjacent) {
|
||||
var halfSize = s / 2;
|
||||
var point = {x: x + halfSize, y: y + halfSize, z: z + halfSize };
|
||||
|
||||
var adjacent = Voxels.getVoxelEnclosingPointBlocking(point);
|
||||
//If the color is all zeros, we assume it's air.
|
||||
|
||||
if (adjacent.red == 0 && adjacent.green == 0 && adjacent.blue == 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (isAdjacent) {
|
||||
adjacentVoxels[numAdjacentVoxels++] = adjacent;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
//Moves voxel to x,y,z if the space is empty
|
||||
function tryMoveVoxel(voxel, x, y, z) {
|
||||
//If the adjacent voxel is empty, we will move to it.
|
||||
if (isVoxelEmpty(x, y, z, voxel.s, false)) {
|
||||
var hsize = voxel.s / 2;
|
||||
for (var i = 0; i < 5; i++) {
|
||||
var point = {x: voxel.x + directionVecs[i].x * voxel.s + hsize, y: voxel.y + directionVecs[i].y * voxel.s + hsize, z: voxel.z + directionVecs[i].z * voxel.s + hsize };
|
||||
adjacentVoxels[numAdjacentVoxels++] = Voxels.getVoxelEnclosingPointBlocking(point);
|
||||
}
|
||||
moveVoxel(voxel, x, y, z);
|
||||
|
||||
//Get all adjacent voxels for activation
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
//Moves voxel to x,y,z
|
||||
function moveVoxel(voxel, x, y, z) {
|
||||
activateNeighbors();
|
||||
removeArray[removeArraySize++] = {x: voxel.x, y: voxel.y, z: voxel.z, s: voxel.s};
|
||||
addArray[addArraySize++] = {x: x, y: y, z: z, s: voxel.s, r: activeSandColor.r, g: activeSandColor.g, b: activeSandColor.b};
|
||||
voxel.x = x;
|
||||
voxel.y = y;
|
||||
voxel.z = z;
|
||||
}
|
||||
|
||||
var LEFT = 0;
|
||||
var BACK = 1;
|
||||
var RIGHT = 2;
|
||||
var FRONT = 3;
|
||||
var TOP = 4;
|
||||
|
||||
//These indicate the different directions to neighbor voxels, so we can iterate them
|
||||
var directionVecs = [];
|
||||
directionVecs[LEFT] = {x: -1, y: 0, z: 0}; //Left
|
||||
directionVecs[BACK] = {x: 0, y: 0, z: -1}; //Back
|
||||
directionVecs[RIGHT] = {x: 1, y: 0, z: 0}; //Right
|
||||
directionVecs[FRONT] = {x: 0, y: 0, z: 1}; //Front
|
||||
directionVecs[TOP] = {x: 0, y: 1, z: 0}; //Top
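//The rule implemented by updateSand() below: a sand voxel first tries to fall straight down;
//failing that, it slides into the first side direction (left, back, right, front) whose cell
//and the cell directly below it are both empty; if it cannot move at all it is deactivated
//until a neighbor wakes it up again.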
|
||||
|
||||
function updateSand(i) {
|
||||
var voxel = sandArray[i];
|
||||
var size = voxel.s;
|
||||
var hsize = size / 2;
|
||||
numAdjacentVoxels = 0;
|
||||
|
||||
//Down
|
||||
if (tryMoveVoxel(voxel, voxel.x, voxel.y - size, voxel.z)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
//Left, back, right, front
|
||||
for (var i = 0; i < 4; i++) {
|
||||
if (isVoxelEmpty(voxel.x + directionVecs[i].x * size, voxel.y + directionVecs[i].y * size, voxel.z + directionVecs[i].z * size, size, true)
|
||||
&& isVoxelEmpty(voxel.x + directionVecs[i].x * size, (voxel.y - size) + directionVecs[i].y * size, voxel.z + directionVecs[i].z * size, size, false)) {
|
||||
//get the rest of the adjacent voxels
|
||||
for (var j = i + 1; j < 5; j++) {
|
||||
var point = {x: voxel.x + directionVecs[j].x * size + hsize, y: voxel.y + directionVecs[j].y * size + hsize, z: voxel.z + directionVecs[j].z * size + hsize };
|
||||
adjacentVoxels[numAdjacentVoxels++] = Voxels.getVoxelEnclosingPointBlocking(point);
|
||||
}
|
||||
moveVoxel(voxel, voxel.x + directionVecs[i].x * size, voxel.y + directionVecs[i].y * size, voxel.z + directionVecs[i].z * size);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function activateNeighbors() {
|
||||
for (var i = 0; i < numAdjacentVoxels; i++) {
|
||||
var voxel = adjacentVoxels[i];
|
||||
//Check if this neighbor is inactive, if so, activate it
|
||||
if (voxel.red == inactiveSandColor.r && voxel.green == inactiveSandColor.g && voxel.blue == inactiveSandColor.b) {
|
||||
activateMap[voxel.x.toString() + "," + voxel.y.toString() + ',' + voxel.z.toString()] = voxel;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Deactivates a sand voxel to save processing power
|
||||
function deactivateSand(i) {
|
||||
var voxel = sandArray[i];
|
||||
addArray[addArraySize++] = {x: voxel.x, y: voxel.y, z: voxel.z, s: voxel.s, r: inactiveSandColor.r, g: inactiveSandColor.g, b: inactiveSandColor.b};
|
||||
sandArray[i] = sandArray[numSand-1];
|
||||
numSand--;
|
||||
}
|
||||
|
||||
//Cleanup
|
||||
function scriptEnding() {
|
||||
for (var i = 0; i < numSand; i++) {
|
||||
var voxel = sandArray[i];
|
||||
Voxels.eraseVoxel(voxel.x, voxel.y, voxel.z, voxel.s);
|
||||
}
|
||||
Overlays.deleteOverlay(linePreviewTop);
|
||||
Overlays.deleteOverlay(linePreviewBottom);
|
||||
Overlays.deleteOverlay(linePreviewLeft);
|
||||
Overlays.deleteOverlay(linePreviewRight);
|
||||
scaleSelector.cleanup();
|
||||
Overlays.deleteOverlay(voxelTool);
|
||||
}
|
||||
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Controller.mouseMoveEvent.connect(mouseMoveEvent);
|
||||
|
||||
Script.update.connect(update);
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
|
||||
Voxels.setMaxPacketSize(1); //this is needed or a bug occurs :(
|
||||
Voxels.setPacketsPerSecond(10000);
|
358  examples/grenadeLauncher.js  Normal file
@@ -0,0 +1,358 @@
|
|||
//
|
||||
// grenadeLauncher.js
|
||||
// examples
|
||||
// Created by Ben Arnold on 7/11/14.
|
||||
// This is a modified version of gun.js by Brad Hefta-Gaub.
|
||||
//
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example script that turns the hydra controllers and mouse into a particle gun.
|
||||
// It reads the controller, watches for trigger pulls, and launches particles.
|
||||
// When particles collide with voxels they blow big holes out of the voxels.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
function getRandomFloat(min, max) {
|
||||
return Math.random() * (max - min) + min;
|
||||
}
|
||||
|
||||
var lastX = 0;
|
||||
var lastY = 0;
|
||||
var yawFromMouse = 0;
|
||||
var pitchFromMouse = 0;
|
||||
var isMouseDown = false;
|
||||
|
||||
var BULLET_VELOCITY = 3.0;
|
||||
var MIN_THROWER_DELAY = 1000;
|
||||
var MAX_THROWER_DELAY = 1000;
|
||||
var LEFT_BUTTON_1 = 1;
|
||||
var LEFT_BUTTON_3 = 3;
|
||||
var RELOAD_INTERVAL = 5;
|
||||
|
||||
var showScore = false;
|
||||
|
||||
// Load some sound to use for loading and firing
|
||||
var fireSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/GUN-SHOT2.raw");
|
||||
var loadSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/Gun_Reload_Weapon22.raw");
|
||||
var impactSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/BulletImpact2.raw");
|
||||
var targetHitSound = new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Space%20Invaders/hit.raw");
|
||||
var targetLaunchSound = new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Space%20Invaders/shoot.raw");
|
||||
|
||||
var gunModel = "http://public.highfidelity.io/models/attachments/HaloGun.fst";
|
||||
|
||||
var audioOptions = new AudioInjectionOptions();
|
||||
audioOptions.volume = 0.9;
|
||||
|
||||
var shotsFired = 0;
|
||||
|
||||
var shotTime = new Date();
|
||||
|
||||
// initialize our triggers
|
||||
var triggerPulled = new Array();
|
||||
var numberOfTriggers = Controller.getNumberOfTriggers();
|
||||
for (t = 0; t < numberOfTriggers; t++) {
|
||||
triggerPulled[t] = false;
|
||||
}
|
||||
|
||||
var isLaunchButtonPressed = false;
|
||||
|
||||
var score = 0;
|
||||
|
||||
// Create a reticle image in center of screen
|
||||
var screenSize = Controller.getViewportDimensions();
|
||||
var reticle = Overlays.addOverlay("image", {
|
||||
x: screenSize.x / 2 - 16,
|
||||
y: screenSize.y / 2 - 16,
|
||||
width: 32,
|
||||
height: 32,
|
||||
imageURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/reticle.png",
|
||||
color: { red: 255, green: 255, blue: 255},
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
if (showScore) {
|
||||
var text = Overlays.addOverlay("text", {
|
||||
x: screenSize.x / 2 - 100,
|
||||
y: screenSize.y / 2 - 50,
|
||||
width: 150,
|
||||
height: 50,
|
||||
color: { red: 0, green: 0, blue: 0},
|
||||
textColor: { red: 255, green: 0, blue: 0},
|
||||
topMargin: 4,
|
||||
leftMargin: 4,
|
||||
text: "Score: " + score
|
||||
});
|
||||
}
|
||||
|
||||
function printVector(string, vector) {
|
||||
print(string + " " + vector.x + ", " + vector.y + ", " + vector.z);
|
||||
}
|
||||
|
||||
function shootBullet(position, velocity) {
|
||||
var BULLET_SIZE = 0.1;
|
||||
var BULLET_GRAVITY = -3.0;
|
||||
//Creates a grenade with a reasonable lifetime so that one is less likely to accidentally blow up
|
||||
//far away voxels
|
||||
Particles.addParticle(
|
||||
{ position: position,
|
||||
radius: BULLET_SIZE,
|
||||
color: { red: 10, green: 10, blue: 10 },
|
||||
velocity: velocity,
|
||||
gravity: { x: 0, y: BULLET_GRAVITY, z: 0 },
|
||||
lifetime: 10.0,
|
||||
damping: 0 });
|
||||
|
||||
// Play firing sounds
|
||||
audioOptions.position = position;
|
||||
Audio.playSound(fireSound, audioOptions);
|
||||
shotsFired++;
|
||||
if ((shotsFired % RELOAD_INTERVAL) == 0) {
|
||||
Audio.playSound(loadSound, audioOptions);
|
||||
}
|
||||
}
|
||||
|
||||
function shootTarget() {
|
||||
var TARGET_SIZE = 0.25;
|
||||
var TARGET_GRAVITY = -0.6;
|
||||
var TARGET_UP_VELOCITY = 3.0;
|
||||
var TARGET_FWD_VELOCITY = 5.0;
|
||||
var DISTANCE_TO_LAUNCH_FROM = 3.0;
|
||||
var camera = Camera.getPosition();
|
||||
//printVector("camera", camera);
|
||||
var targetDirection = Quat.angleAxis(getRandomFloat(-20.0, 20.0), { x:0, y:1, z:0 });
|
||||
targetDirection = Quat.multiply(Camera.getOrientation(), targetDirection);
|
||||
var forwardVector = Quat.getFront(targetDirection);
|
||||
//printVector("forwardVector", forwardVector);
|
||||
var newPosition = Vec3.sum(camera, Vec3.multiply(forwardVector, DISTANCE_TO_LAUNCH_FROM));
|
||||
//printVector("newPosition", newPosition);
|
||||
var velocity = Vec3.multiply(forwardVector, TARGET_FWD_VELOCITY);
|
||||
velocity.y += TARGET_UP_VELOCITY;
|
||||
//printVector("velocity", velocity);
|
||||
|
||||
Particles.addParticle(
|
||||
{ position: newPosition,
|
||||
radius: TARGET_SIZE,
|
||||
color: { red: 0, green: 200, blue: 200 },
|
||||
velocity: velocity,
|
||||
gravity: { x: 0, y: TARGET_GRAVITY, z: 0 },
|
||||
lifetime: 1000.0,
|
||||
damping: 0.99 });
|
||||
|
||||
// Record start time
|
||||
shotTime = new Date();
|
||||
|
||||
// Play target shoot sound
|
||||
audioOptions.position = newPosition;
|
||||
Audio.playSound(targetLaunchSound, audioOptions);
|
||||
}
|
||||
|
||||
|
||||
|
||||
function particleCollisionWithVoxel(particle, voxel, collision) {
|
||||
var VOXEL_SIZE = 0.5;
|
||||
// Don't make this big. I mean it.
|
||||
var CRATER_RADIUS = 5;
|
||||
var particleProperties = Particles.getParticleProperties(particle);
|
||||
var position = particleProperties.position;
|
||||
Particles.deleteParticle(particle);
|
||||
|
||||
audioOptions.position = collision.contactPoint;
|
||||
Audio.playSound(impactSound, audioOptions);
|
||||
|
||||
// Make a crater
|
||||
var center = collision.contactPoint;
|
||||
var RADIUS = CRATER_RADIUS * VOXEL_SIZE;
|
||||
var RADIUS2 = RADIUS * RADIUS;
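// Erase every voxel whose center lies within RADIUS of the contact point (squared-distance
// test, so no square root is needed inside the triple loop), carving a roughly spherical crater.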
|
||||
var distance2;
|
||||
var dx;
|
||||
var dy;
|
||||
var dz;
|
||||
for (var x = center.x - RADIUS; x <= center.x + RADIUS; x += VOXEL_SIZE) {
|
||||
for (var y = center.y - RADIUS; y <= center.y + RADIUS; y += VOXEL_SIZE) {
|
||||
for (var z = center.z - RADIUS; z <= center.z + RADIUS; z += VOXEL_SIZE) {
|
||||
dx = Math.abs(x - center.x);
|
||||
dy = Math.abs(y - center.y);
|
||||
dz = Math.abs(z - center.z);
|
||||
distance2 = dx * dx + dy * dy + dz * dz;
|
||||
if (distance2 <= RADIUS2) {
|
||||
Voxels.eraseVoxel(x, y, z, VOXEL_SIZE);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function particleCollisionWithParticle(particle1, particle2, collision) {
|
||||
score++;
|
||||
if (showScore) {
|
||||
Overlays.editOverlay(text, { text: "Score: " + score } );
|
||||
}
|
||||
|
||||
// Record shot time
|
||||
var endTime = new Date();
|
||||
var msecs = endTime.valueOf() - shotTime.valueOf();
|
||||
//print("hit, msecs = " + msecs);
|
||||
//Vec3.print("penetration = ", collision.penetration);
|
||||
//Vec3.print("contactPoint = ", collision.contactPoint);
|
||||
Particles.deleteParticle(particle1);
|
||||
Particles.deleteParticle(particle2);
|
||||
// play the sound near the camera so the shooter can hear it
|
||||
audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
|
||||
Audio.playSound(targetHitSound, audioOptions);
|
||||
}
|
||||
|
||||
function keyPressEvent(event) {
|
||||
// if our tools are off, then don't do anything
|
||||
if (event.text == "t") {
|
||||
var time = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
|
||||
Script.setTimeout(shootTarget, time);
|
||||
} else if (event.text == ".") {
|
||||
shootFromMouse();
|
||||
} else if (event.text == "r") {
|
||||
playLoadSound();
|
||||
}
|
||||
}
|
||||
|
||||
function playLoadSound() {
|
||||
audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
|
||||
Audio.playSound(loadSound, audioOptions);
|
||||
}
|
||||
|
||||
//MyAvatar.attach(gunModel, "RightHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
|
||||
MyAvatar.attach(gunModel, "LeftHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
|
||||
|
||||
// Give a bit of time to load before playing sound
|
||||
Script.setTimeout(playLoadSound, 2000);
|
||||
|
||||
function update(deltaTime) {
|
||||
|
||||
// Check for mouseLook movement, update rotation
|
||||
// rotate body yaw for yaw received from controller or mouse
|
||||
var newOrientation = Quat.multiply(MyAvatar.orientation, Quat.fromVec3Radians( { x: 0, y: yawFromMouse, z: 0 } ));
|
||||
MyAvatar.orientation = newOrientation;
|
||||
yawFromMouse = 0;
|
||||
|
||||
// apply pitch from controller or mouse
|
||||
var newPitch = MyAvatar.headPitch + pitchFromMouse;
|
||||
MyAvatar.headPitch = newPitch;
|
||||
pitchFromMouse = 0;
|
||||
|
||||
// Check hydra controller for launch button press
|
||||
if (!isLaunchButtonPressed && Controller.isButtonPressed(LEFT_BUTTON_3)) {
|
||||
isLaunchButtonPressed = true;
|
||||
var time = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
|
||||
Script.setTimeout(shootTarget, time);
|
||||
} else if (isLaunchButtonPressed && !Controller.isButtonPressed(LEFT_BUTTON_3)) {
|
||||
isLaunchButtonPressed = false;
|
||||
|
||||
}
|
||||
|
||||
// Check hydra controller for trigger press
|
||||
|
||||
var numberOfTriggers = Controller.getNumberOfTriggers();
|
||||
var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
|
||||
var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
|
||||
|
||||
// this is expected for hydras
|
||||
if (numberOfTriggers == 2 && controllersPerTrigger == 2) {
|
||||
for (var t = 0; t < numberOfTriggers; t++) {
|
||||
var shootABullet = false;
|
||||
var triggerValue = Controller.getTriggerValue(t);
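// Hysteresis on the analog trigger: a shot fires only on a pull past 0.9, and the trigger
// must drop back below 0.1 before it can fire again, so jitter near the threshold does not
// produce a stream of bullets.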
|
||||
|
||||
if (triggerPulled[t]) {
|
||||
// must release to at least 0.1
|
||||
if (triggerValue < 0.1) {
|
||||
triggerPulled[t] = false; // unpulled
|
||||
}
|
||||
} else {
|
||||
// must pull to at least 0.9
|
||||
if (triggerValue > 0.9) {
|
||||
triggerPulled[t] = true; // pulled
|
||||
shootABullet = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (shootABullet) {
|
||||
|
||||
var palmController = t * controllersPerTrigger;
|
||||
var palmPosition = Controller.getSpatialControlPosition(palmController);
|
||||
|
||||
var fingerTipController = palmController + 1;
|
||||
var fingerTipPosition = Controller.getSpatialControlPosition(fingerTipController);
|
||||
|
||||
var palmToFingerTipVector =
|
||||
{ x: (fingerTipPosition.x - palmPosition.x),
|
||||
y: (fingerTipPosition.y - palmPosition.y),
|
||||
z: (fingerTipPosition.z - palmPosition.z) };
|
||||
|
||||
// just off the front of the finger tip
|
||||
var position = { x: fingerTipPosition.x + palmToFingerTipVector.x/2,
|
||||
y: fingerTipPosition.y + palmToFingerTipVector.y/2,
|
||||
z: fingerTipPosition.z + palmToFingerTipVector.z/2};
|
||||
|
||||
var linearVelocity = 25;
|
||||
|
||||
var velocity = { x: palmToFingerTipVector.x * linearVelocity,
|
||||
y: palmToFingerTipVector.y * linearVelocity,
|
||||
z: palmToFingerTipVector.z * linearVelocity };
|
||||
|
||||
shootBullet(position, velocity);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
isMouseDown = true;
|
||||
lastX = event.x;
|
||||
lastY = event.y;
|
||||
//audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
|
||||
//Audio.playSound(loadSound, audioOptions);
|
||||
}
|
||||
|
||||
function shootFromMouse() {
|
||||
var DISTANCE_FROM_CAMERA = 2.0;
|
||||
var camera = Camera.getPosition();
|
||||
var forwardVector = Quat.getFront(Camera.getOrientation());
|
||||
var newPosition = Vec3.sum(camera, Vec3.multiply(forwardVector, DISTANCE_FROM_CAMERA));
|
||||
var velocity = Vec3.multiply(forwardVector, BULLET_VELOCITY);
|
||||
shootBullet(newPosition, velocity);
|
||||
}
|
||||
|
||||
function mouseReleaseEvent(event) {
|
||||
// position
|
||||
isMouseDown = false;
|
||||
}
|
||||
|
||||
function mouseMoveEvent(event) {
|
||||
//Move the camera if LEFT_BUTTON_1 is pressed
|
||||
if (Controller.isButtonPressed(LEFT_BUTTON_1)) {
|
||||
var MOUSE_YAW_SCALE = -0.25;
|
||||
var MOUSE_PITCH_SCALE = -12.5;
|
||||
var FIXED_MOUSE_TIMESTEP = 0.016;
|
||||
yawFromMouse += ((event.x - lastX) * MOUSE_YAW_SCALE * FIXED_MOUSE_TIMESTEP);
|
||||
pitchFromMouse += ((event.y - lastY) * MOUSE_PITCH_SCALE * FIXED_MOUSE_TIMESTEP);
|
||||
lastX = event.x;
|
||||
lastY = event.y;
|
||||
}
|
||||
}
|
||||
|
||||
function scriptEnding() {
|
||||
Overlays.deleteOverlay(reticle);
|
||||
Overlays.deleteOverlay(text);
|
||||
MyAvatar.detachOne(gunModel);
|
||||
}
|
||||
|
||||
Particles.particleCollisionWithVoxel.connect(particleCollisionWithVoxel);
|
||||
Particles.particleCollisionWithParticle.connect(particleCollisionWithParticle);
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
Script.update.connect(update);
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
|
||||
Controller.mouseMoveEvent.connect(mouseMoveEvent);
|
||||
Controller.keyPressEvent.connect(keyPressEvent);
|
||||
|
||||
|
||||
|
23  examples/laserPointer.js  Normal file
@@ -0,0 +1,23 @@
|
|||
//
|
||||
// laserPointer.js
|
||||
// examples
|
||||
//
|
||||
// Created by Clément Brisset on 7/18/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var LEFT = 0;
|
||||
var RIGHT = 1;
|
||||
var LEFT_HAND_FLAG = 1;
|
||||
var RIGHT_HAND_FLAG = 2;
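// The two flags are combined into a 2-bit mask (0..3) once per frame and passed to
// MyAvatar.setHandState(), so the script can signal the left hand, the right hand, both, or neither.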
|
||||
|
||||
function update() {
|
||||
var state = ((Controller.getTriggerValue(LEFT) > 0.9) ? LEFT_HAND_FLAG : 0) +
|
||||
((Controller.getTriggerValue(RIGHT) > 0.9) ? RIGHT_HAND_FLAG : 0);
|
||||
MyAvatar.setHandState(state);
|
||||
}
|
||||
|
||||
Script.update.connect(update);
|
258  examples/leapOfFaith.js  Normal file
@@ -0,0 +1,258 @@
|
|||
//
|
||||
// leapOfFaith.js
|
||||
// examples
|
||||
//
|
||||
// Created by Sam Cake on 6/22/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
var jointList = MyAvatar.getJointNames();
|
||||
var jointMappings = "\n# Joint list start";
|
||||
for (var i = 0; i < jointList.length; i++) {
|
||||
jointMappings = jointMappings + "\njointIndex = " + jointList[i] + " = " + i;
|
||||
}
|
||||
print(jointMappings + "\n# Joint list end");
|
||||
|
||||
function vec3ToString( v ) {
|
||||
return ("(" + v.x +", " + v.y + ", " + v.z + ")" );
|
||||
}
|
||||
function quatToString( q ) {
|
||||
return ("(" + q.x +", " + q.y + ", " + q.z + ", " + q.w + ")" );
|
||||
}
|
||||
|
||||
function printSpatialEvent( label, spatialEvent ) {
|
||||
if ( false ){//label == "RightHandIndex1" ) {
|
||||
var dataString = label + " " +
|
||||
/*vec3ToString( spatialEvent.locTranslation ) + " " +
|
||||
quatToString( spatialEvent.locRotation ) + " " +*/
|
||||
vec3ToString( spatialEvent.absTranslation ) + " " +
|
||||
quatToString( spatialEvent.absRotation );
|
||||
print( dataString );
|
||||
}
|
||||
}
|
||||
|
||||
function avatarToWorldPos( apos ) {
|
||||
|
||||
// apply offset ?
|
||||
var offset = { x: 0, y: 0.5, z: -0.5 };
|
||||
var lpos = Vec3.sum(apos, offset);
|
||||
|
||||
var wpos = Vec3.sum( MyAvatar.position , Vec3.multiplyQbyV(MyAvatar.orientation, lpos) );
|
||||
|
||||
return wpos;
|
||||
}
|
||||
|
||||
function avatarToWorldQuat( aori) {
|
||||
|
||||
var wori = Quat.multiply(MyAvatar.orientation, aori);
|
||||
return wori;
|
||||
}
|
||||
|
||||
function controlerToSkeletonOri( jointName, isRightSide, event ) {
|
||||
|
||||
var qAvatarRootOffset = Quat.angleAxis( -180, {x:0, y:1, z:0});
|
||||
var qAxisOffset = Quat.angleAxis( -( 2 * isRightSide - 1) * 90, {x:0, y:1, z:0});
|
||||
var qAbsJoint = event.absRotation;
|
||||
|
||||
|
||||
return Quat.multiply( qAvatarRootOffset, Quat.multiply( qAbsJoint, qAxisOffset ) );
|
||||
}
|
||||
|
||||
|
||||
var jointParticles = [];
|
||||
function updateJointParticle( joint, pos, ori, look ) {
|
||||
/* print( "debug 1" );
|
||||
var jointID = jointParticles[ joint ];
|
||||
if ( jointID == null ) {
|
||||
print( "debug create " + joint );
|
||||
*/
|
||||
var radius = 0.005* look.r;
|
||||
var ballProperties = {
|
||||
position: pos,
|
||||
velocity: { x: 0, y: 0, z: 0},
|
||||
gravity: { x: 0, y: 0, z: 0 },
|
||||
damping: 0,
|
||||
radius : radius,
|
||||
color: look.c,
|
||||
lifetime: 0.05
|
||||
};
|
||||
var atomPos = Particles.addParticle(ballProperties);
|
||||
|
||||
/* // Zaxis
|
||||
var Zaxis = Vec3.multiply( Quat.getFront( ori ), - 1.5 * radius ) ;
|
||||
ballProperties.position = Vec3.sum(pos, Zaxis );
|
||||
ballProperties.radius = 0.35* radius;
|
||||
ballProperties.color= { red: 255, green: 255, blue: 255 };
|
||||
|
||||
var atomZ = Particles.addParticle(ballProperties);
|
||||
|
||||
var up = Vec3.multiply( Quat.getUp( ori ), 1.5 * radius ) ;
|
||||
ballProperties.position = Vec3.sum(pos, up) ;
|
||||
ballProperties.radius = 0.35* radius;
|
||||
ballProperties.color= { red: 0, green: 255, blue: 0 };
|
||||
|
||||
var atomY = Particles.addParticle(ballProperties);
|
||||
|
||||
var right = Vec3.multiply( Quat.getRight( ori ), 1.5 * radius ) ;
|
||||
ballProperties.position = Vec3.sum(pos, right) ;
|
||||
ballProperties.radius = 0.35* radius;
|
||||
ballProperties.color= { red: 255, green: 0, blue: 225 };
|
||||
|
||||
var atomX = Particles.addParticle(ballProperties);
|
||||
*/
|
||||
// jointParticles[ joint ] = { p: atomPos, x: atomX, y: atomY, z: atomZ };
|
||||
/*
|
||||
} else {
|
||||
//print( "debug update " + joint );
|
||||
|
||||
var p = Particles.getParticleProperties( jointID.p );
|
||||
p.position = pos;
|
||||
// p.lifetime = 1.0;
|
||||
Particles.editParticle( jointID.p, p );
|
||||
|
||||
|
||||
}*/
|
||||
}
|
||||
|
||||
function evalArmBoneLook( isRightSide, bone ) {
|
||||
return { c: { red: (255 * ( 1 - isRightSide )),
|
||||
green: 255 * ( ((bone)) / 2 ),
|
||||
blue: (255 * isRightSide) },
|
||||
r: 3 ,
|
||||
side: isRightSide };
|
||||
}
|
||||
|
||||
function evalFingerBoneLook( isRightSide, finger, bone ) {
|
||||
return { c: { red: (255 * ( 1 - isRightSide )),
|
||||
green: 255 * ( ((bone - 1)) / 3 ),
|
||||
blue: (255 * isRightSide) },
|
||||
r: (5 + (5 - (finger-1))) / 10.0,
|
||||
side: isRightSide };
|
||||
}
|
||||
|
||||
var leapJoints = [
|
||||
|
||||
{ n: "joint_L_elbow", l: evalArmBoneLook( 0, 2) },
|
||||
{ n: "joint_L_hand", l: evalArmBoneLook( 0, 1) },
|
||||
{ n: "joint_L_wrist", l: evalArmBoneLook( 0, 0) },
|
||||
|
||||
{ n: "joint_L_thumb2", l: evalFingerBoneLook( 0, 1, 2) },
|
||||
{ n: "joint_L_thumb3", l: evalFingerBoneLook( 0, 1, 3) },
|
||||
{ n: "joint_L_thumb4", l: evalFingerBoneLook( 0, 1, 4) },
|
||||
|
||||
{ n: "joint_L_index1", l: evalFingerBoneLook( 0, 2, 1) },
|
||||
{ n: "joint_L_index2", l: evalFingerBoneLook( 0, 2, 2) },
|
||||
{ n: "joint_L_index3", l: evalFingerBoneLook( 0, 2, 3) },
|
||||
{ n: "joint_L_index4", l: evalFingerBoneLook( 0, 2, 4) },
|
||||
|
||||
{ n: "joint_L_middle1", l: evalFingerBoneLook( 0, 3, 1) },
|
||||
{ n: "joint_L_middle2", l: evalFingerBoneLook( 0, 3, 2) },
|
||||
{ n: "joint_L_middle3", l: evalFingerBoneLook( 0, 3, 3) },
|
||||
{ n: "joint_L_middle4", l: evalFingerBoneLook( 0, 3, 4) },
|
||||
|
||||
{ n: "joint_L_ring1", l: evalFingerBoneLook( 0, 4, 1) },
|
||||
{ n: "joint_L_ring2", l: evalFingerBoneLook( 0, 4, 2) },
|
||||
{ n: "joint_L_ring3", l: evalFingerBoneLook( 0, 4, 3) },
|
||||
{ n: "joint_L_ring4", l: evalFingerBoneLook( 0, 4, 4) },
|
||||
|
||||
{ n: "joint_L_pinky1", l: evalFingerBoneLook( 0, 5, 1) },
|
||||
{ n: "joint_L_pinky2", l: evalFingerBoneLook( 0, 5, 2) },
|
||||
{ n: "joint_L_pinky3", l: evalFingerBoneLook( 0, 5, 3) },
|
||||
{ n: "joint_L_pinky4", l: evalFingerBoneLook( 0, 5, 4) },
|
||||
|
||||
{ n: "joint_R_elbow", l: evalArmBoneLook( 1, 2) },
|
||||
{ n: "joint_R_hand", l: evalArmBoneLook( 1, 1) },
|
||||
{ n: "joint_R_wrist", l: evalArmBoneLook( 1, 0) },
|
||||
|
||||
{ n: "joint_R_thumb2", l: evalFingerBoneLook( 1, 1, 2) },
|
||||
{ n: "joint_R_thumb3", l: evalFingerBoneLook( 1, 1, 3) },
|
||||
{ n: "joint_R_thumb4", l: evalFingerBoneLook( 1, 1, 4) },
|
||||
|
||||
{ n: "joint_R_index1", l: evalFingerBoneLook( 1, 2, 1) },
|
||||
{ n: "joint_R_index2", l: evalFingerBoneLook( 1, 2, 2) },
|
||||
{ n: "joint_R_index3", l: evalFingerBoneLook( 1, 2, 3) },
|
||||
{ n: "joint_R_index4", l: evalFingerBoneLook( 1, 2, 4) },
|
||||
|
||||
{ n: "joint_R_middle1", l: evalFingerBoneLook( 1, 3, 1) },
|
||||
{ n: "joint_R_middle2", l: evalFingerBoneLook( 1, 3, 2) },
|
||||
{ n: "joint_R_middle3", l: evalFingerBoneLook( 1, 3, 3) },
|
||||
{ n: "joint_R_middle4", l: evalFingerBoneLook( 1, 3, 4) },
|
||||
|
||||
{ n: "joint_R_ring1", l: evalFingerBoneLook( 1, 4, 1) },
|
||||
{ n: "joint_R_ring2", l: evalFingerBoneLook( 1, 4, 2) },
|
||||
{ n: "joint_R_ring3", l: evalFingerBoneLook( 1, 4, 3) },
|
||||
{ n: "joint_R_ring4", l: evalFingerBoneLook( 1, 4, 4) },
|
||||
|
||||
{ n: "joint_R_pinky1", l: evalFingerBoneLook( 1, 5, 1) },
|
||||
{ n: "joint_R_pinky2", l: evalFingerBoneLook( 1, 5, 2) },
|
||||
{ n: "joint_R_pinky3", l: evalFingerBoneLook( 1, 5, 3) },
|
||||
{ n: "joint_R_pinky4", l: evalFingerBoneLook( 1, 5, 4) },
|
||||
|
||||
];
|
||||
|
||||
function onSpatialEventHandler( jointName, look ) {
|
||||
var _jointName = jointName;
|
||||
var _look = look;
|
||||
var _side = look.side;
|
||||
return (function( spatialEvent ) {
|
||||
|
||||
// This should be the call to update the skeleton joint from
|
||||
// the absolute joint transform coming from the Leap controller
|
||||
// We need a new method on MyAvatar which will ultimately call
|
||||
// state.setRotationFromBindFrame(rotation, priority) in
|
||||
// Avatar::simulate=>void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority)
|
||||
|
||||
// MyAvatar.setJointRotationFromBindSpace(_jointName, controlerToSkeletonOri( _jointName, _side, spatialEvent ));
|
||||
|
||||
|
||||
updateJointParticle(_jointName,
|
||||
avatarToWorldPos( spatialEvent.absTranslation ),
|
||||
avatarToWorldQuat( spatialEvent.absRotation ),
|
||||
_look );
|
||||
printSpatialEvent(_jointName, spatialEvent );
|
||||
});
|
||||
}
|
||||
|
||||
var isPullingSpatialData = true;
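// Two ways to consume the Leap data: either connect each controller's spatialEvent signal
// (when isPullingSpatialData is false), or poll every controller once per frame in the
// Script.update handler below and build an equivalent spatialEvent object by hand.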
|
||||
|
||||
var jointControllers = [];
|
||||
for ( i in leapJoints ) {
|
||||
|
||||
print( leapJoints[i].n );
|
||||
// In the current implementation, the Leap Motion is the only "Spatial" device
|
||||
// Each jointTracker is retrieved from the joint name, following the rigging convention
|
||||
var controller = Controller.createInputController( "Spatial", leapJoints[i].n );
|
||||
var handler = onSpatialEventHandler( leapJoints[i].n, leapJoints[i].l );
|
||||
jointControllers.push( { c: controller, h: handler } );
|
||||
|
||||
if ( ! isPullingSpatialData ) {
|
||||
controller.spatialEvent.connect( handler );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Script.update.connect(function(deltaTime) {
|
||||
|
||||
if ( isPullingSpatialData )
|
||||
{
|
||||
for ( i in jointControllers ) {
|
||||
if ( jointControllers[i].c.isActive() ) {
|
||||
var spatialEvent = { absTranslation: jointControllers[i].c.getAbsTranslation(),
|
||||
absRotation: jointControllers[i].c.getAbsRotation(),
|
||||
locTranslation: jointControllers[i].c.getLocTranslation(),
|
||||
locRotation: jointControllers[i].c.getLocRotation() };
|
||||
jointControllers[i].h( spatialEvent );
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Script.scriptEnding.connect(function() {
|
||||
});
|
77  examples/multipleCursorsExample.js  Normal file
@@ -0,0 +1,77 @@
|
|||
//
|
||||
// multipleCursorsExample.js
|
||||
// examples
|
||||
//
|
||||
// Created by Thijs Wenker on 7/23/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example script that demonstrates use of multiple cursors
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var cursors = {};
|
||||
|
||||
function Cursor(event) {
|
||||
this.deviceID = event.deviceID;
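// deviceID identifies the pointing device that produced the event; in the editVoxels.js
// changes above, the Hydra controllers report 1500 and 1501 and anything else falls back
// to a shared third cursor.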
|
||||
|
||||
this.held_buttons = {
|
||||
LEFT: false,
|
||||
MIDDLE: false,
|
||||
RIGHT: false
|
||||
}
|
||||
this.updatePosition = function(event) {
|
||||
this.x = event.x;
|
||||
this.y = event.y;
|
||||
};
|
||||
this.pressEvent = function(event) {
|
||||
if (this.held_buttons[event.button] != undefined) {
|
||||
this.held_buttons[event.button] = true;
|
||||
}
|
||||
};
|
||||
this.releaseEvent = function(event) {
|
||||
if (this.held_buttons[event.button] != undefined) {
|
||||
this.held_buttons[event.button] = false;
|
||||
}
|
||||
};
|
||||
this.updatePosition(event);
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
if (cursors[event.deviceID] == undefined) {
|
||||
cursors[event.deviceID] = new Cursor(event);
|
||||
}
|
||||
cursors[event.deviceID].pressEvent(event);
|
||||
}
|
||||
|
||||
function mouseReleaseEvent(event) {
|
||||
if (cursors[event.deviceID] == undefined) {
|
||||
cursors[event.deviceID] = new Cursor(event);
|
||||
}
|
||||
cursors[event.deviceID].releaseEvent(event);
|
||||
}
|
||||
|
||||
function mouseMoveEvent(event) {
|
||||
if (cursors[event.deviceID] == undefined) {
|
||||
cursors[event.deviceID] = new Cursor(event);
|
||||
} else {
|
||||
cursors[event.deviceID].updatePosition(event);
|
||||
}
|
||||
}
|
||||
|
||||
var lastOutputString = "";
|
||||
function checkCursors() {
|
||||
if(lastOutputString != JSON.stringify(cursors)) {
|
||||
lastOutputString = JSON.stringify(cursors);
|
||||
// outputs a JSON string of all cursors, but only when a change occurred
|
||||
print(lastOutputString);
|
||||
}
|
||||
}
|
||||
|
||||
Script.update.connect(checkCursors);
|
||||
|
||||
// Map the mouse events to our functions
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Controller.mouseMoveEvent.connect(mouseMoveEvent);
|
||||
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
|
|
@@ -90,11 +90,18 @@ Tool = function(properties, selectable, selected) { // selectable and selected a
|
|||
return selected;
|
||||
}
|
||||
this.select = function(doSelect) {
|
||||
if (!selectable) {
|
||||
return;
|
||||
}
|
||||
|
||||
selected = doSelect;
|
||||
properties.subImage.y = (selected ? 2 : 1) * properties.subImage.height;
|
||||
Overlays.editOverlay(this.overlay(), { subImage: properties.subImage });
|
||||
}
|
||||
this.toggle = function() {
|
||||
if (!selectable) {
|
||||
return;
|
||||
}
|
||||
selected = !selected;
|
||||
properties.subImage.y = (selected ? 2 : 1) * properties.subImage.height;
|
||||
Overlays.editOverlay(this.overlay(), { subImage: properties.subImage });
|
||||
|
|
226  examples/voxelsoundwaves.js  Normal file
@@ -0,0 +1,226 @@
|
|||
|
||||
|
||||
|
||||
var wallX = 8000;
|
||||
var wallY = 8000;
|
||||
var wallZ = 8000;//location
|
||||
|
||||
|
||||
var VOXELSIZE=12;//size of each voxel
|
||||
|
||||
var FACTOR = 0.75;
|
||||
|
||||
var loud=1.0;
|
||||
var maxLoud=500;
|
||||
var minLoud=200;//range of loudness
|
||||
|
||||
|
||||
|
||||
var maxB={color:225,direction:0,speed:1};
|
||||
var minB={color:50,direction:1,speed:1};
|
||||
var maxG={color:200,direction:0,speed:1};
|
||||
var minG={color:30,direction:1,speed:1};
|
||||
var maxR={color:255,direction:0,speed:1};
|
||||
var minR={color:150,direction:1,speed:1};//color objects
|
||||
var addVoxArray=[];
|
||||
var removeVoxArray=[];
|
||||
var numAddVox=0;
|
||||
var numRemoveVox=0;//array for voxels removed and added
|
||||
|
||||
|
||||
var height;
|
||||
var wallWidth=34;
|
||||
var wallHeight=25;
|
||||
var maxHeight=wallHeight;
|
||||
var minHeight=0;//properties of wall
|
||||
|
||||
|
||||
var heightSamplesArray=[];
|
||||
var sampleIndex=0;//declare new array of heights
|
||||
|
||||
var direction=1;
|
||||
|
||||
|
||||
|
||||
|
||||
//initialize and fill the array of heights
|
||||
for(var k=0;k<wallWidth;k++)
|
||||
{
|
||||
heightSamplesArray[k]=0;
|
||||
}
|
||||
|
||||
|
||||
//send objects to function changeColor
|
||||
function scratch()
|
||||
{
|
||||
|
||||
|
||||
changeColor(maxB);
|
||||
changeColor(minB);
|
||||
changeColor(maxG);
|
||||
changeColor(minG);
|
||||
changeColor(maxR);
|
||||
changeColor(minR);
|
||||
|
||||
//calculates loudness
|
||||
var audioAverageLoudness = MyAvatar.audioAverageLoudness * FACTOR;
|
||||
|
||||
|
||||
loud = Math.log(audioAverageLoudness) / 5.0 * 255.0;
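//map the log-scaled loudness onto a 0..1 factor between minLoud and maxLoud (clamped below),
//then use that factor to blend each color channel between its min and max shade and to pick
//the column height for this audio sample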
|
||||
|
||||
print("loud="+ loud);
|
||||
|
||||
|
||||
var scalingfactor=(loud-minLoud)/(maxLoud-minLoud);
|
||||
if(scalingfactor<0)
|
||||
{
|
||||
scalingfactor=0;
|
||||
}
|
||||
if(scalingfactor>1)
|
||||
{
|
||||
scalingfactor=1;
|
||||
}
|
||||
|
||||
//creates different shades for different levels of volume
|
||||
|
||||
var green=(maxG.color-minG.color)*scalingfactor+minG.color;
|
||||
var blue=(maxB.color-minB.color)*scalingfactor+minB.color;
|
||||
var red=(maxR.color-minR.color)*scalingfactor+minR.color;
|
||||
height=(maxHeight-minHeight)*scalingfactor+minHeight;
|
||||
|
||||
|
||||
//sets height at position sampleIndex
|
||||
heightSamplesArray[sampleIndex]=height;
|
||||
|
||||
|
||||
|
||||
|
||||
if(loud==Number.NEGATIVE_INFINITY)
|
||||
{
|
||||
green=minG.color;
|
||||
blue=minB.color;
|
||||
red=minR.color;
|
||||
|
||||
}
|
||||
|
||||
|
||||
var k=sampleIndex;
|
||||
|
||||
//add&remove voxels
|
||||
|
||||
for(var i=wallWidth-1;i>=0;i--)
|
||||
{
|
||||
|
||||
for(var j=0;j<wallHeight;j++)
|
||||
|
||||
{
|
||||
if(j<=heightSamplesArray[k])
|
||||
{
|
||||
addVoxArray[numAddVox]={x:wallX+i*VOXELSIZE, y:wallY+j*VOXELSIZE, z:wallZ};
|
||||
|
||||
numAddVox++;
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
removeVoxArray[numRemoveVox]={x:wallX+i*VOXELSIZE, y:wallY+j*VOXELSIZE, z:wallZ};
|
||||
|
||||
numRemoveVox++;
|
||||
}
|
||||
|
||||
}
|
||||
k--;
|
||||
if(k<0)
|
||||
{
|
||||
k=wallWidth-1;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for(var k=0;k<numAddVox;k++)
|
||||
{
|
||||
Voxels.setVoxel(addVoxArray[k].x,addVoxArray[k].y,addVoxArray[k].z,VOXELSIZE, red, green, blue);
|
||||
}
|
||||
|
||||
for(var k=0;k<numRemoveVox;k++)
|
||||
{
|
||||
Voxels.eraseVoxel(removeVoxArray[k].x,removeVoxArray[k].y,removeVoxArray[k].z,VOXELSIZE);
|
||||
}
|
||||
|
||||
numAddVox=0;
|
||||
numRemoveVox=0;
|
||||
|
||||
sampleIndex++;
|
||||
if(sampleIndex>=wallWidth)
|
||||
{
|
||||
sampleIndex=0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
//color properties (shade, direction, speed)
|
||||
|
||||
function changeColor(color)
|
||||
{
|
||||
|
||||
if (color.direction==1)
|
||||
{
|
||||
if(color.color<255)
|
||||
{
|
||||
color.color+=(color.speed);
|
||||
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
color.direction=0;
|
||||
}
|
||||
|
||||
}
|
||||
else if(color.direction==0)
|
||||
{
|
||||
if(color.color>0)
|
||||
{
|
||||
color.color-=(color.speed);
|
||||
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
color.direction=1;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Script.update.connect(scratch);
|
||||
Voxels.setPacketsPerSecond(20000);
|
||||
|
103 examples/voxelwall.js Normal file
@ -0,0 +1,103 @@
var wallX = 700;
var wallY = 700;
var wallZ = 700; //location

var VOXELSIZE = 8;
var red = 225;
var blue = 0;
var green = 0; //color brightness
var currentStep = 0; //counting number of steps
var direction = 1; //left to right color change
var height = 8;
var width = 8;

function step() {
    currentStep++;
    if (currentStep > 6000) { //how long it will run
        Script.stop();
    }

    for (var i = 0; i < width; i++) {
        for (var j = 0; j < height; j++) {
            Voxels.setVoxel(wallX + i * VOXELSIZE, wallY + j * VOXELSIZE, wallZ, VOXELSIZE, red, green, blue);
        }
    }

    if (direction == 1) {
        if (blue < 255) {
            blue++;
            red--;
        } else {
            direction = 0;
        }
    } else if (direction == 0) {
        if (blue > 0) {
            blue--;
            red++;
        } else {
            direction = 1;
        }
    }
}

Script.update.connect(step);
Voxels.setPacketsPerSecond(20000);
BIN images/bg_hr.png (Normal file, binary not shown, 943 B)
BIN images/blacktocat.png (Normal file, binary not shown, 1.4 KiB)
BIN images/body-bg.png (Normal file, binary not shown, 8.7 KiB)
BIN images/highlight-bg.jpg (Normal file, binary not shown, 33 KiB)
BIN images/hr.png (Normal file, binary not shown, 1 KiB)
BIN images/icon_download.png (Normal file, binary not shown, 1.1 KiB)
BIN images/octocat-icon.png (Normal file, binary not shown, 1.6 KiB)
BIN images/sprite_download.png (Normal file, binary not shown, 16 KiB)
BIN images/tar-gz-icon.png (Normal file, binary not shown, 1.6 KiB)
BIN images/zip-icon.png (Normal file, binary not shown, 1.6 KiB)
52 index.html Normal file
@ -0,0 +1,52 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="chrome=1">
<title>Hifi by highfidelity</title>

<link rel="stylesheet" href="stylesheets/styles.css">
<link rel="stylesheet" href="stylesheets/pygment_trac.css">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<!--[if lt IE 9]>
<script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
</head>
<body>
<div class="wrapper">
<header>
<h1>Hifi</h1>
<p>Open, decentralized virtual worlds using sensors to control avatars and dynamically assigned devices as servers. San Francisco based startup, we are hiring: http://highfidelity.io/jobs You can also contribute by doing jobs listed at http://worklist.net -</p>

<p class="view"><a href="https://github.com/highfidelity/hifi">View the Project on GitHub <small>highfidelity/hifi</small></a></p>

<ul>
<li><a href="https://github.com/highfidelity/hifi/zipball/master">Download <strong>ZIP File</strong></a></li>
<li><a href="https://github.com/highfidelity/hifi/tarball/master">Download <strong>TAR Ball</strong></a></li>
<li><a href="https://github.com/highfidelity/hifi">View On <strong>GitHub</strong></a></li>
</ul>
</header>
<section>
<h3>
<a name="avatar-documentation" class="anchor" href="#avatar-documentation"><span class="octicon octicon-link"></span></a>Avatar Documentation</h3>

<ul>
<li><a href="https://github.com/highfidelity/hifi/wiki/Exporting-Your-Rigged-Avatar-From-Faceshift">Exporting your Rigged Avatar from Faceshift</a></li>
<li><a href="https://github.com/highfidelity/hifi/wiki/Creating-Blendshapes-for-your-Avatar">Creating Blendshapes for your Avatar</a></li>
<li><a href="https://github.com/highfidelity/hifi/wiki/How-to-Rig-a-Character-for-Faceshift">How to Rig a Character for Faceshift</a></li>
<li><a href="https://github.com/highfidelity/hifi/wiki/Naming-Your-Skeletal-Joints">Naming your Skeletal Joints</a></li>
<li><a href="https://github.com/highfidelity/hifi/wiki/The-FST-file">The FST File</a></li>
<li><a href="https://github.com/highfidelity/hifi/wiki/Training-in-Faceshift">Training in Faceshift</a></li>
<li><a href="https://github.com/highfidelity/hifi/wiki/Uploading-Your-Models">Uploading your Models</a></li>
</ul>
</section>
<footer>
<p>This project is maintained by <a href="https://github.com/highfidelity">highfidelity</a></p>
<p><small>Hosted on GitHub Pages — Theme by <a href="https://github.com/orderedlist">orderedlist</a></small></p>
</footer>
</div>
<script src="javascripts/scale.fix.js"></script>

</body>
</html>
@ -12,13 +12,15 @@ project(${TARGET_NAME})
# setup for find modules
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake/modules/")
set(FACEPLUS_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/faceplus")
set(FACESHIFT_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/faceshift")
set(LIBOVR_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/oculus")
set(PRIOVR_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/priovr")
set(SIXENSE_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/Sixense")
set(VISAGE_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/visage")
set(RTMIDI_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/rtmidi")

# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "faceplus" "faceshift" "oculus" "priovr" "sixense" "visage" "leapmotion" "rtmidi" "qxmpp")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
  string(TOUPPER ${EXTERNAL} UPPER_EXTERNAL)
  if (NOT ${UPPER_EXTERNAL}_ROOT_DIR)
    set(${UPPER_EXTERNAL}_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/${EXTERNAL}")
  endif ()
endforeach()

find_package(Qt5LinguistTools REQUIRED)
find_package(Qt5LinguistToolsMacros)
@ -56,19 +58,6 @@ foreach(SUBDIR avatar devices renderer ui starfield location scripting voxels pa
  set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}")
endforeach(SUBDIR)

# windows also includes the faceshift externals, because using a lib doesn't work due to debug/release mismatch
if (WIN32)
  set(EXTERNAL_SOURCE_SUBDIRS "faceshift")
endif ()

foreach(EXTERNAL_SOURCE_SUBDIR ${EXTERNAL_SOURCE_SUBDIRS})
  file(GLOB_RECURSE SUBDIR_SRCS
    "external/${EXTERNAL_SOURCE_SUBDIR}/src/*.cpp"
    "external/${EXTERNAL_SOURCE_SUBDIR}/src/*.c"
    "external/${EXTERNAL_SOURCE_SUBDIR}/src/*.h")
  set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}")
endforeach(EXTERNAL_SOURCE_SUBDIR)

find_package(Qt5 COMPONENTS Core Gui Multimedia Network OpenGL Script Svg WebKit WebKitWidgets Xml UiTools)

# grab the ui files in resources/ui
@ -131,7 +120,7 @@ link_hifi_library(audio ${TARGET_NAME} "${ROOT_DIR}")
link_hifi_library(animation ${TARGET_NAME} "${ROOT_DIR}")
link_hifi_library(script-engine ${TARGET_NAME} "${ROOT_DIR}")

# find any optional libraries
# find any optional and required libraries
find_package(Faceplus)
find_package(Faceshift)
find_package(LibOVR)
@ -139,106 +128,72 @@ find_package(PrioVR)
find_package(SDL)
find_package(Sixense)
find_package(Visage)
find_package(LeapMotion)
find_package(ZLIB)
find_package(Qxmpp)
find_package(RtMidi)
find_package(OpenSSL REQUIRED)

# include the Sixense library for Razer Hydra if available
if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
add_definitions(-DHAVE_SIXENSE)
include_directories(SYSTEM "${SIXENSE_INCLUDE_DIRS}")
if (APPLE OR UNIX)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${SIXENSE_INCLUDE_DIRS}")
endif (APPLE OR UNIX)
target_link_libraries(${TARGET_NAME} "${SIXENSE_LIBRARIES}")
endif (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)

# likewise with Visage library for webcam feature tracking
if (VISAGE_FOUND AND NOT DISABLE_VISAGE)
add_definitions(-DHAVE_VISAGE -DVISAGE_STATIC)
include_directories(SYSTEM "${VISAGE_INCLUDE_DIRS}")
if (APPLE)
add_definitions(-DMAC_OS_X)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-comment")
include_directories(SYSTEM "${VISAGE_INCLUDE_DIRS}")
find_library(AVFoundation AVFoundation)
find_library(CoreMedia CoreMedia)
find_library(NEW_STD_LIBRARY libc++.dylib /usr/lib/)
target_link_libraries(${TARGET_NAME} ${AVFoundation} ${CoreMedia} ${NEW_STD_LIBRARY})
endif (APPLE)
target_link_libraries(${TARGET_NAME} "${VISAGE_LIBRARIES}")
endif (VISAGE_FOUND AND NOT DISABLE_VISAGE)

# and with Faceplus library, also for webcam feature tracking
if (FACEPLUS_FOUND AND NOT DISABLE_FACEPLUS)
add_definitions(-DHAVE_FACEPLUS)
include_directories(SYSTEM "${FACEPLUS_INCLUDE_DIRS}")
target_link_libraries(${TARGET_NAME} "${FACEPLUS_LIBRARIES}")
endif (FACEPLUS_FOUND AND NOT DISABLE_FACEPLUS)

# and with LibOVR for Oculus Rift
if (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)
add_definitions(-DHAVE_LIBOVR)
include_directories(SYSTEM "${LIBOVR_INCLUDE_DIRS}")

if (APPLE OR UNIX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${LIBOVR_INCLUDE_DIRS}")
endif ()

if (WIN32)
target_link_libraries(${TARGET_NAME} optimized "${LIBOVR_RELEASE_LIBRARIES}" debug "${LIBOVR_DEBUG_LIBRARIES}")
else ()
target_link_libraries(${TARGET_NAME} "${LIBOVR_LIBRARIES}")
endif ()
endif (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)

# and with PrioVR library
if (PRIOVR_FOUND AND NOT DISABLE_PRIOVR)
add_definitions(-DHAVE_PRIOVR)
include_directories(SYSTEM "${PRIOVR_INCLUDE_DIRS}")
target_link_libraries(${TARGET_NAME} "${PRIOVR_LIBRARIES}")
endif (PRIOVR_FOUND AND NOT DISABLE_PRIOVR)

# and with SDL for joysticks
if (SDL_FOUND AND NOT DISABLE_SDL)
add_definitions(-DHAVE_SDL)
include_directories(SYSTEM "${SDL_INCLUDE_DIR}")
target_link_libraries(${TARGET_NAME} "${SDL_LIBRARY}")
endif (SDL_FOUND AND NOT DISABLE_SDL)

# and with qxmpp for chat
if (QXMPP_FOUND AND NOT DISABLE_QXMPP)
add_definitions(-DHAVE_QXMPP -DQXMPP_STATIC)
include_directories(SYSTEM ${QXMPP_INCLUDE_DIR})

target_link_libraries(${TARGET_NAME} "${QXMPP_LIBRARY}")
endif (QXMPP_FOUND AND NOT DISABLE_QXMPP)

# and with RtMidi for RtMidi control
if (RTMIDI_FOUND AND NOT DISABLE_RTMIDI)
# perform standard include and linking for found externals
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} UPPER_EXTERNAL)

add_definitions(-DHAVE_RTMIDI)
include_directories(SYSTEM ${RTMIDI_INCLUDE_DIR})
target_link_libraries(${TARGET_NAME} "${RTMIDI_LIBRARY}")
if (${UPPER_EXTERNAL} MATCHES "OCULUS")
# the oculus directory is named OCULUS and not LIBOVR so hack to fix that here
set(UPPER_EXTERNAL "LIBOVR")
endif ()

if (${UPPER_EXTERNAL}_FOUND AND NOT DISABLE_${UPPER_EXTERNAL})
add_definitions(-DHAVE_${UPPER_EXTERNAL})

# include the library directories (ignoring warnings)
include_directories(SYSTEM ${${UPPER_EXTERNAL}_INCLUDE_DIRS})

# perform the system include hack for OS X to ignore warnings
if (APPLE)
find_library(CoreMIDI CoreMIDI)
add_definitions(-D__MACOSX_CORE__)
target_link_libraries(${TARGET_NAME} ${CoreMIDI})
endif()
endif()
foreach(EXTERNAL_INCLUDE_DIR ${${UPPER_EXTERNAL}_INCLUDE_DIRS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${EXTERNAL_INCLUDE_DIR}")
endforeach()
endif ()

target_link_libraries(${TARGET_NAME} ${${UPPER_EXTERNAL}_LIBRARIES})

endif ()
endforeach()

# special APPLE modifications for Visage library
if (VISAGE_FOUND AND NOT DISABLE_VISAGE AND APPLE)
add_definitions(-DMAC_OS_X)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-comment")
find_library(AVFoundation AVFoundation)
find_library(CoreMedia CoreMedia)
find_library(NEW_STD_LIBRARY libc++.dylib /usr/lib/)
target_link_libraries(${TARGET_NAME} ${AVFoundation} ${CoreMedia} ${NEW_STD_LIBRARY})
endif ()

# special OS X modifications for RtMidi library
if (RTMIDI_FOUND AND NOT DISABLE_RTMIDI AND APPLE)
find_library(CoreMIDI CoreMIDI)
add_definitions(-D__MACOSX_CORE__)
target_link_libraries(${TARGET_NAME} ${CoreMIDI})
endif ()

if (QXMPP_FOUND AND NOT DISABLE_QXMPP AND WIN32)
# assume we're linking a static Qt on windows
add_definitions(-DQXMPP_STATIC)
endif ()

# include headers for interface and InterfaceConfig.
include_directories("${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}/includes")

# include external library headers
# use system flag so warnings are suppressed
include_directories(SYSTEM "${FACESHIFT_INCLUDE_DIRS}")
include_directories(SYSTEM "${OPENSSL_INCLUDE_DIR}")

target_link_libraries(
${TARGET_NAME}
"${FACESHIFT_LIBRARIES}"
"${ZLIB_LIBRARIES}"
${OPENSSL_LIBRARIES}
Qt5::Core Qt5::Gui Qt5::Multimedia Qt5::Network Qt5::OpenGL
Qt5::Script Qt5::Svg Qt5::WebKit Qt5::WebKitWidgets Qt5::Xml Qt5::UiTools
)
@ -289,7 +244,7 @@ else (APPLE)
find_package(OpenGL REQUIRED)
find_package(GLUT REQUIRED)

include_directories(SYSTEM "${GLUT_INCLUDE_DIR}")
include_directories(SYSTEM "${GLUT_INCLUDE_DIRS}")

if (${OPENGL_INCLUDE_DIR})
include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
@ -308,6 +263,6 @@ else (APPLE)
# add a definition for ssize_t so that windows doesn't bail
add_definitions(-Dssize_t=long)

target_link_libraries(${TARGET_NAME} "${GLEW_LIBRARY}" wsock32.lib opengl32.lib)
target_link_libraries(${TARGET_NAME} "${GLEW_LIBRARIES}" wsock32.lib opengl32.lib)
endif()
endif (APPLE)
11 interface/external/faceshift/CMakeLists.txt vendored
@ -1,11 +0,0 @@
cmake_minimum_required(VERSION 2.8)

set(TARGET_NAME faceshift)
project(${TARGET_NAME})

# grab the implementation and header files
file(GLOB FACESHIFT_SRCS include/*.h src/*.cpp)

include_directories(include)

add_library(${TARGET_NAME} "${FACESHIFT_SRCS}")
@ -1,410 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
#ifndef FSBINARYSTREAM_H
|
||||
#define FSBINARYSTREAM_H
|
||||
|
||||
// ==========================================================================
|
||||
// Copyright (C) 2012 faceshift AG, and/or its licensors. All rights reserved.
|
||||
//
|
||||
// the software is free to use and provided "as is", without warranty of any kind.
|
||||
// faceshift AG does not make and hereby disclaims any express or implied
|
||||
// warranties including, but not limited to, the warranties of
|
||||
// non-infringement, merchantability or fitness for a particular purpose,
|
||||
// or arising from a course of dealing, usage, or trade practice. in no
|
||||
// event will faceshift AG and/or its licensors be liable for any lost
|
||||
// revenues, data, or profits, or special, direct, indirect, or
|
||||
// consequential damages, even if faceshift AG and/or its licensors has
|
||||
// been advised of the possibility or probability of such damages.
|
||||
// ==========================================================================
|
||||
|
||||
|
||||
/**
|
||||
* Define the HAVE_EIGEN preprocessor define, if you are using the Eigen library, it allows you to easily convert our tracked data from and to eigen
|
||||
* See fsVector3f and fsQuaternionf for more details
|
||||
**/
|
||||
|
||||
#ifdef HAVE_EIGEN
|
||||
#include <Eigen/Core>
|
||||
#include <Eigen/Geometry>
|
||||
#endif
|
||||
|
||||
#ifdef _MSC_VER
|
||||
#include <memory>
|
||||
#else
|
||||
#include <tr1/memory>
|
||||
#endif
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <stdint.h>
|
||||
|
||||
/*******************************************************************************************
|
||||
* This first part of the file contains a definition of the datastructures holding the
|
||||
* tracking results
|
||||
******************************************************************************************/
|
||||
|
||||
namespace fs {
|
||||
|
||||
/**
|
||||
* A floating point three-vector.
|
||||
*
|
||||
* To keep these networking classes as simple as possible, we do not implement the
|
||||
* vector semantics here, use Eigen for that purpose. The class just holds three named floats,
|
||||
* and you have to interpret them yourself.
|
||||
**/
|
||||
struct fsVector3f {
|
||||
float x,y,z;
|
||||
|
||||
fsVector3f() {}
|
||||
#ifdef HAVE_EIGEN
|
||||
explicit fsVector3f(const Eigen::Matrix<float,3,1> &v) : x(v[0]), y(v[1]), z(v[2]) {}
|
||||
Eigen::Map< Eigen::Matrix<float,3,1> > eigen() const { return Eigen::Map<Eigen::Matrix<float,3,1> >((float*)this); }
|
||||
#endif
|
||||
};
|
||||
|
||||
/**
|
||||
* An integer three-vector.
|
||||
**/
|
||||
struct fsVector3i {
|
||||
int32_t x,y,z;
|
||||
|
||||
fsVector3i() {}
|
||||
#ifdef HAVE_EIGEN
|
||||
explicit fsVector3i(const Eigen::Matrix<int32_t,3,1> &v) : x(v[0]), y(v[1]), z(v[2]) {}
|
||||
Eigen::Map<Eigen::Matrix<int32_t,3,1> > eigen() const { return Eigen::Map<Eigen::Matrix<int32_t,3,1> >((int32_t*)this); }
|
||||
#endif
|
||||
};
|
||||
|
||||
/**
|
||||
* An integer four-vector.
|
||||
**/
|
||||
struct fsVector4i {
|
||||
int32_t x,y,z,w;
|
||||
|
||||
fsVector4i() {}
|
||||
#ifdef HAVE_EIGEN
|
||||
explicit fsVector4i(const Eigen::Matrix<int32_t,4,1> &v) : x(v[0]), y(v[1]), z(v[2]), w(v[3]) {}
|
||||
Eigen::Map<Eigen::Matrix<int32_t,4,1,Eigen::DontAlign> > eigen() const { return Eigen::Map<Eigen::Matrix<int32_t,4,1,Eigen::DontAlign> >((int32_t*)this); }
|
||||
#endif
|
||||
};
|
||||
|
||||
/**
|
||||
* Structure holding the data of a quaternion.
|
||||
*
|
||||
* To keep these networking classes as simple as possible, we do not implement the
|
||||
* quaternion semantics here. The class just holds four named floats, and you have to interpret them yourself.
|
||||
*
|
||||
* If you have Eigen you can just cast this class to an Eigen::Quaternionf and use it.
|
||||
*
|
||||
* The quaternion is defined as w+xi+yj+zk
|
||||
**/
|
||||
struct fsQuaternionf {
|
||||
float x,y,z,w;
|
||||
|
||||
fsQuaternionf() {}
|
||||
#ifdef HAVE_EIGEN
|
||||
explicit fsQuaternionf(const Eigen::Quaternionf &q) : x(q.x()), y(q.y()), z(q.z()), w(q.w()) {}
|
||||
Eigen::Quaternionf eigen() const { return Eigen::Quaternionf(w,x,y,z); }
|
||||
#endif
|
||||
};
|
||||
|
||||
/**
|
||||
* A structure containing the data tracked for a single frame.
|
||||
**/
|
||||
class fsTrackingData {
|
||||
public:
|
||||
//! time stamp in ms
|
||||
double m_timestamp;
|
||||
|
||||
//! flag whether tracking was successful [0,1]
|
||||
bool m_trackingSuccessful;
|
||||
|
||||
//! head pose
|
||||
fsQuaternionf m_headRotation;
|
||||
fsVector3f m_headTranslation;
|
||||
|
||||
//! eye gaze in degrees
|
||||
float m_eyeGazeLeftPitch;
|
||||
float m_eyeGazeLeftYaw;
|
||||
float m_eyeGazeRightPitch;
|
||||
float m_eyeGazeRightYaw;
|
||||
|
||||
//! blendshape coefficients
|
||||
std::vector<float> m_coeffs;
|
||||
|
||||
//! marker positions - format specified in faceshift
|
||||
std::vector< fsVector3f > m_markers;
|
||||
};
|
||||
|
||||
/**
|
||||
* A structure containing vertex information
|
||||
*/
|
||||
class fsVertexData {
|
||||
public:
|
||||
//! vertex data
|
||||
std::vector<fsVector3f> m_vertices;
|
||||
|
||||
#ifdef HAVE_EIGEN
|
||||
Eigen::Map<Eigen::Matrix<float,3,Eigen::Dynamic> > eigen() { return Eigen::Map<Eigen::Matrix<float,3,Eigen::Dynamic> >((float*)m_vertices.data(),3,m_vertices.size()); }
|
||||
#endif
|
||||
};
|
||||
|
||||
/**
|
||||
* A structure containing mesh information
|
||||
*/
|
||||
class fsMeshData {
|
||||
public:
|
||||
//! topology (quads)
|
||||
std::vector<fsVector4i> m_quads;
|
||||
|
||||
//! topology (triangles)
|
||||
std::vector<fsVector3i> m_tris;
|
||||
|
||||
//! vertex data
|
||||
fsVertexData m_vertex_data;
|
||||
|
||||
#ifdef HAVE_EIGEN
|
||||
Eigen::Map<Eigen::Matrix<int32_t,4,Eigen::Dynamic,Eigen::DontAlign> > quads_eigen() { return Eigen::Map<Eigen::Matrix<int32_t,4,Eigen::Dynamic,Eigen::DontAlign> >((int32_t*)m_quads.data(),4,m_quads.size()); }
|
||||
Eigen::Map<Eigen::Matrix<int32_t,3,Eigen::Dynamic> > tris_eigen() { return Eigen::Map<Eigen::Matrix<int32_t,3,Eigen::Dynamic> >((int32_t*)m_tris.data(),3,m_tris.size()); }
|
||||
Eigen::Map<Eigen::Matrix<float,3,Eigen::Dynamic> > vertices_eigen() { return m_vertex_data.eigen(); }
|
||||
#endif
|
||||
|
||||
};
|
||||
|
||||
/*******************************************************************************************
|
||||
* Now follows a definition of datastructures encapsulating the network messages
|
||||
******************************************************************************************/
|
||||
|
||||
/** Predeclaration of the message types available in faceshift **/
|
||||
|
||||
// Inbound
|
||||
class fsMsgStartCapturing;
|
||||
class fsMsgStopCapturing;
|
||||
class fsMsgCalibrateNeutral;
|
||||
class fsMsgSendMarkerNames;
|
||||
class fsMsgSendBlendshapeNames;
|
||||
class fsMsgSendRig;
|
||||
|
||||
// Outbound
|
||||
class fsMsgTrackingState;
|
||||
class fsMsgMarkerNames;
|
||||
class fsMsgBlendshapeNames;
|
||||
class fsMsgRig;
|
||||
|
||||
/**
|
||||
* Base class of all message that faceshift is sending.
|
||||
* A class can be queried for its type, using the id() function for use in a switch statement, or by using a dynamic_cast.
|
||||
**/
|
||||
class fsMsg {
|
||||
public:
|
||||
virtual ~fsMsg() {}
|
||||
|
||||
enum MessageType {
|
||||
// Messages to control faceshift via the network
|
||||
// These are sent from the client to faceshift
|
||||
MSG_IN_START_TRACKING = 44344,
|
||||
MSG_IN_STOP_TRACKING = 44444,
|
||||
MSG_IN_CALIBRATE_NEUTRAL = 44544,
|
||||
MSG_IN_SEND_MARKER_NAMES = 44644,
|
||||
MSG_IN_SEND_BLENDSHAPE_NAMES = 44744,
|
||||
MSG_IN_SEND_RIG = 44844,
|
||||
MSG_IN_HEADPOSE_RELATIVE = 44944,
|
||||
MSG_IN_HEADPOSE_ABSOLUTE = 44945,
|
||||
|
||||
// Messages containing tracking information
|
||||
// These are sent from faceshift to the client application
|
||||
MSG_OUT_TRACKING_STATE = 33433,
|
||||
MSG_OUT_MARKER_NAMES = 33533,
|
||||
MSG_OUT_BLENDSHAPE_NAMES = 33633,
|
||||
MSG_OUT_RIG = 33733
|
||||
};
|
||||
|
||||
virtual MessageType id() const = 0;
|
||||
};
|
||||
typedef std::tr1::shared_ptr<fsMsg> fsMsgPtr;
|
||||
|
||||
|
||||
/*************
|
||||
* Inbound
|
||||
***********/
|
||||
class fsMsgStartCapturing : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgStartCapturing() {}
|
||||
virtual MessageType id() const { return MSG_IN_START_TRACKING; }
|
||||
};
|
||||
class fsMsgStopCapturing : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgStopCapturing() {}
|
||||
virtual MessageType id() const { return MSG_IN_STOP_TRACKING; }
|
||||
};
|
||||
class fsMsgCalibrateNeutral : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgCalibrateNeutral() {}
|
||||
virtual MessageType id() const { return MSG_IN_CALIBRATE_NEUTRAL; }
|
||||
};
|
||||
class fsMsgSendMarkerNames : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgSendMarkerNames() {}
|
||||
virtual MessageType id() const { return MSG_IN_SEND_MARKER_NAMES; }
|
||||
};
|
||||
class fsMsgSendBlendshapeNames : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgSendBlendshapeNames() {}
|
||||
virtual MessageType id() const { return MSG_IN_SEND_BLENDSHAPE_NAMES; }
|
||||
};
|
||||
class fsMsgSendRig : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgSendRig() {}
|
||||
virtual MessageType id() const { return MSG_IN_SEND_RIG; }
|
||||
};
|
||||
class fsMsgHeadPoseRelative : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgHeadPoseRelative() {}
|
||||
virtual MessageType id() const { return MSG_IN_HEADPOSE_RELATIVE; }
|
||||
};
|
||||
class fsMsgHeadPoseAbsolute : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgHeadPoseAbsolute() {}
|
||||
virtual MessageType id() const { return MSG_IN_HEADPOSE_ABSOLUTE; }
|
||||
};
|
||||
|
||||
/*************
|
||||
* Outbound
|
||||
***********/
|
||||
class fsMsgTrackingState : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgTrackingState() {}
|
||||
|
||||
/* */ fsTrackingData & tracking_data() /* */ { return m_tracking_data; }
|
||||
const fsTrackingData & tracking_data() const { return m_tracking_data; }
|
||||
|
||||
virtual MessageType id() const { return MSG_OUT_TRACKING_STATE; }
|
||||
|
||||
private:
|
||||
fsTrackingData m_tracking_data;
|
||||
};
|
||||
class fsMsgMarkerNames : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgMarkerNames() {}
|
||||
|
||||
/* */ std::vector<std::string> & marker_names() /* */ { return m_marker_names; }
|
||||
const std::vector<std::string> & marker_names() const { return m_marker_names; }
|
||||
|
||||
virtual MessageType id() const { return MSG_OUT_MARKER_NAMES; }
|
||||
private:
|
||||
std::vector<std::string> m_marker_names;
|
||||
};
|
||||
class fsMsgBlendshapeNames : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgBlendshapeNames() {}
|
||||
|
||||
/* */ std::vector<std::string> & blendshape_names() /* */ { return m_blendshape_names; }
|
||||
const std::vector<std::string> & blendshape_names() const { return m_blendshape_names; }
|
||||
|
||||
virtual MessageType id() const { return MSG_OUT_BLENDSHAPE_NAMES; }
|
||||
private:
|
||||
std::vector<std::string> m_blendshape_names;
|
||||
};
|
||||
class fsMsgRig : public fsMsg {
|
||||
public:
|
||||
virtual ~fsMsgRig() {}
|
||||
|
||||
virtual MessageType id() const { return MSG_OUT_RIG; }
|
||||
|
||||
/* */ fsMeshData & mesh() /* */ { return m_mesh; }
|
||||
const fsMeshData & mesh() const { return m_mesh; }
|
||||
|
||||
/* */ std::vector<std::string> & blendshape_names() /* */ { return m_blendshape_names; }
|
||||
const std::vector<std::string> & blendshape_names() const { return m_blendshape_names; }
|
||||
|
||||
/* */ std::vector<fsVertexData> & blendshapes() /* */ { return m_blendshapes; }
|
||||
const std::vector<fsVertexData> & blendshapes() const { return m_blendshapes; }
|
||||
|
||||
private:
|
||||
//! neutral mesh
|
||||
fsMeshData m_mesh;
|
||||
//! blendshape names
|
||||
std::vector<std::string> m_blendshape_names;
|
||||
//! blendshapes
|
||||
std::vector<fsVertexData> m_blendshapes;
|
||||
};
|
||||
class fsMsgSignal : public fsMsg {
|
||||
MessageType m_id;
|
||||
public:
|
||||
explicit fsMsgSignal(MessageType id) : m_id(id) {}
|
||||
virtual ~fsMsgSignal() {}
|
||||
virtual MessageType id() const { return m_id; }
|
||||
};
|
||||
|
||||
/**
|
||||
* Class to parse a faceshift data stream, and to create message to write into such a stream
|
||||
*
|
||||
* This needs to be connected with your networking methods by calling
|
||||
*
|
||||
* void received(int, const char *);
|
||||
*
|
||||
* whenever new data is available. After adding received data to the parser you can parse faceshift messages using the
|
||||
*
|
||||
* std::tr1::shared_ptr<fsMsg> get_message();
|
||||
*
|
||||
* to get the next message, if a full block of data has been received. This should be iterated until no more messages are in the buffer.
|
||||
*
|
||||
* You can also use this to encode messages to send back to faceshift. This works by calling the
|
||||
*
|
||||
* void encode_message(std::string &msg_out, const fsMsg &msg);
|
||||
*
|
||||
* methods (actually the specializations existing for each of our message types). This will encode the message into a
|
||||
* binary string in msg_out. You then only need to push the resulting string over the network to faceshift.
|
||||
*
|
||||
* This class does not handle differences in endianness or other strange things that can happen when pushing data over the network.
|
||||
* Should you have to adapt this to such a system, then it should be possible to do this by changing only the write_... and read_...
|
||||
* functions in the accompanying cpp file, but so far there was no need for it.
|
||||
**/
|
||||
class fsBinaryStream {
|
||||
public:
|
||||
fsBinaryStream();
|
||||
|
||||
/**
|
||||
* Use to push data into the parser. Typically called inside of your network receiver routine
|
||||
**/
|
||||
void received(long int, const char *);
|
||||
/**
|
||||
* After pushing data, you can try to extract messages from the stream. Process messages until a null pointer is returned.
|
||||
**/
|
||||
fsMsgPtr get_message();
|
||||
/**
|
||||
* When an invalid message is received, the valid field is set to false. No attempt is made to recover from the problem, you will have to disconnect.
|
||||
**/
|
||||
bool valid() const { return m_valid; }
|
||||
void clear() { m_start = 0; m_end = 0; m_valid=true; }
|
||||
|
||||
// Inbound
|
||||
static void encode_message(std::string &msg_out, const fsMsgTrackingState &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgStartCapturing &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgStopCapturing &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgCalibrateNeutral &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgSendMarkerNames &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgSendBlendshapeNames &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgSendRig &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgHeadPoseRelative &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgHeadPoseAbsolute &msg);
|
||||
|
||||
// Outbound
|
||||
static void encode_message(std::string &msg_out, const fsTrackingData &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgMarkerNames &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgBlendshapeNames &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgRig &msg);
|
||||
static void encode_message(std::string &msg_out, const fsMsgSignal &msg); // Generic Signal
|
||||
|
||||
private:
|
||||
std::string m_buffer;
|
||||
long int m_start;
|
||||
long int m_end;
|
||||
bool m_valid;
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
#endif // FSBINARYSTREAM_H
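The usage comment in the header above describes the intended call pattern for fsBinaryStream but ships without an example. The following is a minimal sketch, not part of the faceshift SDK: it assumes the caller owns the socket and passes each received chunk in, and the handleFaceshiftBytes name and the tracking-state handling are illustrative only.

// Sketch: feeding received bytes into fs::fsBinaryStream and draining complete messages,
// following the call pattern described in the header comment above.
#include "fsbinarystream.h"

void handleFaceshiftBytes(fs::fsBinaryStream& parser, const char* data, long int bytes) {
    parser.received(bytes, data);                      // push the raw bytes into the parser
    while (fs::fsMsgPtr msg = parser.get_message()) {  // iterate until no more full messages
        if (msg->id() == fs::fsMsg::MSG_OUT_TRACKING_STATE) {
            fs::fsMsgTrackingState* state = static_cast<fs::fsMsgTrackingState*>(msg.get());
            double timestamp = state->tracking_data().m_timestamp; // consume the frame here
            (void)timestamp;
        }
    }
    if (!parser.valid()) {
        // no recovery is attempted on a corrupt stream; disconnect and reconnect
    }
}

encode_message works in the opposite direction: it appends a binary block to a std::string that the caller then writes to the faceshift socket.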
|
Binary file not shown.
BIN interface/external/faceshift/lib/UNIX/libfaceshift.a vendored
Binary file not shown.
21 interface/external/faceshift/readme.txt vendored Normal file
@ -0,0 +1,21 @@
Instructions for adding the Faceshift library to Interface
Stephen Birarda, July 18th, 2014

OS X users: You can also use homebrew to get the Faceshift library by tapping our repo - highfidelity/homebrew-formulas
and then calling 'brew install highfidelity/formulas/faceshift'.

You can download the Faceshift SDK from http://download.faceshift.com/faceshift-network.zip.

Create a ‘faceshift’ folder under interface/externals.

You may optionally choose to place this folder in a location outside the repository (so you can re-use with different checkouts and different projects).

If so our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder ‘faceshift’ that contains the lib and include folders.

1. Build a Faceshift static library from the fsbinarystream.cpp file. If you build a release version call it libfaceshift.a. The debug version should be called libfaceshiftd.a. Place this in the ‘lib’ folder in your Faceshift folder.

2. Copy the fsbinarystream.h header file from the Faceshift SDK into the ‘include’ folder in your Faceshift folder.

3. Clear your build directory, run cmake and build, and you should be all set.
502 interface/external/faceshift/src/fsbinarystream.cpp vendored
@ -1,502 +0,0 @@
|
|||
// ==========================================================================
|
||||
// Copyright (C) 2012 faceshift AG, and/or its licensors. All rights reserved.
|
||||
//
|
||||
// the software is free to use and provided "as is", without warranty of any kind.
|
||||
// faceshift AG does not make and hereby disclaims any express or implied
|
||||
// warranties including, but not limited to, the warranties of
|
||||
// non-infringement, merchantability or fitness for a particular purpose,
|
||||
// or arising from a course of dealing, usage, or trade practice. in no
|
||||
// event will faceshift AG and/or its licensors be liable for any lost
|
||||
// revenues, data, or profits, or special, direct, indirect, or
|
||||
// consequential damages, even if faceshift AG and/or its licensors has
|
||||
// been advised of the possibility or probability of such damages.
|
||||
// ==========================================================================
|
||||
|
||||
#include "fsbinarystream.h"
|
||||
#include <stdio.h>
|
||||
#include <stdint.h>
|
||||
#include <string.h>
|
||||
|
||||
#define FSNETWORKVERSION 1
|
||||
|
||||
#ifdef FS_INTERNAL
|
||||
#include <common/log.hpp>
|
||||
#else
|
||||
#define LOG_RELEASE_ERROR(...) { printf("ERROR: %20s:%6d", __FILE__, __LINE__); printf(__VA_ARGS__); }
|
||||
#define LOG_RELEASE_WARNING(...) { printf("WARNING: %20s:%6d", __FILE__, __LINE__); printf(__VA_ARGS__); }
|
||||
#define LOG_RELEASE_INFO(...) { printf("INFO: %20s:%6d", __FILE__, __LINE__); printf(__VA_ARGS__); }
|
||||
#endif
|
||||
|
||||
|
||||
namespace fs {
|
||||
|
||||
// Ids of the submessages for the tracking state
|
||||
enum BlockId {
|
||||
BLOCKID_INFO = 101,
|
||||
BLOCKID_POSE = 102,
|
||||
BLOCKID_BLENDSHAPES = 103,
|
||||
BLOCKID_EYES = 104,
|
||||
BLOCKID_MARKERS = 105
|
||||
};
|
||||
|
||||
|
||||
typedef long int Size;
|
||||
|
||||
struct BlockHeader {
|
||||
uint16_t id;
|
||||
uint16_t version;
|
||||
uint32_t size;
|
||||
BlockHeader(uint16_t _id=0,
|
||||
uint32_t _size=0,
|
||||
uint16_t _version=FSNETWORKVERSION
|
||||
) : id(_id), version(_version), size(_size) {}
|
||||
};
|
||||
|
||||
// Interprets the data at the position start in buffer as a T and increments start by sizeof(T)
|
||||
// It should be sufficient to change/overload this function when you are on a weird endian system
|
||||
template<class T> bool read_pod(T &value, const std::string &buffer, Size &start) {
|
||||
if(start+sizeof(T) > buffer.size()) return false;
|
||||
value = *(const T*)(&buffer[start]);
|
||||
start += sizeof(T);
|
||||
return true;
|
||||
}
|
||||
bool read_pod(std::string &value, const std::string &buffer, Size &start) {
|
||||
uint16_t len = 0;
|
||||
if(!read_pod(len, buffer, start)) return false;
|
||||
if(start+len>Size(buffer.size())) return false; // check whether we have enough data available
|
||||
value.resize(len);
|
||||
memcpy(&(value[0]), &buffer[start], len);
|
||||
start+=len;
|
||||
return true;
|
||||
}
|
||||
template<class T> bool read_vector(std::vector<T> & values, const std::string & buffer, Size & start) {
|
||||
uint32_t len = 0;
|
||||
if( !read_pod(len, buffer, start)) return false;
|
||||
if( start+len*sizeof(T) > buffer.size() ) return false;
|
||||
values.resize(len);
|
||||
for(uint32_t i = 0; i < len; ++i) {
|
||||
read_pod(values[i],buffer,start);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
template<class T> bool read_small_vector(std::vector<T> & values, const std::string & buffer, Size & start) {
|
||||
uint16_t len = 0;
|
||||
if( !read_pod(len, buffer, start)) return false;
|
||||
if( start+len*sizeof(T) > buffer.size() ) return false;
|
||||
values.resize(len);
|
||||
bool success = true;
|
||||
for(uint16_t i = 0; i < len; ++i) {
|
||||
success &= read_pod(values[i],buffer,start);
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
// Adds the bitpattern of the data to the end of the buffer.
|
||||
// It should be sufficient to change/overload this function when you are on a weird endian system
|
||||
template <class T>
|
||||
void write_pod(std::string &buffer, const T &value) {
|
||||
Size start = buffer.size();
|
||||
buffer.resize(start + sizeof(T));
|
||||
*(T*)(&buffer[start]) = value;
|
||||
start += sizeof(T);
|
||||
}
|
||||
// special write function for strings
|
||||
void write_pod(std::string &buffer, const std::string &value) {
|
||||
uint16_t len = uint16_t(value.size()); write_pod(buffer, len);
|
||||
buffer.append(value);
|
||||
}
|
||||
template<class T> void write_vector(std::string & buffer, const std::vector<T> & values) {
|
||||
uint32_t len = values.size();
|
||||
write_pod(buffer,len);
|
||||
for(uint32_t i = 0; i < len; ++i)
|
||||
write_pod(buffer,values[i]);
|
||||
}
|
||||
template<class T> void write_small_vector(std::string & buffer, const std::vector<T> & values) {
|
||||
uint16_t len = values.size();
|
||||
write_pod(buffer,len);
|
||||
for(uint16_t i = 0; i < len; ++i)
|
||||
write_pod(buffer,values[i]);
|
||||
}
|
||||
void update_msg_size(std::string &buffer, Size start) {
|
||||
*(uint32_t*)(&buffer[start+4]) = buffer.size() - sizeof(BlockHeader) - start;
|
||||
}
|
||||
void update_msg_size(std::string &buffer) {
|
||||
*(uint32_t*)(&buffer[4]) = buffer.size() - sizeof(BlockHeader);
|
||||
}
|
||||
|
||||
static void skipHeader( Size &start) {
|
||||
start += sizeof(BlockHeader);
|
||||
}
|
||||
|
||||
//! returns whether @param data contains enough data to read the block header
|
||||
static bool headerAvailable(BlockHeader &header, const std::string &buffer, Size &start, const Size &end) {
|
||||
if (end-start >= Size(sizeof(BlockHeader))) {
|
||||
header = *(BlockHeader*)(&buffer[start]);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
//! returns whether @param data contains data for a full block
|
||||
static bool blockAvailable(const std::string &buffer, Size &start, const Size &end) {
|
||||
BlockHeader header;
|
||||
if (!headerAvailable(header, buffer, start, end)) return false;
|
||||
return end-start >= Size(sizeof(header)+header.size);
|
||||
}
|
||||
|
||||
fsBinaryStream::fsBinaryStream() : m_buffer(), m_start(0), m_end(0), m_valid(true) { m_buffer.resize(64*1024); } // Use a 64kb buffer by default
|
||||
|
||||
void fsBinaryStream::received(long int sz, const char *data) {
|
||||
|
||||
long int new_end = m_end + sz;
|
||||
if (new_end > Size(m_buffer.size()) && m_start>0) {
|
||||
// If newly received block is too large to fit into the buffer, but we already have processed data from the start of the buffer, then
|
||||
// move memory to the front of the buffer
|
||||
// The buffer only grows, such that it is always large enough to contain the largest message seen so far.
|
||||
if (m_end>m_start) memmove(&m_buffer[0], &m_buffer[0] + m_start, m_end - m_start);
|
||||
m_end = m_end - m_start;
|
||||
m_start = 0;
|
||||
new_end = m_end + sz;
|
||||
}
|
||||
|
||||
if (new_end > Size(m_buffer.size())) m_buffer.resize((int)(1.5f * (float)new_end)); // HIFI: to get 1.5 without warnings
|
||||
|
||||
memcpy(&m_buffer[0] + m_end, data, sz);
|
||||
m_end += sz;
|
||||
|
||||
}
|
||||
|
||||
static bool decodeInfo(fsTrackingData & _trackingData, const std::string &buffer, Size &start) {
|
||||
bool success = true;
|
||||
success &= read_pod<double>(_trackingData.m_timestamp, buffer, start);
|
||||
unsigned char tracking_successfull = 0;
|
||||
success &= read_pod<unsigned char>( tracking_successfull, buffer, start );
|
||||
_trackingData.m_trackingSuccessful = bool(tracking_successfull != 0); // HIFI: get rid of windows warning
|
||||
return success;
|
||||
}
|
||||
|
||||
static bool decodePose(fsTrackingData & _trackingData, const std::string &buffer, Size &start) {
|
||||
bool success = true;
|
||||
success &= read_pod(_trackingData.m_headRotation.x, buffer, start);
|
||||
success &= read_pod(_trackingData.m_headRotation.y, buffer, start);
|
||||
success &= read_pod(_trackingData.m_headRotation.z, buffer, start);
|
||||
success &= read_pod(_trackingData.m_headRotation.w, buffer, start);
|
||||
success &= read_pod(_trackingData.m_headTranslation.x, buffer, start);
|
||||
success &= read_pod(_trackingData.m_headTranslation.y, buffer, start);
|
||||
success &= read_pod(_trackingData.m_headTranslation.z, buffer, start);
|
||||
return success;
|
||||
}
|
||||
|
||||
static bool decodeBlendshapes(fsTrackingData & _trackingData, const std::string &buffer, Size &start) {
|
||||
return read_vector(_trackingData.m_coeffs, buffer, start);
|
||||
}
|
||||
|
||||
static bool decodeEyeGaze(fsTrackingData & _trackingData, const std::string &buffer, Size &start) {
|
||||
bool success = true;
|
||||
success &= read_pod(_trackingData.m_eyeGazeLeftPitch , buffer, start);
|
||||
success &= read_pod(_trackingData.m_eyeGazeLeftYaw , buffer, start);
|
||||
success &= read_pod(_trackingData.m_eyeGazeRightPitch, buffer, start);
|
||||
success &= read_pod(_trackingData.m_eyeGazeRightYaw , buffer, start);
|
||||
return success;
|
||||
}
|
||||
|
||||
static bool decodeMarkers(fsTrackingData & _trackingData, const std::string &buffer, Size &start) {
|
||||
return read_small_vector( _trackingData.m_markers, buffer, start );
|
||||
}
|
||||
|
||||
static bool decodeMarkerNames(fsMsgMarkerNames &_msg, const std::string &buffer, Size &start) {
|
||||
return read_small_vector(_msg.marker_names(), buffer, start);
|
||||
}
|
||||
static bool decodeBlendshapeNames(fsMsgBlendshapeNames &_msg, const std::string &buffer, Size &start) {
|
||||
return read_small_vector(_msg.blendshape_names(), buffer, start);
|
||||
}
|
||||
static bool decodeRig(fsMsgRig &_msg, const std::string &buffer, Size &start) {
|
||||
bool success = true;
|
||||
success &= read_vector(_msg.mesh().m_quads,buffer,start); // read quads
|
||||
success &= read_vector(_msg.mesh().m_tris,buffer,start); // read triangles
|
||||
success &= read_vector(_msg.mesh().m_vertex_data.m_vertices,buffer,start);// read neutral vertices
|
||||
success &= read_small_vector(_msg.blendshape_names(),buffer,start); // read names
|
||||
uint16_t bsize = 0;
|
||||
success &= read_pod(bsize,buffer,start);
|
||||
_msg.blendshapes().resize(bsize);
|
||||
for(uint16_t i = 0;i < bsize; i++)
|
||||
success &= read_vector(_msg.blendshapes()[i].m_vertices,buffer,start); // read blendshapes
|
||||
return success;
|
||||
}
|
||||
|
||||
bool is_valid_msg(int id) {
|
||||
switch(id) {
|
||||
case fsMsg::MSG_IN_START_TRACKING :
|
||||
case fsMsg::MSG_IN_STOP_TRACKING :
|
||||
case fsMsg::MSG_IN_CALIBRATE_NEUTRAL :
|
||||
case fsMsg::MSG_IN_SEND_MARKER_NAMES :
|
||||
case fsMsg::MSG_IN_SEND_BLENDSHAPE_NAMES:
|
||||
case fsMsg::MSG_IN_SEND_RIG :
|
||||
case fsMsg::MSG_IN_HEADPOSE_RELATIVE :
|
||||
case fsMsg::MSG_IN_HEADPOSE_ABSOLUTE :
|
||||
case fsMsg::MSG_OUT_TRACKING_STATE :
|
||||
case fsMsg::MSG_OUT_MARKER_NAMES :
|
||||
case fsMsg::MSG_OUT_BLENDSHAPE_NAMES :
|
||||
case fsMsg::MSG_OUT_RIG : return true;
|
||||
default:
|
||||
LOG_RELEASE_ERROR("Invalid Message ID %d", id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
fsMsgPtr fsBinaryStream::get_message() {
|
||||
BlockHeader super_block;
|
||||
if( !headerAvailable(super_block, m_buffer, m_start, m_end) ) return fsMsgPtr();
|
||||
if (!is_valid_msg(super_block.id)) { LOG_RELEASE_ERROR("Invalid superblock id"); m_valid = false; return fsMsgPtr(); }
|
||||
if( !blockAvailable( m_buffer, m_start, m_end) ) return fsMsgPtr();
|
||||
skipHeader(m_start);
|
||||
long super_block_data_start = m_start;
|
||||
switch (super_block.id) {
|
||||
case fsMsg::MSG_IN_START_TRACKING: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgStartCapturing() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_STOP_TRACKING: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgStopCapturing() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_CALIBRATE_NEUTRAL: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgCalibrateNeutral() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_SEND_MARKER_NAMES: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgSendMarkerNames() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_SEND_BLENDSHAPE_NAMES: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgSendBlendshapeNames() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_SEND_RIG: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgSendRig() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_HEADPOSE_RELATIVE: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgHeadPoseRelative() );
|
||||
}; break;
|
||||
case fsMsg::MSG_IN_HEADPOSE_ABSOLUTE: {
|
||||
if (super_block.size > 0) { LOG_RELEASE_ERROR("Expected Size to be 0, not %d", super_block.size); m_valid = false; return fsMsgPtr(); }
|
||||
return fsMsgPtr(new fsMsgHeadPoseAbsolute() );
|
||||
}; break;
|
||||
case fsMsg::MSG_OUT_MARKER_NAMES: {
|
||||
std::tr1::shared_ptr< fsMsgMarkerNames > msg(new fsMsgMarkerNames());
|
||||
if( !decodeMarkerNames(*msg, m_buffer, m_start )) { LOG_RELEASE_ERROR("Could not decode marker names"); m_valid = false; return fsMsgPtr(); }
|
||||
uint64_t actual_size = m_start-super_block_data_start;
|
||||
if( actual_size != super_block.size ) { LOG_RELEASE_ERROR("Block was promised to be of size %d, not %d", super_block.size, actual_size); m_valid = false; return fsMsgPtr(); }
|
||||
return msg;
|
||||
}; break;
|
||||
case fsMsg::MSG_OUT_BLENDSHAPE_NAMES: {
|
||||
std::tr1::shared_ptr< fsMsgBlendshapeNames > msg(new fsMsgBlendshapeNames() );
|
||||
if( !decodeBlendshapeNames(*msg, m_buffer, m_start) ) { LOG_RELEASE_ERROR("Could not decode blendshape names"); m_valid = false; return fsMsgPtr(); }
|
||||
uint64_t actual_size = m_start-super_block_data_start;
|
||||
if( actual_size != super_block.size ) { LOG_RELEASE_ERROR("Block was promised to be of size %d, not %d", super_block.size, actual_size); m_valid = false; return fsMsgPtr(); }
|
||||
return msg;
|
||||
}; break;
|
||||
case fsMsg::MSG_OUT_TRACKING_STATE: {
|
||||
BlockHeader sub_block;
|
||||
uint16_t num_blocks = 0;
|
||||
if( !read_pod(num_blocks, m_buffer, m_start) ) { LOG_RELEASE_ERROR("Could not read num_blocks"); m_valid = false; return fsMsgPtr(); }
|
||||
std::tr1::shared_ptr<fsMsgTrackingState> msg = std::tr1::shared_ptr<fsMsgTrackingState>(new fsMsgTrackingState());
|
||||
for(int i = 0; i < num_blocks; i++) {
|
||||
if( !headerAvailable(sub_block, m_buffer, m_start, m_end) ) { LOG_RELEASE_ERROR("could not read sub-header %d", i); m_valid = false; return fsMsgPtr(); }
|
||||
if( !blockAvailable( m_buffer, m_start, m_end) ) { LOG_RELEASE_ERROR("could not read sub-block %d", i); m_valid = false; return fsMsgPtr(); }
|
||||
skipHeader(m_start);
|
||||
long sub_block_data_start = m_start;
|
||||
bool success = true;
|
||||
switch(sub_block.id) {
|
||||
case BLOCKID_INFO: success &= decodeInfo( msg->tracking_data(), m_buffer, m_start); break;
|
||||
case BLOCKID_POSE: success &= decodePose( msg->tracking_data(), m_buffer, m_start); break;
|
||||
case BLOCKID_BLENDSHAPES: success &= decodeBlendshapes(msg->tracking_data(), m_buffer, m_start); break;
|
||||
case BLOCKID_EYES: success &= decodeEyeGaze( msg->tracking_data(), m_buffer, m_start); break;
|
||||
case BLOCKID_MARKERS: success &= decodeMarkers( msg->tracking_data(), m_buffer, m_start); break;
|
||||
default:
|
||||
LOG_RELEASE_ERROR("Unexpected subblock id %d", sub_block.id);
|
||||
m_valid = false; return msg;
|
||||
break;
|
||||
}
|
||||
if(!success) {
|
||||
LOG_RELEASE_ERROR("Could not decode subblock with id %d", sub_block.id);
|
||||
m_valid = false; return fsMsgPtr();
|
||||
}
|
||||
uint64_t actual_size = m_start-sub_block_data_start;
|
||||
if( actual_size != sub_block.size ) {
|
||||
LOG_RELEASE_ERROR("Unexpected number of bytes consumed %d instead of %d for subblock %d id:%d", actual_size, sub_block.size, i, sub_block.id);
|
||||
m_valid = false; return fsMsgPtr();
|
||||
}
|
||||
}
|
||||
uint64_t actual_size = m_start-super_block_data_start;
|
||||
if( actual_size != super_block.size ) {
|
||||
LOG_RELEASE_ERROR("Unexpected number of bytes consumed %d instead of %d", actual_size, super_block.size);
|
||||
m_valid = false; return fsMsgPtr();
|
||||
}
|
||||
return msg;
|
||||
}; break;
|
||||
case fsMsg::MSG_OUT_RIG: {
|
||||
std::tr1::shared_ptr< fsMsgRig > msg(new fsMsgRig() );
|
||||
if( !decodeRig(*msg, m_buffer, m_start) ) { LOG_RELEASE_ERROR("Could not decode rig"); m_valid = false; return fsMsgPtr(); }
|
||||
if( m_start-super_block_data_start != super_block.size ) { LOG_RELEASE_ERROR("Could not decode rig unexpected size"); m_valid = false; return fsMsgPtr(); }
|
||||
return msg;
|
||||
}; break;
|
||||
default: {
|
||||
LOG_RELEASE_ERROR("Unexpected superblock id %d", super_block.id);
|
||||
m_valid = false; return fsMsgPtr();
|
||||
}; break;
|
||||
}
|
||||
return fsMsgPtr();
|
||||
}
|
||||
|
||||
static void encodeInfo(std::string &buffer, const fsTrackingData & _trackingData) {
|
||||
BlockHeader header(BLOCKID_INFO, sizeof(double) + 1);
|
||||
write_pod(buffer, header);
|
||||
|
||||
write_pod(buffer, _trackingData.m_timestamp);
|
||||
unsigned char tracking_successfull = _trackingData.m_trackingSuccessful;
|
||||
write_pod( buffer, tracking_successfull );
|
||||
}
|
||||
|
||||
static void encodePose(std::string &buffer, const fsTrackingData & _trackingData) {
|
||||
BlockHeader header(BLOCKID_POSE, sizeof(float)*7);
|
||||
write_pod(buffer, header);
|
||||
|
||||
write_pod(buffer, _trackingData.m_headRotation.x);
|
||||
    write_pod(buffer, _trackingData.m_headRotation.y);
    write_pod(buffer, _trackingData.m_headRotation.z);
    write_pod(buffer, _trackingData.m_headRotation.w);
    write_pod(buffer, _trackingData.m_headTranslation.x);
    write_pod(buffer, _trackingData.m_headTranslation.y);
    write_pod(buffer, _trackingData.m_headTranslation.z);
}

static void encodeBlendshapes(std::string &buffer, const fsTrackingData & _trackingData) {
    uint32_t num_parameters = _trackingData.m_coeffs.size();
    BlockHeader header(BLOCKID_BLENDSHAPES, sizeof(uint32_t) + sizeof(float)*num_parameters);
    write_pod(buffer, header);
    write_pod(buffer, num_parameters);
    for(uint32_t i = 0; i < num_parameters; i++)
        write_pod(buffer, _trackingData.m_coeffs[i]);
}

static void encodeEyeGaze(std::string &buffer, const fsTrackingData & _trackingData) {
    BlockHeader header(BLOCKID_EYES, sizeof(float)*4);
    write_pod(buffer, header);
    write_pod(buffer, _trackingData.m_eyeGazeLeftPitch );
    write_pod(buffer, _trackingData.m_eyeGazeLeftYaw  );
    write_pod(buffer, _trackingData.m_eyeGazeRightPitch);
    write_pod(buffer, _trackingData.m_eyeGazeRightYaw );
}

static void encodeMarkers(std::string &buffer, const fsTrackingData & _trackingData) {
    uint16_t numMarkers = _trackingData.m_markers.size();
    BlockHeader header(BLOCKID_MARKERS, sizeof(uint16_t) + sizeof(float)*3*numMarkers);
    write_pod(buffer, header);
    write_pod(buffer, numMarkers);
    for(int i = 0; i < numMarkers; i++) {
        write_pod(buffer, _trackingData.m_markers[i].x);
        write_pod(buffer, _trackingData.m_markers[i].y);
        write_pod(buffer, _trackingData.m_markers[i].z);
    }
}

// Inbound
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgTrackingState &msg) {
    encode_message(msg_out, msg.tracking_data());
}

void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgStartCapturing &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgStopCapturing &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgCalibrateNeutral &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgSendMarkerNames &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgSendBlendshapeNames &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgSendRig &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgHeadPoseRelative &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgHeadPoseAbsolute &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}

// Outbound
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgSignal &msg) {
    BlockHeader header(msg.id());
    write_pod(msg_out, header);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsTrackingData &tracking_data) {
    Size start = msg_out.size();

    BlockHeader header(fsMsg::MSG_OUT_TRACKING_STATE);
    write_pod(msg_out, header);

    uint16_t N_blocks = 5;
    write_pod(msg_out, N_blocks);
    encodeInfo(       msg_out, tracking_data);
    encodePose(       msg_out, tracking_data);
    encodeBlendshapes(msg_out, tracking_data);
    encodeEyeGaze(    msg_out, tracking_data);
    encodeMarkers(    msg_out, tracking_data);

    update_msg_size(msg_out, start);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgMarkerNames &msg) {
    Size start = msg_out.size();

    BlockHeader header(msg.id());
    write_pod(msg_out, header);

    write_small_vector(msg_out, msg.marker_names());

    update_msg_size(msg_out, start);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgBlendshapeNames &msg) {
    Size start = msg_out.size();

    BlockHeader header(msg.id());
    write_pod(msg_out, header);

    write_small_vector(msg_out, msg.blendshape_names());

    update_msg_size(msg_out, start);
}
void fsBinaryStream::encode_message(std::string &msg_out, const fsMsgRig &msg) {
    Size start = msg_out.size();

    BlockHeader header(msg.id());
    write_pod(msg_out, header);

    write_vector(msg_out, msg.mesh().m_quads);                  // write quads
    write_vector(msg_out, msg.mesh().m_tris);                   // write triangles
    write_vector(msg_out, msg.mesh().m_vertex_data.m_vertices); // write neutral vertices
    write_small_vector(msg_out, msg.blendshape_names());        // write names
    write_pod(msg_out, uint16_t(msg.blendshapes().size()));
    for (uint16_t i = 0; i < uint16_t(msg.blendshapes().size()); i++)
        write_vector(msg_out, msg.blendshapes()[i].m_vertices); // write blendshapes

    update_msg_size(msg_out, start);
}
}
33 interface/external/leapmotion/readme.txt vendored Normal file

@@ -0,0 +1,33 @@
Instructions for adding the Leap Motion library (LeapSDK) to Interface
Sam Cake, June 10, 2014

You can download the Leap Developer Kit from https://developer.leapmotion.com/ (account creation required).
Interface has been tested with SDK versions:
- LeapDeveloperKit_2.0.3+17004_win
- LeapDeveloperKit_2.0.3+17004_mac

1. Copy the LeapSDK folders from the LeapDeveloperKit installation directory (Lib, Include) into the interface/external/leapmotion folder.
   This readme.txt should be there as well.

   The files needed in the folders are:

   include/
   - Leap.h
   - Leap.i
   - LeapMath.h

   lib/
     x86/
     - Leap.dll
     - Leap.lib
     - msvcp120.dll (optional if you already have the Msdev 2012 SDK redistributable installed)
     - msvcr120.dll (optional if you already have the Msdev 2012 SDK redistributable installed)
     - libLeap.dylib
     libc++/
     - libLeap.dylib

   You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use them with different checkouts and different projects).
   If so, our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'leapmotion' that contains the 2 folders mentioned above (Include, Lib).

2. Clear your build directory, run cmake and build, and you should be all set.
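For reference, here is a minimal, hypothetical sketch of how a CMake find module could honor the HIFI_LIB_DIR convention described above. The variable names and search paths below are illustrative assumptions; the actual module shipped under cmake/modules may differ.

# Hypothetical sketch only -- not the project's actual find module.
# Looks for LeapSDK either inside the repository (interface/external/leapmotion)
# or under $ENV{HIFI_LIB_DIR}/leapmotion, as the readme above suggests.
find_path(LEAPMOTION_INCLUDE_DIRS Leap.h
    PATHS "${CMAKE_SOURCE_DIR}/interface/external/leapmotion/include"
          "$ENV{HIFI_LIB_DIR}/leapmotion/include")
find_library(LEAPMOTION_LIBRARIES NAMES Leap
    PATHS "${CMAKE_SOURCE_DIR}/interface/external/leapmotion/lib/x86"
          "$ENV{HIFI_LIB_DIR}/leapmotion/lib/x86")
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LeapMotion DEFAULT_MSG LEAPMOTION_INCLUDE_DIRS LEAPMOTION_LIBRARIES)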
@@ -11,16 +11,18 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

// local lights
const int MAX_LOCAL_LIGHTS = 2; // 2 lights for now, will probably need more later on
uniform int numLocalLights;
uniform vec3 localLightDirections[MAX_LOCAL_LIGHTS];
uniform vec3 localLightColors[MAX_LOCAL_LIGHTS];

// the interpolated position
varying vec4 position;

@@ -28,23 +30,18 @@ varying vec4 position;
varying vec4 normal;

void main(void) {
// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);

// the local light that is always present
vec4 totalLocalLight = vec4(0.0, 0.0, 0.0, 1.0);
for (int i = 0; i < numLocalLights; i++) {
float localDiffuse = dot(normalizedNormal, vec4(localLightDirections[i], 1.0));
float localLight = step(0.0, localDiffuse);
float localLightVal = localDiffuse * localLight;

totalLocalLight += (localLightVal * vec4( localLightColors[i], 0.0));
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(normalizedNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + totalLocalLight);
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -34,9 +43,15 @@ void main(void) {
int shadowIndex = int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position),
dot(gl_EyePlaneR[shadowIndex], position));

// add up the local lights
vec4 normalizedNormal = normalize(normal);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(normalizedNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -44,7 +59,7 @@ void main(void) {
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -46,10 +55,16 @@ void main(void) {
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneT[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneR[shadowIndex], interpolatedPosition));

// compute the base color based on OpenGL lighting model

// add up the local lights
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(viewNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -57,7 +72,7 @@ void main(void) {
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -50,9 +59,15 @@ void main(void) {
dot(gl_EyePlaneT[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneR[shadowIndex], interpolatedPosition));

// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(viewNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -60,7 +75,7 @@ void main(void) {
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -38,8 +47,14 @@ void main(void) {
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position),
dot(gl_EyePlaneR[shadowIndex], position));

// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 normalizedNormal = normalize(normal);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(normalizedNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -47,7 +62,7 @@ void main(void) {
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -32,19 +41,20 @@ void main(void) {
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);

// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(viewNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
float localDiffuse = dot(viewNormal, gl_LightSource[1].position);
float localLight = step(0.0, localDiffuse);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + gl_FrontLightProduct[1].diffuse * (localDiffuse * localLight));

gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -35,13 +44,19 @@ void main(void) {
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);

// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(viewNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -27,8 +36,14 @@ varying vec4 position;
varying vec4 normal;

void main(void) {
// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 normalizedNormal = normalize(normal);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(normalizedNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -36,7 +51,7 @@ void main(void) {
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -38,9 +47,15 @@ void main(void) {
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);

// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(viewNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -48,7 +63,7 @@ void main(void) {
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -41,9 +50,15 @@ void main(void) {
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);

// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(viewNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -51,7 +66,7 @@ void main(void) {
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
@@ -11,6 +11,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -30,8 +39,14 @@ varying vec4 position;
varying vec4 normal;

void main(void) {
// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 normalizedNormal = normalize(normal);
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(normalizedNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +

@@ -39,7 +54,7 @@ void main(void) {
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@@ -10,11 +10,15 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// the maximum number of local lights to apply
const int MAX_LOCAL_LIGHTS = 2;

uniform int numLocalLights;
uniform vec3 localLightDirections[MAX_LOCAL_LIGHTS];
uniform vec3 localLightColors[MAX_LOCAL_LIGHTS];
// the color of each local light
uniform vec4 localLightColors[MAX_LOCAL_LIGHTS];

// the direction of each local light
uniform vec4 localLightDirections[MAX_LOCAL_LIGHTS];

// the diffuse texture
uniform sampler2D diffuseMap;

@@ -29,23 +33,18 @@ varying vec4 position;
varying vec4 normal;

void main(void) {
// compute the base color based on OpenGL lighting model
// add up the local lights
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);

// the local light that is always present
vec4 totalLocalLight = vec4(0.0, 0.0, 0.0, 1.0);
for (int i = 0; i < numLocalLights; i++) {
float localDiffuse = dot(normalizedNormal, vec4(localLightDirections[i], 1.0));
float localLight = step(0.0, localDiffuse);
float localLightVal = localDiffuse * localLight;

totalLocalLight += (localLightVal * vec4( localLightColors[i], 0.0));
vec4 localLight = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
localLight += localLightColors[i] * max(0.0, dot(normalizedNormal, localLightDirections[i]));
}

// compute the base color based on OpenGL lighting model
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + totalLocalLight);
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + localLight);

// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@@ -89,6 +89,8 @@
#include "ui/Stats.h"
#include "ui/TextRenderer.h"

#include "devices/Leapmotion.h"

using namespace std;

// Starfield information

@@ -153,7 +155,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_mouseX(0),
_mouseY(0),
_lastMouseMove(usecTimestampNow()),
_lastMouseMoveType(QEvent::MouseMove),
_lastMouseMoveWasSimulated(false),
_mouseHidden(false),
_seenMouseMove(false),
_touchAvgX(0.0f),

@@ -522,8 +524,8 @@ void Application::initializeGL() {
// Before we render anything, let's set up our viewFrustumOffsetCamera with a sufficiently large
// field of view and near and far clip to make it interesting.
//viewFrustumOffsetCamera.setFieldOfView(90.0);
_viewFrustumOffsetCamera.setNearClip(0.1f);
_viewFrustumOffsetCamera.setFarClip(500.0f * TREE_SCALE);
_viewFrustumOffsetCamera.setNearClip(DEFAULT_NEAR_CLIP);
_viewFrustumOffsetCamera.setFarClip(DEFAULT_FAR_CLIP);

initDisplay();
qDebug( "Initialized Display.");

@@ -590,7 +592,7 @@ void Application::paintGL() {

if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
_myCamera.setTightness(0.0f); // In first person, camera follows (untweaked) head exactly without delay
_myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
_myCamera.setTargetPosition(_myAvatar->getHead()->getFilteredEyePosition());
_myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());

} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {

@@ -609,10 +611,10 @@ void Application::paintGL() {
if (OculusManager::isConnected()) {
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition() + glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0));
_myCamera.setTargetPosition(_myAvatar->getHead()->getEyePosition() + glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0));
} else {
_myCamera.setTightness(0.0f);
glm::vec3 eyePosition = _myAvatar->getHead()->calculateAverageEyePosition();
glm::vec3 eyePosition = _myAvatar->getHead()->getFilteredEyePosition();
float headHeight = eyePosition.y - _myAvatar->getPosition().y;
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetPosition(_myAvatar->getPosition() + glm::vec3(0, headHeight + (_raiseMirror * _myAvatar->getScale()), 0));

@@ -1139,18 +1141,19 @@ void Application::focusOutEvent(QFocusEvent* event) {
_keysPressed.clear();
}

void Application::mouseMoveEvent(QMouseEvent* event) {
void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {

bool showMouse = true;

// Used by application overlay to determine how to draw cursor(s)
_lastMouseMoveWasSimulated = deviceID > 0;

// If this mouse move event is emitted by a controller, dont show the mouse cursor
if (event->type() == CONTROLLER_MOVE_EVENT) {
if (_lastMouseMoveWasSimulated) {
showMouse = false;
}

// Used by application overlay to determine how to draw cursor(s)
_lastMouseMoveType = event->type();

_controllerScriptingInterface.emitMouseMoveEvent(event); // send events to any registered scripts
_controllerScriptingInterface.emitMouseMoveEvent(event, deviceID); // send events to any registered scripts

// if one of our scripts have asked to capture this event, then stop processing it
if (_controllerScriptingInterface.isMouseCaptured()) {

@@ -1169,7 +1172,7 @@ void Application::mouseMoveEvent(QMouseEvent* event) {
_mouseY = event->y();
}

void Application::mousePressEvent(QMouseEvent* event) {
void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
_controllerScriptingInterface.emitMousePressEvent(event); // send events to any registered scripts

// if one of our scripts have asked to capture this event, then stop processing it

@@ -1202,7 +1205,7 @@ void Application::mousePressEvent(QMouseEvent* event) {
}
}

void Application::mouseReleaseEvent(QMouseEvent* event) {
void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
_controllerScriptingInterface.emitMouseReleaseEvent(event); // send events to any registered scripts

// if one of our scripts have asked to capture this event, then stop processing it

@@ -1720,6 +1723,8 @@ void Application::init() {
_faceplus.init();
_visage.init();

Leapmotion::init();

// fire off an immediate domain-server check in now that settings are loaded
NodeList::getInstance()->sendDomainServerCheckIn();

@@ -1906,17 +1911,9 @@ void Application::updateMyAvatarLookAtPosition() {
}
} else {
// I am not looking at anyone else, so just look forward
lookAtSpot = _myAvatar->getHead()->calculateAverageEyePosition() +
lookAtSpot = _myAvatar->getHead()->getEyePosition() +
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.f, 0.f, -TREE_SCALE));
}
// TODO: Add saccade to mouse pointer when stable, IF not looking at someone (since we know we are looking at it)
/*
const float FIXED_MIN_EYE_DISTANCE = 0.3f;
float minEyeDistance = FIXED_MIN_EYE_DISTANCE + (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON ? 0.0f :
glm::distance(_mouseRayOrigin, _myAvatar->getHead()->calculateAverageEyePosition()));
lookAtSpot = _mouseRayOrigin + _mouseRayDirection * qMax(minEyeDistance, distance);
*/

}
//
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active

@@ -1926,7 +1923,7 @@ void Application::updateMyAvatarLookAtPosition() {
float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
// deflect using Faceshift gaze data
glm::vec3 origin = _myAvatar->getHead()->calculateAverageEyePosition();
glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
float deflection = Menu::getInstance()->getFaceshiftEyeDeflection();
if (isLookingAtSomeone) {

@@ -2053,11 +2050,13 @@ void Application::update(float deltaTime) {
updateMouseRay(); // check what's under the mouse and update the mouse voxel
{
PerformanceTimer perfTimer("devices");
DeviceTracker::updateAll();
updateFaceshift();
updateVisage();
_sixenseManager.update(deltaTime);
_joystickManager.update();
_prioVR.update(deltaTime);

}
{
PerformanceTimer perfTimer("myAvatar");

@@ -2065,6 +2064,10 @@ void Application::update(float deltaTime) {
updateMyAvatar(deltaTime); // Sample hardware, update view frustum if needed, and send avatar data to mixer/nodes
}

// Dispatch input events
_controllerScriptingInterface.updateInputControllers();

updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...

_avatarManager.updateOtherAvatars(deltaTime); //loop through all the other avatars and simulate them...

@@ -2590,7 +2593,9 @@ void Application::updateShadowMap() {
glViewport(0, 0, _glWidget->width(), _glWidget->height());
}

const GLfloat WHITE_SPECULAR_COLOR[] = { 1.0f, 1.0f, 1.0f, 1.0f };
const GLfloat WORLD_AMBIENT_COLOR[] = { 0.525f, 0.525f, 0.6f };
const GLfloat WORLD_DIFFUSE_COLOR[] = { 0.6f, 0.525f, 0.525f };
const GLfloat WORLD_SPECULAR_COLOR[] = { 0.94f, 0.94f, 0.737f, 1.0f };
const GLfloat NO_SPECULAR_COLOR[] = { 0.0f, 0.0f, 0.0f, 1.0f };

void Application::setupWorldLight() {

@@ -2602,13 +2607,10 @@ void Application::setupWorldLight() {
glm::vec3 sunDirection = getSunDirection();
GLfloat light_position0[] = { sunDirection.x, sunDirection.y, sunDirection.z, 0.0 };
glLightfv(GL_LIGHT0, GL_POSITION, light_position0);
GLfloat ambient_color[] = { 0.7f, 0.7f, 0.8f };
glLightfv(GL_LIGHT0, GL_AMBIENT, ambient_color);
GLfloat diffuse_color[] = { 0.8f, 0.7f, 0.7f };
glLightfv(GL_LIGHT0, GL_DIFFUSE, diffuse_color);

glLightfv(GL_LIGHT0, GL_SPECULAR, WHITE_SPECULAR_COLOR);
glMaterialfv(GL_FRONT, GL_SPECULAR, WHITE_SPECULAR_COLOR);
glLightfv(GL_LIGHT0, GL_AMBIENT, WORLD_AMBIENT_COLOR);
glLightfv(GL_LIGHT0, GL_DIFFUSE, WORLD_DIFFUSE_COLOR);
glLightfv(GL_LIGHT0, GL_SPECULAR, WORLD_SPECULAR_COLOR);
glMaterialfv(GL_FRONT, GL_SPECULAR, WORLD_SPECULAR_COLOR);
glMateriali(GL_FRONT, GL_SHININESS, 96);
}

@@ -2786,7 +2788,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
}

// restore default, white specular
glMaterialfv(GL_FRONT, GL_SPECULAR, WHITE_SPECULAR_COLOR);
glMaterialfv(GL_FRONT, GL_SPECULAR, WORLD_SPECULAR_COLOR);

_nodeBoundsDisplay.draw();

@@ -2795,8 +2797,13 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
bool mirrorMode = (whichCamera.getInterpolatedMode() == CAMERA_MODE_MIRROR);
{
PerformanceTimer perfTimer("avatars");

_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE, selfAvatarOnly);

//Render the sixense lasers
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)) {
_myAvatar->renderLaserPointers();
}
}

if (!selfAvatarOnly) {

@@ -2920,7 +2927,7 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
_mirrorCamera.setTargetPosition(glm::vec3());

} else {
_mirrorCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
_mirrorCamera.setTargetPosition(_myAvatar->getHead()->getEyePosition());
}
}
_mirrorCamera.setAspectRatio((float)region.width() / region.height());

@@ -2949,7 +2956,7 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
_myAvatar->getSkeletonModel().getNeckPosition(neckPosition);

// get the eye position relative to the body
glm::vec3 eyePosition = _myAvatar->getHead()->calculateAverageEyePosition();
glm::vec3 eyePosition = _myAvatar->getHead()->getEyePosition();
float eyeHeight = eyePosition.y - _myAvatar->getPosition().y;

// set the translation of the face relative to the neck position

@@ -2967,6 +2974,13 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
attachment->setTranslation(attachment->getTranslation() + delta);
}

// and lo, even the shadow matrices
glm::mat4 savedShadowMatrices[CASCADED_SHADOW_MATRIX_COUNT];
for (int i = 0; i < CASCADED_SHADOW_MATRIX_COUNT; i++) {
savedShadowMatrices[i] = _shadowMatrices[i];
_shadowMatrices[i] = glm::transpose(glm::transpose(_shadowMatrices[i]) * glm::translate(-delta));
}

displaySide(_mirrorCamera, true);

// restore absolute translations

@@ -2975,6 +2989,11 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
for (int i = 0; i < absoluteAttachmentTranslations.size(); i++) {
_myAvatar->getAttachmentModels().at(i)->setTranslation(absoluteAttachmentTranslations.at(i));
}

// restore the shadow matrices
for (int i = 0; i < CASCADED_SHADOW_MATRIX_COUNT; i++) {
_shadowMatrices[i] = savedShadowMatrices[i];
}
} else {
displaySide(_mirrorCamera, true);
}

@@ -3177,6 +3196,7 @@ void Application::resetSensors() {
OculusManager::reset();

_prioVR.reset();
//_leapmotion.reset();

QCursor::setPos(_mouseX, _mouseY);
_myAvatar->reset();

@@ -3327,7 +3347,7 @@ void Application::nodeKilled(SharedNodePointer node) {
_entityEditSender.nodeKilled(node);

if (node->getType() == NodeType::AudioMixer) {
QMetaObject::invokeMethod(&_audio, "resetIncomingMixedAudioSequenceNumberStats");
QMetaObject::invokeMethod(&_audio, "audioMixerKilled");
}

if (node->getType() == NodeType::VoxelServer) {

@@ -3612,6 +3632,9 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript
scriptEngine->getEntityScriptingInterface()->setPacketSender(&_entityEditSender);
scriptEngine->getEntityScriptingInterface()->setEntityTree(_entities.getTree());

// model has some custom types
Model::registerMetaTypes(scriptEngine);

// hook our avatar object into this script engine
scriptEngine->setAvatarData(_myAvatar, "MyAvatar"); // leave it as a MyAvatar class to expose thrust features

@@ -3641,6 +3664,7 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript
scriptEngine->registerGlobalObject("AnimationCache", &_animationCache);
scriptEngine->registerGlobalObject("AudioReflector", &_audioReflector);
scriptEngine->registerGlobalObject("Account", AccountScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("Metavoxels", &_metavoxels);

scriptEngine->registerGlobalObject("AvatarManager", &_avatarManager);
@@ -159,9 +159,9 @@ public:
void focusOutEvent(QFocusEvent* event);
void focusInEvent(QFocusEvent* event);

void mouseMoveEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
void mouseReleaseEvent(QMouseEvent* event);
void mouseMoveEvent(QMouseEvent* event, unsigned int deviceID = 0);
void mousePressEvent(QMouseEvent* event, unsigned int deviceID = 0);
void mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID = 0);

void touchBeginEvent(QTouchEvent* event);
void touchEndEvent(QTouchEvent* event);

@@ -207,7 +207,7 @@ public:
const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
int getMouseX() const { return _mouseX; }
int getMouseY() const { return _mouseY; }
unsigned int getLastMouseMoveType() const { return _lastMouseMoveType; }
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }
Faceplus* getFaceplus() { return &_faceplus; }
Faceshift* getFaceshift() { return &_faceshift; }
Visage* getVisage() { return &_visage; }

@@ -258,6 +258,8 @@ public:
/// the view matrix translation.
void updateUntranslatedViewMatrix(const glm::vec3& viewMatrixTranslation = glm::vec3());

const glm::mat4& getUntranslatedViewMatrix() const { return _untranslatedViewMatrix; }

/// Loads a view matrix that incorporates the specified model translation without the precision issues that can
/// result from matrix multiplication at high translation magnitudes.
void loadTranslatedViewMatrix(const glm::vec3& translation);

@@ -507,7 +509,7 @@ private:
int _mouseDragStartedX;
int _mouseDragStartedY;
quint64 _lastMouseMove;
unsigned int _lastMouseMoveType;
bool _lastMouseMoveWasSimulated;
bool _mouseHidden;
bool _seenMouseMove;
@ -43,6 +43,7 @@
|
|||
#include "Audio.h"
|
||||
#include "Menu.h"
|
||||
#include "Util.h"
|
||||
#include "AudioRingBuffer.h"
|
||||
|
||||
static const float AUDIO_CALLBACK_MSECS = (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float)SAMPLE_RATE * 1000.0;
|
||||
|
||||
|
@ -56,6 +57,10 @@ static const int TIME_GAP_STATS_WINDOW_INTERVALS = 30;
|
|||
static const int INCOMING_SEQ_STATS_HISTORY_LENGTH = INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS /
|
||||
(TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS / USECS_PER_SECOND);
|
||||
|
||||
// the stats for the total frames available in the ring buffer and the audio output buffer
|
||||
// will sample every second, update every second, and have a moving window covering 10 seconds
|
||||
static const int FRAMES_AVAILABLE_STATS_WINDOW_SECONDS = 10;
|
||||
|
||||
// Mute icon configration
|
||||
static const int MUTE_ICON_SIZE = 24;
|
||||
|
||||
|
@ -75,8 +80,17 @@ Audio::Audio(int16_t initialJitterBufferSamples, QObject* parent) :
|
|||
_loopbackOutputDevice(NULL),
|
||||
_proceduralAudioOutput(NULL),
|
||||
_proceduralOutputDevice(NULL),
|
||||
|
||||
// NOTE: Be very careful making changes to the initializers of these ring buffers. There is a known problem with some
|
||||
// Mac audio devices that slowly introduce additional delay in the audio device because they play out audio slightly
|
||||
// slower than real time (or at least the desired sample rate). If you increase the size of the ring buffer, then it
|
||||
// this delay will slowly add up and the longer someone runs, they more delayed their audio will be.
|
||||
_inputRingBuffer(0),
|
||||
#ifdef _WIN32
|
||||
_ringBuffer(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, false, 100),
|
||||
#else
|
||||
_ringBuffer(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO), // DO NOT CHANGE THIS UNLESS YOU SOLVE THE AUDIO DEVICE DRIFT PROBLEM!!!
|
||||
#endif
|
||||
_isStereoInput(false),
|
||||
_averagedLatency(0.0),
|
||||
_measuredJitter(0),
|
||||
|
@ -112,11 +126,16 @@ Audio::Audio(int16_t initialJitterBufferSamples, QObject* parent) :
|
|||
_scopeInput(0),
|
||||
_scopeOutputLeft(0),
|
||||
_scopeOutputRight(0),
|
||||
_statsEnabled(false),
|
||||
_starveCount(0),
|
||||
_consecutiveNotMixedCount(0),
|
||||
_outgoingAvatarAudioSequenceNumber(0),
|
||||
_incomingMixedAudioSequenceNumberStats(INCOMING_SEQ_STATS_HISTORY_LENGTH),
|
||||
_interframeTimeGapStats(TIME_GAPS_STATS_INTERVAL_SAMPLES, TIME_GAP_STATS_WINDOW_INTERVALS)
|
||||
_interframeTimeGapStats(TIME_GAPS_STATS_INTERVAL_SAMPLES, TIME_GAP_STATS_WINDOW_INTERVALS),
|
||||
_audioInputMsecsReadStats(MSECS_PER_SECOND / (float)AUDIO_CALLBACK_MSECS * CALLBACK_ACCELERATOR_RATIO, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
|
||||
_inputRingBufferMsecsAvailableStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
|
||||
_outputRingBufferFramesAvailableStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
|
||||
_audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS)
|
||||
{
|
||||
// clear the array of locally injected samples
|
||||
memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
|
||||
|
@ -132,15 +151,34 @@ void Audio::init(QGLWidget *parent) {
|
|||
|
||||
void Audio::reset() {
|
||||
_ringBuffer.reset();
|
||||
|
||||
|
||||
// we don't want to reset seq numbers when space-bar reset occurs.
|
||||
//_outgoingAvatarAudioSequenceNumber = 0;
|
||||
|
||||
resetStats();
|
||||
}
|
||||
|
||||
void Audio::resetStats() {
|
||||
_starveCount = 0;
|
||||
_consecutiveNotMixedCount = 0;
|
||||
|
||||
_audioMixerAvatarStreamAudioStats = AudioStreamStats();
|
||||
_audioMixerInjectedStreamAudioStatsMap.clear();
|
||||
|
||||
_outgoingAvatarAudioSequenceNumber = 0;
|
||||
_incomingMixedAudioSequenceNumberStats.reset();
|
||||
|
||||
_interframeTimeGapStats.reset();
|
||||
|
||||
_audioInputMsecsReadStats.reset();
|
||||
_inputRingBufferMsecsAvailableStats.reset();
|
||||
|
||||
_outputRingBufferFramesAvailableStats.reset();
|
||||
_audioOutputMsecsUnplayedStats.reset();
|
||||
}
|
||||
|
||||
void Audio::audioMixerKilled() {
|
||||
_outgoingAvatarAudioSequenceNumber = 0;
|
||||
resetStats();
|
||||
}
|
||||
|
||||
QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& deviceName) {
|
||||
|
@ -483,8 +521,11 @@ void Audio::handleAudioInput() {
|
|||
}
|
||||
|
||||
_inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());
|
||||
|
||||
float audioInputMsecsRead = inputByteArray.size() / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
|
||||
_audioInputMsecsReadStats.update(audioInputMsecsRead);
|
||||
|
||||
while (_inputRingBuffer.samplesAvailable() > inputSamplesRequired) {
|
||||
while (_inputRingBuffer.samplesAvailable() >= inputSamplesRequired) {
|
||||
|
||||
int16_t* inputAudioSamples = new int16_t[inputSamplesRequired];
|
||||
_inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);
|
||||
|
@ -780,7 +821,7 @@ AudioStreamStats Audio::getDownstreamAudioStreamStats() const {
|
|||
stats._timeGapWindowAverage = _interframeTimeGapStats.getWindowAverage();
|
||||
|
||||
stats._ringBufferFramesAvailable = _ringBuffer.framesAvailable();
|
||||
stats._ringBufferCurrentJitterBufferFrames = 0;
|
||||
stats._ringBufferFramesAvailableAverage = _outputRingBufferFramesAvailableStats.getWindowAverage();
|
||||
stats._ringBufferDesiredJitterBufferFrames = getDesiredJitterBufferFrames();
|
||||
stats._ringBufferStarveCount = _starveCount;
|
||||
stats._ringBufferConsecutiveNotMixedCount = _consecutiveNotMixedCount;
|
||||
|
@ -795,6 +836,13 @@ AudioStreamStats Audio::getDownstreamAudioStreamStats() const {
|
|||
|
||||
void Audio::sendDownstreamAudioStatsPacket() {
|
||||
|
||||
// since this function is called every second, we'll sample some of our stats here
|
||||
|
||||
_inputRingBufferMsecsAvailableStats.update(getInputRingBufferMsecsAvailable());
|
||||
|
||||
_outputRingBufferFramesAvailableStats.update(_ringBuffer.framesAvailable());
|
||||
_audioOutputMsecsUnplayedStats.update(getAudioOutputMsecsUnplayed());
|
||||
|
||||
// push the current seq number stats into history, which moves the history window forward 1s
|
||||
// (since that's how often pushStatsToHistory() is called)
|
||||
_incomingMixedAudioSequenceNumberStats.pushStatsToHistory();
|
||||
|
@ -955,8 +1003,9 @@ void Audio::processReceivedAudio(const QByteArray& audioByteArray) {
|
|||
if (Menu::getInstance()->isOptionChecked(MenuOption::DisableQAudioOutputOverflowCheck)) {
|
||||
numNetworkOutputSamples = _ringBuffer.samplesAvailable();
|
||||
} else {
|
||||
int numSamplesAudioOutputRoomFor = _audioOutput->bytesFree() / sizeof(int16_t);
|
||||
numNetworkOutputSamples = std::min(_ringBuffer.samplesAvailable(), (int)(numSamplesAudioOutputRoomFor * networkOutputToOutputRatio));
|
||||
// make sure to push a whole number of frames to the audio output
|
||||
int numFramesAudioOutputRoomFor = _audioOutput->bytesFree() / sizeof(int16_t) * networkOutputToOutputRatio / _ringBuffer.getNumFrameSamples();
|
||||
numNetworkOutputSamples = std::min(_ringBuffer.samplesAvailable(), numFramesAudioOutputRoomFor * _ringBuffer.getNumFrameSamples());
|
||||
}
|
||||
|
||||
// if there is data in the ring buffer and room in the audio output, decide what to do
|
||||
|
@ -1263,6 +1312,10 @@ void Audio::toggleScopePause() {
|
|||
_scopeEnabledPause = !_scopeEnabledPause;
|
||||
}
|
||||
|
||||
void Audio::toggleStats() {
|
||||
_statsEnabled = !_statsEnabled;
|
||||
}
|
||||
|
||||
void Audio::selectAudioScopeFiveFrames() {
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::AudioScopeFiveFrames)) {
|
||||
reallocateScope(5);
|
||||
|
@ -1342,6 +1395,174 @@ void Audio::addBufferToScope(
|
|||
}
|
||||
}
|
||||
|
||||
void Audio::renderStats(const float* color, int width, int height) {
|
||||
if (!_statsEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const int LINES_WHEN_CENTERED = 30;
|
||||
const int CENTERED_BACKGROUND_HEIGHT = STATS_HEIGHT_PER_LINE * LINES_WHEN_CENTERED;
|
||||
|
||||
int lines = _audioMixerInjectedStreamAudioStatsMap.size() * 7 + 23;
|
||||
int statsHeight = STATS_HEIGHT_PER_LINE * lines;
|
||||
|
||||
|
||||
static const float backgroundColor[4] = { 0.2f, 0.2f, 0.2f, 0.6f };
|
||||
|
||||
int x = std::max((width - (int)STATS_WIDTH) / 2, 0);
|
||||
int y = std::max((height - CENTERED_BACKGROUND_HEIGHT) / 2, 0);
|
||||
int w = STATS_WIDTH;
|
||||
int h = statsHeight;
|
||||
renderBackground(backgroundColor, x, y, w, h);
|
||||
|
||||
|
||||
int horizontalOffset = x + 5;
|
||||
int verticalOffset = y;
|
||||
|
||||
float scale = 0.10f;
|
||||
float rotation = 0.0f;
|
||||
int font = 2;
|
||||
|
||||
|
||||
char latencyStatString[512];
|
||||
|
||||
const float BUFFER_SEND_INTERVAL_MSECS = BUFFER_SEND_INTERVAL_USECS / (float)USECS_PER_MSEC;
|
||||
|
||||
float audioInputBufferLatency = 0.0f, inputRingBufferLatency = 0.0f, networkRoundtripLatency = 0.0f, mixerRingBufferLatency = 0.0f, outputRingBufferLatency = 0.0f, audioOutputBufferLatency = 0.0f;
|
||||
|
||||
SharedNodePointer audioMixerNodePointer = NodeList::getInstance()->soloNodeOfType(NodeType::AudioMixer);
|
||||
if (!audioMixerNodePointer.isNull()) {
|
||||
audioInputBufferLatency = _audioInputMsecsReadStats.getWindowAverage();
|
||||
inputRingBufferLatency = getInputRingBufferAverageMsecsAvailable();
|
||||
networkRoundtripLatency = audioMixerNodePointer->getPingMs();
|
||||
mixerRingBufferLatency = _audioMixerAvatarStreamAudioStats._ringBufferFramesAvailableAverage * BUFFER_SEND_INTERVAL_MSECS;
|
||||
outputRingBufferLatency = _outputRingBufferFramesAvailableStats.getWindowAverage() * BUFFER_SEND_INTERVAL_MSECS;
|
||||
audioOutputBufferLatency = _audioOutputMsecsUnplayedStats.getWindowAverage();
|
||||
}
|
||||
float totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency + outputRingBufferLatency + audioOutputBufferLatency;
|
||||
|
||||
sprintf(latencyStatString, " Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s", audioInputBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s", inputRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " Network to mixer: %7.2fms - half of last ping value calculated by the node list", networkRoundtripLatency / 2.0f);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s", mixerRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " Network to client: %7.2fms - half of last ping value calculated by the node list", networkRoundtripLatency / 2.0f);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s", outputRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s", audioOutputBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " TOTAL: %7.2fms\n", totalLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
|
||||
char downstreamLabelString[] = "Downstream mixed audio stats:";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downstreamLabelString, color);
|
||||
|
||||
renderAudioStreamStats(getDownstreamAudioStreamStats(), horizontalOffset, verticalOffset, scale, rotation, font, color, true);
|
||||
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char upstreamMicLabelString[] = "Upstream mic audio stats:";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamMicLabelString, color);
|
||||
|
||||
renderAudioStreamStats(_audioMixerAvatarStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
|
||||
|
||||
|
||||
foreach(const AudioStreamStats& injectedStreamAudioStats, _audioMixerInjectedStreamAudioStatsMap) {
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char upstreamInjectedLabelString[512];
|
||||
sprintf(upstreamInjectedLabelString, "Upstream injected audio stats: stream ID: %s",
|
||||
injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamInjectedLabelString, color);
|
||||
|
||||
renderAudioStreamStats(injectedStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
|
||||
}
|
||||
}
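
A note on units in the latency lines above: the device-side buffers are already measured in milliseconds, while ring-buffer occupancy is counted in network frames and converted via the frame send interval (BUFFER_SEND_INTERVAL_USECS / USECS_PER_MSEC, as in the code); the TOTAL line is just the sum of the six stages. A minimal standalone sketch of that conversion, with an assumed frame interval rather than the engine's actual constants:

    // Illustrative constants only; the real values come from the shared audio headers.
    static const float USECS_PER_MSEC = 1000.0f;
    static const float BUFFER_SEND_INTERVAL_USECS = 10000.0f;   // assumed 10 ms network frame
    static const float BUFFER_SEND_INTERVAL_MSECS = BUFFER_SEND_INTERVAL_USECS / USECS_PER_MSEC;

    // Convert buffered network frames to milliseconds of added latency,
    // e.g. 3 frames at a 10 ms interval contribute roughly 30 ms.
    float framesToMsecs(float framesAvailable) {
        return framesAvailable * BUFFER_SEND_INTERVAL_MSECS;
    }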
|
||||
|
||||
void Audio::renderAudioStreamStats(const AudioStreamStats& streamStats, int horizontalOffset, int& verticalOffset,
|
||||
float scale, float rotation, int font, const float* color, bool isDownstreamStats) {
|
||||
|
||||
char stringBuffer[512];
|
||||
|
||||
sprintf(stringBuffer, " Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)",
|
||||
streamStats._packetStreamStats.getLostRate() * 100.0f,
|
||||
streamStats._packetStreamStats._numLost,
|
||||
streamStats._packetStreamWindowStats.getLostRate() * 100.0f,
|
||||
streamStats._packetStreamWindowStats._numLost);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
if (isDownstreamStats) {
|
||||
|
||||
const float BUFFER_SEND_INTERVAL_MSECS = BUFFER_SEND_INTERVAL_USECS / (float)USECS_PER_MSEC;
|
||||
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u+%d, available: %u+%d",
|
||||
streamStats._ringBufferDesiredJitterBufferFrames,
|
||||
streamStats._ringBufferFramesAvailableAverage,
|
||||
(int)(getAudioOutputAverageMsecsUnplayed() / BUFFER_SEND_INTERVAL_MSECS),
|
||||
streamStats._ringBufferFramesAvailable,
|
||||
(int)(getAudioOutputMsecsUnplayed() / BUFFER_SEND_INTERVAL_MSECS));
|
||||
} else {
|
||||
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u",
|
||||
streamStats._ringBufferDesiredJitterBufferFrames,
|
||||
streamStats._ringBufferFramesAvailableAverage,
|
||||
streamStats._ringBufferFramesAvailable);
|
||||
}
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u",
|
||||
streamStats._ringBufferStarveCount,
|
||||
streamStats._ringBufferConsecutiveNotMixedCount,
|
||||
streamStats._ringBufferSilentFramesDropped,
|
||||
streamStats._ringBufferOverflowCount);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(streamStats._timeGapMin).toLatin1().data(),
|
||||
formatUsecTime(streamStats._timeGapMax).toLatin1().data(),
|
||||
formatUsecTime(streamStats._timeGapAverage).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(streamStats._timeGapWindowMin).toLatin1().data(),
|
||||
formatUsecTime(streamStats._timeGapWindowMax).toLatin1().data(),
|
||||
formatUsecTime(streamStats._timeGapWindowAverage).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
}
|
||||
|
||||
|
||||
void Audio::renderScope(int width, int height) {
|
||||
|
||||
if (!_scopeEnabled)
|
||||
|
@ -1577,7 +1798,7 @@ const float Audio::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
|||
const float Audio::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
||||
#endif
|
||||
|
||||
int Audio::calculateNumberOfInputCallbackBytes(const QAudioFormat& format) {
|
||||
int Audio::calculateNumberOfInputCallbackBytes(const QAudioFormat& format) const {
|
||||
int numInputCallbackBytes = (int)(((NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL
|
||||
* format.channelCount()
|
||||
* (format.sampleRate() / SAMPLE_RATE))
|
||||
|
@ -1586,7 +1807,7 @@ int Audio::calculateNumberOfInputCallbackBytes(const QAudioFormat& format) {
|
|||
return numInputCallbackBytes;
|
||||
}
|
||||
|
||||
float Audio::calculateDeviceToNetworkInputRatio(int numBytes) {
|
||||
float Audio::calculateDeviceToNetworkInputRatio(int numBytes) const {
|
||||
float inputToNetworkInputRatio = (int)((_numInputCallbackBytes
|
||||
* CALLBACK_ACCELERATOR_RATIO
|
||||
/ NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL) + 0.5f);
|
||||
|
@ -1594,7 +1815,19 @@ float Audio::calculateDeviceToNetworkInputRatio(int numBytes) {
|
|||
return inputToNetworkInputRatio;
|
||||
}
|
||||
|
||||
int Audio::calculateNumberOfFrameSamples(int numBytes) {
|
||||
int Audio::calculateNumberOfFrameSamples(int numBytes) const {
|
||||
int frameSamples = (int)(numBytes * CALLBACK_ACCELERATOR_RATIO + 0.5f) / sizeof(int16_t);
|
||||
return frameSamples;
|
||||
}
|
||||
|
||||
float Audio::getAudioOutputMsecsUnplayed() const {
|
||||
int bytesAudioOutputUnplayed = _audioOutput->bufferSize() - _audioOutput->bytesFree();
|
||||
float msecsAudioOutputUnplayed = bytesAudioOutputUnplayed / (float)_outputFormat.bytesForDuration(USECS_PER_MSEC);
|
||||
return msecsAudioOutputUnplayed;
|
||||
}
|
||||
|
||||
float Audio::getInputRingBufferMsecsAvailable() const {
|
||||
int bytesInInputRingBuffer = _inputRingBuffer.samplesAvailable() * sizeof(int16_t);
|
||||
float msecsInInputRingBuffer = bytesInInputRingBuffer / (float)(_inputFormat.bytesForDuration(USECS_PER_MSEC));
|
||||
return msecsInInputRingBuffer;
|
||||
}
|
||||
|
|
|
@ -71,6 +71,7 @@ public:
|
|||
|
||||
void renderToolBox(int x, int y, bool boxed);
|
||||
void renderScope(int width, int height);
|
||||
void renderStats(const float* color, int width, int height);
|
||||
|
||||
int getNetworkSampleRate() { return SAMPLE_RATE; }
|
||||
int getNetworkBufferLengthSamplesPerChannel() { return NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; }
|
||||
|
@ -78,6 +79,12 @@ public:
|
|||
bool getProcessSpatialAudio() const { return _processSpatialAudio; }
|
||||
|
||||
const SequenceNumberStats& getIncomingMixedAudioSequenceNumberStats() const { return _incomingMixedAudioSequenceNumberStats; }
|
||||
|
||||
float getInputRingBufferMsecsAvailable() const;
|
||||
float getInputRingBufferAverageMsecsAvailable() const { return (float)_inputRingBufferMsecsAvailableStats.getWindowAverage(); }
|
||||
|
||||
float getAudioOutputMsecsUnplayed() const;
|
||||
float getAudioOutputAverageMsecsUnplayed() const { return (float)_audioOutputMsecsUnplayedStats.getWindowAverage(); }
|
||||
|
||||
public slots:
|
||||
void start();
|
||||
|
@ -87,12 +94,14 @@ public slots:
|
|||
void addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& spatialAudio, unsigned int numSamples);
|
||||
void handleAudioInput();
|
||||
void reset();
|
||||
void resetIncomingMixedAudioSequenceNumberStats() { _incomingMixedAudioSequenceNumberStats.reset(); }
|
||||
void resetStats();
|
||||
void audioMixerKilled();
|
||||
void toggleMute();
|
||||
void toggleAudioNoiseReduction();
|
||||
void toggleToneInjection();
|
||||
void toggleScope();
|
||||
void toggleScopePause();
|
||||
void toggleStats();
|
||||
void toggleAudioSpatialProcessing();
|
||||
void toggleStereoInput();
|
||||
void selectAudioScopeFiveFrames();
|
||||
|
@ -221,9 +230,9 @@ private:
|
|||
|
||||
// Callback acceleration dependent calculations
|
||||
static const float CALLBACK_ACCELERATOR_RATIO;
|
||||
int calculateNumberOfInputCallbackBytes(const QAudioFormat& format);
|
||||
int calculateNumberOfFrameSamples(int numBytes);
|
||||
float calculateDeviceToNetworkInputRatio(int numBytes);
|
||||
int calculateNumberOfInputCallbackBytes(const QAudioFormat& format) const;
|
||||
int calculateNumberOfFrameSamples(int numBytes) const;
|
||||
float calculateDeviceToNetworkInputRatio(int numBytes) const;
|
||||
|
||||
// Audio scope methods for allocation/deallocation
|
||||
void allocateScope();
|
||||
|
@ -239,6 +248,10 @@ private:
|
|||
void renderGrid(const float* color, int x, int y, int width, int height, int rows, int cols);
|
||||
void renderLineStrip(const float* color, int x, int y, int n, int offset, const QByteArray* byteArray);
|
||||
|
||||
// audio stats methods for rendering
|
||||
void renderAudioStreamStats(const AudioStreamStats& streamStats, int horizontalOffset, int& verticalOffset,
|
||||
float scale, float rotation, int font, const float* color, bool isDownstreamStats = false);
|
||||
|
||||
// Audio scope data
|
||||
static const unsigned int NETWORK_SAMPLES_PER_FRAME = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
|
||||
static const unsigned int DEFAULT_FRAMES_PER_SCOPE = 5;
|
||||
|
@ -255,6 +268,13 @@ private:
|
|||
QByteArray* _scopeInput;
|
||||
QByteArray* _scopeOutputLeft;
|
||||
QByteArray* _scopeOutputRight;
|
||||
#ifdef _WIN32
|
||||
static const unsigned int STATS_WIDTH = 1500;
|
||||
#else
|
||||
static const unsigned int STATS_WIDTH = 650;
|
||||
#endif
|
||||
static const unsigned int STATS_HEIGHT_PER_LINE = 20;
|
||||
bool _statsEnabled;
|
||||
|
||||
int _starveCount;
|
||||
int _consecutiveNotMixedCount;
|
||||
|
@ -266,6 +286,12 @@ private:
|
|||
SequenceNumberStats _incomingMixedAudioSequenceNumberStats;
|
||||
|
||||
MovingMinMaxAvg<quint64> _interframeTimeGapStats;
|
||||
|
||||
MovingMinMaxAvg<float> _audioInputMsecsReadStats;
|
||||
MovingMinMaxAvg<float> _inputRingBufferMsecsAvailableStats;
|
||||
|
||||
MovingMinMaxAvg<int> _outputRingBufferFramesAvailableStats;
|
||||
MovingMinMaxAvg<float> _audioOutputMsecsUnplayedStats;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -47,8 +47,8 @@ Camera::Camera() :
|
|||
_targetPosition(0.0f, 0.0f, 0.0f),
|
||||
_fieldOfView(DEFAULT_FIELD_OF_VIEW_DEGREES),
|
||||
_aspectRatio(16.0f/9.0f),
|
||||
_nearClip(0.08f), // default
|
||||
_farClip(50.0f * TREE_SCALE), // default
|
||||
_nearClip(DEFAULT_NEAR_CLIP), // default
|
||||
_farClip(DEFAULT_FAR_CLIP), // default
|
||||
_upShift(0.0f),
|
||||
_distance(0.0f),
|
||||
_tightness(10.0f), // default
|
||||
|
|
|
@ -18,7 +18,8 @@
|
|||
const float HAIR_DAMPING = 0.99f;
|
||||
const float CONSTRAINT_RELAXATION = 10.0f;
|
||||
const float HAIR_ACCELERATION_COUPLING = 0.025f;
|
||||
const float HAIR_ANGULAR_VELOCITY_COUPLING = 0.10f;
|
||||
const float HAIR_ANGULAR_VELOCITY_COUPLING = 0.01f;
|
||||
const float HAIR_ANGULAR_ACCELERATION_COUPLING = 0.001f;
|
||||
const float HAIR_MAX_LINEAR_ACCELERATION = 4.0f;
|
||||
const float HAIR_STIFFNESS = 0.005f;
|
||||
const glm::vec3 HAIR_COLOR1(0.98f, 0.92f, 0.843f);
|
||||
|
@ -36,6 +37,7 @@ Hair::Hair(int strands,
|
|||
_radius(radius),
|
||||
_acceleration(0.0f),
|
||||
_angularVelocity(0.0f),
|
||||
_angularAcceleration(0.0f),
|
||||
_gravity(0.0f)
|
||||
{
|
||||
_hairPosition = new glm::vec3[_strands * _links];
|
||||
|
@ -127,13 +129,15 @@ void Hair::simulate(float deltaTime) {
|
|||
const float ANGULAR_VELOCITY_MIN = 0.001f;
|
||||
if (glm::length(_angularVelocity) > ANGULAR_VELOCITY_MIN) {
|
||||
glm::vec3 yawVector = _hairPosition[vertexIndex];
|
||||
glm::vec3 angularVelocity = _angularVelocity * HAIR_ANGULAR_VELOCITY_COUPLING;
|
||||
glm::vec3 angularAcceleration = _angularAcceleration * HAIR_ANGULAR_ACCELERATION_COUPLING;
|
||||
yawVector.y = 0.f;
|
||||
if (glm::length(yawVector) > EPSILON) {
|
||||
float radius = glm::length(yawVector);
|
||||
yawVector = glm::normalize(yawVector);
|
||||
float angle = atan2f(yawVector.x, -yawVector.z) + PI;
|
||||
glm::vec3 delta = glm::vec3(-1.f, 0.f, 0.f) * glm::angleAxis(angle, glm::vec3(0, 1, 0));
|
||||
_hairPosition[vertexIndex] -= delta * radius * _angularVelocity.y * HAIR_ANGULAR_VELOCITY_COUPLING * deltaTime;
|
||||
_hairPosition[vertexIndex] -= delta * radius * (angularVelocity.y - angularAcceleration.y) * deltaTime;
|
||||
}
|
||||
glm::vec3 pitchVector = _hairPosition[vertexIndex];
|
||||
pitchVector.x = 0.f;
|
||||
|
@ -142,7 +146,7 @@ void Hair::simulate(float deltaTime) {
|
|||
pitchVector = glm::normalize(pitchVector);
|
||||
float angle = atan2f(pitchVector.y, -pitchVector.z) + PI;
|
||||
glm::vec3 delta = glm::vec3(0.0f, 1.0f, 0.f) * glm::angleAxis(angle, glm::vec3(1, 0, 0));
|
||||
_hairPosition[vertexIndex] -= delta * radius * _angularVelocity.x * HAIR_ANGULAR_VELOCITY_COUPLING * deltaTime;
|
||||
_hairPosition[vertexIndex] -= delta * radius * (angularVelocity.x - angularAcceleration.x) * deltaTime;
|
||||
}
|
||||
glm::vec3 rollVector = _hairPosition[vertexIndex];
|
||||
rollVector.z = 0.f;
|
||||
|
@ -151,7 +155,7 @@ void Hair::simulate(float deltaTime) {
|
|||
pitchVector = glm::normalize(rollVector);
|
||||
float angle = atan2f(rollVector.x, rollVector.y) + PI;
|
||||
glm::vec3 delta = glm::vec3(-1.0f, 0.0f, 0.f) * glm::angleAxis(angle, glm::vec3(0, 0, 1));
|
||||
_hairPosition[vertexIndex] -= delta * radius * _angularVelocity.z * HAIR_ANGULAR_VELOCITY_COUPLING * deltaTime;
|
||||
_hairPosition[vertexIndex] -= delta * radius * (angularVelocity.z - angularAcceleration.z) * deltaTime;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -25,9 +25,9 @@ const int HAIR_CONSTRAINTS = 2;
|
|||
|
||||
const int DEFAULT_HAIR_STRANDS = 50;
|
||||
const int DEFAULT_HAIR_LINKS = 10;
|
||||
const float DEFAULT_HAIR_RADIUS = 0.15;
|
||||
const float DEFAULT_HAIR_LINK_LENGTH = 0.03;
|
||||
const float DEFAULT_HAIR_THICKNESS = 0.015;
|
||||
const float DEFAULT_HAIR_RADIUS = 0.15f;
|
||||
const float DEFAULT_HAIR_LINK_LENGTH = 0.03f;
|
||||
const float DEFAULT_HAIR_THICKNESS = 0.015f;
|
||||
|
||||
class Hair {
|
||||
public:
|
||||
|
@ -40,6 +40,7 @@ public:
|
|||
void render();
|
||||
void setAcceleration(const glm::vec3& acceleration) { _acceleration = acceleration; }
|
||||
void setAngularVelocity(const glm::vec3& angularVelocity) { _angularVelocity = angularVelocity; }
|
||||
void setAngularAcceleration(const glm::vec3& angularAcceleration) { _angularAcceleration = angularAcceleration; }
|
||||
void setGravity(const glm::vec3& gravity) { _gravity = gravity; }
|
||||
|
||||
private:
|
||||
|
@ -58,6 +59,7 @@ private:
|
|||
int* _hairConstraints;
|
||||
glm::vec3 _acceleration;
|
||||
glm::vec3 _angularVelocity;
|
||||
glm::vec3 _angularAcceleration;
|
||||
glm::vec3 _gravity;
|
||||
|
||||
|
||||
|
|
|
@ -32,6 +32,7 @@
|
|||
#include <AccountManager.h>
|
||||
#include <XmppClient.h>
|
||||
#include <UUID.h>
|
||||
#include <UserActivityLogger.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "AccountManager.h"
|
||||
|
@ -111,7 +112,8 @@ Menu::Menu() :
|
|||
_preferencesDialog(NULL),
|
||||
_loginDialog(NULL),
|
||||
_snapshotsLocation(),
|
||||
_scriptsLocation()
|
||||
_scriptsLocation(),
|
||||
_walletPrivateKey()
|
||||
{
|
||||
Application *appInstance = Application::getInstance();
|
||||
|
||||
|
@ -240,8 +242,8 @@ Menu::Menu() :
|
|||
|
||||
const QXmppClient& xmppClient = XmppClient::getInstance().getXMPPClient();
|
||||
toggleChat();
|
||||
connect(&xmppClient, SIGNAL(connected()), this, SLOT(toggleChat()));
|
||||
connect(&xmppClient, SIGNAL(disconnected()), this, SLOT(toggleChat()));
|
||||
connect(&xmppClient, &QXmppClient::connected, this, &Menu::toggleChat);
|
||||
connect(&xmppClient, &QXmppClient::disconnected, this, &Menu::toggleChat);
|
||||
|
||||
QDir::setCurrent(Application::resourcesPath());
|
||||
// init chat window to listen chat
|
||||
|
@ -389,12 +391,15 @@ Menu::Menu() :
|
|||
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::CollideAsRagdoll);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::LookAtVectors, 0, false);
|
||||
#ifdef HAVE_FACESHIFT
|
||||
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu,
|
||||
MenuOption::Faceshift,
|
||||
0,
|
||||
true,
|
||||
appInstance->getFaceshift(),
|
||||
SLOT(setTCPEnabled(bool)));
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_FACEPLUS
|
||||
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::Faceplus, 0, true,
|
||||
appInstance->getFaceplus(), SLOT(updateEnabled()));
|
||||
|
@ -411,6 +416,7 @@ Menu::Menu() :
|
|||
|
||||
QMenu* sixenseOptionsMenu = developerMenu->addMenu("Sixense Options");
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseLasers, 0, true);
|
||||
|
||||
QMenu* handOptionsMenu = developerMenu->addMenu("Hand Options");
|
||||
|
||||
|
@ -433,8 +439,16 @@ Menu::Menu() :
|
|||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false);
|
||||
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::DisableNackPackets, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(developerMenu,
|
||||
MenuOption::DisableActivityLogger,
|
||||
0,
|
||||
false,
|
||||
&UserActivityLogger::getInstance(),
|
||||
SLOT(disable(bool)));
|
||||
|
||||
addActionToQMenuAndActionHash(developerMenu, MenuOption::WalletPrivateKey, 0, this, SLOT(changePrivateKey()));
|
||||
|
||||
addDisabledActionAndSeparator(developerMenu, "Testing");
|
||||
|
||||
|
@ -579,7 +593,13 @@ Menu::Menu() :
|
|||
Qt::CTRL | Qt::SHIFT | Qt::Key_U,
|
||||
false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::DisableQAudioOutputOverflowCheck, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStats,
|
||||
0,
|
||||
false,
|
||||
appInstance->getAudio(),
|
||||
SLOT(toggleStats()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::DisableQAudioOutputOverflowCheck, 0, true);
|
||||
|
||||
addActionToQMenuAndActionHash(developerMenu, MenuOption::PasteToVoxel,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_V,
|
||||
|
@ -632,6 +652,8 @@ void Menu::loadSettings(QSettings* settings) {
|
|||
_viewFrustumOffset.distance = loadSetting(settings, "viewFrustumOffsetDistance", 0.0f);
|
||||
_viewFrustumOffset.up = loadSetting(settings, "viewFrustumOffsetUp", 0.0f);
|
||||
settings->endGroup();
|
||||
|
||||
_walletPrivateKey = settings->value("privateKey").toByteArray();
|
||||
|
||||
scanMenuBar(&loadAction, settings);
|
||||
Application::getInstance()->getAvatar()->loadData(settings);
|
||||
|
@ -675,6 +697,7 @@ void Menu::saveSettings(QSettings* settings) {
|
|||
settings->setValue("viewFrustumOffsetDistance", _viewFrustumOffset.distance);
|
||||
settings->setValue("viewFrustumOffsetUp", _viewFrustumOffset.up);
|
||||
settings->endGroup();
|
||||
settings->setValue("privateKey", _walletPrivateKey);
|
||||
|
||||
scanMenuBar(&saveAction, settings);
|
||||
Application::getInstance()->getAvatar()->saveData(settings);
|
||||
|
@ -989,6 +1012,25 @@ void Menu::editAnimations() {
|
|||
}
|
||||
}
|
||||
|
||||
void Menu::changePrivateKey() {
|
||||
// setup the dialog
|
||||
QInputDialog privateKeyDialog(Application::getInstance()->getWindow());
|
||||
privateKeyDialog.setWindowTitle("Change Private Key");
|
||||
privateKeyDialog.setLabelText("RSA 2048-bit Private Key:");
|
||||
privateKeyDialog.setWindowFlags(Qt::Sheet);
|
||||
privateKeyDialog.setTextValue(QString(_walletPrivateKey));
|
||||
privateKeyDialog.resize(privateKeyDialog.parentWidget()->size().width() * DIALOG_RATIO_OF_WINDOW,
|
||||
privateKeyDialog.size().height());
|
||||
|
||||
int dialogReturn = privateKeyDialog.exec();
|
||||
if (dialogReturn == QDialog::Accepted) {
|
||||
// pull the private key from the dialog
|
||||
_walletPrivateKey = privateKeyDialog.textValue().toUtf8();
|
||||
}
|
||||
|
||||
sendFakeEnterEvent();
|
||||
}
|
||||
|
||||
void Menu::goToDomain(const QString newDomain) {
|
||||
if (NodeList::getInstance()->getDomainHandler().getHostname() != newDomain) {
|
||||
// send a node kill request, indicating to other clients that they should play the "disappeared" effect
|
||||
|
|
|
@ -159,6 +159,8 @@ public:
|
|||
void static goToOrientation(QString orientation);
|
||||
void static goToDomain(const QString newDomain);
|
||||
void static goTo(QString destination);
|
||||
|
||||
const QByteArray& getWalletPrivateKey() const { return _walletPrivateKey; }
|
||||
|
||||
signals:
|
||||
void scriptLocationChanged(const QString& newPath);
|
||||
|
@ -197,6 +199,7 @@ private slots:
|
|||
void editPreferences();
|
||||
void editAttachments();
|
||||
void editAnimations();
|
||||
void changePrivateKey();
|
||||
void goToDomainDialog();
|
||||
void goToLocation();
|
||||
void nameLocation();
|
||||
|
@ -293,6 +296,8 @@ private:
|
|||
QAction* _chatAction;
|
||||
QString _snapshotsLocation;
|
||||
QString _scriptsLocation;
|
||||
QByteArray _walletPrivateKey;
|
||||
|
||||
};
|
||||
|
||||
namespace MenuOption {
|
||||
|
@ -305,13 +310,15 @@ namespace MenuOption {
|
|||
const QString Attachments = "Attachments...";
|
||||
const QString AudioNoiseReduction = "Audio Noise Reduction";
|
||||
const QString AudioScope = "Audio Scope";
|
||||
const QString AudioScopePause = "Pause Audio Scope";
|
||||
const QString AudioScopeFrames = "Display Frames";
|
||||
const QString AudioScopeFiveFrames = "Five";
|
||||
const QString AudioScopeTwentyFrames = "Twenty";
|
||||
const QString AudioScopeFiftyFrames = "Fifty";
|
||||
const QString AudioToneInjection = "Inject Test Tone";
|
||||
const QString AudioScopeFiveFrames = "Five";
|
||||
const QString AudioScopeFrames = "Display Frames";
|
||||
const QString AudioScopePause = "Pause Audio Scope";
|
||||
const QString AudioScopeTwentyFrames = "Twenty";
|
||||
const QString AudioStats = "Audio Stats";
|
||||
const QString AudioSpatialProcessingAlternateDistanceAttenuate = "Alternate distance attenuation";
|
||||
const QString AudioSpatialProcessing = "Audio Spatial Processing";
|
||||
const QString AudioSpatialProcessingDontDistanceAttenuate = "Don't calculate distance attenuation";
|
||||
const QString AudioSpatialProcessingHeadOriented = "Head Oriented";
|
||||
const QString AudioSpatialProcessingIncludeOriginal = "Includes Network Original";
|
||||
const QString AudioSpatialProcessingPreDelay = "Add Pre-Delay";
|
||||
|
@ -321,14 +328,12 @@ namespace MenuOption {
|
|||
const QString AudioSpatialProcessingSlightlyRandomSurfaces = "Slightly Random Surfaces";
|
||||
const QString AudioSpatialProcessingStereoSource = "Stereo Source";
|
||||
const QString AudioSpatialProcessingWithDiffusions = "With Diffusions";
|
||||
const QString AudioSpatialProcessingDontDistanceAttenuate = "Don't calculate distance attenuation";
|
||||
const QString AudioSpatialProcessingAlternateDistanceAttenuate = "Alternate distance attenuation";
|
||||
const QString AudioToneInjection = "Inject Test Tone";
|
||||
const QString Avatars = "Avatars";
|
||||
const QString AvatarsReceiveShadows = "Avatars Receive Shadows";
|
||||
const QString Bandwidth = "Bandwidth Display";
|
||||
const QString BandwidthDetails = "Bandwidth Details";
|
||||
const QString BuckyBalls = "Bucky Balls";
|
||||
const QString StringHair = "String Hair";
|
||||
const QString CascadedShadows = "Cascaded";
|
||||
const QString Chat = "Chat...";
|
||||
const QString ChatCircling = "Chat Circling";
|
||||
|
@ -341,21 +346,22 @@ namespace MenuOption {
|
|||
const QString Console = "Console...";
|
||||
const QString DecreaseAvatarSize = "Decrease Avatar Size";
|
||||
const QString DecreaseVoxelSize = "Decrease Voxel Size";
|
||||
const QString DisableActivityLogger = "Disable Activity Logger";
|
||||
const QString DisableAutoAdjustLOD = "Disable Automatically Adjusting LOD";
|
||||
const QString DisableNackPackets = "Disable NACK Packets";
|
||||
const QString DisableQAudioOutputOverflowCheck = "Disable QAudioOutput Overflow Check";
|
||||
const QString DisableQAudioOutputOverflowCheck = "Disable Audio Output Overflow Check";
|
||||
const QString DisplayFrustum = "Display Frustum";
|
||||
const QString DisplayHands = "Display Hands";
|
||||
const QString DisplayHandTargets = "Display Hand Targets";
|
||||
const QString DisplayModelBounds = "Display Model Bounds";
|
||||
const QString DisplayModelElementProxy = "Display Model Element Bounds";
|
||||
const QString DisplayModelElementChildProxies = "Display Model Element Children";
|
||||
const QString DisplayModelElementProxy = "Display Model Element Bounds";
|
||||
const QString DisplayTimingDetails = "Display Timing Details";
|
||||
const QString DontFadeOnVoxelServerChanges = "Don't Fade In/Out on Voxel Server Changes";
|
||||
const QString EchoLocalAudio = "Echo Local Audio";
|
||||
const QString EchoServerAudio = "Echo Server Audio";
|
||||
const QString EnableGlowEffect = "Enable Glow Effect (Warning: Poor Oculus Performance)";
|
||||
const QString Enable3DTVMode = "Enable 3DTV Mode";
|
||||
const QString EnableGlowEffect = "Enable Glow Effect (Warning: Poor Oculus Performance)";
|
||||
const QString EnableVRMode = "Enable VR Mode";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
|
@ -365,7 +371,6 @@ namespace MenuOption {
|
|||
const QString Faceplus = "Faceplus";
|
||||
const QString Faceshift = "Faceshift";
|
||||
const QString FilterSixense = "Smooth Sixense Movement";
|
||||
const QString LowVelocityFilter = "Low Velocity Filter";
|
||||
const QString FirstPerson = "First Person";
|
||||
const QString FocusIndicators = "Focus Indicators";
|
||||
const QString FrameTimer = "Show Timer";
|
||||
|
@ -375,10 +380,9 @@ namespace MenuOption {
|
|||
const QString GlowMode = "Cycle Glow Mode";
|
||||
const QString GlowWhenSpeaking = "Glow When Speaking";
|
||||
const QString GoHome = "Go Home";
|
||||
const QString GoTo = "Go To...";
|
||||
const QString GoToDomain = "Go To Domain...";
|
||||
const QString GoTo = "Go To...";
|
||||
const QString GoToLocation = "Go To Location...";
|
||||
const QString ObeyEnvironmentalGravity = "Obey Environmental Gravity";
|
||||
const QString HandsCollideWithSelf = "Collide With Self";
|
||||
const QString HeadMouse = "Head Mouse";
|
||||
const QString IncreaseAvatarSize = "Increase Avatar Size";
|
||||
|
@ -386,21 +390,23 @@ namespace MenuOption {
|
|||
const QString LoadScript = "Open and Run Script File...";
|
||||
const QString LoadScriptURL = "Open and Run Script from URL...";
|
||||
const QString LodTools = "LOD Tools";
|
||||
const QString Log = "Log";
|
||||
const QString Login = "Login";
|
||||
const QString Log = "Log";
|
||||
const QString Logout = "Logout";
|
||||
const QString LookAtVectors = "Look-at Vectors";
|
||||
const QString LowVelocityFilter = "Low Velocity Filter";
|
||||
const QString MetavoxelEditor = "Metavoxel Editor...";
|
||||
const QString Metavoxels = "Metavoxels";
|
||||
const QString Mirror = "Mirror";
|
||||
const QString Models = "Models";
|
||||
const QString ModelOptions = "Model Options";
|
||||
const QString Models = "Models";
|
||||
const QString MoveWithLean = "Move with Lean";
|
||||
const QString MuteAudio = "Mute Microphone";
|
||||
const QString MuteEnvironment = "Mute Environment";
|
||||
const QString MyLocations = "My Locations...";
|
||||
const QString NameLocation = "Name this location";
|
||||
const QString NewVoxelCullingMode = "New Voxel Culling Mode";
|
||||
const QString ObeyEnvironmentalGravity = "Obey Environmental Gravity";
|
||||
const QString OctreeStats = "Voxel and Particle Statistics";
|
||||
const QString OffAxisProjection = "Off-Axis Projection";
|
||||
const QString OldVoxelCullingMode = "Old Voxel Culling Mode";
|
||||
|
@ -420,17 +426,19 @@ namespace MenuOption {
|
|||
const QString ScriptEditor = "Script Editor...";
|
||||
const QString SettingsExport = "Export Settings";
|
||||
const QString SettingsImport = "Import Settings";
|
||||
const QString SimpleShadows = "Simple";
|
||||
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
|
||||
const QString ShowBordersVoxelNodes = "Show Voxel Nodes";
|
||||
const QString ShowBordersModelNodes = "Show Model Nodes";
|
||||
const QString ShowBordersParticleNodes = "Show Particle Nodes";
|
||||
const QString ShowBordersVoxelNodes = "Show Voxel Nodes";
|
||||
const QString ShowIKConstraints = "Show IK Constraints";
|
||||
const QString SimpleShadows = "Simple";
|
||||
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
|
||||
const QString SixenseLasers = "Enable Sixense UI Lasers";
|
||||
const QString StandOnNearbyFloors = "Stand on nearby floors";
|
||||
const QString Stars = "Stars";
|
||||
const QString Stats = "Stats";
|
||||
const QString StereoAudio = "Stereo Audio";
|
||||
const QString StopAllScripts = "Stop All Scripts";
|
||||
const QString StringHair = "String Hair";
|
||||
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
|
||||
const QString TestPing = "Test Ping";
|
||||
const QString TransmitterDrive = "Transmitter Drive";
|
||||
|
@ -443,6 +451,7 @@ namespace MenuOption {
|
|||
const QString VoxelMode = "Cycle Voxel Mode";
|
||||
const QString Voxels = "Voxels";
|
||||
const QString VoxelTextures = "Voxel Textures";
|
||||
const QString WalletPrivateKey = "Wallet Private Key";
|
||||
}
|
||||
|
||||
void sendFakeEnterEvent();
|
||||
|
|
interface/src/SignedWalletTransaction.cpp (new file, 82 lines)
|
@ -0,0 +1,82 @@
|
|||
//
|
||||
// SignedWalletTransaction.cpp
|
||||
// interface/src
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-07-11.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <QtCore/QCryptographicHash>
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QFile>
|
||||
|
||||
#include <openssl/bio.h>
|
||||
#include <openssl/rsa.h>
|
||||
#include <openssl/pem.h>
|
||||
|
||||
#include <AccountManager.h>
|
||||
|
||||
#include "Menu.h"
|
||||
|
||||
#include "SignedWalletTransaction.h"
|
||||
|
||||
SignedWalletTransaction::SignedWalletTransaction(const QUuid& destinationUUID, qint64 amount,
|
||||
qint64 messageTimestamp, qint64 expiryDelta) :
|
||||
WalletTransaction(destinationUUID, amount),
|
||||
_messageTimestamp(messageTimestamp),
|
||||
_expiryDelta(expiryDelta)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
QByteArray SignedWalletTransaction::hexMessage() {
|
||||
// build the message using the components of this transaction
|
||||
|
||||
// UUID, source UUID, destination UUID, message timestamp, expiry delta, amount
|
||||
QByteArray messageBinary;
|
||||
|
||||
messageBinary.append(_uuid.toRfc4122());
|
||||
|
||||
messageBinary.append(reinterpret_cast<const char*>(&_messageTimestamp), sizeof(_messageTimestamp));
|
||||
messageBinary.append(reinterpret_cast<const char*>(&_expiryDelta), sizeof(_expiryDelta));
|
||||
|
||||
messageBinary.append(AccountManager::getInstance().getAccountInfo().getWalletID().toRfc4122());
|
||||
|
||||
messageBinary.append(_destinationUUID.toRfc4122());
|
||||
|
||||
messageBinary.append(reinterpret_cast<const char*>(&_amount), sizeof(_amount));
|
||||
|
||||
return messageBinary.toHex();
|
||||
}
|
||||
|
||||
QByteArray SignedWalletTransaction::messageDigest() {
|
||||
return QCryptographicHash::hash(hexMessage(), QCryptographicHash::Sha256).toHex();
|
||||
}
|
||||
|
||||
QByteArray SignedWalletTransaction::signedMessageDigest() {
|
||||
// pull the current private key from menu into RSA structure in memory
|
||||
QByteArray privateKeyByteArray = Menu::getInstance()->getWalletPrivateKey();
|
||||
|
||||
BIO* privateKeyBIO = NULL;
|
||||
RSA* rsaPrivateKey = NULL;
|
||||
|
||||
privateKeyBIO = BIO_new_mem_buf(privateKeyByteArray.data(), privateKeyByteArray.size());
|
||||
PEM_read_bio_RSAPrivateKey(privateKeyBIO, &rsaPrivateKey, NULL, NULL);
|
||||
|
||||
QByteArray digestToEncrypt = messageDigest();
|
||||
QByteArray encryptedDigest(RSA_size(rsaPrivateKey), 0);
|
||||
|
||||
int encryptReturn = RSA_private_encrypt(digestToEncrypt.size(),
|
||||
reinterpret_cast<const unsigned char*>(digestToEncrypt.constData()),
|
||||
reinterpret_cast<unsigned char*>(encryptedDigest.data()),
|
||||
rsaPrivateKey, RSA_PKCS1_PADDING);
|
||||
|
||||
// free the two structures used
|
||||
BIO_free(privateKeyBIO);
|
||||
RSA_free(rsaPrivateKey);
|
||||
|
||||
return (encryptReturn != -1) ? encryptedDigest : QByteArray();
|
||||
}
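
For completeness, a counterpart sketch of how a receiver might check such a signature: recover the digest with the matching public key and compare it against a locally computed digest of the same hex message. This is only an illustration of the OpenSSL calls involved, not code from the repository; the function name and the PEM public-key format are assumptions.

    #include <QtCore/QByteArray>
    #include <QtCore/QCryptographicHash>

    #include <openssl/bio.h>
    #include <openssl/pem.h>
    #include <openssl/rsa.h>

    // Hypothetical verifier: returns true if signedDigest decrypts (with the public key)
    // to the SHA-256 hex digest of hexMessage, mirroring signedMessageDigest() above.
    bool verifySignedDigest(const QByteArray& publicKeyPEM, const QByteArray& hexMessage,
                            const QByteArray& signedDigest) {
        BIO* publicKeyBIO = BIO_new_mem_buf(publicKeyPEM.data(), publicKeyPEM.size());
        RSA* rsaPublicKey = PEM_read_bio_RSA_PUBKEY(publicKeyBIO, NULL, NULL, NULL);
        if (!rsaPublicKey) {
            BIO_free(publicKeyBIO);
            return false;
        }

        QByteArray recovered(RSA_size(rsaPublicKey), 0);
        int recoveredSize = RSA_public_decrypt(signedDigest.size(),
                                               reinterpret_cast<const unsigned char*>(signedDigest.constData()),
                                               reinterpret_cast<unsigned char*>(recovered.data()),
                                               rsaPublicKey, RSA_PKCS1_PADDING);

        BIO_free(publicKeyBIO);
        RSA_free(rsaPublicKey);

        if (recoveredSize == -1) {
            return false;
        }

        QByteArray expectedDigest = QCryptographicHash::hash(hexMessage, QCryptographicHash::Sha256).toHex();
        return recovered.left(recoveredSize) == expectedDigest;
    }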
|
interface/src/SignedWalletTransaction.h (new file, 31 lines)
|
@ -0,0 +1,31 @@
|
|||
//
|
||||
// SignedWalletTransaction.h
|
||||
// interface/src
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-07-11.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_SignedWalletTransaction_h
|
||||
#define hifi_SignedWalletTransaction_h
|
||||
|
||||
#include <WalletTransaction.h>
|
||||
|
||||
class SignedWalletTransaction : public WalletTransaction {
|
||||
Q_OBJECT
|
||||
public:
|
||||
SignedWalletTransaction(const QUuid& destinationUUID, qint64 amount, qint64 messageTimestamp, qint64 expiryDelta);
|
||||
|
||||
QByteArray hexMessage();
|
||||
QByteArray messageDigest();
|
||||
QByteArray signedMessageDigest();
|
||||
|
||||
private:
|
||||
qint64 _messageTimestamp;
|
||||
qint64 _expiryDelta;
|
||||
};
|
||||
|
||||
#endif // hifi_SignedWalletTransaction_h
|
|
@ -9,7 +9,6 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifdef HAVE_QXMPP
|
||||
|
||||
#include <AccountManager.h>
|
||||
|
||||
|
@ -18,6 +17,7 @@
|
|||
const QString DEFAULT_XMPP_SERVER = "chat.highfidelity.io";
|
||||
const QString DEFAULT_CHAT_ROOM = "public@public-chat.highfidelity.io";
|
||||
|
||||
#ifdef HAVE_QXMPP
|
||||
XmppClient::XmppClient() :
|
||||
_xmppClient(),
|
||||
_xmppMUCManager()
|
||||
|
@ -26,6 +26,11 @@ XmppClient::XmppClient() :
|
|||
connect(&accountManager, SIGNAL(profileChanged()), this, SLOT(connectToServer()));
|
||||
connect(&accountManager, SIGNAL(logoutComplete()), this, SLOT(disconnectFromServer()));
|
||||
}
|
||||
#else
|
||||
XmppClient::XmppClient() {
|
||||
|
||||
}
|
||||
#endif
|
||||
|
||||
XmppClient& XmppClient::getInstance() {
|
||||
static XmppClient sharedInstance;
|
||||
|
@ -33,18 +38,23 @@ XmppClient& XmppClient::getInstance() {
|
|||
}
|
||||
|
||||
void XmppClient::xmppConnected() {
|
||||
#ifdef HAVE_QXMPP
|
||||
_publicChatRoom = _xmppMUCManager.addRoom(DEFAULT_CHAT_ROOM);
|
||||
_publicChatRoom->setNickName(AccountManager::getInstance().getAccountInfo().getUsername());
|
||||
_publicChatRoom->join();
|
||||
emit joinedPublicChatRoom();
|
||||
#endif
|
||||
}
|
||||
|
||||
void XmppClient::xmppError(QXmppClient::Error error) {
|
||||
#ifdef HAVE_QXMPP
|
||||
qDebug() << "Error connnecting to XMPP for user "
|
||||
<< AccountManager::getInstance().getAccountInfo().getUsername() << ": " << error;
|
||||
#endif
|
||||
}
|
||||
|
||||
void XmppClient::connectToServer() {
|
||||
#ifdef HAVE_QXMPP
|
||||
disconnectFromServer();
|
||||
|
||||
if (_xmppClient.addExtension(&_xmppMUCManager)) {
|
||||
|
@ -55,12 +65,15 @@ void XmppClient::connectToServer() {
|
|||
QString user = accountManager.getAccountInfo().getUsername();
|
||||
const QString& password = accountManager.getAccountInfo().getXMPPPassword();
|
||||
_xmppClient.connectToServer(user + "@" + DEFAULT_XMPP_SERVER, password);
|
||||
#endif
|
||||
}
|
||||
|
||||
void XmppClient::disconnectFromServer() {
|
||||
#ifdef HAVE_QXMPP
|
||||
if (_xmppClient.isConnected()) {
|
||||
_xmppClient.disconnectFromServer();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
XmppClient::XmppClient(const XmppClient& other) {
|
||||
|
@ -70,5 +83,3 @@ XmppClient::XmppClient(const XmppClient& other) {
|
|||
void XmppClient::operator =(XmppClient const& other) {
|
||||
Q_UNUSED(other);
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
@ -9,25 +9,27 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifdef HAVE_QXMPP
|
||||
|
||||
#ifndef hifi_XmppClient_h
|
||||
#define hifi_XmppClient_h
|
||||
|
||||
#include <QObject>
|
||||
|
||||
#ifdef HAVE_QXMPP
|
||||
#include <QXmppClient.h>
|
||||
#include <QXmppMucManager.h>
|
||||
#endif
|
||||
|
||||
/// Generalized threaded processor for handling received inbound packets.
|
||||
class XmppClient : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
static XmppClient& getInstance();
|
||||
|
||||
#ifdef HAVE_QXMPP
|
||||
QXmppClient& getXMPPClient() { return _xmppClient; }
|
||||
const QXmppMucRoom* getPublicChatRoom() const { return _publicChatRoom; }
|
||||
|
||||
#endif
|
||||
|
||||
signals:
|
||||
void joinedPublicChatRoom();
|
||||
|
||||
|
@ -43,11 +45,11 @@ private:
|
|||
XmppClient(XmppClient const& other); // not implemented
|
||||
void operator=(XmppClient const& other); // not implemented
|
||||
|
||||
#ifdef HAVE_QXMPP
|
||||
QXmppClient _xmppClient;
|
||||
QXmppMucManager _xmppMUCManager;
|
||||
QXmppMucRoom* _publicChatRoom;
|
||||
#endif
|
||||
};
|
||||
|
||||
#endif // __interface__XmppClient__
|
||||
|
||||
#endif // hifi_XmppClient_h
|
||||
|
|
|
@ -52,6 +52,8 @@ Avatar::Avatar() :
|
|||
_lastVelocity(0.0f, 0.0f, 0.0f),
|
||||
_acceleration(0.0f, 0.0f, 0.0f),
|
||||
_angularVelocity(0.0f, 0.0f, 0.0f),
|
||||
_lastAngularVelocity(0.0f, 0.0f, 0.0f),
|
||||
_angularAcceleration(0.0f, 0.0f, 0.0f),
|
||||
_lastOrientation(),
|
||||
_leanScale(0.5f),
|
||||
_scale(1.0f),
|
||||
|
@ -60,7 +62,6 @@ Avatar::Avatar() :
|
|||
_mouseRayDirection(0.0f, 0.0f, 0.0f),
|
||||
_moving(false),
|
||||
_collisionGroups(0),
|
||||
_numLocalLights(0),
|
||||
_initialized(false),
|
||||
_shouldRenderBillboard(true)
|
||||
{
|
||||
|
@ -151,7 +152,8 @@ void Avatar::simulate(float deltaTime) {
|
|||
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
|
||||
PerformanceTimer perfTimer("hair");
|
||||
_hair.setAcceleration(getAcceleration() * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setAngularVelocity(getAngularVelocity() + getHead()->getAngularVelocity() * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setAngularVelocity((getAngularVelocity() + getHead()->getAngularVelocity()) * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setAngularAcceleration(getAngularAcceleration() * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()) * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.simulate(deltaTime);
|
||||
}
|
||||
|
@ -187,6 +189,7 @@ void Avatar::updateAcceleration(float deltaTime) {
|
|||
glm::quat orientation = getOrientation();
|
||||
glm::quat delta = glm::inverse(_lastOrientation) * orientation;
|
||||
_angularVelocity = safeEulerAngles(delta) * (1.f / deltaTime);
|
||||
_angularAcceleration = (_angularVelocity - _lastAngularVelocity) * (1.f / deltaTime);
|
||||
_lastOrientation = getOrientation();
|
||||
}
|
||||
|
||||
|
@ -215,6 +218,52 @@ static TextRenderer* textRenderer(TextRendererType type) {
|
|||
}
|
||||
|
||||
void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
|
||||
|
||||
if (glm::distance(Application::getInstance()->getAvatar()->getPosition(),
|
||||
_position) < 10.0f) {
|
||||
// render pointing lasers
|
||||
glm::vec3 laserColor = glm::vec3(1.0f, 0.0f, 1.0f);
|
||||
float laserLength = 50.0f;
|
||||
if (_handState == HAND_STATE_LEFT_POINTING ||
|
||||
_handState == HAND_STATE_BOTH_POINTING) {
|
||||
int leftIndex = _skeletonModel.getLeftHandJointIndex();
|
||||
glm::vec3 leftPosition;
|
||||
glm::quat leftRotation;
|
||||
_skeletonModel.getJointPositionInWorldFrame(leftIndex, leftPosition);
|
||||
_skeletonModel.getJointRotationInWorldFrame(leftIndex, leftRotation);
|
||||
glPushMatrix(); {
|
||||
glTranslatef(leftPosition.x, leftPosition.y, leftPosition.z);
|
||||
float angle = glm::degrees(glm::angle(leftRotation));
|
||||
glm::vec3 axis = glm::axis(leftRotation);
|
||||
glRotatef(angle, axis.x, axis.y, axis.z);
|
||||
glBegin(GL_LINES);
|
||||
glColor3f(laserColor.x, laserColor.y, laserColor.z);
|
||||
glVertex3f(0.0f, 0.0f, 0.0f);
|
||||
glVertex3f(0.0f, laserLength, 0.0f);
|
||||
glEnd();
|
||||
} glPopMatrix();
|
||||
}
|
||||
if (_handState == HAND_STATE_RIGHT_POINTING ||
|
||||
_handState == HAND_STATE_BOTH_POINTING) {
|
||||
int rightIndex = _skeletonModel.getRightHandJointIndex();
|
||||
glm::vec3 rightPosition;
|
||||
glm::quat rightRotation;
|
||||
_skeletonModel.getJointPositionInWorldFrame(rightIndex, rightPosition);
|
||||
_skeletonModel.getJointRotationInWorldFrame(rightIndex, rightRotation);
|
||||
glPushMatrix(); {
|
||||
glTranslatef(rightPosition.x, rightPosition.y, rightPosition.z);
|
||||
float angle = glm::degrees(glm::angle(rightRotation));
|
||||
glm::vec3 axis = glm::axis(rightRotation);
|
||||
glRotatef(angle, axis.x, axis.y, axis.z);
|
||||
glBegin(GL_LINES);
|
||||
glColor3f(laserColor.x, laserColor.y, laserColor.z);
|
||||
glVertex3f(0.0f, 0.0f, 0.0f);
|
||||
glVertex3f(0.0f, laserLength, 0.0f);
|
||||
glEnd();
|
||||
} glPopMatrix();
|
||||
}
|
||||
}
|
||||
|
||||
// simple frustum check
|
||||
float boundingRadius = getBillboardSize();
|
||||
ViewFrustum* frustum = (renderMode == Avatar::SHADOW_RENDER_MODE) ?
|
||||
|
@ -245,19 +294,9 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
|
|||
|
||||
|
||||
// local lights directions and colors
|
||||
getSkeletonModel().setNumLocalLights(_numLocalLights);
|
||||
getHead()->getFaceModel().setNumLocalLights(_numLocalLights);
|
||||
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
|
||||
glm::vec3 normalized = glm::normalize(_localLightDirections[i]);
|
||||
|
||||
// body
|
||||
getSkeletonModel().setLocalLightColor(_localLightColors[i], i);
|
||||
getSkeletonModel().setLocalLightDirection(normalized, i);
|
||||
|
||||
// head
|
||||
getHead()->getFaceModel().setLocalLightColor(_localLightColors[i], i);
|
||||
getHead()->getFaceModel().setLocalLightDirection(_localLightDirections[i], i);
|
||||
}
|
||||
const QVector<Model::LocalLight>& localLights = Application::getInstance()->getAvatarManager().getLocalLights();
|
||||
_skeletonModel.setLocalLights(localLights);
|
||||
getHead()->getFaceModel().setLocalLights(localLights);
|
||||
|
||||
// render body
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
|
||||
|
@ -926,29 +965,3 @@ void Avatar::setShowDisplayName(bool showDisplayName) {
|
|||
|
||||
}
|
||||
|
||||
void Avatar::setLocalLightDirection(const glm::vec3& direction, int lightIndex) {
|
||||
_localLightDirections[lightIndex] = direction;
|
||||
qDebug( "set light %d direction ( %f, %f, %f )\n", lightIndex, direction.x, direction.y, direction.z );
|
||||
}
|
||||
|
||||
void Avatar::setLocalLightColor(const glm::vec3& color, int lightIndex) {
|
||||
_localLightColors[lightIndex] = color;
|
||||
qDebug( "set light %d color ( %f, %f, %f )\n", lightIndex, color.x, color.y, color.z );
|
||||
}
|
||||
|
||||
void Avatar::addLocalLight() {
|
||||
if (_numLocalLights + 1 <= MAX_LOCAL_LIGHTS) {
|
||||
++_numLocalLights;
|
||||
}
|
||||
|
||||
qDebug("ADD LOCAL LIGHT (numLocalLights = %d)\n", _numLocalLights);
|
||||
}
|
||||
|
||||
void Avatar::removeLocalLight() {
|
||||
if (_numLocalLights - 1 >= 0) {
|
||||
--_numLocalLights;
|
||||
}
|
||||
|
||||
qDebug("REMOVE LOCAL LIGHT (numLocalLights = %d)\n", _numLocalLights);
|
||||
}
|
||||
|
||||
|
|
|
@ -152,6 +152,8 @@ public:
|
|||
|
||||
glm::vec3 getAcceleration() const { return _acceleration; }
|
||||
glm::vec3 getAngularVelocity() const { return _angularVelocity; }
|
||||
glm::vec3 getAngularAcceleration() const { return _angularAcceleration; }
|
||||
|
||||
|
||||
/// Scales a world space position vector relative to the avatar position and scale
|
||||
/// \param vector position to be scaled. Will store the result
|
||||
|
@ -159,11 +161,7 @@ public:
|
|||
|
||||
public slots:
|
||||
void updateCollisionGroups();
|
||||
void setLocalLightDirection(const glm::vec3& direction, int lightIndex);
|
||||
void setLocalLightColor(const glm::vec3& color, int lightIndex);
|
||||
void addLocalLight();
|
||||
void removeLocalLight();
|
||||
|
||||
|
||||
signals:
|
||||
void collisionWithAvatar(const QUuid& myUUID, const QUuid& theirUUID, const CollisionInfo& collision);
|
||||
|
||||
|
@ -176,6 +174,8 @@ protected:
|
|||
glm::vec3 _lastVelocity;
|
||||
glm::vec3 _acceleration;
|
||||
glm::vec3 _angularVelocity;
|
||||
glm::vec3 _lastAngularVelocity;
|
||||
glm::vec3 _angularAcceleration;
|
||||
glm::quat _lastOrientation;
|
||||
float _leanScale;
|
||||
float _scale;
|
||||
|
@ -186,11 +186,6 @@ protected:
|
|||
bool _moving; ///< set when position is changing
|
||||
|
||||
quint32 _collisionGroups;
|
||||
|
||||
// always-present local lighting for the avatar
|
||||
glm::vec3 _localLightDirections[MAX_LOCAL_LIGHTS];
|
||||
glm::vec3 _localLightColors[MAX_LOCAL_LIGHTS];
|
||||
int _numLocalLights;
|
||||
|
||||
// protected methods...
|
||||
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
|
||||
|
|
|
@ -159,55 +159,21 @@ void AvatarManager::clearOtherAvatars() {
|
|||
_myAvatar->clearLookAtTargetAvatar();
|
||||
}
|
||||
|
||||
Avatar* AvatarManager::getAvatarFromIndex(int avatarIndex) {
|
||||
Avatar* avatar = NULL;
|
||||
int numAvatars = _avatarHash.count();
|
||||
if (avatarIndex < numAvatars) {
|
||||
QUuid key = (_avatarHash.keys())[avatarIndex];
|
||||
|
||||
const AvatarSharedPointer& avatarPointer = _avatarHash.value(key);
|
||||
avatar = static_cast<Avatar*>(avatarPointer.data());
|
||||
void AvatarManager::setLocalLights(const QVector<Model::LocalLight>& localLights) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setLocalLights", Q_ARG(const QVector<Model::LocalLight>&, localLights));
|
||||
return;
|
||||
}
|
||||
|
||||
return avatar;
|
||||
_localLights = localLights;
|
||||
}
|
||||
|
||||
void AvatarManager::addAvatarLocalLight(int avatarIndex) {
|
||||
Avatar* avatar = getAvatarFromIndex(avatarIndex);
|
||||
if (avatar) {
|
||||
avatar->addLocalLight();
|
||||
QVector<Model::LocalLight> AvatarManager::getLocalLights() const {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QVector<Model::LocalLight> result;
|
||||
QMetaObject::invokeMethod(const_cast<AvatarManager*>(this), "getLocalLights", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(QVector<Model::LocalLight>, result));
|
||||
return result;
|
||||
}
|
||||
return _localLights;
|
||||
}
|
||||
|
||||
void AvatarManager::removeAvatarLocalLight(int avatarIndex) {
|
||||
Avatar* avatar = getAvatarFromIndex(avatarIndex);
|
||||
if (avatar) {
|
||||
avatar->removeLocalLight();
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::setAvatarLightDirection(const glm::vec3& direction, int lightIndex, int avatarIndex) {
|
||||
Avatar* avatar = getAvatarFromIndex(avatarIndex);
|
||||
if (avatar) {
|
||||
avatar->setLocalLightDirection(direction, lightIndex);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::setAvatarLightColor(const glm::vec3& color, int lightIndex, int avatarIndex) {
|
||||
Avatar* avatar = getAvatarFromIndex(avatarIndex);
|
||||
if (avatar) {
|
||||
avatar->setLocalLightColor(color, lightIndex);
|
||||
}
|
||||
}
|
||||
|
||||
int AvatarManager::getNumAvatars() {
|
||||
return _avatarHash.count();
|
||||
}
|
||||
|
||||
QString AvatarManager::getAvatarHashKey(int index) {
|
||||
QString id = ((_avatarHash.keys())[index]).toString();
|
||||
std::string idString = id.toStdString();
|
||||
return id;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -37,13 +37,8 @@ public:
|
|||
|
||||
void clearOtherAvatars();
|
||||
|
||||
public slots:
|
||||
void setAvatarLightColor(const glm::vec3& color, int lightIndex, int avatarIndex);
|
||||
void setAvatarLightDirection(const glm::vec3& direction, int lightIndex, int avatarIndex);
|
||||
void removeAvatarLocalLight(int avatarIndex);
|
||||
void addAvatarLocalLight(int avatarIndex);
|
||||
int getNumAvatars();
|
||||
QString getAvatarHashKey(int index);
|
||||
Q_INVOKABLE void setLocalLights(const QVector<Model::LocalLight>& localLights);
|
||||
Q_INVOKABLE QVector<Model::LocalLight> getLocalLights() const;
|
||||
|
||||
private:
|
||||
AvatarManager(const AvatarManager& other);
|
||||
|
@ -53,13 +48,13 @@ private:
|
|||
|
||||
AvatarSharedPointer newSharedAvatar();
|
||||
|
||||
Avatar* getAvatarFromIndex(int avatarIndex);
|
||||
|
||||
// virtual override
|
||||
AvatarHash::iterator erase(const AvatarHash::iterator& iterator);
|
||||
|
||||
QVector<AvatarSharedPointer> _avatarFades;
|
||||
QSharedPointer<MyAvatar> _myAvatar;
|
||||
|
||||
QVector<Model::LocalLight> _localLights;
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarManager_h
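
A brief usage sketch of the new accessors: because both methods re-invoke themselves on the manager's own thread (blocking for the getter), callers on other threads need no extra locking. The LocalLight member names below are assumptions for illustration only.

    #include <QtCore/QVector>
    #include <glm/glm.hpp>

    #include "Application.h"
    #include "avatar/AvatarManager.h"

    // Hypothetical caller, e.g. from a script or render thread.
    // Assumes Model::LocalLight exposes 'direction' and 'color' members.
    void setExampleKeyLight() {
        QVector<Model::LocalLight> lights;

        Model::LocalLight keyLight;
        keyLight.direction = glm::normalize(glm::vec3(-1.0f, -1.0f, 0.0f));
        keyLight.color = glm::vec3(1.0f, 0.95f, 0.9f);
        lights.append(keyLight);

        AvatarManager& manager = Application::getInstance()->getAvatarManager();
        manager.setLocalLights(lights);                                 // queued onto the manager's thread
        QVector<Model::LocalLight> current = manager.getLocalLights();  // blocking cross-thread fetch
        Q_UNUSED(current);
    }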
|
||||
|
|
|
@ -125,7 +125,7 @@ void Hand::render(bool isMine, Model::RenderMode renderMode) {
|
|||
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_RESCALE_NORMAL);
|
||||
}
|
||||
}
|
||||
|
||||
void Hand::renderHandTargets(bool isMine) {
|
||||
glPushMatrix();
|
||||
|
|
|
@ -159,6 +159,10 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
|
|||
}
|
||||
}
|
||||
_eyePosition = calculateAverageEyePosition();
|
||||
|
||||
float velocityFilter = glm::clamp(1.0f - glm::length(_filteredEyePosition - _eyePosition), 0.0f, 1.0f);
|
||||
_filteredEyePosition = velocityFilter * _filteredEyePosition + (1.0f - velocityFilter) * _eyePosition;
|
||||
|
||||
}
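
The new filtered eye position is an adaptive low-pass: the blend weight collapses toward zero as the eye moves farther from the filtered value, so large head motions track immediately while small frame-to-frame jitter is smoothed away. A standalone sketch of the same idea (names here are illustrative, not the engine's):

    #include <glm/glm.hpp>

    // Minimal sketch of the adaptive filter used above: the more the input moves,
    // the less the previous filtered value is trusted.
    glm::vec3 adaptiveFilter(const glm::vec3& filtered, const glm::vec3& current) {
        float weight = glm::clamp(1.0f - glm::length(filtered - current), 0.0f, 1.0f);
        return weight * filtered + (1.0f - weight) * current;
    }
    // e.g. a 1 m jump gives weight 0, so the output snaps to 'current';
    // a 1 mm jitter gives weight ~0.999, so the output barely moves.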
|
||||
|
||||
void Head::relaxLean(float deltaTime) {
|
||||
|
|
|
@ -88,8 +88,7 @@ public:
|
|||
|
||||
const bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
|
||||
float getAverageLoudness() const { return _averageLoudness; }
|
||||
glm::vec3 calculateAverageEyePosition() const { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * ONE_HALF; }
|
||||
|
||||
glm::vec3 getFilteredEyePosition() const { return _filteredEyePosition; }
|
||||
/// \return the point about which scaling occurs.
|
||||
glm::vec3 getScalePivot() const;
|
||||
|
||||
|
@ -110,6 +109,8 @@ public:
|
|||
void addLeanDeltas(float sideways, float forward);
|
||||
|
||||
private:
|
||||
glm::vec3 calculateAverageEyePosition() const { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * ONE_HALF; }
|
||||
|
||||
// disallow copies of the Head, copy of owning Avatar is disallowed too
|
||||
Head(const Head&);
|
||||
Head& operator= (const Head&);
|
||||
|
@ -120,6 +121,8 @@ private:
|
|||
glm::vec3 _leftEyePosition;
|
||||
glm::vec3 _rightEyePosition;
|
||||
glm::vec3 _eyePosition;
|
||||
glm::vec3 _filteredEyePosition; // velocity filtered world space eye position
|
||||
|
||||
float _scale;
|
||||
float _lastLoudness;
|
||||
float _audioAttack;
|
||||
|
|
interface/src/avatar/MuscleConstraint.cpp (new file, 34 lines)
|
@ -0,0 +1,34 @@
|
|||
//
|
||||
// MuscleConstraint.cpp
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by Andrew Meadows 2014.07.24
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <SharedUtil.h>
|
||||
#include <VerletPoint.h>
|
||||
|
||||
#include "MuscleConstraint.h"
|
||||
|
||||
const float DEFAULT_MUSCLE_STRENGTH = 0.5f * MAX_MUSCLE_STRENGTH;
|
||||
|
||||
MuscleConstraint::MuscleConstraint(VerletPoint* parent, VerletPoint* child)
|
||||
: _rootPoint(parent), _childPoint(child),
|
||||
_parentIndex(-1), _childndex(-1), _strength(DEFAULT_MUSCLE_STRENGTH) {
|
||||
_childOffset = child->_position - parent->_position;
|
||||
}
|
||||
|
||||
float MuscleConstraint::enforce() {
|
||||
_childPoint->_position = (1.0f - _strength) * _childPoint->_position + _strength * (_rootPoint->_position + _childOffset);
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
void MuscleConstraint::setIndices(int parent, int child) {
|
||||
_parentIndex = parent;
|
||||
_childndex = child;
|
||||
}
|
||||
|
interface/src/avatar/MuscleConstraint.h (new file, 43 lines)
|
@ -0,0 +1,43 @@
|
|||
//
|
||||
// MuscleConstraint.h
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by Andrew Meadows 2014.07.24
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_MuscleConstraint_h
|
||||
#define hifi_MuscleConstraint_h
|
||||
|
||||
#include <Constraint.h>
|
||||
|
||||
// MuscleConstraint is a simple constraint that pushes the child toward an offset relative to the parent.
|
||||
// It does NOT push the parent.
|
||||
|
||||
const float MAX_MUSCLE_STRENGTH = 0.5f;
|
||||
|
||||
class MuscleConstraint : public Constraint {
|
||||
public:
|
||||
MuscleConstraint(VerletPoint* parent, VerletPoint* child);
|
||||
MuscleConstraint(const MuscleConstraint& other);
|
||||
float enforce();
|
||||
|
||||
void setIndices(int parent, int child);
|
||||
int getParentIndex() const { return _parentIndex; }
|
||||
int getChildIndex() const { return _childndex; }
|
||||
void setChildOffset(const glm::vec3& offset) { _childOffset = offset; }
|
||||
void setStrength(float strength) { _strength = glm::clamp(strength, 0.0f, MAX_MUSCLE_STRENGTH); }
|
||||
float getStrength() const { return _strength; }
|
||||
private:
|
||||
VerletPoint* _rootPoint;
|
||||
VerletPoint* _childPoint;
|
||||
int _parentIndex;
|
||||
int _childndex;
|
||||
glm::vec3 _childOffset;
|
||||
float _strength; // a value in range [0,MAX_MUSCLE_STRENGTH]
|
||||
};
|
||||
|
||||
#endif // hifi_MuscleConstraint_h
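
As a quick illustration of the constraint above: each call to enforce() moves the child a fraction _strength of the way toward the parent's position plus the captured rest offset, and never moves the parent. A minimal standalone sketch of that update, with VerletPoint reduced to a bare position for illustration:

    #include <glm/glm.hpp>

    struct PointSketch { glm::vec3 position; };   // stand-in for VerletPoint

    // One relaxation step of the muscle constraint: pull 'child' toward parent + offset.
    void enforceMuscle(const PointSketch& parent, PointSketch& child,
                       const glm::vec3& childOffset, float strength) {
        child.position = (1.0f - strength) * child.position
                       + strength * (parent.position + childOffset);
    }
    // With the default strength (0.5f * MAX_MUSCLE_STRENGTH = 0.25f), each call closes
    // a quarter of the remaining gap, converging geometrically toward parent.position + childOffset.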
|
|
@ -135,9 +135,7 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
setScale(scale);
|
||||
Application::getInstance()->getCamera()->setScale(scale);
|
||||
}
|
||||
|
||||
// no extra movement of the hand here any more ...
|
||||
_handState = HAND_STATE_NULL;
|
||||
_skeletonModel.setShowTrueJointTransforms(! Menu::getInstance()->isOptionChecked(MenuOption::CollideAsRagdoll));
|
||||
|
||||
{
|
||||
PerformanceTimer perfTimer("transform");
|
||||
|
@ -164,9 +162,16 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
PerformanceTimer perfTimer("joints");
|
||||
// copy out the skeleton joints from the model
|
||||
_jointData.resize(_skeletonModel.getJointStateCount());
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
JointData& data = _jointData[i];
|
||||
data.valid = _skeletonModel.getJointState(i, data.rotation);
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::CollideAsRagdoll)) {
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
JointData& data = _jointData[i];
|
||||
data.valid = _skeletonModel.getVisibleJointState(i, data.rotation);
|
||||
}
|
||||
} else {
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
JointData& data = _jointData[i];
|
||||
data.valid = _skeletonModel.getJointState(i, data.rotation);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -186,7 +191,8 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
PerformanceTimer perfTimer("hair");
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
|
||||
_hair.setAcceleration(getAcceleration() * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setAngularVelocity(getAngularVelocity() + getHead()->getAngularVelocity() * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setAngularVelocity((getAngularVelocity() + getHead()->getAngularVelocity()) * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setAngularAcceleration(getAngularAcceleration() * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()) * getHead()->getFinalOrientationInWorldFrame());
|
||||
_hair.simulate(deltaTime);
|
||||
}
|
||||
|
@ -428,6 +434,7 @@ glm::vec3 MyAvatar::getLeftPalmPosition() {
|
|||
leftHandPosition += HAND_TO_PALM_OFFSET * glm::inverse(leftRotation);
|
||||
return leftHandPosition;
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getRightPalmPosition() {
|
||||
glm::vec3 rightHandPosition;
|
||||
getSkeletonModel().getRightHandPosition(rightHandPosition);
|
||||
|
@ -905,7 +912,7 @@ const float RENDER_HEAD_CUTOFF_DISTANCE = 0.50f;
|
|||
bool MyAvatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderMode renderMode) const {
|
||||
const Head* head = getHead();
|
||||
return (renderMode != NORMAL_RENDER_MODE) ||
|
||||
(glm::length(cameraPosition - head->calculateAverageEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE * _scale);
|
||||
(glm::length(cameraPosition - head->getEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE * _scale);
|
||||
}
|
||||
|
||||
float MyAvatar::computeDistanceToFloor(const glm::vec3& startPoint) {
|
||||
|
@ -1847,3 +1854,43 @@ void MyAvatar::applyCollision(const glm::vec3& contactPoint, const glm::vec3& pe
|
|||
}
|
||||
}
|
||||
|
||||
//Renders sixense laser pointers for UI selection with controllers
|
||||
void MyAvatar::renderLaserPointers() {
|
||||
const float PALM_TIP_ROD_RADIUS = 0.002f;
|
||||
|
||||
//If the Oculus is enabled, we will draw a blue cursor ray
|
||||
|
||||
for (size_t i = 0; i < getHand()->getNumPalms(); ++i) {
|
||||
PalmData& palm = getHand()->getPalms()[i];
|
||||
if (palm.isActive()) {
|
||||
glColor4f(0, 1, 1, 1);
|
||||
glm::vec3 tip = getLaserPointerTipPosition(&palm);
|
||||
glm::vec3 root = palm.getPosition();
|
||||
|
||||
//Scale the root vector with the avatar scale
|
||||
scaleVectorRelativeToPosition(root);
|
||||
|
||||
Avatar::renderJointConnectingCone(root, tip, PALM_TIP_ROD_RADIUS, PALM_TIP_ROD_RADIUS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Gets the tip position for the laser pointer
|
||||
glm::vec3 MyAvatar::getLaserPointerTipPosition(const PalmData* palm) {
|
||||
const ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
|
||||
const float PALM_TIP_ROD_LENGTH_MULT = 40.0f;
|
||||
|
||||
glm::vec3 direction = glm::normalize(palm->getTipPosition() - palm->getPosition());
|
||||
|
||||
glm::vec3 position = palm->getPosition();
|
||||
//scale the position with the avatar
|
||||
scaleVectorRelativeToPosition(position);
|
||||
|
||||
|
||||
glm::vec3 result;
|
||||
if (applicationOverlay.calculateRayUICollisionPoint(position, direction, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
return palm->getPosition();
|
||||
}
|
||||
|
|
|
@ -21,9 +21,9 @@
|
|||
enum AvatarHandState
|
||||
{
|
||||
HAND_STATE_NULL = 0,
|
||||
HAND_STATE_OPEN,
|
||||
HAND_STATE_GRASPING,
|
||||
HAND_STATE_POINTING,
|
||||
HAND_STATE_LEFT_POINTING,
|
||||
HAND_STATE_RIGHT_POINTING,
|
||||
HAND_STATE_BOTH_POINTING,
|
||||
NUM_HAND_STATES
|
||||
};
|
||||
|
||||
|
@ -127,6 +127,9 @@ public:
|
|||
|
||||
void applyCollision(const glm::vec3& contactPoint, const glm::vec3& penetration);
|
||||
|
||||
/// Renders a laser pointer for UI picking
|
||||
void renderLaserPointers();
|
||||
glm::vec3 getLaserPointerTipPosition(const PalmData* palm);
|
||||
public slots:
|
||||
void goHome();
|
||||
void increaseSize();
|
||||
|
|
|
@ -10,14 +10,18 @@
|
|||
//
|
||||
|
||||
#include <glm/gtx/transform.hpp>
|
||||
#include <QMultiMap>
|
||||
|
||||
#include <VerletCapsuleShape.h>
|
||||
#include <VerletSphereShape.h>
|
||||
#include <DistanceConstraint.h>
|
||||
#include <FixedConstraint.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Avatar.h"
|
||||
#include "Hand.h"
|
||||
#include "Menu.h"
|
||||
#include "MuscleConstraint.h"
|
||||
#include "SkeletonModel.h"
|
||||
|
||||
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
|
||||
|
@ -69,7 +73,7 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
|||
int jointIndex = geometry.humanIKJointIndices.at(humanIKJointIndex);
|
||||
if (jointIndex != -1) {
|
||||
JointState& state = _jointStates[jointIndex];
|
||||
state.setRotationFromBindFrame(prioVR->getJointRotations().at(i), PALM_PRIORITY);
|
||||
state.setRotationInBindFrame(prioVR->getJointRotations().at(i), PALM_PRIORITY);
|
||||
}
|
||||
}
|
||||
return;
|
||||
|
@ -217,7 +221,7 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
|
|||
setJointPosition(parentJointIndex, palmPosition + forearm,
|
||||
glm::quat(), false, -1, false, glm::vec3(0.0f, -1.0f, 0.0f), PALM_PRIORITY);
|
||||
JointState& parentState = _jointStates[parentJointIndex];
|
||||
parentState.setRotationFromBindFrame(palmRotation, PALM_PRIORITY);
|
||||
parentState.setRotationInBindFrame(palmRotation, PALM_PRIORITY);
|
||||
// lock hand to forearm by slamming its rotation (in parent-frame) to identity
|
||||
_jointStates[jointIndex].setRotationInConstrainedFrame(glm::quat());
|
||||
} else {
|
||||
|
@ -381,13 +385,13 @@ void SkeletonModel::setHandPosition(int jointIndex, const glm::vec3& position, c
|
|||
glm::quat shoulderRotation = rotationBetween(forwardVector, elbowPosition - shoulderPosition);
|
||||
|
||||
JointState& shoulderState = _jointStates[shoulderJointIndex];
|
||||
shoulderState.setRotationFromBindFrame(shoulderRotation, PALM_PRIORITY);
|
||||
shoulderState.setRotationInBindFrame(shoulderRotation, PALM_PRIORITY);
|
||||
|
||||
JointState& elbowState = _jointStates[elbowJointIndex];
|
||||
elbowState.setRotationFromBindFrame(rotationBetween(shoulderRotation * forwardVector, wristPosition - elbowPosition) * shoulderRotation, PALM_PRIORITY);
|
||||
elbowState.setRotationInBindFrame(rotationBetween(shoulderRotation * forwardVector, wristPosition - elbowPosition) * shoulderRotation, PALM_PRIORITY);
|
||||
|
||||
JointState& handState = _jointStates[jointIndex];
|
||||
handState.setRotationFromBindFrame(rotation, PALM_PRIORITY);
|
||||
handState.setRotationInBindFrame(rotation, PALM_PRIORITY);
|
||||
}
|
||||
|
||||
bool SkeletonModel::getLeftHandPosition(glm::vec3& position) const {
|
||||
|
@ -504,6 +508,7 @@ void SkeletonModel::renderRagdoll() {
|
|||
// virtual
|
||||
void SkeletonModel::initRagdollPoints() {
|
||||
clearRagdollConstraintsAndPoints();
|
||||
_muscleConstraints.clear();
|
||||
|
||||
// one point for each joint
|
||||
int numJoints = _jointStates.size();
|
||||
|
@ -511,8 +516,7 @@ void SkeletonModel::initRagdollPoints() {
|
|||
for (int i = 0; i < numJoints; ++i) {
|
||||
const JointState& state = _jointStates.at(i);
|
||||
glm::vec3 position = state.getPosition();
|
||||
_ragdollPoints[i]._position = position;
|
||||
_ragdollPoints[i]._lastPosition = position;
|
||||
_ragdollPoints[i].initPosition(position);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -522,30 +526,123 @@ void SkeletonModel::buildRagdollConstraints() {
|
|||
const int numPoints = _ragdollPoints.size();
|
||||
assert(numPoints == _jointStates.size());
|
||||
|
||||
float minBone = FLT_MAX;
|
||||
float maxBone = -FLT_MAX;
|
||||
QMultiMap<int, int> families;
|
||||
for (int i = 0; i < numPoints; ++i) {
|
||||
const JointState& state = _jointStates.at(i);
|
||||
const FBXJoint& joint = state.getFBXJoint();
|
||||
int parentIndex = joint.parentIndex;
|
||||
int parentIndex = state.getParentIndex();
|
||||
if (parentIndex == -1) {
|
||||
FixedConstraint* anchor = new FixedConstraint(&(_ragdollPoints[i]), glm::vec3(0.0f));
|
||||
_ragdollConstraints.push_back(anchor);
|
||||
} else {
|
||||
DistanceConstraint* bone = new DistanceConstraint(&(_ragdollPoints[i]), &(_ragdollPoints[parentIndex]));
|
||||
bone->setDistance(state.getDistanceToParent());
|
||||
_ragdollConstraints.push_back(bone);
|
||||
families.insert(parentIndex, i);
|
||||
}
|
||||
float boneLength = glm::length(state.getPositionInParentFrame());
|
||||
if (boneLength > maxBone) {
|
||||
maxBone = boneLength;
|
||||
} else if (boneLength < minBone) {
|
||||
minBone = boneLength;
|
||||
}
|
||||
}
|
||||
// Joints that have multiple children effectively have rigid constraints between the children
|
||||
// in the parent frame, so we add DistanceConstraints between children in the same family.
|
||||
QMultiMap<int, int>::iterator itr = families.begin();
|
||||
while (itr != families.end()) {
|
||||
QList<int> children = families.values(itr.key());
|
||||
int numChildren = children.size();
|
||||
if (numChildren > 1) {
|
||||
for (int i = 1; i < numChildren; ++i) {
|
||||
DistanceConstraint* bone = new DistanceConstraint(&(_ragdollPoints[children[i-1]]), &(_ragdollPoints[children[i]]));
|
||||
_ragdollConstraints.push_back(bone);
|
||||
}
|
||||
if (numChildren > 2) {
|
||||
DistanceConstraint* bone = new DistanceConstraint(&(_ragdollPoints[children[numChildren-1]]), &(_ragdollPoints[children[0]]));
|
||||
_ragdollConstraints.push_back(bone);
|
||||
}
|
||||
}
|
||||
++itr;
|
||||
}
|
||||
|
||||
float MAX_STRENGTH = 0.3f;
|
||||
float MIN_STRENGTH = 0.005f;
|
||||
// each joint gets a MuscleConstraint to its parent
|
||||
for (int i = 1; i < numPoints; ++i) {
|
||||
const JointState& state = _jointStates.at(i);
|
||||
int p = state.getParentIndex();
|
||||
if (p == -1) {
|
||||
continue;
|
||||
}
|
||||
MuscleConstraint* constraint = new MuscleConstraint(&(_ragdollPoints[p]), &(_ragdollPoints[i]));
|
||||
_ragdollConstraints.push_back(constraint);
|
||||
_muscleConstraints.push_back(constraint);
|
||||
|
||||
// Short joints are more susceptible to wiggle so we modulate the strength based on the joint's length:
|
||||
// long = weak and short = strong.
|
||||
constraint->setIndices(p, i);
|
||||
float boneLength = glm::length(state.getPositionInParentFrame());
|
||||
|
||||
float strength = MIN_STRENGTH + (MAX_STRENGTH - MIN_STRENGTH) * (maxBone - boneLength) / (maxBone - minBone);
|
||||
if (!families.contains(i)) {
|
||||
// Although muscles only pull on the children not parents, nevertheless those joints that have
|
||||
// parents AND children are more stable than joints at the end such as fingers. For such joints we
|
||||
// bestow maximum strength which helps reduce wiggle.
|
||||
strength = MAX_MUSCLE_STRENGTH;
|
||||
}
|
||||
constraint->setStrength(strength);
|
||||
}
|
||||
|
||||
}
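The strength interpolation above is easier to read in isolation. A hedged sketch follows: the bone lengths and hasChildren flags are invented inputs, but the formula and the leaf-joint override mirror buildRagdollConstraints().

// Sketch: muscle strength from bone length (long = weak, short = strong), with the
// end-of-chain override. Values are illustrative only.
#include <cstdio>

int main() {
    const float MAX_MUSCLE_STRENGTH = 0.5f;   // cap from MuscleConstraint.h
    const float MAX_STRENGTH = 0.3f;
    const float MIN_STRENGTH = 0.005f;
    const float minBone = 0.05f;              // shortest bone in the skeleton (assumed)
    const float maxBone = 0.45f;              // longest bone in the skeleton (assumed)

    float boneLength[]  = { 0.05f, 0.25f, 0.45f };
    bool  hasChildren[] = { false, true, true };   // finger tips and other leaves have none

    for (int i = 0; i < 3; ++i) {
        float strength = MIN_STRENGTH +
            (MAX_STRENGTH - MIN_STRENGTH) * (maxBone - boneLength[i]) / (maxBone - minBone);
        if (!hasChildren[i]) {
            // joints at the end of a chain wiggle the most, so they get the full cap
            strength = MAX_MUSCLE_STRENGTH;
        }
        printf("bone %d: length %.2f -> strength %.3f\n", i, boneLength[i], strength);
    }
    return 0;
}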
|
||||
|
||||
void SkeletonModel::updateVisibleJointStates() {
|
||||
Model::updateVisibleJointStates();
|
||||
// TODO: implement this to move visible joints to agree with joint shape positions
|
||||
if (_showTrueJointTransforms) {
|
||||
// no need to update visible transforms
|
||||
return;
|
||||
}
|
||||
QVector<glm::vec3> points;
|
||||
points.reserve(_jointStates.size());
|
||||
for (int i = 0; i < _jointStates.size(); i++) {
|
||||
JointState& state = _jointStates[i];
|
||||
points.push_back(_ragdollPoints[i]._position);
|
||||
|
||||
// get the parent state (this is the state that we want to rotate)
|
||||
int parentIndex = state.getParentIndex();
|
||||
if (parentIndex == -1) {
|
||||
_jointStates[i].slaveVisibleTransform();
|
||||
continue;
|
||||
}
|
||||
JointState& parentState = _jointStates[parentIndex];
|
||||
|
||||
// check the grand-parent index (for now we don't want to rotate any root states)
|
||||
int grandParentIndex = parentState.getParentIndex();
|
||||
if (grandParentIndex == -1) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// make sure state's visibleTransform is up to date
|
||||
const glm::mat4& parentTransform = parentState.getVisibleTransform();
|
||||
state.computeVisibleTransform(parentTransform);
|
||||
|
||||
// we're looking for the rotation that moves visible bone parallel to ragdoll bone
|
||||
// rotationBetween(jointTip - jointPivot, shapeTip - shapePivot)
|
||||
glm::quat delta = rotationBetween(state.getVisiblePosition() - extractTranslation(parentTransform),
|
||||
points[i] - points[parentIndex]);
|
||||
|
||||
// apply
|
||||
parentState.mixVisibleRotationDelta(delta, 0.01f);
|
||||
// update transforms
|
||||
parentState.computeVisibleTransform(_jointStates[grandParentIndex].getVisibleTransform());
|
||||
state.computeVisibleTransform(parentState.getVisibleTransform());
|
||||
}
|
||||
}
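The correction above boils down to: find the rotation that carries the rendered bone direction onto the ragdoll bone direction, then apply only a small fraction of it per frame. A standalone sketch, with glm::rotation() standing in for the project's rotationBetween() helper and invented positions:

#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>   // glm::rotation()
#include <cstdio>

int main() {
    glm::vec3 parentPosition(0.0f);
    glm::vec3 visibleTip(0.0f, 1.0f, 0.0f);   // where the rendered bone currently points
    glm::vec3 ragdollTip(0.2f, 1.0f, 0.0f);   // where the simulated ragdoll bone points

    // rotation that would align the visible bone with the ragdoll bone
    glm::quat delta = glm::rotation(glm::normalize(visibleTip - parentPosition),
                                    glm::normalize(ragdollTip - parentPosition));

    // blend in only a small fraction per frame so the visible skeleton eases toward the ragdoll
    const float MIX_FRACTION = 0.01f;
    glm::quat step = glm::slerp(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), delta, MIX_FRACTION);

    glm::vec3 newTip = parentPosition + step * (visibleTip - parentPosition);
    printf("nudged tip: %.4f %.4f %.4f\n", newTip.x, newTip.y, newTip.z);
    return 0;
}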
|
||||
|
||||
// virtual
|
||||
void SkeletonModel::stepRagdollForward(float deltaTime) {
|
||||
const float RAGDOLL_FOLLOWS_JOINTS_TIMESCALE = 0.03f;
|
||||
float fraction = glm::clamp(deltaTime / RAGDOLL_FOLLOWS_JOINTS_TIMESCALE, 0.0f, 1.0f);
|
||||
moveShapesTowardJoints(fraction);
|
||||
Ragdoll::stepRagdollForward(deltaTime);
|
||||
updateMuscles();
|
||||
}
|
||||
|
||||
float DENSITY_OF_WATER = 1000.0f; // kg/m^3
|
||||
|
@ -554,7 +651,7 @@ float VERY_BIG_MASS = 1.0e6f;
|
|||
|
||||
// virtual
|
||||
void SkeletonModel::buildShapes() {
|
||||
if (!_geometry || _rootIndex == -1) {
|
||||
if (_geometry == NULL || _jointStates.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -610,21 +707,49 @@ void SkeletonModel::buildShapes() {
|
|||
buildRagdollConstraints();
|
||||
|
||||
// ... then move shapes back to current joint positions
|
||||
moveShapesTowardJoints(1.0f);
|
||||
if (_ragdollPoints.size() == numStates) {
|
||||
int numJoints = _jointStates.size();
|
||||
for (int i = 0; i < numJoints; ++i) {
|
||||
_ragdollPoints[i].initPosition(_jointStates.at(i).getPosition());
|
||||
}
|
||||
}
|
||||
enforceRagdollConstraints();
|
||||
}
|
||||
|
||||
void SkeletonModel::moveShapesTowardJoints(float fraction) {
|
||||
void SkeletonModel::moveShapesTowardJoints(float deltaTime) {
|
||||
// KEEP: although we don't currently use this method we may eventually need it to help
|
||||
// unravel a skeleton that has become tangled in its constraints. So let's keep this
|
||||
// around for a while just in case.
|
||||
const int numStates = _jointStates.size();
|
||||
assert(_jointStates.size() == _ragdollPoints.size());
|
||||
assert(fraction >= 0.0f && fraction <= 1.0f);
|
||||
if (_ragdollPoints.size() == numStates) {
|
||||
float oneMinusFraction = 1.0f - fraction;
|
||||
int numJoints = _jointStates.size();
|
||||
for (int i = 0; i < numJoints; ++i) {
|
||||
_ragdollPoints[i]._lastPosition = _ragdollPoints[i]._position;
|
||||
_ragdollPoints[i]._position = oneMinusFraction * _ragdollPoints[i]._position + fraction * _jointStates.at(i).getPosition();
|
||||
if (_ragdollPoints.size() != numStates) {
|
||||
return;
|
||||
}
|
||||
|
||||
// fraction = 0 means keep old position, = 1 means slave 100% to target position
|
||||
const float RAGDOLL_FOLLOWS_JOINTS_TIMESCALE = 0.05f;
|
||||
float fraction = glm::clamp(deltaTime / RAGDOLL_FOLLOWS_JOINTS_TIMESCALE, 0.0f, 1.0f);
|
||||
|
||||
float oneMinusFraction = 1.0f - fraction;
|
||||
for (int i = 0; i < numStates; ++i) {
|
||||
_ragdollPoints[i].initPosition(oneMinusFraction * _ragdollPoints[i]._position +
|
||||
fraction * _jointStates.at(i).getPosition());
|
||||
}
|
||||
}
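The deltaTime-to-fraction mapping used here (and in stepRagdollForward() above) is simple enough to show in one dimension; the 60 Hz frame time below is an assumption.

// Sketch: timescale-based blend toward a target; fraction 0 keeps the old position,
// fraction 1 slaves the point to the target in a single step.
#include <algorithm>
#include <cstdio>

int main() {
    const float RAGDOLL_FOLLOWS_JOINTS_TIMESCALE = 0.05f;  // seconds
    float deltaTime = 0.016f;                              // roughly one 60 Hz frame

    float fraction = std::min(std::max(deltaTime / RAGDOLL_FOLLOWS_JOINTS_TIMESCALE, 0.0f), 1.0f);

    float pointPosition = 0.0f;   // 1D stand-in for a ragdoll point
    float jointPosition = 1.0f;   // 1D stand-in for the joint it follows
    pointPosition = (1.0f - fraction) * pointPosition + fraction * jointPosition;

    printf("fraction %.2f -> position %.2f\n", fraction, pointPosition);
    return 0;
}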
|
||||
|
||||
void SkeletonModel::updateMuscles() {
|
||||
int numConstraints = _muscleConstraints.size();
|
||||
for (int i = 0; i < numConstraints; ++i) {
|
||||
MuscleConstraint* constraint = _muscleConstraints[i];
|
||||
int j = constraint->getParentIndex();
|
||||
if (j == -1) {
|
||||
continue;
|
||||
}
|
||||
int k = constraint->getChildIndex();
|
||||
if (k == -1) {
|
||||
continue;
|
||||
}
|
||||
constraint->setChildOffset(_jointStates.at(k).getPosition() - _jointStates.at(j).getPosition());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -644,8 +769,7 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) {
|
|||
int parentIndex = joint.parentIndex;
|
||||
if (parentIndex == -1) {
|
||||
transforms[i] = _jointStates[i].getTransform();
|
||||
_ragdollPoints[i]._position = extractTranslation(transforms[i]);
|
||||
_ragdollPoints[i]._lastPosition = _ragdollPoints[i]._position;
|
||||
_ragdollPoints[i].initPosition(extractTranslation(transforms[i]));
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -653,8 +777,7 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) {
|
|||
transforms[i] = transforms[parentIndex] * glm::translate(joint.translation)
|
||||
* joint.preTransform * glm::mat4_cast(modifiedRotation) * joint.postTransform;
|
||||
// setting the ragdollPoints here slams the VerletShapes into their default positions
|
||||
_ragdollPoints[i]._position = extractTranslation(transforms[i]);
|
||||
_ragdollPoints[i]._lastPosition = _ragdollPoints[i]._position;
|
||||
_ragdollPoints[i].initPosition(extractTranslation(transforms[i]));
|
||||
}
|
||||
|
||||
// compute bounding box that encloses all shapes
|
||||
|
@ -708,7 +831,7 @@ void SkeletonModel::resetShapePositionsToDefaultPose() {
|
|||
// Moves shapes to the joint default locations for debug visibility into
|
||||
// how the bounding shape is computed.
|
||||
|
||||
if (!_geometry || _rootIndex == -1 || _shapes.isEmpty()) {
|
||||
if (!_geometry || _shapes.isEmpty()) {
|
||||
// geometry or joints have not yet been created
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
#include <Ragdoll.h>
|
||||
|
||||
class Avatar;
|
||||
class MuscleConstraint;
|
||||
|
||||
/// A skeleton loaded from a model.
|
||||
class SkeletonModel : public Model, public Ragdoll {
|
||||
|
@ -100,6 +101,7 @@ public:
|
|||
virtual void stepRagdollForward(float deltaTime);
|
||||
|
||||
void moveShapesTowardJoints(float fraction);
|
||||
void updateMuscles();
|
||||
|
||||
void computeBoundingShape(const FBXGeometry& geometry);
|
||||
void renderBoundingCollisionShapes(float alpha);
|
||||
|
@ -144,6 +146,7 @@ private:
|
|||
|
||||
CapsuleShape _boundingShape;
|
||||
glm::vec3 _boundingShapeLocalOffset;
|
||||
QVector<MuscleConstraint*> _muscleConstraints;
|
||||
};
|
||||
|
||||
#endif // hifi_SkeletonModel_h
|
||||
|
|
interface/src/devices/DeviceTracker.cpp (new file, 88 lines)
@@ -0,0 +1,88 @@
|
|||
//
|
||||
// DeviceTracker.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/20/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "DeviceTracker.h"
|
||||
|
||||
DeviceTracker::SingletonData::~SingletonData() {
|
||||
// Destroy all the registered devices
|
||||
//TODO C++11 for (auto device = _devicesVector.begin(); device != _devicesVector.end(); device++) {
|
||||
for (Vector::iterator device = _devicesVector.begin(); device != _devicesVector.end(); device++) {
|
||||
delete (*device);
|
||||
}
|
||||
}
|
||||
|
||||
int DeviceTracker::getNumDevices() {
|
||||
return Singleton::get()->_devicesMap.size();
|
||||
}
|
||||
|
||||
DeviceTracker::ID DeviceTracker::getDeviceID(const Name& name) {
|
||||
//TODO C++11 auto deviceIt = Singleton::get()->_devicesMap.find(name);
|
||||
Map::iterator deviceIt = Singleton::get()->_devicesMap.find(name);
|
||||
if (deviceIt != Singleton::get()->_devicesMap.end()) {
|
||||
return (*deviceIt).second;
|
||||
} else {
|
||||
return INVALID_DEVICE;
|
||||
}
|
||||
}
|
||||
|
||||
DeviceTracker* DeviceTracker::getDevice(const Name& name) {
|
||||
return getDevice(getDeviceID(name));
|
||||
}
|
||||
|
||||
DeviceTracker* DeviceTracker::getDevice(DeviceTracker::ID deviceID) {
|
||||
if ((deviceID >= 0) && (deviceID < Singleton::get()->_devicesVector.size())) {
|
||||
return Singleton::get()->_devicesVector[ deviceID ];
|
||||
} else {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
DeviceTracker::ID DeviceTracker::registerDevice(const Name& name, DeviceTracker* device) {
|
||||
// Check that the device exists, if not exit
|
||||
if (!device) {
|
||||
return INVALID_DEVICE;
|
||||
}
|
||||
|
||||
// Check that the name is not already in use
|
||||
ID deviceID = getDeviceID(name);
|
||||
if (deviceID >= 0) {
|
||||
return INVALID_DEVICE_NAME;
|
||||
}
|
||||
|
||||
// Good to register the device
|
||||
deviceID = Singleton::get()->_devicesVector.size();
|
||||
Singleton::get()->_devicesMap.insert(Map::value_type(name, deviceID));
|
||||
Singleton::get()->_devicesVector.push_back(device);
|
||||
device->assignIDAndName(deviceID, name);
|
||||
|
||||
return deviceID;
|
||||
}
|
||||
|
||||
void DeviceTracker::updateAll() {
|
||||
//TODO C++11 for (auto deviceIt = Singleton::get()->_devicesVector.begin(); deviceIt != Singleton::get()->_devicesVector.end(); deviceIt++) {
|
||||
for (Vector::iterator deviceIt = Singleton::get()->_devicesVector.begin(); deviceIt != Singleton::get()->_devicesVector.end(); deviceIt++) {
|
||||
if ((*deviceIt))
|
||||
(*deviceIt)->update();
|
||||
}
|
||||
}
|
||||
|
||||
// Core features of the Device Tracker
|
||||
DeviceTracker::DeviceTracker() :
|
||||
_ID(INVALID_DEVICE),
|
||||
_name("Unkown")
|
||||
{
|
||||
}
|
||||
|
||||
DeviceTracker::~DeviceTracker() {
|
||||
}
|
||||
|
||||
void DeviceTracker::update() {
|
||||
}
|
interface/src/devices/DeviceTracker.h (new file, 114 lines)
@@ -0,0 +1,114 @@
|
|||
//
|
||||
// DeviceTracker.h
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/20/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_DeviceTracker_h
|
||||
#define hifi_DeviceTracker_h
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
|
||||
// Singleton template class
|
||||
template < typename T >
|
||||
class TemplateSingleton {
|
||||
public:
|
||||
|
||||
static T* get() {
|
||||
if ( !_singleton._one ) {
|
||||
_singleton._one = new T();
|
||||
}
|
||||
return _singleton._one;
|
||||
}
|
||||
|
||||
TemplateSingleton() :
|
||||
_one(0)
|
||||
{
|
||||
}
|
||||
~TemplateSingleton() {
|
||||
if ( _one ) {
|
||||
delete _one;
|
||||
_one = 0;
|
||||
}
|
||||
}
|
||||
private:
|
||||
static TemplateSingleton< T > _singleton;
|
||||
T* _one;
|
||||
};
|
||||
template <typename T>
|
||||
TemplateSingleton<T> TemplateSingleton<T>::_singleton;
|
||||
|
||||
/// Base class for device trackers.
|
||||
class DeviceTracker {
|
||||
public:
|
||||
|
||||
// The ID and Name types used to manage the pool of devices
|
||||
typedef std::string Name;
|
||||
typedef int ID;
|
||||
static const ID INVALID_DEVICE = -1;
|
||||
static const ID INVALID_DEVICE_NAME = -2;
|
||||
|
||||
// Singleton interface to register and query the devices currently connected
|
||||
static int getNumDevices();
|
||||
static ID getDeviceID(const Name& name);
|
||||
static DeviceTracker* getDevice(ID deviceID);
|
||||
static DeviceTracker* getDevice(const Name& name);
|
||||
|
||||
/// Update all the devices calling for their update() function
|
||||
/// This should be called every frame by the main loop to update all the devices that pull their state
|
||||
static void updateAll();
|
||||
|
||||
/// Register a device tracker to the factory
|
||||
/// Right after creating a new DeviceTracker, it should be registered
|
||||
/// This is why it is recommended to use a static factory call in the specialized class
|
||||
/// to create a new input device
|
||||
///
|
||||
/// \param name The Name under which the device is registered
|
||||
/// \param tracker The DeviceTracker instance to register
|
||||
///
|
||||
/// \return The Index of the newly registered device.
|
||||
/// Valid if everything went well.
|
||||
/// INVALID_DEVICE if the device is not valid (NULL)
|
||||
/// INVALID_DEVICE_NAME if the name is already taken
|
||||
static ID registerDevice(const Name& name, DeviceTracker* tracker);
|
||||
|
||||
// DeviceTracker interface
|
||||
|
||||
virtual void update();
|
||||
|
||||
/// Get the ID assigned to the Device when registered after creation, or INVALID_DEVICE if it hasn't been registered which should not happen.
|
||||
ID getID() const { return _ID; }
|
||||
|
||||
/// Get the name assigned to the Device when registered after creation, or "Unknown" if it hasn't been registered which should not happen.
|
||||
const Name& getName() const { return _name; }
|
||||
|
||||
protected:
|
||||
DeviceTracker();
|
||||
virtual ~DeviceTracker();
|
||||
|
||||
private:
|
||||
ID _ID;
|
||||
Name _name;
|
||||
|
||||
// this call is used by the singleton when the device tracker is being registered and assigned an ID
|
||||
void assignIDAndName( const ID id, const Name& name ) { _ID = id; _name = name; }
|
||||
|
||||
typedef std::vector< DeviceTracker* > Vector;
|
||||
typedef std::map< Name, ID > Map;
|
||||
struct SingletonData {
|
||||
Map _devicesMap;
|
||||
Vector _devicesVector;
|
||||
|
||||
~SingletonData();
|
||||
};
|
||||
typedef TemplateSingleton< SingletonData > Singleton;
|
||||
};
|
||||
|
||||
#endif // hifi_DeviceTracker_h
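A possible registration flow for this interface, assuming DeviceTracker.h above is on the include path; the MyTracker class and its name are purely illustrative and not part of the codebase.

#include "DeviceTracker.h"

// Hypothetical specialized tracker following the factory pattern recommended above.
class MyTracker : public DeviceTracker {
public:
    static void init() {
        if (!DeviceTracker::getDevice("MyTracker")) {
            // register right after creation so the singleton owns and updates it
            DeviceTracker::registerDevice("MyTracker", new MyTracker());
        }
    }
    virtual void update() {
        // a real tracker would poll its hardware here
    }
};

// called once per frame by the application's main loop
void pollAllDevices() {
    DeviceTracker::updateAll();
}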
|
|
@ -19,7 +19,10 @@
|
|||
#include "Menu.h"
|
||||
#include "Util.h"
|
||||
|
||||
#ifdef HAVE_FACESHIFT
|
||||
using namespace fs;
|
||||
#endif
|
||||
|
||||
using namespace std;
|
||||
|
||||
const quint16 FACESHIFT_PORT = 33433;
|
||||
|
@ -48,6 +51,7 @@ Faceshift::Faceshift() :
|
|||
_longTermAverageEyeYaw(0.0f),
|
||||
_longTermAverageInitialized(false)
|
||||
{
|
||||
#ifdef HAVE_FACESHIFT
|
||||
connect(&_tcpSocket, SIGNAL(connected()), SLOT(noteConnected()));
|
||||
connect(&_tcpSocket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
|
||||
connect(&_tcpSocket, SIGNAL(readyRead()), SLOT(readFromSocket()));
|
||||
|
@ -56,20 +60,27 @@ Faceshift::Faceshift() :
|
|||
connect(&_udpSocket, SIGNAL(readyRead()), SLOT(readPendingDatagrams()));
|
||||
|
||||
_udpSocket.bind(FACESHIFT_PORT);
|
||||
#endif
|
||||
}
|
||||
|
||||
void Faceshift::init() {
|
||||
#ifdef HAVE_FACESHIFT
|
||||
setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
|
||||
#endif
|
||||
}
|
||||
|
||||
bool Faceshift::isConnectedOrConnecting() const {
|
||||
return _tcpSocket.state() == QAbstractSocket::ConnectedState ||
|
||||
(_tcpRetryCount == 0 && _tcpSocket.state() != QAbstractSocket::UnconnectedState);
|
||||
(_tcpRetryCount == 0 && _tcpSocket.state() != QAbstractSocket::UnconnectedState);
|
||||
}
|
||||
|
||||
bool Faceshift::isActive() const {
|
||||
#ifdef HAVE_FACESHIFT
|
||||
const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
|
||||
return (usecTimestampNow() - _lastTrackingStateReceived) < ACTIVE_TIMEOUT_USECS;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void Faceshift::update() {
|
||||
|
@ -97,12 +108,14 @@ void Faceshift::update() {
|
|||
}
|
||||
|
||||
void Faceshift::reset() {
|
||||
#ifdef HAVE_FACESHIFT
|
||||
if (_tcpSocket.state() == QAbstractSocket::ConnectedState) {
|
||||
string message;
|
||||
fsBinaryStream::encode_message(message, fsMsgCalibrateNeutral());
|
||||
send(message);
|
||||
}
|
||||
_longTermAverageInitialized = false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
|
||||
|
@ -138,11 +151,13 @@ void Faceshift::connectSocket() {
|
|||
}
|
||||
|
||||
void Faceshift::noteConnected() {
|
||||
#ifdef HAVE_FACESHIFT
|
||||
qDebug("Faceshift: Connected.");
|
||||
// request the list of blendshape names
|
||||
string message;
|
||||
fsBinaryStream::encode_message(message, fsMsgSendBlendshapeNames());
|
||||
send(message);
|
||||
#endif
|
||||
}
|
||||
|
||||
void Faceshift::noteError(QAbstractSocket::SocketError error) {
|
||||
|
@ -179,6 +194,7 @@ void Faceshift::send(const std::string& message) {
|
|||
}
|
||||
|
||||
void Faceshift::receive(const QByteArray& buffer) {
|
||||
#ifdef HAVE_FACESHIFT
|
||||
_stream.received(buffer.size(), buffer.constData());
|
||||
fsMsgPtr msg;
|
||||
for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
|
||||
|
@ -259,4 +275,5 @@ void Faceshift::receive(const QByteArray& buffer) {
|
|||
break;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
|
|
@ -15,7 +15,9 @@
|
|||
#include <QTcpSocket>
|
||||
#include <QUdpSocket>
|
||||
|
||||
#ifdef HAVE_FACESHIFT
|
||||
#include <fsbinarystream.h>
|
||||
#endif
|
||||
|
||||
#include "FaceTracker.h"
|
||||
|
||||
|
@ -24,7 +26,6 @@ class Faceshift : public FaceTracker {
|
|||
Q_OBJECT
|
||||
|
||||
public:
|
||||
|
||||
Faceshift();
|
||||
|
||||
void init();
|
||||
|
@ -88,7 +89,11 @@ private:
|
|||
|
||||
QTcpSocket _tcpSocket;
|
||||
QUdpSocket _udpSocket;
|
||||
|
||||
#ifdef HAVE_FACESHIFT
|
||||
fs::fsBinaryStream _stream;
|
||||
#endif
|
||||
|
||||
bool _tcpEnabled;
|
||||
int _tcpRetryCount;
|
||||
bool _tracking;
|
||||
|
@ -123,6 +128,7 @@ private:
|
|||
float _longTermAverageEyePitch;
|
||||
float _longTermAverageEyeYaw;
|
||||
bool _longTermAverageInitialized;
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_Faceshift_h
|
||||
|
|
interface/src/devices/Leapmotion.cpp (new file, 230 lines)
@@ -0,0 +1,230 @@
|
|||
//
|
||||
// Leapmotion.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/2/2014
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "SharedUtil.h"
|
||||
#include "Leapmotion.h"
|
||||
|
||||
const int PALMROOT_NUM_JOINTS = 3;
|
||||
const int FINGER_NUM_JOINTS = 4;
|
||||
const int HAND_NUM_JOINTS = FINGER_NUM_JOINTS*5+PALMROOT_NUM_JOINTS;
|
||||
|
||||
const DeviceTracker::Name Leapmotion::NAME = "Leapmotion";
|
||||
|
||||
// find the index of a joint from
|
||||
// the side: true = right
|
||||
// the finger & the bone:
|
||||
// finger in [0..4] : bone in [0..3] a finger phalange
|
||||
// [-1] up the hand branch : bone in [0..2] <=> [ hand, forearm, arm]
|
||||
MotionTracker::Index evalJointIndex(bool isRightSide, int finger, int bone) {
|
||||
|
||||
MotionTracker::Index offset = 1 // start after root
|
||||
+ (int(isRightSide) * HAND_NUM_JOINTS) // then offset for side
|
||||
+ PALMROOT_NUM_JOINTS; // then add the arm/forearm/hand chain
|
||||
if (finger >= 0) {
|
||||
// from there go down in the correct finger and bone
|
||||
return offset + (finger * FINGER_NUM_JOINTS) + bone;
|
||||
} else {
|
||||
// or go back up for the correct root bone
|
||||
return offset - 1 - bone;
|
||||
}
|
||||
}
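To make the index layout concrete, the arithmetic above can be exercised on its own. The function below is a local copy for illustration; the printed labels follow the [hand, forearm, arm] convention from the comment.

#include <cstdio>

int evalIndex(bool isRightSide, int finger, int bone) {
    const int PALMROOT_NUM_JOINTS = 3;
    const int FINGER_NUM_JOINTS = 4;
    const int HAND_NUM_JOINTS = FINGER_NUM_JOINTS * 5 + PALMROOT_NUM_JOINTS;
    int offset = 1 + (int(isRightSide) * HAND_NUM_JOINTS) + PALMROOT_NUM_JOINTS;
    return (finger >= 0) ? offset + finger * FINGER_NUM_JOINTS + bone  // a finger phalange
                         : offset - 1 - bone;                          // 0 = hand, 1 = forearm, 2 = arm
}

int main() {
    printf("left hand joint        : %d\n", evalIndex(false, -1, 0));  // 3
    printf("left arm (elbow) joint : %d\n", evalIndex(false, -1, 2));  // 1
    printf("right thumb, bone 0    : %d\n", evalIndex(true, 0, 0));    // 27
    return 0;
}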
|
||||
|
||||
// static
|
||||
void Leapmotion::init() {
|
||||
DeviceTracker* device = DeviceTracker::getDevice(NAME);
|
||||
|
||||
if (!device) {
|
||||
// create a new Leapmotion and register it
|
||||
Leapmotion* leap = new Leapmotion();
|
||||
DeviceTracker::registerDevice(NAME, leap);
|
||||
}
|
||||
}
|
||||
|
||||
// static
|
||||
Leapmotion* Leapmotion::getInstance() {
|
||||
DeviceTracker* device = DeviceTracker::getDevice(NAME);
|
||||
if (!device) {
|
||||
// create a new Leapmotion and register it
|
||||
device = new Leapmotion();
|
||||
DeviceTracker::registerDevice(NAME, device);
|
||||
}
|
||||
return dynamic_cast< Leapmotion* > (device);
|
||||
}
|
||||
|
||||
Leapmotion::Leapmotion() :
|
||||
MotionTracker(),
|
||||
_active(false)
|
||||
{
|
||||
// Create the Leapmotion joint hierarchy
|
||||
std::vector< Semantic > sides;
|
||||
sides.push_back("joint_L_");
|
||||
sides.push_back("joint_R_");
|
||||
|
||||
std::vector< Semantic > rootBones;
|
||||
rootBones.push_back("elbow");
|
||||
rootBones.push_back("hand");
|
||||
rootBones.push_back("wrist");
|
||||
|
||||
std::vector< Semantic > fingers;
|
||||
fingers.push_back("thumb");
|
||||
fingers.push_back("index");
|
||||
fingers.push_back("middle");
|
||||
fingers.push_back("ring");
|
||||
fingers.push_back("pinky");
|
||||
|
||||
std::vector< Semantic > fingerBones;
|
||||
fingerBones.push_back("1");
|
||||
fingerBones.push_back("2");
|
||||
fingerBones.push_back("3");
|
||||
fingerBones.push_back("4");
|
||||
|
||||
std::vector< Index > palms;
|
||||
for (unsigned int s = 0; s < sides.size(); s++) {
|
||||
Index rootJoint = 0;
|
||||
for (unsigned int rb = 0; rb < rootBones.size(); rb++) {
|
||||
rootJoint = addJoint(sides[s] + rootBones[rb], rootJoint);
|
||||
}
|
||||
|
||||
// capture the hand index for debug
|
||||
palms.push_back(rootJoint);
|
||||
|
||||
for (unsigned int f = 0; f < fingers.size(); f++) {
|
||||
for (unsigned int b = 0; b < fingerBones.size(); b++) {
|
||||
rootJoint = addJoint(sides[s] + fingers[f] + fingerBones[b], rootJoint);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Leapmotion::~Leapmotion() {
|
||||
}
|
||||
|
||||
#ifdef HAVE_LEAPMOTION
|
||||
glm::quat quatFromLeapBase(float sideSign, const Leap::Matrix& basis) {
|
||||
|
||||
// fix the handedness so the basis is always right-handed...
|
||||
glm::vec3 xAxis = glm::normalize(sideSign * glm::vec3(basis.xBasis.x, basis.xBasis.y, basis.xBasis.z));
|
||||
glm::vec3 yAxis = glm::normalize(glm::vec3(basis.yBasis.x, basis.yBasis.y, basis.yBasis.z));
|
||||
glm::vec3 zAxis = glm::normalize(glm::vec3(basis.zBasis.x, basis.zBasis.y, basis.zBasis.z));
|
||||
|
||||
xAxis = glm::normalize(glm::cross(yAxis, zAxis));
|
||||
|
||||
glm::quat orientation = (glm::quat_cast(glm::mat3(xAxis, yAxis, zAxis)));
|
||||
return orientation;
|
||||
}
|
||||
|
||||
glm::vec3 vec3FromLeapVector(const Leap::Vector& vec) {
|
||||
return glm::vec3(vec.x * METERS_PER_MILLIMETER, vec.y * METERS_PER_MILLIMETER, vec.z * METERS_PER_MILLIMETER);
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
void Leapmotion::update() {
|
||||
#ifdef HAVE_LEAPMOTION
|
||||
// Check that the controller is actually active
|
||||
_active = _controller.isConnected();
|
||||
if (!_active) {
|
||||
return;
|
||||
}
|
||||
|
||||
// go through all the joints and increment their counter since last update
|
||||
// TODO C++11 for (auto jointIt = _jointsArray.begin(); jointIt != _jointsArray.end(); jointIt++) {
|
||||
for (JointTracker::Vector::iterator jointIt = _jointsArray.begin(); jointIt != _jointsArray.end(); jointIt++) {
|
||||
(*jointIt).tickNewFrame();
|
||||
}
|
||||
|
||||
// Get the most recent frame and report some basic information
|
||||
const Leap::Frame frame = _controller.frame();
|
||||
static int64_t lastFrameID = -1;
|
||||
int64_t newFrameID = frame.id();
|
||||
|
||||
// If too soon then exit
|
||||
if (lastFrameID >= newFrameID)
|
||||
return;
|
||||
|
||||
glm::vec3 delta(0.f);
|
||||
glm::quat handOri;
|
||||
if (!frame.hands().isEmpty()) {
|
||||
for (int handNum = 0; handNum < frame.hands().count(); handNum++) {
|
||||
|
||||
const Leap::Hand hand = frame.hands()[handNum];
|
||||
int side = (hand.isRight() ? 1 : -1);
|
||||
|
||||
JointTracker* parentJointTracker = _jointsArray.data();
|
||||
|
||||
|
||||
int rootBranchIndex = -1;
|
||||
|
||||
Leap::Arm arm = hand.arm();
|
||||
if (arm.isValid()) {
|
||||
glm::quat ori = quatFromLeapBase(float(side), arm.basis());
|
||||
glm::vec3 pos = vec3FromLeapVector(arm.elbowPosition());
|
||||
JointTracker* elbow = editJointTracker(evalJointIndex((side > 0), rootBranchIndex, 2)); // 2 is the index of the elbow joint
|
||||
elbow->editAbsFrame().setTranslation(pos);
|
||||
elbow->editAbsFrame().setRotation(ori);
|
||||
elbow->updateLocFromAbsTransform(parentJointTracker);
|
||||
elbow->activeFrame();
|
||||
parentJointTracker = elbow;
|
||||
|
||||
pos = vec3FromLeapVector(arm.wristPosition());
|
||||
JointTracker* wrist = editJointTracker(evalJointIndex((side > 0), rootBranchIndex, 1)); // 1 is the index of the wrist joint
|
||||
wrist->editAbsFrame().setTranslation(pos);
|
||||
wrist->editAbsFrame().setRotation(ori);
|
||||
wrist->updateLocFromAbsTransform(parentJointTracker);
|
||||
wrist->activeFrame();
|
||||
parentJointTracker = wrist;
|
||||
}
|
||||
|
||||
JointTracker* palmJoint = NULL;
|
||||
{
|
||||
glm::vec3 pos = vec3FromLeapVector(hand.palmPosition());
|
||||
glm::quat ori = quatFromLeapBase(float(side), hand.basis());
|
||||
|
||||
palmJoint = editJointTracker(evalJointIndex((side > 0), rootBranchIndex, 0)); // 0 is the index of the palm joint
|
||||
palmJoint->editAbsFrame().setTranslation(pos);
|
||||
palmJoint->editAbsFrame().setRotation(ori);
|
||||
palmJoint->updateLocFromAbsTransform(parentJointTracker);
|
||||
palmJoint->activeFrame();
|
||||
}
|
||||
|
||||
// Check if the hand has any fingers
|
||||
const Leap::FingerList fingers = hand.fingers();
|
||||
if (!fingers.isEmpty()) {
|
||||
// For every fingers in the list
|
||||
for (int i = 0; i < fingers.count(); ++i) {
|
||||
// Reset the parent joint to the palmJoint for every finger traversal
|
||||
parentJointTracker = palmJoint;
|
||||
|
||||
// surprisingly, Leap::Finger::Type starts at 0 for the thumb and runs up to 4 for the pinky
|
||||
Index fingerIndex = evalJointIndex((side > 0), int(fingers[i].type()), 0);
|
||||
|
||||
// let's update the finger's joints
|
||||
for (int b = 0; b < FINGER_NUM_JOINTS; b++) {
|
||||
Leap::Bone::Type type = Leap::Bone::Type(b + Leap::Bone::TYPE_METACARPAL);
|
||||
Leap::Bone bone = fingers[i].bone(type);
|
||||
JointTracker* ljointTracker = editJointTracker(fingerIndex + b);
|
||||
if (bone.isValid()) {
|
||||
Leap::Vector bp = bone.nextJoint();
|
||||
|
||||
ljointTracker->editAbsFrame().setTranslation(vec3FromLeapVector(bp));
|
||||
ljointTracker->editAbsFrame().setRotation(quatFromLeapBase(float(side), bone.basis()));
|
||||
ljointTracker->updateLocFromAbsTransform(parentJointTracker);
|
||||
ljointTracker->activeFrame();
|
||||
}
|
||||
parentJointTracker = ljointTracker;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lastFrameID = newFrameID;
|
||||
#endif
|
||||
}
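The frame gating in update() (process a Leap frame only if it is strictly newer than the last one handled) can be sketched without the SDK; the frame IDs below are fabricated.

#include <cstdint>
#include <cstdio>

bool shouldProcess(int64_t newFrameID) {
    static int64_t lastFrameID = -1;   // persists across calls, as in the original
    if (lastFrameID >= newFrameID) {
        return false;                  // same or older frame: nothing new to read
    }
    lastFrameID = newFrameID;
    return true;
}

int main() {
    const int64_t ids[] = { 10, 10, 11, 11, 12 };
    for (int64_t id : ids) {
        printf("frame %lld -> %s\n", (long long)id, shouldProcess(id) ? "process" : "skip");
    }
    return 0;
}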
|
interface/src/devices/Leapmotion.h (new file, 50 lines)
@@ -0,0 +1,50 @@
|
|||
//
|
||||
// Leapmotion.h
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/2/2014
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_Leapmotion_h
|
||||
#define hifi_Leapmotion_h
|
||||
|
||||
#include <QDateTime>
|
||||
|
||||
#include "MotionTracker.h"
|
||||
|
||||
#ifdef HAVE_LEAPMOTION
|
||||
#include <Leap.h>
|
||||
#endif
|
||||
|
||||
/// Handles interaction with the Leapmotion skeleton tracking suit.
|
||||
class Leapmotion : public MotionTracker {
|
||||
public:
|
||||
static const Name NAME;
|
||||
|
||||
static void init();
|
||||
|
||||
/// Leapmotion MotionTracker factory
|
||||
static Leapmotion* getInstance();
|
||||
|
||||
bool isActive() const { return _active; }
|
||||
|
||||
virtual void update();
|
||||
|
||||
protected:
|
||||
Leapmotion();
|
||||
virtual ~Leapmotion();
|
||||
|
||||
private:
|
||||
#ifdef HAVE_LEAPMOTION
|
||||
Leap::Listener _listener;
|
||||
Leap::Controller _controller;
|
||||
#endif
|
||||
|
||||
bool _active;
|
||||
};
|
||||
|
||||
#endif // hifi_Leapmotion_h
|
interface/src/devices/MotionTracker.cpp (new file, 186 lines)
@@ -0,0 +1,186 @@
|
|||
//
|
||||
// MotionTracker.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/20/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "MotionTracker.h"
|
||||
|
||||
|
||||
// glm::mult(mat43, mat43) is just the composition of the two matrices, assuming they are in fact mat44 with the last row = { 0, 0, 0, 1 }
|
||||
namespace glm {
|
||||
mat4x3 mult(const mat4& lhs, const mat4x3& rhs) {
|
||||
vec3 lrx(lhs[0].x, lhs[1].x, lhs[2].x);
|
||||
vec3 lry(lhs[0].y, lhs[1].y, lhs[2].y);
|
||||
vec3 lrz(lhs[0].z, lhs[1].z, lhs[2].z);
|
||||
return mat4x3(
|
||||
dot(lrx, rhs[0]),
|
||||
dot(lry, rhs[0]),
|
||||
dot(lrz, rhs[0]),
|
||||
|
||||
dot(lrx, rhs[1]),
|
||||
dot(lry, rhs[1]),
|
||||
dot(lrz, rhs[1]),
|
||||
|
||||
dot(lrx, rhs[2]),
|
||||
dot(lry, rhs[2]),
|
||||
dot(lrz, rhs[2]),
|
||||
|
||||
dot(lrx, rhs[3]) + lhs[3].x,
|
||||
dot(lry, rhs[3]) + lhs[3].y,
|
||||
dot(lrz, rhs[3]) + lhs[3].z
|
||||
);
|
||||
}
|
||||
mat4x3 mult(const mat4x3& lhs, const mat4x3& rhs) {
|
||||
vec3 lrx(lhs[0].x, lhs[1].x, lhs[2].x);
|
||||
vec3 lry(lhs[0].y, lhs[1].y, lhs[2].y);
|
||||
vec3 lrz(lhs[0].z, lhs[1].z, lhs[2].z);
|
||||
return mat4x3(
|
||||
dot(lrx, rhs[0]),
|
||||
dot(lry, rhs[0]),
|
||||
dot(lrz, rhs[0]),
|
||||
|
||||
dot(lrx, rhs[1]),
|
||||
dot(lry, rhs[1]),
|
||||
dot(lrz, rhs[1]),
|
||||
|
||||
dot(lrx, rhs[2]),
|
||||
dot(lry, rhs[2]),
|
||||
dot(lrz, rhs[2]),
|
||||
|
||||
dot(lrx, rhs[3]) + lhs[3].x,
|
||||
dot(lry, rhs[3]) + lhs[3].y,
|
||||
dot(lrz, rhs[3]) + lhs[3].z
|
||||
);
|
||||
}
|
||||
}
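Why the 4x3 composition above is legitimate: an affine transform stored as a mat4 always has the constant bottom row 0 0 0 1, so dropping that row loses nothing. A sketch with plain glm (the mult() helpers above are not called):

#include <glm/glm.hpp>
#include <cstdio>

int main() {
    // two affine transforms stored as mat4 (column-major, bottom row 0 0 0 1)
    glm::mat4 a = glm::mat4(1.0f);
    a[3] = glm::vec4(1.0f, 2.0f, 3.0f, 1.0f);      // translation (1, 2, 3)
    glm::mat4 b = glm::mat4(1.0f);
    b[3] = glm::vec4(0.5f, 0.0f, 0.0f, 1.0f);      // translation (0.5, 0, 0)

    glm::mat4 ab = a * b;                          // full 4x4 product
    glm::mat4x3 compact = glm::mat4x3(ab);         // drop the constant bottom row

    // the dropped row carried no information, so the 4x3 form is lossless for affine transforms
    printf("composed translation: %.2f %.2f %.2f\n", compact[3].x, compact[3].y, compact[3].z);
    return 0;
}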
|
||||
|
||||
// MotionTracker
|
||||
MotionTracker::MotionTracker() :
|
||||
DeviceTracker()
|
||||
{
|
||||
_jointsArray.resize(1);
|
||||
_jointsMap.insert(JointTracker::Map::value_type(Semantic("Root"), 0));
|
||||
}
|
||||
|
||||
MotionTracker::~MotionTracker()
|
||||
{
|
||||
}
|
||||
|
||||
bool MotionTracker::isConnected() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
MotionTracker::Index MotionTracker::addJoint(const Semantic& semantic, Index parent) {
|
||||
// Check the parent
|
||||
if (int(parent) < 0)
|
||||
return INVALID_PARENT;
|
||||
|
||||
// Check that the semantic is not already in use
|
||||
Index foundIndex = findJointIndex(semantic);
|
||||
if (foundIndex >= 0)
|
||||
return INVALID_SEMANTIC;
|
||||
|
||||
// All good then allocate the joint
|
||||
Index newIndex = _jointsArray.size();
|
||||
_jointsArray.push_back(JointTracker(semantic, parent));
|
||||
_jointsMap.insert(JointTracker::Map::value_type(semantic, newIndex));
|
||||
|
||||
return newIndex;
|
||||
}
|
||||
|
||||
MotionTracker::Index MotionTracker::findJointIndex(const Semantic& semantic) const {
|
||||
// TODO C++11 auto jointIt = _jointsMap.find(semantic);
|
||||
JointTracker::Map::const_iterator jointIt = _jointsMap.find(semantic);
|
||||
if (jointIt != _jointsMap.end())
|
||||
return (*jointIt).second;
|
||||
return INVALID_SEMANTIC;
|
||||
}
|
||||
|
||||
void MotionTracker::updateAllAbsTransform() {
|
||||
_jointsArray[0].updateAbsFromLocTransform(0);
|
||||
|
||||
// Because we know the hierarchy is stored from root down the branches let's just traverse and update
|
||||
for (Index i = 1; i < (Index)(_jointsArray.size()); i++) {
|
||||
JointTracker* joint = _jointsArray.data() + i;
|
||||
joint->updateAbsFromLocTransform(_jointsArray.data() + joint->getParent());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// MotionTracker::JointTracker
|
||||
MotionTracker::JointTracker::JointTracker() :
|
||||
_locFrame(),
|
||||
_absFrame(),
|
||||
_semantic(""),
|
||||
_parent(INVALID_PARENT),
|
||||
_lastUpdate(0)
|
||||
{
|
||||
}
|
||||
|
||||
MotionTracker::JointTracker::JointTracker(const Semantic& semantic, Index parent) :
|
||||
_semantic(semantic),
|
||||
_parent(parent),
|
||||
_lastUpdate(0)
|
||||
{
|
||||
}
|
||||
|
||||
MotionTracker::JointTracker::JointTracker(const JointTracker& tracker) :
|
||||
_locFrame(tracker._locFrame),
|
||||
_absFrame(tracker._absFrame),
|
||||
_semantic(tracker._semantic),
|
||||
_parent(tracker._parent),
|
||||
_lastUpdate(tracker._lastUpdate)
|
||||
{
|
||||
}
|
||||
|
||||
void MotionTracker::JointTracker::updateAbsFromLocTransform(const JointTracker* parentJoint) {
|
||||
if (parentJoint) {
|
||||
editAbsFrame()._transform = (parentJoint->getAbsFrame()._transform * getLocFrame()._transform);
|
||||
} else {
|
||||
editAbsFrame()._transform = getLocFrame()._transform;
|
||||
}
|
||||
}
|
||||
|
||||
void MotionTracker::JointTracker::updateLocFromAbsTransform(const JointTracker* parentJoint) {
|
||||
if (parentJoint) {
|
||||
glm::mat4 ip = glm::inverse(parentJoint->getAbsFrame()._transform);
|
||||
editLocFrame()._transform = (ip * getAbsFrame()._transform);
|
||||
} else {
|
||||
editLocFrame()._transform = getAbsFrame()._transform;
|
||||
}
|
||||
}
|
||||
|
||||
//--------------------------------------------------------------------------------------
|
||||
// MotionTracker::Frame
|
||||
//--------------------------------------------------------------------------------------
|
||||
|
||||
MotionTracker::Frame::Frame() :
|
||||
_transform()
|
||||
{
|
||||
}
|
||||
|
||||
void MotionTracker::Frame::setRotation(const glm::quat& rotation) {
|
||||
glm::mat3x3 rot = glm::mat3_cast(rotation);
|
||||
_transform[0] = glm::vec4(rot[0], 0.f);
|
||||
_transform[1] = glm::vec4(rot[1], 0.f);
|
||||
_transform[2] = glm::vec4(rot[2], 0.f);
|
||||
}
|
||||
|
||||
void MotionTracker::Frame::getRotation(glm::quat& rotation) const {
|
||||
rotation = glm::quat_cast( _transform);
|
||||
}
|
||||
|
||||
void MotionTracker::Frame::setTranslation(const glm::vec3& translation) {
|
||||
_transform[3] = glm::vec4(translation, 1.f);
|
||||
}
|
||||
|
||||
void MotionTracker::Frame::getTranslation(glm::vec3& translation) const {
|
||||
translation = glm::vec3(_transform[3]);
|
||||
}
|
||||
|
interface/src/devices/MotionTracker.h (new file, 121 lines)
@@ -0,0 +1,121 @@
|
|||
//
|
||||
// MotionTracker.h
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/20/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_MotionTracker_h
|
||||
#define hifi_MotionTracker_h
|
||||
|
||||
#include "DeviceTracker.h"
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/quaternion.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
|
||||
/// Base class for device trackers.
|
||||
class MotionTracker : public DeviceTracker {
|
||||
public:
|
||||
|
||||
class Frame {
|
||||
public:
|
||||
Frame();
|
||||
|
||||
glm::mat4 _transform;
|
||||
|
||||
void setRotation(const glm::quat& rotation);
|
||||
void getRotation(glm::quat& rotation) const;
|
||||
|
||||
void setTranslation(const glm::vec3& translation);
|
||||
void getTranslation(glm::vec3& translation) const;
|
||||
};
|
||||
|
||||
// Semantic and Index types to retrieve the JointTrackers of this MotionTracker
|
||||
typedef std::string Semantic;
|
||||
typedef int Index;
|
||||
static const Index INVALID_SEMANTIC = -1;
|
||||
static const Index INVALID_PARENT = -2;
|
||||
|
||||
class JointTracker {
|
||||
public:
|
||||
typedef std::vector< JointTracker > Vector;
|
||||
typedef std::map< Semantic, Index > Map;
|
||||
|
||||
JointTracker();
|
||||
JointTracker(const JointTracker& tracker);
|
||||
JointTracker(const Semantic& semantic, Index parent);
|
||||
|
||||
const Frame& getLocFrame() const { return _locFrame; }
|
||||
Frame& editLocFrame() { return _locFrame; }
|
||||
void setLocFrame(const Frame& frame) { editLocFrame() = frame; }
|
||||
|
||||
const Frame& getAbsFrame() const { return _absFrame; }
|
||||
Frame& editAbsFrame() { return _absFrame; }
|
||||
void setAbsFrame(const Frame& frame) { editAbsFrame() = frame; }
|
||||
|
||||
const Semantic& getSemantic() const { return _semantic; }
|
||||
const Index& getParent() const { return _parent; }
|
||||
|
||||
bool isActive() const { return (_lastUpdate <= 0); }
|
||||
void tickNewFrame() { _lastUpdate++; }
|
||||
void activeFrame() { _lastUpdate = 0; }
|
||||
|
||||
/// Update the loc/abs transform for this joint from the current abs/loc value and the specified parent joint abs frame
|
||||
void updateLocFromAbsTransform(const JointTracker* parentJoint);
|
||||
void updateAbsFromLocTransform(const JointTracker* parentJoint);
|
||||
|
||||
protected:
|
||||
Frame _locFrame;
|
||||
Frame _absFrame;
|
||||
Semantic _semantic;
|
||||
Index _parent;
|
||||
int _lastUpdate;
|
||||
};
|
||||
|
||||
virtual bool isConnected() const;
|
||||
|
||||
Index numJointTrackers() const { return _jointsArray.size(); }
|
||||
|
||||
/// Access a Joint from its index.
|
||||
/// Index 0 is always the "Root".
|
||||
/// if the index is Invalid then returns NULL.
|
||||
const JointTracker* getJointTracker(Index index) const { return ((index > 0) && (index < (Index)(_jointsArray.size())) ? _jointsArray.data() + index : NULL); }
|
||||
JointTracker* editJointTracker(Index index) { return ((index > 0) && (index < (Index)(_jointsArray.size())) ? _jointsArray.data() + index : NULL); }
|
||||
|
||||
/// From a semantic, find the Index of the Joint.
|
||||
/// \return the index of the mapped Joint or INVALID_SEMANTIC if the semantic is not known.
|
||||
Index findJointIndex(const Semantic& semantic) const;
|
||||
|
||||
protected:
|
||||
MotionTracker();
|
||||
virtual ~MotionTracker();
|
||||
|
||||
JointTracker::Vector _jointsArray;
|
||||
JointTracker::Map _jointsMap;
|
||||
|
||||
/// Adding joint is only done from the specialized Motion Tracker, hence this function is protected.
|
||||
/// The hierarchy of joints must be created from the top down to the branches.
|
||||
/// The "Root" node is at index 0 and exists at creation of the Motion Tracker.
|
||||
///
|
||||
/// \param semantic A joint is defined by its semantic, the unique name mapping to it
|
||||
/// \param parent The parent's index, the parent must be valid and correspond to a Joint that has been previously created
|
||||
///
|
||||
/// \return The Index of the newly created Joint.
|
||||
/// Valid if everything went well.
|
||||
/// INVALID_SEMANTIC if the semantic is already in use
|
||||
/// INVALID_PARENT if the parent is not valid
|
||||
Index addJoint(const Semantic& semantic, Index parent);
|
||||
|
||||
/// Update the absolute transform stack traversing the hierarchy from the root down the branches
|
||||
/// This is a generic way to update all the Joint's absFrame by combining the locFrame going down the hierarchy branch.
|
||||
void updateAllAbsTransform();
|
||||
};
|
||||
|
||||
|
||||
|
||||
#endif // hifi_MotionTracker_h
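Because addJoint() is protected, a hierarchy is meant to be declared by a specialized tracker, top down from the implicit "Root" at index 0. A hypothetical subclass, assuming this header is available:

#include "MotionTracker.h"

// Illustrative only: a toy tracker with a three-joint chain hanging off the root.
class ToyTracker : public MotionTracker {
public:
    ToyTracker() {
        Index shoulder = addJoint("shoulder", 0);       // parent 0 is the implicit "Root"
        Index elbow    = addJoint("elbow", shoulder);
        addJoint("wrist", elbow);
    }

    virtual void update() {
        // a real tracker would write editLocFrame()/editAbsFrame() on its joints here;
        // updateAllAbsTransform() then rebuilds every absolute frame in one root-down pass
        updateAllAbsTransform();
    }
};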
|
|
@ -310,7 +310,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
|
||||
Matrix4f proj = ovrMatrix4f_Projection(_eyeRenderDesc[eye].Fov, whichCamera.getNearClip(), whichCamera.getFarClip(), true);
|
||||
proj.Transpose();
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glLoadIdentity();
|
||||
glLoadMatrixf((GLfloat *)proj.M);
|
||||
|
@ -462,52 +461,4 @@ QSize OculusManager::getRenderTargetSize() {
|
|||
#else
|
||||
return QSize(100, 100);
|
||||
#endif
|
||||
}
|
||||
|
||||
//Renders sixense laser pointers for UI selection in the oculus
|
||||
void OculusManager::renderLaserPointers() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
const float PALM_TIP_ROD_RADIUS = 0.002f;
|
||||
|
||||
MyAvatar* myAvatar = Application::getInstance()->getAvatar();
|
||||
|
||||
//If the Oculus is enabled, we will draw a blue cursor ray
|
||||
|
||||
for (size_t i = 0; i < myAvatar->getHand()->getNumPalms(); ++i) {
|
||||
PalmData& palm = myAvatar->getHand()->getPalms()[i];
|
||||
if (palm.isActive()) {
|
||||
glColor4f(0, 1, 1, 1);
|
||||
glm::vec3 tip = getLaserPointerTipPosition(&palm);
|
||||
glm::vec3 root = palm.getPosition();
|
||||
|
||||
//Scale the root vector with the avatar scale
|
||||
myAvatar->scaleVectorRelativeToPosition(root);
|
||||
|
||||
Avatar::renderJointConnectingCone(root, tip, PALM_TIP_ROD_RADIUS, PALM_TIP_ROD_RADIUS);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
//Gets the tip position for the laser pointer
|
||||
glm::vec3 OculusManager::getLaserPointerTipPosition(const PalmData* palm) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
const ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
|
||||
const float PALM_TIP_ROD_LENGTH_MULT = 40.0f;
|
||||
|
||||
glm::vec3 direction = glm::normalize(palm->getTipPosition() - palm->getPosition());
|
||||
|
||||
glm::vec3 position = palm->getPosition();
|
||||
//scale the position with the avatar
|
||||
Application::getInstance()->getAvatar()->scaleVectorRelativeToPosition(position);
|
||||
|
||||
|
||||
glm::vec3 result;
|
||||
if (applicationOverlay.calculateRayUICollisionPoint(position, direction, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
return palm->getPosition();
|
||||
#endif
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff.