resolve conflicts on merge with birarda/protocol

This commit is contained in:
Stephen Birarda 2015-08-18 11:48:36 -07:00
commit fd43c48eac
408 changed files with 17699 additions and 10138 deletions
BUILD.mdBUILD_OSX.mdBUILD_WIN.mdCMakeLists.txt
assignment-client/src/octree
cmake
domain-server
examples
interface

View file

@ -5,6 +5,7 @@
* [OpenSSL](https://www.openssl.org/related/binaries.html) ~> 1.0.1m
* IMPORTANT: Using the recommended version of OpenSSL is critical to avoid security vulnerabilities.
* [VHACD](https://github.com/virneo/v-hacd)(clone this repository)(Optional)
* [zlib](http://www.zlib.net/)
####CMake External Project Dependencies

View file

@ -1,7 +1,7 @@
Please read the [general build guide](BUILD.md) for information on dependencies required for all platforms. Only OS X specific instructions are found in this file.
###Homebrew
[Homebrew](http://brew.sh/) is an excellent package manager for OS X. It makes install of all hifi dependencies very simple.
[Homebrew](http://brew.sh/) is an excellent package manager for OS X. It makes install of all High Fidelity dependencies very simple.
brew tap highfidelity/homebrew-formulas
brew install cmake openssl

View file

@ -75,6 +75,16 @@ To prevent these problems, install OpenSSL yourself. Download the following bina
Install OpenSSL into the Windows system directory, to make sure that Qt uses the version that you've just installed, and not some other version.
####zlib
Install zlib from
[Zlib for Windows](http://gnuwin32.sourceforge.net/packages/zlib.htm)
and fix a header file, as described here:
[zlib zconf.h bug](http://sourceforge.net/p/gnuwin32/bugs/169/)
###Build High Fidelity using Visual Studio
Follow the same build steps from the CMake section of [BUILD.md](BUILD.md), but pass a different generator to CMake.

View file

@ -186,13 +186,10 @@ option(GET_POLYVOX "Get polyvox library automatically as external project" 1)
option(GET_OPENVR "Get OpenVR library automatically as external project" 1)
option(GET_BOOSTCONFIG "Get Boost-config library automatically as external project" 1)
option(GET_OGLPLUS "Get OGLplus library automatically as external project" 1)
option(GET_GLEW "Get GLEW library automatically as external project" 1)
option(USE_NSIGHT "Attempt to find the nSight libraries" 1)
if (WIN32)
option(GET_GLEW "Get GLEW library automatically as external project" 1)
endif ()
option(GET_SDL2 "Get SDL2 library automatically as external project" 0)
if (WIN32)

View file

@ -971,12 +971,7 @@ void OctreeServer::readConfiguration() {
strcpy(_persistFilename, qPrintable(persistFilename));
qDebug("persistFilename=%s", _persistFilename);
QString persistAsFileType;
if (!readOptionString(QString("persistAsFileType"), settingsSectionObject, persistAsFileType)) {
persistAsFileType = "svo";
}
_persistAsFileType = persistAsFileType;
qDebug() << "persistAsFileType=" << _persistAsFileType;
_persistAsFileType = "json.gz";
_persistInterval = OctreePersistThread::DEFAULT_PERSIST_INTERVAL;
readOptionInt(QString("persistInterval"), settingsSectionObject, _persistInterval);

View file

@ -9,8 +9,8 @@ if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://static.oculus.com/sdk-downloads/0.6.0.0/1431634088/ovr_sdk_win_0.6.0.0.zip
URL_MD5 a3dfdab037a854fdcf7e6033fa8d7028
URL http://static.oculus.com/sdk-downloads/0.6.0.1/Public/1435190862/ovr_sdk_win_0.6.0.1.zip
URL_MD5 4b3ef825f9a1d6d3035c9f6820687da9
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@ -1,34 +1,34 @@
if (WIN32)
set(EXTERNAL_NAME glew)
set(EXTERNAL_NAME glew)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/glew-1.10.0-win32.zip
URL_MD5 37514e4e595a3b3dc587eee8f7e8ec2f
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
if (ANDROID)
set(ANDROID_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" "-DANDROID_NATIVE_API_LEVEL=19")
endif ()
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/glew_simple.zip
URL_MD5 0507dc08337a82a5e7ecbc5417f92cc1
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE PATH "List of glew include directories")
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(_LIB_DIR ${SOURCE_DIR}/lib/Release/x64)
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/bin/Release/x64 CACHE FILEPATH "Location of GLEW DLL")
else()
set(_LIB_DIR ${SOURCE_DIR}/lib/Release/Win32)
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/bin/Release/Win32 CACHE FILEPATH "Location of GLEW DLL")
endif()
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE PATH "List of glew include directories")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${_LIB_DIR}/glew32.lib CACHE FILEPATH "Location of GLEW release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE FILEPATH "Location of GLEW debug library")
if (UNIX)
set(LIB_PREFIX "lib")
set(LIB_EXT "a")
elseif (WIN32)
set(LIB_EXT "lib")
endif ()
endif ()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/${LIB_PREFIX}glew_d.${LIB_EXT} CACHE FILEPATH "Path to glew debug library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/${LIB_PREFIX}glew.${LIB_EXT} CACHE FILEPATH "Path to glew release library")

View file

@ -26,16 +26,19 @@ if (WIN32)
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/win32/openvr_api.lib CACHE TYPE INTERNAL)
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)
elseif(APPLE)
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)
elseif(NOT ANDROID)
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux32/libopenvr_api.so CACHE TYPE INTERNAL)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux64/libopenvr_api.so CACHE TYPE INTERNAL)
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux64)
endif()

60
cmake/externals/sixense/CMakeLists.txt vendored Normal file
View file

@ -0,0 +1,60 @@
include(ExternalProject)
include(SelectLibraryConfigurations)
set(EXTERNAL_NAME Sixense)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
ExternalProject_Add(
${EXTERNAL_NAME}
URL ./SixenseSDK_062612.zip
URL_MD5 10cc8dc470d2ac1244a88cf04bc549cc
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
if (APPLE)
find_library(SIXENSE_LIBRARY_RELEASE lib/osx_x64/release_dll/libsixense_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
find_library(SIXENSE_LIBRARY_DEBUG lib/osx_x64/debug_dll/libsixensed_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
elseif (UNIX)
find_library(SIXENSE_LIBRARY_RELEASE lib/linux_x64/release/libsixense_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
# find_library(SIXENSE_LIBRARY_DEBUG lib/linux_x64/debug/libsixensed_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
elseif (WIN32)
endif ()
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)
if (WIN32)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(ARCH_DIR "x64")
set(ARCH_SUFFIX "_x64")
else()
set(ARCH_DIR "Win32")
set(ARCH_SUFFIX "")
endif()
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_LIBRARIES "${SOURCE_DIR}/lib/${ARCH_DIR}/release_dll/sixense${ARCH_SUFFIX}.lib" CACHE TYPE INTERNAL)
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)
elseif(APPLE)
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)
elseif(NOT ANDROID)
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux32/libopenvr_api.so CACHE TYPE INTERNAL)
add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux32)
endif()

28
cmake/externals/zlib/CMakeLists.txt vendored Normal file
View file

@ -0,0 +1,28 @@
if (WIN32)
set(EXTERNAL_NAME zlib)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://zlib.net/zlib128.zip
URL_MD5 126f8676442ffbd97884eb4d6f32afb4
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE PATH "List of zlib include directories")
ExternalProject_Get_Property(${EXTERNAL_NAME} BINARY_DIR)
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${BINARY_DIR}/Release CACHE FILEPATH "Location of GLEW DLL")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${BINARY_DIR}/Release/zlib.lib CACHE FILEPATH "Location of ZLib release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE FILEPATH "Location of ZLib debug library")
endif ()

View file

@ -0,0 +1,16 @@
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(GroupSources curdir)
file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir} ${PROJECT_SOURCE_DIR}/${curdir}/*)
foreach(child ${children})
if(IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
GroupSources(${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES ${PROJECT_SOURCE_DIR}/${curdir}/${child})
endif()
endforeach()
endmacro()

View file

@ -12,7 +12,7 @@ macro(SETUP_HIFI_LIBRARY)
project(${TARGET_NAME})
# grab the implemenation and header files
file(GLOB_RECURSE LIB_SRCS "src/*.h" "src/*.cpp")
file(GLOB_RECURSE LIB_SRCS "src/*.h" "src/*.cpp" "src/*.c")
list(APPEND ${TARGET_NAME}_SRCS ${LIB_SRCS})
# create a library and set the property so it can be referenced later

View file

@ -10,11 +10,6 @@ macro(SETUP_HIFI_OPENGL)
elseif (WIN32)
add_dependency_external_projects(glew)
find_package(GLEW REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLEW_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${GLEW_LIBRARIES} opengl32.lib)
if (USE_NSIGHT)
# try to find the Nsight package and add it to the build if we find it
find_package(NSIGHT)

View file

@ -0,0 +1,38 @@
#
# Find3DConnexionClient.cmake
#
# Once done this will define
# 3DCONNEXIONCLIENT_FOUND - system found 3DConnexion
# 3DCONNEXIONCLIENT_INCLUDE_DIRS - the 3DConnexion include directory
# 3DCONNEXIONCLIENT_LIBRARY - Link this to use 3DConnexion
#
# Created on 10/06/2015 by Marcel Verhagen
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("connexionclient")
if (APPLE)
find_library(3DCONNEXIONCLIENT_LIBRARIES NAMES 3DConnexionClient HINTS 3DCONNEXIONCLIENT_SEARCH_DIRS)
if(EXISTS ${3DConnexionClient})
set(3DCONNEXIONCLIENT_FOUND true)
set(3DCONNEXIONCLIENT_INCLUDE_DIRS ${3DConnexionClient})
set(3DCONNEXIONCLIENT_LIBRARY ${3DConnexionClient})
message(STATUS "Found 3DConnexion at " ${3DConnexionClient})
mark_as_advanced(3DCONNEXIONCLIENT_INCLUDE_DIR 3DCONNEXIONCLIENT_LIBRARY)
else ()
message(STATUS "Could NOT find 3DConnexionClient")
endif()
elseif (WIN32)
find_path(3DCONNEXIONCLIENT_INCLUDE_DIRS I3dMouseParams.h PATH_SUFFIXES include HINTS ${3DCONNEXIONCLIENT_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(3DConnexionClient DEFAULT_MSG 3DCONNEXIONCLIENT_INCLUDE_DIRS)
mark_as_advanced(3DCONNEXIONCLIENT_INCLUDE_DIRS 3DCONNEXIONCLIENT_SEARCH_DIRS)
endif()

View file

@ -1,7 +1,7 @@
#
# FindGLEW.cmake
#
# Try to find GLEW library and include path.
# Try to find GLEW library and include path. Note that this only handles static GLEW.
# Once done this will define
#
# GLEW_FOUND
@ -18,39 +18,18 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
if (WIN32)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("glew")
find_path(GLEW_INCLUDE_DIRS GL/glew.h PATH_SUFFIXES include HINTS ${GLEW_SEARCH_DIRS})
find_library(GLEW_LIBRARY_RELEASE glew32 PATH_SUFFIXES "lib/Release/Win32" "lib" HINTS ${GLEW_SEARCH_DIRS})
find_library(GLEW_LIBRARY_DEBUG glew32d PATH_SUFFIXES "lib/Debug/Win32" "lib" HINTS ${GLEW_SEARCH_DIRS})
find_path(GLEW_DLL_PATH glew32.dll PATH_SUFFIXES "bin/Release/Win32" HINTS ${GLEW_SEARCH_DIRS})
include(SelectLibraryConfigurations)
select_library_configurations(GLEW)
set(GLEW_LIBRARIES ${GLEW_LIBRARY})
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("glew")
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW DEFAULT_MSG GLEW_INCLUDE_DIRS GLEW_LIBRARIES GLEW_DLL_PATH)
add_paths_to_fixup_libs(${GLEW_DLL_PATH})
elseif (APPLE)
else ()
find_path(GLEW_INCLUDE_DIR GL/glew.h)
find_library(GLEW_LIBRARY NAMES GLEW glew32 glew glew32s PATH_SUFFIXES lib64)
find_path(GLEW_INCLUDE_DIRS GL/glew.h PATH_SUFFIXES include HINTS ${GLEW_SEARCH_DIRS})
set(GLEW_INCLUDE_DIRS ${GLEW_INCLUDE_DIR})
set(GLEW_LIBRARIES ${GLEW_LIBRARY})
find_library(GLEW_LIBRARY_RELEASE glew32 PATH_SUFFIXES "lib/Release/Win32" "lib" HINTS ${GLEW_SEARCH_DIRS})
find_library(GLEW_LIBRARY_DEBUG glew32d PATH_SUFFIXES "lib/Debug/Win32" "lib" HINTS ${GLEW_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW
REQUIRED_VARS GLEW_INCLUDE_DIR GLEW_LIBRARY)
include(SelectLibraryConfigurations)
select_library_configurations(GLEW)
mark_as_advanced(GLEW_INCLUDE_DIR GLEW_LIBRARY)
endif ()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW DEFAULT_MSG GLEW_INCLUDE_DIRS GLEW_LIBRARIES)
message(STATUS "Found GLEW - Assuming that GLEW is static and defining GLEW_STATIC")

View file

@ -1,38 +0,0 @@
#
# FindconnexionClient.cmake
#
# Once done this will define
#
# 3DCONNEXIONCLIENT_INCLUDE_DIRS
#
# Created on 10/06/2015 by Marcel Verhagen
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
# setup hints for 3DCONNEXIONCLIENT search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("connexionclient")
if (APPLE)
find_library(3DconnexionClient 3DconnexionClient)
if(EXISTS ${3DconnexionClient})
set(CONNEXIONCLIENT_FOUND true)
set(CONNEXIONCLIENT_INCLUDE_DIR ${3DconnexionClient})
set(CONNEXIONCLIENT_LIBRARY ${3DconnexionClient})
set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS "-weak_framework 3DconnexionClient")
message(STATUS "Found 3Dconnexion")
mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIR CONNEXIONCLIENT_LIBRARY)
endif()
endif()
if (WIN32)
find_path(CONNEXIONCLIENT_INCLUDE_DIRS I3dMouseParams.h PATH_SUFFIXES Inc HINTS ${CONNEXIONCLIENT_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(connexionClient DEFAULT_MSG CONNEXIONCLIENT_INCLUDE_DIRS)
mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIRS CONNEXIONCLIENT_SEARCH_DIRS)
endif()

View file

@ -0,0 +1,66 @@
#
# FindiViewHMD.cmake
#
# Try to find the SMI iViewHMD eye tracker library
#
# You must provide a IVIEWHMD_ROOT_DIR which contains 3rdParty, include, and libs directories
#
# Once done this will define
#
# IVIEWHMD_FOUND - system found iViewHMD
# IVIEWHMD_INCLUDE_DIRS - the iViewHMD include directory
# IVIEWHMD_LIBRARIES - link this to use iViewHMD
#
# Created on 27 Jul 2015 by David Rowe
# Copyright 2015 High Fidelity, Inc.
#
if (WIN32)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("iViewHMD")
find_path(IVIEWHMD_INCLUDE_DIRS iViewHMDAPI.h PATH_SUFFIXES include HINTS ${IVIEWHMD_SEARCH_DIRS})
find_library(IVIEWHMD_LIBRARIES NAMES iViewHMDAPI PATH_SUFFIXES libs HINTS ${IVIEWHMD_SEARCH_DIRS})
find_path(IVIEWHMD_API_DLL_PATH iViewHMDAPI.dll PATH_SUFFIXES libs HINTS ${IVIEWHMD_SEARCH_DIRS})
list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_API_DLL_PATH)
set(IVIEWHMD_DLLS
avcodec-53.dll
avformat-53.dll
avutil-51.dll
libboost_filesystem-mgw45-mt-1_49.dll
libboost_system-mgw45-mt-1_49.dll
libboost_thread-mgw45-mt-1_49.dll
libgcc_s_dw2-1.dll
libiViewNG-LibCore.dll
libopencv_calib3d244.dll
libopencv_core244.dll
libopencv_features2d244.dll
libopencv_flann244.dll
libopencv_highgui244.dll
libopencv_imgproc244.dll
libopencv_legacy244.dll
libopencv_ml244.dll
libopencv_video244.dll
libstdc++-6.dll
opencv_core220.dll
opencv_highgui220.dll
opencv_imgproc220.dll
swscale-2.dll
)
foreach(IVIEWHMD_DLL ${IVIEWHMD_DLLS})
find_path(IVIEWHMD_DLL_PATH ${IVIEWHMD_DLL} PATH_SUFFIXES 3rdParty HINTS ${IVIEWHMD_SEARCH_DIRS})
list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_DLL_PATH)
list(APPEND IVIEWHMD_DLL_PATHS ${IVIEWHMD_DLL_PATH})
endforeach()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(IVIEWHMD DEFAULT_MSG ${IVIEWHMD_REQUIREMENTS})
add_paths_to_fixup_libs(${IVIEWHMD_API_DLL_PATH} ${IVIEWHMD_DLL_PATHS})
mark_as_advanced(IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_SEARCH_DIRS)
endif()

View file

@ -371,30 +371,8 @@
"name": "persistFilename",
"label": "Entities Filename",
"help": "the path to the file entities are stored in. Make sure the path exists.",
"placeholder": "resources/models.svo",
"default": "resources/models.svo",
"advanced": true
},
{
"name": "persistAsFileType",
"label": "File format for entity server's persistent data",
"help": "This defines how the entity server will save entities to disk.",
"default": "svo",
"type": "select",
"options": [
{
"value": "svo",
"label": "Entity server persists data as SVO"
},
{
"value": "json",
"label": "Entity server persists data as JSON"
},
{
"value": "json.gz",
"label": "Entity server persists data as gzipped JSON"
}
],
"placeholder": "resources/models.json.gz",
"default": "resources/models.json.gz",
"advanced": true
},
{

View file

@ -624,31 +624,62 @@ void DomainServer::processConnectRequestPacket(QSharedPointer<NLPacket> packet)
return;
}
}
}
QList<NodeType_t> nodeInterestList;
QString username;
QByteArray usernameSignature;
packetStream >> nodeInterestList >> username >> usernameSignature;
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
packetStream >> nodeInterestList;
if (packet->bytesLeftToRead() > 0) {
// try to verify username
packetStream >> username;
}
bool isRestrictingAccess =
_settingsManager.valueOrDefaultValueForKeyPath(RESTRICTED_ACCESS_SETTINGS_KEYPATH).toBool();
// we always let in a user who is sending a packet from our local socket or from the localhost address
bool isLocalUser = (senderSockAddr.getAddress() == DependencyManager::get<LimitedNodeList>()->getLocalSockAddr().getAddress() || senderSockAddr.getAddress() == QHostAddress::LocalHost);
if (isRestrictingAccess && !isLocalUser) {
if (!username.isEmpty()) {
// if there's a username, try to unpack username signature
packetStream >> usernameSignature;
if (usernameSignature.isEmpty()) {
// if user didn't include usernameSignature in connect request, send a connectionToken packet
QUuid& connectionToken = _connectionTokenHash[username.toLower()];
if (connectionToken.isNull()) {
connectionToken = QUuid::createUuid();
}
static auto connectionTokenPacket = NLPacket::create(PacketType::DomainServerConnectionToken, NUM_BYTES_RFC4122_UUID);
connectionTokenPacket->reset();
connectionTokenPacket->write(connectionToken.toRfc4122());
limitedNodeList->sendUnreliablePacket(*connectionTokenPacket, packet->getSenderSockAddr());
return;
}
}
}
QString reason;
if (!isAssignment && !shouldAllowConnectionFromNode(username, usernameSignature, senderSockAddr, reason)) {
// this is an agent and we've decided we won't let them connect - send them a packet to deny connection
QByteArray utfString = reason.toUtf8();
quint16 payloadSize = utfString.size();
auto connectionDeniedPacket = NLPacket::create(PacketType::DomainConnectionDenied, payloadSize + sizeof(payloadSize));
connectionDeniedPacket->writePrimitive(payloadSize);
connectionDeniedPacket->write(utfString);
if (payloadSize > 0) {
connectionDeniedPacket->writePrimitive(payloadSize);
connectionDeniedPacket->write(utfString);
}
// tell client it has been refused.
limitedNodeList->sendPacket(std::move(connectionDeniedPacket), senderSockAddr);
return;
}
@ -735,6 +766,7 @@ void DomainServer::processConnectRequestPacket(QSharedPointer<NLPacket> packet)
}
}
void DomainServer::processListRequestPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
NodeType_t throwawayNodeType;
@ -765,52 +797,61 @@ unsigned int DomainServer::countConnectedUsers() {
}
bool DomainServer::verifyUsersKey(const QString& username,
const QByteArray& usernameSignature,
QString& reasonReturn) {
bool DomainServer::verifyUserSignature(const QString& username,
const QByteArray& usernameSignature,
QString& reasonReturn) {
// it's possible this user can be allowed to connect, but we need to check their username signature
QByteArray publicKeyArray = _userPublicKeys.value(username);
if (!publicKeyArray.isEmpty()) {
const QUuid& connectionToken = _connectionTokenHash.value(username.toLower());
if (!publicKeyArray.isEmpty() && !connectionToken.isNull()) {
// if we do have a public key for the user, check for a signature match
const unsigned char* publicKeyData = reinterpret_cast<const unsigned char*>(publicKeyArray.constData());
// first load up the public key into an RSA struct
RSA* rsaPublicKey = d2i_RSA_PUBKEY(NULL, &publicKeyData, publicKeyArray.size());
QByteArray lowercaseUsername = username.toLower().toUtf8();
QByteArray usernameWithToken = QCryptographicHash::hash(lowercaseUsername.append(connectionToken.toRfc4122()),
QCryptographicHash::Sha256);
if (rsaPublicKey) {
QByteArray decryptedArray(RSA_size(rsaPublicKey), 0);
int decryptResult =
RSA_public_decrypt(usernameSignature.size(),
reinterpret_cast<const unsigned char*>(usernameSignature.constData()),
reinterpret_cast<unsigned char*>(decryptedArray.data()),
rsaPublicKey, RSA_PKCS1_PADDING);
int decryptResult = RSA_verify(NID_sha256,
reinterpret_cast<const unsigned char*>(usernameWithToken.constData()),
usernameWithToken.size(),
reinterpret_cast<const unsigned char*>(usernameSignature.constData()),
usernameSignature.size(),
rsaPublicKey);
if (decryptResult == 1) {
qDebug() << "Username signature matches for" << username << "- allowing connection.";
if (decryptResult != -1) {
if (username.toLower() == decryptedArray) {
qDebug() << "Username signature matches for" << username << "- allowing connection.";
// free up the public key and remove connection token before we return
RSA_free(rsaPublicKey);
_connectionTokenHash.remove(username);
// free up the public key before we return
RSA_free(rsaPublicKey);
return true;
} else {
qDebug() << "Username signature did not match for" << username << "- denying connection.";
reasonReturn = "Username signature did not match.";
}
return true;
} else {
qDebug() << "Couldn't decrypt user signature for" << username << "- denying connection.";
reasonReturn = "Couldn't decrypt user signature.";
qDebug() << "Error decrypting username signature for " << username << "- denying connection.";
reasonReturn = "Error decrypting username signature.";
// free up the public key, we don't need it anymore
RSA_free(rsaPublicKey);
}
// free up the public key, we don't need it anymore
RSA_free(rsaPublicKey);
} else {
// we can't let this user in since we couldn't convert their public key to an RSA key we could use
qDebug() << "Couldn't convert data to RSA key for" << username << "- denying connection.";
reasonReturn = "Couldn't convert data to RSA key.";
}
} else {
qDebug() << "Insufficient data to decrypt username signature - denying connection.";
reasonReturn = "Insufficient data";
}
requestUserPublicKey(username); // no joy. maybe next time?
@ -822,41 +863,40 @@ bool DomainServer::shouldAllowConnectionFromNode(const QString& username,
const QByteArray& usernameSignature,
const HifiSockAddr& senderSockAddr,
QString& reasonReturn) {
//TODO: improve flow so these bools aren't declared twice
bool isRestrictingAccess =
_settingsManager.valueOrDefaultValueForKeyPath(RESTRICTED_ACCESS_SETTINGS_KEYPATH).toBool();
bool isLocalUser = (senderSockAddr.getAddress() == DependencyManager::get<LimitedNodeList>()->getLocalSockAddr().getAddress() || senderSockAddr.getAddress() == QHostAddress::LocalHost);
// we always let in a user who is sending a packet from our local socket or from the localhost address
if (senderSockAddr.getAddress() == DependencyManager::get<LimitedNodeList>()->getLocalSockAddr().getAddress()
|| senderSockAddr.getAddress() == QHostAddress::LocalHost) {
return true;
}
if (isRestrictingAccess) {
if (isRestrictingAccess && !isLocalUser) {
QStringList allowedUsers =
_settingsManager.valueOrDefaultValueForKeyPath(ALLOWED_USERS_SETTINGS_KEYPATH).toStringList();
if (allowedUsers.contains(username, Qt::CaseInsensitive)) {
if (!verifyUsersKey(username, usernameSignature, reasonReturn)) {
if (username.isEmpty()) {
qDebug() << "Connect request denied - no username provided.";
reasonReturn = "No username provided";
return false;
}
if (!verifyUserSignature(username, usernameSignature, reasonReturn)) {
return false;
}
} else {
qDebug() << "Connect request denied for user" << username << "not in allowed users list.";
reasonReturn = "User not on whitelist.";
return false;
}
}
// either we aren't restricting users, or this user is in the allowed list
// if this user is in the editors list, exempt them from the max-capacity check
const QVariant* allowedEditorsVariant =
valueForKeyPath(_settingsManager.getSettingsMap(), ALLOWED_EDITORS_SETTINGS_KEYPATH);
QStringList allowedEditors = allowedEditorsVariant ? allowedEditorsVariant->toStringList() : QStringList();
if (allowedEditors.contains(username)) {
if (verifyUsersKey(username, usernameSignature, reasonReturn)) {
if (verifyUserSignature(username, usernameSignature, reasonReturn)) {
return true;
}
}

View file

@ -90,7 +90,7 @@ private:
void pingPunchForConnectingPeer(const SharedNetworkPeer& peer);
unsigned int countConnectedUsers();
bool verifyUsersKey (const QString& username, const QByteArray& usernameSignature, QString& reasonReturn);
bool verifyUserSignature (const QString& username, const QByteArray& usernameSignature, QString& reasonReturn);
bool shouldAllowConnectionFromNode(const QString& username, const QByteArray& usernameSignature,
const HifiSockAddr& senderSockAddr, QString& reasonReturn);
@ -149,6 +149,8 @@ private:
QSet<QUuid> _webAuthenticationStateSet;
QHash<QUuid, DomainServerWebSessionData> _cookieSessionHash;
QHash<QString, QUuid> _connectionTokenHash;
QHash<QString, QByteArray> _userPublicKeys;

View file

@ -9,6 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QtCore/QDataStream>
#include <QtCore/QDebug>
#include <QtCore/QJsonArray>
#include <QtCore/QJsonObject>

View file

@ -5,7 +5,7 @@
// Created by Zander Otavka on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Shows a few common controls in a FloatingUIPanel on right click.
// Shows a few common controls in a OverlayPanel on right click.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@ -22,14 +22,13 @@ var MIC_IMAGE_URL = HIFI_PUBLIC_BUCKET + "images/tools/mic-toggle.svg";
var FACE_IMAGE_URL = HIFI_PUBLIC_BUCKET + "images/tools/face-toggle.svg";
var ADDRESS_BAR_IMAGE_URL = HIFI_PUBLIC_BUCKET + "images/tools/address-bar-toggle.svg";
var panel = new FloatingUIPanel({
anchorPosition: {
bind: "myAvatar"
},
offsetPosition: { x: 0, y: 0.4, z: 1 }
var panel = new OverlayPanel({
anchorPositionBinding: { avatar: "MyAvatar" },
offsetPosition: { x: 0, y: 0.4, z: -1 },
visible: false
});
var background = new BillboardOverlay({
var background = new Image3DOverlay({
url: BG_IMAGE_URL,
dimensions: {
x: 0.5,
@ -37,11 +36,12 @@ var background = new BillboardOverlay({
},
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false
ignoreRayIntersection: false,
visible: false
});
panel.addChild(background);
var closeButton = new BillboardOverlay({
var closeButton = new Image3DOverlay({
url: CLOSE_IMAGE_URL,
dimensions: {
x: 0.15,
@ -51,17 +51,18 @@ var closeButton = new BillboardOverlay({
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: -0.1,
x: 0.1,
y: 0.1,
z: -0.001
}
z: 0.001
},
visible: false
});
closeButton.onClick = function(event) {
panel.visible = false;
};
panel.addChild(closeButton);
var micMuteButton = new BillboardOverlay({
var micMuteButton = new Image3DOverlay({
url: MIC_IMAGE_URL,
subImage: {
x: 0,
@ -77,17 +78,18 @@ var micMuteButton = new BillboardOverlay({
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: 0.1,
x: -0.1,
y: 0.1,
z: -0.001
}
z: 0.001
},
visible: false
});
micMuteButton.onClick = function(event) {
AudioDevice.toggleMute();
};
panel.addChild(micMuteButton);
var faceMuteButton = new BillboardOverlay({
var faceMuteButton = new Image3DOverlay({
url: FACE_IMAGE_URL,
subImage: {
x: 0,
@ -102,43 +104,47 @@ var faceMuteButton = new BillboardOverlay({
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: 0.1,
y: -0.1,
z: -0.001
}
});
faceMuteButton.onClick = function(event) {
FaceTracker.toggleMute();
};
panel.addChild(faceMuteButton);
var addressBarButton = new BillboardOverlay({
url: ADDRESS_BAR_IMAGE_URL,
subImage: {
x: 0,
y: 0,
width: 45,
height: 45
},
dimensions: {
x: 0.15,
y: 0.15,
},
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: -0.1,
y: -0.1,
z: -0.001
}
z: 0.001
},
visible: false
});
faceMuteButton.onClick = function(event) {
FaceTracker.toggleMute();
};
panel.addChild(faceMuteButton);
var addressBarButton = new Image3DOverlay({
url: ADDRESS_BAR_IMAGE_URL,
subImage: {
x: 0,
y: 0,
width: 45,
height: 45
},
dimensions: {
x: 0.15,
y: 0.15,
},
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: 0.1,
y: -0.1,
z: 0.001
},
visible: false
});
addressBarButton.onClick = function(event) {
DialogsManager.toggleAddressBar();
};
panel.addChild(addressBarButton);
panel.setChildrenVisible();
function onMicMuteToggled() {
var offset;
@ -181,6 +187,16 @@ function onMouseDown(event) {
if (event.isRightButton) {
mouseDown.pos = { x: event.x, y: event.y };
}
mouseDown.maxDistance = 0;
}
function onMouseMove(event) {
if (mouseDown.maxDistance !== undefined) {
var dist = Vec3.distance(mouseDown.pos, { x: event.x, y: event.y });
if (dist > mouseDown.maxDistance) {
mouseDown.maxDistance = dist;
}
}
}
function onMouseUp(event) {
@ -190,13 +206,10 @@ function onMouseUp(event) {
overlay.onClick(event);
}
}
if (event.isRightButton && Vec3.distance(mouseDown.pos, { x: event.x, y: event.y }) < 5) {
if (event.isRightButton && mouseDown.maxDistance < 10) {
panel.setProperties({
visible: !panel.visible,
offsetRotation: {
bind: "quat",
value: Quat.multiply(MyAvatar.orientation, { x: 0, y: 1, z: 0, w: 0 })
}
anchorRotation: MyAvatar.orientation
});
}
@ -208,6 +221,7 @@ function onScriptEnd(event) {
}
Controller.mousePressEvent.connect(onMouseDown);
Controller.mouseMoveEvent.connect(onMouseMove);
Controller.mouseReleaseEvent.connect(onMouseUp);
AudioDevice.muteToggled.connect(onMicMuteToggled);
FaceTracker.muteToggled.connect(onFaceMuteToggled);

View file

@ -0,0 +1,79 @@
//
// squeezeHands.js
// examples
//
// Created by Philip Rosedale on June 4, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var rightHandAnimation = HIFI_PUBLIC_BUCKET + "animations/RightHandAnimPhilip.fbx";
var leftHandAnimation = HIFI_PUBLIC_BUCKET + "animations/LeftHandAnimPhilip.fbx";
var LEFT = 0;
var RIGHT = 1;
var lastLeftFrame = 0;
var lastRightFrame = 0;
var leftDirection = true;
var rightDirection = true;
var LAST_FRAME = 15.0; // What is the number of the last frame we want to use in the animation?
var SMOOTH_FACTOR = 0.0;
var MAX_FRAMES = 30.0;
var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
Script.update.connect(function(deltaTime) {
var leftTriggerValue = Controller.getActionValue(LEFT_HAND_CLICK);
var rightTriggerValue = Controller.getActionValue(RIGHT_HAND_CLICK);
var leftFrame, rightFrame;
// Average last few trigger frames together for a bit of smoothing
leftFrame = (leftTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastLeftFrame * SMOOTH_FACTOR;
rightFrame = (rightTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastRightFrame * SMOOTH_FACTOR;
if (!leftDirection) {
leftFrame = MAX_FRAMES - leftFrame;
}
if (!rightDirection) {
rightFrame = MAX_FRAMES - rightFrame;
}
if ((leftTriggerValue == 1.0) && (leftDirection == true)) {
leftDirection = false;
lastLeftFrame = MAX_FRAMES - leftFrame;
} else if ((leftTriggerValue == 0.0) && (leftDirection == false)) {
leftDirection = true;
lastLeftFrame = leftFrame;
}
if ((rightTriggerValue == 1.0) && (rightDirection == true)) {
rightDirection = false;
lastRightFrame = MAX_FRAMES - rightFrame;
} else if ((rightTriggerValue == 0.0) && (rightDirection == false)) {
rightDirection = true;
lastRightFrame = rightFrame;
}
if ((leftFrame != lastLeftFrame) && leftHandAnimation.length){
MyAvatar.startAnimation(leftHandAnimation, 30.0, 1.0, false, true, leftFrame, leftFrame);
}
if ((rightFrame != lastRightFrame) && rightHandAnimation.length) {
MyAvatar.startAnimation(rightHandAnimation, 30.0, 1.0, false, true, rightFrame, rightFrame);
}
lastLeftFrame = leftFrame;
lastRightFrame = rightFrame;
});
Script.scriptEnding.connect(function() {
MyAvatar.stopAnimation(leftHandAnimation);
MyAvatar.stopAnimation(rightHandAnimation);
});

View file

@ -0,0 +1,430 @@
// handGrab.js
// examples
//
// Created by Sam Gondelman on 8/3/2015
// Copyright 2015 High Fidelity, Inc.
//
// Allow avatar to grab the closest object to each hand and throw them
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var leftHandObjectID = null;
var rightHandObjectID = null;
var leftHandActionID = nullActionID;
var rightHandActionID = nullActionID;
var TRIGGER_THRESHOLD = 0.2;
var GRAB_RADIUS = 0.15;
var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
var ACTION1 = Controller.findAction("ACTION1");
var ACTION2 = Controller.findAction("ACTION2");
var rightHandGrabAction = RIGHT_HAND_CLICK;
var leftHandGrabAction = LEFT_HAND_CLICK;
var rightHandGrabValue = 0;
var leftHandGrabValue = 0;
var prevRightHandGrabValue = 0
var prevLeftHandGrabValue = 0;
var grabColor = { red: 0, green: 255, blue: 0};
var releaseColor = { red: 0, green: 0, blue: 255};
var toolBar = new ToolBar(0, 0, ToolBar.vertical, "highfidelity.toybox.toolbar", function() {
return {
x: 100,
y: 380
};
});
var BUTTON_SIZE = 32;
var SWORD_IMAGE = "https://hifi-public.s3.amazonaws.com/images/sword/sword.svg"; // TODO: replace this with a table icon
var CLEANUP_IMAGE = "http://s3.amazonaws.com/hifi-public/images/delete.png";
var tableButton = toolBar.addOverlay("image", {
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: SWORD_IMAGE,
alpha: 1
});
var cleanupButton = toolBar.addOverlay("image", {
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: CLEANUP_IMAGE,
alpha: 1
});
var overlays = false;
var leftHandOverlay;
var rightHandOverlay;
if (overlays) {
leftHandOverlay = Overlays.addOverlay("sphere", {
position: MyAvatar.getLeftPalmPosition(),
size: GRAB_RADIUS,
color: releaseColor,
alpha: 0.5,
solid: false
});
rightHandOverlay = Overlays.addOverlay("sphere", {
position: MyAvatar.getRightPalmPosition(),
size: GRAB_RADIUS,
color: releaseColor,
alpha: 0.5,
solid: false
});
}
var OBJECT_HEIGHT_OFFSET = 0.5;
var MIN_OBJECT_SIZE = 0.05;
var MAX_OBJECT_SIZE = 0.3;
var TABLE_DIMENSIONS = {
x: 10.0,
y: 0.2,
z: 5.0
};
var GRAVITY = {
x: 0.0,
y: -2.0,
z: 0.0
}
var LEFT = 0;
var RIGHT = 1;
var tableCreated = false;
var NUM_OBJECTS = 100;
var tableEntities = Array(NUM_OBJECTS + 1); // Also includes table
var VELOCITY_MAG = 0.3;
var entitiesToResize = [];
var MODELS = Array(
{ modelURL: "https://hifi-public.s3.amazonaws.com/ozan/props/sword/sword.fbx" },
{ modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Vehicles/clara/spaceshuttle.fbx" },
{ modelURL: "https://s3.amazonaws.com/hifi-public/cozza13/apartment/Stargate.fbx" },
{ modelURL: "https://dl.dropboxusercontent.com/u/17344741/kelectricguitar10/kelectricguitar10.fbx" },
{ modelURL: "https://dl.dropboxusercontent.com/u/17344741/ktoilet10/ktoilet10.fbx" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/BilliardsTable.fbx" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/ozan/avatars/robotMedic/robotMedicRed/robotMedicRed.fst" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/ozan/avatars/robotMedic/robotMedicFaceRig/robotMedic.fst" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/029db3d4-da2c-4cb2-9c08-b9612ba576f5/02949063e7c4aed42ad9d1a58461f56d.fst?1427169842" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/Bar.fbx" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/96124d04-d603-4707-a5b3-e03bf47a53b2/1431770eba362c1c25c524126f2970fb.fst?1436924721" }
// Complex models:
// { modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Architecture/sketchfab/cudillero.fbx" },
// { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/musicality/musicality.fbx" },
// { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/statelyHome/statelyHome.fbx" }
);
var COLLISION_SOUNDS = Array(
"http://public.highfidelity.io/sounds/Collisions-ballhitsandcatches/pingpong_TableBounceMono.wav",
"http://public.highfidelity.io/sounds/Collisions-ballhitsandcatches/billiards/collision1.wav"
);
var RESIZE_TIMER = 0.0;
var RESIZE_WAIT = 0.05; // 50 milliseconds
var leftFist = Entities.addEntity( {
type: "Sphere",
shapeType: 'sphere',
position: MyAvatar.getLeftPalmPosition(),
dimensions: { x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS },
rotation: MyAvatar.getLeftPalmRotation(),
visible: false,
collisionsWillMove: false,
ignoreForCollisions: true
});
var rightFist = Entities.addEntity( {
type: "Sphere",
shapeType: 'sphere',
position: MyAvatar.getRightPalmPosition(),
dimensions: { x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS },
rotation: MyAvatar.getRightPalmRotation(),
visible: false,
collisionsWillMove: false,
ignoreForCollisions: true
});
function letGo(hand) {
var actionIDToRemove = (hand == LEFT) ? leftHandActionID : rightHandActionID;
var entityIDToEdit = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
var handVelocity = (hand == LEFT) ? MyAvatar.getLeftPalmVelocity() : MyAvatar.getRightPalmVelocity();
var handAngularVelocity = (hand == LEFT) ? MyAvatar.getLeftPalmAngularVelocity() :
MyAvatar.getRightPalmAngularVelocity();
if (actionIDToRemove != nullActionID && entityIDToEdit != null) {
Entities.deleteAction(entityIDToEdit, actionIDToRemove);
// TODO: upon successful letGo, restore collision groups
if (hand == LEFT) {
leftHandObjectID = null;
leftHandActionID = nullActionID;
} else {
rightHandObjectID = null;
rightHandActionID = nullActionID;
}
}
}
function setGrabbedObject(hand) {
var handPosition = (hand == LEFT) ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition();
var entities = Entities.findEntities(handPosition, GRAB_RADIUS);
var objectID = null;
var minDistance = GRAB_RADIUS;
for (var i = 0; i < entities.length; i++) {
// Don't grab the object in your other hands, your fists, or the table
if ((hand == LEFT && entities[i] == rightHandObjectID) ||
(hand == RIGHT && entities[i] == leftHandObjectID) ||
entities[i] == leftFist || entities[i] == rightFist ||
(tableCreated && entities[i] == tableEntities[0])) {
continue;
} else {
var distance = Vec3.distance(Entities.getEntityProperties(entities[i]).position, handPosition);
if (distance <= minDistance) {
objectID = entities[i];
minDistance = distance;
}
}
}
if (objectID == null) {
return false;
}
if (hand == LEFT) {
leftHandObjectID = objectID;
} else {
rightHandObjectID = objectID;
}
return true;
}
function grab(hand) {
if (!setGrabbedObject(hand)) {
// If you don't grab an object, make a fist
Entities.editEntity((hand == LEFT) ? leftFist : rightFist, { ignoreForCollisions: false } );
return;
}
var objectID = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
var handRotation = (hand == LEFT) ? MyAvatar.getLeftPalmRotation() : MyAvatar.getRightPalmRotation();
var handPosition = (hand == LEFT) ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition();
var objectRotation = Entities.getEntityProperties(objectID).rotation;
var offsetRotation = Quat.multiply(Quat.inverse(handRotation), objectRotation);
var objectPosition = Entities.getEntityProperties(objectID).position;
var offset = Vec3.subtract(objectPosition, handPosition);
var offsetPosition = Vec3.multiplyQbyV(Quat.inverse(Quat.multiply(handRotation, offsetRotation)), offset);
// print(JSON.stringify(offsetPosition));
var actionID = Entities.addAction("hold", objectID, {
relativePosition: { x: 0, y: 0, z: 0 },
relativeRotation: offsetRotation,
hand: (hand == LEFT) ? "left" : "right",
timeScale: 0.05
});
if (actionID == nullActionID) {
if (hand == LEFT) {
leftHandObjectID = null;
} else {
rightHandObjectID = null;
}
} else {
// TODO: upon successful grab, add to collision group so object doesn't collide with immovable entities
if (hand == LEFT) {
leftHandActionID = actionID;
} else {
rightHandActionID = actionID;
}
}
}
function resizeModels() {
var newEntitiesToResize = [];
for (var i = 0; i < entitiesToResize.length; i++) {
var naturalDimensions = Entities.getEntityProperties(entitiesToResize[i]).naturalDimensions;
if (naturalDimensions.x != 1.0 || naturalDimensions.y != 1.0 || naturalDimensions.z != 1.0) {
// bigger range of sizes for models
var dimensions = Vec3.multiply(randFloat(MIN_OBJECT_SIZE, 3.0*MAX_OBJECT_SIZE), Vec3.normalize(naturalDimensions));
Entities.editEntity(entitiesToResize[i], {
dimensions: dimensions,
shapeType: "box"
});
} else {
newEntitiesToResize.push(entitiesToResize[i]);
}
}
entitiesToResize = newEntitiesToResize;
}
function update(deltaTime) {
if (overlays) {
Overlays.editOverlay(leftHandOverlay, { position: MyAvatar.getLeftPalmPosition() });
Overlays.editOverlay(rightHandOverlay, { position: MyAvatar.getRightPalmPosition() });
}
// if (tableCreated && RESIZE_TIMER < RESIZE_WAIT) {
// RESIZE_TIMER += deltaTime;
// } else if (tableCreated) {
// resizeModels();
// }
rightHandGrabValue = Controller.getActionValue(rightHandGrabAction);
leftHandGrabValue = Controller.getActionValue(leftHandGrabAction);
Entities.editEntity(leftFist, { position: MyAvatar.getLeftPalmPosition() });
Entities.editEntity(rightFist, { position: MyAvatar.getRightPalmPosition() });
if (rightHandGrabValue > TRIGGER_THRESHOLD &&
prevRightHandGrabValue < TRIGGER_THRESHOLD) {
if (overlays) {
Overlays.editOverlay(rightHandOverlay, { color: grabColor });
}
grab(RIGHT);
} else if (rightHandGrabValue < TRIGGER_THRESHOLD &&
prevRightHandGrabValue > TRIGGER_THRESHOLD) {
Entities.editEntity(rightFist, { ignoreForCollisions: true } );
if (overlays) {
Overlays.editOverlay(rightHandOverlay, { color: releaseColor });
}
letGo(RIGHT);
}
if (leftHandGrabValue > TRIGGER_THRESHOLD &&
prevLeftHandGrabValue < TRIGGER_THRESHOLD) {
if (overlays) {
Overlays.editOverlay(leftHandOverlay, { color: grabColor });
}
grab(LEFT);
} else if (leftHandGrabValue < TRIGGER_THRESHOLD &&
prevLeftHandGrabValue > TRIGGER_THRESHOLD) {
Entities.editEntity(leftFist, { ignoreForCollisions: true } );
if (overlays) {
Overlays.editOverlay(leftHandOverlay, { color: releaseColor });
}
letGo(LEFT);
}
prevRightHandGrabValue = rightHandGrabValue;
prevLeftHandGrabValue = leftHandGrabValue;
}
function cleanUp() {
letGo(RIGHT);
letGo(LEFT);
if (overlays) {
Overlays.deleteOverlay(leftHandOverlay);
Overlays.deleteOverlay(rightHandOverlay);
}
Entities.deleteEntity(leftFist);
Entities.deleteEntity(rightFist);
removeTable();
toolBar.cleanup();
}
function onClick(event) {
if (event.deviceID != 0) {
return;
}
switch (Overlays.getOverlayAtPoint(event)) {
case tableButton:
if (!tableCreated) {
createTable();
tableCreated = true;
}
break;
case cleanupButton:
if (tableCreated) {
removeTable();
tableCreated = false;
}
break;
}
}
randFloat = function(low, high) {
return low + Math.random() * (high - low);
}
randInt = function(low, high) {
return Math.floor(randFloat(low, high));
}
function createTable() {
var tablePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(MyAvatar.orientation)));
tableEntities[0] = Entities.addEntity( {
type: "Model",
shapeType: 'box',
position: tablePosition,
dimensions: TABLE_DIMENSIONS,
rotation: MyAvatar.orientation,
// color: { red: 102, green: 51, blue: 0 },
modelURL: HIFI_PUBLIC_BUCKET + 'eric/models/woodFloor.fbx',
collisionSoundURL: "http://public.highfidelity.io/sounds/dice/diceCollide.wav"
});
for (var i = 1; i < NUM_OBJECTS + 1; i++) {
var objectOffset = { x: TABLE_DIMENSIONS.x/2.0 * randFloat(-1, 1),
y: OBJECT_HEIGHT_OFFSET,
z: TABLE_DIMENSIONS.z/2.0 * randFloat(-1, 1) };
var objectPosition = Vec3.sum(tablePosition, Vec3.multiplyQbyV(MyAvatar.orientation, objectOffset));
var type;
var randType = randInt(0, 3);
switch (randType) {
case 0:
type = "Box";
break;
case 1:
type = "Sphere";
// break;
case 2:
type = "Model";
break;
}
tableEntities[i] = Entities.addEntity( {
type: type,
position: objectPosition,
velocity: { x: randFloat(-VELOCITY_MAG, VELOCITY_MAG),
y: randFloat(-VELOCITY_MAG, VELOCITY_MAG),
z: randFloat(-VELOCITY_MAG, VELOCITY_MAG) },
dimensions: { x: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE),
y: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE),
z: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE) },
rotation: MyAvatar.orientation,
gravity: GRAVITY,
damping: 0.1,
restitution: 0.01,
density: 0.5,
collisionsWillMove: true,
color: { red: randInt(0, 255), green: randInt(0, 255), blue: randInt(0, 255) },
// collisionSoundURL: COLLISION_SOUNDS[randInt(0, COLLISION_SOUNDS.length)]
});
if (type == "Model") {
var randModel = randInt(0, MODELS.length);
Entities.editEntity(tableEntities[i], {
shapeType: "box",
modelURL: MODELS[randModel].modelURL
});
entitiesToResize.push(tableEntities[i]);
}
}
}
function removeTable() {
RESIZE_TIMER = 0.0;
for (var i = 0; i < tableEntities.length; i++) {
Entities.deleteEntity(tableEntities[i]);
}
}
Script.scriptEnding.connect(cleanUp);
Script.update.connect(update);
Controller.mousePressEvent.connect(onClick);

View file

@ -574,8 +574,14 @@ function findClickedEntity(event) {
}
var mouseHasMovedSincePress = false;
var mousePressStartTime = 0;
var mousePressStartPosition = { x: 0, y: 0 };
var mouseDown = false;
function mousePressEvent(event) {
mouseDown = true;
mousePressStartPosition = { x: event.x, y: event.y };
mousePressStartTime = Date.now();
mouseHasMovedSincePress = false;
mouseCapturedByTool = false;
@ -595,6 +601,8 @@ var highlightedEntityID = null;
var mouseCapturedByTool = false;
var lastMousePosition = null;
var idleMouseTimerId = null;
var CLICK_TIME_THRESHOLD = 500 * 1000; // 500 ms
var CLICK_MOVE_DISTANCE_THRESHOLD = 8;
var IDLE_MOUSE_TIMEOUT = 200;
var DEFAULT_ENTITY_DRAG_DROP_DISTANCE = 2.0;
@ -603,7 +611,21 @@ function mouseMoveEventBuffered(event) {
lastMouseMoveEvent = event;
}
function mouseMove(event) {
mouseHasMovedSincePress = true;
if (mouseDown && !mouseHasMovedSincePress) {
var timeSincePressMicro = Date.now() - mousePressStartTime;
var dX = mousePressStartPosition.x - event.x;
var dY = mousePressStartPosition.y - event.y;
var sqDist = (dX * dX) + (dY * dY);
// If less than CLICK_TIME_THRESHOLD has passed since the mouse click AND the mouse has moved
// less than CLICK_MOVE_DISTANCE_THRESHOLD distance, then don't register this as a mouse move
// yet. The goal is to provide mouse clicks that are more lenient to small movements.
if (timeSincePressMicro < CLICK_TIME_THRESHOLD && sqDist < CLICK_MOVE_DISTANCE_THRESHOLD) {
return;
}
mouseHasMovedSincePress = true;
}
if (placingEntityID) {
var pickRay = Camera.computePickRay(event.x, event.y);
@ -670,6 +692,8 @@ function highlightEntityUnderCursor(position, accurateRay) {
function mouseReleaseEvent(event) {
mouseDown = false;
if (lastMouseMoveEvent) {
mouseMove(lastMouseMoveEvent);
lastMouseMoveEvent = null;

View file

@ -178,7 +178,7 @@
modelProperties.sittingPoints[seatIndex].rotation);
this.scale = MyAvatar.scale / 3;
this.sphere = Overlays.addOverlay("billboard", {
this.sphere = Overlays.addOverlay("image3d", {
subImage: { x: 0, y: buttonHeight, width: buttonWidth, height: buttonHeight},
url: buttonImageUrl,
position: this.position,

View file

@ -252,7 +252,7 @@ function SpriteBillboard(sprite_properties, overlay) {
}
var christmastree_loader = null;
christmastree_loader = new OverlayPreloader("billboard",
christmastree_loader = new OverlayPreloader("image3d",
{url: CHRISTMAS_TREE_SPRITES_URL, alpha: 0}, function() {
for (var i = 0; i < NUM_OF_TREES; i++) {
var clonedOverlay = Overlays.cloneOverlay(christmastree_loader.overlay);
@ -269,7 +269,7 @@ christmastree_loader = new OverlayPreloader("billboard",
);
var santa_loader = null;
santa_loader = new OverlayPreloader("billboard",
santa_loader = new OverlayPreloader("image3d",
{url: SANTA_SPRITES_URL, alpha: 0}, function() {
for (var i = 0; i < NUM_OF_SANTAS; i++) {
var clonedOverlay = Overlays.cloneOverlay(santa_loader.overlay);

View file

@ -19,7 +19,9 @@ Script.include('../utilities/tools/vector.js');
var URL = "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/";
SatelliteGame = function() {
SatelliteCreator = function() {
print("initializing satellite game");
var MAX_RANGE = 50.0;
var Y_AXIS = {
x: 0,
@ -36,6 +38,10 @@ SatelliteGame = function() {
var ZONE_DIM = 100.0;
var LIGHT_INTENSITY = 1.5;
var center, distance;
var earth;
Earth = function(position, size) {
this.earth = Entities.addEntity({
type: "Model",
@ -68,7 +74,7 @@ SatelliteGame = function() {
this.clouds = Entities.addEntity({
type: "Model",
shapeType: 'sphere',
modelURL: URL + "clouds.fbx?i=2",
modelURL: URL + "clouds.fbx",
position: position,
dimensions: {
x: size + CLOUDS_OFFSET,
@ -101,16 +107,42 @@ SatelliteGame = function() {
});
this.cleanup = function() {
print('cleaning up earth models');
Entities.deleteEntity(this.clouds);
Entities.deleteEntity(this.earth);
Entities.deleteEntity(this.zone);
}
}
// Create earth model
var center = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
var distance = Vec3.length(Vec3.subtract(center, Camera.getPosition()));
var earth = new Earth(center, EARTH_SIZE);
this.init = function() {
if (this.isActive) {
this.quitGame();
}
var confirmed = Window.confirm("Start satellite game?");
if (!confirmed) {
return false;
}
this.isActive = true;
MyAvatar.position = {
x: 1000,
y: 1000,
z: 1000
};
Camera.setPosition({
x: 1000,
y: 1000,
z: 1000
});
// Create earth model
center = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
distance = Vec3.length(Vec3.subtract(center, Camera.getPosition()));
earth = new Earth(center, EARTH_SIZE);
return true;
};
var satellites = [];
var SATELLITE_SIZE = 2.0;
@ -257,12 +289,16 @@ SatelliteGame = function() {
}
}
this.endGame = function() {
this.quitGame = function() {
print("ending satellite game");
this.isActive = false;
for (var i = 0; i < satellites.length; i++) {
Entities.deleteEntity(satellites[i].satellite);
satellites[i].arrow.cleanup();
}
earth.cleanup();
}
@ -283,6 +319,7 @@ SatelliteGame = function() {
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Script.update.connect(update);
Script.scriptEnding.connect(this.endGame);
Script.scriptEnding.connect(this.quitGame);
}
}

View file

@ -13,11 +13,15 @@
/*jslint vars: true*/
var Script, Entities, MyAvatar, Window, Overlays, Controller, Vec3, Quat, print, ToolBar, Settings; // Referenced globals provided by High Fidelity.
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");
var zombieGameScriptURL = "https://hifi-public.s3.amazonaws.com/eric/scripts/zombieFight.js?v2";
// var zombieGameScriptURL = "zombieFight.js";
Script.include(zombieGameScriptURL);
var zombieFight;
var zombieFight = new ZombieFight();
var hand = "right";
var zombieFight;
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
@ -78,7 +82,7 @@ var cleanupButton = toolBar.addOverlay("image", {
var flasher;
var leftTriggerButton = 0;
var leftHandClick = 14;
var leftTriggerValue = 0;
var prevLeftTriggerValue = 0;
@ -88,7 +92,7 @@ var RIGHT = 1;
var leftPalm = 2 * LEFT;
var rightPalm = 2 * RIGHT;
var rightTriggerButton = 1;
var rightHandClick = 15;
var prevRightTriggerValue = 0;
var rightTriggerValue = 0;
var TRIGGER_THRESHOLD = 0.2;
@ -357,8 +361,8 @@ function update() {
}
function updateControllerState() {
rightTriggerValue = Controller.getTriggerValue(rightTriggerButton);
leftTriggerValue = Controller.getTriggerValue(leftTriggerButton);
rightTriggerValue = Controller.getActionValue(rightHandClick);
leftTriggerValue = Controller.getActionValue(leftHandClick);
if (rightTriggerValue > TRIGGER_THRESHOLD && !swordHeld) {
grabSword("right")
@ -470,4 +474,4 @@ function onClick(event) {
Script.scriptEnding.connect(cleanUp);
Script.update.connect(update);
Controller.mousePressEvent.connect(onClick);
Controller.mousePressEvent.connect(onClick);

View file

@ -0,0 +1,225 @@
//
// hydraPaint.js
// examples
//
// Created by Eric Levin on 5/14/15.
// Copyright 2014 High Fidelity, Inc.
//
// This script allows you to paint with the hydra!
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var LEFT = 0;
var RIGHT = 1;
var LASER_WIDTH = 3;
var LASER_COLOR = {
red: 50,
green: 150,
blue: 200
};
var TRIGGER_THRESHOLD = .1;
var MAX_POINTS_PER_LINE = 40;
var LIFETIME = 6000;
var DRAWING_DEPTH = 1;
var LINE_DIMENSIONS = 20;
var MIN_POINT_DISTANCE = 0.01;
var MIN_BRUSH_RADIUS = 0.08;
var MAX_BRUSH_RADIUS = 0.1;
var RIGHT_BUTTON_1 = 7
var RIGHT_BUTTON_2 = 8
var RIGHT_BUTTON_3 = 9;
var RIGHT_BUTTON_4 = 10
var LEFT_BUTTON_1 = 1;
var LEFT_BUTTON_2 = 2;
var LEFT_BUTTON_3 = 3;
var LEFT_BUTTON_4 = 4;
var colorPalette = [{
red: 250,
green: 0,
blue: 0
}, {
red: 214,
green: 91,
blue: 67
}, {
red: 192,
green: 41,
blue: 66
}, {
red: 84,
green: 36,
blue: 55
}, {
red: 83,
green: 119,
blue: 122
}];
var MIN_STROKE_WIDTH = 0.002;
var MAX_STROKE_WIDTH = 0.05;
function controller(side, cycleColorButton) {
this.triggerHeld = false;
this.triggerThreshold = 0.9;
this.side = side;
this.palm = 2 * side;
this.tip = 2 * side + 1;
this.trigger = side;
this.cycleColorButton = cycleColorButton;
this.points = [];
this.normals = [];
this.strokeWidths = [];
this.currentColorIndex = 0;
this.currentColor = colorPalette[this.currentColorIndex];
var self = this;
this.brush = Entities.addEntity({
type: 'Sphere',
position: {
x: 0,
y: 0,
z: 0
},
color: this.currentColor,
dimensions: {
x: MIN_BRUSH_RADIUS,
y: MIN_BRUSH_RADIUS,
z: MIN_BRUSH_RADIUS
}
});
this.cycleColor = function() {
this.currentColor = colorPalette[++this.currentColorIndex];
if (this.currentColorIndex === colorPalette.length - 1) {
this.currentColorIndex = -1;
}
}
this.newLine = function(position) {
this.linePosition = position;
this.line = Entities.addEntity({
position: position,
type: "PolyLine",
color: this.currentColor,
dimensions: {
x: LINE_DIMENSIONS,
y: LINE_DIMENSIONS,
z: LINE_DIMENSIONS
},
lifetime: LIFETIME
});
this.points = [];
this.normals = []
this.strokeWidths = [];
}
this.update = function(deltaTime) {
this.updateControllerState();
var newBrushPosOffset = Vec3.multiply(Vec3.normalize(Vec3.subtract(this.tipPosition, this.palmPosition)), DRAWING_DEPTH);
var newBrushPos = Vec3.sum(this.palmPosition, newBrushPosOffset);
var brushRadius = map(this.triggerValue, TRIGGER_THRESHOLD, 1, MIN_BRUSH_RADIUS, MAX_BRUSH_RADIUS)
Entities.editEntity(this.brush, {
position: newBrushPos,
color: this.currentColor,
dimensions: {
x: brushRadius,
y: brushRadius,
z: brushRadius
}
});
if (this.triggerValue > TRIGGER_THRESHOLD && !this.drawing) {
this.newLine(newBrushPos);
this.drawing = true;
} else if (this.drawing && this.triggerValue < TRIGGER_THRESHOLD) {
this.drawing = false;
}
if (this.drawing && this.points.length < MAX_POINTS_PER_LINE) {
var localPoint = Vec3.subtract(newBrushPos, this.linePosition);
if (Vec3.distance(localPoint, this.points[this.points.length - 1]) < MIN_POINT_DISTANCE) {
//Need a minimum distance to avoid binormal NANs
return;
}
this.points.push(localPoint);
var normal = computeNormal(newBrushPos, Camera.getPosition());
this.normals.push(normal);
var strokeWidth = map(this.triggerValue, TRIGGER_THRESHOLD, 1, MIN_STROKE_WIDTH, MAX_STROKE_WIDTH);
this.strokeWidths.push(strokeWidth);
Entities.editEntity(this.line, {
linePoints: this.points,
normals: this.normals,
strokeWidths: this.strokeWidths,
color: this.currentColor
});
}
}
this.updateControllerState = function() {
this.cycleColorButtonPressed = Controller.isButtonPressed(this.cycleColorButton);
this.palmPosition = Controller.getSpatialControlPosition(this.palm);
this.tipPosition = Controller.getSpatialControlPosition(this.tip);
this.palmNormal = Controller.getSpatialControlNormal(this.palm);
this.triggerValue = Controller.getTriggerValue(this.trigger);
if (this.prevCycleColorButtonPressed === true && this.cycleColorButtonPressed === false) {
this.cycleColor();
Entities.editEntity(this.brush, {
// color: this.currentColor
});
}
this.prevCycleColorButtonPressed = this.cycleColorButtonPressed;
}
this.cleanup = function() {
Entities.deleteEntity(self.brush);
}
}
function computeNormal(p1, p2) {
return Vec3.normalize(Vec3.subtract(p2, p1));
}
function update(deltaTime) {
leftController.update(deltaTime);
rightController.update(deltaTime);
}
function scriptEnding() {
leftController.cleanup();
rightController.cleanup();
}
function vectorIsZero(v) {
return v.x === 0 && v.y === 0 && v.z === 0;
}
var rightController = new controller(RIGHT, RIGHT_BUTTON_4);
var leftController = new controller(LEFT, LEFT_BUTTON_4);
Script.update.connect(update);
Script.scriptEnding.connect(scriptEnding);
function map(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}

View file

@ -0,0 +1,194 @@
//
// mousePaint.js
// examples
//
// Created by Eric Levin on 6/4/15.
// Copyright 2014 High Fidelity, Inc.
//
// This script allows you to paint with the hydra or mouse!
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var LINE_DIMENSIONS = 10;
var LIFETIME = 6000;
var EVENT_CHANGE_THRESHOLD = 200;
var LINE_WIDTH = .07;
var MAX_POINTS_PER_LINE = 40;
var points = [];
var normals = [];
var deletedLines = [];
var strokeWidths = [];
var count = 0;
var prevEvent = {x: 0, y: 0};
var eventChange;
var MIN_POINT_DISTANCE = .01;
var colorPalette = [{
red: 250,
green: 0,
blue: 0
}, {
red: 214,
green: 91,
blue: 67
}, {
red: 192,
green: 41,
blue: 66
}, {
red: 84,
green: 36,
blue: 55
}, {
red: 83,
green: 119,
blue: 122
}];
var currentColorIndex = 0;
var currentColor = colorPalette[currentColorIndex];
function cycleColor() {
currentColor = colorPalette[++currentColorIndex];
if (currentColorIndex === colorPalette.length - 1) {
currentColorIndex = -1;
}
}
MousePaint();
function MousePaint() {
var DRAWING_DISTANCE = 5;
var lines = [];
var isDrawing = false;
var line, linePosition;
var BRUSH_SIZE = .05;
var brush = Entities.addEntity({
type: 'Sphere',
position: {
x: 0,
y: 0,
z: 0
},
color: currentColor,
dimensions: {
x: BRUSH_SIZE,
y: BRUSH_SIZE,
z: BRUSH_SIZE
}
});
function newLine(position) {
linePosition = position;
line = Entities.addEntity({
position: position,
type: "PolyLine",
color: currentColor,
dimensions: {
x: LINE_DIMENSIONS,
y: LINE_DIMENSIONS,
z: LINE_DIMENSIONS
},
linePoints: [],
lifetime: LIFETIME
});
points = [];
normals = []
strokeWidths = [];
lines.push(line);
}
function mouseMoveEvent(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
count++;
var worldPoint = computeWorldPoint(pickRay);
Entities.editEntity(brush, {
position: worldPoint
});
eventChange = Math.sqrt(Math.pow(event.x - prevEvent.x, 2) + Math.pow(event.y - prevEvent.y, 2));
localPoint = computeLocalPoint(worldPoint);
if (!isDrawing || points.length > MAX_POINTS_PER_LINE || eventChange > EVENT_CHANGE_THRESHOLD ||
Vec3.distance(points[points.length - 1], localPoint) < MIN_POINT_DISTANCE) {
return;
}
points.push(localPoint)
normals.push(computeNormal(worldPoint, pickRay.origin));
strokeWidths.push(LINE_WIDTH);
Entities.editEntity(line, {
strokeWidths: strokeWidths,
linePoints: points,
normals: normals,
});
prevEvent = event;
}
function computeNormal(p1, p2) {
return Vec3.normalize(Vec3.subtract(p2, p1));
}
function computeWorldPoint(pickRay) {
var addVector = Vec3.multiply(Vec3.normalize(pickRay.direction), DRAWING_DISTANCE);
return Vec3.sum(pickRay.origin, addVector);
}
function computeLocalPoint(worldPoint) {
var localPoint = Vec3.subtract(worldPoint, linePosition);
return localPoint;
}
function mousePressEvent(event) {
if (!event.isLeftButton) {
isDrawing = false;
return;
}
var pickRay = Camera.computePickRay(event.x, event.y);
prevEvent = {x: event.x, y:event.y};
var worldPoint = computeWorldPoint(pickRay);
newLine(worldPoint);
var localPoint = computeLocalPoint(worldPoint);
points.push(localPoint);
normals.push(computeNormal(worldPoint, pickRay.origin));
strokeWidths.push(0.07);
isDrawing = true;
}
function mouseReleaseEvent() {
isDrawing = false;
}
function keyPressEvent(event) {
if (event.text === "SPACE") {
cycleColor();
Entities.editEntity(brush, {
color: currentColor
});
}
}
function cleanup() {
lines.forEach(function(line) {
// Entities.deleteEntity(line);
});
Entities.deleteEntity(brush);
}
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Script.scriptEnding.connect(cleanup);
Controller.keyPressEvent.connect(keyPressEvent);
}

View file

@ -0,0 +1,515 @@
//
// widgets-example.js
// games
//
// Copyright 2015 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var ICONS_URL = 'https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/images/';
var panelX = 1250;
var panelY = 500;
var panelWidth = 50;
var panelHeight = 210;
Script.include('../libraries/uiwidgets.js');
UI.setDefaultVisibility(true);
var ICON_WIDTH = 40.0;
var ICON_HEIGHT = 40.0;
var ICON_COLOR = UI.rgba(45, 45, 45, 0.7);
var FOCUSED_COLOR = UI.rgba(250, 250, 250, 1.0);
var PANEL_BACKGROUND_COLOR = UI.rgba(120, 120, 120, 0.8);
var PANEL_PADDING = 7.0;
var PANEL_BORDER = 12.0;
var SUBPANEL_GAP = 1.0;
var icons = [];
// Add an icon image widget to `panel`, record it in `icons`, and return it.
function addImage(panel, iconId) {
    var properties = {
        'imageURL': ICONS_URL + iconId + '.svg',
        'width': ICON_WIDTH,
        'height': ICON_HEIGHT,
        'color': ICON_COLOR,
        'alpha': ICON_COLOR.a
    };
    var icon = panel.add(new UI.Image(properties));
    icons.push(icon);
    return icon;
}
var panels = [];
// Create a UI.WidgetStack with the shared panel defaults filled in for any
// properties the caller did not supply; track it in `panels` and return it.
function addPanel(properties) {
    var background = properties.background || {};
    if (!background.backgroundColor) {
        background.backgroundColor = PANEL_BACKGROUND_COLOR;
    }
    if (!background.backgroundAlpha) {
        background.backgroundAlpha = PANEL_BACKGROUND_COLOR.a;
    }
    properties.background = background;
    if (!properties.padding) {
        properties.padding = { x: PANEL_PADDING, y: PANEL_PADDING };
    }
    if (!properties.border) {
        properties.border = { x: PANEL_BORDER, y: PANEL_BORDER };
    }
    var panel = new UI.WidgetStack(properties);
    panels.push(panel);
    return panel;
}
// Make `panel` draggable. Drag events on `panel` move `target`, which
// defaults to the panel itself (pass another widget to move a whole group).
function makeDraggable(panel, target) {
    if (!target) {
        target = panel;
    }
    var dragStart = null;   // screen position where the drag began
    var initialPos = null;  // target position when the drag began
    panel.addAction('onDragBegin', function(event) {
        dragStart = {
            x: event.x,
            y: event.y
        };
        initialPos = {
            x: target.position.x,
            y: target.position.y
        };
    });
    panel.addAction('onDragUpdate', function(event) {
        target.setPosition(
            initialPos.x + event.x - dragStart.x,
            initialPos.y + event.y - dragStart.y
        );
        UI.updateLayout();
    });
    panel.addAction('onDragEnd', function() {
        // BUG FIX: previously `dragStart = dragEnd = null`, where `dragEnd`
        // was never declared (implicit global; ReferenceError under strict
        // mode). Reset only the state this closure owns.
        dragStart = null;
        initialPos = null;
    });
}
function setText(text) {
return function() {
demoLabel.setText(text);
UI.updateLayout();
};
}
// Render an object's enumerable properties as a brace-delimited,
// newline-separated "key: value" listing (a loose, JSON-like debug dump).
function join(obj) {
    var parts = [];
    for (var key in obj) {
        // NOTE(review): replace("\n", "\n") is carried over from the original
        // and is a no-op -- presumably indentation was once intended.
        parts.push(key + ": " + ("" + obj[key]).replace("\n", "\n"));
    }
    if (parts.length === 0) {
        return "{}";
    }
    return "{\n" + parts.join(",\n") + " }";
}
setText = undefined;
var tooltipWidget = new UI.Label({
text: "<tooltip>",
width: 500,
height: 20,
visible: false
});
// Show `text` in the shared tooltip label while the mouse hovers `widget`,
// positioned just to the right of the hovered widget.
function addTooltip(widget, text) {
    var show = function(event, hovered) {
        tooltipWidget.setVisible(true);
        tooltipWidget.setPosition(hovered.position.x + hovered.getWidth() + 20, hovered.position.y + 10);
        tooltipWidget.setText(text);
        UI.updateLayout();
    };
    var hide = function() {
        tooltipWidget.setVisible(false);
        UI.updateLayout();
    };
    widget.addAction('onMouseOver', show);
    widget.addAction('onMouseExit', hide);
}
// Main toolbar: a vertical stack of icon buttons with hover tooltips.
var mainPanel = addPanel({
    dir: '+y'
});
makeDraggable(mainPanel);
mainPanel.setPosition(1200, 250);
mainPanel.setVisible(true);
var systemViewButton = addImage(mainPanel, 'solarsystems');
var zoomButton = addImage(mainPanel, 'magnifier');
var satelliteButton = addImage(mainPanel, 'satellite');
var settingsButton = addImage(mainPanel, 'settings');
var stopButton = addImage(mainPanel, 'close');
addTooltip(systemViewButton, "system view");
addTooltip(zoomButton, "zoom");
// Typo fix: tooltip previously read "satelite view".
addTooltip(satelliteButton, "satellite view");
addTooltip(settingsButton, "settings");
addTooltip(stopButton, "exit");
var systemViewPanel = addPanel({
dir: '+x',
visible: false
});
var restartButton = addImage(systemViewPanel, 'refresh');
var pauseButton = addImage(systemViewPanel, 'playpause');
var rideButton = addImage(systemViewPanel, 'rocket');
// Camera-tween state: `tweening` while the ride-through animation runs,
// `tweeningPaused` when that animation is suspended.
var tweening, tweeningPaused;
Script.include('https://hifi-staff.s3.amazonaws.com/bridget/tween.js');
// Pause button: while the ride tween is running it pauses/resumes the tween;
// otherwise it pauses/resumes the simulation itself.
pauseButton.addAction('onClick', function() {
    if (tweening) {
        // Simplified from an if/else that assigned true/false explicitly.
        tweeningPaused = !tweeningPaused;
        return;
    }
    if (!paused) {
        pause();
    } else {
        resume();
    }
});
// Allow toggling pause with the spacebar.
function keyPressEvent(event) {
    if (event.text != "SPACE") {
        return;
    }
    if (paused) {
        resume();
    } else {
        pause();
    }
}
rideButton.addAction('onClick', function() {
if (!paused) {
pause();
}
if (tweening) {
tweening = false;
tweeningPaused = true;
restart();
return;
}
var confirmed = Window.confirm('Ride through the solar system?');
if (confirmed) {
init();
tweening = true;
tweeningPaused = false;
}
});
restartButton.addAction('onClick', function() {
restart();
tweening = false;
});
// Zoom subpanel: one labeled button per planet.
var zoomPanel = addPanel({
    dir: '+x',
    visible: false
});
var zoomButtons = [];
for (var i = 0; i < planets.length; ++i) {
    var label = zoomPanel.add(new UI.Label({
        text: planets[i].name,
        width: 80,
        height: 20
    }));
    zoomButtons.push(label);
}
// Lay out once after all buttons exist; the original re-ran UI.updateLayout()
// on every loop iteration, doing redundant layout work.
UI.updateLayout();
var zoomView = false;
zoomButtons.forEach(function(button, i) {
var planet = planets[i];
button.addAction('onClick', function() {
if (!planets[i].isZoomed) {
planet.zoom();
planet.isZoomed = true;
zoomView = true;
} else {
MyAvatar.position = startingPosition;
Camera.setPosition(cameraStart);
planet.isZoomed = false;
zoomView = false;
}
});
});
var settingsPanel = addPanel({
dir: '+y',
visible: false
});
// Build a "<label> [checkbox]" row inside `parent` and return the checkbox.
// The returned widget carries .label/.layout references and a setValue()
// alias for setChecked().
function addCheckbox(parent, label, labelWidth, enabled, onValueChanged) {
    var row = parent.add(new UI.WidgetStack({
        dir: '+x',
        visible: true,
        backgroundAlpha: 0.0
    }));
    // Renamed local from `label` to avoid shadowing the string parameter.
    var labelWidget = row.add(new UI.Label({
        text: label,
        width: labelWidth,
        height: 20,
        backgroundAlpha: 0.0
    }));
    var defaultColor = UI.rgb(10, 10, 10);
    var checkbox = row.add(new UI.Checkbox({
        width: 20,
        height: 20,
        padding: {
            x: 3,
            y: 3
        },
        backgroundColor: defaultColor,
        backgroundAlpha: 0.9,
        checked: enabled,
        onValueChanged: onValueChanged
    }));
    checkbox.label = labelWidget;
    checkbox.layout = row;
    checkbox.setValue = function(value) {
        checkbox.setChecked(value);
    };
    return checkbox;
}
// Build a "<label> <value readout> <slider>" row inside `parent`; returns the
// slider, wired so the readout tracks its value to two decimal places and a
// double-click resets it to `defaultValue`.
function addSlider(parent, label, labelWidth, defaultValue, min, max, valueChanged) {
    var row = parent.add(new UI.WidgetStack({
        dir: '+x',
        visible: true
    }));
    // Renamed local from `label` to avoid shadowing the string parameter.
    var labelWidget = row.add(new UI.Label({
        text: label,
        width: labelWidth,
        height: 27
    }));
    var display = row.add(new UI.Label({
        text: " ",
        width: 50,
        height: 27
    }));
    var slider = row.add(new UI.Slider({
        value: defaultValue,
        maxValue: max,
        minValue: min,
        width: 300,
        height: 20,
        backgroundColor: UI.rgb(10, 10, 10),
        backgroundAlpha: 1.0,
        slider: { // slider knob
            width: 30,
            height: 18,
            backgroundColor: UI.rgb(120, 120, 120),
            backgroundAlpha: 1.0
        }
    }));
    slider.addAction('onDoubleClick', function() {
        slider.setValue(defaultValue);
        UI.updateLayout();
    });
    // Format with toFixed(2), then strip trailing zeros via unary plus.
    var format = function(value) {
        return "" + (+value.toFixed(2));
    };
    display.setText(format(slider.getValue()));
    slider.onValueChanged = function(value) {
        valueChanged(value);
        display.setText(format(value));
        UI.updateLayout();
    };
    slider.label = labelWidget;
    slider.layout = row;
    return slider;
}
settingsPanel.showTrailsButton = addCheckbox(settingsPanel, "show trails", 120, trailsEnabled, function(value) {
trailsEnabled = value;
if (trailsEnabled) {
for (var i = 0; i < planets.length; ++i) {
planets[i].resetTrails();
}
//if trails are off and we've already created trails, remove existing trails
} else {
for (var i = 0; i < planets.length; ++i) {
planets[i].clearTrails();
}
}
});
var g_multiplier = 1.0;
settingsPanel.gravitySlider = addSlider(settingsPanel, "gravity scale ", 200, g_multiplier, 0.0, 5.0, function(value) {
g_multiplier = value;
GRAVITY = REFERENCE_GRAVITY * g_multiplier;
});
var period_multiplier = 1.0;
var last_alpha = period_multiplier;
settingsPanel.periodSlider = addSlider(settingsPanel, "orbital period scale ", 200, period_multiplier, 0.0, 3.0, function(value) {
period_multiplier = value;
changePeriod(period_multiplier);
});
// Rescale every planet's orbital period by ratio = last_alpha / alpha.
// Gravity is scaled by ratio^2 and each velocity by ratio -- presumably to
// keep the orbits self-consistent under the new period; confirm against the
// physics in the main planets script.
function changePeriod(alpha) {
    var ratio = last_alpha / alpha;
    GRAVITY = Math.pow(ratio, 2.0) * GRAVITY;
    for (var i = 0; i < planets.length; ++i) {
        planets[i].period = ratio * planets[i].period;
        planets[i].velocity = Vec3.multiply(ratio, planets[i].velocity);
        planets[i].resetTrails();
    }
    // Remember the slider value so the next call scales relative to this one.
    last_alpha = alpha;
}
var satelliteGame;
satelliteButton.addAction('onClick', function() {
if (satelliteGame && satelliteGame.isActive) {
MyAvatar.position = startingPosition;
satelliteGame.quitGame();
if (paused) {
resume();
}
} else {
pause();
satelliteGame = new SatelliteCreator();
satelliteGame.init();
}
});
var subpanels = [systemViewPanel, zoomPanel, settingsPanel];
// Hide every subpanel other than `panel` so only one is open at a time.
function hideSubpanelsExcept(panel) {
    subpanels.forEach(function(subpanel) {
        if (subpanel !== panel) {
            subpanel.setVisible(false);
        }
    });
}
// Toggle `panel` when `button` is clicked (closing sibling subpanels) and
// keep the panel anchored to the left edge of its button.
function attachPanel(panel, button) {
    var toggle = function() {
        hideSubpanelsExcept(panel);
        panel.setVisible(!panel.isVisible());
        UI.updateLayout();
    };
    button.addAction('onClick', toggle);
    UI.addAttachment(panel, button, function(target, rel) {
        target.setPosition(
            rel.position.x - (target.getWidth() + target.border.x + SUBPANEL_GAP),
            rel.position.y - target.border.y
        );
    });
}
attachPanel(systemViewPanel, systemViewButton);
attachPanel(zoomPanel, zoomButton);
attachPanel(settingsPanel, settingsButton);
// Highlight a widget while the mouse hovers it; restore the idle icon
// color on exit.
var addColorToggle = function(widget) {
    var focus = function() {
        widget.setColor(FOCUSED_COLOR);
    };
    var unfocus = function() {
        widget.setColor(ICON_COLOR);
    };
    widget.addAction('onMouseOver', focus);
    widget.addAction('onMouseExit', unfocus);
};
systemViewPanel.addAction('onMouseOver', function() {
hideSubpanelsExcept(systemViewPanel);
UI.updateLayout();
});
zoomButton.addAction('onClick', function() {
if (zoomView) {
restart();
}
hideSubpanelsExcept(zoomPanel);
UI.updateLayout();
});
UI.updateLayout();
stopButton.addAction('onClick', function() {
teardown();
Script.stop();
});
// Panel drag behavior
// (click + drag on border to drag)
// NOTE(review): this IIFE runs with `this` bound to the global object (sloppy
// mode), so startDrag/updateDrag/clearDrag become globals used by
// inputHandler below -- confirm strict mode is never enabled for this file.
(function() {
    var dragged = null;
    this.startDrag = function(dragAction) {
        dragged = dragAction;
    };
    this.updateDrag = function(event) {
        if (dragged) {
            print("Update drag");
            dragged.updateDrag(event);
        }
    };
    this.clearDrag = function(event) {
        // FIX: the original braceless `if` made only the print conditional
        // while `dragged = null` always ran. Brace both; net behavior is
        // unchanged (nulling an already-null reference was a no-op).
        if (dragged) {
            print("End drag");
            dragged = null;
        }
    };
})();
var buttons = icons;
buttons.map(addColorToggle);
panels.map(function(panel) {
makeDraggable(panel, mainPanel);
});
// Cleanup script resources
function teardown() {
UI.teardown();
if (satelliteGame) {
satelliteGame.quitGame();
}
};
UI.debug.setVisible(false);
// Route mouse events to both the drag helpers (globals installed by the
// IIFE above) and the UI library's own dispatch.
var inputHandler = {};
inputHandler.onMouseMove = function(event) {
    updateDrag(event);
    UI.handleMouseMove(event);
};
inputHandler.onMousePress = function(event) {
    UI.handleMousePress(event);
};
inputHandler.onMouseRelease = function(event) {
    clearDrag(event);
    UI.handleMouseRelease(event);
};
inputHandler.onMouseDoublePress = function(event) {
    UI.handleMouseDoublePress(event);
};
Controller.mousePressEvent.connect(inputHandler.onMousePress);
Controller.mouseMoveEvent.connect(inputHandler.onMouseMove);
Controller.mouseReleaseEvent.connect(inputHandler.onMouseRelease);
Controller.mouseDoublePressEvent.connect(inputHandler.onMouseDoublePress);
Controller.keyPressEvent.connect(keyPressEvent);
Script.scriptEnding.connect(teardown);

File diff suppressed because it is too large Load diff

View file

@ -1,5 +1,5 @@
//
// floatingUI.js
// overlayPanelExample.js
// examples/example/ui
//
// Created by Alexander Otavka
@ -18,19 +18,18 @@ var BG_IMAGE_URL = HIFI_PUBLIC_BUCKET + "images/card-bg.svg";
var RED_DOT_IMAGE_URL = HIFI_PUBLIC_BUCKET + "images/red-dot.svg";
var BLUE_SQUARE_IMAGE_URL = HIFI_PUBLIC_BUCKET + "images/blue-square.svg";
var mainPanel = new FloatingUIPanel({
offsetRotation: {
bind: "quat",
value: { w: 1, x: 0, y: 0, z: 0 }
},
offsetPosition: { x: 0, y: 0.4, z: 1 }
var mainPanel = new OverlayPanel({
anchorPositionBinding: { avatar: "MyAvatar" },
offsetPosition: { x: 0, y: 0.4, z: -1 },
isFacingAvatar: false
});
var bluePanel = mainPanel.addChild(new FloatingUIPanel ({
offsetPosition: { x: 0.1, y: 0.1, z: -0.2 }
var bluePanel = mainPanel.addChild(new OverlayPanel ({
offsetPosition: { x: 0.1, y: 0.1, z: 0.2 },
offsetScale: 0.5
}));
var mainPanelBackground = new BillboardOverlay({
var mainPanelBackground = new Image3DOverlay({
url: BG_IMAGE_URL,
dimensions: {
x: 0.5,
@ -42,20 +41,44 @@ var mainPanelBackground = new BillboardOverlay({
offsetPosition: {
x: 0,
y: 0,
z: 0.001
z: -0.001
}
});
var bluePanelBackground = mainPanelBackground.clone();
bluePanelBackground.dimensions = {
x: 0.3,
y: 0.3
};
mainPanel.addChild(mainPanelBackground);
bluePanel.addChild(bluePanelBackground);
var redDot = mainPanel.addChild(new BillboardOverlay({
var textWidth = .25;
var textHeight = .1;
var numberOfLines = 1;
var textMargin = 0.00625;
var lineHeight = (textHeight - (2 * textMargin)) / numberOfLines;
var text = mainPanel.addChild(new Text3DOverlay({
text: "TEXT",
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: 0.1,
y: -0.15,
z: 0.001
},
dimensions: { x: textWidth, y: textHeight },
backgroundColor: { red: 0, green: 0, blue: 0 },
color: { red: 255, green: 255, blue: 255 },
topMargin: textMargin,
leftMargin: textMargin,
bottomMargin: textMargin,
rightMargin: textMargin,
lineHeight: lineHeight,
alpha: 0.9,
backgroundAlpha: 0.9
}));
var redDot = mainPanel.addChild(new Image3DOverlay({
url: RED_DOT_IMAGE_URL,
dimensions: {
x: 0.1,
@ -71,7 +94,7 @@ var redDot = mainPanel.addChild(new BillboardOverlay({
}
}));
var redDot2 = mainPanel.addChild(new BillboardOverlay({
var redDot2 = mainPanel.addChild(new Image3DOverlay({
url: RED_DOT_IMAGE_URL,
dimensions: {
x: 0.1,
@ -87,46 +110,45 @@ var redDot2 = mainPanel.addChild(new BillboardOverlay({
}
}));
var blueSquare = bluePanel.addChild(new BillboardOverlay({
var blueSquare = bluePanel.addChild(new Image3DOverlay({
url: BLUE_SQUARE_IMAGE_URL,
dimensions: {
x: 0.1,
y: 0.1,
x: 0.15,
y: 0.15,
},
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: 0.055,
y: -0.055,
x: 0.09,
y: -0.09,
z: 0
}
}));
var blueSquare2 = bluePanel.addChild(new BillboardOverlay({
var blueSquare2 = bluePanel.addChild(new Image3DOverlay({
url: BLUE_SQUARE_IMAGE_URL,
dimensions: {
x: 0.1,
y: 0.1,
x: 0.15,
y: 0.15,
},
isFacingAvatar: false,
alpha: 1.0,
ignoreRayIntersection: false,
offsetPosition: {
x: 0.055,
y: 0.055,
x: 0.09,
y: 0.09,
z: 0
}
}));
var blueSquare3 = blueSquare2.clone();
blueSquare3.offsetPosition = {
x: -0.055,
y: 0.055,
x: -0.09,
y: 0.09,
z: 0
};
var mouseDown = {};
function onMouseDown(event) {
@ -136,26 +158,30 @@ function onMouseDown(event) {
if (event.isRightButton) {
mouseDown.pos = { x: event.x, y: event.y };
}
mouseDown.maxDistance = 0;
}
function onMouseMove(event) {
if (mouseDown.maxDistance !== undefined) {
var dist = Vec3.distance(mouseDown.pos, { x: event.x, y: event.y });
if (dist > mouseDown.maxDistance) {
mouseDown.maxDistance = dist;
}
}
}
function onMouseUp(event) {
if (event.isLeftButton) {
var overlay = OverlayManager.findAtPoint({ x: event.x, y: event.y });
if (overlay === mouseDown.overlay) {
if (overlay.attachedPanel === bluePanel) {
if (overlay && overlay === mouseDown.overlay) {
if (overlay.parentPanel === bluePanel) {
overlay.destroy();
} else if (overlay) {
var oldPos = overlay.offsetPosition;
var newPos = {
x: Number(oldPos.x),
y: Number(oldPos.y),
z: Number(oldPos.z) + 0.1
};
overlay.offsetPosition = newPos;
} else {
overlay.offsetPosition = Vec3.sum(overlay.offsetPosition, { x: 0, y: 0, z: -0.1 });
}
}
}
if (event.isRightButton && Vec3.distance(mouseDown.pos, { x: event.x, y: event.y }) < 5) {
if (event.isRightButton && mouseDown.maxDistance < 10) {
mainPanel.visible = !mainPanel.visible;
}
}
@ -165,5 +191,6 @@ function onScriptEnd() {
}
Controller.mousePressEvent.connect(onMouseDown);
Controller.mouseMoveEvent.connect(onMouseMove);
Controller.mouseReleaseEvent.connect(onMouseUp);
Script.scriptEnding.connect(onScriptEnd);
Script.scriptEnding.connect(onScriptEnd);

View file

@ -0,0 +1,439 @@
//
// widgets-example.js
// games
//
// Copyright 2015 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var paddingX = 8;
var paddingY = 8;
var buttonWidth = 30;
var buttonHeight = 30;
var ICONS_URL = 'https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/images/';
var panelX = 1250;
var panelY = 500;
var panelWidth = 50;
var panelHeight = 210;
// var mainPanel = new UIPanel(panelX, panelY, panelWidth, panelHeight);
// var systemViewButton = mainPanel.addImage('solarsystems');
// var zoomButton = mainPanel.addImage('magnifier');
// var satelliteButton = mainPanel.addImage('satellite');
// var settingsButton = mainPanel.addImage('settings');
// var stopButton = mainPanel.addImage('close');
//
// mainPanel.show();
//
// var systemViewPanel = new UIPanel(panelX - 120, panelY, 120, 40);
// var reverseButton = systemViewPanel.addImage('reverse');
// var pauseButton = systemViewPanel.addImage('playpause');
// var forwardButton = systemViewPanel.addImage('forward');
//
// var zoomPanel = new UIPanel(panelX - 60, panelY + buttonHeight + paddingY, 650, 50);
// for (var i = 0; i < planets.length; ++i) {
// zoomPanel.addText(planets[i].name);
// }
Script.include('../libraries/uiwidgets.js');
UI.setDefaultVisibility(true);
UI.setErrorHandler(function(err) {
teardown();
// print(err);
// Script.stop();
});
// Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) { return settings.mouseMoveEvent(event); });
// Controller.mousePressEvent.connect( function panelMousePressEvent(event) { return settings.mousePressEvent(event); });
// Controller.mouseDoublePressEvent.connect( function panelMouseDoublePressEvent(event) { return settings.mouseDoublePressEvent(event); });
// Controller.mouseReleaseEvent.connect(function(event) { return settings.mouseReleaseEvent(event); });
// Controller.keyPressEvent.connect(function(event) { return settings.keyPressEvent(event); });
// var ICON_WIDTH = 50.0;
// var ICON_HEIGHT = 50.0;
var ICON_WIDTH = 40.0;
var ICON_HEIGHT = 40.0;
var ICON_COLOR = UI.rgba(45, 45, 45, 0.7);
var FOCUSED_COLOR = UI.rgba(250, 250, 250, 1.0);
var PANEL_BACKGROUND_COLOR = UI.rgba(50, 50, 50, 0.7);
var PANEL_PADDING = 7.0;
var PANEL_BORDER = 12.0;
var SUBPANEL_GAP = 1.0;
var icons = [];
function addImage(panel, iconId) {
var icon = panel.add(new UI.Image({
'imageURL': ICONS_URL + iconId + '.svg',
'width': ICON_WIDTH,
'height': ICON_HEIGHT,
'color': ICON_COLOR,
'alpha': ICON_COLOR.a
}));
icons.push(icon);
return icon;
}
var panels = [];
function addPanel (properties) {
properties.background = properties.background || {};
properties.background.backgroundColor = properties.background.backgroundColor ||
PANEL_BACKGROUND_COLOR;
properties.background.backgroundAlpha = properties.background.backgroundAlpha ||
PANEL_BACKGROUND_COLOR.a;
properties.padding = properties.padding || { x: PANEL_PADDING, y: PANEL_PADDING };
properties.border = properties.border || { x: PANEL_BORDER, y: PANEL_BORDER };
var panel = new UI.WidgetStack(properties);
panels.push(panel);
return panel;
}
// Make `panel` draggable; drag events move `target` (defaults to the panel).
function makeDraggable (panel, target) {
    if (!target)
        target = panel;
    var dragStart = null;   // screen position where the drag began
    var initialPos = null;  // target position when the drag began
    panel.addAction('onDragBegin', function (event) {
        dragStart = { x: event.x, y: event.y };
        initialPos = { x: target.position.x, y: target.position.y };
    });
    panel.addAction('onDragUpdate', function (event) {
        target.setPosition(
            initialPos.x + event.x - dragStart.x,
            initialPos.y + event.y - dragStart.y
        );
        UI.updateLayout();
    });
    panel.addAction('onDragEnd', function () {
        // BUG FIX: was `dragStart = dragEnd = null` -- `dragEnd` was never
        // declared (implicit global; ReferenceError under strict mode).
        dragStart = null;
        initialPos = null;
    });
}
// var panelContainer = new UI.WidgetContainer();
// panelContainer.setPosition(500, 250);
// panelContainer.setVisible(true);
var demoPane = addPanel({ dir: '+y' });
var demoLabel = demoPane.add(new UI.Label({
text: "< no events >",
width: 400, height: 20
}));
var demoButton = demoPane.add(new UI.Box({
width: 200, height: 80,
text: "Button"
}));
function setText(text) {
return function () {
demoLabel.setText(text);
UI.updateLayout();
};
}
function addDebugActions(widget, msg, actions) {
actions.forEach(function(action) {
widget.addAction(action, setText(action + " " + msg + widget));
});
}
var debugEvents = [
'onMouseOver',
'onMouseExit',
'onMouseDown',
'onMouseUp',
'onDragBegin',
'onDragEnd',
'onDragUpdate'
];
addDebugActions(demoPane, "(container) ", debugEvents);
addDebugActions(demoButton, "(button) ", debugEvents);
addDebugActions(demoLabel, "(label) ", debugEvents);
// demoPane.addAction('onMouseOver', setText("onMouseOver " + demoPane));
// demoPane.addAction('onMouseExit', setText("onMouseExit " + demoPane));
// demoPane.addAction('onMouseDown', setText("onMouseDown " + demoPane));
// demoPane.addAction('onMouseUp', setText("onMouseUp " + demoPane));
makeDraggable(demoPane, demoPane);
demoPane.setPosition(600, 200);
// demoButton.addAction('onMouseOver', setText("onMouseOver " + demoButton));
// demoButton.addAction('onMouseExit', setText("onMouseExit " + demoButton));
// demoButton.addAction()
// var resizablePanel = new UI.Label({
// text: "Resizable panel",
// width: 200, height: 200,
// backgroundAlpha: 0.5
// });
// resizablePanel.setPosition(1100, 200);
var debugToggle = new UI.Box({
text: "debug", width: 150, height: 20
});
debugToggle.setPosition(200, 0);
debugToggle.addAction('onClick', function () {
UI.debug.setVisible(!UI.debug.isVisible());
});
// debugEvents.forEach(function (action) {
// resizablePanel.addAction(action, function (event, widget) {
// widget.setText(action + " " + widget);
// });
// })
// Debug helper: dump an object's enumerable properties as a brace-wrapped,
// newline-separated "key: value" string.
function join(obj) {
    var entries = [];
    for (var key in obj) {
        // NOTE(review): replace("\n", "\n") is a no-op kept from the
        // original; indentation of nested values was likely intended.
        entries.push(key + ": " + ("" + obj[key]).replace("\n", "\n"));
    }
    if (entries.length === 0) {
        return "{}";
    }
    return "{\n" + entries.join(",\n") + " }";
}
// resizablePanel.getOverlay().update({
// text: "" + join(resizablePanel.actions)
// });
setText = addDebugActions = undefined;
var tooltipWidget = new UI.Label({
text: "<tooltip>",
width: 500, height: 20,
visible: false
});
function addTooltip (widget, text) {
widget.addAction('onMouseOver', function (event, widget) {
tooltipWidget.setVisible(true);
tooltipWidget.setPosition(widget.position.x + widget.getWidth() + 20, widget.position.y);
tooltipWidget.setText(text);
UI.updateLayout();
});
widget.addAction('onMouseExit', function () {
tooltipWidget.setVisible(false);
UI.updateLayout();
});
}
var mainPanel = addPanel({ dir: '+y' });
mainPanel.setPosition(500, 250);
mainPanel.setVisible(true);
var systemViewButton = addImage(mainPanel, 'solarsystems');
var zoomButton = addImage(mainPanel, 'magnifier');
var satelliteButton = addImage(mainPanel, 'satellite');
var settingsButton = addImage(mainPanel, 'settings');
var stopButton = addImage(mainPanel, 'close');
addTooltip(systemViewButton, "system view");
addTooltip(zoomButton, "zoom");
addTooltip(satelliteButton, "satelite view");
addTooltip(settingsButton, "settings");
addTooltip(stopButton, "exit");
var systemViewPanel = addPanel({ dir: '+x', visible: false });
var reverseButton = addImage(systemViewPanel, 'reverse');
var pauseButton = addImage(systemViewPanel, 'playpause');
var forwardButton = addImage(systemViewPanel, 'forward');
var zoomPanel = addPanel({ dir: '+y', visible: true });
var label = new UI.Label({
text: "Foo",
width: 120,
height: 15,
color: UI.rgb(245, 290, 20),
alpha: 1.0,
backgroundColor: UI.rgb(10, 10, 10),
backgroundAlpha: 0.0
});
zoomPanel.add(label);
label.addAction('onMouseOver', function () {
label.setText("Bar");
UI.updateLayout();
});
label.addAction('onMouseExit', function () {
label.setText("Foo");
UI.updateLayout();
});
label.setText("Label id: " + label.id + ", parent id " + label.parent.id);
label.parent.addAction('onMouseOver', function () {
label.setText("on parent");
UI.updateLayout();
});
label.parent.addAction('onMouseExit', function () {
label.setText('exited parent');
UI.updateLayout();
});
var sliderLayout = zoomPanel.add(new UI.WidgetStack({
dir: '+x', visible: true, backgroundAlpha: 0.0
}));
var sliderLabel = sliderLayout.add(new UI.Label({
text: " ", width: 45, height: 20
}));
var slider = sliderLayout.add(new UI.Slider({
value: 10, maxValue: 100, minValue: 0,
width: 300, height: 20,
backgroundColor: UI.rgb(10, 10, 10),
backgroundAlpha: 1.0,
slider: { // slider knob
width: 30,
height: 18,
backgroundColor: UI.rgb(120, 120, 120),
backgroundAlpha: 1.0
}
}));
sliderLabel.setText("" + (+slider.getValue().toFixed(1)));
slider.onValueChanged = function (value) {
sliderLabel.setText("" + (+value.toFixed(1)));
UI.updateLayout();
}
var checkBoxLayout = zoomPanel.add(new UI.WidgetStack({
dir: '+x', visible: true, backgroundAlpha: 0.0
}));
// var padding = checkBoxLayout.add(new UI.Label({
// text: " ", width: 45, height: 20
// }));
var checkBoxLabel = checkBoxLayout.add(new UI.Label({
text: "set red", width: 60, height: 20,
backgroundAlpha: 0.0
}));
checkBoxLabel.setText("set red");
var defaultColor = UI.rgb(10, 10, 10);
var redColor = UI.rgb(210, 80, 80);
var checkbox = checkBoxLayout.add(new UI.Checkbox({
width: 20, height: 20, padding: { x: 3, y: 3 },
backgroundColor: defaultColor,
backgroundAlpha: 0.9,
checked: false,
onValueChanged: function (red) {
zoomPanel.getOverlay().update({
// backgroundAlpha: 0.1,
backgroundColor: red ? redColor : defaultColor
});
}
}));
addImage(zoomPanel, 'reverse');
UI.updateLayout();
var subpanels = [ systemViewPanel, zoomPanel ];
function hideSubpanelsExcept (panel) {
subpanels.forEach(function (x) {
if (x != panel) {
x.setVisible(false);
}
});
}
function attachPanel (panel, button) {
button.addAction('onClick', function () {
hideSubpanelsExcept(panel);
panel.setVisible(!panel.isVisible());
UI.updateLayout();
})
UI.addAttachment(panel, button, function (target, rel) {
target.setPosition(
rel.position.x - (target.getWidth() + target.border.x + SUBPANEL_GAP),
rel.position.y - target.border.y
);
});
}
attachPanel(systemViewPanel, systemViewButton);
attachPanel(zoomPanel, zoomButton);
var addColorToggle = function (widget) {
widget.addAction('onMouseOver', function () {
widget.setColor(FOCUSED_COLOR);
});
widget.addAction('onMouseExit', function () {
widget.setColor(ICON_COLOR);
});
}
reverseButton.addAction('onClick', function() {});
// BUG FIX: these handlers called hideSubpanels(), which is not defined
// anywhere in this file; the defined helper is hideSubpanelsExcept(panel).
// Pass the panel that should remain open, matching attachPanel's pattern.
systemViewPanel.addAction('onMouseOver', function() {
    hideSubpanelsExcept(systemViewPanel);
    UI.updateLayout();
});
zoomButton.addAction('onClick', function() {
    hideSubpanelsExcept(zoomPanel);
    UI.updateLayout();
});
UI.updateLayout();
stopButton.addAction('onClick', function() {
// Script.stop();
teardown();
});
// Panel drag behavior
// (click + drag on border to drag)
// NOTE(review): invoked with `this` as the global object (sloppy mode), so
// startDrag/updateDrag/clearDrag become globals used by inputHandler below.
(function () {
    var dragged = null;
    this.startDrag = function (dragAction) {
        dragged = dragAction;
    };
    this.updateDrag = function (event) {
        if (dragged) {
            print("Update drag");
            dragged.updateDrag(event);
        }
    };
    this.clearDrag = function (event) {
        // FIX: braceless `if` previously made only the print conditional
        // while `dragged = null` always ran; brace both (behavior unchanged
        // -- nulling an already-null reference was a no-op).
        if (dragged) {
            print("End drag");
            dragged = null;
        }
    };
})();
var buttons = icons;
buttons.map(addColorToggle);
panels.map(function (panel) { makeDraggable(panel, mainPanel); });
// Cleanup script resources
function teardown() {
UI.teardown();
// etc...
};
var inputHandler = {
onMouseMove: function (event) {
updateDrag(event);
UI.handleMouseMove(event);
},
onMousePress: function (event) {
UI.handleMousePress(event);
},
onMouseRelease: function (event) {
clearDrag(event);
UI.handleMouseRelease(event);
}
};
Controller.mousePressEvent.connect(inputHandler.onMousePress);
Controller.mouseMoveEvent.connect(inputHandler.onMouseMove);
Controller.mouseReleaseEvent.connect(inputHandler.onMouseRelease);
Script.scriptEnding.connect(teardown);

251
examples/fireworks.js Normal file
View file

@ -0,0 +1,251 @@
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var fireSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/GUN-SHOT2.raw");
var audioOptions = {
volume: 0.9,
position: Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()))
};
var DISTANCE_FROM_CAMERA = 7.0;
var bluePalette = [{
red: 0,
green: 206,
blue: 209
}, {
red: 173,
green: 216,
blue: 230
}, {
red: 0,
green: 191,
blue: 255
}];
var greenPalette = [{
red: 152,
green: 251,
blue: 152
}, {
red: 127,
green: 255,
blue: 0
}, {
red: 50,
green: 205,
blue: 50
}];
var redPalette = [{
red: 255,
green: 20,
blue: 147
}, {
red: 255,
green: 69,
blue: 0
}, {
red: 255,
green: 90,
blue: 120
}];
var COLOR_RED = {red: 255, green: 0, blue: 0 };
var COLOR_GREEN = {red: 0, green: 255, blue: 0};
var COLOR_BLUE = {red: 0, green: 0, blue: 255};
var iconsX = 700;
var iconsY = 660;
var ICON_SIZE = 30;
var redIcon = Overlays.addOverlay("text", {
backgroundColor: COLOR_RED,
x: iconsX,
y: iconsY,
width: ICON_SIZE,
height: ICON_SIZE,
alpha: 0.0,
backgroundAlpha: 1.0,
visible: true
});
var greenIcon = Overlays.addOverlay("text", {
backgroundColor: COLOR_GREEN,
x: iconsX + 50,
y: iconsY,
width: ICON_SIZE,
height: ICON_SIZE,
alpha: 0.0,
backgroundAlpha: 1.0,
visible: true
});
var blueIcon = Overlays.addOverlay("text", {
backgroundColor: COLOR_BLUE,
x: iconsX + 100,
y: iconsY,
width: ICON_SIZE,
height: ICON_SIZE,
alpha: 0.0,
backgroundAlpha: 1.0,
visible: true
});
var NUM_BURSTS = 11;
var SPEED = 6.0;
var rockets = [];
// A firework rocket: a small sphere entity launched mostly upward that
// explodes into particle bursts after a randomized frame timeout.
// `point` -- world-space launch position.
// `colorPalette` -- array of {red,green,blue} colors for the bursts.
Rocket = function(point, colorPalette) {
    // FIX: the original comment promised a blue-palette default but the code
    // never applied one; make the documented fallback real (backward
    // compatible -- existing callers always pass a palette).
    this.colors = colorPalette || bluePalette;
    this.point = point;
    this.bursts = [];
    this.burst = false;
    // Randomized particle parameters so each firework looks different.
    this.emitRate = randInt(80, 120);
    this.emitStrength = randInt(5.0, 7.0);
    this.rocket = Entities.addEntity({
        type: "Sphere",
        position: this.point,
        dimensions: {
            x: 0.07,
            y: 0.07,
            z: 0.07
        },
        color: {
            red: 240,
            green: 240,
            blue: 240
        }
    });
    this.animationSettings = JSON.stringify({
        fps: 40,
        frameIndex: 0,
        running: true,
        firstFrame: 0,
        lastFrame: 20,
        loop: false
    });
    // Launch mostly upward with a random horizontal lean.
    this.direction = {
        x: randFloat(-0.4, 0.4),
        y: 1.0,
        z: 0.0
    }
    this.time = 0.0;
    // Frames until the rocket explodes (see Rocket.prototype.update).
    this.timeout = randInt(15, 40);
};
// Advance the rocket one tick: keep it moving along its launch direction and
// explode at its current position once the frame counter passes the timeout.
// NOTE(review): `deltaTime` is accepted but unused -- `this.time` counts
// frames, not seconds, and the driver loop invokes update() with no argument.
Rocket.prototype.update = function(deltaTime) {
    this.time++;
    Entities.editEntity(this.rocket, {
        velocity: Vec3.multiply(SPEED, this.direction)
    });
    var position = Entities.getEntityProperties(this.rocket).position;
    if (this.time > this.timeout) {
        this.explode(position);
        return;
    }
};
// Detonate: play the bang, freeze the shell, spawn NUM_BURSTS particle
// bursts at `position` colored from this.colors, mark the rocket as burst,
// and delete the shell entity.
Rocket.prototype.explode = function(position) {
    Audio.playSound(fireSound, audioOptions);
    // Stop the shell before replacing it with particles.
    Entities.editEntity(this.rocket, {
        velocity: {
            x: 0,
            y: 0,
            z: 0
        }
    });
    var colorIndex = 0;
    for (var i = 0; i < NUM_BURSTS; ++i) {
        var color = this.colors[colorIndex];
        print(JSON.stringify(color));
        this.bursts.push(Entities.addEntity({
            type: "ParticleEffect",
            animationSettings: this.animationSettings,
            position: position,
            textures: 'https://raw.githubusercontent.com/ericrius1/SantasLair/santa/assets/smokeparticle.png',
            emitRate: this.emitRate,
            emitStrength: this.emitStrength,
            // The (-1)^i factor alternates bursts left/right of vertical.
            emitDirection: {
                x: Math.pow(-1, i) * randFloat(0.0, 1.4),
                y: 1.0,
                z: 0.0
            },
            color: color,
            lifespan: 1.0,
            visible: true,
            locked: false
        }));
        // Walk through the palette, clamping to its last color once spent.
        if (colorIndex < this.colors.length - 1) {
            colorIndex++;
        }
    }
    this.burst = true;
    Entities.deleteEntity(this.rocket);
};
//var lastLoudness;
var LOUDNESS_RADIUS_RATIO = 10;
// Per-frame driver: advance every rocket that has not yet exploded.
function update(deltaTime) {
    for (var i = 0; i < rockets.length; i++) {
        if (!rockets[i].burst) {
            // Pass deltaTime through for signature consistency with
            // Rocket.prototype.update (it was previously called with no
            // argument even though the method declares the parameter).
            rockets[i].update(deltaTime);
        }
    }
}
// Uniform random float in the half-open range [min, max).
function randFloat(min, max) {
    var span = max - min;
    return min + Math.random() * span;
}
// Uniform random integer in the half-open range [min, max) -- note `max`
// itself is never returned.
function randInt(min, max) {
    var span = max - min;
    return min + Math.floor(Math.random() * span);
}
// Project the click's pick ray DISTANCE_FROM_CAMERA units into the scene,
// measured from the camera position.
function computeWorldPoint(event) {
    var pickRay = Camera.computePickRay(event.x, event.y);
    var direction = Vec3.normalize(pickRay.direction);
    var offset = Vec3.multiply(direction, DISTANCE_FROM_CAMERA);
    return Vec3.sum(Camera.getPosition(), offset);
}
// Launch a rocket colored by whichever palette icon was clicked; clicks
// anywhere else do nothing.
function mousePressEvent(event) {
    var clickedOverlay = Overlays.getOverlayAtPoint({
        x: event.x,
        y: event.y
    });
    var palette = null;
    if (clickedOverlay === redIcon) {
        palette = redPalette;
    } else if (clickedOverlay === greenIcon) {
        palette = greenPalette;
    } else if (clickedOverlay === blueIcon) {
        palette = bluePalette;
    }
    if (palette) {
        rockets.push(new Rocket(computeWorldPoint(event), palette));
    }
}
// Remove every overlay and entity this script created.
function cleanup() {
    Overlays.deleteOverlay(redIcon);
    Overlays.deleteOverlay(greenIcon);
    Overlays.deleteOverlay(blueIcon);
    for (var i = 0; i < rockets.length; ++i) {
        Entities.deleteEntity(rockets[i].rocket);
        // FIX: iterate the bursts that actually exist. A rocket that never
        // exploded has an empty bursts array, and the old fixed NUM_BURSTS
        // loop passed `undefined` to deleteEntity for those slots.
        for (var j = 0; j < rockets[i].bursts.length; ++j) {
            Entities.deleteEntity(rockets[i].bursts[j]);
        }
    }
}
Script.update.connect(update);
Script.scriptEnding.connect(cleanup);
Controller.mousePressEvent.connect(mousePressEvent);

View file

@ -55,8 +55,8 @@ var warpLine = Overlays.addOverlay("line3d", {
var velocity = { x: 0, y: 0, z: 0 };
var VERY_LONG_TIME = 1000000.0;
var active = Menu.isOptionChecked("Enable VR Mode");
var prevVRMode = Menu.isOptionChecked("Enable VR Mode");
var active = HMD.active;
var prevVRMode = HMD.active;
var hmdControls = (function () {
@ -121,28 +121,28 @@ var hmdControls = (function () {
velocity = Vec3.sum(velocity, direction);
break;
case findAction("YAW_LEFT"):
if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
if (yawTimer < 0.0 && HMD.active) {
yawChange = yawChange + (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
yawTimer = CAMERA_UPDATE_TIME;
} else if (!Menu.isOptionChecked("Enable VR Mode")) {
} else if (!HMD.active) {
yawChange = yawChange + (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
}
break;
case findAction("YAW_RIGHT"):
if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
if (yawTimer < 0.0 && HMD.active) {
yawChange = yawChange - (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
yawTimer = CAMERA_UPDATE_TIME;
} else if (!Menu.isOptionChecked("Enable VR Mode")) {
} else if (!HMD.active) {
yawChange = yawChange - (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
}
break;
case findAction("PITCH_DOWN"):
if (!Menu.isOptionChecked("Enable VR Mode")) {
if (!HMD.active) {
pitchChange = pitchChange - (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
}
break;
case findAction("PITCH_UP"):
if (!Menu.isOptionChecked("Enable VR Mode")) {
if (!HMD.active) {
pitchChange = pitchChange + (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
}
break;
@ -175,9 +175,9 @@ var hmdControls = (function () {
}
function update(dt) {
if (prevVRMode != Menu.isOptionChecked("Enable VR Mode")) {
active = Menu.isOptionChecked("Enable VR Mode");
prevVRMode = Menu.isOptionChecked("Enable VR Mode");
if (prevVRMode != HMD.active) {
active = HMD.active;
prevVRMode = HMD.active;
}
if (yawTimer >= 0.0) {

View file

@ -362,6 +362,9 @@
var elVoxelVolumeSizeY = document.getElementById("property-voxel-volume-size-y");
var elVoxelVolumeSizeZ = document.getElementById("property-voxel-volume-size-z");
var elVoxelSurfaceStyle = document.getElementById("property-voxel-surface-style");
var elXTextureURL = document.getElementById("property-x-texture-url");
var elYTextureURL = document.getElementById("property-y-texture-url");
var elZTextureURL = document.getElementById("property-z-texture-url");
var elHyperlinkHref = document.getElementById("property-hyperlink-href");
var elHyperlinkDescription = document.getElementById("property-hyperlink-description");
@ -614,6 +617,9 @@
elVoxelVolumeSizeY.value = properties.voxelVolumeSize.y.toFixed(2);
elVoxelVolumeSizeZ.value = properties.voxelVolumeSize.z.toFixed(2);
elVoxelSurfaceStyle.value = properties.voxelSurfaceStyle;
elXTextureURL.value = properties.xTextureURL;
elYTextureURL.value = properties.yTextureURL;
elZTextureURL.value = properties.zTextureURL;
}
if (selected) {
@ -867,6 +873,9 @@
elVoxelVolumeSizeY.addEventListener('change', voxelVolumeSizeChangeFunction);
elVoxelVolumeSizeZ.addEventListener('change', voxelVolumeSizeChangeFunction);
elVoxelSurfaceStyle.addEventListener('change', createEmitTextPropertyUpdateFunction('voxelSurfaceStyle'));
elXTextureURL.addEventListener('change', createEmitTextPropertyUpdateFunction('xTextureURL'));
elYTextureURL.addEventListener('change', createEmitTextPropertyUpdateFunction('yTextureURL'));
elZTextureURL.addEventListener('change', createEmitTextPropertyUpdateFunction('zTextureURL'));
elMoveSelectionToGrid.addEventListener("click", function() {
EventBridge.emitWebEvent(JSON.stringify({
@ -1063,7 +1072,22 @@
<option value='0'>marching cubes</option>
<option value='1'>cubic</option>
<option value='2'>edged cubic</option>
</select>
</select>
</div>
<div class="label">X-axis Texture URL</div>
<div class="value">
<input type="text" id="property-x-texture-url" class="url"></input>
</div>
<div class="label">Y-axis Texture URL</div>
<div class="value">
<input type="text" id="property-y-texture-url" class="url"></input>
</div>
<div class="label">Z-axis Texture URL</div>
<div class="value">
<input type="text" id="property-z-texture-url" class="url"></input>
</div>
</div>

View file

@ -98,8 +98,8 @@ EntityPropertyDialogBox = (function () {
index++;
}
if (properties.type == "PolyVox") {
array.push({ label: "Voxel Space Size:", type: "header" });
if (properties.type == "PolyVox") {
array.push({ label: "Voxel Space Size:", type: "header" });
index++;
array.push({ label: "X:", value: properties.voxelVolumeSize.x.toFixed(decimals) });
@ -109,9 +109,16 @@ EntityPropertyDialogBox = (function () {
array.push({ label: "Z:", value: properties.voxelVolumeSize.z.toFixed(decimals) });
index++;
array.push({ label: "Surface Extractor", value: properties.voxelSurfaceStyle });
index++;
}
array.push({ label: "Surface Extractor", value: properties.voxelSurfaceStyle });
index++;
array.push({ label: "X-axis Texture URL:", value: properties.xTextureURL });
index++;
array.push({ label: "Y-axis Texture URL:", value: properties.yTextureURL });
index++;
array.push({ label: "Z-axis Texture URL:", value: properties.zTextureURL });
index++;
}
array.push({ label: "Position:", type: "header" });
index++;
@ -348,14 +355,17 @@ EntityPropertyDialogBox = (function () {
properties.backgroundColor.blue = array[index++].value;
}
if (properties.type == "PolyVox") {
if (properties.type == "PolyVox") {
properties.shapeType = array[index++].value;
index++; // skip header
properties.voxelVolumeSize.x = array[index++].value;
properties.voxelVolumeSize.y = array[index++].value;
properties.voxelVolumeSize.z = array[index++].value;
properties.voxelSurfaceStyle = array[index++].value;
index++; // skip header
properties.voxelVolumeSize.x = array[index++].value;
properties.voxelVolumeSize.y = array[index++].value;
properties.voxelVolumeSize.z = array[index++].value;
properties.voxelSurfaceStyle = array[index++].value;
properties.xTextureURL = array[index++].value;
properties.yTextureURL = array[index++].value;
properties.zTextureURL = array[index++].value;
}
index++; // skip header

View file

@ -340,7 +340,7 @@ SelectionDisplay = (function () {
leftMargin: 0,
});
var grabberMoveUp = Overlays.addOverlay("billboard", {
var grabberMoveUp = Overlays.addOverlay("image3d", {
url: HIFI_PUBLIC_BUCKET + "images/up-arrow.svg",
position: { x:0, y: 0, z: 0},
color: handleColor,
@ -609,7 +609,7 @@ SelectionDisplay = (function () {
minorTickMarksColor: { red: 0, green: 0, blue: 0 },
});
var yawHandle = Overlays.addOverlay("billboard", {
var yawHandle = Overlays.addOverlay("image3d", {
url: ROTATE_ARROW_WEST_NORTH_URL,
position: { x:0, y: 0, z: 0},
color: handleColor,
@ -622,7 +622,7 @@ SelectionDisplay = (function () {
});
var pitchHandle = Overlays.addOverlay("billboard", {
var pitchHandle = Overlays.addOverlay("image3d", {
url: ROTATE_ARROW_WEST_NORTH_URL,
position: { x:0, y: 0, z: 0},
color: handleColor,
@ -635,7 +635,7 @@ SelectionDisplay = (function () {
});
var rollHandle = Overlays.addOverlay("billboard", {
var rollHandle = Overlays.addOverlay("image3d", {
url: ROTATE_ARROW_WEST_NORTH_URL,
position: { x:0, y: 0, z: 0},
color: handleColor,

View file

@ -61,7 +61,7 @@ LightOverlayManager = function() {
// Allocate or get an unused overlay
function getOverlay() {
if (unusedOverlays.length == 0) {
var overlay = Overlays.addOverlay("billboard", {
var overlay = Overlays.addOverlay("image3d", {
});
allOverlays.push(overlay);
} else {

View file

@ -5,10 +5,13 @@
// Created by Zander Otavka on 7/24/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Manage overlays with object oriented goodness, instead of ugly `Overlays.h` methods.
// Instead of:
//
// var billboard = Overlays.addOverlay("billboard", { visible: false });
// var billboard = Overlays.addOverlay("image3d", { visible: false });
// ...
// Overlays.editOverlay(billboard, { visible: true });
// ...
@ -16,35 +19,66 @@
//
// You can now do:
//
// var billboard = new BillboardOverlay({ visible: false });
// var billboard = new Image3DOverlay({ visible: false });
// ...
// billboard.visible = true;
// ...
// billboard.destroy();
//
// See more on usage below.
// More on usage below. Examples in `examples/example/overlayPanelExample.js`.
//
// Note that including this file will delete Overlays from the global scope. All the
// functionality of Overlays is represented here, just better. If you try to use Overlays in
// tandem, there may be performance problems or nasty surprises.
// Note that including this file will delete `Overlays` from the global scope. All the
// functionality of `Overlays` is represented here, just better. If you try to use `Overlays`
// in tandem, there may be performance problems or nasty surprises.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function() {
// Delete `Overlays` from the global scope.
var Overlays = this.Overlays;
delete this.Overlays;
var ABSTRACT = null;
var overlays = {};
var panels = {};
var overlayTypes;
var Overlay, Overlay2D, Base3DOverlay, Planar3DOverlay, Volume3DOverlay;
var overlayTypes = {};
function generateOverlayClass(superclass, type, properties) {
var that;
if (type == ABSTRACT) {
that = function(type, params) {
superclass.call(this, type, params);
};
} else {
that = function(params) {
superclass.call(this, type, params);
};
overlayTypes[type] = that;
}
that.prototype = new superclass();
that.prototype.constructor = that;
properties.forEach(function(prop) {
Object.defineProperty(that.prototype, prop, {
get: function() {
return Overlays.getProperty(this._id, prop);
},
set: function(newValue) {
var keyValuePair = {};
keyValuePair[prop] = newValue;
this.setProperties(keyValuePair);
},
configurable: false
});
});
return that;
}
//
// Create a new JavaScript object for an overlay of given ID.
//
@ -55,10 +89,6 @@
}
var overlay = new overlayTypes[type]();
overlay._id = id;
var panelID = Overlays.getAttachedPanel(id)
if (panelID && panelID in panels) {
panels[panelID].addChild(overlay);
}
overlays[id] = overlay;
return overlay;
}
@ -74,7 +104,7 @@
//
function findOverlay(id, knownOverlaysOnly, searchList) {
if (id > 0) {
knownOverlaysOnly = Boolean(knownOverlaysOnly) || Boolean(searchList);
knownOverlaysOnly = Boolean(knownOverlaysOnly);
searchList = searchList || overlays;
var foundOverlay = searchList[id];
if (foundOverlay) {
@ -87,257 +117,187 @@
return null;
}
//
// Perform global scoped operations on overlays, such as finding by ray intersection.
// Create a new JavaScript object for a panel of given ID.
//
OverlayManager = {
findOnRay: function(pickRay, knownOverlaysOnly, searchList) {
var rayPickResult = Overlays.findRayIntersection(pickRay);
print("raypick " + rayPickResult.overlayID);
if (rayPickResult.intersects) {
return findOverlay(rayPickResult.overlayID, knownOverlaysOnly, searchList);
}
function makePanelFromId(id) {
if (!Overlays.isAddedPanel(id)) {
return null;
},
findAtPoint: function(point, knownOverlaysOnly, searchList) {
var foundID = Overlays.getOverlayAtPoint(point);
print("at point " + foundID);
if (foundID) {
return findOverlay(foundID, knownOverlaysOnly, searchList);
} else {
var pickRay = Camera.computePickRay(point.x, point.y);
return OverlayManager.findOnRay(pickRay, knownOverlaysOnly, searchList);
}
},
makeSearchList: function(overlayArray) {
var searchList = {};
overlayArray.forEach(function(overlay){
searchList[overlay._id] = overlay;
});
return searchList;
}
};
var panel = new OverlayPanel();
panel._id = id;
overlays[id] = overlay;
return overlay;
}
//
// Object oriented abstraction layer for overlays.
// Get or create a panel object from the id.
//
// Usage:
// // Create an overlay
// var billboard = new BillboardOverlay({
// visible: true,
// isFacingAvatar: true,
// ignoreRayIntersections: false
// });
// @param knownOverlaysOnly (Optional: Boolean)
// If true, a new object will not be created.
// @param searchList (Optional: Object)
// Map of overlay id's and overlay objects. Can be generated with
// `OverlayManager.makeSearchList`.
//
// // Get a property
// var isVisible = billboard.visible;
//
// // Set a single property
// billboard.position = { x: 1, y: 3, z: 2 };
//
// // Set multiple properties at the same time
// billboard.setProperties({
// url: "http://images.com/overlayImage.jpg",
// dimensions: { x: 2, y: 2 }
// });
//
// // Clone an overlay
// var clonedBillboard = billboard.clone();
//
// // Remove an overlay from the world
// billboard.destroy();
//
// // Remember, there is a poor orphaned JavaScript object left behind. You should
// // remove any references to it so you don't accidentally try to modify an overlay that
// // isn't there.
// billboard = undefined;
//
(function() {
var ABSTRACT = null;
overlayTypes = {};
function generateOverlayClass(superclass, type, properties) {
var that;
if (type == ABSTRACT) {
that = function(type, params) {
superclass.call(this, type, params);
};
} else {
that = function(params) {
superclass.call(this, type, params);
};
overlayTypes[type] = that;
function findPanel(id, knownPanelsOnly, searchList) {
if (id > 0) {
knownPanelsOnly = Boolean(knownPanelsOnly);
searchList = searchList || panels;
var foundPanel = searchList[id];
if (foundPanel) {
return foundPanel;
}
if (!knownPanelsOnly) {
return makePanelFromId(id);
}
that.prototype = new superclass();
that.prototype.constructor = that;
properties.forEach(function(prop) {
Object.defineProperty(that.prototype, prop, {
get: function() {
return Overlays.getProperty(this._id, prop);
},
set: function(newValue) {
var keyValuePair = {};
keyValuePair[prop] = newValue;
this.setProperties(keyValuePair);
},
configurable: false
});
});
return that;
}
return null;
}
// Supports multiple inheritance of properties. Just `concat` them onto the end of the
// properties list.
var PANEL_ATTACHABLE_FIELDS = ["offsetPosition", "facingRotation"];
Overlay = (function() {
var that = function(type, params) {
if (type && params) {
this._id = Overlays.addOverlay(type, params);
overlays[this._id] = this;
} else {
this._id = 0;
}
this._attachedPanelPointer = null;
};
that.prototype.constructor = that;
Object.defineProperty(that.prototype, "isLoaded", {
get: function() {
return Overlays.isLoaded(this._id);
}
});
Object.defineProperty(that.prototype, "attachedPanel", {
get: function() {
return this._attachedPanelPointer;
}
});
that.prototype.getTextSize = function(text) {
return Overlays.textSize(this._id, text);
};
that.prototype.setProperties = function(properties) {
Overlays.editOverlay(this._id, properties);
};
that.prototype.clone = function() {
return makeOverlayFromId(Overlays.cloneOverlay(this._id));
};
that.prototype.destroy = function() {
Overlays.deleteOverlay(this._id);
};
return generateOverlayClass(that, ABSTRACT, [
"alpha", "glowLevel", "pulseMax", "pulseMin", "pulsePeriod", "glowLevelPulse",
"alphaPulse", "colorPulse", "visible", "anchor"
]);
})();
Overlay2D = generateOverlayClass(Overlay, ABSTRACT, [
"bounds", "x", "y", "width", "height"
]);
Base3DOverlay = generateOverlayClass(Overlay, ABSTRACT, [
"position", "lineWidth", "rotation", "isSolid", "isFilled", "isWire", "isDashedLine",
"ignoreRayIntersection", "drawInFront", "drawOnHUD"
]);
Planar3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
Volume3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
generateOverlayClass(Overlay2D, "image", [
"subImage", "imageURL"
]);
generateOverlayClass(Overlay2D, "text", [
"font", "text", "backgroundColor", "backgroundAlpha", "leftMargin", "topMargin"
]);
generateOverlayClass(Planar3DOverlay, "text3d", [
"text", "backgroundColor", "backgroundAlpha", "lineHeight", "leftMargin", "topMargin",
"rightMargin", "bottomMargin", "isFacingAvatar"
]);
generateOverlayClass(Volume3DOverlay, "cube", [
"borderSize"
]);
generateOverlayClass(Volume3DOverlay, "sphere", [
]);
generateOverlayClass(Planar3DOverlay, "circle3d", [
"startAt", "endAt", "outerRadius", "innerRadius", "hasTickMarks",
"majorTickMarksAngle", "minorTickMarksAngle", "majorTickMarksLength",
"minorTickMarksLength", "majorTickMarksColor", "minorTickMarksColor"
]);
generateOverlayClass(Planar3DOverlay, "rectangle3d", [
]);
generateOverlayClass(Base3DOverlay, "line3d", [
"start", "end"
]);
generateOverlayClass(Planar3DOverlay, "grid", [
"minorGridWidth", "majorGridEvery"
]);
generateOverlayClass(Volume3DOverlay, "localmodels", [
]);
generateOverlayClass(Volume3DOverlay, "model", [
"url", "dimensions", "textures"
]);
generateOverlayClass(Planar3DOverlay, "billboard", [
"url", "subImage", "isFacingAvatar"
].concat(PANEL_ATTACHABLE_FIELDS));
})();
ImageOverlay = overlayTypes["image"];
TextOverlay = overlayTypes["text"];
Text3DOverlay = overlayTypes["text3d"];
Cube3DOverlay = overlayTypes["cube"];
Sphere3DOverlay = overlayTypes["sphere"];
Circle3DOverlay = overlayTypes["circle3d"];
Rectangle3DOverlay = overlayTypes["rectangle3d"];
Line3DOverlay = overlayTypes["line3d"];
Grid3DOverlay = overlayTypes["grid"];
LocalModelsOverlay = overlayTypes["localmodels"];
ModelOverlay = overlayTypes["model"];
BillboardOverlay = overlayTypes["billboard"];
function findOverlayOrPanel(id, knownObjectsOnly, searchList) {
return findOverlay(id, knownObjectsOnly, searchList) ||
findPanel(id, knownObjectsOnly, searchList);
}
//
// Object oriented abstraction layer for panels.
//
FloatingUIPanel = (function() {
var that = function(params) {
this._id = Overlays.addPanel(params);
this._children = [];
this._visible = Boolean(params.visible);
panels[this._id] = this;
this._attachedPanelPointer = null;
var Overlay = (function() {
var that = function(type, params) {
if (type && params) {
this._id = Overlays.addOverlay(type, params);
overlays[this._id] = this;
} else {
this._id = 0;
}
};
that.prototype.constructor = that;
var FIELDS = ["offsetPosition", "offsetRotation", "facingRotation"];
FIELDS.forEach(function(prop) {
Object.defineProperty(that.prototype, "isLoaded", {
get: function() {
return Overlays.isLoaded(this._id);
}
});
Object.defineProperty(that.prototype, "parentPanel", {
get: function() {
return findPanel(Overlays.getParentPanel(this._id));
}
});
that.prototype.getTextSize = function(text) {
return Overlays.textSize(this._id, text);
};
that.prototype.setProperties = function(properties) {
Overlays.editOverlay(this._id, properties);
};
that.prototype.clone = function() {
return makeOverlayFromId(Overlays.cloneOverlay(this._id));
};
that.prototype.destroy = function() {
Overlays.deleteOverlay(this._id);
};
that.prototype.isPanelAttachable = function() {
return false;
};
return generateOverlayClass(that, ABSTRACT, [
"alpha", "glowLevel", "pulseMax", "pulseMin", "pulsePeriod", "glowLevelPulse",
"alphaPulse", "colorPulse", "visible", "anchor"
]);
})();
// Supports multiple inheritance of properties. Just `concat` them onto the end of the
// properties list.
var PanelAttachable = ["offsetPosition", "offsetRotation", "offsetScale"];
var Billboardable = ["isFacingAvatar"];
var Overlay2D = generateOverlayClass(Overlay, ABSTRACT, [
"bounds", "x", "y", "width", "height"
]);
var Base3DOverlay = generateOverlayClass(Overlay, ABSTRACT, [
"position", "lineWidth", "rotation", "isSolid", "isFilled", "isWire", "isDashedLine",
"ignoreRayIntersection", "drawInFront", "drawOnHUD"
]);
var Planar3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
var Billboard3DOverlay = generateOverlayClass(Planar3DOverlay, ABSTRACT, [
].concat(PanelAttachable).concat(Billboardable));
Billboard3DOverlay.prototype.isPanelAttachable = function() { return true; };
var Volume3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
ImageOverlay = generateOverlayClass(Overlay2D, "image", [
"subImage", "imageURL"
]);
Image3DOverlay = generateOverlayClass(Billboard3DOverlay, "image3d", [
"url", "subImage"
]);
TextOverlay = generateOverlayClass(Overlay2D, "text", [
"font", "text", "backgroundColor", "backgroundAlpha", "leftMargin", "topMargin"
]);
Text3DOverlay = generateOverlayClass(Billboard3DOverlay, "text3d", [
"text", "backgroundColor", "backgroundAlpha", "lineHeight", "leftMargin", "topMargin",
"rightMargin", "bottomMargin"
]);
Cube3DOverlay = generateOverlayClass(Volume3DOverlay, "cube", [
"borderSize"
]);
Sphere3DOverlay = generateOverlayClass(Volume3DOverlay, "sphere", [
]);
Circle3DOverlay = generateOverlayClass(Planar3DOverlay, "circle3d", [
"startAt", "endAt", "outerRadius", "innerRadius", "hasTickMarks",
"majorTickMarksAngle", "minorTickMarksAngle", "majorTickMarksLength",
"minorTickMarksLength", "majorTickMarksColor", "minorTickMarksColor"
]);
Rectangle3DOverlay = generateOverlayClass(Planar3DOverlay, "rectangle3d", [
]);
Line3DOverlay = generateOverlayClass(Base3DOverlay, "line3d", [
"start", "end"
]);
Grid3DOverlay = generateOverlayClass(Planar3DOverlay, "grid", [
"minorGridWidth", "majorGridEvery"
]);
LocalModelsOverlay = generateOverlayClass(Volume3DOverlay, "localmodels", [
]);
ModelOverlay = generateOverlayClass(Volume3DOverlay, "model", [
"url", "dimensions", "textures"
]);
OverlayPanel = (function() {
var that = function(params) {
this._id = Overlays.addPanel(params);
panels[this._id] = this;
};
that.prototype.constructor = that;
var props = [
"anchorPosition", "anchorPositionBinding", "anchorRotation", "anchorRotationBinding", "anchorScale", "visible"
].concat(PanelAttachable).concat(Billboardable)
props.forEach(function(prop) {
Object.defineProperty(that.prototype, prop, {
get: function() {
return Overlays.getPanelProperty(this._id, prop);
@ -351,78 +311,47 @@
});
});
var PSEUDO_FIELDS = [];
PSEUDO_FIELDS.push("children");
Object.defineProperty(that.prototype, "children", {
Object.defineProperty(that.prototype, "parentPanel", {
get: function() {
return this._children.slice();
return findPanel(Overlays.getParentPanel(this._id));
}
});
PSEUDO_FIELDS.push("visible");
Object.defineProperty(that.prototype, "visible", {
Object.defineProperty(that.prototype, "children", {
get: function() {
return this._visible;
},
set: function(visible) {
this._visible = visible;
this._children.forEach(function(child) {
child.visible = visible;
});
var idArray = Overlays.getPanelProperty(this._id, "children");
var objArray = [];
for (var i = 0; i < idArray.length; i++) {
objArray[i] = findOverlayOrPanel(idArray[i]);
}
return objArray;
}
});
that.prototype.addChild = function(child) {
if (child instanceof Overlay) {
Overlays.setAttachedPanel(child._id, this._id);
} else if (child instanceof FloatingUIPanel) {
child.setProperties({
anchorPosition: {
bind: "panel",
value: this._id
},
offsetRotation: {
bind: "panel",
value: this._id
}
});
}
child._attachedPanelPointer = this;
child.visible = this.visible;
this._children.push(child);
Overlays.setParentPanel(child._id, this._id);
return child;
};
that.prototype.removeChild = function(child) {
var i = this._children.indexOf(child);
if (i >= 0) {
if (child instanceof Overlay) {
Overlays.setAttachedPanel(child._id, 0);
} else if (child instanceof FloatingUIPanel) {
child.setProperties({
anchorPosition: {
bind: "myAvatar"
},
offsetRotation: {
bind: "myAvatar"
}
});
}
child._attachedPanelPointer = null;
this._children.splice(i, 1);
if (child.parentPanel === this) {
Overlays.setParentPanel(child._id, 0);
}
};
that.prototype.setProperties = function(properties) {
for (var i in PSEUDO_FIELDS) {
if (properties[PSEUDO_FIELDS[i]] !== undefined) {
this[PSEUDO_FIELDS[i]] = properties[PSEUDO_FIELDS[i]];
}
}
Overlays.editPanel(this._id, properties);
};
that.prototype.setChildrenVisible = function() {
this.children.forEach(function(child) {
child.visible = true;
if (child.setChildrenVisible !== undefined) {
child.setChildrenVisible();
}
});
};
that.prototype.destroy = function() {
Overlays.deletePanel(this._id);
};
@ -431,10 +360,39 @@
})();
OverlayManager = {
findOnRay: function(pickRay, knownOverlaysOnly, searchList) {
var rayPickResult = Overlays.findRayIntersection(pickRay);
if (rayPickResult.intersects) {
return findOverlay(rayPickResult.overlayID, knownOverlaysOnly, searchList);
}
return null;
},
findAtPoint: function(point, knownOverlaysOnly, searchList) {
var foundID = Overlays.getOverlayAtPoint(point);
if (foundID) {
return findOverlay(foundID, knownOverlaysOnly, searchList);
} else {
var pickRay = Camera.computePickRay(point.x, point.y);
return OverlayManager.findOnRay(pickRay, knownOverlaysOnly, searchList);
}
},
makeSearchList: function(array) {
var searchList = {};
array.forEach(function(object) {
searchList[object._id] = object;
});
return searchList;
}
};
// Threadsafe cleanup of JavaScript objects.
function onOverlayDeleted(id) {
if (id in overlays) {
if (overlays[id]._attachedPanelPointer) {
overlays[id]._attachedPanelPointer.removeChild(overlays[id]);
if (overlays[id].parentPanel) {
overlays[id].parentPanel.removeChild(overlays[id]);
}
delete overlays[id];
}
@ -442,10 +400,9 @@
function onPanelDeleted(id) {
if (id in panels) {
panels[id]._children.forEach(function(child) {
print(JSON.stringify(child.destroy));
child.destroy();
});
if (panels[id].parentPanel) {
panels[id].parentPanel.removeChild(panels[id]);
}
delete panels[id];
}
}

File diff suppressed because it is too large Load diff

View file

@ -148,7 +148,7 @@ KeyboardKey = (function(keyboard, keyProperties) {
};
for (var i = 0; i < this.bounds.length; i++) {
if (THREE_D_MODE) {
this.overlays.push(Overlays.addOverlay("billboard", {
this.overlays.push(Overlays.addOverlay("image3d", {
scale: 1,
rotation: MyAvatar.rotation,
isFacingAvatar: false,
@ -202,7 +202,7 @@ Keyboard = (function(params) {
return windowDimensions.y - this.height();
};
if (THREE_D_MODE) {
this.background = Overlays.addOverlay("billboard", {
this.background = Overlays.addOverlay("image3d", {
scale: 1,
position: MyAvatar.position,
rotation: MyAvatar.rotation,

View file

@ -130,7 +130,6 @@ var heights = [];
var myAlpha = [];
var arrays = [];
var isOnHMD = false,
ENABLE_VR_MODE = "Enable VR Mode",
NOTIFICATIONS_3D_DIRECTION = 0.0, // Degrees from avatar orientation.
NOTIFICATIONS_3D_DISTANCE = 0.6, // Horizontal distance from avatar position.
NOTIFICATIONS_3D_ELEVATION = -0.8, // Height of top middle of top notification relative to avatar eyes.
@ -260,7 +259,7 @@ function notify(notice, button, height) {
positions = calculate3DOverlayPositions(noticeWidth, noticeHeight, notice.y);
notifications.push((Overlays.addOverlay("text3d", notice)));
buttons.push((Overlays.addOverlay("billboard", button)));
buttons.push((Overlays.addOverlay("image3d", button)));
overlay3DDetails.push({
notificationOrientation: positions.notificationOrientation,
notificationPosition: positions.notificationPosition,
@ -414,7 +413,7 @@ function update() {
j,
k;
if (isOnHMD !== Menu.isOptionChecked(ENABLE_VR_MODE)) {
if (isOnHMD !== HMD.active) {
while (arrays.length > 0) {
deleteNotification(0);
}
@ -596,7 +595,7 @@ function menuItemEvent(menuItem) {
LODManager.LODDecreased.connect(function() {
var warningText = "\n"
+ "Due to the complexity of the content, the \n"
+ "level of detail has been decreased."
+ "level of detail has been decreased. "
+ "You can now see: \n"
+ LODManager.getLODFeedbackText();

155
examples/particleDance.js Normal file
View file

@ -0,0 +1,155 @@
(function() {
var NUM_BURSTS = 3;
var NUM_EMITTERS_PER_BURST = 11;
var RANGE = 5.0;
var AUDIO_RANGE = 0.5 * RANGE;
var DIST_BETWEEN_BURSTS = 1.0;
var LOUDNESS_RADIUS_RATIO = 10;
var TEXTURE_PATH = 'https://raw.githubusercontent.com/ericrius1/SantasLair/santa/assets/smokeparticle.png';
var cameraAxis = Quat.getFront(Camera.getOrientation());
var center = Vec3.sum(Camera.getPosition(), Vec3.multiply(RANGE, cameraAxis));
var audioPosition = Vec3.sum(Camera.getPosition(), Vec3.multiply(AUDIO_RANGE, cameraAxis));
var song = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/songs/Made%20In%20Heights%20-%20Forgiveness.wav");
var audioOptions = {
volume: 0.9, position: audioPosition
};
var DISTANCE_FROM_CAMERA = 7.0;
var colorPalette = [{
red: 0,
green: 206,
blue: 209
}, {
red: 173,
green: 216,
blue: 230
}, {
red: 0,
green: 191,
blue: 255
}];
var bursts = [];
var audioStats;
Burst = function(point) {
if (!audioStats) {
audioStats = Audio.playSound(song, audioOptions);
}
this.point = point;
this.emitters = [];
this.emitRate = randInt(80, 120);
this.emitStrength = randInt(4.0, 6.0);
this.animationSettings = JSON.stringify({
fps: 10,
frameIndex: 0,
running: true,
firstFrame: 0,
lastFrame: 50,
loop: true
});
this.direction = {
x: randFloat(-0.3, 0.3),
y: 1.0,
z: 0.0
}
this.base = Entities.addEntity({
type: "Sphere",
position: this.point,
dimensions: {
x: 0.05,
y: 0.05,
z: 0.05
},
color: {
red: 240,
green: 240,
blue: 240
}
});
for (var i = 0; i < NUM_EMITTERS_PER_BURST; ++i) {
var colorIndex = randInt(0, colorPalette.length - 1);
var color = colorPalette[colorIndex];
this.emitters.push(Entities.addEntity({
type: "ParticleEffect",
animationSettings: this.animationSettings,
position: this.point,
textures: TEXTURE_PATH,
emitRate: this.emitRate,
emitStrength: this.emitStrength,
emitDirection: {
x: Math.pow(-1, i) * randFloat(0.0, 0.4),
y: 1.0,
z: 0.0
},
color: color,
lifespan: 1.0,
visible: true,
locked: false
}));
}
};
var nextPosition = center;
var posOrNeg = -1;
for (var i = 0; i < NUM_BURSTS; ++i) {
posOrNeg *= -1;
bursts.push(new Burst(nextPosition));
var offset = {
x: RANGE/(i+2) * posOrNeg,
y: 0,
z: 0
};
var nextPosition = Vec3.sum(nextPosition, offset);
}
function update(deltaTime) {
for (var i = 0; i < NUM_BURSTS; i++) {
if (audioStats && audioStats.loudness > 0.0) {
for (var j = 0; j < NUM_EMITTERS_PER_BURST; ++j) {
Entities.editEntity(bursts[i].emitters[j], {
particleRadius: audioStats.loudness / LOUDNESS_RADIUS_RATIO
});
}
}
}
}
function randFloat(min, max) {
return Math.random() * (max - min) + min;
}
function randInt(min, max) {
return Math.floor(Math.random() * (max - min)) + min;
}
this.cleanup = function() {
for (var i = 0; i < NUM_BURSTS; ++i) {
Entities.deleteEntity(bursts[i].base);
for (var j = 0; j < NUM_EMITTERS_PER_BURST; ++j) {
var emitter = bursts[i].emitters[j];
Entities.deleteEntity(emitter);
}
}
Audio.stop();
}
Script.update.connect(update);
})();
Script.scriptEnding.connect(cleanup);

View file

@ -41,7 +41,6 @@
SCALE_2D = 0.35, // Scale the SVGs for 2D display.
background3D = {},
bar3D = {},
ENABLE_VR_MODE_MENU_ITEM = "Enable VR Mode",
PROGRESS_3D_DIRECTION = 0.0, // Degrees from avatar orientation.
PROGRESS_3D_DISTANCE = 0.602, // Horizontal distance from avatar position.
PROGRESS_3D_ELEVATION = -0.8, // Height of top middle of top notification relative to avatar eyes.
@ -117,7 +116,7 @@
visible: false,
ignoreRayIntersection: true
});
bar3D.overlay = Overlays.addOverlay("billboard", {
bar3D.overlay = Overlays.addOverlay("image3d", {
url: BAR_URL,
subImage: { x: BAR_WIDTH, y: 0, width: BAR_WIDTH, height: BAR_HEIGHT },
scale: SCALE_3D * BAR_WIDTH,
@ -157,7 +156,7 @@
eyePosition,
avatarOrientation;
if (isOnHMD !== Menu.isOptionChecked(ENABLE_VR_MODE_MENU_ITEM)) {
if (isOnHMD !== HMD.active) {
deleteOverlays();
isOnHMD = !isOnHMD;
createOverlays();

View file

@ -175,7 +175,7 @@ function SeatIndicator(modelProperties, seatIndex) {
modelProperties.sittingPoints[seatIndex].rotation);
this.scale = MyAvatar.scale / 12;
this.sphere = Overlays.addOverlay("billboard", {
this.sphere = Overlays.addOverlay("image3d", {
subImage: { x: 0, y: buttonHeight, width: buttonWidth, height: buttonHeight},
url: buttonImageUrl,
position: this.position,

View file

@ -1360,7 +1360,7 @@ var CHECK_MARK_COLOR = {
this.nextY = this.y + this.getHeight();
var item = new CollapsablePanelItem(name, this.x, this.nextY, textWidth, rawHeight, panel);
var item = new CollapsablePanelItem(name, this.x, this.nextY, textWidth, rawHeight);
item.isSubPanel = true;
this.nextY += 1.5 * item.height;

View file

@ -44,7 +44,7 @@ function mousePressEvent(event) {
}
}
// if the PolyVox entity is empty, we can't pick against its voxel. try picking against its
// if the PolyVox entity is empty, we can't pick against its "on" voxels. try picking against its
// bounding box, instead.
intersection = Entities.findRayIntersection(pickRay, false); // bounding box picking
if (intersection.intersects) {

View file

@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK" "connexionClient")
set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient" "iViewHMD")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
@ -29,18 +29,6 @@ endif()
configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVersion.h")
macro(GroupSources curdir)
file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir} ${PROJECT_SOURCE_DIR}/${curdir}/*)
foreach(child ${children})
if(IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
GroupSources(${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES ${PROJECT_SOURCE_DIR}/${curdir}/${child})
endif()
endforeach()
endmacro()
# grab the implementation and header files from src dirs
file(GLOB_RECURSE INTERFACE_SRCS "src/*.cpp" "src/*.h")
GroupSources("src")
@ -115,16 +103,12 @@ else()
add_executable(${TARGET_NAME} ${INTERFACE_SRCS} ${QM})
endif()
# set up the external glm library
add_dependency_external_projects(glm bullet)
# set up the external glm library
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS})
add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
find_package(Bullet REQUIRED)
# perform the system include hack for OS X to ignore warnings
@ -137,9 +121,10 @@ endif()
target_link_libraries(${TARGET_NAME} ${BULLET_LIBRARIES})
# link required hifi libraries
link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui auto-updater)
link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui auto-updater
plugins display-plugins input-plugins)
add_dependency_external_projects(sdl2)

View file

@ -0,0 +1,3 @@
The Mac version does not require any files. The 3D Connexion driver should be installed from http://www.3dconnexion.eu/service/drivers.html
For Windows the provided header file is required: include/I3dMouseParams.h

View file

@ -1,4 +0,0 @@
The mac version does not require any files here. 3D connexion should be installed from
http://www.3dconnexion.eu/service/drivers.html
For windows a header file is required Inc/I3dMouseParams.h

14
interface/external/iViewHMD/readme.txt vendored Normal file
View file

@ -0,0 +1,14 @@
Instructions for adding SMI HMD Eye Tracking to Interface on Windows
David Rowe, 27 Jul 2015.
1. Download and install the SMI HMD Eye Tracking software from http://update.smivision.com/iViewNG-HMD.exe.
2. Copy the SDK folders (3rdParty, include, libs) from the SDK installation folder C:\Program Files (x86)\SMI\iViewNG-HMD\SDK
into the interface/externals/iViewHMD folder. This readme.txt should be there as well.
You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different
checkouts and different projects). If so, set the ENV variable "HIFI_LIB_DIR" to a directory containing a subfolder
"iViewHMD" that contains the folders mentioned above.
3. Clear your build directory, run cmake and build, and you should be all set.

Binary file not shown.

Binary file not shown.

After

(image error) Size: 25 KiB

View file

@ -47,6 +47,11 @@ Item {
font.pixelSize: root.fontSize
text: "Framerate: " + root.framerate
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Simrate: " + root.simrate
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize

File diff suppressed because it is too large Load diff

View file

@ -36,6 +36,9 @@
#include <StDev.h>
#include <udt/PacketHeaders.h>
#include <ViewFrustum.h>
#include <plugins/PluginContainer.h>
#include <plugins/PluginManager.h>
#include <SimpleMovingAverage.h>
#include "AudioClient.h"
#include "Bookmarks.h"
@ -47,13 +50,12 @@
#include "Stars.h"
#include "avatar/Avatar.h"
#include "avatar/MyAvatar.h"
#include "devices/SixenseManager.h"
#include <input-plugins/KeyboardMouseDevice.h>
#include "scripting/ControllerScriptingInterface.h"
#include "scripting/DialogsManagerScriptingInterface.h"
#include "scripting/WebWindowClass.h"
#include "ui/AudioStatsDialog.h"
#include "ui/BandwidthDialog.h"
#include "ui/HMDToolsDialog.h"
#include "ui/ModelsBrowser.h"
#include "ui/OctreeStatsDialog.h"
#include "ui/SnapshotShareDialog.h"
@ -62,10 +64,9 @@
#include "ui/overlays/Overlays.h"
#include "ui/ApplicationOverlay.h"
#include "ui/ApplicationCompositor.h"
#include "ui/OverlayConductor.h"
#include "ui/RunningScriptsWidget.h"
#include "ui/ToolWindow.h"
#include "ui/UserInputMapper.h"
#include "devices/KeyboardMouseDevice.h"
#include "octree/OctreePacketProcessor.h"
#include "UndoStackScriptingInterface.h"
@ -79,6 +80,7 @@ class QMouseEvent;
class QSystemTrayIcon;
class QTouchEvent;
class QWheelEvent;
class OffscreenGlCanvas;
class GLCanvas;
class FaceTracker;
@ -86,6 +88,12 @@ class MainWindow;
class Node;
class ScriptEngine;
namespace gpu {
class Context;
typedef std::shared_ptr<Context> ContextPointer;
}
static const QString SNAPSHOT_EXTENSION = ".jpg";
static const QString SVO_EXTENSION = ".svo";
static const QString SVO_JSON_EXTENSION = ".svo.json";
@ -124,7 +132,7 @@ class Application;
typedef bool (Application::* AcceptURLMethod)(const QString &);
class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface {
class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface, PluginContainer {
Q_OBJECT
friend class OctreePacketProcessor;
@ -136,7 +144,6 @@ public:
static glm::quat getOrientationForPath() { return getInstance()->_myAvatar->getOrientation(); }
static glm::vec3 getPositionForAudio() { return getInstance()->_myAvatar->getHead()->getPosition(); }
static glm::quat getOrientationForAudio() { return getInstance()->_myAvatar->getHead()->getFinalOrientationInWorldFrame(); }
static UserInputMapper* getUserInputMapper() { return &getInstance()->_userInputMapper; }
static void initPlugins();
static void shutdownPlugins();
@ -179,6 +186,7 @@ public:
bool eventFilter(QObject* object, QEvent* event);
glm::uvec2 getCanvasSize() const;
glm::uvec2 getUiSize() const;
QSize getDeviceSize() const;
bool hasFocus() const;
PickRay computePickRay() const;
@ -262,14 +270,7 @@ public:
void displaySide(RenderArgs* renderArgs, Camera& whichCamera, bool selfAvatarOnly = false, bool billboard = false);
virtual const glm::vec3& getShadowDistances() const { return _shadowDistances; }
/// Computes the off-axis frustum parameters for the view frustum, taking mirroring into account.
virtual void computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) const;
virtual ViewFrustum* getCurrentViewFrustum() { return getDisplayViewFrustum(); }
virtual bool getShadowsEnabled();
virtual bool getCascadeShadowsEnabled();
virtual QThread* getMainThread() { return thread(); }
virtual float getSizeScale() const;
virtual int getBoundaryLevelAdjust() const;
@ -279,6 +280,25 @@ public:
virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
virtual qreal getDevicePixelRatio();
// Plugin container support
virtual void addMenu(const QString& menuName);
virtual void removeMenu(const QString& menuName);
virtual void addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName);
virtual void removeMenuItem(const QString& menuName, const QString& menuItem);
virtual bool isOptionChecked(const QString& name);
virtual void setIsOptionChecked(const QString& path, bool checked);
virtual void setFullscreen(const QScreen* target) override;
virtual void unsetFullscreen(const QScreen* avoid) override;
virtual void showDisplayPluginsTools() override;
virtual QGLWidget* getPrimarySurface() override;
void setActiveDisplayPlugin(const QString& pluginName);
DisplayPlugin * getActiveDisplayPlugin();
const DisplayPlugin * getActiveDisplayPlugin() const;
public:
FileLogger* getLogger() { return _logger; }
glm::vec2 getViewportDimensions() const;
@ -302,10 +322,9 @@ public:
// rendering of several elements depend on that
// TODO: carry that information on the Camera as a setting
bool isHMDMode() const;
glm::quat getHeadOrientation() const;
glm::vec3 getHeadPosition() const;
glm::mat4 getHeadPose() const;
glm::mat4 getHMDSensorPose() const;
glm::mat4 getEyePose(int eye) const;
glm::mat4 getEyeOffset(int eye) const;
glm::mat4 getEyeProjection(int eye) const;
QRect getDesirableApplicationGeometry();
@ -332,6 +351,8 @@ public:
const QRect& getMirrorViewRect() const { return _mirrorViewRect; }
float getAverageSimsPerSecond();
signals:
/// Fired when we're simulating; allows external parties to hook in.
@ -355,6 +376,7 @@ signals:
void fullAvatarURLChanged(const QString& newValue, const QString& modelName);
void beforeAboutToQuit();
void activeDisplayPluginChanged();
public slots:
void setSessionUUID(const QUuid& sessionUUID);
@ -363,6 +385,8 @@ public slots:
void nodeAdded(SharedNodePointer node);
void nodeKilled(SharedNodePointer node);
void packetSent(quint64 length);
void updateDisplayMode();
void updateInputModes();
QVector<EntityItemID> pasteEntities(float x, float y, float z);
bool exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs);
@ -408,6 +432,11 @@ public slots:
void resetSensors();
void setActiveFaceTracker();
void setActiveEyeTracker();
void calibrateEyeTracker1Point();
void calibrateEyeTracker3Points();
void calibrateEyeTracker5Points();
void aboutApp();
void showEditEntitiesHelp();
@ -433,15 +462,8 @@ private slots:
void connectedToDomain(const QString& hostname);
friend class HMDToolsDialog;
void setFullscreen(bool fullscreen);
void setEnable3DTVMode(bool enable3DTVMode);
void setEnableVRMode(bool enableVRMode);
void rotationModeChanged();
glm::vec2 getScaledScreenPoint(glm::vec2 projectedPoint);
void closeMirrorView();
void restoreMirrorView();
void shrinkMirrorView();
@ -469,6 +491,9 @@ private:
void update(float deltaTime);
void setPalmData(Hand* hand, UserInputMapper::PoseValue pose, float deltaTime, int index);
void emulateMouse(Hand* hand, float click, float shift, int index);
// Various helper functions called during update()
void updateLOD();
void updateMouseRay();
@ -497,6 +522,11 @@ private:
int sendNackPackets();
bool _dependencyManagerIsSetup;
OffscreenGlCanvas* _offscreenContext;
DisplayPluginPointer _displayPlugin;
InputPluginList _activeInputPlugins;
MainWindow* _window;
ToolWindow* _toolWindow;
@ -534,11 +564,10 @@ private:
OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
UserInputMapper _userInputMapper; // User input mapper allowing to mapp different real devices to the action channels that the application has to offer
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
Camera _mirrorCamera; // Cammera for mirror view
KeyboardMouseDevice* _keyboardMouseDevice{ nullptr }; // Default input device, the good old keyboard mouse and maybe touchpad
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
Camera _mirrorCamera; // Cammera for mirror view
QRect _mirrorViewRect;
Setting::Handle<bool> _firstRun;
@ -625,9 +654,6 @@ private:
void checkSkeleton();
QWidget* _fullscreenMenuWidget = new QWidget();
int _menuBarHeight;
QHash<QString, AcceptURLMethod> _acceptedExtensions;
QList<QString> _domainConnectionRefusals;
@ -644,9 +670,24 @@ private:
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
ApplicationCompositor _compositor;
OverlayConductor _overlayConductor;
int _oldHandMouseX[2];
int _oldHandMouseY[2];
bool _oldHandLeftClick[2];
bool _oldHandRightClick[2];
int _numFramesSinceLastResize = 0;
bool _overlayEnabled = true;
QRect _savedGeometry;
DialogsManagerScriptingInterface* _dialogsManagerScriptingInterface = new DialogsManagerScriptingInterface();
EntityItemID _keyboardFocusedItem;
quint64 _lastAcceptedKeyPress = 0;
SimpleMovingAverage _simsPerSecond{10};
int _simsPerSecondReport = 0;
quint64 _lastSimsPerSecondUpdate = 0;
};
#endif // hifi_Application_h

View file

@ -46,11 +46,7 @@ QString modeToString(CameraMode mode) {
}
Camera::Camera() :
_mode(CAMERA_MODE_THIRD_PERSON),
_position(0.0f, 0.0f, 0.0f),
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP)),
_isKeepLookingAt(false),
_lookingAt(0.0f, 0.0f, 0.0f)
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP))
{
}
@ -61,12 +57,33 @@ void Camera::update(float deltaTime) {
return;
}
void Camera::recompose() {
mat4 orientation = glm::mat4_cast(_rotation);
mat4 translation = glm::translate(mat4(), _position);
_transform = translation * orientation;
}
void Camera::decompose() {
_position = vec3(_transform[3]);
_rotation = glm::quat_cast(_transform);
}
void Camera::setTransform(const glm::mat4& transform) {
_transform = transform;
decompose();
}
void Camera::setPosition(const glm::vec3& position) {
_position = position;
_position = position;
recompose();
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
}
void Camera::setRotation(const glm::quat& rotation) {
_rotation = rotation;
recompose();
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
@ -129,3 +146,21 @@ void Camera::keepLookingAt(const glm::vec3& point) {
_isKeepLookingAt = true;
_lookingAt = point;
}
void Camera::loadViewFrustum(ViewFrustum& frustum) const {
// We will use these below, from either the camera or head vectors calculated above
frustum.setProjection(getProjection());
// Set the viewFrustum up with the correct position and orientation of the camera
frustum.setPosition(getPosition());
frustum.setOrientation(getRotation());
// Ask the ViewFrustum class to calculate our corners
frustum.calculate();
}
ViewFrustum Camera::toViewFrustum() const {
ViewFrustum result;
loadViewFrustum(result);
return result;
}

View file

@ -43,24 +43,31 @@ public:
void update( float deltaTime );
void setRotation(const glm::quat& rotation);
void setProjection(const glm::mat4 & projection);
CameraMode getMode() const { return _mode; }
void setMode(CameraMode m);
glm::quat getRotation() const { return _rotation; }
const glm::mat4& getProjection() const { return _projection; }
CameraMode getMode() const { return _mode; }
void loadViewFrustum(ViewFrustum& frustum) const;
ViewFrustum toViewFrustum() const;
public slots:
QString getModeString() const;
void setModeString(const QString& mode);
glm::quat getRotation() const { return _rotation; }
void setRotation(const glm::quat& rotation);
glm::vec3 getPosition() const { return _position; }
void setPosition(const glm::vec3& position);
glm::quat getOrientation() const { return getRotation(); }
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
const glm::mat4& getTransform() const { return _transform; }
void setTransform(const glm::mat4& transform);
const glm::mat4& getProjection() const { return _projection; }
void setProjection(const glm::mat4& projection);
PickRay computePickRay(float x, float y);
// These only work on independent cameras
@ -78,11 +85,17 @@ signals:
void modeUpdated(const QString& newMode);
private:
CameraMode _mode;
void recompose();
void decompose();
CameraMode _mode{ CAMERA_MODE_THIRD_PERSON };
glm::mat4 _transform;
glm::mat4 _projection;
// derived
glm::vec3 _position;
glm::quat _rotation;
glm::mat4 _projection;
bool _isKeepLookingAt;
bool _isKeepLookingAt{ false };
glm::vec3 _lookingAt;
};

View file

@ -9,17 +9,33 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Application.h"
#include "GLCanvas.h"
#include <QMimeData>
#include <QUrl>
#include <QWindow>
#include "Application.h"
#include "GLCanvas.h"
#include "MainWindow.h"
const int MSECS_PER_FRAME_WHEN_THROTTLED = 66;
GLCanvas::GLCanvas() : QGLWidget(QGL::NoDepthBuffer | QGL::NoStencilBuffer),
static QGLFormat& getDesiredGLFormat() {
// Specify an OpenGL 3.3 format using the Core profile.
// That is, no old-school fixed pipeline functionality
static QGLFormat glFormat;
static std::once_flag once;
std::call_once(once, [] {
glFormat.setVersion(4, 1);
glFormat.setProfile(QGLFormat::CoreProfile); // Requires >=Qt-4.8.0
glFormat.setSampleBuffers(false);
glFormat.setDepth(false);
glFormat.setStencil(false);
});
return glFormat;
}
GLCanvas::GLCanvas() : QGLWidget(getDesiredGLFormat()),
_throttleRendering(false),
_idleRenderInterval(MSECS_PER_FRAME_WHEN_THROTTLED)
{
@ -48,7 +64,6 @@ int GLCanvas::getDeviceHeight() const {
}
void GLCanvas::initializeGL() {
Application::getInstance()->initializeGL();
setAttribute(Qt::WA_AcceptTouchEvents);
setAcceptDrops(true);
connect(Application::getInstance(), SIGNAL(applicationStateChanged(Qt::ApplicationState)), this, SLOT(activeChanged(Qt::ApplicationState)));

View file

@ -22,7 +22,7 @@ class GLCanvas : public QGLWidget {
public:
GLCanvas();
void stopFrameTimer();
bool isThrottleRendering() const;

View file

@ -97,10 +97,6 @@ void MainWindow::changeEvent(QEvent* event) {
} else {
emit windowShown(true);
}
if (isFullScreen() != Menu::getInstance()->isOptionChecked(MenuOption::Fullscreen)) {
Menu::getInstance()->setIsOptionChecked(MenuOption::Fullscreen, isFullScreen());
}
} else if (event->type() == QEvent::ActivationChange) {
if (isActiveWindow()) {
emit windowShown(true);

View file

@ -28,8 +28,7 @@
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "devices/RealSense.h"
#include "devices/SixenseManager.h"
#include "devices/3Dconnexion.h"
#include "devices/3DConnexionClient.h"
#include "MainWindow.h"
#include "scripting/MenuScriptingInterface.h"
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
@ -221,9 +220,20 @@ Menu::Menu() {
addActionToQMenuAndActionHash(toolsMenu, MenuOption::PackageModel, 0,
qApp, SLOT(packageModel()));
MenuWrapper* displayMenu = addMenu("Display");
{
MenuWrapper* displayModeMenu = addMenu(MenuOption::OutputMenu);
QActionGroup* displayModeGroup = new QActionGroup(displayModeMenu);
displayModeGroup->setExclusive(true);
}
MenuWrapper* avatarMenu = addMenu("Avatar");
QObject* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
MenuWrapper* inputModeMenu = addMenu(MenuOption::InputMenu);
QActionGroup* inputModeGroup = new QActionGroup(inputModeMenu);
inputModeGroup->setExclusive(false);
MenuWrapper* avatarSizeMenu = avatarMenu->addMenu("Size");
addActionToQMenuAndActionHash(avatarSizeMenu,
MenuOption::IncreaseAvatarSize,
@ -242,26 +252,16 @@ Menu::Menu() {
SLOT(resetSize()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::KeyboardMotorControl,
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehavior()));
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehaviorFromMenu()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
avatar, SLOT(updateMotionBehavior()));
avatar, SLOT(updateMotionBehaviorFromMenu()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar, SLOT(updateMotionBehavior()));
MenuWrapper* viewMenu = addMenu("View");
addCheckableActionToQMenuAndActionHash(viewMenu,
MenuOption::Fullscreen,
#ifdef Q_OS_MAC
Qt::CTRL | Qt::META | Qt::Key_F,
#else
Qt::CTRL | Qt::Key_F,
#endif
false,
qApp,
SLOT(setFullscreen(bool)));
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
MenuWrapper* cameraModeMenu = viewMenu->addMenu("Camera Mode");
QActionGroup* cameraModeGroup = new QActionGroup(cameraModeMenu);
@ -289,28 +289,11 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::CenterPlayerInView,
0, false, qApp, SLOT(rotationModeChanged()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::HMDTools,
#ifdef Q_OS_MAC
Qt::META | Qt::Key_H,
#else
Qt::CTRL | Qt::Key_H,
#endif
false,
dialogsManager.data(),
SLOT(hmdTools(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::EnableVRMode, 0,
false,
qApp,
SLOT(setEnableVRMode(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Enable3DTVMode, 0,
false,
qApp,
SLOT(setEnable3DTVMode(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
avatar, SLOT(updateStandingHMDModeFromMenu()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log,
Qt::CTRL | Qt::SHIFT | Qt::Key_L,
@ -346,12 +329,6 @@ Menu::Menu() {
ambientLightGroup->addAction(addCheckableActionToQMenuAndActionHash(ambientLightMenu, MenuOption::RenderAmbientLight8, 0, false));
ambientLightGroup->addAction(addCheckableActionToQMenuAndActionHash(ambientLightMenu, MenuOption::RenderAmbientLight9, 0, false));
MenuWrapper* shadowMenu = renderOptionsMenu->addMenu("Shadows");
QActionGroup* shadowGroup = new QActionGroup(shadowMenu);
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, "None", 0, true));
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::SimpleShadows, 0, false));
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::CascadedShadows, 0, false));
{
MenuWrapper* framerateMenu = renderOptionsMenu->addMenu(MenuOption::RenderTargetFramerate);
QActionGroup* framerateGroup = new QActionGroup(framerateMenu);
@ -438,6 +415,23 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
#endif
#ifdef HAVE_IVIEWHMD
MenuWrapper* eyeTrackingMenu = avatarDebugMenu->addMenu("Eye Tracking");
addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SMIEyeTracking, 0, false,
qApp, SLOT(setActiveEyeTracker()));
{
MenuWrapper* calibrateEyeTrackingMenu = eyeTrackingMenu->addMenu("Calibrate");
addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::OnePointCalibration, 0,
qApp, SLOT(calibrateEyeTracker1Point()));
addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::ThreePointCalibration, 0,
qApp, SLOT(calibrateEyeTracker3Points()));
addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::FivePointCalibration, 0,
qApp, SLOT(calibrateEyeTracker5Points()));
}
addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SimulateEyeTracking, 0, false,
qApp, SLOT(setActiveEyeTracker()));
#endif
auto avatarManager = DependencyManager::get<AvatarManager>();
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
avatarManager.data(), SLOT(setShouldShowReceiveStats(bool)));
@ -446,8 +440,10 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderHeadCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtTargets, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::FixGaze, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
MenuOption::Connexion,
0, false,
@ -459,30 +455,11 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHands, 0, true);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::HandMouseInput, 0, true);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LowVelocityFilter, 0, true,
qApp, SLOT(setLowVelocityFilter(bool)));
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false);
MenuWrapper* sixenseOptionsMenu = handOptionsMenu->addMenu("Sixense");
#ifdef __APPLE__
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
MenuOption::SixenseEnabled,
0, false,
&SixenseManager::getInstance(),
SLOT(toggleSixense(bool)));
#endif
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
MenuOption::FilterSixense,
0,
true,
&SixenseManager::getInstance(),
SLOT(setFilter(bool)));
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
MenuOption::LowVelocityFilter,
0,
true,
qApp,
SLOT(setLowVelocityFilter(bool)));
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
MenuWrapper* leapOptionsMenu = handOptionsMenu->addMenu("Leap Motion");
addCheckableActionToQMenuAndActionHash(leapOptionsMenu, MenuOption::LeapMotionOnHMD, 0, false);

View file

@ -80,6 +80,13 @@ public:
const QKeySequence& shortcut = 0,
QAction::MenuRole role = QAction::NoRole,
int menuItemLocation = UNSPECIFIED_POSITION);
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
const QString& actionName,
const QKeySequence& shortcut = 0,
const bool checked = false,
const QObject* receiver = NULL,
const char* member = NULL,
int menuItemLocation = UNSPECIFIED_POSITION);
void removeAction(MenuWrapper* menu, const QString& actionName);
@ -109,14 +116,6 @@ private:
void addDisabledActionAndSeparator(MenuWrapper* destinationMenu, const QString& actionName,
int menuItemLocation = UNSPECIFIED_POSITION);
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
const QString& actionName,
const QKeySequence& shortcut = 0,
const bool checked = false,
const QObject* receiver = NULL,
const char* member = NULL,
int menuItemLocation = UNSPECIFIED_POSITION);
QAction* getActionFromName(const QString& menuName, MenuWrapper* menu);
MenuWrapper* getSubMenuFromName(const QString& menuName, MenuWrapper* menu);
MenuWrapper* getMenuParent(const QString& menuName, QString& finalMenuPart);
@ -154,7 +153,6 @@ namespace MenuOption {
const QString BlueSpeechSphere = "Blue Sphere While Speaking";
const QString BookmarkLocation = "Bookmark Location";
const QString Bookmarks = "Bookmarks";
const QString CascadedShadows = "Cascaded";
const QString CachesSize = "RAM Caches Size";
const QString CalibrateCamera = "Calibrate Camera";
const QString CenterPlayerInView = "Center Player In View";
@ -187,22 +185,23 @@ namespace MenuOption {
const QString EditEntitiesHelp = "Edit Entities Help...";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString EnableCharacterController = "Enable avatar collisions";
const QString EnableVRMode = "Enable VR Mode";
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
const QString ExpandPaintGLTiming = "Expand /paintGL";
const QString ExpandUpdateTiming = "Expand /update";
const QString Faceshift = "Faceshift";
const QString FilterSixense = "Smooth Sixense Movement";
const QString FirstPerson = "First Person";
const QString FivePointCalibration = "5 Point Calibration";
const QString FixGaze = "Fix Gaze (no saccade)";
const QString Forward = "Forward";
const QString FrameTimer = "Show Timer";
const QString Fullscreen = "Fullscreen";
const QString FullscreenMirror = "Fullscreen Mirror";
const QString HMDTools = "HMD Tools";
const QString GlowWhenSpeaking = "Glow When Speaking";
const QString HandMouseInput = "Enable Hand Mouse Input";
const QString IncreaseAvatarSize = "Increase Avatar Size";
const QString IndependentMode = "Independent Mode";
const QString InputMenu = "Avatar>Input Devices";
const QString KeyboardMotorControl = "Enable Keyboard Motor Control";
const QString LeapMotionOnHMD = "Leap Motion on HMD";
const QString LoadScript = "Open and Run Script File...";
@ -220,7 +219,9 @@ namespace MenuOption {
const QString NamesAboveHeads = "Names Above Heads";
const QString NoFaceTracking = "None";
const QString OctreeStats = "Entity Statistics";
const QString OnePointCalibration = "1 Point Calibration";
const QString OnlyDisplayTopTen = "Only Display Top Ten";
const QString OutputMenu = "Display>Mode";
const QString PackageModel = "Package Model...";
const QString Pair = "Pair";
const QString PhysicsShowOwned = "Highlight Simulation Ownership";
@ -233,6 +234,7 @@ namespace MenuOption {
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
const QString RenderLookAtTargets = "Show Look-at Targets";
const QString RenderLookAtVectors = "Show Look-at Vectors";
const QString RenderSkeletonCollisionShapes = "Show Skeleton Collision Shapes";
const QString RenderTargetFramerate = "Framerate";
@ -271,15 +273,16 @@ namespace MenuOption {
const QString ShowIKConstraints = "Show IK Constraints";
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
const QString SimpleShadows = "Simple";
const QString SixenseEnabled = "Enable Hydra Support";
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
const QString SimulateEyeTracking = "Simulate";
const QString SMIEyeTracking = "SMI Eye Tracking";
const QString Stars = "Stars";
const QString Stats = "Stats";
const QString StopAllScripts = "Stop All Scripts";
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
const QString TestPing = "Test Ping";
const QString ThirdPerson = "Third Person";
const QString ThreePointCalibration = "3 Point Calibration";
const QString ThrottleFPSIfNotFocus = "Throttle FPS If Not Focus";
const QString ToolWindow = "Tool Window";
const QString TransmitterDrive = "Transmitter Drive";

View file

@ -12,8 +12,6 @@
#include <QStyle>
#include <QStyleOptionTitleBar>
#include "GLCanvas.h"
#include "UIUtil.h"
int UIUtil::getWindowTitleBarHeight(const QWidget* window) {

View file

@ -137,7 +137,7 @@ void AudioScope::render(RenderArgs* renderArgs, int width, int height) {
batch.setProjectionTransform(legacyProjection);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch._glLineWidth(1.0f); // default
geometryCache->renderQuad(batch, x, y, w, h, backgroundColor, _audioScopeBackground);
geometryCache->renderGrid(batch, x, y, w, h, gridRows, gridCols, gridColor, _audioScopeGrid);
renderLineStrip(batch, _inputID, inputColor, x, y, _samplesPerScope, _scopeInputOffset, _scopeInput);

View file

@ -69,6 +69,8 @@ namespace render {
auto avatarPtr = static_pointer_cast<Avatar>(avatar);
bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors);
avatarPtr->setDisplayingLookatVectors(renderLookAtVectors);
bool renderLookAtTarget = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtTargets);
avatarPtr->setDisplayingLookatTarget(renderLookAtTarget);
if (avatarPtr->isInitialized() && args) {
avatarPtr->render(args, Application::getInstance()->getCamera()->getPosition());
@ -245,7 +247,7 @@ void Avatar::simulate(float deltaTime) {
}
void Avatar::slamPosition(const glm::vec3& newPosition) {
AvatarData::setPosition(newPosition);
setPosition(newPosition);
_positionDeltaAccumulator = glm::vec3(0.0f);
_velocity = glm::vec3(0.0f);
_lastVelocity = glm::vec3(0.0f);
@ -601,7 +603,9 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, floa
getHand()->render(renderArgs, false);
}
getHead()->render(renderArgs, 1.0f, renderFrustum);
getHead()->renderLookAts(renderArgs);
}
bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
@ -717,6 +721,29 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
// Compute correct scale to apply
float scale = DESIRED_HIGHT_ON_SCREEN / (fontSize * pixelHeight) * devicePixelRatio;
#ifdef DEBUG
// TODO: Temporary logging to track cause of invalid scale vale; remove once cause has been fixed.
if (scale == 0.0f || glm::isnan(scale) || glm::isinf(scale)) {
if (scale == 0.0f) {
qDebug() << "ASSERT because scale == 0.0f";
}
if (glm::isnan(scale)) {
qDebug() << "ASSERT because isnan(scale)";
}
if (glm::isinf(scale)) {
qDebug() << "ASSERT because isinf(scale)";
}
qDebug() << "windowSizeY =" << windowSizeY;
qDebug() << "p1.y =" << p1.y;
qDebug() << "p1.w =" << p1.w;
qDebug() << "p0.y =" << p0.y;
qDebug() << "p0.w =" << p0.w;
qDebug() << "qApp->getDevicePixelRatio() =" << qApp->getDevicePixelRatio();
qDebug() << "fontSize =" << fontSize;
qDebug() << "pixelHeight =" << pixelHeight;
qDebug() << "devicePixelRatio =" << devicePixelRatio;
}
#endif
// Compute pixel alignment offset
float clipToPix = 0.5f * windowSizeY / p1.w; // Got from clip to pixel coordinates

View file

@ -91,6 +91,7 @@ public:
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
void setDisplayingLookatTarget(bool displayingLookatTarget) { getHead()->setRenderLookatTarget(displayingLookatTarget); }
void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
bool getIsLookAtTarget() const { return _isLookAtTarget; }
//getters

View file

@ -279,7 +279,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
const QString& collisionSoundURL = myAvatar->getCollisionSoundURL();
if (!collisionSoundURL.isEmpty()) {
const float velocityChange = glm::length(collision.velocityChange);
const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01;
const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01f;
const bool isSound = (collision.type == CONTACT_EVENT_TYPE_START) && (velocityChange > MIN_AVATAR_COLLISION_ACCELERATION);
if (!isSound) {

View file

@ -56,12 +56,12 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
}
}
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, int index) {
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const JointState& state, int index) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() *
glm::translate(_rig->getJointDefaultTranslationInConstrainedFrame(index)) *
joint.preTransform * glm::mat4_cast(joint.preRotation)));
state.getPreTransform() * glm::mat4_cast(state.getPreRotation())));
glm::vec3 pitchYawRoll = safeEulerAngles(_owningHead->getFinalOrientationInLocalFrame());
glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
_owningHead->getTorsoTwist(),
@ -71,15 +71,15 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2]))
* glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1]))
* glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0]))
* joint.rotation, DEFAULT_PRIORITY);
* state.getDefaultRotation(), DEFAULT_PRIORITY);
}
void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, int index) {
void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentState, const JointState& state, int index) {
// likewise with the eye joints
// NOTE: at the moment we do the math in the world-frame, hence the inverse transform is more complex than usual.
glm::mat4 inverse = glm::inverse(glm::mat4_cast(model->getRotation()) * parentState.getTransform() *
glm::translate(_rig->getJointDefaultTranslationInConstrainedFrame(index)) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
state.getPreTransform() * glm::mat4_cast(state.getPreRotation() * state.getDefaultRotation()));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAtDelta = _owningHead->getCorrectedLookAtPosition() - model->getTranslation();
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(lookAtDelta + glm::length(lookAtDelta) * _owningHead->getSaccade(), 1.0f));
@ -87,22 +87,22 @@ void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentSta
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
_rig->setJointRotationInConstrainedFrame(index, glm::angleAxis(glm::clamp(glm::angle(between),
-MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
joint.rotation, DEFAULT_PRIORITY);
state.getDefaultRotation(), DEFAULT_PRIORITY);
}
void FaceModel::maybeUpdateNeckAndEyeRotation(int index) {
const JointState& state = _rig->getJointState(index);
const FBXJoint& joint = state.getFBXJoint();
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const int parentIndex = state.getParentIndex();
// guard against out-of-bounds access to _jointStates
if (joint.parentIndex != -1 && joint.parentIndex >= 0 && joint.parentIndex < _rig->getJointStateCount()) {
const JointState& parentState = _rig->getJointState(joint.parentIndex);
if (parentIndex != -1 && parentIndex >= 0 && parentIndex < _rig->getJointStateCount()) {
const JointState& parentState = _rig->getJointState(parentIndex);
if (index == geometry.neckJointIndex) {
maybeUpdateNeckRotation(parentState, joint, index);
maybeUpdateNeckRotation(parentState, state, index);
} else if (index == geometry.leftEyeJointIndex || index == geometry.rightEyeJointIndex) {
maybeUpdateEyeRotation(this, parentState, joint, index);
maybeUpdateEyeRotation(this, parentState, state, index);
}
}
}

View file

@ -26,8 +26,8 @@ public:
virtual void simulate(float deltaTime, bool fullUpdate = true);
void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, int index);
void maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, int index);
void maybeUpdateNeckRotation(const JointState& parentState, const JointState& state, int index);
void maybeUpdateEyeRotation(Model* model, const JointState& parentState, const JointState& state, int index);
void maybeUpdateNeckAndEyeRotation(int index);
/// Retrieve the positions of up to two eye meshes.

View file

@ -17,11 +17,13 @@
#include "Application.h"
#include "Avatar.h"
#include "DependencyManager.h"
#include "GeometryUtil.h"
#include "Head.h"
#include "Menu.h"
#include "Util.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include "AvatarRig.h"
@ -44,6 +46,7 @@ Head::Head(Avatar* owningAvatar) :
_mouth3(0.0f),
_mouth4(0.0f),
_renderLookatVectors(false),
_renderLookatTarget(false),
_saccade(0.0f, 0.0f, 0.0f),
_saccadeTarget(0.0f, 0.0f, 0.0f),
_leftEyeBlinkVelocity(0.0f),
@ -116,29 +119,40 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
applyEyelidOffset(getFinalOrientationInWorldFrame());
}
}
auto eyeTracker = DependencyManager::get<EyeTracker>();
_isEyeTrackerConnected = eyeTracker->isTracking();
}
if (!myAvatar->getStandingHMDSensorMode()) {
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
}
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
}
if (!(_isFaceTrackerConnected || billboard)) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
if (!_isEyeTrackerConnected) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
} else {
_saccade = glm::vec3();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
@ -215,6 +229,9 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
} else {
_saccade = glm::vec3();
}
if (Menu::getInstance()->isOptionChecked(MenuOption::FixGaze)) { // if debug menu turns off, use no saccade
_saccade = glm::vec3();
}
if (!isMine) {
_faceModel.setLODDistance(static_cast<Avatar*>(_owningAvatar)->getLODDistance());
@ -260,7 +277,7 @@ void Head::calculateMouthShapes() {
void Head::applyEyelidOffset(glm::quat headOrientation) {
// Adjusts the eyelid blendshape coefficients so that the eyelid follows the iris as the head pitches.
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FRONT, getCorrectedLookAtPosition() - _eyePosition);
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FRONT, getLookAtPosition() - _eyePosition);
eyeRotation = eyeRotation * glm::angleAxis(safeEulerAngles(headOrientation).y, IDENTITY_UP); // Rotation w.r.t. head
float eyePitch = safeEulerAngles(eyeRotation).x;
@ -297,8 +314,18 @@ void Head::relaxLean(float deltaTime) {
}
void Head::render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum) {
}
void Head::renderLookAts(RenderArgs* renderArgs) {
renderLookAts(renderArgs, _leftEyePosition, _rightEyePosition);
}
void Head::renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition) {
if (_renderLookatVectors) {
renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
renderLookatVectors(renderArgs, leftEyePosition, rightEyePosition, getCorrectedLookAtPosition());
}
if (_renderLookatTarget) {
renderLookatTarget(renderArgs, getCorrectedLookAtPosition());
}
}
@ -317,6 +344,20 @@ glm::quat Head::getFinalOrientationInLocalFrame() const {
return glm::quat(glm::radians(glm::vec3(getFinalPitch(), getFinalYaw(), getFinalRoll() )));
}
// Everyone else's head keeps track of a lookAtPosition that everybody sees the same, and refers to where that head
// is looking in model space -- e.g., at someone's eyeball, or between their eyes, or mouth, etc. Everyon's Interface
// will have the same value for the lookAtPosition of any given head.
//
// Everyone else's head also keeps track of a correctedLookAtPosition that may be different for the same head within
// different Interfaces. If that head is not looking at me, the correctedLookAtPosition is the same as the lookAtPosition.
// However, if that head is looking at me, then I will attempt to adjust the lookAtPosition by the difference between
// my (singular) eye position and my actual camera position. This adjustment is used on their eyeballs during rendering
// (and also on any lookAt vector display for that head, during rendering). Note that:
// 1. this adjustment can be made directly to the other head's eyeball joints, because we won't be send their joint information to others.
// 2. the corrected position is a separate ivar, so the common/uncorrected value is still available
//
// There is a pun here: The two lookAtPositions will always be the same for my own avatar in my own Interface, because I
// will not be looking at myself. (Even in a mirror, I will be looking at the camera.)
glm::vec3 Head::getCorrectedLookAtPosition() {
if (isLookingAtMe()) {
return _correctedLookAtPosition;
@ -337,21 +378,27 @@ void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
bool Head::isLookingAtMe() {
// Allow for outages such as may be encountered during avatar movement
quint64 now = usecTimestampNow();
const quint64 LOOKING_AT_ME_GAP_ALLOWED = 1000000; // microseconds
const quint64 LOOKING_AT_ME_GAP_ALLOWED = (5 * 1000 * 1000) / 60; // n frames, in microseconds
return _isLookingAtMe || (now - _wasLastLookingAtMe) < LOOKING_AT_ME_GAP_ALLOWED;
}
glm::quat Head::getCameraOrientation() const {
// NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how
// to change the driving direction while in Oculus mode. It is used to support driving toward where you're
// to change the driving direction while in Oculus mode. It is used to support driving toward where you're
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
// always the same.
if (qApp->isHMDMode()) {
return getOrientation();
MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
} else {
return getOrientation();
}
} else {
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
}
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
}
glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
@ -397,7 +444,7 @@ void Head::renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition
auto& batch = *renderArgs->_batch;
auto transform = Transform{};
batch.setModelTransform(transform);
batch._glLineWidth(2.0f);
// FIXME: THe line width of 2.0f is not supported anymore, we ll need a workaround
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
deferredLighting->bindSimpleProgram(batch);
@ -409,4 +456,17 @@ void Head::renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition
geometryCache->renderLine(batch, rightEyePosition, lookatPosition, startColor, endColor, _rightEyeLookAtID);
}
void Head::renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition) {
auto& batch = *renderArgs->_batch;
auto transform = Transform{};
transform.setTranslation(lookatPosition);
batch.setModelTransform(transform);
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
deferredLighting->bindSimpleProgram(batch);
auto geometryCache = DependencyManager::get<GeometryCache>();
const float LOOK_AT_TARGET_RADIUS = 0.075f;
const glm::vec4 LOOK_AT_TARGET_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
geometryCache->renderSphere(batch, LOOK_AT_TARGET_RADIUS, 15, 15, LOOK_AT_TARGET_COLOR, true);
}

View file

@ -39,6 +39,9 @@ public:
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
void setRenderLookatTarget(bool onOff) { _renderLookatTarget = onOff; }
void renderLookAts(RenderArgs* renderArgs);
void renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition);
/// \return orientationBase+Delta
glm::quat getFinalOrientationInLocalFrame() const;
@ -123,6 +126,7 @@ private:
float _mouth3;
float _mouth4;
bool _renderLookatVectors;
bool _renderLookatTarget;
glm::vec3 _saccade;
glm::vec3 _saccadeTarget;
float _leftEyeBlinkVelocity;
@ -151,6 +155,7 @@ private:
// private methods
void renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition);
void renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition);
void calculateMouthShapes();
void applyEyelidOffset(glm::quat headOrientation);

View file

@ -107,7 +107,7 @@ JointReferential::JointReferential(Referential* referential, EntityTree* tree, A
EntityItemPointer item = _tree->findEntityByID(_entityID);
const Model* model = getModel(item);
if (!isValid() || model == NULL || _jointIndex >= (uint32_t)(model->getJointStateCount())) {
if (isValid() && model != NULL && _jointIndex < (uint32_t)(model->getJointStateCount())) {
_lastRefDimension = item->getDimensions();
model->getJointRotationInWorldFrame(_jointIndex, _refRotation);
model->getJointPositionInWorldFrame(_jointIndex, _refPosition);

View file

@ -24,6 +24,7 @@
#include <AnimationHandle.h>
#include <AudioClient.h>
#include <DependencyManager.h>
#include <display-plugins/DisplayPlugin.h>
#include <GeometryUtil.h>
#include <NodeList.h>
#include <udt/PacketHeaders.h>
@ -34,7 +35,6 @@
#include <UserActivityLogger.h>
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "Application.h"
#include "AvatarManager.h"
@ -47,9 +47,6 @@
#include "Util.h"
#include "InterfaceLogging.h"
#include "gpu/GLBackend.h"
using namespace std;
const glm::vec3 DEFAULT_UP_DIRECTION(0.0f, 1.0f, 0.0f);
@ -100,6 +97,15 @@ MyAvatar::MyAvatar(RigPointer rig) :
_eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_hmdSensorMatrix(),
_hmdSensorOrientation(),
_hmdSensorPosition(),
_bodySensorMatrix(),
_sensorToWorldMatrix(),
_standingHMDSensorMode(false),
_goToPending(false),
_goToPosition(),
_goToOrientation(),
_rig(rig),
_prevShouldDrawHead(true)
{
@ -145,6 +151,13 @@ void MyAvatar::reset() {
}
void MyAvatar::update(float deltaTime) {
if (_goToPending) {
setPosition(_goToPosition);
setOrientation(_goToOrientation);
_goToPending = false;
}
if (_referential) {
_referential->update();
}
@ -152,6 +165,7 @@ void MyAvatar::update(float deltaTime) {
Head* head = getHead();
head->relaxLean(deltaTime);
updateFromTrackers(deltaTime);
// Get audio loudness data from audio input device
auto audio = DependencyManager::get<AudioClient>();
head->setAudioLoudness(audio->getLastInputLoudness());
@ -231,6 +245,41 @@ void MyAvatar::simulate(float deltaTime) {
maybeUpdateBillboard();
}
glm::mat4 MyAvatar::getSensorToWorldMatrix() const {
if (getStandingHMDSensorMode()) {
return _sensorToWorldMatrix;
} else {
return createMatFromQuatAndPos(getWorldAlignedOrientation(), getDefaultEyePosition());
}
}
// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
// update the sensorMatrices based on the new hmd pose
_hmdSensorMatrix = hmdSensorMatrix;
_hmdSensorPosition = extractTranslation(hmdSensorMatrix);
_hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
_bodySensorMatrix = deriveBodyFromHMDSensor();
if (getStandingHMDSensorMode()) {
// set the body position/orientation to reflect motion due to the head.
auto worldMat = _sensorToWorldMatrix * _bodySensorMatrix;
setPosition(extractTranslation(worldMat));
setOrientation(glm::quat_cast(worldMat));
}
}
// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
void MyAvatar::updateSensorToWorldMatrix() {
// update the sensor mat so that the body position will end up in the desired
// position when driven from the head.
glm::mat4 desiredMat = createMatFromQuatAndPos(getOrientation(), getPosition());
_sensorToWorldMatrix = desiredMat * glm::inverse(_bodySensorMatrix);
}
// Update avatar head rotation with sensor data
void MyAvatar::updateFromTrackers(float deltaTime) {
glm::vec3 estimatedPosition, estimatedRotation;
@ -245,7 +294,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
bool inFacetracker = tracker && !tracker->isMuted();
if (inHmd) {
estimatedPosition = qApp->getHeadPosition();
estimatedPosition = extractTranslation(getHMDSensorMatrix());
estimatedPosition.x *= -1.0f;
_trackedHeadPosition = estimatedPosition;
@ -289,15 +338,18 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
Head* head = getHead();
if (inHmd || isPlaying()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
if (!getStandingHMDSensorMode()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
head->setDeltaRoll(estimatedRotation.z);
}
} else {
float magnifyFieldOfView = qApp->getFieldOfView() /
_realWorldFieldOfView.get();
head->setDeltaPitch(estimatedRotation.x * magnifyFieldOfView);
head->setDeltaYaw(estimatedRotation.y * magnifyFieldOfView);
head->setDeltaRoll(estimatedRotation.z);
}
head->setDeltaRoll(estimatedRotation.z);
// Update torso lean distance based on accelerometer data
const float TORSO_LENGTH = 0.5f;
@ -312,10 +364,12 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
relativePosition.x = -relativePosition.x;
}
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
if (!(inHmd && getStandingHMDSensorMode())) {
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
}
}
@ -346,6 +400,22 @@ glm::vec3 MyAvatar::getLeftPalmPosition() {
return leftHandPosition;
}
glm::vec3 MyAvatar::getLeftPalmVelocity() {
const PalmData* palm = getHand()->getPalm(LEFT_HAND_INDEX);
if (palm != NULL) {
return palm->getVelocity();
}
return glm::vec3(0.0f);
}
glm::vec3 MyAvatar::getLeftPalmAngularVelocity() {
const PalmData* palm = getHand()->getPalm(LEFT_HAND_INDEX);
if (palm != NULL) {
return palm->getRawAngularVelocity();
}
return glm::vec3(0.0f);
}
glm::quat MyAvatar::getLeftPalmRotation() {
glm::quat leftRotation;
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getLeftHandJointIndex(), leftRotation);
@ -361,6 +431,22 @@ glm::vec3 MyAvatar::getRightPalmPosition() {
return rightHandPosition;
}
glm::vec3 MyAvatar::getRightPalmVelocity() {
const PalmData* palm = getHand()->getPalm(RIGHT_HAND_INDEX);
if (palm != NULL) {
return palm->getVelocity();
}
return glm::vec3(0.0f);
}
glm::vec3 MyAvatar::getRightPalmAngularVelocity() {
const PalmData* palm = getHand()->getPalm(RIGHT_HAND_INDEX);
if (palm != NULL) {
return palm->getRawAngularVelocity();
}
return glm::vec3(0.0f);
}
glm::quat MyAvatar::getRightPalmRotation() {
glm::quat rightRotation;
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getRightHandJointIndex(), rightRotation);
@ -623,6 +709,12 @@ float loadSetting(QSettings& settings, const char* name, float defaultValue) {
return value;
}
void MyAvatar::setEnableRigAnimations(bool isEnabled) {
Settings settings;
settings.setValue("enableRig", isEnabled);
_rig->setEnableRig(isEnabled);
}
void MyAvatar::loadData() {
Settings settings;
settings.beginGroup("Avatar");
@ -840,19 +932,13 @@ void MyAvatar::updateLookAtTargetAvatar() {
const float HUMAN_EYE_SEPARATION = 0.065f;
float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
if (Application::getInstance()->isHMDMode()) {
//avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getCamera()->getPosition()
// + OculusManager::getMidEyePosition() + gazeOffset);
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ OculusManager::getMidEyePosition() + gazeOffset);
} else {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
}
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
} else {
avatar->getHead()->clearCorrectedLookAtPosition();
}
} else {
avatar->getHead()->clearCorrectedLookAtPosition();
}
}
auto avatarPointer = _lookAtTargetAvatar.lock();
@ -886,7 +972,7 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
}
glm::vec3 MyAvatar::getDefaultEyePosition() const {
return _position + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
return getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
}
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
@ -1160,6 +1246,24 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
if (shouldRenderHead(renderArgs)) {
getHead()->render(renderArgs, 1.0f, renderFrustum);
}
if (qApp->isHMDMode()) {
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
glm::mat4 leftEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Left);
glm::vec3 leftEyePosition = glm::vec3(leftEyePose[3]);
glm::mat4 rightEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Right);
glm::vec3 rightEyePosition = glm::vec3(rightEyePose[3]);
glm::mat4 headPose = Application::getInstance()->getActiveDisplayPlugin()->getHeadPose();
glm::vec3 headPosition = glm::vec3(headPose[3]);
getHead()->renderLookAts(renderArgs,
cameraPosition + getOrientation() * (leftEyePosition - headPosition),
cameraPosition + getOrientation() * (rightEyePosition - headPosition));
} else {
getHead()->renderLookAts(renderArgs);
}
getHand()->render(renderArgs, true);
}
@ -1260,10 +1364,13 @@ void MyAvatar::updateOrientation(float deltaTime) {
glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta * deltaTime, 0.0f))));
if (qApp->isHMDMode()) {
glm::quat orientation = glm::quat_cast(getSensorToWorldMatrix()) * getHMDSensorOrientation();
glm::quat bodyOrientation = getWorldBodyOrientation();
glm::quat localOrientation = glm::inverse(bodyOrientation) * orientation;
// these angles will be in radians
glm::quat orientation = qApp->getHeadOrientation();
// ... so they need to be converted to degrees before we do math...
glm::vec3 euler = glm::eulerAngles(orientation) * DEGREES_PER_RADIAN;
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
//Invert yaw and roll when in mirror mode
if (Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
@ -1326,6 +1433,8 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
glm::vec3 direction = front + right + up;
float directionLength = glm::length(direction);
//qCDebug(interfaceapp, "direction = (%.5f, %.5f, %.5f)", direction.x, direction.y, direction.z);
// Compute motor magnitude
if (directionLength > EPSILON) {
direction /= directionLength;
@ -1430,7 +1539,6 @@ void MyAvatar::updatePosition(float deltaTime) {
// update _moving flag based on speed
const float MOVING_SPEED_THRESHOLD = 0.01f;
_moving = speed > MOVING_SPEED_THRESHOLD;
}
void MyAvatar::updateCollisionSound(const glm::vec3 &penetration, float deltaTime, float frequency) {
@ -1525,32 +1633,31 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - moving to " << newPosition.x << ", "
<< newPosition.y << ", " << newPosition.z;
glm::vec3 shiftedPosition = newPosition;
_goToPending = true;
_goToPosition = newPosition;
_goToOrientation = getOrientation();
if (hasOrientation) {
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
// orient the user to face the target
glm::quat quatOrientation = newOrientation;
if (shouldFaceLocation) {
quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
_goToPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
}
setOrientation(quatOrientation);
_goToOrientation = quatOrientation;
}
slamPosition(shiftedPosition);
emit transformChanged();
}
void MyAvatar::updateMotionBehavior() {
void MyAvatar::updateMotionBehaviorFromMenu() {
Menu* menu = Menu::getInstance();
if (menu->isOptionChecked(MenuOption::KeyboardMotorControl)) {
_motionBehaviors |= AVATAR_MOTION_KEYBOARD_MOTOR_ENABLED;
@ -1565,6 +1672,11 @@ void MyAvatar::updateMotionBehavior() {
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
}
void MyAvatar::updateStandingHMDModeFromMenu() {
Menu* menu = Menu::getInstance();
_standingHMDSensorMode = menu->isOptionChecked(MenuOption::StandingHMDSensorMode);
}
//Renders sixense laser pointers for UI selection with controllers
void MyAvatar::renderLaserPointers(gpu::Batch& batch) {
const float PALM_TIP_ROD_RADIUS = 0.002f;
@ -1616,3 +1728,37 @@ void MyAvatar::relayDriveKeysToCharacterController() {
_characterController.jump();
}
}
glm::vec3 MyAvatar::getWorldBodyPosition() const {
return transformPoint(_sensorToWorldMatrix, extractTranslation(_bodySensorMatrix));
}
glm::quat MyAvatar::getWorldBodyOrientation() const {
return glm::quat_cast(_sensorToWorldMatrix * _bodySensorMatrix);
}
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
// HMD is in sensor space.
const glm::vec3 hmdPosition = getHMDSensorPosition();
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// In sensor space, figure out where the avatar body should be,
// by applying offsets from the avatar's neck & head joints.
vec3 localEyes = _skeletonModel.getDefaultEyeModelPosition();
vec3 localNeck(0.0f, 0.48f, 0.0f); // start with some kind of guess if the skeletonModel is not loaded yet.
_skeletonModel.getLocalNeckPosition(localNeck);
// apply simplistic head/neck model
// eyeToNeck offset is relative full HMD orientation.
// while neckToRoot offset is only relative to HMDs yaw.
glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;
// avatar facing is determined solely by hmd orientation.
return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
}

View file

@ -44,6 +44,20 @@ public:
void update(float deltaTime);
void preRender(RenderArgs* renderArgs);
const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
const glm::quat& getHMDSensorOrientation() const { return _hmdSensorOrientation; }
glm::mat4 getSensorToWorldMatrix() const;
// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix);
// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
void updateSensorToWorldMatrix();
void setLeanScale(float scale) { _leanScale = scale; }
void setRealWorldFieldOfView(float realWorldFov) { _realWorldFieldOfView.set(realWorldFov); }
@ -59,6 +73,7 @@ public:
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation as identified by a URL.
Q_INVOKABLE void stopAnimation(const QString& url);
@ -72,6 +87,7 @@ public:
Q_INVOKABLE AnimationDetails getAnimationDetailsByRole(const QString& role);
Q_INVOKABLE AnimationDetails getAnimationDetails(const QString& url);
void clearJointAnimationPriorities();
Q_INVOKABLE void setEnableRigAnimations(bool isEnabled);
// get/set avatar data
void saveData();
@ -147,6 +163,8 @@ public:
static const float ZOOM_MAX;
static const float ZOOM_DEFAULT;
bool getStandingHMDSensorMode() const { return _standingHMDSensorMode; }
public slots:
void increaseSize();
void decreaseSize();
@ -161,11 +179,16 @@ public slots:
glm::vec3 getThrust() { return _thrust; };
void setThrust(glm::vec3 newThrust) { _thrust = newThrust; }
void updateMotionBehavior();
void updateMotionBehaviorFromMenu();
void updateStandingHMDModeFromMenu();
glm::vec3 getLeftPalmPosition();
glm::vec3 getLeftPalmVelocity();
glm::vec3 getLeftPalmAngularVelocity();
glm::quat getLeftPalmRotation();
glm::vec3 getRightPalmPosition();
glm::vec3 getRightPalmVelocity();
glm::vec3 getRightPalmAngularVelocity();
glm::quat getRightPalmRotation();
void clearReferential();
@ -188,6 +211,8 @@ signals:
private:
glm::vec3 getWorldBodyPosition() const;
glm::quat getWorldBodyOrientation() const;
QByteArray toByteArray();
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
@ -223,6 +248,10 @@ private:
void setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visiblity);
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 deriveBodyFromHMDSensor() const;
glm::vec3 _gravity;
float _driveKeys[MAX_DRIVE_KEYS];
@ -280,6 +309,26 @@ private:
RigPointer _rig;
bool _prevShouldDrawHead;
// cache of the current HMD sensor position and orientation
// in sensor space.
glm::mat4 _hmdSensorMatrix;
glm::quat _hmdSensorOrientation;
glm::vec3 _hmdSensorPosition;
// cache of the current body position and orientation of the avatar's body,
// in sensor space.
glm::mat4 _bodySensorMatrix;
// used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
glm::mat4 _sensorToWorldMatrix;
bool _standingHMDSensorMode;
bool _goToPending;
glm::vec3 _goToPosition;
glm::quat _goToOrientation;
std::unordered_set<int> _headBoneSet;
};

View file

@ -97,31 +97,51 @@ void SkeletonModel::initJointStates(QVector<JointState> states) {
}
const float PALM_PRIORITY = DEFAULT_PRIORITY;
const float LEAN_PRIORITY = DEFAULT_PRIORITY;
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig->computeMotionAnimationState(deltaTime, _owningAvatar->getPosition(), _owningAvatar->getVelocity(), _owningAvatar->getOrientation());
if (_owningAvatar->isMyAvatar()) {
_rig->computeMotionAnimationState(deltaTime, _owningAvatar->getPosition(), _owningAvatar->getVelocity(), _owningAvatar->getOrientation());
}
Model::updateRig(deltaTime, parentTransform);
if (_owningAvatar->isMyAvatar()) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
Head* head = _owningAvatar->getHead();
Rig::HeadParameters params;
params.leanSideways = _owningAvatar->getHead()->getFinalLeanSideways();
params.leanForward = _owningAvatar->getHead()->getFinalLeanSideways();
params.torsoTwist = _owningAvatar->getHead()->getTorsoTwist();
params.localHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInLocalFrame();
params.worldHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInWorldFrame();
params.eyeLookAt = _owningAvatar->getHead()->getCorrectedLookAtPosition();
params.eyeSaccade = _owningAvatar->getHead()->getSaccade();
params.modelRotation = getRotation();
params.modelTranslation = getTranslation();
params.leanSideways = head->getFinalLeanSideways();
params.leanForward = head->getFinalLeanForward();
params.torsoTwist = head->getTorsoTwist();
params.localHeadOrientation = head->getFinalOrientationInLocalFrame();
params.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
params.eyeLookAt = head->getLookAtPosition();
params.eyeSaccade = head->getSaccade();
params.leanJointIndex = geometry.leanJointIndex;
params.neckJointIndex = geometry.neckJointIndex;
params.leftEyeJointIndex = geometry.leftEyeJointIndex;
params.rightEyeJointIndex = geometry.rightEyeJointIndex;
_rig->updateFromHeadParameters(params);
} else {
// This is a little more work than we really want.
//
// Other avatars joint, including their eyes, should already be set just like any other joints
// from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
//
// Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
// However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
// (They latch their looking at me position.) We will revisit that as priorities allow.
const FBXGeometry& geometry = _geometry->getFBXGeometry();
Head* head = _owningAvatar->getHead();
_rig->updateEyeJoints(geometry.leftEyeJointIndex, geometry.rightEyeJointIndex,
getTranslation(), getRotation(),
head->getFinalOrientationInWorldFrame(), head->getCorrectedLookAtPosition());
}
}
// Called by Avatar::simulate after it has set the joint states (fullUpdate true if changed),
// but just before head has been simulated.
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setTranslation(_owningAvatar->getSkeletonPosition());
static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
@ -264,7 +284,8 @@ void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const float BASE_DIRECTION_SIZE = 0.3f;
float directionSize = BASE_DIRECTION_SIZE * extractUniformScale(_scale);
batch._glLineWidth(3.0f);
// FIXME: THe line width of 3.0f is not supported anymore, we ll need a workaround
do {
const FBXJoint& joint = geometry.joints.at(jointIndex);
const JointState& jointState = _rig->getJointState(jointIndex);
@ -388,6 +409,10 @@ bool SkeletonModel::getNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
bool SkeletonModel::getLocalNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPosition(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const {
if (!isActive()) {
return false;
@ -400,7 +425,7 @@ bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckP
glm::quat worldFrameRotation;
bool success = getJointRotationInWorldFrame(parentIndex, worldFrameRotation);
if (success) {
neckParentRotation = worldFrameRotation * _rig->getJointState(parentIndex).getFBXJoint().inverseDefaultRotation;
neckParentRotation = worldFrameRotation * _rig->getJointState(parentIndex).getInverseDefaultRotation();
}
return success;
}
@ -476,18 +501,17 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) {
for (int i = 0; i < numStates; i++) {
// compute the default transform of this joint
const JointState& state = _rig->getJointState(i);
const FBXJoint& joint = state.getFBXJoint();
int parentIndex = joint.parentIndex;
int parentIndex = state.getParentIndex();
if (parentIndex == -1) {
transforms[i] = _rig->getJointTransform(i);
} else {
glm::quat modifiedRotation = joint.preRotation * joint.rotation * joint.postRotation;
transforms[i] = transforms[parentIndex] * glm::translate(joint.translation)
* joint.preTransform * glm::mat4_cast(modifiedRotation) * joint.postTransform;
glm::quat modifiedRotation = state.getPreRotation() * state.getDefaultRotation() * state.getPostRotation();
transforms[i] = transforms[parentIndex] * glm::translate(state.getTranslation())
* state.getPreTransform() * glm::mat4_cast(modifiedRotation) * state.getPostTransform();
}
// Each joint contributes a sphere at its position
glm::vec3 axis(joint.boneRadius);
glm::vec3 axis(state.getBoneRadius());
glm::vec3 jointPosition = extractTranslation(transforms[i]);
totalExtents.addPoint(jointPosition + axis);
totalExtents.addPoint(jointPosition - axis);

View file

@ -80,6 +80,8 @@ public:
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
bool getLocalNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint's parent from default orientation
/// \return whether or not the neck was found
bool getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const;

View file

@ -1,5 +1,5 @@
// 3DConnexion.h
// hifi
// 3DConnexionClient.h
// interface/src/devices
//
// Created by Marcel Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
@ -8,32 +8,33 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ConnexionClient_h
#define hifi_ConnexionClient_h
#ifndef hifi_3DConnexionClient_h
#define hifi_3DConnexionClient_h
#include <QObject>
#include <QLibrary>
#include <input-plugins/UserInputMapper.h>
#include <qobject.h>
#include <qlibrary.h>
#include "InterfaceLogging.h"
#include "Application.h"
#include "ui/UserInputMapper.h"
#ifndef HAVE_CONNEXIONCLIENT
#ifndef HAVE_3DCONNEXIONCLIENT
class ConnexionClient : public QObject {
Q_OBJECT
public:
static ConnexionClient& getInstance();
static void init() {};
static void destroy() {};
static bool Is3dmouseAttached() { return false; };
void init() {};
void destroy() {};
bool Is3dmouseAttached() { return false; };
public slots:
void toggleConnexion(bool shouldEnable) {};
};
#endif // NOT_HAVE_CONNEXIONCLIENT
#endif // NOT_HAVE_3DCONNEXIONCLIENT
#ifdef HAVE_CONNEXIONCLIENT
#ifdef HAVE_3DCONNEXIONCLIENT
// the windows connexion rawinput
#ifdef _WIN32
#ifdef Q_OS_WIN
#include "I3dMouseParams.h"
#include <QAbstractNativeEventFilter>
@ -45,7 +46,6 @@ public slots:
class MouseParameters : public I3dMouseParam {
public:
MouseParameters();
~MouseParameters();
// I3dmouseSensor interface
bool IsPanZoom() const;
@ -86,16 +86,14 @@ private:
class ConnexionClient : public QObject, public QAbstractNativeEventFilter {
Q_OBJECT
public:
ConnexionClient();
~ConnexionClient();
ConnexionClient() {};
static ConnexionClient& getInstance();
ConnexionClient* client;
static void init();
static void destroy();
static bool Is3dmouseAttached();
static ConnexionClient& getInstance();
void init();
void destroy();
bool Is3dmouseAttached();
ConnexionClient* client;
I3dMouseParam& MouseParams();
const I3dMouseParam& MouseParams() const;
@ -107,7 +105,7 @@ public:
virtual bool nativeEventFilter(const QByteArray& eventType, void* message, long* result) Q_DECL_OVERRIDE
{
MSG* msg = static_cast< MSG * >(message);
return ConnexionClient::RawInputEventFilter(message, result);
return RawInputEventFilter(message, result);
}
public slots:
@ -121,7 +119,7 @@ signals:
private:
bool InitializeRawInput(HWND hwndTarget);
static bool RawInputEventFilter(void* msg, long* result);
bool RawInputEventFilter(void* msg, long* result);
void OnRawInput(UINT nInputCode, HRAWINPUT hRawInput);
UINT GetRawInputBuffer(PRAWINPUT pData, PUINT pcbSize, UINT cbSizeHeader);
@ -166,16 +164,16 @@ class ConnexionClient : public QObject {
Q_OBJECT
public:
static ConnexionClient& getInstance();
static bool Is3dmouseAttached();
static void init();
static void destroy();
void init();
void destroy();
bool Is3dmouseAttached();
public slots:
void toggleConnexion(bool shouldEnable);
};
#endif // __APPLE__
#endif // HAVE_CONNEXIONCLIENT
#endif // HAVE_3DCONNEXIONCLIENT
// connnects to the userinputmapper
@ -241,4 +239,4 @@ protected:
AxisStateMap _axisStateMap;
};
#endif // defined(hifi_ConnexionClient_h)
#endif // defined(hifi_3DConnexionClient_h)

View file

@ -0,0 +1,305 @@
//
// EyeTracker.cpp
// interface/src/devices
//
// Created by David Rowe on 27 Jul 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "EyeTracker.h"
#include <QFuture>
#include <QMessageBox>
#include <QtConcurrent/QtConcurrentRun>
#include <SharedUtil.h>
#include "InterfaceLogging.h"
#include "OctreeConstants.h"
#ifdef HAVE_IVIEWHMD
char* HIGH_FIDELITY_EYE_TRACKER_CALIBRATION = "HighFidelityEyeTrackerCalibration";
#endif
#ifdef HAVE_IVIEWHMD
static void CALLBACK eyeTrackerCallback(smi_CallbackDataStruct* data) {
auto eyeTracker = DependencyManager::get<EyeTracker>();
if (eyeTracker) { // Guard against a few callbacks that continue to be received after smi_quit().
eyeTracker->processData(data);
}
}
#endif
EyeTracker::~EyeTracker() {
#ifdef HAVE_IVIEWHMD
if (_isStreaming) {
int result = smi_quit();
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error terminating tracking:" << smiReturnValueToString(result);
}
}
#endif
}
#ifdef HAVE_IVIEWHMD
void EyeTracker::processData(smi_CallbackDataStruct* data) {
_lastProcessDataTimestamp = usecTimestampNow();
if (!_isEnabled) {
return;
}
if (data->type == SMI_SIMPLE_GAZE_SAMPLE) {
// Calculate the intersections of the left and right eye look-at vectors with a vertical plane along the monocular
// gaze direction. Average these positions to give the look-at point.
// If the eyes are parallel or diverged, gaze at a distant look-at point calculated the same as for non eye tracking.
// Line-plane intersection: https://en.wikipedia.org/wiki/Line%E2%80%93plane_intersection
smi_SampleHMDStruct* sample = (smi_SampleHMDStruct*)data->result;
// The iViewHMD coordinate system has x and z axes reversed compared to Interface, i.e., wearing the HMD:
// - x is left
// - y is up
// - z is forwards
// Plane
smi_Vec3d point = sample->gazeBasePoint; // mm
smi_Vec3d direction = sample->gazeDirection;
glm::vec3 planePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
glm::vec3 planeNormal = glm::vec3(-direction.z, 0.0f, direction.x);
glm::vec3 monocularDirection = glm::vec3(-direction.x, direction.y, -direction.z);
// Left eye
point = sample->left.gazeBasePoint; // mm
direction = sample->left.gazeDirection;
glm::vec3 leftLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
glm::vec3 leftLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
// Right eye
point = sample->right.gazeBasePoint; // mm
direction = sample->right.gazeDirection;
glm::vec3 rightLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
glm::vec3 rightLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
// Plane - line dot products
float leftLinePlaneDotProduct = glm::dot(leftLineDirection, planeNormal);
float rightLinePlaneDotProduct = glm::dot(rightLineDirection, planeNormal);
// Gaze into distance if eyes are parallel or diverged; otherwise the look-at is the average of look-at points
glm::vec3 lookAtPosition;
if (abs(leftLinePlaneDotProduct) <= FLT_EPSILON || abs(rightLinePlaneDotProduct) <= FLT_EPSILON) {
lookAtPosition = monocularDirection * (float)TREE_SCALE;
} else {
float leftDistance = glm::dot(planePoint - leftLinePoint, planeNormal) / leftLinePlaneDotProduct;
float rightDistance = glm::dot(planePoint - rightLinePoint, planeNormal) / rightLinePlaneDotProduct;
if (leftDistance <= 0.0f || rightDistance <= 0.0f
|| leftDistance > (float)TREE_SCALE || rightDistance > (float)TREE_SCALE) {
lookAtPosition = monocularDirection * (float)TREE_SCALE;
} else {
glm::vec3 leftIntersectionPoint = leftLinePoint + leftDistance * leftLineDirection;
glm::vec3 rightIntersectionPoint = rightLinePoint + rightDistance * rightLineDirection;
lookAtPosition = (leftIntersectionPoint + rightIntersectionPoint) / 2.0f;
}
}
if (glm::isnan(lookAtPosition.x) || glm::isnan(lookAtPosition.y) || glm::isnan(lookAtPosition.z)) {
return;
}
_lookAtPosition = lookAtPosition;
}
}
#endif
void EyeTracker::init() {
if (_isInitialized) {
qCWarning(interfaceapp) << "Eye Tracker: Already initialized";
return;
}
#ifdef HAVE_IVIEWHMD
int result = smi_setCallback(eyeTrackerCallback);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error setting callback:" << smiReturnValueToString(result);
QMessageBox::warning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
} else {
_isInitialized = true;
}
connect(&_startStreamingWatcher, SIGNAL(finished()), this, SLOT(onStreamStarted()));
#endif
}
#ifdef HAVE_IVIEWHMD
int EyeTracker::startStreaming(bool simulate) {
return smi_startStreaming(simulate); // This call blocks execution.
}
#endif
#ifdef HAVE_IVIEWHMD
void EyeTracker::onStreamStarted() {
int result = _startStreamingWatcher.result();
_isStreaming = (result == SMI_RET_SUCCESS);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error starting streaming:" << smiReturnValueToString(result);
// Display error dialog unless SMI SDK has already displayed an error message.
if (result != SMI_ERROR_HMD_NOT_SUPPORTED) {
QMessageBox::warning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
}
} else {
qCDebug(interfaceapp) << "Eye Tracker: Started streaming";
}
if (_isStreaming) {
// Automatically load calibration if one has been saved.
QString availableCalibrations = QString(smi_getAvailableCalibrations());
if (availableCalibrations.contains(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION)) {
result = smi_loadCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error loading calibration:" << smiReturnValueToString(result);
QMessageBox::warning(nullptr, "Eye Tracker Error", "Error loading calibration"
+ smiReturnValueToString(result));
} else {
qCDebug(interfaceapp) << "Eye Tracker: Loaded calibration";
}
}
}
}
#endif
void EyeTracker::setEnabled(bool enabled, bool simulate) {
if (!_isInitialized) {
return;
}
#ifdef HAVE_IVIEWHMD
qCDebug(interfaceapp) << "Eye Tracker: Set enabled =" << enabled << ", simulate =" << simulate;
// There is no smi_stopStreaming() method and after an smi_quit(), streaming cannot be restarted (at least not for
// simulated data). So keep streaming once started in case tracking is re-enabled after stopping.
// Try to stop streaming if changing whether simulating or not.
if (enabled && _isStreaming && _isStreamSimulating != simulate) {
int result = smi_quit();
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error stopping streaming:" << smiReturnValueToString(result);
}
_isStreaming = false;
}
if (enabled && !_isStreaming) {
// Start SMI streaming in a separate thread because it blocks.
QFuture<int> future = QtConcurrent::run(this, &EyeTracker::startStreaming, simulate);
_startStreamingWatcher.setFuture(future);
_isStreamSimulating = simulate;
}
_isEnabled = enabled;
_isSimulating = simulate;
#endif
}
void EyeTracker::reset() {
// Nothing to do.
}
bool EyeTracker::isTracking() const {
static const quint64 ACTIVE_TIMEOUT_USECS = 2000000; // 2 secs
return _isEnabled && (usecTimestampNow() - _lastProcessDataTimestamp < ACTIVE_TIMEOUT_USECS);
}
#ifdef HAVE_IVIEWHMD
void EyeTracker::calibrate(int points) {
if (!_isStreaming) {
qCWarning(interfaceapp) << "Eye Tracker: Cannot calibrate because not streaming";
return;
}
smi_CalibrationHMDStruct* calibrationHMDStruct;
smi_createCalibrationHMDStruct(&calibrationHMDStruct);
smi_CalibrationTypeEnum calibrationType;
switch (points) {
case 1:
calibrationType = SMI_ONE_POINT_CALIBRATION;
qCDebug(interfaceapp) << "Eye Tracker: One point calibration";
break;
case 3:
calibrationType = SMI_THREE_POINT_CALIBRATION;
qCDebug(interfaceapp) << "Eye Tracker: Three point calibration";
break;
case 5:
calibrationType = SMI_FIVE_POINT_CALIBRATION;
qCDebug(interfaceapp) << "Eye Tracker: Five point calibration";
break;
default:
qCWarning(interfaceapp) << "Eye Tracker: Invalid calibration specified";
return;
}
calibrationHMDStruct->type = calibrationType;
calibrationHMDStruct->backgroundColor->blue = 0.5;
calibrationHMDStruct->backgroundColor->green = 0.5;
calibrationHMDStruct->backgroundColor->red = 0.5;
calibrationHMDStruct->foregroundColor->blue = 1.0;
calibrationHMDStruct->foregroundColor->green = 1.0;
calibrationHMDStruct->foregroundColor->red = 1.0;
int result = smi_setupCalibration(calibrationHMDStruct);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error setting up calibration:" << smiReturnValueToString(result);
return;
} else {
result = smi_calibrate();
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error performing calibration:" << smiReturnValueToString(result);
} else {
result = smi_saveCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error saving calibration:" << smiReturnValueToString(result);
}
}
}
if (result != SMI_RET_SUCCESS) {
QMessageBox::warning(nullptr, "Eye Tracker Error", "Calibration error: " + smiReturnValueToString(result));
}
}
#endif
#ifdef HAVE_IVIEWHMD
QString EyeTracker::smiReturnValueToString(int value) {
switch (value)
{
case smi_ErrorReturnValue::SMI_ERROR_NO_CALLBACK_SET:
return "No callback set";
case smi_ErrorReturnValue::SMI_ERROR_CONNECTING_TO_HMD:
return "Error connecting to HMD";
case smi_ErrorReturnValue::SMI_ERROR_HMD_NOT_SUPPORTED:
return "HMD not supported";
case smi_ErrorReturnValue::SMI_ERROR_NOT_IMPLEMENTED:
return "Not implmented";
case smi_ErrorReturnValue::SMI_ERROR_INVALID_PARAMETER:
return "Invalid parameter";
case smi_ErrorReturnValue::SMI_ERROR_EYECAMERAS_NOT_AVAILABLE:
return "Eye cameras not available";
case smi_ErrorReturnValue::SMI_ERROR_OCULUS_RUNTIME_NOT_SUPPORTED:
return "Oculus runtime not supported";
case smi_ErrorReturnValue::SMI_ERROR_FILE_NOT_FOUND:
return "File not found";
case smi_ErrorReturnValue::SMI_ERROR_FILE_EMPTY:
return "File empty";
case smi_ErrorReturnValue::SMI_ERROR_UNKNOWN:
return "Unknown error";
default:
QString number;
number.setNum(value);
return number;
}
}
#endif

View file

@ -0,0 +1,71 @@
//
// EyeTracker.h
// interface/src/devices
//
// Created by David Rowe on 27 Jul 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EyeTracker_h
#define hifi_EyeTracker_h
#include <QObject>
#include <QFutureWatcher>
#include <glm/glm.hpp>
#include <DependencyManager.h>
#ifdef HAVE_IVIEWHMD
#include <iViewHMDAPI.h>
#endif
class EyeTracker : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
~EyeTracker();
void init();
void setEnabled(bool enabled, bool simulate);
void reset();
bool isInitialized() const { return _isInitialized; }
bool isEnabled() const { return _isEnabled; }
bool isTracking() const;
bool isSimulating() const { return _isSimulating; }
glm::vec3 getLookAtPosition() const { return _lookAtPosition; } // From mid eye point in head frame.
#ifdef HAVE_IVIEWHMD
void processData(smi_CallbackDataStruct* data);
void calibrate(int points);
int startStreaming(bool simulate);
private slots:
void onStreamStarted();
#endif
private:
QString smiReturnValueToString(int value);
bool _isInitialized = false;
bool _isEnabled = false;
bool _isSimulating = false;
bool _isStreaming = false;
bool _isStreamSimulating = false;
quint64 _lastProcessDataTimestamp;
glm::vec3 _lookAtPosition;
QFutureWatcher<int> _startStreamingWatcher;
};
#endif // hifi_EyeTracker_h

View file

@ -1,887 +0,0 @@
//
// OculusManager.cpp
// interface/src/devices
//
// Created by Stephen Birarda on 5/9/13.
// Refactored by Ben Arnold on 6/30/2014
// Copyright 2012 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusManager.h"
#include <glm/glm.hpp>
#include <QDesktopWidget>
#include <QGuiApplication>
#include <gpu/GPUConfig.h>
#include <QScreen>
#include <CursorManager.h>
#include <QOpenGLTimerQuery>
#include <QGLWidget>
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GlWindow.h>
#include <gpu/GLBackend.h>
#include <OglplusHelpers.h>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <UserActivityLogger.h>
#include <FramebufferCache.h>
#include <OVR_CAPI_GL.h>
#include "InterfaceLogging.h"
#include "Application.h"
#include "ui/overlays/Text3DOverlay.h"
template <typename Function>
void for_each_eye(Function function) {
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
eye < ovrEyeType::ovrEye_Count;
eye = static_cast<ovrEyeType>(eye + 1)) {
function(eye);
}
}
template <typename Function>
void for_each_eye(const ovrHmd & hmd, Function function) {
for (int i = 0; i < ovrEye_Count; ++i) {
ovrEyeType eye = hmd->EyeRenderOrder[i];
function(eye);
}
}
enum CalibrationState {
UNCALIBRATED,
WAITING_FOR_DELTA,
WAITING_FOR_ZERO,
WAITING_FOR_ZERO_HELD,
CALIBRATED
};
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
return glm::transpose(glm::make_mat4(&om.M[0][0]));
}
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
}
inline glm::vec3 toGlm(const ovrVector3f & ov) {
return glm::make_vec3(&ov.x);
}
inline glm::vec2 toGlm(const ovrVector2f & ov) {
return glm::make_vec2(&ov.x);
}
inline glm::ivec2 toGlm(const ovrVector2i & ov) {
return glm::ivec2(ov.x, ov.y);
}
inline glm::uvec2 toGlm(const ovrSizei & ov) {
return glm::uvec2(ov.w, ov.h);
}
inline glm::quat toGlm(const ovrQuatf & oq) {
return glm::make_quat(&oq.x);
}
inline glm::mat4 toGlm(const ovrPosef & op) {
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
return translation * orientation;
}
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
ovrMatrix4f result;
glm::mat4 transposed(glm::transpose(m));
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
return result;
}
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
return{ v.x, v.y, v.z };
}
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
return{ v.x, v.y };
}
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
return{ (int)v.x, (int)v.y };
}
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
#ifdef Q_OS_WIN
// A base class for FBO wrappers that need to use the Oculus C
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
// ovrHmd_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
ovrHmd hmd;
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
color = 0;
depth = 0;
};
void Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
protected:
virtual void initDepth() override final {
}
};
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
SwapFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
~SwapFramebufferWrapper() {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
}
void Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
protected:
virtual void initColor() override {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color);
Q_ASSERT(OVR_SUCCESS(result));
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
virtual void initDone() override {
}
virtual void onBind(oglplus::Framebuffer::Target target) override {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}
};
// We use a FBO to wrap the mirror texture because it makes it easier to
// render to the screen via glBlitFramebuffer
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
MirrorFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
virtual ~MirrorFramebufferWrapper() {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
}
private:
void initColor() override {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
void initDone() override {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
};
// --- Windows (SDK 0.6 layer-submission path): SDK-managed swap texture
// set and mirror texture, plus the single scene layer we submit.
static SwapFramebufferWrapper* _swapFbo{ nullptr };
static MirrorFramebufferWrapper* _mirrorFbo{ nullptr };
static ovrLayerEyeFov _sceneLayer;
#else
// --- Non-Windows (client distortion path): our own per-eye textures and
// a dedicated fullscreen output window on the Rift's display.
static ovrTexture _eyeTextures[ovrEye_Count];
static GlWindow* _outputWindow{ nullptr };
#endif
// Connection / per-eye rendering state shared by both paths.
static bool _isConnected = false;
static ovrHmd _ovrHmd;
static ovrFovPort _eyeFov[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrSizei _renderTargetSize;
static glm::mat4 _eyeProjection[ovrEye_Count];
static unsigned int _frameIndex = 0;
static bool _frameTimingActive = false;
static Camera* _camera = NULL;
// ovrEye_Count doubles as a sentinel for "no eye currently rendering".
static ovrEyeType _activeEye = ovrEye_Count;
static bool _hswDismissed = false;
// Calibration thresholds: minimum movement to leave the initial state,
// maximum movement allowed while "holding still", and how long the user
// must hold still (in usec) before we accept the pose as calibrated.
static const float CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
static const float CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
static const float CALIBRATION_ZERO_MAXIMUM_LENGTH = 0.01f;
static const float CALIBRATION_ZERO_MAXIMUM_ANGLE = 2.0f * RADIANS_PER_DEGREE;
static const quint64 CALIBRATION_ZERO_HOLD_TIME = 3000000; // usec
static const float CALIBRATION_MESSAGE_DISTANCE = 2.5f;
static CalibrationState _calibrationState;
static glm::vec3 _calibrationPosition;
static glm::quat _calibrationOrientation;
// Overlay id of the "hold still" billboard; 0 when no billboard is shown.
static int _calibrationMessage = 0;
static glm::vec3 _eyePositions[ovrEye_Count];
// TODO expose this as a developer toggle
static bool _eyePerFrameMode = false;
static ovrEyeType _lastEyeRendered = ovrEye_Count;
static ovrSizei _recommendedTexSize = { 0, 0 };
static float _offscreenRenderScale = 1.0;
// Projection covering the union of both eye FOVs, used when no single
// eye is active (see configureCamera()).
static glm::mat4 _combinedProjection;
static ovrPosef _eyeRenderPoses[ovrEye_Count];
static ovrRecti _eyeViewports[ovrEye_Count];
static ovrVector3f _eyeOffsets[ovrEye_Count];
// Per-eye world-space positions captured during display(); the mid-eye
// position is the average of the two.
glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
glm::vec3 OculusManager::getMidEyePosition() { return (_eyePositions[ovrEye_Left] + _eyePositions[ovrEye_Right]) / 2.0f; }
// Initialize the Oculus SDK, create (or debug-create) the HMD, and set up
// all per-eye rendering state. On Windows this configures the SDK 0.6
// layer-submission path (swap/mirror FBOs); elsewhere it configures
// client distortion rendering into a dedicated output window sharing
// `shareContext`. Bails out (and unchecks the VR-mode menu item) if no
// HMD can be created.
void OculusManager::connect(QOpenGLContext* shareContext) {
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
ovrInitParams initParams; memset(&initParams, 0, sizeof(initParams));
#ifdef DEBUG
initParams.Flags |= ovrInit_Debug;
#endif
ovr_Initialize(&initParams);
#ifdef Q_OS_WIN
ovrResult res = ovrHmd_Create(0, &_ovrHmd);
#ifdef DEBUG
// In debug builds, fall back to a virtual DK2 when no hardware is attached.
if (!OVR_SUCCESS(res)) {
res = ovrHmd_CreateDebug(ovrHmd_DK2, &_ovrHmd);
Q_ASSERT(OVR_SUCCESS(res));
}
#endif
#else
_ovrHmd = ovrHmd_Create(0);
#ifdef DEBUG
if (!_ovrHmd) {
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
}
#endif
#endif
if (!_ovrHmd) {
_isConnected = false;
// we're definitely not in "VR mode" so tell the menu that
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
ovr_Shutdown();
return;
}
_calibrationState = UNCALIBRATED;
// Only log the device once per connect/disconnect cycle.
if (!_isConnected) {
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
}
_isConnected = true;
// Cache per-eye FOV, projection matrix, and HMD-to-eye offset.
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
_eyeProjection[eye] = toGlm(ovrMatrix4f_Projection(_eyeFov[eye],
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
ovrEyeRenderDesc erd = ovrHmd_GetRenderDesc(_ovrHmd, eye, _eyeFov[eye]);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
});
// Build a single projection spanning both eyes (left eye's left edge to
// right eye's right edge) for use when no specific eye is active.
ovrFovPort combinedFov = _ovrHmd->MaxEyeFov[0];
combinedFov.RightTan = _ovrHmd->MaxEyeFov[1].RightTan;
_combinedProjection = toGlm(ovrMatrix4f_Projection(combinedFov,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
// Side-by-side layout: the shared render target is two eye-sized
// viewports wide.
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
#ifdef Q_OS_WIN
// SDK 0.6 path: swap texture set + mirror texture, submitted as one
// ovrLayerEyeFov with left/right viewports in the same texture.
_mirrorFbo = new MirrorFramebufferWrapper(_ovrHmd);
_swapFbo = new SwapFramebufferWrapper(_ovrHmd);
_swapFbo->Init(toGlm(_renderTargetSize));
_sceneLayer.ColorTexture[0] = _swapFbo->color;
_sceneLayer.ColorTexture[1] = nullptr;
_sceneLayer.Viewport[0].Pos = { 0, 0 };
_sceneLayer.Viewport[0].Size = _recommendedTexSize;
_sceneLayer.Viewport[1].Pos = { _recommendedTexSize.w, 0 };
_sceneLayer.Viewport[1].Size = _recommendedTexSize;
_sceneLayer.Header.Type = ovrLayerType_EyeFov;
_sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
for_each_eye([&](ovrEyeType eye) {
_eyeViewports[eye] = _sceneLayer.Viewport[eye];
_sceneLayer.Fov[eye] = _eyeFov[eye];
});
#else
// Client-distortion path: create the output window on the screen whose
// top-left corner matches the HMD's reported window position.
_outputWindow = new GlWindow(shareContext);
_outputWindow->show();
// _outputWindow->setFlags(Qt::FramelessWindowHint );
// _outputWindow->resize(_ovrHmd->Resolution.w, _ovrHmd->Resolution.h);
// _outputWindow->setPosition(_ovrHmd->WindowsPos.x, _ovrHmd->WindowsPos.y);
ivec2 desiredPosition = toGlm(_ovrHmd->WindowsPos);
foreach(QScreen* screen, qGuiApp->screens()) {
ivec2 screenPosition = toGlm(screen->geometry().topLeft());
if (screenPosition == desiredPosition) {
_outputWindow->setScreen(screen);
break;
}
}
_outputWindow->showFullScreen();
_outputWindow->makeCurrent();
ovrGLConfig cfg;
memset(&cfg, 0, sizeof(cfg));
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
cfg.OGL.Header.Multisample = 0;
int distortionCaps = 0
| ovrDistortionCap_Vignette
| ovrDistortionCap_Overdrive
| ovrDistortionCap_TimeWarp;
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
distortionCaps, _eyeFov, _eyeRenderDesc);
assert(configResult);
Q_UNUSED(configResult);
_outputWindow->doneCurrent();
// Both eye textures reference the same shared render target; each
// eye's viewport covers half of it (right half offset below).
for_each_eye([&](ovrEyeType eye) {
//Get texture size
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
_eyeTextures[eye].Header.RenderViewport.Size = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Size.w /= 2;
});
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
for_each_eye([&](ovrEyeType eye) {
_eyeViewports[eye] = _eyeTextures[eye].Header.RenderViewport;
});
#endif
ovrHmd_SetEnabledCaps(_ovrHmd,
ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
// Request full tracking; orientation alone is the required minimum.
ovrHmd_ConfigureTracking(_ovrHmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection,
ovrTrackingCap_Orientation);
if (!_camera) {
_camera = new Camera;
configureCamera(*_camera); // no need to use screen dimensions; they're ignored
}
}
// Disconnects from the Oculus Rift and releases all per-platform resources
// (swap/mirror FBOs on Windows, the output window elsewhere), destroys the
// HMD handle, and shuts down the SDK. Safe to call when not connected.
void OculusManager::disconnect() {
if (_isConnected) {
#ifdef Q_OS_WIN
if (_swapFbo) {
delete _swapFbo;
_swapFbo = nullptr;
}
if (_mirrorFbo) {
delete _mirrorFbo;
_mirrorFbo = nullptr;
}
#else
// deleteLater() is used so the window is destroyed on the Qt event
// loop rather than mid-call.
_outputWindow->showNormal();
_outputWindow->deleteLater();
_outputWindow = nullptr;
#endif
if (_ovrHmd) {
ovrHmd_Destroy(_ovrHmd);
_ovrHmd = nullptr;
}
ovr_Shutdown();
_isConnected = false;
// Prepare to potentially have to dismiss the HSW again
// if the user re-enables VR
_hswDismissed = false;
}
}
// Place the calibration billboard CALIBRATION_MESSAGE_DISTANCE meters in
// front of the avatar's head and orient it to face the avatar, using only
// the yaw component of the head orientation so the billboard stays level.
void positionCalibrationBillboard(Text3DOverlay* billboard) {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    glm::quat headOrientation = myAvatar->getHeadOrientation();
    // Zero the pitch/roll components, then renormalize. Note that
    // glm::normalize() is a pure function returning the normalized copy;
    // the old code called it without assigning the result, leaving
    // headOrientation non-unit after x and z were zeroed.
    headOrientation.x = 0;
    headOrientation.z = 0;
    headOrientation = glm::normalize(headOrientation);
    billboard->setPosition(myAvatar->getHeadPosition()
        + headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
    billboard->setRotation(headOrientation);
}
// HMD calibration state machine, driven once per frame from display() with
// the current tracker pose. Flow: UNCALIBRATED -> (first nonzero pose)
// WAITING_FOR_DELTA -> (user moves enough) WAITING_FOR_ZERO -> (user holds
// still) WAITING_FOR_ZERO_HELD -> (held for CALIBRATION_ZERO_HOLD_TIME)
// CALIBRATED. While holding, a 3D billboard shows a "3...2...1..."
// countdown; any movement beyond the zero thresholds restarts the hold.
void calibrate(const glm::vec3& position, const glm::quat& orientation) {
static QString instructionMessage = "Hold still to calibrate";
static QString progressMessage;
static Text3DOverlay* billboard;
switch (_calibrationState) {
case UNCALIBRATED:
if (position != glm::vec3() && orientation != glm::quat()) { // Handle zero values at start-up.
_calibrationPosition = position;
_calibrationOrientation = orientation;
_calibrationState = WAITING_FOR_DELTA;
}
break;
case WAITING_FOR_DELTA:
// Require a deliberate movement before starting the hold-still phase,
// so a stationary headset on a desk does not auto-calibrate.
if (glm::length(position - _calibrationPosition) > CALIBRATION_DELTA_MINIMUM_LENGTH
|| glm::angle(orientation * glm::inverse(_calibrationOrientation)) > CALIBRATION_DELTA_MINIMUM_ANGLE) {
_calibrationPosition = position;
_calibrationOrientation = orientation;
_calibrationState = WAITING_FOR_ZERO;
}
break;
case WAITING_FOR_ZERO:
if (glm::length(position - _calibrationPosition) < CALIBRATION_ZERO_MAXIMUM_LENGTH
&& glm::angle(orientation * glm::inverse(_calibrationOrientation)) < CALIBRATION_ZERO_MAXIMUM_ANGLE) {
_calibrationStartTime = usecTimestampNow();
_calibrationState = WAITING_FOR_ZERO_HELD;
// Create the instruction billboard the first time we get here.
if (!_calibrationMessage) {
qCDebug(interfaceapp) << "Hold still to calibrate HMD";
billboard = new Text3DOverlay();
billboard->setDimensions(glm::vec2(2.0f, 1.25f));
billboard->setTopMargin(0.35f);
billboard->setLeftMargin(0.28f);
billboard->setText(instructionMessage);
billboard->setAlpha(0.5f);
billboard->setLineHeight(0.1f);
billboard->setIsFacingAvatar(false);
positionCalibrationBillboard(billboard);
_calibrationMessage = Application::getInstance()->getOverlays().addOverlay(billboard);
}
progressMessage = "";
} else {
// Still moving: track the latest pose as the new reference.
_calibrationPosition = position;
_calibrationOrientation = orientation;
}
break;
case WAITING_FOR_ZERO_HELD:
if (glm::length(position - _calibrationPosition) < CALIBRATION_ZERO_MAXIMUM_LENGTH
&& glm::angle(orientation * glm::inverse(_calibrationOrientation)) < CALIBRATION_ZERO_MAXIMUM_ANGLE) {
if ((usecTimestampNow() - _calibrationStartTime) > CALIBRATION_ZERO_HOLD_TIME) {
// Held still long enough: finish, tear down the billboard, and
// reset the app's sensors to this pose.
_calibrationState = CALIBRATED;
qCDebug(interfaceapp) << "HMD calibrated";
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
_calibrationMessage = 0;
Application::getInstance()->resetSensors();
} else {
// Append one character to the countdown every quarter second:
// a digit on whole seconds, a dot otherwise.
quint64 quarterSeconds = (usecTimestampNow() - _calibrationStartTime) / 250000;
if (quarterSeconds + 1 > (quint64)progressMessage.length()) {
// 3...2...1...
if (quarterSeconds == 4 * (quarterSeconds / 4)) {
quint64 wholeSeconds = CALIBRATION_ZERO_HOLD_TIME / 1000000 - quarterSeconds / 4;
if (wholeSeconds == 3) {
positionCalibrationBillboard(billboard);
}
progressMessage += QString::number(wholeSeconds);
} else {
progressMessage += ".";
}
billboard->setText(instructionMessage + "\n\n" + progressMessage);
}
}
} else {
// Moved during the hold: restart the hold-still phase.
_calibrationPosition = position;
_calibrationOrientation = orientation;
_calibrationState = WAITING_FOR_ZERO;
}
break;
default:
break;
}
}
// Restart HMD calibration from scratch; calibrate() will run its full
// state machine again on subsequent frames.
void OculusManager::recalibrate() {
_calibrationState = UNCALIBRATED;
}
// Abort an in-progress calibration: mark the state CALIBRATED (so
// calibrate() stops running) and remove the countdown billboard if shown.
void OculusManager::abandonCalibration() {
_calibrationState = CALIBRATED;
if (_calibrationMessage) {
qCDebug(interfaceapp) << "Abandoned HMD calibration";
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
_calibrationMessage = 0;
}
}
// True once connect() has succeeded and until disconnect() is called.
bool OculusManager::isConnected() {
return _isConnected;
}
// Begins the frame timing for oculus prediction purposes. Must be paired
// with endFrameTiming(); calling it twice in a row only warns, it does not
// abort.
void OculusManager::beginFrameTiming() {
if (_frameTimingActive) {
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
}
_frameTimingActive = true;
}
// The Oculus path always presents frames itself (via SDK submit /
// EndFrame in display()), so the app's normal buffer swap is suppressed.
bool OculusManager::allowSwap() {
return false;
}
// Ends frame timing and advances the frame index used for SDK pose
// prediction and frame submission.
void OculusManager::endFrameTiming() {
_frameIndex++;
_frameTimingActive = false;
}
// Sets the camera projection: the active eye's projection while rendering
// an eye, otherwise the combined projection spanning both eyes.
void OculusManager::configureCamera(Camera& camera) {
if (_activeEye == ovrEye_Count) {
// When not rendering, provide a FOV encompasing both eyes
camera.setProjection(_combinedProjection);
return;
}
camera.setProjection(_eyeProjection[_activeEye]);
}
// Displays everything for the oculus; frame timing must be active
// (beginFrameTiming() called first). Renders each eye into the shared
// offscreen framebuffer, runs calibration while uncalibrated, then
// presents: on Windows via ovrHmd_SubmitFrame (plus an onscreen blit),
// elsewhere via client distortion in the dedicated output window.
void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
#ifdef DEBUG
// Ensure the frame counter always increments by exactly 1
static int oldFrameIndex = -1;
assert(oldFrameIndex == -1 || (unsigned int)oldFrameIndex == _frameIndex - 1);
oldFrameIndex = _frameIndex;
#endif
#ifndef Q_OS_WIN
// FIXME: we need a better way of responding to the HSW. In particular
// we need to ensure that it's only displayed once per session, rather than
// every time the user toggles VR mode, and we need to hook it up to actual
// keyboard input. OVR claim they are refactoring HSW
// https://forums.oculus.com/viewtopic.php?f=20&t=21720#p258599
static ovrHSWDisplayState hasWarningState;
if (!_hswDismissed) {
ovrHmd_GetHSWDisplayState(_ovrHmd, &hasWarningState);
if (hasWarningState.Displayed) {
ovrHmd_DismissHSWDisplay(_ovrHmd);
} else {
_hswDismissed = true;
}
}
#endif
//beginFrameTiming must be called before display
if (!_frameTimingActive) {
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
return;
}
auto primaryFBO = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glm::quat orientation;
glm::vec3 trackerPosition;
auto deviceSize = qApp->getDeviceSize();
// Sample the head pose; feed it to the calibration state machine until
// calibration completes.
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);
if (_calibrationState != CALIBRATED) {
ovrQuatf ovrHeadOrientation = ts.HeadPose.ThePose.Orientation;
orientation = glm::quat(ovrHeadOrientation.w, ovrHeadOrientation.x, ovrHeadOrientation.y, ovrHeadOrientation.z);
calibrate(trackerPosition, orientation);
}
trackerPosition = bodyOrientation * trackerPosition;
ovrPosef eyePoses[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, _eyeOffsets, eyePoses, nullptr);
#ifndef Q_OS_WIN
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
#endif
//Render each eye into an fbo
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
// If we're in eye-per-frame mode, only render one eye
// per call to display, and allow timewarp to correct for
// the other eye. Poor man's perf improvement
if (_eyePerFrameMode && eye == _lastEyeRendered) {
return;
}
_lastEyeRendered = _activeEye = eye;
_eyeRenderPoses[eye] = eyePoses[eye];
// Set the camera rotation for this eye
_eyePositions[eye] = toGlm(_eyeRenderPoses[eye].Position);
_eyePositions[eye] = whichCamera.getRotation() * _eyePositions[eye];
quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);
// Update our camera to what the application camera is doing
_camera->setRotation(whichCamera.getRotation() * eyeRotation);
_camera->setPosition(whichCamera.getPosition() + _eyePositions[eye]);
configureCamera(*_camera);
_camera->update(1.0f / Application::getInstance()->getFps());
// Scale this eye's viewport by the offscreen render scale before
// rendering scene + HMD overlay into it.
ovrRecti & vp = _eyeViewports[eye];
vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
renderArgs->_viewport = glm::ivec4(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, *_camera);
qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
});
// Restore the "no eye" sentinel so configureCamera() falls back to the
// combined projection outside of eye rendering.
_activeEye = ovrEye_Count;
gpu::FramebufferPointer finalFbo;
finalFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// restore our normal viewport
glViewport(0, 0, deviceSize.width(), deviceSize.height());
#ifdef Q_OS_WIN
auto srcFboSize = finalFbo->getSize();
// Blit to the oculus provided texture
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
_swapFbo->Bound(oglplus::Framebuffer::Target::Draw, [&] {
glBlitFramebuffer(
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, _swapFbo->size.x, _swapFbo->size.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
});
// Blit to the onscreen window
auto destWindowSize = qApp->getDeviceSize();
glBlitFramebuffer(
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, destWindowSize.width(), destWindowSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
// Submit the frame to the Oculus SDK for timewarp and distortion
for_each_eye([&](ovrEyeType eye) {
_sceneLayer.RenderPose[eye] = _eyeRenderPoses[eye];
});
auto header = &_sceneLayer.Header;
ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
Q_ASSERT(OVR_SUCCESS(res));
// Advance the swap texture set so the next frame renders into the next
// texture in the set.
_swapFbo->Increment();
#else
GLsync syncObject = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
glFlush();
_outputWindow->makeCurrent();
// force the compositing context to wait for the texture
// rendering to complete before it starts the distortion rendering,
// but without triggering a CPU/GPU synchronization
glWaitSync(syncObject, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(syncObject);
GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
for_each_eye([&](ovrEyeType eye) {
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
glEyeTexture.OGL.TexId = textureId;
});
// restore our normal viewport
ovrHmd_EndFrame(_ovrHmd, _eyeRenderPoses, _eyeTextures);
glCanvas->makeCurrent();
#endif
// in order to account account for changes in the pick ray caused by head movement
// we need to force a mouse move event on every frame (perhaps we could change this
// to based on the head moving a minimum distance from the last position in which we
// sent?)
{
QMouseEvent mouseEvent(QEvent::MouseMove, glCanvas->mapFromGlobal(QCursor::pos()),
Qt::NoButton, Qt::NoButton, 0);
qApp->mouseMoveEvent(&mouseEvent, 0);
}
}
// Re-centers the HMD pose (yaw/position origin) if currently connected.
void OculusManager::reset() {
if (_isConnected) {
ovrHmd_RecenterPose(_ovrHmd);
}
}
// Current head position from the tracker, relative to the tracking origin.
// NOTE(review): queries the SDK directly with no _isConnected guard —
// presumably only called while connected; verify against callers.
glm::vec3 OculusManager::getRelativePosition() {
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
return toGlm(trackingState.HeadPose.ThePose.Position);
}
// Current head orientation from the tracker, sampled at the present time.
glm::quat OculusManager::getOrientation() {
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
return toGlm(trackingState.HeadPose.ThePose.Orientation);
}
// Size of the shared offscreen render target (both eyes side by side),
// as computed during connect(), converted to a QSize.
QSize OculusManager::getRenderTargetSize() {
    return QSize(_renderTargetSize.w, _renderTargetSize.h);
}
// Overrides the off-axis frustum edges from the active eye's FOV tangents
// while an eye is being rendered; otherwise leaves all outputs untouched.
// NOTE(review): nearVal/farVal and the clip planes are part of the shared
// interface but are not modified here.
void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
if (_activeEye != ovrEye_Count) {
const ovrFovPort& port = _eyeFov[_activeEye];
// Frustum edges at the near plane are near * tan(half-angle) per side.
right = nearVal * port.RightTan;
left = -nearVal * port.LeftTan;
top = nearVal * port.UpTan;
bottom = -nearVal * port.DownTan;
}
}
// Returns the index of the screen that most likely is the HMD's display,
// or -1 if unknown. On Windows (direct mode) there is no HMD screen, so
// -1 is always returned. Elsewhere, each attached screen is scored by how
// well its name, resolution, and position match what the Rift reports,
// and the highest-scoring screen wins.
int OculusManager::getHMDScreen() {
#ifdef Q_OS_WIN
return -1;
#else
int hmdScreenIndex = -1; // unknown
// TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
// we will simply assume the initialization code that set up _ovrHmd picked the best hmd
if (_ovrHmd) {
QString productNameFromOVR = _ovrHmd->ProductName;
int hmdWidth = _ovrHmd->Resolution.w;
int hmdHeight = _ovrHmd->Resolution.h;
int hmdAtX = _ovrHmd->WindowsPos.x;
int hmdAtY = _ovrHmd->WindowsPos.y;
// we will score the likelihood that each screen is a match based on the following
// rubrik of potential matching features
const int EXACT_NAME_MATCH = 100;
const int SIMILAR_NAMES = 10;
const int EXACT_LOCATION_MATCH = 50;
const int EXACT_RESOLUTION_MATCH = 25;
int bestMatchScore = 0;
// look at the display list and see if we can find the best match
QDesktopWidget* desktop = QApplication::desktop();
int screenNumber = 0;
foreach (QScreen* screen, QGuiApplication::screens()) {
QString screenName = screen->name();
QRect screenRect = desktop->screenGeometry(screenNumber);
int screenScore = 0;
// Note: an exact name match also scores the SIMILAR_NAMES bonus.
if (screenName == productNameFromOVR) {
screenScore += EXACT_NAME_MATCH;
}
if (similarStrings(screenName, productNameFromOVR)) {
screenScore += SIMILAR_NAMES;
}
if (hmdWidth == screenRect.width() && hmdHeight == screenRect.height()) {
screenScore += EXACT_RESOLUTION_MATCH;
}
if (hmdAtX == screenRect.x() && hmdAtY == screenRect.y()) {
screenScore += EXACT_LOCATION_MATCH;
}
if (screenScore > bestMatchScore) {
bestMatchScore = screenScore;
hmdScreenIndex = screenNumber;
}
screenNumber++;
}
}
return hmdScreenIndex;
#endif
}
// Projection matrix for the given eye (ovrEye_Left/ovrEye_Right), cached
// by connect().
mat4 OculusManager::getEyeProjection(int eye) {
return _eyeProjection[eye];
}
// Pose of the given eye from the most recent display() call, as a matrix.
mat4 OculusManager::getEyePose(int eye) {
return toGlm(_eyeRenderPoses[eye]);
}
// Current head pose sampled directly from the tracker, as a matrix.
mat4 OculusManager::getHeadPose() {
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
return toGlm(ts.HeadPose.ThePose);
}

View file

@ -1,61 +0,0 @@
//
// OculusManager.h
// interface/src/devices
//
// Created by Stephen Birarda on 5/9/13.
// Refactored by Ben Arnold on 6/30/2014
// Copyright 2012 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <QSize>
#include "RenderArgs.h"
class QOpenGLContext;
class QGLWidget;
class Camera;
/// Handles interaction with the Oculus Rift.
class OculusManager {
public:
static void connect(QOpenGLContext* shareContext);
static void disconnect();
static bool isConnected();
static void recalibrate();
static void abandonCalibration();
static void beginFrameTiming();
static void endFrameTiming();
static bool allowSwap();
static void configureCamera(Camera& camera);
static void display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
static void reset();
static glm::vec3 getRelativePosition();
static glm::quat getOrientation();
static QSize getRenderTargetSize();
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
static glm::vec3 getLeftEyePosition();
static glm::vec3 getRightEyePosition();
static glm::vec3 getMidEyePosition();
static int getHMDScreen();
static glm::mat4 getEyeProjection(int eye);
static glm::mat4 getEyePose(int eye);
static glm::mat4 getHeadPose();
};
#endif // hifi_OculusManager_h

View file

@ -1,151 +0,0 @@
//
// TV3DManager.cpp
// interface/src/devices
//
// Created by Brad Hefta-Gaub on 12/24/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TV3DManager.h"
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <RenderArgs.h>
#include "Application.h"
#include "Menu.h"
int TV3DManager::_screenWidth = 1;
int TV3DManager::_screenHeight = 1;
double TV3DManager::_aspect = 1.0;
eyeFrustum TV3DManager::_leftEye;
eyeFrustum TV3DManager::_rightEye;
eyeFrustum* TV3DManager::_activeEye = NULL;
bool TV3DManager::isConnected() {
return Menu::getInstance()->isOptionChecked(MenuOption::Enable3DTVMode);
}
void TV3DManager::connect() {
auto deviceSize = qApp->getDeviceSize();
configureCamera(*(qApp->getCamera()), deviceSize.width(), deviceSize.height());
}
// The basic strategy of this stereoscopic rendering is explained here:
// http://www.orthostereo.com/geometryopengl.html
void TV3DManager::setFrustum(Camera& whichCamera) {
const double DTR = 0.0174532925; // degree to radians
const double IOD = 0.05; //intraocular distance
double fovy = DEFAULT_FIELD_OF_VIEW_DEGREES; // field of view in y-axis
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
double screenZ = 0.25f; // screen projection plane
double top = nearZ * tan(DTR * fovy / 2.0); //sets top of frustum based on fovy and near clipping plane
double right = _aspect * top; // sets right of frustum based on aspect ratio
double frustumshift = (IOD / 2) * nearZ / screenZ;
_leftEye.top = top;
_leftEye.bottom = -top;
_leftEye.left = -right + frustumshift;
_leftEye.right = right + frustumshift;
_leftEye.modelTranslation = IOD / 2;
_rightEye.top = top;
_rightEye.bottom = -top;
_rightEye.left = -right - frustumshift;
_rightEye.right = right - frustumshift;
_rightEye.modelTranslation = -IOD / 2;
}
void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int screenHeight) {
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
if (screenHeight == 0) {
screenHeight = 1; // prevent divide by 0
}
_screenWidth = screenWidth;
_screenHeight = screenHeight;
_aspect= (double)_screenWidth / (double)_screenHeight;
setFrustum(whichCamera);
glViewport (0, 0, _screenWidth, _screenHeight); // sets drawing viewport
#endif
}
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
double farZ = DEFAULT_FAR_CLIP; // far clipping plane
// left eye portal
int portalX = 0;
int portalY = 0;
QSize deviceSize = qApp->getDeviceSize() *
qApp->getRenderResolutionScale();
int portalW = deviceSize.width() / 2;
int portalH = deviceSize.height();
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Camera eyeCamera;
eyeCamera.setRotation(whichCamera.getRotation());
eyeCamera.setPosition(whichCamera.getPosition());
glEnable(GL_SCISSOR_TEST);
forEachEye([&](eyeFrustum& eye){
_activeEye = &eye;
glViewport(portalX, portalY, portalW, portalH);
glScissor(portalX, portalY, portalW, portalH);
renderArgs->_viewport = glm::ivec4(portalX, portalY, portalW, portalH);
glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
eyeCamera.setProjection(projection);
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, eyeCamera, false);
qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
_activeEye = NULL;
}, [&]{
// render right side view
portalX = deviceSize.width() / 2;
});
glDisable(GL_SCISSOR_TEST);
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
auto fboSize = finalFbo->getSize();
// Get the ACTUAL device size for the BLIT
deviceSize = qApp->getDeviceSize();
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBlitFramebuffer(0, 0, fboSize.x, fboSize.y,
0, 0, deviceSize.width(), deviceSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
// reset the viewport to how we started
glViewport(0, 0, deviceSize.width(), deviceSize.height());
#endif
}
void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
if (_activeEye) {
left = _activeEye->left;
right = _activeEye->right;
bottom = _activeEye->bottom;
top = _activeEye->top;
}
}

View file

@ -1,64 +0,0 @@
//
// TV3DManager.h
// interface/src/devices
//
// Created by Brad Hefta-Gaub on 12/24/2013.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_TV3DManager_h
#define hifi_TV3DManager_h
#include <iostream>
#include <glm/glm.hpp>
class Camera;
class RenderArgs;
struct eyeFrustum {
double left;
double right;
double bottom;
double top;
float modelTranslation;
};
/// Handles interaction with 3D TVs
class TV3DManager {
public:
static void connect();
static bool isConnected();
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
static void display(RenderArgs* renderArgs, Camera& whichCamera);
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
private:
static void setFrustum(Camera& whichCamera);
static int _screenWidth;
static int _screenHeight;
static double _aspect;
static eyeFrustum _leftEye;
static eyeFrustum _rightEye;
static eyeFrustum* _activeEye;
// The first function is the code executed for each eye
// while the second is code to be executed between the two eyes.
// The use case here is to modify the output viewport coordinates
// for the new eye.
// FIXME: we'd like to have a default empty lambda for the second parameter,
// but gcc 4.8.1 complains about it due to a bug. See
// http://stackoverflow.com/questions/25490662/lambda-as-default-parameter-to-a-member-function-template
template<typename F, typename FF>
static void forEachEye(F f, FF ff) {
f(_leftEye);
ff();
f(_rightEye);
}
};
#endif // hifi_TV3DManager_h

View file

@ -11,15 +11,16 @@
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GLCanvas.h>
#include <HandData.h>
#include <HFBackEvent.h>
#include "Application.h"
#include "devices/MotionTracker.h"
#include "devices/SixenseManager.h"
#include "ControllerScriptingInterface.h"
// TODO: this needs to be removed, as well as any related controller-specific information
#include <input-plugins/SixenseManager.h>
ControllerScriptingInterface::ControllerScriptingInterface() :
_mouseCaptured(false),
@ -82,13 +83,14 @@ void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::In
QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action) {
QScriptValue obj = engine->newObject();
QVector<UserInputMapper::InputChannel> inputChannels = Application::getUserInputMapper()->getInputChannelsForAction(action);
auto userInputMapper = DependencyManager::get<UserInputMapper>();
QVector<UserInputMapper::InputChannel> inputChannels = userInputMapper->getInputChannelsForAction(action);
QScriptValue _inputChannels = engine->newArray(inputChannels.size());
for (int i = 0; i < inputChannels.size(); i++) {
_inputChannels.setProperty(i, inputChannelToScriptValue(engine, inputChannels[i]));
}
obj.setProperty("action", (int) action);
obj.setProperty("actionName", Application::getUserInputMapper()->getActionName(action));
obj.setProperty("actionName", userInputMapper->getActionName(action));
obj.setProperty("inputChannels", _inputChannels);
return obj;
}
@ -376,7 +378,7 @@ void ControllerScriptingInterface::releaseJoystick(int joystickIndex) {
}
glm::vec2 ControllerScriptingInterface::getViewportDimensions() const {
return Application::getInstance()->getCanvasSize();
return Application::getInstance()->getUiSize();
}
AbstractInputController* ControllerScriptingInterface::createInputController(const QString& deviceName, const QString& tracker) {
@ -428,43 +430,59 @@ void ControllerScriptingInterface::updateInputControllers() {
}
QVector<UserInputMapper::Action> ControllerScriptingInterface::getAllActions() {
return Application::getUserInputMapper()->getAllActions();
return DependencyManager::get<UserInputMapper>()->getAllActions();
}
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getInputChannelsForAction(UserInputMapper::Action action) {
return Application::getUserInputMapper()->getInputChannelsForAction(action);
return DependencyManager::get<UserInputMapper>()->getInputChannelsForAction(action);
}
QString ControllerScriptingInterface::getDeviceName(unsigned int device) {
return Application::getUserInputMapper()->getDeviceName((unsigned short) device);
return DependencyManager::get<UserInputMapper>()->getDeviceName((unsigned short)device);
}
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getAllInputsForDevice(unsigned int device) {
return Application::getUserInputMapper()->getAllInputsForDevice(device);
return DependencyManager::get<UserInputMapper>()->getAllInputsForDevice(device);
}
bool ControllerScriptingInterface::addInputChannel(UserInputMapper::InputChannel inputChannel) {
return Application::getUserInputMapper()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
return DependencyManager::get<UserInputMapper>()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
}
bool ControllerScriptingInterface::removeInputChannel(UserInputMapper::InputChannel inputChannel) {
return Application::getUserInputMapper()->removeInputChannel(inputChannel);
return DependencyManager::get<UserInputMapper>()->removeInputChannel(inputChannel);
}
QVector<UserInputMapper::InputPair> ControllerScriptingInterface::getAvailableInputs(unsigned int device) {
return Application::getUserInputMapper()->getAvailableInputs((unsigned short) device);
return DependencyManager::get<UserInputMapper>()->getAvailableInputs((unsigned short)device);
}
void ControllerScriptingInterface::resetAllDeviceBindings() {
Application::getUserInputMapper()->resetAllDeviceBindings();
DependencyManager::get<UserInputMapper>()->resetAllDeviceBindings();
}
void ControllerScriptingInterface::resetDevice(unsigned int device) {
Application::getUserInputMapper()->resetDevice(device);
DependencyManager::get<UserInputMapper>()->resetDevice(device);
}
int ControllerScriptingInterface::findDevice(QString name) {
return Application::getUserInputMapper()->findDevice(name);
return DependencyManager::get<UserInputMapper>()->findDevice(name);
}
float ControllerScriptingInterface::getActionValue(int action) {
return DependencyManager::get<UserInputMapper>()->getActionState(UserInputMapper::Action(action));
}
int ControllerScriptingInterface::findAction(QString actionName) {
auto userInputMapper = DependencyManager::get<UserInputMapper>();
auto actions = getAllActions();
for (auto action : actions) {
if (userInputMapper->getActionName(action) == actionName) {
return action;
}
}
// If the action isn't found, return -1
return -1;
}
InputController::InputController(int deviceTrackerId, int subTrackerId, QObject* parent) :
@ -502,4 +520,4 @@ const unsigned int INPUTCONTROLLER_KEY_DEVICE_MASK = 16;
InputController::Key InputController::getKey() const {
return (((_deviceTrackerId & INPUTCONTROLLER_KEY_DEVICE_MASK) << INPUTCONTROLLER_KEY_DEVICE_OFFSET) | _subTrackerId);
}
}

View file

@ -14,7 +14,7 @@
#include <QtCore/QObject>
#include "ui/UserInputMapper.h"
#include <input-plugins/UserInputMapper.h>
#include <AbstractControllerScriptingInterface.h>
class PalmData;
@ -86,15 +86,24 @@ public:
public slots:
Q_INVOKABLE virtual QVector<UserInputMapper::Action> getAllActions();
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);
Q_INVOKABLE virtual bool addInputChannel(UserInputMapper::InputChannel inputChannel);
Q_INVOKABLE virtual bool removeInputChannel(UserInputMapper::InputChannel inputChannel);
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
Q_INVOKABLE virtual QVector<UserInputMapper::InputPair> getAvailableInputs(unsigned int device);
Q_INVOKABLE virtual void resetAllDeviceBindings();
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);
Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
Q_INVOKABLE virtual float getActionValue(int action);
Q_INVOKABLE virtual void resetDevice(unsigned int device);
Q_INVOKABLE virtual void resetAllDeviceBindings();
Q_INVOKABLE virtual int findDevice(QString name);
Q_INVOKABLE virtual int findAction(QString actionName);
virtual bool isPrimaryButtonPressed() const;
virtual glm::vec2 getPrimaryJoystickPosition() const;

View file

@ -58,6 +58,7 @@ WebWindowClass::WebWindowClass(const QString& title, const QString& url, int wid
auto dialogWidget = new QDialog(Application::getInstance()->getWindow(), Qt::Window);
dialogWidget->setWindowTitle(title);
dialogWidget->resize(width, height);
dialogWidget->installEventFilter(this);
connect(dialogWidget, &QDialog::finished, this, &WebWindowClass::hasClosed);
auto layout = new QVBoxLayout(dialogWidget);
@ -93,6 +94,19 @@ WebWindowClass::WebWindowClass(const QString& title, const QString& url, int wid
WebWindowClass::~WebWindowClass() {
}
bool WebWindowClass::eventFilter(QObject* sender, QEvent* event) {
if (sender == _windowWidget) {
if (event->type() == QEvent::Move) {
emit moved(getPosition());
}
if (event->type() == QEvent::Resize) {
emit resized(getSize());
}
}
return false;
}
void WebWindowClass::hasClosed() {
emit closed();
}
@ -122,6 +136,40 @@ void WebWindowClass::setURL(const QString& url) {
_webView->setUrl(url);
}
QSizeF WebWindowClass::getSize() const {
QSizeF size = _windowWidget->size();
return size;
}
void WebWindowClass::setSize(QSizeF size) {
setSize(size.width(), size.height());
}
void WebWindowClass::setSize(int width, int height) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setSize", Qt::AutoConnection, Q_ARG(int, width), Q_ARG(int, height));
return;
}
_windowWidget->resize(width, height);
}
glm::vec2 WebWindowClass::getPosition() const {
QPoint position = _windowWidget->pos();
return glm::vec2(position.x(), position.y());
}
void WebWindowClass::setPosition(glm::vec2 position) {
setPosition(position.x, position.y);
}
void WebWindowClass::setPosition(int x, int y) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setPosition", Qt::AutoConnection, Q_ARG(int, x), Q_ARG(int, y));
return;
}
_windowWidget->move(x, y);
}
void WebWindowClass::raise() {
QMetaObject::invokeMethod(_windowWidget, "showNormal", Qt::AutoConnection);
QMetaObject::invokeMethod(_windowWidget, "raise", Qt::AutoConnection);

View file

@ -35,6 +35,9 @@ class WebWindowClass : public QObject {
Q_OBJECT
Q_PROPERTY(QObject* eventBridge READ getEventBridge)
Q_PROPERTY(QString url READ getURL)
Q_PROPERTY(glm::vec2 position READ getPosition WRITE setPosition);
Q_PROPERTY(QSizeF size READ getSize WRITE setSize);
public:
WebWindowClass(const QString& title, const QString& url, int width, int height, bool isToolWindow = false);
~WebWindowClass();
@ -43,6 +46,12 @@ public:
public slots:
void setVisible(bool visible);
glm::vec2 getPosition() const;
void setPosition(int x, int y);
void setPosition(glm::vec2 position);
QSizeF getSize() const;
void setSize(QSizeF size);
void setSize(int width, int height);
QString getURL() const { return _webView->url().url(); }
void setURL(const QString& url);
void raise();
@ -51,8 +60,13 @@ public slots:
void setTitle(const QString& title);
signals:
void moved(glm::vec2 position);
void resized(QSizeF size);
void closed();
protected:
virtual bool eventFilter(QObject* sender, QEvent* event);
private slots:
void hasClosed();

View file

@ -11,6 +11,10 @@
#include "ApplicationCompositor.h"
#include <memory>
#include <QPropertyAnimation>
#include <glm/gtc/type_ptr.hpp>
#include <avatar/AvatarManager.h>
@ -21,6 +25,8 @@
#include "Tooltip.h"
#include "Application.h"
#include <input-plugins/SixenseManager.h> // TODO: any references to sixense should be removed here
#include <input-plugins/InputDevice.h>
// Used to animate the magnification windows
@ -106,7 +112,9 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
}
}
ApplicationCompositor::ApplicationCompositor() {
ApplicationCompositor::ApplicationCompositor() :
_alphaPropertyAnimation(new QPropertyAnimation(this, "alpha"))
{
memset(_reticleActive, 0, sizeof(_reticleActive));
memset(_magActive, 0, sizeof(_reticleActive));
memset(_magSizeMult, 0, sizeof(_magSizeMult));
@ -163,6 +171,8 @@ ApplicationCompositor::ApplicationCompositor() {
}
}
});
_alphaPropertyAnimation.reset(new QPropertyAnimation(this, "alpha"));
}
ApplicationCompositor::~ApplicationCompositor() {
@ -184,7 +194,8 @@ void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorI
// Draws the FBO texture for the screen
void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
PROFILE_RANGE(__FUNCTION__);
if (_alpha == 0.0f) {
if (_alpha <= 0.0f) {
return;
}
@ -204,7 +215,7 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);
batch.setViewportTransform(glm::ivec4(0, 0, deviceSize.width(), deviceSize.height()));
batch.setViewportTransform(renderArgs->_viewport);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch.setProjectionTransform(mat4());
@ -232,15 +243,17 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
}
vec2 getPolarCoordinates(const PalmData& palm) {
vec2 ApplicationCompositor::getPolarCoordinates(const PalmData& palm) const {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto avatarOrientation = myAvatar->getOrientation();
auto eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
// Direction of the tip relative to the eye
glm::vec3 tipDirection = tip - eyePos;
// orient into avatar space
tipDirection = glm::inverse(avatarOrientation) * tipDirection;
glm::vec3 relativePos = myAvatar->getDefaultEyePosition();
glm::quat rotation = myAvatar->getOrientation();
if (Menu::getInstance()->isOptionChecked(MenuOption::StandingHMDSensorMode)) {
relativePos = _modelTransform.getTranslation();
rotation = _modelTransform.getRotation();
}
glm::vec3 tipDirection = tip - relativePos;
tipDirection = glm::inverse(rotation) * tipDirection;
// Normalize for trig functions
tipDirection = glm::normalize(tipDirection);
// Convert to polar coordinates
@ -251,7 +264,8 @@ vec2 getPolarCoordinates(const PalmData& palm) {
// Draws the FBO texture for Oculus rift.
void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int eye) {
PROFILE_RANGE(__FUNCTION__);
if (_alpha == 0.0f) {
if (_alpha <= 0.0f) {
return;
}
@ -278,11 +292,13 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));
batch.setViewTransform(Transform());
batch.setProjectionTransform(qApp->getEyeProjection(eye));
mat4 camMat;
_cameraBaseTransform.getMatrix(camMat);
camMat = camMat * qApp->getEyePose(eye);
batch.setViewportTransform(renderArgs->_viewport);
batch.setViewTransform(camMat);
mat4 eyePose = qApp->getEyePose(eye);
glm::mat4 overlayXfm = glm::inverse(eyePose);
batch.setProjectionTransform(qApp->getEyeProjection(eye));
#ifdef DEBUG_OVERLAY
{
@ -291,7 +307,9 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
}
#else
{
batch.setModelTransform(overlayXfm);
//batch.setModelTransform(overlayXfm);
batch.setModelTransform(_modelTransform);
drawSphereSection(batch);
}
#endif
@ -302,8 +320,11 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
bindCursorTexture(batch);
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Controller Pointers
glm::mat4 overlayXfm;
_modelTransform.getMatrix(overlayXfm);
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
PalmData& palm = myAvatar->getHand()->getPalms()[i];
if (palm.isActive()) {
@ -345,13 +366,18 @@ void ApplicationCompositor::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& or
// We need the RAW camera orientation and position, because this is what the overlay is
// rendered relative to
const glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
const glm::quat overlayOrientation = qApp->getCamera()->getRotation();
glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
glm::quat overlayOrientation = qApp->getCamera()->getRotation();
if (Menu::getInstance()->isOptionChecked(MenuOption::StandingHMDSensorMode)) {
overlayPosition = _modelTransform.getTranslation();
overlayOrientation = _modelTransform.getRotation();
}
// Intersection UI overlay space
glm::vec3 worldSpaceDirection = overlayOrientation * overlaySpaceDirection;
glm::vec3 worldSpaceIntersection = (glm::normalize(worldSpaceDirection) * _oculusUIRadius) + overlayPosition;
glm::vec3 worldSpaceHeadPosition = (overlayOrientation * glm::vec3(qApp->getHeadPose()[3])) + overlayPosition;
glm::vec3 worldSpaceHeadPosition = (overlayOrientation * extractTranslation(qApp->getHMDSensorPose())) + overlayPosition;
// Intersection in world space
origin = worldSpaceHeadPosition;
@ -410,13 +436,15 @@ bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& positi
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
//If we are in oculus, render reticle later
auto trueMouse = qApp->getTrueMouse();
trueMouse /= qApp->getCanvasSize();
QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
_reticlePosition[MOUSE] = position;
_reticleActive[MOUSE] = true;
_magActive[MOUSE] = _magnifier;
_reticleActive[LEFT_CONTROLLER] = false;
_reticleActive[RIGHT_CONTROLLER] = false;
} else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
} else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::HandMouseInput)) {
//only render controller pointer if we aren't already rendering a mouse pointer
_reticleActive[MOUSE] = false;
_magActive[MOUSE] = false;
@ -491,6 +519,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
auto canvasSize = qApp->getCanvasSize();
int mouseX, mouseY;
// Get directon relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
@ -499,7 +528,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
float cursorRange = canvasSize.x * InputDevice::getCursorPixelRangeMult();
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
@ -611,6 +640,19 @@ void ApplicationCompositor::drawSphereSection(gpu::Batch& batch) {
batch.setInputFormat(streamFormat);
static const int VERTEX_STRIDE = sizeof(vec3) + sizeof(vec2) + sizeof(vec4);
if (_prevAlpha != _alpha) {
// adjust alpha by munging vertex color alpha.
// FIXME we should probably just use a uniform for this.
float* floatPtr = reinterpret_cast<float*>(_hemiVertices->editData());
const auto ALPHA_FLOAT_OFFSET = (sizeof(vec3) + sizeof(vec2) + sizeof(vec3)) / sizeof(float);
const auto VERTEX_FLOAT_STRIDE = (sizeof(vec3) + sizeof(vec2) + sizeof(vec4)) / sizeof(float);
const auto NUM_VERTS = _hemiVertices->getSize() / VERTEX_STRIDE;
for (size_t i = 0; i < NUM_VERTS; i++) {
floatPtr[i * VERTEX_FLOAT_STRIDE + ALPHA_FLOAT_OFFSET] = _alpha;
}
}
gpu::BufferView posView(_hemiVertices, 0, _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(_hemiVertices, sizeof(vec3), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
gpu::BufferView colView(_hemiVertices, sizeof(vec3) + sizeof(vec2), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::COLOR)._element);
@ -700,3 +742,29 @@ void ApplicationCompositor::updateTooltips() {
}
}
}
static const float FADE_DURATION = 500.0f;
void ApplicationCompositor::fadeIn() {
_fadeInAlpha = true;
_alphaPropertyAnimation->setDuration(FADE_DURATION);
_alphaPropertyAnimation->setStartValue(_alpha);
_alphaPropertyAnimation->setEndValue(1.0f);
_alphaPropertyAnimation->start();
}
void ApplicationCompositor::fadeOut() {
_fadeInAlpha = false;
_alphaPropertyAnimation->setDuration(FADE_DURATION);
_alphaPropertyAnimation->setStartValue(_alpha);
_alphaPropertyAnimation->setEndValue(0.0f);
_alphaPropertyAnimation->start();
}
void ApplicationCompositor::toggle() {
if (_fadeInAlpha) {
fadeOut();
} else {
fadeIn();
}
}

View file

@ -10,6 +10,7 @@
#define hifi_ApplicationCompositor_h
#include <QObject>
#include <QPropertyAnimation>
#include <cstdint>
#include <EntityItemID.h>
@ -33,6 +34,8 @@ const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
// facilities of this class
class ApplicationCompositor : public QObject {
Q_OBJECT
Q_PROPERTY(float alpha READ getAlpha WRITE setAlpha)
public:
ApplicationCompositor();
~ApplicationCompositor();
@ -64,6 +67,19 @@ public:
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
uint32_t getOverlayTexture() const;
void setCameraBaseTransform(const Transform& transform) { _cameraBaseTransform = transform; }
const Transform& getCameraBaseTransform() const { return _cameraBaseTransform; }
void setModelTransform(const Transform& transform) { _modelTransform = transform; }
const Transform& getModelTransform() const { return _modelTransform; }
void fadeIn();
void fadeOut();
void toggle();
float getAlpha() const { return _alpha; }
void setAlpha(float alpha) { _alpha = alpha; }
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);
@ -78,7 +94,8 @@ private:
void renderPointers(gpu::Batch& batch);
void renderControllerPointers(gpu::Batch& batch);
void renderPointersOculus(gpu::Batch& batch);
vec2 getPolarCoordinates(const PalmData& palm) const;
// Support for hovering and tooltips
static EntityItemID _noItemId;
@ -100,6 +117,8 @@ private:
bool _magnifier{ true };
float _alpha{ 1.0f };
float _prevAlpha{ 1.0f };
float _fadeInAlpha{ true };
float _oculusUIRadius{ 1.0f };
QMap<uint16_t, gpu::TexturePointer> _cursors;
@ -115,6 +134,11 @@ private:
glm::vec3 _previousMagnifierBottomRight;
glm::vec3 _previousMagnifierTopLeft;
glm::vec3 _previousMagnifierTopRight;
Transform _modelTransform;
Transform _cameraBaseTransform;
std::unique_ptr<QPropertyAnimation> _alphaPropertyAnimation;
};
#endif // hifi_ApplicationCompositor_h

View file

@ -14,7 +14,6 @@
#include <avatar/AvatarManager.h>
#include <DeferredLightingEffect.h>
#include <GLMHelpers.h>
#include <gpu/GLBackend.h>
#include <gpu/GLBackendShared.h>
#include <FramebufferCache.h>
#include <GLMHelpers.h>
@ -32,7 +31,6 @@
#include "ui/AvatarInputs.h"
const vec4 CONNECTION_STATUS_BORDER_COLOR{ 1.0f, 0.0f, 0.0f, 0.8f };
const float CONNECTION_STATUS_BORDER_LINE_WIDTH = 4.0f;
static const float ORTHO_NEAR_CLIP = -1000.0f;
static const float ORTHO_FAR_CLIP = 1000.0f;
@ -48,12 +46,7 @@ ApplicationOverlay::ApplicationOverlay()
// then release it back to the UI for re-use
auto offscreenUi = DependencyManager::get<OffscreenUi>();
connect(offscreenUi.data(), &OffscreenUi::textureUpdated, this, [&](GLuint textureId) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->lockTexture(textureId);
std::swap(_uiTexture, textureId);
if (textureId) {
offscreenUi->releaseTexture(textureId);
}
_uiTexture = textureId;
});
}
@ -137,8 +130,7 @@ void ApplicationOverlay::renderAudioScope(RenderArgs* renderArgs) {
batch.setProjectionTransform(legacyProjection);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch._glLineWidth(1.0f); // default
// Render the audio scope
DependencyManager::get<AudioScope>()->render(renderArgs, width, height);
}
@ -157,8 +149,7 @@ void ApplicationOverlay::renderOverlays(RenderArgs* renderArgs) {
batch.setProjectionTransform(legacyProjection);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch._glLineWidth(1.0f); // default
// Render all of the Script based "HUD" aka 2D overlays.
// note: we call them HUD, as opposed to 2D, only because there are some cases of 3D HUD overlays, like the
// cameral controls for the edit.js
@ -195,6 +186,7 @@ void ApplicationOverlay::renderRearView(RenderArgs* renderArgs) {
glm::vec2 texCoordMinCorner(0.0f, 0.0f);
glm::vec2 texCoordMaxCorner(viewport.width() * renderRatio / float(selfieTexture->getWidth()), viewport.height() * renderRatio / float(selfieTexture->getHeight()));
geometryCache->useSimpleDrawPipeline(batch, true);
batch.setResourceTexture(0, selfieTexture);
geometryCache->renderQuad(batch, bottomLeft, topRight, texCoordMinCorner, texCoordMaxCorner, glm::vec4(1.0f, 1.0f, 1.0f, 1.0f));
@ -247,7 +239,7 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder(RenderArgs* renderAr
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch.setResourceTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
batch._glLineWidth(CONNECTION_STATUS_BORDER_LINE_WIDTH);
// FIXME: THe line width of CONNECTION_STATUS_BORDER_LINE_WIDTH is not supported anymore, we ll need a workaround
// TODO animate the disconnect border for some excitement while not connected?
//double usecs = usecTimestampNow();

View file

@ -15,7 +15,6 @@
#include <AudioClient.h>
#include <avatar/AvatarManager.h>
#include <devices/Faceshift.h>
#include <devices/SixenseManager.h>
#include <NetworkingConstants.h>
#include "Application.h"

View file

@ -173,7 +173,6 @@ void DialogsManager::hmdTools(bool showTools) {
}
void DialogsManager::hmdToolsClosed() {
Menu::getInstance()->getActionForOption(MenuOption::HMDTools)->setChecked(false);
_hmdToolsDialog->hide();
}

View file

@ -19,48 +19,60 @@
#include <QScreen>
#include <QWindow>
#include <plugins/PluginManager.h>
#include <display-plugins/DisplayPlugin.h>
#include "MainWindow.h"
#include "Menu.h"
#include "ui/DialogsManager.h"
#include "ui/HMDToolsDialog.h"
#include "devices/OculusManager.h"
static const int WIDTH = 350;
static const int HEIGHT = 100;
HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) ,
_previousScreen(NULL),
_hmdScreen(NULL),
_hmdScreenNumber(-1),
_switchModeButton(NULL),
_debugDetails(NULL),
_previousDialogScreen(NULL),
_inHDMMode(false)
QDialog(parent, Qt::Window | Qt::CustomizeWindowHint | Qt::WindowTitleHint | Qt::WindowStaysOnTopHint)
{
this->setWindowTitle("HMD Tools");
// FIXME do we want to support more than one connected HMD? It seems like a pretty corner case
foreach(auto displayPlugin, PluginManager::getInstance()->getDisplayPlugins()) {
// The first plugin is always the standard 2D display, by convention
if (_defaultPluginName.isEmpty()) {
_defaultPluginName = displayPlugin->getName();
continue;
}
if (displayPlugin->isHmd()) {
// Not all HMD's have corresponding screens
if (displayPlugin->getHmdScreen() >= 0) {
_hmdScreenNumber = displayPlugin->getHmdScreen();
}
_hmdPluginName = displayPlugin->getName();
break;
}
}
setWindowTitle("HMD Tools");
// Create layouter
QFormLayout* form = new QFormLayout();
const int WIDTH = 350;
{
QFormLayout* form = new QFormLayout();
// Add a button to enter
_switchModeButton = new QPushButton("Toggle HMD Mode");
if (_hmdPluginName.isEmpty()) {
_switchModeButton->setEnabled(false);
}
// Add a button to enter
_switchModeButton->setFixedWidth(WIDTH);
form->addRow("", _switchModeButton);
// Create a label with debug details...
_debugDetails = new QLabel();
_debugDetails->setFixedSize(WIDTH, HEIGHT);
form->addRow("", _debugDetails);
setLayout(form);
}
// Add a button to enter
_switchModeButton = new QPushButton("Enter HMD Mode");
_switchModeButton->setFixedWidth(WIDTH);
form->addRow("", _switchModeButton);
connect(_switchModeButton,SIGNAL(clicked(bool)),this,SLOT(switchModeClicked(bool)));
// Create a label with debug details...
_debugDetails = new QLabel();
_debugDetails->setText(getDebugDetails());
const int HEIGHT = 100;
_debugDetails->setFixedSize(WIDTH, HEIGHT);
form->addRow("", _debugDetails);
this->QDialog::setLayout(form);
Application::getInstance()->getWindow()->activateWindow();
// watch for our application window moving screens. If it does we want to update our screen details
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
connect(mainWindow, &QWindow::screenChanged, this, &HMDToolsDialog::applicationWindowScreenChanged);
qApp->getWindow()->activateWindow();
// watch for our dialog window moving screens. If it does we want to enforce our rules about
// what screens we're allowed on
@ -82,11 +94,31 @@ HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
watchWindow(dialogsManager->getLodToolsDialog()->windowHandle());
}
connect(_switchModeButton, &QPushButton::clicked, [this]{
toggleHMDMode();
});
// when the application is about to quit, leave HDM mode
connect(Application::getInstance(), SIGNAL(beforeAboutToQuit()), this, SLOT(aboutToQuit()));
connect(qApp, &Application::beforeAboutToQuit, [this]{
// FIXME this is ineffective because it doesn't trigger the menu to
// save the fact that VR Mode is not checked.
leaveHMDMode();
});
connect(qApp, &Application::activeDisplayPluginChanged, [this]{
updateUi();
});
// watch for our application window moving screens. If it does we want to update our screen details
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
connect(mainWindow, &QWindow::screenChanged, [this]{
updateUi();
});
// keep track of changes to the number of screens
connect(QApplication::desktop(), &QDesktopWidget::screenCountChanged, this, &HMDToolsDialog::screenCountChanged);
updateUi();
}
HMDToolsDialog::~HMDToolsDialog() {
@ -96,18 +128,13 @@ HMDToolsDialog::~HMDToolsDialog() {
_windowWatchers.clear();
}
void HMDToolsDialog::applicationWindowScreenChanged(QScreen* screen) {
_debugDetails->setText(getDebugDetails());
}
QString HMDToolsDialog::getDebugDetails() const {
QString results;
int hmdScreenNumber = OculusManager::getHMDScreen();
if (hmdScreenNumber >= 0) {
results += "HMD Screen: " + QGuiApplication::screens()[hmdScreenNumber]->name() + "\n";
if (_hmdScreenNumber >= 0) {
results += "HMD Screen: " + QGuiApplication::screens()[_hmdScreenNumber]->name() + "\n";
} else {
results += "HMD Screen Name: Unknown\n";
results += "HMD Screen Name: N/A\n";
}
int desktopPrimaryScreenNumber = QApplication::desktop()->primaryScreen();
@ -122,49 +149,35 @@ QString HMDToolsDialog::getDebugDetails() const {
return results;
}
void HMDToolsDialog::switchModeClicked(bool checked) {
if (!_inHDMMode) {
enterHDMMode();
void HMDToolsDialog::toggleHMDMode() {
if (!qApp->isHMDMode()) {
enterHMDMode();
} else {
leaveHDMMode();
leaveHMDMode();
}
}
void HMDToolsDialog::enterHDMMode() {
if (!_inHDMMode) {
_switchModeButton->setText("Leave HMD Mode");
_debugDetails->setText(getDebugDetails());
// if we're on a single screen setup, then hide our tools window when entering HMD mode
if (QApplication::desktop()->screenCount() == 1) {
close();
}
Application::getInstance()->setEnableVRMode(true);
_inHDMMode = true;
}
}
void HMDToolsDialog::leaveHDMMode() {
if (_inHDMMode) {
_switchModeButton->setText("Enter HMD Mode");
_debugDetails->setText(getDebugDetails());
Application::getInstance()->setEnableVRMode(false);
void HMDToolsDialog::enterHMDMode() {
if (!qApp->isHMDMode()) {
Application::getInstance()->setActiveDisplayPlugin(_hmdPluginName);
Application::getInstance()->getWindow()->activateWindow();
}
}
void HMDToolsDialog::leaveHMDMode() {
if (qApp->isHMDMode()) {
Application::getInstance()->setActiveDisplayPlugin(_defaultPluginName);
Application::getInstance()->getWindow()->activateWindow();
_inHDMMode = false;
}
}
void HMDToolsDialog::reject() {
// Just regularly close upon ESC
close();
// We don't want this window to be closable from a close icon, just from our "Leave HMD Mode" button
}
void HMDToolsDialog::closeEvent(QCloseEvent* event) {
// TODO: consider if we want to prevent closing of this window with event->ignore();
this->QDialog::closeEvent(event);
emit closed();
// We don't want this window to be closable from a close icon, just from our "Leave HMD Mode" button
event->ignore();
}
void HMDToolsDialog::centerCursorOnWidget(QWidget* widget) {
@ -174,9 +187,15 @@ void HMDToolsDialog::centerCursorOnWidget(QWidget* widget) {
QCursor::setPos(screen, windowCenter);
}
void HMDToolsDialog::updateUi() {
_switchModeButton->setText(qApp->isHMDMode() ? "Leave HMD Mode" : "Enter HMD Mode");
_debugDetails->setText(getDebugDetails());
}
void HMDToolsDialog::showEvent(QShowEvent* event) {
// center the cursor on the hmd tools dialog
centerCursorOnWidget(this);
updateUi();
}
void HMDToolsDialog::hideEvent(QHideEvent* event) {
@ -184,33 +203,31 @@ void HMDToolsDialog::hideEvent(QHideEvent* event) {
centerCursorOnWidget(Application::getInstance()->getWindow());
}
void HMDToolsDialog::aboutToQuit() {
if (_inHDMMode) {
// FIXME this is ineffective because it doesn't trigger the menu to
// save the fact that VR Mode is not checked.
leaveHDMMode();
}
}
void HMDToolsDialog::screenCountChanged(int newCount) {
if (!OculusManager::isConnected()) {
//OculusManager::connect();
int hmdScreenNumber = -1;
auto displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
foreach(auto dp, displayPlugins) {
if (dp->isHmd()) {
if (dp->getHmdScreen() >= 0) {
hmdScreenNumber = dp->getHmdScreen();
}
break;
}
}
int hmdScreenNumber = OculusManager::getHMDScreen();
if (_inHDMMode && _hmdScreenNumber != hmdScreenNumber) {
if (qApp->isHMDMode() && _hmdScreenNumber != hmdScreenNumber) {
qDebug() << "HMD Display changed WHILE IN HMD MODE";
leaveHDMMode();
leaveHMDMode();
// if there is a new best HDM screen then go back into HDM mode after done leaving
if (hmdScreenNumber >= 0) {
qDebug() << "Trying to go back into HDM Mode";
qDebug() << "Trying to go back into HMD Mode";
const int SLIGHT_DELAY = 2000;
QTimer::singleShot(SLIGHT_DELAY, this, SLOT(enterHDMMode()));
QTimer::singleShot(SLIGHT_DELAY, [this]{
enterHMDMode();
});
}
}
_debugDetails->setText(getDebugDetails());
}
void HMDToolsDialog::watchWindow(QWindow* window) {
@ -247,9 +264,8 @@ void HMDWindowWatcher::windowScreenChanged(QScreen* screen) {
// if we have more than one screen, and a known hmdScreen then try to
// keep our dialog off of the hmdScreen
if (QApplication::desktop()->screenCount() > 1) {
int hmdScreenNumber = _hmdTools->_hmdScreenNumber;
// we want to use a local variable here because we are not necesarily in HMD mode
int hmdScreenNumber = OculusManager::getHMDScreen();
if (hmdScreenNumber >= 0) {
QScreen* hmdScreen = QGuiApplication::screens()[hmdScreenNumber];
if (screen == hmdScreen) {

View file

@ -34,9 +34,6 @@ signals:
public slots:
void reject();
void switchModeClicked(bool checked);
void applicationWindowScreenChanged(QScreen* screen);
void aboutToQuit();
void screenCountChanged(int newCount);
protected:
@ -46,20 +43,24 @@ protected:
private:
void centerCursorOnWidget(QWidget* widget);
void enterHDMMode();
void leaveHDMMode();
void enterHMDMode();
void leaveHMDMode();
void toggleHMDMode();
void updateUi();
QScreen* _previousScreen;
QScreen* _hmdScreen;
int _hmdScreenNumber;
QPushButton* _switchModeButton;
QLabel* _debugDetails;
QScreen* _previousScreen{ nullptr };
QScreen* _hmdScreen{ nullptr };
int _hmdScreenNumber{ -1 };
QPushButton* _switchModeButton{ nullptr };
QLabel* _debugDetails{ nullptr };
QRect _previousDialogRect;
QScreen* _previousDialogScreen;
bool _inHDMMode;
QScreen* _previousDialogScreen{ nullptr };
QString _hmdPluginName;
QString _defaultPluginName;
QHash<QWindow*, HMDWindowWatcher*> _windowWatchers;
friend class HMDWindowWatcher;
};

Some files were not shown because too many files have changed in this diff Show more